hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
31a75a5de6817edf26be7b64cce143ae2a37bc84 | 2,101 | py | Python | lib/autoconnect/example/test_server.py | simotek/autoconnect | 7d956e5bef0bcfe22b7f06061f8024df62b004ab | [
"FTL"
]
| null | null | null | lib/autoconnect/example/test_server.py | simotek/autoconnect | 7d956e5bef0bcfe22b7f06061f8024df62b004ab | [
"FTL"
]
| null | null | null | lib/autoconnect/example/test_server.py | simotek/autoconnect | 7d956e5bef0bcfe22b7f06061f8024df62b004ab | [
"FTL"
]
| null | null | null | #
# test_server.py
#
# Copyright (C) 2001-2007 Oisin Mulvihill.
# Email: [email protected]
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library (see the file LICENSE.TXT); if not,
# write to the Free Software Foundation, Inc., 59 Temple Place,
# Suite 330, Boston, MA 02111-1307 USA.
#
# Date: 2001/12/06 15:54:30
#
import sys
import socket
import xmlrpclib
import autoconnect
from SimpleXMLRPCServer import SimpleXMLRPCServer
class Person:
def greet(self, name=''):
msg = "Hello, nice to meet you"
if name:
msg = "%s %s" % (msg, name)
return msg
class Server:
"""This server runs a simple XML-RPC server and its clients
automatically find it. Its magic ;)
"""
def __init__(self):
self.server = None
self.broadcaster = None
def main(self):
print "Starting XML-RPC server http://localhost:8000"
self.server = SimpleXMLRPCServer(("localhost", 8000))
self.server.register_instance(Person())
# Start the beckon to tell clients the servers XML-RPC URI:
print "Homing beacon running. Press Ctrl-C to exit."
self.broadcaster = autoconnect.beacon("http://localhost:8000")
try:
self.server.serve_forever()
except KeyboardInterrupt,e:
pass
self.server.server_close()
if __name__ == '__main__':
server = Server()
server.main()
| 30.449275 | 79 | 0.643027 | 953 | 0.453594 | 0 | 0 | 0 | 0 | 0 | 0 | 1,260 | 0.599714 |
31a8e14670c16c328e5688e740eae92822649a17 | 6,872 | py | Python | tests/test_loop_seer.py | Kyle-Kyle/angr | 345b2131a7a67e3a6ffc7d9fd475146a3e12f837 | [
"BSD-2-Clause"
]
| 6,132 | 2015-08-06T23:24:47.000Z | 2022-03-31T21:49:34.000Z | tests/test_loop_seer.py | Kyle-Kyle/angr | 345b2131a7a67e3a6ffc7d9fd475146a3e12f837 | [
"BSD-2-Clause"
]
| 2,272 | 2015-08-10T08:40:07.000Z | 2022-03-31T23:46:44.000Z | tests/test_loop_seer.py | Kyle-Kyle/angr | 345b2131a7a67e3a6ffc7d9fd475146a3e12f837 | [
"BSD-2-Clause"
]
| 1,155 | 2015-08-06T23:37:39.000Z | 2022-03-31T05:54:11.000Z | import os
import sys
import angr
import nose.tools
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
def test_various_loops():
p = angr.Project(os.path.join(test_location, 'x86_64', 'various_loops'), auto_load_libs=False)
cfg = p.analyses.CFGFast(normalize=True)
state = p.factory.entry_state()
state.register_plugin('loop_data', angr.state_plugins.SimStateLoopData())
dummy = p.loader.main_object.get_symbol('dummy')
bvs = state.solver.BVS(dummy.name, 8 * dummy.size)
state.memory.store(dummy.rebased_addr, bvs, endness='Iend_LE')
simgr = p.factory.simulation_manager(state)
simgr.use_technique(angr.exploration_techniques.LoopSeer(cfg=cfg, functions=None, bound=None))
simgr.run()
nose.tools.assert_equal(len(simgr.deadended), 10)
nose.tools.assert_equal(len(simgr.deadended[0].loop_data.back_edge_trip_counts), 14)
for i, d in enumerate(simgr.deadended):
f = p.kb.functions.function(name='symbolic_loop')
l = p.analyses.LoopFinder(functions=[f]).loops[0]
nose.tools.assert_equal(d.loop_data.back_edge_trip_counts[l.entry.addr][0], i)
f = p.kb.functions.function(name='for_loop')
l = p.analyses.LoopFinder(functions=[f]).loops[0]
nose.tools.assert_equal(d.loop_data.back_edge_trip_counts[l.entry.addr][0], 9)
f = p.kb.functions.function(name='while_loop')
l = p.analyses.LoopFinder(functions=[f]).loops[0]
nose.tools.assert_equal(d.loop_data.back_edge_trip_counts[l.entry.addr][0], 9)
f = p.kb.functions.function(name='do_while_loop')
l = p.analyses.LoopFinder(functions=[f]).loops[0]
nose.tools.assert_equal(d.loop_data.header_trip_counts[l.entry.addr][0], 9)
f = p.kb.functions.function(name='nullify')
l = p.analyses.LoopFinder(functions=[f]).loops[0]
nose.tools.assert_equal(len(d.loop_data.back_edge_trip_counts[l.entry.addr]), 8)
nose.tools.assert_equal(d.loop_data.back_edge_trip_counts[l.entry.addr][0], 9)
f = p.kb.functions.function(name='nested_for_loop')
ol = p.analyses.LoopFinder(functions=[f]).loops[0]
il = ol.subloops[0]
nose.tools.assert_equal(d.loop_data.back_edge_trip_counts[ol.entry.addr][0], 3)
nose.tools.assert_equal(len(d.loop_data.back_edge_trip_counts[il.entry.addr]), 3)
nose.tools.assert_true(all(s == 3 for s in d.loop_data.back_edge_trip_counts[il.entry.addr]))
f = p.kb.functions.function(name='nested_while_loop')
ol = p.analyses.LoopFinder(functions=[f]).loops[0]
il = ol.subloops[0]
nose.tools.assert_equal(d.loop_data.back_edge_trip_counts[ol.entry.addr][0], 3)
nose.tools.assert_equal(len(d.loop_data.back_edge_trip_counts[il.entry.addr]), 3)
nose.tools.assert_true(all(s == 3 for s in d.loop_data.back_edge_trip_counts[il.entry.addr]))
f = p.kb.functions.function(name='nested_do_while_loop')
ol = p.analyses.LoopFinder(functions=[f]).loops[0]
il = ol.subloops[0]
nose.tools.assert_equal(d.loop_data.header_trip_counts[ol.entry.addr][0], 3)
nose.tools.assert_equal(len(d.loop_data.header_trip_counts[il.entry.addr]), 3)
nose.tools.assert_true(all(s == 3 for s in d.loop_data.header_trip_counts[il.entry.addr]))
f = p.kb.functions.function(name='break_for_loop')
l = p.analyses.LoopFinder(functions=[f]).loops[0]
nose.tools.assert_equal(d.loop_data.back_edge_trip_counts[l.entry.addr][0], 9)
f = p.kb.functions.function(name='break_do_while_loop')
l = p.analyses.LoopFinder(functions=[f]).loops[0]
nose.tools.assert_equal(d.loop_data.header_trip_counts[l.entry.addr][0], 9)
def test_loops_with_invalid_parameter():
p = angr.Project(os.path.join(test_location, 'x86_64', 'test_loops'), auto_load_libs=False)
state = p.factory.entry_state()
state.register_plugin('loop_data', angr.state_plugins.SimStateLoopData())
simgr = p.factory.simulation_manager(state)
simgr.use_technique(angr.exploration_techniques.LoopSeer(functions=['main', 0x1234], bound=None))
simgr.run()
nose.tools.assert_equal(len(simgr.deadended[0].loop_data.back_edge_trip_counts), 3)
nose.tools.assert_equal(simgr.deadended[0].loop_data.back_edge_trip_counts[0x400665][0], 10)
nose.tools.assert_equal(len(simgr.deadended[0].loop_data.back_edge_trip_counts[0x400665]), 10)
nose.tools.assert_equal(simgr.deadended[0].loop_data.back_edge_trip_counts[0x400675][0], 10)
nose.tools.assert_equal(simgr.deadended[0].loop_data.back_edge_trip_counts[0x4006b2][0], 100)
def test_arrays():
p = angr.Project(os.path.join(test_location, 'x86_64', 'test_arrays'), auto_load_libs=False)
cfg = p.analyses.CFGFast(normalize=True)
state = p.factory.entry_state()
state.register_plugin('loop_data', angr.state_plugins.SimStateLoopData())
simgr = p.factory.simulation_manager(state)
simgr.use_technique(angr.exploration_techniques.LoopSeer(cfg=cfg, functions='main', bound=None))
simgr.run()
nose.tools.assert_equal(len(simgr.deadended[0].loop_data.back_edge_trip_counts), 2)
nose.tools.assert_equal(simgr.deadended[0].loop_data.back_edge_trip_counts[0x400636][0], 26)
nose.tools.assert_equal(simgr.deadended[0].loop_data.back_edge_trip_counts[0x4005fd][0], 26)
def test_loop_limiter():
p = angr.Project(os.path.join(test_location, 'x86_64', 'test_arrays'), auto_load_libs=False)
cfg = p.analyses.CFGFast(normalize=True)
state = p.factory.entry_state()
state.register_plugin('loop_data', angr.state_plugins.SimStateLoopData())
simgr = p.factory.simulation_manager(state)
simgr.use_technique(angr.exploration_techniques.LoopSeer(cfg=cfg, functions='main', bound=5))
simgr.run()
nose.tools.assert_true('spinning' in simgr.stashes)
nose.tools.assert_equal(simgr.spinning[0].loop_data.back_edge_trip_counts[0x4005fd][0], 6)
def test_loop_limiter_constant_loop():
p = angr.Project(os.path.join(test_location, 'x86_64', 'constant_loopseer'), auto_load_libs=False)
cfg = p.analyses.CFGFast(normalize=True)
state = p.factory.entry_state()
simgr = p.factory.simulation_manager(state)
simgr.use_technique(angr.exploration_techniques.LoopSeer(cfg=cfg, functions='main', bound=5, limit_concrete_loops=False))
simgr.run()
nose.tools.assert_true(simgr.deadended[0].regs.eax.concrete)
val = simgr.deadended[0].solver.eval_one(simgr.deadended[0].regs.eax)
nose.tools.assert_equal(val, 420)
if __name__ == "__main__":
if len(sys.argv) > 1:
globals()['test_' + sys.argv[1]]()
else:
g = globals().copy()
for k, v in g.items():
if k.startswith("test_") and hasattr(v, '__call__'):
print(k)
v()
| 42.159509 | 125 | 0.709983 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 421 | 0.061263 |
31aa23acdb0243f1a7dd745198a7dc1050b82ef5 | 1,726 | py | Python | shongololo/Imet_serial.py | swyngaard/shongololo | 0d11378fb0e61cae5da0e09c9eed10fd9195f20d | [
"Apache-2.0"
]
| null | null | null | shongololo/Imet_serial.py | swyngaard/shongololo | 0d11378fb0e61cae5da0e09c9eed10fd9195f20d | [
"Apache-2.0"
]
| null | null | null | shongololo/Imet_serial.py | swyngaard/shongololo | 0d11378fb0e61cae5da0e09c9eed10fd9195f20d | [
"Apache-2.0"
]
| null | null | null | import serial , time , os
import serial.tools.list_ports as port
import logging
sho_logger = logging.getLogger("shongololo_logger")
def open_imets(devices):
"""Tries to open as many imet device serial ports as there are
:return:
a list of socket handles
"""
imet_sockets = []
for d in range(len(devices)): # Create list of imet open ports
port = str(devices["Imet" + str(d)])
try:
ser = serial.Serial(port, baudrate=57600, parity=serial.PARITY_NONE, bytesize=serial.EIGHTBITS,stopbits=serial.STOPBITS_ONE, timeout=3.0, xonxoff=False)
imet_sockets.append(ser)
sho_logger.info("\n Successfully opened Imet device on port {}".format(devices["Imet" + str(d)]))
except serial.SerialException as e:
sho_logger.error(e)
sho_logger.critical("\nFailed to open imet on port {}".format(devices["Imet" + str(d)]))
return imet_sockets
def find_imets():
"""
Finds available imet serial ports and determines which device is attached to which /dev/ path
:rtype: object
:return:
A dictionary of devices labled as" imet<number starting from 0>
"""
device_dict = {}
imets = 0
portlist = list(port.comports())
for p in portlist:
sp = str(p)
if "FT230" in sp:
path = sp.split('-')[0]
device_dict["Imet" + str(imets)] = path[:-1]
imets = imets + 1
sho_logger.info("Found an Imet device on port: %s",path)
status=0
else:
pass
if imets==0:
sho_logger.error("No Imet devices found.")
else:
sho_logger.info("Found {} Imet devices".format(imets))
return device_dict
| 31.381818 | 164 | 0.618192 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 569 | 0.329664 |
31acd399985d83122352ad6b8e7282bfbeb0a214 | 1,754 | py | Python | libs/external_libs/docutils-0.4/test/test_transforms/test_peps.py | google-code-export/django-hotclub | d783a5bbcc06816289565f3eae6d99461188ca4a | [
"MIT"
]
| 3 | 2015-12-25T14:45:36.000Z | 2016-11-28T09:58:03.000Z | libs/external_libs/docutils-0.4/test/test_transforms/test_peps.py | indro/t2c | 56482ad4aed150f29353e054db2c97b567243bf8 | [
"MIT"
]
| null | null | null | libs/external_libs/docutils-0.4/test/test_transforms/test_peps.py | indro/t2c | 56482ad4aed150f29353e054db2c97b567243bf8 | [
"MIT"
]
| null | null | null | #! /usr/bin/env python
# Author: David Goodger
# Contact: [email protected]
# Revision: $Revision: 3915 $
# Date: $Date: 2005-10-02 03:06:42 +0200 (Sun, 02 Oct 2005) $
# Copyright: This module has been placed in the public domain.
"""
Tests for docutils.transforms.peps.
"""
from __init__ import DocutilsTestSupport
from docutils.transforms.peps import TargetNotes
from docutils.parsers.rst import Parser
def suite():
parser = Parser()
s = DocutilsTestSupport.TransformTestSuite(parser)
s.generateTests(totest)
return s
totest = {}
totest['target_notes'] = ((TargetNotes,), [
["""\
No references or targets exist, therefore
no "References" section should be generated.
""",
"""\
<document source="test data">
<paragraph>
No references or targets exist, therefore
no "References" section should be generated.
"""],
["""\
A target exists, here's the reference_.
A "References" section should be generated.
.. _reference: http://www.example.org
""",
"""\
<document source="test data">
<paragraph>
A target exists, here's the \n\
<reference name="reference" refname="reference">
reference
\n\
<footnote_reference auto="1" ids="id3" refname="TARGET_NOTE: id2">
.
A "References" section should be generated.
<target ids="reference" names="reference" refuri="http://www.example.org">
<section ids="id1">
<title>
References
<footnote auto="1" ids="id2" names="TARGET_NOTE:\ id2">
<paragraph>
<reference refuri="http://www.example.org">
http://www.example.org
"""],
])
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
| 25.42029 | 78 | 0.646522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,350 | 0.769669 |
31ad8e6cefd31380ff5fa1bdef5437fd290e10f2 | 380 | py | Python | rental_property/migrations/0011_alter_rentalunit_options.py | shumwe/rental-house-management-system | f97f22afa8bc2740ed08baa387c74b93e02fac0c | [
"MIT"
]
| 1 | 2022-03-16T13:29:30.000Z | 2022-03-16T13:29:30.000Z | rental_property/migrations/0011_alter_rentalunit_options.py | shumwe/rental-house-management-system | f97f22afa8bc2740ed08baa387c74b93e02fac0c | [
"MIT"
]
| null | null | null | rental_property/migrations/0011_alter_rentalunit_options.py | shumwe/rental-house-management-system | f97f22afa8bc2740ed08baa387c74b93e02fac0c | [
"MIT"
]
| null | null | null | # Generated by Django 4.0.2 on 2022-03-15 22:43
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('rental_property', '0010_alter_rentalunit_status'),
]
operations = [
migrations.AlterModelOptions(
name='rentalunit',
options={'verbose_name_plural': 'Rental Houses'},
),
]
| 21.111111 | 61 | 0.628947 | 295 | 0.776316 | 0 | 0 | 0 | 0 | 0 | 0 | 142 | 0.373684 |
31ade7fa4d1318ceab82ad2826fc1a70514e9372 | 951 | py | Python | AxesFrame.py | Toyuri453/RSSP-Python-demo | 0adf92ad765b5a9334d7e2830611b98c8c4eb26d | [
"MIT"
]
| 1 | 2021-05-22T18:06:49.000Z | 2021-05-22T18:06:49.000Z | AxesFrame.py | Toyuri453/RSSP-Python-demo | 0adf92ad765b5a9334d7e2830611b98c8c4eb26d | [
"MIT"
]
| null | null | null | AxesFrame.py | Toyuri453/RSSP-Python-demo | 0adf92ad765b5a9334d7e2830611b98c8c4eb26d | [
"MIT"
]
| null | null | null | import Terminal
class Axes():
def __init__(self, weak_terminal : 'Terminal.CartesianPoint'):
# self._initiator_x = weak_terminal._x
# self._initiator_y = weak_terminal._y
self._initiator = Terminal.CartesianPoint(0.0, 0.0, "UWB", "initiator")
self._weak_terminal = weak_terminal
self._terminal_set = {self._initiator._terminal_name : self._initiator, self._weak_terminal._terminal_name : self._weak_terminal}
self._terminal_measuring_point_set = {'Set' : {}} #Fill Later
print(self._terminal_set)
def add_terminal(self, terminal : 'Terminal.CartesianPoint'):
print("[DATA] Add Terminal {0} ".format(terminal))
self._terminal_set[terminal._terminal_name] = terminal
def show_terminal_names(self):
for key in self._terminal_set:
print("[DATA] Terminal Name: {0}, Color: {1}".format(key, self._terminal_set[key]._terminal_color)) | 50.052632 | 138 | 0.681388 | 932 | 0.980021 | 0 | 0 | 0 | 0 | 0 | 0 | 226 | 0.237645 |
31ae9dcd46623a64820f48a76a6115e399610104 | 1,583 | py | Python | api/generate.py | almeida-matheus/playlist-reader | c09393395a7a28d104a2bad28e3af4a8c23f8adf | [
"MIT"
]
| null | null | null | api/generate.py | almeida-matheus/playlist-reader | c09393395a7a28d104a2bad28e3af4a8c23f8adf | [
"MIT"
]
| null | null | null | api/generate.py | almeida-matheus/playlist-reader | c09393395a7a28d104a2bad28e3af4a8c23f8adf | [
"MIT"
]
| null | null | null | import re
from bs4 import BeautifulSoup # beautifulsoup4
import requests # requests
HEADER = {
"User-Agent": 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'
}
def catch_info(base,pattern,str_add=''):
'''base text, pattern to search, string to increment if necessary'''
array = []
for match in pattern.finditer(base.prettify()):
array.append(str_add+match.group(1))
return list(dict.fromkeys(array)) # set(array_video)
def generate(playlist_param):
try:
link = 'https://www.youtube.com/playlist?list=' + playlist_param
response = requests.get(link, headers=HEADER)
soup = BeautifulSoup(response.text, "html.parser")
pattern_title = re.compile(r'"title":{"runs":\[{"text":"(.*?)"}\],"accessibility"')
pattern_img = re.compile(r'{"url":"https:\/\/i(.*?)?sqp=')
pattern_video = re.compile(r'{"url":"\/watch(.*?)\\')
array_title = catch_info(soup,pattern_title)
array_img = catch_info(soup,pattern_img,'https://i')
array_video = catch_info(soup,pattern_video,'https://www.youtube.com/watch')
list_array_yt = list(zip(array_title,array_img,array_video))
response = []
for i, info in enumerate(list_array_yt):
response.append({"id": i, "title": info[0], "link_img": info[1], "link_video": info[2]})
return response
except Exception as e:
print(e)
return False
# response = generate('PLMKi-ss_sEoOZw9TB4iCrevTK60uY8wg0')
# print(response) | 37.690476 | 141 | 0.651927 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 565 | 0.356917 |
31ae9eb018cffff3e9fe4e96c1a466a559766558 | 1,118 | py | Python | homeassistant/components/deconz/const.py | hoverduck/core | 9fa23f6479e9cf8aed3fa6d2980ddc98dae06e7b | [
"Apache-2.0"
]
| 1 | 2020-01-08T21:47:59.000Z | 2020-01-08T21:47:59.000Z | homeassistant/components/deconz/const.py | hoverduck/core | 9fa23f6479e9cf8aed3fa6d2980ddc98dae06e7b | [
"Apache-2.0"
]
| 39 | 2020-08-31T14:55:47.000Z | 2022-03-31T06:02:16.000Z | homeassistant/components/deconz/const.py | stodev-com-br/home-assistant | 944d7b9d7e59ad878ae9f75b80f4cd418bad8296 | [
"Apache-2.0"
]
| null | null | null | """Constants for the deCONZ component."""
import logging
LOGGER = logging.getLogger(__package__)
DOMAIN = "deconz"
CONF_BRIDGE_ID = "bridgeid"
CONF_GROUP_ID_BASE = "group_id_base"
DEFAULT_PORT = 80
DEFAULT_ALLOW_CLIP_SENSOR = False
DEFAULT_ALLOW_DECONZ_GROUPS = True
DEFAULT_ALLOW_NEW_DEVICES = True
CONF_ALLOW_CLIP_SENSOR = "allow_clip_sensor"
CONF_ALLOW_DECONZ_GROUPS = "allow_deconz_groups"
CONF_ALLOW_NEW_DEVICES = "allow_new_devices"
CONF_MASTER_GATEWAY = "master"
SUPPORTED_PLATFORMS = [
"binary_sensor",
"climate",
"cover",
"light",
"scene",
"sensor",
"switch",
]
NEW_GROUP = "groups"
NEW_LIGHT = "lights"
NEW_SCENE = "scenes"
NEW_SENSOR = "sensors"
ATTR_DARK = "dark"
ATTR_OFFSET = "offset"
ATTR_ON = "on"
ATTR_VALVE = "valve"
DAMPERS = ["Level controllable output"]
WINDOW_COVERS = ["Window covering device", "Window covering controller"]
COVER_TYPES = DAMPERS + WINDOW_COVERS
POWER_PLUGS = ["On/Off light", "On/Off plug-in unit", "Smart plug"]
SIRENS = ["Warning device"]
SWITCH_TYPES = POWER_PLUGS + SIRENS
CONF_ANGLE = "angle"
CONF_GESTURE = "gesture"
CONF_XY = "xy"
| 21.5 | 72 | 0.739714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 422 | 0.37746 |
31aed2d6bc8b935fd6033025428a672731040be9 | 1,898 | py | Python | course_app/api/views.py | maks-nurgazy/diploma-project | 66889488ffaa0269e1be2df6f6c76a3ca68a3cfb | [
"MIT"
]
| null | null | null | course_app/api/views.py | maks-nurgazy/diploma-project | 66889488ffaa0269e1be2df6f6c76a3ca68a3cfb | [
"MIT"
]
| null | null | null | course_app/api/views.py | maks-nurgazy/diploma-project | 66889488ffaa0269e1be2df6f6c76a3ca68a3cfb | [
"MIT"
]
| null | null | null | import json
from rest_framework.generics import ListAPIView, get_object_or_404
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ModelViewSet
from course_app.api.serializers import CourseSerializer
from course_app.models import Course, Enrolled
from users.api.serializers import StudentSerializer
from users.models import Student
class CourseViewSet(ModelViewSet):
queryset = Course.objects.all()
serializer_class = CourseSerializer
class StudentCourseView(ListAPIView):
serializer_class = CourseSerializer
def get_queryset(self):
user = self.request.user
enrolls = user.enrolls
courses = []
for enroll in list(enrolls.all()):
courses.append(enroll.course)
return courses
class TeacherCourseView(ListAPIView):
serializer_class = CourseSerializer
def get_queryset(self):
teacher = self.request.user
return teacher.course_list
class CourseStudentsView(ListAPIView):
serializer_class = StudentSerializer
def get_queryset(self):
course_id = self.kwargs['course_id']
course = get_object_or_404(Course, id=course_id)
students = course.students
return students
class EnrollmentView(APIView):
def get(self, request, *args, **kwargs):
student = request.user
courses = Course.objects.filter(co_class=student.profile.st_class)
response = CourseSerializer(courses, many=True).data
return Response(response)
def post(self, request, *args, **kwargs):
courses = json.loads(request.body)['courses']
student = request.user
for course_id in courses:
Enrolled.objects.create(student=student, course_id=course_id)
return Response({"detail": "Enrolled"})
def put(self, request, *args, **kwargs):
pass
| 28.757576 | 74 | 0.714436 | 1,479 | 0.779241 | 0 | 0 | 0 | 0 | 0 | 0 | 38 | 0.020021 |
31b0a81b7e41eaa16ffc9d2a726e4978e07e1575 | 9,005 | py | Python | service/repository/repository_controller.py | yutiansut/cilantro | 3fa579999e7d5a6d6041ccc7e309c667fc7eac90 | [
"Apache-2.0"
]
| 3 | 2019-09-04T12:40:33.000Z | 2021-12-28T16:33:27.000Z | service/repository/repository_controller.py | yutiansut/cilantro | 3fa579999e7d5a6d6041ccc7e309c667fc7eac90 | [
"Apache-2.0"
]
| 97 | 2018-05-29T13:27:04.000Z | 2021-11-02T11:03:33.000Z | service/repository/repository_controller.py | yutiansut/cilantro | 3fa579999e7d5a6d6041ccc7e309c667fc7eac90 | [
"Apache-2.0"
]
| 16 | 2018-04-25T11:39:21.000Z | 2019-12-16T14:37:39.000Z | import os
import json
import logging
import yaml
from flask import Blueprint, jsonify, send_file, request, redirect
from service.errors import ApiError
from utils.repository import generate_repository_path, \
list_objects_in_repository
from utils.list_dir import list_dir
repository_controller = Blueprint('repository', __name__)
repository_dir = os.environ['REPOSITORY_DIR']
metadata_file = 'meta.json'
representation_dir = 'data'
sub_object_dir = 'parts'
viewers_config = os.path.join(os.environ['CONFIG_DIR'], "viewers.yml")
with open(viewers_config, 'r', encoding="utf-8") as viewers_file:
viewers = yaml.safe_load(viewers_file)
@repository_controller.route('', methods=['GET'], strict_slashes=False)
def list_repository():
"""
List the ids of all cilantro objects in the repository.
Returns a list of the object_ids
.. :quickref: Repository Controller; List IDs of objects in the repository
**Example request**:
.. sourcecode:: http
GET /repository/ HTTP/1.1
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
["foo", "bar"]
:reqheader Accept: application/json
:resheader Content-Type: application/json
:status 200: OK
:return: JSON array containing the ids of all cilantro objects in the
repository
"""
return jsonify(list_objects_in_repository())
@repository_controller.route('/object/<path:object_id>', methods=['GET'],
strict_slashes=False)
def get_object(object_id):
"""
Retrieve an cilantro (sub)object in the repository folder.
Returns A JSON object containing metadata, representations and sub_objects
of the cilantro object. This can be a subobject as well.
.. :quickref: Repository Controller; Retrieve (sub)object in the repository
**Example request**:
.. sourcecode:: http
GET /repository/object/<object_id> HTTP/1.1
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
{
"metadata": {
"description": "[PDFs teilweise verfugbar]",
"identification": "year",
"number": "",
"ojs_id": "issue-test-188",
"volume": "",
"year": 2018
},
"representations": [
"origin"
],
"sub_objects": [
"part_0001",
"part_0002"
]
}
**Example response ERROR**:
.. sourcecode:: http
HTTP/1.1 404 NOT FOUND
{
"error": {
"code": "object_not_found",
"message": "No object with id test_object was found"
},
"success": false
}
:reqheader Accept: application/json
:param str object_id: The id of the object
:resheader Content-Type: application/json
:status 200: OK
:status 404: cilantro object was not found
:return: JSON object containing metadata, representations and sub_objects
of the cilantro (sub)object
"""
path = os.path.join(repository_dir, generate_repository_path(object_id))
if os.path.isdir(path):
with open(os.path.join(path, metadata_file)) as json_data:
metadata = json.load(json_data)
representations = list_dir(os.path.join(path, representation_dir),
sorted=True, ignore_not_found=True)
sub_objects = list_dir(os.path.join(path, sub_object_dir), sorted=True,
ignore_not_found=True)
return jsonify({
'metadata': metadata,
'representations': representations,
'sub_objects': sub_objects})
else:
raise ApiError("object_not_found",
f"No object with id {object_id} was found", 404)
@repository_controller.route('/representation/<path:object_id>/<rep_name>',
methods=['GET'], strict_slashes=False)
def get_representation(object_id, rep_name):
"""
Retrieve a representation of a cilantro (sub)object.
Returns A JSON array containing all files of the representation.
.. :quickref: Repository Controller; Retrieve a (sub)object representation
**Example request**:
.. sourcecode:: http
GET /repository/representation/<object_id>/<rep_name> HTTP/1.1
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
[
"merged.pdf"
]
**Example response ERROR**:
.. sourcecode:: http
HTTP/1.1 404 NOT FOUND
{
"error": {
"code": "representation_not_found",
"message": "No representation jpg for object with id
test_object was found"
},
"success": false
}
:reqheader Accept: application/json
:param str object_id: The id of the (sub) object
:param str rep_name: The name of the representation
:resheader Content-Type: application/json
:status 200: OK
:status 404: representation was not found
:return: JSON array containing all files of the representation
"""
path = os.path.join(repository_dir, generate_repository_path(object_id),
representation_dir, rep_name)
if os.path.isdir(path):
files = list_dir(path, sorted=True, ignore_not_found=True)
return jsonify(files)
else:
raise ApiError("representation_not_found",
f"No representation {rep_name} for object with "
f"id {object_id} was found", 404)
@repository_controller.route(
'/file/<path:object_id>/data/<path:rep_name>/<file>', methods=['GET'],
strict_slashes=False)
def get_file(object_id, rep_name, file):
"""
Retrieve a file from a representation of a cilantro (sub)object.
Returns the file's content
.. :quickref: Repository Controller; Retrieve a file from a representation
**Example request**:
.. sourcecode:: http
GET /repository/file/<object_id>/data/<rep_name>/<file> HTTP/1.1
Note that for sub-object the 'object_id' looks like:
"<parent-object_id>/part_0001"
**Example response ERROR**:
.. sourcecode:: http
HTTP/1.1 404 NOT FOUND
{
"error": {
"code": "file_not_found",
"message": "No file test_file.jpg was found in representation
jpg of object test_object"
},
"success": false
}
:reqheader Accept: *
:param str object_id: The id of the object
:param str rep_name: The name of the representation
:param str file: The name of the file
:resheader Content-Type: *
:status 200: OK
:status 404: file was not found
:return: Downloadable file
"""
path = os.path.join(repository_dir, generate_repository_path(object_id),
representation_dir, rep_name, file)
if os.path.isfile(path):
return handle_file_request(path)
else:
raise ApiError("file_not_found",
f"No file {file} was found in representation {rep_name}"
f" of object {object_id}", 404)
@repository_controller.route('/file/<path:object_id>/<file>',
methods=['GET'], strict_slashes=False)
def get_meta_file(object_id, file):
"""
Retrieve a file from the root of a cilantro (sub)object.
Returns the file's content. Files on root level are normally metdata files.
.. :quickref: Repository Controller; Retrieve metadatafile of (sub)object
**Example request**:
.. sourcecode:: http
GET /repository/file/<object_id>/<file> HTTP/1.1
**Example response ERROR**:
.. sourcecode:: http
HTTP/1.1 404 NOT FOUND
{
"error": {
"code": "file_not_found",
"message": "No file test_file.jpg was found in object
test_object"
},
"success": false
}
:reqheader Accept: application/json
:param str object_id: The id of the object
:param str file: Name of the file
:resheader Content-Type: application/json
:status 200: OK
:status 404: file was not found
:return: Downloadable file
"""
path = os.path.join(repository_dir, generate_repository_path(object_id),
file)
if os.path.isfile(path):
return send_file(path)
else:
raise ApiError("file_not_found",
f"No file {file} was found in object {object_id}", 404)
def handle_file_request(path):
if request.headers.get('Accept') == '*/*':
return send_file(path)
elif request.accept_mimetypes.accept_html:
ext = os.path.splitext(path)[1][1:]
if ext in viewers:
url = viewers[ext] + path[len(repository_dir):]
return redirect(url, code=303)
return send_file(path)
| 28.769968 | 79 | 0.608329 | 0 | 0 | 0 | 0 | 7,986 | 0.886841 | 0 | 0 | 5,882 | 0.653193 |
31b23312e6643e95278a2225ec84f190096c74fe | 69 | py | Python | src/python_import/C/cc.py | matiastang/matias-python | b7785217e5d386c01198305751ecd562259ea2b7 | [
"MIT"
]
| null | null | null | src/python_import/C/cc.py | matiastang/matias-python | b7785217e5d386c01198305751ecd562259ea2b7 | [
"MIT"
]
| null | null | null | src/python_import/C/cc.py | matiastang/matias-python | b7785217e5d386c01198305751ecd562259ea2b7 | [
"MIT"
]
| null | null | null | #!/usr/bin/python3
#coding=utf-8
def cc_debug():
print(__name__) | 13.8 | 19 | 0.681159 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 0.449275 |
31b24a17c488a7eb71036a8c8f97645c1213787a | 24,392 | py | Python | plugin.audio.podcasts/addon.py | stobb3s/kodi-addon-podcast | de834d10031e372996ec34b2cd989a6d105168a8 | [
"MIT"
]
| null | null | null | plugin.audio.podcasts/addon.py | stobb3s/kodi-addon-podcast | de834d10031e372996ec34b2cd989a6d105168a8 | [
"MIT"
]
| null | null | null | plugin.audio.podcasts/addon.py | stobb3s/kodi-addon-podcast | de834d10031e372996ec34b2cd989a6d105168a8 | [
"MIT"
]
| null | null | null | from datetime import datetime
import base64
import os
import re
import requests
import sys
import urllib.parse
import xmltodict
import xbmc
import xbmcgui
import xbmcplugin
import xbmcaddon
import xbmcvfs
__PLUGIN_ID__ = "plugin.audio.podcasts"
# see https://forum.kodi.tv/showthread.php?tid=112916
_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May",
"Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
GPODDER_API = {
"login": "%s/api/2/auth/%s/login.json",
"subscriptions": "%s/subscriptions/%s.%s"
}
settings = xbmcaddon.Addon(id=__PLUGIN_ID__)
addon_dir = xbmcvfs.translatePath(settings.getAddonInfo('path'))
class HttpStatusError(Exception):
message = ""
def __init__(self, msg):
self.message = msg
class Mediathek:
_GROUPS = 10
_ENTRIES = 10
addon_handle = None
def __init__(self):
pass
def _parse_outlines_from_opml(self, outline):
if type(outline) is not list:
outline = [outline]
entries = []
for i, o in enumerate(outline):
name = o["@title"] if "@title" in o else o["@text"]
if not name and "@xmlUrl" in o:
m = re.match(
"^https?:\/\/([^\/]+).*\/?.*\/([^\/]+)\/?$", o["@xmlUrl"])
if m:
name = "%s %s...%s" % (settings.getLocalizedString(
32053), m.groups()[0][:20], m.groups()[1][-40:])
entry = {
"path": str(i),
"name": name,
"node": []
}
if "@type" in o and o["@type"] == "rss" and "@xmlUrl" in o:
entry["params"] = [{
"rss": o["@xmlUrl"]
}]
entries.append(entry)
elif "outline" in o:
entry["node"] = self._parse_outlines_from_opml(
o["outline"])
entries.append(entry)
return entries
def _play_latest(self, url):
try:
title, description, image, items = self._load_rss(url)
item = items[0]
li = self._create_list_item(item)
xbmcplugin.setResolvedUrl(self.addon_handle, True, li)
except HttpStatusError as error:
xbmcgui.Dialog().notification(settings.getLocalizedString(32090), error.message)
def _create_list_item(self, item):
li = xbmcgui.ListItem(label=item["name"])
if "description" in item:
li.setProperty("label2", item["description"])
if "stream_url" in item:
li.setPath(item["stream_url"])
if "type" in item:
if item["type"] == "video":
li.setInfo(item["type"], {
"title": item["name"],
"plot": item["description"] if "description" in item else ""
})
elif item["type"] == "music":
li.setInfo(item["type"], {
"title": item["name"]
})
if "icon" in item and item["icon"]:
li.setArt({"icon": item["icon"]})
else:
li.setArt({"icon": os.path.join(
addon_dir, "resources", "assets", "icon.png")}
)
if "date" in item and item["date"]:
if "setDateTime" in dir(li): # available since Kodi v20
li.setDateTime(item["date"].strftime("%Y-%m-%dT%H:%M:%SZ"))
else:
pass
if "specialsort" in item:
li.setProperty("SpecialSort", item["specialsort"])
if "duration" in item and item["duration"] >= 0:
li.setInfo("music", {"duration": item["duration"]})
li.setInfo("video", {"duration": item["duration"]})
return li
def _add_list_item(self, entry, path):
def _build_param_string(params, current=""):
if params == None:
return current
for obj in params:
for name in obj:
enc_value = base64.urlsafe_b64encode(
obj[name].encode("utf-8"))
current += "?" if len(current) == 0 else "&"
current += name + "=" + str(enc_value, "utf-8")
return current
if path == "/":
path = ""
item_path = path + "/" + entry["path"]
param_string = ""
if "params" in entry:
param_string = _build_param_string(entry["params"],
current=param_string)
li = self._create_list_item(entry)
if "stream_url" in entry:
url = entry["stream_url"]
else:
url = "".join(
["plugin://", __PLUGIN_ID__, item_path, param_string])
is_folder = "node" in entry
li.setProperty("IsPlayable", "false" if is_folder else "true")
xbmcplugin.addDirectoryItem(handle=self.addon_handle,
listitem=li,
url=url,
isFolder=is_folder)
def _http_request(self, url, headers={}, method="GET"):
useragent = f"{settings.getAddonInfo('id')}/{settings.getAddonInfo('version')} (Kodi/{xbmc.getInfoLabel('System.BuildVersionShort')})"
headers["User-Agent"] = useragent
if method == "GET":
req = requests.get
elif method == "POST":
req = requests.post
else:
raise HttpStatusError(settings.getLocalizedString(32091) % method)
try:
res = req(url, headers=headers)
except requests.exceptions.RequestException as error:
xbmc.log("Request Exception: %s" % str(error), xbmc.LOGERROR)
raise HttpStatusError(settings.getLocalizedString(32092))
if res.status_code == 200:
return res.text, res.cookies
else:
raise HttpStatusError(settings.getLocalizedString(
32093) % (res.status_code, url))
def _load_rss(self, url):
def _parse_item(_ci):
if "enclosure" in _ci and "@url" in _ci["enclosure"]:
stream_url = _ci["enclosure"]["@url"]
if _ci["enclosure"]["@type"].split("/")[0] == "video":
_type = "video"
else:
_type = "music"
elif "guid" in _ci and _ci["guid"]:
# not supported yet
return None
else:
return None
if "itunes:image" in _ci and "@href" in _ci["itunes:image"]:
item_image = _ci["itunes:image"]["@href"]
else:
item_image = image
if "pubDate" in _ci:
_f = re.findall(
"(\d{1,2}) (\w{3}) (\d{4}) (\d{2}):(\d{2}):(\d{2})", _ci["pubDate"])
if _f:
_m = _MONTHS.index(_f[0][1]) + 1
pubDate = datetime(year=int(_f[0][2]), month=_m, day=int(_f[0][0]), hour=int(
_f[0][3]), minute=int(_f[0][4]), second=int(_f[0][5]))
else:
pubDate = None
if "itunes:duration" in _ci:
try:
duration = int(_ci["itunes:duration"]) #if duration is already in seconds
except:
try: #try converting HH:MM:SS or MM:SS string to integer seconds
durationList = _ci["itunes:duration"].split(":")
if len(durationList) == 3: #HH:MM:SS
duration = int(durationList[0]) * 3600 + int(durationList[1]) * 60 + int(durationList[2])
elif len(durationList) == 2: #MM:SS
duration = int(durationList[0]) * 60 + int(durationList[1])
else:
duration = -1
except:
duration = -1
else:
duration = -1
return {
"name": _ci["title"],
"description": _ci["description"] if "description" in _ci else "",
"date": pubDate,
"icon": item_image,
"stream_url": stream_url,
"type": _type,
"duration": duration
}
res, cookies = self._http_request(url)
if not res.startswith("<?xml"):
raise HttpStatusError("%s %s" % (
settings.getLocalizedString(32094), url))
else:
rss_feed = xmltodict.parse(res)
channel = rss_feed["rss"]["channel"]
title = channel["title"] if "title" in channel else ""
description = channel["description"] if "description" in channel else ""
if "image" in channel and "url" in channel["image"]:
image = channel["image"]["url"]
elif "itunes:image" in channel:
image = channel["itunes:image"]["@href"]
else:
image = None
items = []
if type(channel["item"]) is list:
for _ci in channel["item"]:
item = _parse_item(_ci)
if item is not None:
items += [item]
else:
item = _parse_item(channel["item"])
if item is not None:
items += [item]
return title, description, image, items
def _render_rss(self, path, url):
def _update_Image(path, image):
if path.startswith("/pod-"):
_p = path[5:].split("/")
settings.setSetting("group_%i_rss_%i_icon" %
(int(_p[0]), int(_p[1])), image)
try:
title, description, image, items = self._load_rss(url)
if image:
_update_Image(path, image)
except HttpStatusError as error:
xbmc.log("HTTP Status Error: %s, path=%s" %
(error.message, path), xbmc.LOGERROR)
xbmcgui.Dialog().notification(settings.getLocalizedString(32090), error.message)
else:
if len(items) > 0 and settings.getSetting("anchor") == "true":
entry = {
"path": "latest",
"name": "%s (%s)" % (title, settings.getLocalizedString(32052)),
"description": description,
"icon": image,
"date": datetime.now(),
"specialsort": "top",
"type": items[0]["type"],
"params": [
{
"play_latest": url
}
]
}
self._add_list_item(entry, path)
for item in items:
li = self._create_list_item(item)
xbmcplugin.addDirectoryItem(handle=self.addon_handle,
listitem=li,
url=item["stream_url"],
isFolder=False)
if "setDateTime" in dir(li): # available since Kodi v20
xbmcplugin.addSortMethod(
self.addon_handle, xbmcplugin.SORT_METHOD_DATE)
xbmcplugin.endOfDirectory(self.addon_handle)
def _browse(self, dir_structure, path, updateListing=False):
def _get_node_by_path(path):
if path == "/":
return dir_structure[0]
tokens = path.split("/")[1:]
node = dir_structure[0]
while len(tokens) > 0:
path = tokens.pop(0)
for n in node["node"]:
if n["path"] == path:
node = n
break
return node
node = _get_node_by_path(path)
for entry in node["node"]:
self._add_list_item(entry, path)
xbmcplugin.addSortMethod(
self.addon_handle, xbmcplugin.SORT_METHOD_FULLPATH)
xbmcplugin.addSortMethod(
self.addon_handle, xbmcplugin.SORT_METHOD_LABEL)
xbmcplugin.endOfDirectory(
self.addon_handle, updateListing=updateListing)
def _parse_opml(self, data):
opml_data = xmltodict.parse(data)
entries = self._parse_outlines_from_opml(
opml_data["opml"]["body"]["outline"])
return opml_data["opml"]["head"]["title"], entries
def _open_opml_file(self, path):
with open(path) as _opml_file:
return _opml_file.read()
def _build_dir_structure(self):
groups = []
# opml files / podcasts lists
for g in range(self._GROUPS):
if settings.getSetting("opml_file_%i" % g) == "":
continue
path = os.path.join(
addon_dir, settings.getSetting("opml_file_%i" % g))
try:
name, nodes = self._parse_opml(self._open_opml_file(path))
groups.append({
"path": "opml-%i" % g,
"name": name,
"node": nodes
})
except:
xbmc.log("Cannot read opml file %s" % path, xbmc.LOGERROR)
# rss feeds from settings
for g in range(self._GROUPS):
if settings.getSetting("group_%i_enable" % g) == "false":
continue
entries = []
for e in range(self._ENTRIES):
if settings.getSetting("group_%i_rss_%i_enable" % (g, e)) == "false":
continue
icon = settings.getSetting("group_%i_rss_%i_icon"
% (g, e))
entries += [{
"path": "%i" % e,
"name": settings.getSetting("group_%i_rss_%i_name"
% (g, e)),
"params": [
{
"rss": settings.getSetting("group_%i_rss_%i_url" % (g, e))
}
],
"icon": icon,
"node": []
}]
groups += [{
"path": "pod-%i" % g,
"name": settings.getSetting("group_%i_name" % g),
"node": entries
}]
return [
{ # root
"path": "",
"node": groups
}
]
def handle(self, argv):
def decode_param(encoded_param):
return base64.urlsafe_b64decode(encoded_param).decode("utf-8")
self.addon_handle = int(argv[1])
path = urllib.parse.urlparse(argv[0]).path.replace("//", "/")
url_params = urllib.parse.parse_qs(argv[2][1:])
if "rss" in url_params:
url = decode_param(url_params["rss"][0])
self._render_rss(path, url)
elif "play_latest" in url_params:
url = decode_param(url_params["play_latest"][0])
self._play_latest(url)
else:
_dir_structure = self._build_dir_structure()
self._browse(dir_structure=_dir_structure, path=path)
def _login_at_gpodder(self):
auth_string = "%s:%s" % (settings.getSetting(
"gpodder_username"), settings.getSetting("gpodder_password"))
b64auth = {
"Authorization": "Basic %s" % base64.urlsafe_b64encode(auth_string.encode("utf-8")).decode("utf-8")
}
response, cookies = self._http_request(
GPODDER_API["login"] % (settings.getSetting("gpodder_hostname"),
settings.getSetting("gpodder_username")), b64auth, "POST")
if "sessionid" not in cookies:
raise HttpStatusError(settings.getLocalizedString(32095))
return cookies["sessionid"]
def _load_gpodder_subscriptions(self, sessionid):
session_cookie = {
"Cookie": "%s=%s" % ("sessionid", sessionid)
}
response, cookies = self._http_request(
GPODDER_API["subscriptions"] % (settings.getSetting("gpodder_hostname"),
settings.getSetting(
"gpodder_username"),
"opml"), session_cookie)
return response
def _select_opml_file(self):
path = xbmcgui.Dialog().browse(
type=1, heading=settings.getLocalizedString(32070), shares="", mask=".xml|.opml")
if path == "":
return None, None
try:
return self._parse_opml(self._open_opml_file(path))
except:
xbmc.log("Cannot read opml file %s" % path, xbmc.LOGERROR)
return None, None
def _select_feeds(self, name, entries, freeslots):
selection = [e["name"]
for e in entries if "params" in e and len(e["params"]) == 1 and "rss" in e["params"][0]]
ok = False
while not ok:
feeds = xbmcgui.Dialog().multiselect(
settings.getLocalizedString(32071), selection)
if feeds == None:
ok = True
elif len(feeds) == 0:
xbmcgui.Dialog().ok(settings.getLocalizedString(32072),
settings.getLocalizedString(32073))
elif len(feeds) > freeslots:
xbmcgui.Dialog().ok(settings.getLocalizedString(32074),
settings.getLocalizedString(32075) % freeslots)
else:
ok = True
return feeds
def _select_target_group(self):
names = list()
freeslots = list()
for g in range(self._GROUPS):
free = sum("false" == settings.getSetting(
"group_%i_rss_%i_enable" % (g, r)) for r in range(self._ENTRIES))
freeslots.append(free)
names.append("%s %i: %s (%i %s)" %
(
settings.getLocalizedString(32000),
g + 1,
settings.getSetting("group_%i_name" % g),
free,
settings.getLocalizedString(32077)
))
selected = xbmcgui.Dialog().select(settings.getLocalizedString(32076), names)
if selected > -1 and freeslots[selected] == 0:
xbmcgui.Dialog().ok(heading=settings.getLocalizedString(32078),
message=settings.getLocalizedString(32084))
return -1, 0
elif selected == -1:
return -1, 0
else:
return selected, freeslots[selected]
def _apply_to_group(self, entries, group, feeds):
settings.setSetting("group_%i_enable" % group, "True")
i, j = 0, 0
while(i < self._ENTRIES):
if j < len(feeds) and "false" == settings.getSetting("group_%i_rss_%i_enable" % (group, i)):
settings.setSetting("group_%i_rss_%i_enable" %
(group, i), "True")
settings.setSetting("group_%i_rss_%i_name" %
(group, i), entries[feeds[j]]["name"])
settings.setSetting("group_%i_rss_%i_url" % (
group, i), entries[feeds[j]]["params"][0]["rss"])
settings.setSetting("group_%i_rss_%i_icon" % (group, i), "")
j += 1
i += 1
def _save_opml_file(self, data):
opml = xmltodict.parse(data)
filename = "%s.opml" % re.sub(
"[^A-Za-z0-9']", " ", opml["opml"]["head"]["title"])
path = xbmcgui.Dialog().browse(
type=3, heading=settings.getLocalizedString(32080), shares="")
if not path:
return None, None
try:
fullpath = "%s%s" % (path, filename)
with open(fullpath, "w") as _file:
_file.write(data)
return fullpath, filename
except:
xbmcgui.Dialog().ok(heading=settings.getLocalizedString(
32081), message=settings.getLocalizedString(32082))
return None, None
def _select_target_opml_slot(self, heading, multi=False):
selection = list()
for g in range(self._GROUPS):
filename = settings.getSetting("opml_file_%i" % g)
selection.append("%s %i%s" % (settings.getLocalizedString(
32023), g + 1, ": %s" % filename if filename else ""))
dialog = xbmcgui.Dialog().multiselect if multi else xbmcgui.Dialog().select
return dialog(heading, selection)
def import_opml(self):
# Step 1: Select target group
group, freeslots = self._select_target_group()
if group == -1:
return
# Step 2: Select file
name, entries = self._select_opml_file()
if name == None:
return
# Step 3: Select feeds
feeds = self._select_feeds(name, entries, freeslots)
if feeds == None:
return
# Step 4: Confirm
self._apply_to_group(entries, group, feeds)
# Success
xbmcgui.Dialog().notification(settings.getLocalizedString(
32085), settings.getLocalizedString(32086))
def import_gpodder_subscriptions(self):
# Step 1: Select target group
group, freeslots = self._select_target_group()
if group == -1:
return
# Step 2: query subscriptions from gPodder
try:
sessionid = self._login_at_gpodder()
name, entries = self._parse_opml(
self._load_gpodder_subscriptions(sessionid))
except HttpStatusError as error:
xbmcgui.Dialog().ok(settings.getLocalizedString(32090), error.message)
return
# Step 3: Select feeds
feeds = self._select_feeds(name, entries, freeslots)
if feeds == None:
return
# Step 4: Apply to group
self._apply_to_group(entries, group, feeds)
# Success
xbmcgui.Dialog().notification(settings.getLocalizedString(
32085), settings.getLocalizedString(32086))
def download_gpodder_subscriptions(self):
# Step 1: download subscriptions from gPodder
try:
sessionid = self._login_at_gpodder()
opml_data = self._load_gpodder_subscriptions(sessionid)
except HttpStatusError as error:
xbmcgui.Dialog().ok(settings.getLocalizedString(32090), error.message)
return
# Step 2: Save file in folder
path, filename = self._save_opml_file(opml_data)
if not path:
return
# Success
xbmcgui.Dialog().notification(settings.getLocalizedString(
32085), "%s %s" % (settings.getLocalizedString(32083), filename))
# Step 3: Select target opml slot
slot = self._select_target_opml_slot(
settings.getLocalizedString(32079))
if slot == -1:
return
settings.setSetting("opml_file_%i" % slot, path)
# Success
xbmcgui.Dialog().notification(settings.getLocalizedString(
32085), settings.getLocalizedString(32086))
def unassign_opml(self):
# Step 1: Select slots
slots = self._select_target_opml_slot(
settings.getLocalizedString(32087), multi=True)
if slots == None or len(slots) == 0:
return
# Step 2: empty slots
for slot in slots:
settings.setSetting("opml_file_%i" % slot, " ")
# Success
xbmcgui.Dialog().notification(settings.getLocalizedString(
32085), settings.getLocalizedString(32086))
if __name__ == '__main__':
mediathek = Mediathek()
if sys.argv[1] == "import_gpodder_subscriptions":
mediathek.import_gpodder_subscriptions()
elif sys.argv[1] == "import_opml":
mediathek.import_opml()
elif sys.argv[1] == "download_gpodder_subscriptions":
mediathek.download_gpodder_subscriptions()
elif sys.argv[1] == "unassign_opml":
mediathek.unassign_opml()
else:
mediathek.handle(sys.argv)
| 32.17942 | 142 | 0.508158 | 23,292 | 0.954903 | 0 | 0 | 0 | 0 | 0 | 0 | 3,660 | 0.150049 |
31b296cff20ef1265ad6524a7cf4ad27623881bc | 2,832 | py | Python | .venv/Lib/site-packages/lemoncheesecake/reporting/savingstrategy.py | yadavdeepa365/HUDL_PYTHON | e1d5d264e3748f0add18258496f5a850e16b7ee6 | [
"MIT"
]
| 34 | 2017-06-12T18:50:36.000Z | 2021-11-29T01:59:07.000Z | .venv/Lib/site-packages/lemoncheesecake/reporting/savingstrategy.py | yadavdeepa365/HUDL_PYTHON | e1d5d264e3748f0add18258496f5a850e16b7ee6 | [
"MIT"
]
| 25 | 2017-12-07T13:35:29.000Z | 2022-03-10T01:27:58.000Z | .venv/Lib/site-packages/lemoncheesecake/reporting/savingstrategy.py | yadavdeepa365/HUDL_PYTHON | e1d5d264e3748f0add18258496f5a850e16b7ee6 | [
"MIT"
]
| 4 | 2019-05-05T03:19:00.000Z | 2021-10-06T13:12:05.000Z | import re
import time
from lemoncheesecake.events import TestSessionSetupEndEvent, TestSessionTeardownEndEvent, \
TestEndEvent, SuiteSetupEndEvent, SuiteTeardownEndEvent, SuiteEndEvent, SteppedEvent
from lemoncheesecake.reporting.report import ReportLocation
DEFAULT_REPORT_SAVING_STRATEGY = "at_each_failed_test"
def _is_end_of_result_event(event):
if isinstance(event, TestEndEvent):
return ReportLocation.in_test(event.test)
if isinstance(event, SuiteSetupEndEvent):
return ReportLocation.in_suite_setup(event.suite)
if isinstance(event, SuiteTeardownEndEvent):
return ReportLocation.in_suite_teardown(event.suite)
if isinstance(event, TestSessionSetupEndEvent):
return ReportLocation.in_test_session_setup()
if isinstance(event, TestSessionTeardownEndEvent):
return ReportLocation.in_test_session_teardown()
return None
def save_at_each_suite_strategy(event, _):
return isinstance(event, SuiteEndEvent)
def save_at_each_test_strategy(event, _):
return _is_end_of_result_event(event) is not None
def save_at_each_failed_test_strategy(event, report):
location = _is_end_of_result_event(event)
if location:
result = report.get(location)
return result and result.status == "failed"
else:
return False
def save_at_each_log_strategy(event, _):
return isinstance(event, SteppedEvent)
class SaveAtInterval(object):
def __init__(self, interval):
self.interval = interval
self.last_saving = None
def __call__(self, event, report):
now = time.time()
if self.last_saving:
must_be_saved = now > self.last_saving + self.interval
if must_be_saved:
self.last_saving = now
return must_be_saved
else:
self.last_saving = now # not a saving but an initialization
return False
def make_report_saving_strategy(expression):
# first, try with a static expression
static_expressions = {
"at_end_of_tests": None, # no need to an intermediate report saving in this case
"at_each_suite": save_at_each_suite_strategy,
"at_each_test": save_at_each_test_strategy,
"at_each_failed_test": save_at_each_failed_test_strategy,
"at_each_log": save_at_each_log_strategy,
"at_each_event": save_at_each_log_strategy # deprecated since 1.4.5, "at_each_log" must be used instead
}
try:
return static_expressions[expression]
except KeyError:
pass
# second, try with "every_Ns"
m = re.compile(r"^every[_ ](\d+)s$").match(expression)
if m:
return SaveAtInterval(int(m.group(1)))
# ok... nothing we know about
raise ValueError("Invalid expression '%s' for report saving strategy" % expression)
| 31.120879 | 112 | 0.715042 | 504 | 0.177966 | 0 | 0 | 0 | 0 | 0 | 0 | 442 | 0.156073 |
31b3246b48b5cc2ea21a0461162a64666ab485f1 | 4,676 | py | Python | genshin/models/genshin/chronicle/notes.py | thesadru/genshin.py | 806b8d0dd059a06605e66dead917fdf550a552bc | [
"MIT"
]
| 63 | 2021-10-04T19:53:54.000Z | 2022-03-30T07:21:03.000Z | genshin/models/genshin/chronicle/notes.py | thesadru/genshin.py | 806b8d0dd059a06605e66dead917fdf550a552bc | [
"MIT"
]
| 17 | 2021-11-16T20:42:52.000Z | 2022-03-31T10:11:52.000Z | genshin/models/genshin/chronicle/notes.py | thesadru/genshin.py | 806b8d0dd059a06605e66dead917fdf550a552bc | [
"MIT"
]
| 10 | 2021-10-16T22:41:41.000Z | 2022-02-19T17:55:23.000Z | """Genshin chronicle notes."""
import datetime
import typing
import pydantic
from genshin.models.genshin import character
from genshin.models.model import Aliased, APIModel
__all__ = ["Expedition", "ExpeditionCharacter", "Notes"]
def _process_timedelta(time: typing.Union[int, datetime.timedelta, datetime.datetime]) -> datetime.datetime:
if isinstance(time, int):
time = datetime.datetime.fromtimestamp(time).astimezone()
if isinstance(time, datetime.timedelta):
time = datetime.datetime.now().astimezone() + time
if time < datetime.datetime(2000, 1, 1).astimezone():
delta = datetime.timedelta(seconds=int(time.timestamp()))
time = datetime.datetime.now().astimezone() + delta
time = time.replace(second=0, microsecond=0)
return time
class ExpeditionCharacter(character.BaseCharacter):
"""Expedition character."""
class Expedition(APIModel):
"""Real-Time note expedition."""
character: ExpeditionCharacter = Aliased("avatar_side_icon")
status: typing.Literal["Ongoing", "Finished"]
remaining_time: datetime.timedelta = Aliased("remained_time")
@property
def finished(self) -> bool:
"""Whether the expedition has finished."""
return self.remaining_time <= datetime.timedelta(0)
@property
def completion_time(self) -> datetime.datetime:
return datetime.datetime.now().astimezone() + self.remaining_time
@pydantic.validator("character", pre=True)
def __complete_character(cls, v: typing.Any) -> ExpeditionCharacter:
if isinstance(v, str):
return ExpeditionCharacter(icon=v) # type: ignore
return v
class TransformerTimedelta(datetime.timedelta):
"""Transformer recovery time."""
@property
def timedata(self) -> typing.Tuple[int, int, int, int]:
seconds: int = super().seconds
days: int = super().days
hour, second = divmod(seconds, 3600)
minute, second = divmod(second, 60)
return days, hour, minute, second
@property
def hours(self) -> int:
return self.timedata[1]
@property
def minutes(self) -> int:
return self.timedata[2]
@property
def seconds(self) -> int:
return self.timedata[3]
class Notes(APIModel):
"""Real-Time notes."""
current_resin: int
max_resin: int
remaining_resin_recovery_time: datetime.timedelta = Aliased("resin_recovery_time")
current_realm_currency: int = Aliased("current_home_coin")
max_realm_currency: int = Aliased("max_home_coin")
remaining_realm_currency_recovery_time: datetime.timedelta = Aliased("home_coin_recovery_time")
completed_commissions: int = Aliased("finished_task_num")
max_commissions: int = Aliased("total_task_num")
claimed_commission_reward: bool = Aliased("is_extra_task_reward_received")
remaining_resin_discounts: int = Aliased("remain_resin_discount_num")
max_resin_discounts: int = Aliased("resin_discount_num_limit")
remaining_transformer_recovery_time: typing.Optional[TransformerTimedelta]
expeditions: typing.Sequence[Expedition]
max_expeditions: int = Aliased("max_expedition_num")
@property
def resin_recovery_time(self) -> datetime.datetime:
"""The remaining time until resin recovery in seconds."""
return datetime.datetime.now().astimezone() + self.remaining_resin_recovery_time
@property
def realm_currency_recovery_time(self) -> datetime.datetime:
"""The remaining time until realm currency recovery in seconds."""
return datetime.datetime.now().astimezone() + self.remaining_realm_currency_recovery_time
@property
def transformer_recovery_time(self) -> typing.Optional[datetime.datetime]:
"""The remaining time until realm currency recovery in seconds."""
if self.remaining_transformer_recovery_time is None:
return None
remaining = datetime.datetime.now().astimezone() + self.remaining_transformer_recovery_time
return remaining
@pydantic.root_validator(pre=True)
def __flatten_transformer(cls, values: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Any]:
if "transformer_recovery_time" in values:
return values
if values.get("transformer") and values["transformer"]["obtained"]:
t = values["transformer"]["recovery_time"]
delta = TransformerTimedelta(days=t["Day"], hours=t["Hour"], minutes=t["Minute"], seconds=t["Second"])
values["remaining_transformer_recovery_time"] = delta
else:
values["remaining_transformer_recovery_time"] = None
return values
| 34.131387 | 114 | 0.700385 | 3,867 | 0.826989 | 0 | 0 | 2,470 | 0.528229 | 0 | 0 | 902 | 0.1929 |
31b5fadfad64338920b6d7434a87ab13e7d9dd53 | 1,222 | py | Python | src/the_tale/the_tale/common/bbcode/renderer.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
]
| 85 | 2017-11-21T12:22:02.000Z | 2022-03-27T23:07:17.000Z | src/the_tale/the_tale/common/bbcode/renderer.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
]
| 545 | 2017-11-04T14:15:04.000Z | 2022-03-27T14:19:27.000Z | src/the_tale/the_tale/common/bbcode/renderer.py | al-arz/the-tale | 542770257eb6ebd56a5ac44ea1ef93ff4ab19eb5 | [
"BSD-3-Clause"
]
| 45 | 2017-11-11T12:36:30.000Z | 2022-02-25T06:10:44.000Z |
import smart_imports
smart_imports.all()
class Renderer:
__slots__ = ('tags', '_renderer')
def __init__(self, tags):
self.tags = tags
self._renderer = postmarkup.create(include=[],
use_pygments=False,
annotate_links=False)
for tag in tags:
self._renderer.tag_factory.add_tag(tag.tag_class, tag.value, *tag.args, **tag.kwargs)
def render(self, *args, **kwargs):
try:
kwargs['cosmetic_replace'] = False
kwargs['encoding'] = 'utf-8'
return self._renderer.render_to_html(*args, **kwargs)
except Exception:
return 'Текст нельзя отформатировать. Возможно Вы ошиблись при вводе тегов.'
def html_command_line(self):
lines = ['<div class="pgf-bb-command-line command-line">']
for tag in self.tags:
single = 'data-single="true"' if tag.single else ''
line = f'<a class="pgf-bb-command" href="#" data-tag="{tag.value}" {single} rel="tooltip" title=\'{tag.example}\'>[{tag.value}]</a>'
lines.append(line)
lines.append('</div>')
return '\n'.join(lines)
| 31.333333 | 144 | 0.561375 | 1,233 | 0.964034 | 0 | 0 | 0 | 0 | 0 | 0 | 385 | 0.301016 |
31b8b818299558855ad3e395eb84d71adc230d9a | 6,860 | py | Python | tests/python/unittest/test_meta_schedule_custom_rule_winograd_cpu.py | psrivas2/relax | 4329af78eb1dc4c4ff8a61d3bf39aa4034e9cb2a | [
"Apache-2.0"
]
| 11 | 2021-11-02T00:49:16.000Z | 2021-11-19T02:17:00.000Z | tests/python/unittest/test_meta_schedule_custom_rule_winograd_cpu.py | psrivas2/relax | 4329af78eb1dc4c4ff8a61d3bf39aa4034e9cb2a | [
"Apache-2.0"
]
| 16 | 2021-11-02T00:17:12.000Z | 2021-11-21T20:47:52.000Z | tests/python/unittest/test_meta_schedule_custom_rule_winograd_cpu.py | psrivas2/relax | 4329af78eb1dc4c4ff8a61d3bf39aa4034e9cb2a | [
"Apache-2.0"
]
| 4 | 2021-11-05T18:17:23.000Z | 2021-11-11T06:22:00.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=missing-docstring
import tvm
from tvm import meta_schedule as ms
from tvm.ir import IRModule
from tvm.meta_schedule.testing.conv2d_winograd_cpu import conv2d_winograd_cpu
from tvm.target import Target
from tvm.tir.schedule import Schedule, Trace
def _get_mod():
# pylint: disable=invalid-name
def inline(sch: Schedule):
b1 = sch.get_block(name="A")
b2 = sch.get_block(name="B")
sch.compute_inline(block=b1)
sch.compute_inline(block=b2)
def input_tile_data_pad(sch: Schedule):
b78 = sch.get_block(name="input_tile")
l80 = sch.sample_compute_location(block=b78, decision=4)
sch.compute_at(block=b78, loop=l80, preserve_unit_loops=True)
b81 = sch.get_block(name="data_pad")
l83 = sch.sample_compute_location(block=b81, decision=-2)
sch.compute_at(block=b81, loop=l83, preserve_unit_loops=True)
def data_pack(sch: Schedule):
b18 = sch.get_block(name="data_pack")
l19, l20, l21, l22, l23, l24 = sch.get_loops(block=b18)
sch.unroll(loop=l19)
sch.unroll(loop=l20)
v25, v26 = sch.sample_perfect_tile(
n=2,
loop=l21,
max_innermost_factor=64,
decision=[9, 1],
)
l27, l28 = sch.split(loop=l21, factors=[v25, v26])
v29, v30 = sch.sample_perfect_tile(
n=2,
loop=l22,
max_innermost_factor=64,
decision=[32, 4],
)
l31, l32 = sch.split(loop=l22, factors=[v29, v30])
sch.unroll(loop=l23)
sch.unroll(loop=l24)
sch.reorder(l27, l31, l28, l32, l19, l20, l23, l24)
def bgemm(sch: Schedule):
bgemm = sch.get_block(name="bgemm")
write_cache = sch.cache_write(
block=bgemm,
write_buffer_index=0,
storage_scope="global",
)
sch.annotate(
block_or_loop=bgemm,
ann_key="meta_schedule.tiling_structure",
ann_val="SSRSRS",
)
# b33, b34 = b34, b33
l35, l36, l37, l38, l39 = sch.get_loops(block=bgemm)
v40, v41, v42, v43 = sch.sample_perfect_tile(
n=4,
loop=l35,
max_innermost_factor=64,
decision=[1, 2, 3, 1],
)
l44, l45, l46, l47 = sch.split(loop=l35, factors=[v40, v41, v42, v43])
v48, v49, v50, v51 = sch.sample_perfect_tile(
n=4,
loop=l36,
max_innermost_factor=64,
decision=[1, 1, 1, 6],
)
l52, l53, l54, l55 = sch.split(loop=l36, factors=[v48, v49, v50, v51])
v56, v57, v58, v59 = sch.sample_perfect_tile(
n=4,
loop=l37,
max_innermost_factor=64,
decision=[1, 1, 1, 9],
)
l60, l61, l62, l63 = sch.split(loop=l37, factors=[v56, v57, v58, v59])
v64, v65, v66, v67 = sch.sample_perfect_tile(
n=4,
loop=l38,
max_innermost_factor=64,
decision=[2, 1, 16, 4],
)
l68, l69, l70, l71 = sch.split(loop=l38, factors=[v64, v65, v66, v67])
v72, v73 = sch.sample_perfect_tile(
n=2,
loop=l39,
max_innermost_factor=64,
decision=[16, 8],
)
l74, l75 = sch.split(loop=l39, factors=[v72, v73])
sch.reorder(
# fmt: off
l44, l52, l60, l68,
l45, l53, l61, l69,
l74,
l46, l54, l62, l70,
l75,
l47, l55, l63, l71,
# fmt: on
)
sch.reverse_compute_at(block=write_cache, loop=l69, preserve_unit_loops=True)
def inverse(sch: Schedule):
b3 = sch.get_block(name="inverse")
l4, l5, l6, l7, l8, l9 = sch.get_loops(block=b3)
sch.unroll(loop=l4)
sch.unroll(loop=l5)
v10, v11 = sch.sample_perfect_tile(
n=2,
loop=l6,
max_innermost_factor=64,
decision=[1, 9],
)
l12, l13 = sch.split(loop=l6, factors=[v10, v11])
v14, v15 = sch.sample_perfect_tile(
n=2,
loop=l7,
max_innermost_factor=64,
decision=[2, 64],
)
l16, l17 = sch.split(loop=l7, factors=[v14, v15])
sch.unroll(loop=l8)
sch.unroll(loop=l9)
sch.reorder(l12, l16, l13, l17, l4, l5, l8, l9)
# pylint: enable=invalid-name
sch = Schedule(mod=conv2d_winograd_cpu)
inline(sch)
data_pack(sch)
input_tile_data_pad(sch)
bgemm(sch)
inverse(sch)
return sch.mod
def test_conv2d_winograd_cpu():
mod = conv2d_winograd_cpu
mod = IRModule({"main": mod})
target = Target("llvm --num-cores=16")
context = ms.TuneContext(
mod=mod,
target=target,
task_name="Custom Search Space Task",
space_generator=ms.space_generator.PostOrderApply(),
sch_rules=ms.default_config.schedule_rules(
None,
target,
),
)
context.initialize()
post_order_apply = context.space_generator
(sch,) = post_order_apply.generate_design_space(mod)
decisions = dict(
zip(
[i for i in sch.trace.insts[:-4] if i.kind.name.startswith("Sample")],
[
# data_pack
[9, 1],
[32, 4],
# input_tile
4,
# data_pad
-2,
# inverse
[1, 9],
[2, 64],
# bgemm
[1, 2, 3, 1],
[1, 1, 1, 6],
[1, 1, 1, 9],
[2, 1, 16, 4],
[16, 8],
],
)
)
trace = Trace(sch.trace.insts[:-4], decisions=decisions)
sch = Schedule(mod=mod)
trace.apply_to_schedule(sch, remove_postproc=False)
answer = sch.mod
expected = _get_mod()
tvm.ir.assert_structural_equal(answer, expected)
if __name__ == "__main__":
test_conv2d_winograd_cpu()
| 32.358491 | 85 | 0.559329 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,126 | 0.16414 |
31b9a252afcfedd5b4624ba9c3b0dea6a7505e81 | 10,253 | py | Python | model_zoo/official/cv/FCN8s/src/nets/FCN8s.py | LottieWang/mindspore | 1331c7e432fb691d1cfa625ab7cc7451dcfc7ce0 | [
"Apache-2.0"
]
| null | null | null | model_zoo/official/cv/FCN8s/src/nets/FCN8s.py | LottieWang/mindspore | 1331c7e432fb691d1cfa625ab7cc7451dcfc7ce0 | [
"Apache-2.0"
]
| null | null | null | model_zoo/official/cv/FCN8s/src/nets/FCN8s.py | LottieWang/mindspore | 1331c7e432fb691d1cfa625ab7cc7451dcfc7ce0 | [
"Apache-2.0"
]
| null | null | null | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import mindspore.nn as nn
from mindspore.ops import operations as P
class FCN8s(nn.Cell):
def __init__(self, n_class):
super().__init__()
self.n_class = n_class
self.conv1 = nn.SequentialCell(
nn.Conv2d(in_channels=3, out_channels=64,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(64),
nn.ReLU(),
nn.Conv2d(in_channels=64, out_channels=64,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(64),
nn.ReLU()
)
self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)
self.conv2 = nn.SequentialCell(
nn.Conv2d(in_channels=64, out_channels=128,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(in_channels=128, out_channels=128,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(128),
nn.ReLU()
)
self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2)
self.conv3 = nn.SequentialCell(
nn.Conv2d(in_channels=128, out_channels=256,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(in_channels=256, out_channels=256,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(256),
nn.ReLU(),
nn.Conv2d(in_channels=256, out_channels=256,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(256),
nn.ReLU()
)
self.pool3 = nn.MaxPool2d(kernel_size=2, stride=2)
self.conv4 = nn.SequentialCell(
nn.Conv2d(in_channels=256, out_channels=512,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(in_channels=512, out_channels=512,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(in_channels=512, out_channels=512,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(512),
nn.ReLU()
)
self.pool4 = nn.MaxPool2d(kernel_size=2, stride=2)
self.conv5 = nn.SequentialCell(
nn.Conv2d(in_channels=512, out_channels=512,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(in_channels=512, out_channels=512,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(512),
nn.ReLU(),
nn.Conv2d(in_channels=512, out_channels=512,
kernel_size=3, weight_init='xavier_uniform'),
nn.BatchNorm2d(512),
nn.ReLU()
)
self.pool5 = nn.MaxPool2d(kernel_size=2, stride=2)
self.conv6 = nn.SequentialCell(
nn.Conv2d(in_channels=512, out_channels=4096,
kernel_size=7, weight_init='xavier_uniform'),
nn.BatchNorm2d(4096),
nn.ReLU(),
)
self.conv7 = nn.SequentialCell(
nn.Conv2d(in_channels=4096, out_channels=4096,
kernel_size=1, weight_init='xavier_uniform'),
nn.BatchNorm2d(4096),
nn.ReLU(),
)
self.score_fr = nn.Conv2d(in_channels=4096, out_channels=self.n_class,
kernel_size=1, weight_init='xavier_uniform')
self.upscore2 = nn.Conv2dTranspose(in_channels=self.n_class, out_channels=self.n_class,
kernel_size=4, stride=2, weight_init='xavier_uniform')
self.score_pool4 = nn.Conv2d(in_channels=512, out_channels=self.n_class,
kernel_size=1, weight_init='xavier_uniform')
self.upscore_pool4 = nn.Conv2dTranspose(in_channels=self.n_class, out_channels=self.n_class,
kernel_size=4, stride=2, weight_init='xavier_uniform')
self.score_pool3 = nn.Conv2d(in_channels=256, out_channels=self.n_class,
kernel_size=1, weight_init='xavier_uniform')
self.upscore8 = nn.Conv2dTranspose(in_channels=self.n_class, out_channels=self.n_class,
kernel_size=16, stride=8, weight_init='xavier_uniform')
self.shape = P.Shape()
self.cast = P.Cast()
def set_model_parallel_shard_strategy(self, device_num):
self.conv2d_strategy = ((1, 1, 1, device_num), (1, 1, 1, 1))
self.bn_strategy = ((1, 1, 1, device_num), (1,), (1,), (1,), (1,))
self.relu_strategy = ((1, 1, 1, device_num),)
self.maxpool_strategy = ((1, 1, 1, device_num),)
self.add_strategy = ((1, 1, 1, device_num), (1, 1, 1, device_num))
self.conv1.cell_list[0].conv2d.shard(self.conv2d_strategy)
self.conv1.cell_list[1].bn_train.shard(self.bn_strategy)
self.conv1.cell_list[2].relu.shard(self.relu_strategy)
self.conv1.cell_list[3].conv2d.shard(self.conv2d_strategy)
self.conv1.cell_list[4].bn_train.shard(self.bn_strategy)
self.conv1.cell_list[5].relu.shard(self.relu_strategy)
self.pool1.max_pool.shard(self.maxpool_strategy)
self.conv2.cell_list[0].conv2d.shard(self.conv2d_strategy)
self.conv2.cell_list[1].bn_train.shard(self.bn_strategy)
self.conv2.cell_list[2].relu.shard(self.relu_strategy)
self.conv2.cell_list[3].conv2d.shard(self.conv2d_strategy)
self.conv2.cell_list[4].bn_train.shard(self.bn_strategy)
self.conv2.cell_list[5].relu.shard(self.relu_strategy)
self.pool2.max_pool.shard(self.maxpool_strategy)
self.conv3.cell_list[0].conv2d.shard(self.conv2d_strategy)
self.conv3.cell_list[1].bn_train.shard(self.bn_strategy)
self.conv3.cell_list[2].relu.shard(self.relu_strategy)
self.conv3.cell_list[3].conv2d.shard(self.conv2d_strategy)
self.conv3.cell_list[4].bn_train.shard(self.bn_strategy)
self.conv3.cell_list[5].relu.shard(self.relu_strategy)
self.conv3.cell_list[6].conv2d.shard(self.conv2d_strategy)
self.conv3.cell_list[7].bn_train.shard(self.bn_strategy)
self.conv3.cell_list[8].relu.shard(self.relu_strategy)
self.pool3.max_pool.shard(self.maxpool_strategy)
self.conv4.cell_list[0].conv2d.shard(self.conv2d_strategy)
self.conv4.cell_list[1].bn_train.shard(self.bn_strategy)
self.conv4.cell_list[2].relu.shard(self.relu_strategy)
self.conv4.cell_list[3].conv2d.shard(self.conv2d_strategy)
self.conv4.cell_list[4].bn_train.shard(self.bn_strategy)
self.conv4.cell_list[5].relu.shard(self.relu_strategy)
self.conv4.cell_list[6].conv2d.shard(self.conv2d_strategy)
self.conv4.cell_list[7].bn_train.shard(self.bn_strategy)
self.conv4.cell_list[8].relu.shard(self.relu_strategy)
self.pool4.max_pool.shard(self.maxpool_strategy)
self.conv5.cell_list[0].conv2d.shard(self.conv2d_strategy)
self.conv5.cell_list[1].bn_train.shard(self.bn_strategy)
self.conv5.cell_list[2].relu.shard(self.relu_strategy)
self.conv5.cell_list[3].conv2d.shard(self.conv2d_strategy)
self.conv5.cell_list[4].bn_train.shard(self.bn_strategy)
self.conv5.cell_list[5].relu.shard(self.relu_strategy)
self.conv5.cell_list[6].conv2d.shard(self.conv2d_strategy)
self.conv5.cell_list[7].bn_train.shard(self.bn_strategy)
self.conv5.cell_list[8].relu.shard(self.relu_strategy)
self.pool5.max_pool.shard(((1, 1, 1, device_num),))
self.conv6.cell_list[0].conv2d.shard(self.conv2d_strategy)
self.conv6.cell_list[1].bn_train.shard(self.bn_strategy)
self.conv6.cell_list[2].relu.shard(self.relu_strategy)
self.conv7.cell_list[0].conv2d.shard(self.conv2d_strategy)
self.conv7.cell_list[1].bn_train.shard(self.bn_strategy)
self.conv7.cell_list[2].relu.shard(self.relu_strategy)
self.score_fr.conv2d.shard(self.conv2d_strategy)
self.upscore2.conv2d_transpose.shard(self.conv2d_strategy)
self.score_pool4.conv2d.shard(self.conv2d_strategy)
self.upscore_pool4.conv2d_transpose.shard(self.conv2d_strategy)
self.score_pool3.conv2d.shard(self.conv2d_strategy)
self.upscore8.conv2d_transpose.shard(self.conv2d_strategy)
self.add1.shard(self.add_strategy)
self.add2.shard(self.add_strategy)
def construct(self, x):
x1 = self.conv1(x)
p1 = self.pool1(x1)
x2 = self.conv2(p1)
p2 = self.pool2(x2)
x3 = self.conv3(p2)
p3 = self.pool3(x3)
x4 = self.conv4(p3)
p4 = self.pool4(x4)
x5 = self.conv5(p4)
p5 = self.pool5(x5)
x6 = self.conv6(p5)
x7 = self.conv7(x6)
sf = self.score_fr(x7)
u2 = self.upscore2(sf)
s4 = self.score_pool4(p4)
f4 = s4 + u2
u4 = self.upscore_pool4(f4)
s3 = self.score_pool3(p3)
f3 = s3 + u4
out = self.upscore8(f3)
return out
| 48.592417 | 103 | 0.609968 | 9,494 | 0.925973 | 0 | 0 | 0 | 0 | 0 | 0 | 1,003 | 0.097825 |
31b9c954ad4a3b83232dea2da9aa347a1d45c611 | 1,208 | py | Python | scripts/seqrun_processing/sync_seqrun_data_from_remote.py | imperial-genomics-facility/data-management-python | 7b867d8d4562a49173d0b823bdc4bf374a3688f0 | [
"Apache-2.0"
]
| 7 | 2018-05-08T07:28:08.000Z | 2022-02-21T14:56:49.000Z | scripts/seqrun_processing/sync_seqrun_data_from_remote.py | imperial-genomics-facility/data-management-python | 7b867d8d4562a49173d0b823bdc4bf374a3688f0 | [
"Apache-2.0"
]
| 15 | 2021-08-19T12:32:20.000Z | 2022-02-09T19:52:51.000Z | scripts/seqrun_processing/sync_seqrun_data_from_remote.py | imperial-genomics-facility/data-management-python | 7b867d8d4562a49173d0b823bdc4bf374a3688f0 | [
"Apache-2.0"
]
| 2 | 2017-05-12T15:20:10.000Z | 2020-05-07T16:25:11.000Z | #!/usr/bin/env python
import argparse
from igf_data.task_tracking.igf_slack import IGF_slack
from igf_data.process.data_transfer.sync_seqrun_data_on_remote import Sync_seqrun_data_from_remote
parser = argparse.ArgumentParser()
parser.add_argument('-r','--remote_server', required=True, help='Remote server address')
parser.add_argument('-p','--remote_base_path', required=True, help='Seqrun directory path in remote dir')
parser.add_argument('-d','--dbconfig', required=True, help='Database configuration file path')
parser.add_argument('-o','--output_dir', required=True, help='Local output directory path')
parser.add_argument('-n','--slack_config', required=True, help='Slack configuration file path')
args = parser.parse_args()
remote_server = args.remote_server
remote_base_path = args.remote_base_path
dbconfig = args.dbconfig
output_dir = args.output_dir
slack_config = args.slack_config
if __name__=='__main__':
try:
slack_obj=IGF_slack(slack_config=slack_config)
## FIX ME
except Exception as e:
message = 'Error while syncing sequencing run directory from remote server: {0}'.format(e)
slack_obj.post_message_to_channel(message,reaction='fail')
raise ValueError(message)
| 43.142857 | 105 | 0.78394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 369 | 0.305464 |
31ba54fbc1b1ed1f7e053b99d91ae0c4606e4d0f | 314 | py | Python | pydashlite/arrays/sum_by.py | glowlex/pydashlite | cbc96478fa610aeae95b5584b406aa0c35b89db1 | [
"MIT"
]
| null | null | null | pydashlite/arrays/sum_by.py | glowlex/pydashlite | cbc96478fa610aeae95b5584b406aa0c35b89db1 | [
"MIT"
]
| null | null | null | pydashlite/arrays/sum_by.py | glowlex/pydashlite | cbc96478fa610aeae95b5584b406aa0c35b89db1 | [
"MIT"
]
| null | null | null | from typing import Callable, Iterable, TypeVar
T = TypeVar('T')
Num = TypeVar('Num', int, float)
def sumBy(array: Iterable[T], iteratee: Callable[[T], Num] = None, start: Num = 0) -> Num:
if iteratee is None:
return sum([y for y in array], start)
return sum([iteratee(y) for y in array], start)
| 28.545455 | 90 | 0.646497 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0.025478 |
31ba5edab7671efdaef9d530b3fadbb3b92a5249 | 344 | py | Python | Ten_Most_Common_Words.py | mcjohnchristopher/Python_Samples | 738f3b7d9baa7f4e396647f380118eba66ea645c | [
"CC0-1.0"
]
| null | null | null | Ten_Most_Common_Words.py | mcjohnchristopher/Python_Samples | 738f3b7d9baa7f4e396647f380118eba66ea645c | [
"CC0-1.0"
]
| null | null | null | Ten_Most_Common_Words.py | mcjohnchristopher/Python_Samples | 738f3b7d9baa7f4e396647f380118eba66ea645c | [
"CC0-1.0"
]
| null | null | null | fhand = (romeo.txt)
counts = dict()
for line in fhand:
words = line.split()
for word in words():
count word = count.get(word, 0) + 1
st = list
for Key,Value in count.items():
st.append((val,key))
st.sort(reverse = true)
for val,key in st[:10]:
print key, val
#Using Sorted Function
sorted [(v,k) for k,v in c.items()]: | 20.235294 | 38 | 0.622093 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 23 | 0.06686 |
31bc16b72d793c3793926c951b8b26eb5b85e70b | 43 | py | Python | python/testData/intentions/convertLambdaToFunction.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
]
| 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/intentions/convertLambdaToFunction.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
]
| 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/intentions/convertLambdaToFunction.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
]
| 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | newlist = lambda x<caret>, y: (x+y)/y
x = 1 | 21.5 | 37 | 0.581395 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
31bc57d9152ec85878460b40dfe42e1115dfd96e | 615 | py | Python | src/grpc_client.py | thealphadollar/py-grpcio-pg | aed6de047e4843f3bdf86184a0a2c5a1ecd6beb1 | [
"MIT"
]
| null | null | null | src/grpc_client.py | thealphadollar/py-grpcio-pg | aed6de047e4843f3bdf86184a0a2c5a1ecd6beb1 | [
"MIT"
]
| null | null | null | src/grpc_client.py | thealphadollar/py-grpcio-pg | aed6de047e4843f3bdf86184a0a2c5a1ecd6beb1 | [
"MIT"
]
| null | null | null | import grpc
from consts import PORT, SERVER_CERT
from grpc_generated_files import api_pb2, api_pb2_grpc
def main(stub):
request = api_pb2.ApiRequest(
name="Shivam",
message="Hey there!"
)
response = stub.ApiEndpoint(request)
print(response)
if __name__ == "__main__":
with open(SERVER_CERT, 'rb') as f:
server_cert = f.read()
creds = grpc.ssl_channel_credentials(server_cert)
# the server IP should be in the common name of the certificate
channel = grpc.secure_channel(f'localhost:{PORT}', creds)
stub = api_pb2_grpc.ApiStub(channel)
main(stub)
| 25.625 | 67 | 0.692683 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 116 | 0.188618 |
31bd00426914dc97a2be62873f494b2813748a77 | 1,432 | py | Python | 0188.Best Time to Buy and Sell Stock IV/solution.py | zhlinh/leetcode | 6dfa0a4df9ec07b2c746a13c8257780880ea04af | [
"Apache-2.0"
]
| null | null | null | 0188.Best Time to Buy and Sell Stock IV/solution.py | zhlinh/leetcode | 6dfa0a4df9ec07b2c746a13c8257780880ea04af | [
"Apache-2.0"
]
| null | null | null | 0188.Best Time to Buy and Sell Stock IV/solution.py | zhlinh/leetcode | 6dfa0a4df9ec07b2c746a13c8257780880ea04af | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: [email protected]
Version: 0.0.1
Created Time: 2016-03-23
Last_modify: 2016-03-23
******************************************
'''
'''
Say you have an array for which the ith element is
the price of a given stock on day i.
Design an algorithm to find the maximum profit.
You may complete at most k transactions.
Note:
You may not engage in multiple transactions at the same time
(ie, you must sell the stock before you buy again).
Credits:
Special thanks to @Freezen for adding this problem and creating all test cases.
'''
class Solution(object):
def maxProfit(self, k, prices):
"""
:type k: int
:type prices: List[int]
:rtype: int
"""
n = len(prices)
if n == 0:
return 0
if k > n // 2:
return self.quickSolve(prices)
hold = [-2 ** 31] * (k + 1)
release = [0] * (k + 1)
for p in prices:
for i in range(k):
hold[i+1] = max(hold[i+1], release[i] - p)
release[i+1] = max(release[i+1], hold[i+1] + p)
return release[k]
def quickSolve(self, prices):
res = 0
for i in range(1, len(prices)):
if prices[i] - prices[i-1] > 0:
res += prices[i] - prices[i-1]
return res
| 26.518519 | 79 | 0.515363 | 760 | 0.530726 | 0 | 0 | 0 | 0 | 0 | 0 | 753 | 0.525838 |
31bdffc8c81e843699509af2486f317c1a1c36b7 | 35,087 | py | Python | gips/gistmodel/post_processing.py | accsc/gips | 6b20b2b0fa76ee24b04237b1edd5c8a26738d460 | [
"MIT"
]
| 1 | 2021-04-24T10:29:39.000Z | 2021-04-24T10:29:39.000Z | gips/gistmodel/post_processing.py | accsc/gips | 6b20b2b0fa76ee24b04237b1edd5c8a26738d460 | [
"MIT"
]
| null | null | null | gips/gistmodel/post_processing.py | accsc/gips | 6b20b2b0fa76ee24b04237b1edd5c8a26738d460 | [
"MIT"
]
| 2 | 2021-02-16T14:18:59.000Z | 2021-06-04T05:09:22.000Z | import numpy as np
import copy
from gips import FLOAT
from gips import DOUBLE
class post_processing(object):
def __init__(self, fitter, x, pairs=False, prefix=None):
self.fitter = fitter
self.x = x
self.pairs = pairs
self.case = 0
score_dict = { 4 : self.parms4,
5 : self.parms5,
6 : self.parms6
}
mode_dict = { 0 : self.mode0,
1 : self.mode1,
3 : self.mode3,
4 : self.mode4,
5 : self.mode5,
6 : self.mode6,
7 : self.mode7
}
self.score = score_dict[self.fitter.parms]
self.process = mode_dict[self.fitter.mode]
self.prefix = prefix
if type(self.prefix)==type(None) \
or self.prefix=="":
self.prefix = ""
else:
self.prefix = "%s" %self.prefix
self.set_x(self.x)
self.set_case(0)
self.process_rec = False
self.process_cplx = False
self.process_lig = False
def set_x(self, x):
self.x = copy.copy(x)
### Apply the solution to the scoring function
self.fitter.gist_functional(self.x)
self.fitter._f_process(self.x)
def set_case(self, case):
self.case = case
self.name = self.fitter.name[case]
### |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~|
### |OVERVIEW OF THE DATA STRUCTURE IN THE FITTER OBJECT|
### |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~|
###
### Experimental data stored with gdat_fit_lib
### ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### self.dg = np.zeros(self.N_case, dtype=DOUBLE)
### self.dh = np.zeros(self.N_case, dtype=DOUBLE)
### self.ds = np.zeros(self.N_case, dtype=DOUBLE)
###
###
### GIST data generated with gdat_fit_lib (receptor)
### ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### self.E = np.zeros((self.N_rec, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.S = np.zeros((self.N_rec, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.g = np.zeros((self.N_rec, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.w = np.zeros(self.N_pos, dtype=DOUBLE)
### self.vol = np.zeros((self.N_pos, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### Which pose belongs to which receptor/gistdata
### self.ind_rec = np.zeros(self.N_pos, dtype=np.int32)
### Which pose belongs to which case
### self.ind_case = np.zeros(self.N_pos, dtype=np.int32)
###
###
### GIST data generated with gdat_fit_lib (complex)
### ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### self.E_cplx = np.zeros((self.N_cplx, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.S_cplx = np.zeros((self.N_cplx, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.g_cplx = np.zeros((self.N_cplx, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.w_cplx = np.zeros(self.N_cplx, dtype=DOUBLE)
### self.vol_cplx = np.zeros((self.N_cplx, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.ind_rec_cplx = np.arange(self.N_cplx, dtype=np.int32)
### self.ind_case_cplx = np.zeros(self.N_cplx, dtype=np.int32)
###
###
### GIST data generated with gdat_fit_lib (ligand)
### ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### self.E_lig = np.zeros((self.N_lig, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.S_lig = np.zeros((self.N_lig, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.g_lig = np.zeros((self.N_lig, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.w_lig = np.zeros(self.N_lig, dtype=DOUBLE)
### self.vol_lig = np.zeros((self.N_lig, self.maxdim[0], self.maxdim[1], self.maxdim[2]), dtype=DOUBLE)
### self.ind_rec_lig = np.arange(self.N_lig, dtype=np.int32)
### self.ind_case_lig = np.zeros(self.N_lig, dtype=np.int32)
###
def mode0(self, callback=None):
### The receptor
### ~~~~~~~~~~~~
if self.process_rec:
valid_poses = np.where(self.fitter.ind_case==self.case)[0]
valid_recep = self.fitter.ind_rec[valid_poses]
i=0
for pose, recep in zip(valid_poses, valid_recep):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E[recep],
self.fitter.S[recep],
self.fitter.g[recep],
self.fitter.vol[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[0],
"C" : self.x[-1]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat[recep],
self.fitter.pdat[pose],
prefix="%s.%d.%s" %(self.name, i, "rec"),
**kwargs)
i += 1
def mode1(self, callback=None):
### The receptor
### ~~~~~~~~~~~~
if self.process_rec:
valid_poses = np.where(self.fitter.ind_case==self.case)[0]
valid_recep = self.fitter.ind_rec[valid_poses]
i=0
for pose, recep in zip(valid_poses, valid_recep):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E[recep],
self.fitter.S[recep],
self.fitter.g[recep],
self.fitter.vol[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[0],
"C" : self.x[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat[recep],
self.fitter.pdat[pose],
prefix="%s.%d.%s" %(self.name, i, "rec"),
**kwargs)
i += 1
def mode2(self, callback=None):
pass
def mode3(self, callback=None):
if not self.pairs:
### The receptor
### ~~~~~~~~~~~~
if self.process_rec:
valid_poses = np.where(self.fitter.ind_case==self.case)[0]
valid_recep = self.fitter.ind_rec[valid_poses]
i=0
for pose, recep in zip(valid_poses, valid_recep):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E[recep],
self.fitter.S[recep],
self.fitter.g[recep],
self.fitter.vol[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[0],
"C" : self.x[-1]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat[recep],
self.fitter.pdat[pose],
prefix="%s.%d.%s" %(self.name, i, "rec"),
**kwargs)
i += 1
### The complex
### ~~~~~~~~~~~
if self.process_cplx:
valid_poses_cplx = np.where(self.fitter.ind_case_cplx==self.case)[0]
valid_recep_cplx = self.fitter.ind_rec_cplx[valid_poses_cplx]
i=0
for pose, recep in zip(valid_poses_cplx, valid_recep_cplx):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_cplx[recep],
self.fitter.S_cplx[recep],
self.fitter.g_cplx[recep],
self.fitter.vol_cplx[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : self.x[-1]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_cplx[recep],
self.fitter.pdat_cplx[pose],
prefix="%s.%d.%s" %(self.name, i, "cplx"),
**kwargs)
i += 1
### The ligand
### ~~~~~~~~~~
if self.process_lig:
valid_poses_lig = np.where(self.fitter.ind_case_lig==self.case)[0]
valid_recep_lig = self.fitter.ind_rec_lig[valid_poses_lig]
i=0
for pose, recep in zip(valid_poses_lig, valid_recep_lig):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_lig[recep],
self.fitter.S_lig[recep],
self.fitter.g_lig[recep],
self.fitter.vol_lig[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : self.x[-1]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_lig[recep],
self.fitter.pdat_lig[pose],
prefix="%s.%d.%s" %(self.name, i, "lig"),
**kwargs)
i += 1
def mode4(self, callback=None):
if not self.pairs:
### The receptor
### ~~~~~~~~~~~~
if self.process_rec:
valid_poses = np.where(self.fitter.ind_case==self.case)[0]
valid_recep = self.fitter.ind_rec[valid_poses]
i=0
for pose, recep in zip(valid_poses, valid_recep):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E[recep],
self.fitter.S[recep],
self.fitter.g[recep],
self.fitter.vol[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[0],
"C" : self.x[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat[recep],
self.fitter.pdat[pose],
prefix="%s.%d.%s" %(self.name, i, "rec"),
**kwargs)
i += 1
### The complex
### ~~~~~~~~~~~
if self.process_cplx:
valid_poses_cplx = np.where(self.fitter.ind_case_cplx==self.case)[0]
valid_recep_cplx = self.fitter.ind_rec_cplx[valid_poses_cplx]
i=0
for pose, recep in zip(valid_poses_cplx, valid_recep_cplx):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_cplx[recep],
self.fitter.S_cplx[recep],
self.fitter.g_cplx[recep],
self.fitter.vol_cplx[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : self.x[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_cplx[recep],
self.fitter.pdat_cplx[pose],
prefix="%s.%d.%s" %(self.name, i, "cplx"),
**kwargs)
i += 1
### The ligand
### ~~~~~~~~~~
if self.process_lig:
valid_poses_lig = np.where(self.fitter.ind_case_lig==self.case)[0]
valid_recep_lig = self.fitter.ind_rec_lig[valid_poses_lig]
i=0
for pose, recep in zip(valid_poses_lig, valid_recep_lig):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_lig[recep],
self.fitter.S_lig[recep],
self.fitter.g_lig[recep],
self.fitter.vol_lig[pose],
self.x)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : self.x[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_lig[recep],
self.fitter.pdat_lig[pose],
prefix="%s.%d.%s" %(self.name, i, "lig"),
**kwargs)
i += 1
def mode5(self, callback=None):
if not self.pairs:
### The receptor
### ~~~~~~~~~~~~
if self.process_rec:
valid_poses = np.where(self.fitter.ind_case==self.case)[0]
valid_recep = self.fitter.ind_rec[valid_poses]
_xr = np.zeros(self.fitter.parms, dtype=DOUBLE)
_xr[:-2] = self.x[:-4]
_xr[-2] = self.x[-4]
i=0
for pose, recep in zip(valid_poses, valid_recep):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E[recep],
self.fitter.S[recep],
self.fitter.g[recep],
self.fitter.vol[pose],
_xr)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[0],
"C" : self.x[-1]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat[recep],
self.fitter.pdat[pose],
prefix="%s.%d.%s" %(self.name, i, "rec"),
**kwargs)
i += 1
### The complex
### ~~~~~~~~~~~
if self.process_cplx:
valid_poses_cplx = np.where(self.fitter.ind_case_cplx==self.case)[0]
valid_recep_cplx = self.fitter.ind_rec_cplx[valid_poses_cplx]
_xc = np.zeros(self.fitter.parms, dtype=DOUBLE)
_xc[:-2] = self.x[:-4]
_xc[-2] = self.x[-3]
i=0
for pose, recep in zip(valid_poses_cplx, valid_recep_cplx):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_cplx[recep],
self.fitter.S_cplx[recep],
self.fitter.g_cplx[recep],
self.fitter.vol_cplx[pose],
_xc)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : self.x[-1]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_cplx[recep],
self.fitter.pdat_cplx[pose],
prefix="%s.%d.%s" %(self.name, i, "cplx"),
**kwargs)
i += 1
### The ligand
### ~~~~~~~~~~
if self.process_lig:
_xl = np.zeros(self.fitter.parms, dtype=DOUBLE)
_xl[:-2] = self.x[:-4]
_xl[-2] = self.x[-2]
valid_poses_lig = np.where(self.fitter.ind_case_lig==self.case)[0]
valid_recep_lig = self.fitter.ind_rec_lig[valid_poses_lig]
i=0
for pose, recep in zip(valid_poses_lig, valid_recep_lig):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_lig[recep],
self.fitter.S_lig[recep],
self.fitter.g_lig[recep],
self.fitter.vol_lig[pose],
_xl)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : self.x[-1]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_lig[recep],
self.fitter.pdat_lig[pose],
prefix="%s.%d.%s" %(self.name, i, "lig"),
**kwargs)
i += 1
def mode6(self, callback=None):
if not self.pairs:
### The receptor
### ~~~~~~~~~~~~
if self.process_rec:
valid_poses = np.where(self.fitter.ind_case==self.case)[0]
valid_recep = self.fitter.ind_rec[valid_poses]
_xr = np.zeros(self.fitter.parms+1, dtype=DOUBLE)
_xr[:-3] = self.x[:-5]
_xr[-3] = self.x[-5]
i=0
for pose, recep in zip(valid_poses, valid_recep):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E[recep],
self.fitter.S[recep],
self.fitter.g[recep],
self.fitter.vol[pose],
_xr)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[0],
"C" : _xr[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat[recep],
self.fitter.pdat[pose],
prefix="%s.%d.%s" %(self.name, i, "rec"),
**kwargs)
i += 1
### The complex
### ~~~~~~~~~~~
if self.process_cplx:
valid_poses_cplx = np.where(self.fitter.ind_case_cplx==self.case)[0]
valid_recep_cplx = self.fitter.ind_rec_cplx[valid_poses_cplx]
_xc = np.zeros(self.fitter.parms+1, dtype=DOUBLE)
_xc[:-3] = self.x[:-5]
_xc[-3] = self.x[-4]
i=0
for pose, recep in zip(valid_poses_cplx, valid_recep_cplx):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_cplx[recep],
self.fitter.S_cplx[recep],
self.fitter.g_cplx[recep],
self.fitter.vol_cplx[pose],
_xc)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : _xc[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_cplx[recep],
self.fitter.pdat_cplx[pose],
prefix="%s.%d.%s" %(self.name, i, "cplx"),
**kwargs)
i += 1
### The ligand
### ~~~~~~~~~~
if self.process_lig:
valid_poses_lig = np.where(self.fitter.ind_case_lig==self.case)[0]
valid_recep_lig = self.fitter.ind_rec_lig[valid_poses_lig]
_xl = np.zeros(self.fitter.parms+1, dtype=DOUBLE)
_xl[:-3] = self.x[:-5]
_xl[-3] = self.x[-3]
i=0
for pose, recep in zip(valid_poses_lig, valid_recep_lig):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_lig[recep],
self.fitter.S_lig[recep],
self.fitter.g_lig[recep],
self.fitter.vol_lig[pose],
_xl)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : _xl[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_lig[recep],
self.fitter.pdat_lig[pose],
prefix="%s.%d.%s" %(self.name, i, "lig"),
**kwargs)
i += 1
def mode7(self, callback=None):
if self.process_rec and not self.pairs:
_xr = np.zeros(self.fitter.parms+1, dtype=DOUBLE)
if self.process_cplx:
_xc = np.zeros(self.fitter.parms+1, dtype=DOUBLE)
if self.process_lig:
_xl = np.zeros(self.fitter.parms+1, dtype=DOUBLE)
###
### For parms=4:
###
### with pairs:
### -----------
### x[0] = e_co (Cplx)
### x[1] = e_co (Lig)
### x[2] = s_co (Cplx)
### x[3] = s_co (Lig)
### x[4] = g_co (Cplx)
### x[5] = g_co (Lig)
### x[6] = C_E
### x[7] = C_S
###
### without pairs:
### --------------
### x[0] = e_co (Rec)
### x[1] = e_co (Cplx)
### x[2] = e_co (Lig)
### x[3] = s_co (Rec)
### x[4] = s_co (Cplx)
### x[5] = s_co (Lig)
### x[6] = g_co (Rec)
### x[7] = g_co (Cplx)
### x[8] = g_co (Lig)
### x[9] = C_E
### x[10] = C_S
if self.fitter.parms==4:
if self.pairs:
if self.process_cplx:
_xc[:-2] = self.x[[0,2,4]]
if self.process_lig:
_xl[:-2] = self.x[[1,3,5]]
else:
if self.process_rec:
_xr[:-2] = self.x[[0,3,6]]
if self.process_cplx:
_xc[:-2] = self.x[[1,4,7]]
if self.process_lig:
_xl[:-2] = self.x[[2,5,8]]
###
### For parms=5:
###
### with pairs:
### -----------
### x[0] = A
### x[1] = e_co (Cplx)
### x[2] = e_co (Lig)
### x[3] = s_co (Cplx)
### x[4] = s_co (Lig)
### x[5] = g_co (Cplx)
### x[6] = g_co (Lig)
### x[7] = C_E
### x[8] = C_S
###
### without pairs:
### --------------
### x[0] = A
### x[1] = e_co (Rec)
### x[2] = e_co (Cplx)
### x[3] = e_co (Lig)
### x[4] = s_co (Rec)
### x[5] = s_co (Cplx)
### x[6] = s_co (Lig)
### x[7] = g_co (Rec)
### x[8] = g_co (Cplx)
### x[9] = g_co (Lig)
### x[10] = C_E
### x[11] = C_S
elif self.fitter.parms==5:
if self.pairs:
if self.process_cplx:
_xc[:-2] = self.x[[0,1,3,5]]
if self.process_lig:
_xl[:-2] = self.x[[0,2,4,6]]
else:
if self.process_rec:
_xr[:-2] = self.x[[0,1,4,7]]
if self.process_cplx:
_xc[:-2] = self.x[[0,2,5,8]]
if self.process_lig:
_xl[:-2] = self.x[[0,3,6,9]]
###
### For parms=6:
###
### with pairs:
### -----------
### x[0] = E_aff
### x[1] = e_co (Cplx)
### x[2] = e_co (Lig)
### x[3] = S_aff
### x[4] = s_co (Cplx)
### x[5] = s_co (Lig)
### x[6] = g_co (Cplx)
### x[7] = g_co (Lig)
### x[8] = C_E
### x[9] = C_S
###
### without pairs:
### --------------
### x[0] = E_aff
### x[1] = e_co (Rec)
### x[2] = e_co (Cplx)
### x[3] = e_co (Lig)
### x[4] = S_aff
### x[5] = s_co (Rec)
### x[6] = s_co (Cplx)
### x[7] = s_co (Lig)
### x[8] = g_co (Rec)
### x[9] = g_co (Cplx)
### x[10] = g_co (Lig)
### x[11] = C_E
### x[12] = C_S
elif self.fitter.parms==6:
if self.pairs:
if self.process_cplx:
_xc[:-2] = self.x[[0,1,3,4,6]]
if self.process_lig:
_xl[:-2] = self.x[[0,2,3,5,7]]
else:
if self.process_rec:
_xr[:-2] = self.x[[0,1,4,5,8]]
if self.process_cplx:
_xc[:-2] = self.x[[0,2,4,6,9]]
if self.process_lig:
_xl[:-2] = self.x[[0,3,4,7,10]]
if not self.pairs:
### The receptor
### ~~~~~~~~~~~~
if self.process_rec:
valid_poses = np.where(self.fitter.ind_case==self.case)[0]
valid_recep = self.fitter.ind_rec[valid_poses]
i=0
for pose, recep in zip(valid_poses, valid_recep):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E[recep],
self.fitter.S[recep],
self.fitter.g[recep],
self.fitter.vol[pose],
_xr)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[0],
"C" : _xr[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat[recep],
self.fitter.pdat[pose],
prefix="%s.%d.%s" %(self.name, i, "rec"),
**kwargs)
i += 1
### The complex
### ~~~~~~~~~~~
if self.process_cplx:
valid_poses_cplx = np.where(self.fitter.ind_case_cplx==self.case)[0]
valid_recep_cplx = self.fitter.ind_rec_cplx[valid_poses_cplx]
i=0
for pose, recep in zip(valid_poses_cplx, valid_recep_cplx):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_cplx[recep],
self.fitter.S_cplx[recep],
self.fitter.g_cplx[recep],
self.fitter.vol_cplx[pose],
_xc)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : _xc[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_cplx[recep],
self.fitter.pdat_cplx[pose],
prefix="%s.%d.%s" %(self.name, i, "cplx"),
**kwargs)
i += 1
### The ligand
### ~~~~~~~~~~
if self.process_lig:
valid_poses_lig = np.where(self.fitter.ind_case_lig==self.case)[0]
valid_recep_lig = self.fitter.ind_rec_lig[valid_poses_lig]
i=0
for pose, recep in zip(valid_poses_lig, valid_recep_lig):
E_grid_val, S_grid_val, gv_grid_val = self.score(self.fitter.E_lig[recep],
self.fitter.S_lig[recep],
self.fitter.g_lig[recep],
self.fitter.vol_lig[pose],
_xl)
if callback != None:
kwargs = { "pose" : pose,
"radius" : self.fitter.radiusadd[1],
"C" : _xl[-2:]
}
callback(E_grid_val,
S_grid_val,
gv_grid_val,
self.fitter.gdat_lig[recep],
self.fitter.pdat_lig[pose],
prefix="%s.%d.%s" %(self.name, i, "lig"),
**kwargs)
i += 1
def parms4(self, E_grid, S_grid, g_grid, vol_grid, x):
E = np.zeros_like(E_grid)
S = np.zeros_like(S_grid)
g = np.zeros_like(g_grid)
valids_E = np.where(E_grid>x[0])
valids_S = np.where(S_grid>x[1])
valids_g = np.where(g_grid>x[2])
E[valids_E] = np.copy(E_grid[valids_E])
S[valids_S] = np.copy(S_grid[valids_S])
g[valids_g] = np.copy(g_grid[valids_g])
E_grid_val = np.zeros_like(E)
S_grid_val = np.zeros_like(S)
gv_grid_val = np.zeros_like(g)
### This is probably wrong:
#E_grid_val[valids_g] = E[valids_g] * vol_grid[valids_g] / g[valids_g] * 0.0332
#S_grid_val[valids_g] = S[valids_g] * vol_grid[valids_g] / g[valids_g] * 0.0332 * -1.
### This is how it should be:
### Note: 0.125 is the volume of one voxel
E_grid_val[valids_g] = E[valids_g] * vol_grid[valids_g] * g[valids_g] * 0.0332 * 0.125
S_grid_val[valids_g] = S[valids_g] * vol_grid[valids_g] * g[valids_g] * 0.0332 * 0.125
gv_grid_val[valids_g] = vol_grid[valids_g]*g[valids_g]
return E_grid_val, S_grid_val, gv_grid_val
def parms5(self, E_grid, S_grid, g_grid, vol_grid, x):
E = np.zeros_like(E_grid)
S = np.zeros_like(S_grid)
g = np.zeros_like(g_grid)
E[np.where(E_grid>x[1])] = 1.
S[np.where(S_grid>x[2])] = 1.
g[np.where(g_grid>x[3])] = 1.
E_grid_val = E*g*vol_grid*x[0]
S_grid_val = S*g*vol_grid*x[0]
gv_grid_val = vol_grid*g
return E_grid_val, S_grid_val, gv_grid_val
def parms6(self, E_grid, S_grid, g_grid, vol_grid, x):
E = np.zeros_like(E_grid)
S = np.zeros_like(S_grid)
g = np.zeros_like(g_grid)
E[np.where(E_grid>x[1])] = 1.
S[np.where(S_grid>x[3])] = 1.
g[np.where(g_grid>x[4])] = 1.
E_grid_val = E*g*vol_grid*x[0]
S_grid_val = S*g*vol_grid*x[2]
gv_grid_val = vol_grid*g
return E_grid_val, S_grid_val, gv_grid_val | 38.899113 | 109 | 0.378545 | 35,007 | 0.99772 | 0 | 0 | 0 | 0 | 0 | 0 | 5,507 | 0.156953 |
31be5bcba5067c3d0f88dba211c9dc9337d0bf13 | 2,560 | py | Python | src/Cogs/InfoCog.py | kodyVS/Discord-Bot-Development | 389bf69871adbe289f162ddbeeaf681023ca1f02 | [
"MIT"
]
| 5 | 2020-05-27T20:03:45.000Z | 2020-06-24T11:27:26.000Z | src/Cogs/InfoCog.py | kodyVS/Discord-Bot-Development | 389bf69871adbe289f162ddbeeaf681023ca1f02 | [
"MIT"
]
| 11 | 2020-05-28T10:56:26.000Z | 2020-07-02T13:38:02.000Z | src/Cogs/InfoCog.py | kodyVS/Discord-Bot-Development | 389bf69871adbe289f162ddbeeaf681023ca1f02 | [
"MIT"
]
| 3 | 2020-05-28T20:31:02.000Z | 2020-06-17T23:51:51.000Z | from discord.ext import commands
import discord
import requests
from bs4 import BeautifulSoup
# work in progress! more languages welcome!
class InfoCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name = 'docs', brief = 'programming language documentation', description = 'documentation for languages, access by calling `.docs <language> <query>`', aliases = ['documentation', 'info'])
async def docs(self, ctx, language: str, query):
# access docs based on language
if language == 'python' or language == 'python3':
full_link = 'https://docs.python.org/3/genindex-all.html'
page = requests.get(full_link).content
soup = BeautifulSoup(page, 'html.parser')
link_descriptions = []
for link in soup.findAll('a'):
if query in link.contents[0]:
link_descriptions.append(f"[{link.contents[0]}](https://docs.python.org/3/{link['href']})")
link_descriptions = list(dict.fromkeys(link_descriptions))
link_descriptions = link_descriptions[:10]
### TODO: multi-lingual docs support (devdocs.io?)
### TODO: faster searching (current 4-5 secs)
### TODO: filter results -> currently only pick top ten, and there are some odd results as well
embed = discord.Embed(title="Python 3 Docs", color = 0x00ff00)
embed.add_field(name=f'{len(link_descriptions)} results found for `{query}` :', value='\n'.join(
link_descriptions), inline=False)
embed.set_thumbnail(url=
'https://upload.wikimedia.org/wikipedia/commons/thumb/c/c3/Python-logo-notext.svg/240px-Python-logo-notext.svg.png')
await ctx.send(embed=embed)
@commands.command(name='github', brief = 'view top 10 daily github repos', description = 'see the names and descriptions of the top x github repos today with `.github x` (default 10)', aliases=['gh'])
async def github(self, ctx, amount: int = 10):
'''Gets the GitHub first < amount > repositories without embeds'''
page = requests.get(
'https://github-trending-api.now.sh/repositories?q=sort=stars&order=desc&since=daily')
response = [
f"{entry['description']}: {'<' + entry['url'] + '>'}\n" for entry in page.json()[:amount]]
embed = discord.Embed(
title=f"**GitHub's top {str(amount)} today**", description='\n'.join(response), color=0x00ff00)
await ctx.send(embed=embed)
| 49.230769 | 210 | 0.632031 | 2,419 | 0.944922 | 0 | 0 | 2,327 | 0.908984 | 1,911 | 0.746484 | 1,123 | 0.438672 |
31beb5620648da5af3d9f0847e1c1d7a84954a2a | 451 | py | Python | Python/List/37.drop.py | angelmpalomares/ModelAndLanguagesForBioInformatics | 0b981bfcdc2a58ad72da3513e783ef75e53c205c | [
"MIT"
]
| null | null | null | Python/List/37.drop.py | angelmpalomares/ModelAndLanguagesForBioInformatics | 0b981bfcdc2a58ad72da3513e783ef75e53c205c | [
"MIT"
]
| 1 | 2021-06-08T07:44:38.000Z | 2021-06-08T07:53:10.000Z | Python/List/37.drop.py | angelmpalomares/ModelAndLanguagesForBioInformatics | 0b981bfcdc2a58ad72da3513e783ef75e53c205c | [
"MIT"
]
| 2 | 2021-04-11T10:13:57.000Z | 2021-06-07T23:20:31.000Z | def drop(i_list: list,n:int) -> list:
"""
Drop at multiple of n from the list
:param n: Drop from the list i_list every N element
:param i_list: The source list
:return: The returned list
"""
assert(n>0)
_shallow_list = []
k=1
for element in i_list:
if k % n != 0:
_shallow_list.append(element)
k+=1
return _shallow_list
if __name__ == "__main__":
print(drop([1,2,3,4,5],6)) | 25.055556 | 55 | 0.585366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.405765 |
31bebc7c278939b7860a6be8a6a8aa404c030728 | 7,929 | py | Python | cil/balanced_experience_replay.py | itaicaspi-intel/advanced-coach | 2f1d15a8c75d9e3c1742fc553daad5e52f4d5188 | [
"Apache-2.0"
]
| 1 | 2019-02-18T05:57:42.000Z | 2019-02-18T05:57:42.000Z | cil/balanced_experience_replay.py | itaicaspi-intel/advanced-coach | 2f1d15a8c75d9e3c1742fc553daad5e52f4d5188 | [
"Apache-2.0"
]
| null | null | null | cil/balanced_experience_replay.py | itaicaspi-intel/advanced-coach | 2f1d15a8c75d9e3c1742fc553daad5e52f4d5188 | [
"Apache-2.0"
]
| null | null | null | #
# Copyright (c) 2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import operator
import random
from enum import Enum
from typing import List, Tuple, Any, Union
import numpy as np
from rl_coach.core_types import Transition
from rl_coach.memories.memory import MemoryGranularity
from rl_coach.memories.non_episodic.experience_replay import ExperienceReplayParameters, ExperienceReplay
from rl_coach.schedules import Schedule, ConstantSchedule
class BalancedExperienceReplayParameters(ExperienceReplayParameters):
def __init__(self):
super().__init__()
self.max_size = (MemoryGranularity.Transitions, 1000000)
self.allow_duplicates_in_batch_sampling = False
self.num_classes = 0
self.state_key_with_the_class_index = 'class'
@property
def path(self):
return 'cil.balanced_experience_replay:BalancedExperienceReplay'
"""
A replay buffer which allows sampling batches which are balanced in terms of the classes that are sampled
"""
class BalancedExperienceReplay(ExperienceReplay):
def __init__(self, max_size: Tuple[MemoryGranularity, int], allow_duplicates_in_batch_sampling: bool=True,
num_classes: int=0, state_key_with_the_class_index: Any='class'):
"""
:param max_size: the maximum number of transitions or episodes to hold in the memory
:param allow_duplicates_in_batch_sampling: allow having the same transition multiple times in a batch
:param num_classes: the number of classes in the replayed data
:param state_key_with_the_class_index: the class index is assumed to be a value in the state dictionary.
this parameter determines the key to retrieve the class index value
"""
super().__init__(max_size, allow_duplicates_in_batch_sampling)
self.current_class_to_sample_from = 0
self.num_classes = num_classes
self.state_key_with_the_class_index = state_key_with_the_class_index
self.transitions = [[] for _ in range(self.num_classes)]
self.transitions_order = []
if self.num_classes < 2:
raise ValueError("The number of classes for a balanced replay buffer should be at least 2. "
"The number of classes that were defined are: {}".format(self.num_classes))
def store(self, transition: Transition, lock: bool=True) -> None:
"""
Store a new transition in the memory.
:param transition: a transition to store
:param lock: if true, will lock the readers writers lock. this can cause a deadlock if an inheriting class
locks and then calls store with lock = True
:return: None
"""
if lock:
self.reader_writer_lock.lock_writing_and_reading()
self._num_transitions += 1
if self.state_key_with_the_class_index not in transition.state.keys():
raise ValueError("The class index was not present in the state of the transition under the given key ({})"
.format(self.state_key_with_the_class_index))
class_idx = transition.state[self.state_key_with_the_class_index]
if class_idx >= self.num_classes:
raise ValueError("The given class index is outside the defined number of classes for the replay buffer. "
"The given class was: {} and the number of classes defined is: {}"
.format(class_idx, self.num_classes))
self.transitions[class_idx].append(transition)
self.transitions_order.append(class_idx)
self._enforce_max_length()
if lock:
self.reader_writer_lock.release_writing_and_reading()
def sample(self, size: int) -> List[Transition]:
"""
Sample a batch of transitions form the replay buffer. If the requested size is larger than the number
of samples available in the replay buffer then the batch will return empty.
:param size: the size of the batch to sample
:return: a batch (list) of selected transitions from the replay buffer
"""
self.reader_writer_lock.lock_writing()
if size % self.num_classes != 0:
raise ValueError("Sampling batches from a balanced replay buffer should be done only using batch sizes "
"which are a multiple of the number of classes. The number of classes defined is: {} "
"and the batch size requested is: {}".format(self.num_classes, size))
batch_size_from_each_class = size // self.num_classes
if self.allow_duplicates_in_batch_sampling:
transitions_idx = [np.random.randint(len(class_transitions), size=batch_size_from_each_class)
for class_transitions in self.transitions]
else:
for class_idx, class_transitions in enumerate(self.transitions):
if self.num_transitions() < batch_size_from_each_class:
raise ValueError("The replay buffer cannot be sampled since there are not enough transitions yet. "
"There are currently {} transitions for class {}"
.format(len(class_transitions), class_idx))
transitions_idx = [np.random.choice(len(class_transitions), size=batch_size_from_each_class, replace=False)
for class_transitions in self.transitions]
batch = []
for class_idx, class_transitions_idx in enumerate(transitions_idx):
batch += [self.transitions[class_idx][i] for i in class_transitions_idx]
self.reader_writer_lock.release_writing()
return batch
def remove_transition(self, transition_index: int, lock: bool=True) -> None:
raise ValueError("It is not possible to remove specific transitions with a balanced replay buffer")
def get_transition(self, transition_index: int, lock: bool=True) -> Union[None, Transition]:
raise ValueError("It is not possible to access specific transitions with a balanced replay buffer")
def _enforce_max_length(self) -> None:
"""
Make sure that the size of the replay buffer does not pass the maximum size allowed.
If it passes the max size, the oldest transition in the replay buffer will be removed.
This function does not use locks since it is only called internally
:return: None
"""
granularity, size = self.max_size
if granularity == MemoryGranularity.Transitions:
while size != 0 and self.num_transitions() > size:
self._num_transitions -= 1
del self.transitions[self.transitions_order[0]][0]
del self.transitions_order[0]
else:
raise ValueError("The granularity of the replay buffer can only be set in terms of transitions")
def clean(self, lock: bool=True) -> None:
"""
Clean the memory by removing all the episodes
:return: None
"""
if lock:
self.reader_writer_lock.lock_writing_and_reading()
self.transitions = [[] for _ in range(self.num_classes)]
self.transitions_order = []
self._num_transitions = 0
if lock:
self.reader_writer_lock.release_writing_and_reading()
| 46.098837 | 119 | 0.67436 | 6,840 | 0.862656 | 0 | 0 | 102 | 0.012864 | 0 | 0 | 3,265 | 0.41178 |
31c2501833007cbf35b7052dce9d35c5762beec9 | 271 | py | Python | tools/schedprof/schedprof/mutex.py | ivochkin/dfk | 9d38cc657c18e065e2249865244eba9067b49b2a | [
"MIT"
]
| 1 | 2016-05-23T16:18:41.000Z | 2016-05-23T16:18:41.000Z | tools/schedprof/schedprof/mutex.py | ivochkin/dfk | 9d38cc657c18e065e2249865244eba9067b49b2a | [
"MIT"
]
| null | null | null | tools/schedprof/schedprof/mutex.py | ivochkin/dfk | 9d38cc657c18e065e2249865244eba9067b49b2a | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from schedprof.enumerated_instance import EnumeratedInstance
class Mutex(EnumeratedInstance):
def __init__(self):
super(Mutex, self).__init__(Mutex)
self.acquired_by = None
self.wait_queue = []
| 22.583333 | 60 | 0.678967 | 160 | 0.590406 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.162362 |
31c2954ee75af37e6605445fd453d42700c71a8e | 1,606 | py | Python | openpype/hosts/houdini/plugins/publish/increment_current_file.py | jonclothcat/OpenPype | d1208cbebc0a7f378de0062ccd653295c6399195 | [
"MIT"
]
| 87 | 2021-05-07T08:40:46.000Z | 2022-03-19T00:36:25.000Z | openpype/hosts/houdini/plugins/publish/increment_current_file.py | jonclothcat/OpenPype | d1208cbebc0a7f378de0062ccd653295c6399195 | [
"MIT"
]
| 1,019 | 2021-04-26T06:22:56.000Z | 2022-03-31T16:30:43.000Z | openpype/hosts/houdini/plugins/publish/increment_current_file.py | jonclothcat/OpenPype | d1208cbebc0a7f378de0062ccd653295c6399195 | [
"MIT"
]
| 33 | 2021-04-29T12:35:54.000Z | 2022-03-25T14:48:42.000Z | import pyblish.api
import avalon.api
from openpype.api import version_up
from openpype.action import get_errored_plugins_from_data
class IncrementCurrentFile(pyblish.api.InstancePlugin):
"""Increment the current file.
Saves the current scene with an increased version number.
"""
label = "Increment current file"
order = pyblish.api.IntegratorOrder + 9.0
hosts = ["houdini"]
families = ["colorbleed.usdrender", "redshift_rop"]
targets = ["local"]
def process(self, instance):
# This should be a ContextPlugin, but this is a workaround
# for a bug in pyblish to run once for a family: issue #250
context = instance.context
key = "__hasRun{}".format(self.__class__.__name__)
if context.data.get(key, False):
return
else:
context.data[key] = True
context = instance.context
errored_plugins = get_errored_plugins_from_data(context)
if any(
plugin.__name__ == "HoudiniSubmitPublishDeadline"
for plugin in errored_plugins
):
raise RuntimeError(
"Skipping incrementing current file because "
"submission to deadline failed."
)
# Filename must not have changed since collecting
host = avalon.api.registered_host()
current_file = host.current_file()
assert (
context.data["currentFile"] == current_file
), "Collected filename from current scene name."
new_filepath = version_up(current_file)
host.save(new_filepath)
| 30.884615 | 67 | 0.643213 | 1,471 | 0.91594 | 0 | 0 | 0 | 0 | 0 | 0 | 521 | 0.324408 |
31c40d9158227b21f8cb192d674e54997bf631fc | 2,586 | py | Python | 968 Binary Tree Cameras.py | krishna13052001/LeetCode | cd6ec626bea61f0bd9e8493622074f9e69a7a1c3 | [
"MIT"
]
| 872 | 2015-06-15T12:02:41.000Z | 2022-03-30T08:44:35.000Z | 968 Binary Tree Cameras.py | nadeemshaikh-github/LeetCode | 3fb14aeea62a960442e47dfde9f964c7ffce32be | [
"MIT"
]
| 8 | 2015-06-21T15:11:59.000Z | 2022-02-01T11:22:34.000Z | 968 Binary Tree Cameras.py | nadeemshaikh-github/LeetCode | 3fb14aeea62a960442e47dfde9f964c7ffce32be | [
"MIT"
]
| 328 | 2015-06-28T03:10:35.000Z | 2022-03-29T11:05:28.000Z | #!/usr/bin/python3
"""
Given a binary tree, we install cameras on the nodes of the tree.
Each camera at a node can monitor its parent, itself, and its immediate children.
Calculate the minimum number of cameras needed to monitor all nodes of the tree.
Example 1:
Input: [0,0,null,0,0]
Output: 1
Explanation: One camera is enough to monitor all nodes if placed as shown.
Example 2:
Input: [0,0,null,0,null,0,null,null,0]
Output: 2
Explanation: At least two cameras are needed to monitor all nodes of the tree.
The above image shows one of the valid configurations of camera placement.
Note:
The number of nodes in the given tree will be in the range [1, 1000].
Every node has value 0.
"""
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def __init__(self):
self.covered = {None}
self.cnt = 0
def minCameraCover(self, root: TreeNode) -> int:
"""
Greedy?
Bottom up, cover leaf's parent is strictly better than cover leaf
"""
self.dfs(root, None)
if root not in self.covered:
self.covered.add(root)
self.cnt += 1
return self.cnt
def dfs(self, node, pi):
"""
post order
rely on the parents to cover it
"""
if not node:
return
self.dfs(node.left, node)
self.dfs(node.right, node)
if node.left not in self.covered or node.right not in self.covered:
self.cnt += 1
self.covered.add(node.left)
self.covered.add(node.right)
self.covered.add(node)
self.covered.add(pi)
class SolutionErrror:
def __init__(self):
self.covered = set()
def minCameraCover(self, root: TreeNode) -> int:
"""
Greedy?
Top-down, no good.
Bottom up, cover leaf's parent is strictly better than cover leaf
"""
dummy = TreeNode(0)
dummy.left = root
self.dfs(root, dummy)
self.covered.discard(dummy) # swallow KeyError
return len(self.covered)
def dfs(self, node, pi):
"""
post order
"""
if not node:
return
self.dfs(node.left, node)
self.dfs(node.right, node)
# post oder
if (
(not node.left or node.left in self.covered) and
(not node.right or node.right in self.covered)
):
self.covered.add(pi)
return
| 23.944444 | 81 | 0.587394 | 1,841 | 0.71191 | 0 | 0 | 0 | 0 | 0 | 0 | 1,108 | 0.428461 |
31c5aa8dd90d83d5636e20aed612d574a8d8b309 | 308 | py | Python | examples/get_tiktoks_by_sound.py | twitter-79/TikTok-Api | 366fd2b157dfa66586aa6edc91c50ccf1cf00785 | [
"MIT"
]
| 2,095 | 2019-05-26T17:07:02.000Z | 2022-03-31T11:42:32.000Z | examples/get_tiktoks_by_sound.py | twitter-79/TikTok-Api | 366fd2b157dfa66586aa6edc91c50ccf1cf00785 | [
"MIT"
]
| 633 | 2019-06-11T06:42:03.000Z | 2022-03-31T11:43:42.000Z | examples/get_tiktoks_by_sound.py | twitter-79/TikTok-Api | 366fd2b157dfa66586aa6edc91c50ccf1cf00785 | [
"MIT"
]
| 630 | 2019-09-30T18:11:58.000Z | 2022-03-30T16:23:01.000Z | from TikTokApi import TikTokApi
api = TikTokApi.get_instance()
count = 30
# You can find this from a tiktok getting method in another way or find songs from the discoverMusic method.
sound_id = "6601861313180207878"
tiktoks = api.by_sound(sound_id, count=count)
for tiktok in tiktoks:
print(tiktok)
| 22 | 108 | 0.772727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 129 | 0.418831 |
31c5c0e1d9a4cbb4ae462748e1bb8e68224eb9d2 | 4,464 | py | Python | code/SimPleAC_pof_paperplots.py | 1ozturkbe/robustSPpaper | d90f01945f96c5bd45f3518665e52d920820e4d7 | [
"MIT"
]
| null | null | null | code/SimPleAC_pof_paperplots.py | 1ozturkbe/robustSPpaper | d90f01945f96c5bd45f3518665e52d920820e4d7 | [
"MIT"
]
| 7 | 2018-06-25T14:51:29.000Z | 2019-06-26T18:20:12.000Z | code/SimPleAC_pof_paperplots.py | 1ozturkbe/robustSPpaper | d90f01945f96c5bd45f3518665e52d920820e4d7 | [
"MIT"
]
| null | null | null | from builtins import str
from builtins import range
from robust.simulations.simulate import filter_gamma_result_dict
from SimPleAC_save import load_obj
import pickle as pickle
import numpy as np
import matplotlib.pyplot as plt
from SimPleAC_pof_simulate import pof_parameters
if __name__ == "__main__":
# Retrieving pof parameters
[model, methods, gammas, number_of_iterations,
min_num_of_linear_sections, max_num_of_linear_sections, verbosity, linearization_tolerance,
number_of_time_average_solves, uncertainty_sets, nominal_solution, directly_uncertain_vars_subs, parallel,
nominal_number_of_constraints, nominal_solve_time] = pof_parameters()
method = methods[0] # only care about Best Pairs
# Loading results
margin = {}
nGammas = nmargins = len(gammas)
margins = gammas
margin['solutions'] = {}
for i in range(nmargins):
margin['solutions'][margins[i]] = pickle.load(open("marginResults/" +
str(margins[i]), 'rb'))
margin['number_of_constraints'] = load_obj('marginnumber_of_constraints', 'marginResults')
margin['simulation_results'] = load_obj('marginsimulation_results', 'marginResults')
gamma = {}
gamma['solutions'] = {}
for i in range(nGammas):
for j in range(len(methods)):
for k in range((len(uncertainty_sets))):
gamma['solutions'][gammas[i], methods[j]['name'], uncertainty_sets[k]] = pickle.load(open(
"gammaResults\\" + str((gammas[i], methods[j]['name'], uncertainty_sets[k])), 'rb'))
gamma['solve_times'] = load_obj('gammasolve_times', 'gammaResults')
gamma['simulation_results'] = load_obj('gammasimulation_results', 'gammaResults')
gamma['number_of_constraints'] = load_obj('gammanumber_of_constraints', 'gammaResults')
# Plotting of cost and probability of failure
objective_name = 'Total fuel weight'
objective_units = 'N'
title = ''
filteredResults = [margin['solutions'],
filter_gamma_result_dict(gamma['solutions'], 1, method['name'], 2, 'box'),
filter_gamma_result_dict(gamma['solutions'], 1, method['name'], 2, 'ellipsoidal')]
filteredSimulations = [margin['simulation_results'],
filter_gamma_result_dict(gamma['simulation_results'], 1, method['name'], 2, 'box'),
filter_gamma_result_dict(gamma['simulation_results'], 1, method['name'], 2, 'ellipsoidal')]
objective_varkey = 'W_{f_m}'
legend_keys = ['margins', 'box', 'ellipsoidal']
edgecolors = ['#FFBF00', '#CC0000', '#008000']
facecolors = ['#FFE135','#FF2052', '#8DB600']
fig, ax1 = plt.subplots()
ax2 = ax1.twinx()
lines = []
mincost = 1e10
maxcost = 0
for i in range(len(legend_keys)):
sims = list(filteredSimulations[i].items())
pofs = []
objective_costs = []
objective_stddev = []
for j in sims:
pofs.append(j[1][0])
objective_costs.append(j[1][1])
objective_stddev.append(j[1][2])
mincost = np.min([mincost] + objective_costs)
maxcost = np.max([maxcost] + objective_costs)
lines.append(ax1.plot(gammas, objective_costs, color=edgecolors[i], label=legend_keys[i] + ', cost'))
inds = np.nonzero(np.ones(len(gammas)) - pofs)[0]
uppers = [objective_costs[ind] + objective_stddev[ind] for ind in inds]
lowers = [objective_costs[ind] - objective_stddev[ind] for ind in inds]
x = [gammas[ind] for ind in inds]
ax1.fill_between(x, lowers, uppers,
alpha=0.5, edgecolor = edgecolors[i], facecolor = facecolors[i])
lines.append(ax2.plot(gammas, pofs, color=edgecolors[i], label=legend_keys[i] + ', PoF'))
ax1.set_xlabel(r'Uncertainty Set Scaling Factor $\Gamma$', fontsize=12)
ax1.set_ylabel('Cost [' + objective_name + ' (' + objective_units.capitalize() + ')]', fontsize=12)
ax2.set_ylabel("Probability of Failure", fontsize=12)
ax1.set_ylim([mincost, maxcost])
ax2.set_ylim([0, 1])
plt.title(title, fontsize=12)
labs = [lines[l][0].get_label() for l in [1,3,5,0,2,4]]
ax1.legend(labs, loc="lower right", fontsize=9, numpoints=1)
# ax1.legend(loc="lower right", fontsize=10, numpoints=1)
# fig.legend(loc="lower right", fontsize=10, numpoints=1)
plt.show()
| 49.054945 | 124 | 0.641801 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,011 | 0.226478 |
31c6e6ace01eea05877a86d1f6316d5a911da292 | 588 | py | Python | test/show-cifar10.py | tom01h/deep-learning-from-scratch | acb3c31976cd736b4abd21c3e8ab81c3bf0eb9bb | [
"MIT"
]
| 3 | 2018-10-11T16:19:18.000Z | 2022-01-16T07:48:06.000Z | test/show-cifar10.py | tom01h/deep-learning-from-scratch | acb3c31976cd736b4abd21c3e8ab81c3bf0eb9bb | [
"MIT"
]
| null | null | null | test/show-cifar10.py | tom01h/deep-learning-from-scratch | acb3c31976cd736b4abd21c3e8ab81c3bf0eb9bb | [
"MIT"
]
| null | null | null | # coding: utf-8
import sys, os
sys.path.append(os.pardir) # 親ディレクトリのファイルをインポートするための設定
import numpy as np
from dataset.cifar10 import load_cifar10
from PIL import Image
np.set_printoptions(threshold=100)
(x_train, t_train), (x_test, t_test) = load_cifar10(flatten=False)
sample_image = x_test[0:100].reshape((10, 10, 3, 32, 32)).transpose((0, 3, 1, 4, 2)).reshape((320, 320, 3)) # 先頭100個をタイル状に並べ替える
Image.fromarray(np.uint8(sample_image*255)).save('sample.png')
print(t_test[0:100].reshape(10,10))
#pil_img = Image.fromarray(np.uint8(sample_image*255))
#pil_img.show()
| 34.588235 | 128 | 0.727891 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 225 | 0.337838 |
31c78d6966a9d84a523a15b22e795f490c2201f9 | 44 | py | Python | vertex-server/signals/__init__.py | aoswalt/greenlite-hardware | 056ed78829519f49adab60dbcf67878243fe764e | [
"MIT"
]
| null | null | null | vertex-server/signals/__init__.py | aoswalt/greenlite-hardware | 056ed78829519f49adab60dbcf67878243fe764e | [
"MIT"
]
| 1 | 2016-11-01T23:55:07.000Z | 2016-11-01T23:55:07.000Z | vertex-server/signals/__init__.py | aoswalt/greenlite-hardware | 056ed78829519f49adab60dbcf67878243fe764e | [
"MIT"
]
| null | null | null | from . import lights
from . import schedule
| 14.666667 | 22 | 0.772727 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
31c7910d7253d24e22e70937e36be79e678386eb | 10,533 | py | Python | PWWS/fool.py | ForeverZyh/ASCC | 2d76d679889953501c469221a37d486e7ee42ded | [
"MIT"
]
| 21 | 2021-03-22T07:14:29.000Z | 2022-03-24T02:05:25.000Z | PWWS/fool.py | ForeverZyh/ASCC | 2d76d679889953501c469221a37d486e7ee42ded | [
"MIT"
]
| 2 | 2021-04-07T11:31:01.000Z | 2022-01-10T03:41:10.000Z | PWWS/fool.py | ForeverZyh/ASCC | 2d76d679889953501c469221a37d486e7ee42ded | [
"MIT"
]
| 4 | 2021-05-05T18:44:13.000Z | 2021-07-29T03:09:50.000Z | # coding: utf-8
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import argparse
import os
import numpy as np
from read_files import split_imdb_files, split_yahoo_files, split_agnews_files
from word_level_process import word_process, get_tokenizer
from char_level_process import char_process
from neural_networks import word_cnn, char_cnn, bd_lstm, lstm
from adversarial_tools import ForwardGradWrapper, adversarial_paraphrase
import tensorflow as tf
from keras import backend as K
import time
from unbuffered import Unbuffered
sys.stdout = Unbuffered(sys.stdout)
config = tf.ConfigProto(allow_soft_placement=True)
config.gpu_options.allow_growth = True
K.set_session(tf.Session(config=config))
# os.environ["CUDA_VISIBLE_DEVICES"] = "1"
parser = argparse.ArgumentParser(
description='Craft adversarial examples for a text classifier.')
parser.add_argument('--clean_samples_cap',
help='Amount of clean(test) samples to fool',
type=int, default=1000)
parser.add_argument('-m', '--model',
help='The model of text classifier',
choices=['word_cnn', 'char_cnn', 'word_lstm', 'word_bdlstm'],
default='word_cnn')
parser.add_argument('-d', '--dataset',
help='Data set',
choices=['imdb', 'agnews', 'yahoo'],
default='imdb')
parser.add_argument('-l', '--level',
help='The level of process dataset',
choices=['word', 'char'],
default='word')
def write_origin_input_texts(origin_input_texts_path, test_texts, test_samples_cap=None):
if test_samples_cap is None:
test_samples_cap = len(test_texts)
with open(origin_input_texts_path, 'a') as f:
for i in range(test_samples_cap):
f.write(test_texts[i] + '\n')
def fool_text_classifier():
clean_samples_cap = args.clean_samples_cap # 1000
print('clean_samples_cap:', clean_samples_cap)
# get tokenizer
dataset = args.dataset
tokenizer = get_tokenizer(opt)
# Read data set
x_test = y_test = None
test_texts = None
if dataset == 'imdb':
train_texts, train_labels, dev_texts, dev_labels, test_texts, test_labels = split_imdb_files(opt)
if args.level == 'word':
x_train, y_train, x_test, y_test = word_process(train_texts, train_labels, test_texts, test_labels, dataset)
elif args.level == 'char':
x_train, y_train, x_test, y_test = char_process(train_texts, train_labels, test_texts, test_labels, dataset)
elif dataset == 'agnews':
train_texts, train_labels, test_texts, test_labels = split_agnews_files()
if args.level == 'word':
x_train, y_train, x_test, y_test = word_process(train_texts, train_labels, test_texts, test_labels, dataset)
elif args.level == 'char':
x_train, y_train, x_test, y_test = char_process(train_texts, train_labels, test_texts, test_labels, dataset)
elif dataset == 'yahoo':
train_texts, train_labels, test_texts, test_labels = split_yahoo_files()
if args.level == 'word':
x_train, y_train, x_test, y_test = word_process(train_texts, train_labels, test_texts, test_labels, dataset)
elif args.level == 'char':
x_train, y_train, x_test, y_test = char_process(train_texts, train_labels, test_texts, test_labels, dataset)
# Write clean examples into a txt file
clean_texts_path = r'./fool_result/{}/clean_{}.txt'.format(dataset, str(clean_samples_cap))
if not os.path.isfile(clean_texts_path):
write_origin_input_texts(clean_texts_path, test_texts)
# Select the model and load the trained weights
assert args.model[:4] == args.level
model = None
if args.model == "word_cnn":
model = word_cnn(dataset)
elif args.model == "word_bdlstm":
model = bd_lstm(dataset)
elif args.model == "char_cnn":
model = char_cnn(dataset)
elif args.model == "word_lstm":
model = lstm(dataset)
model_path = r'./runs/{}/{}.dat'.format(dataset, args.model)
model.load_weights(model_path)
print('model path:', model_path)
# evaluate classification accuracy of model on clean samples
scores_origin = model.evaluate(x_test[:clean_samples_cap], y_test[:clean_samples_cap])
print('clean samples origin test_loss: %f, accuracy: %f' % (scores_origin[0], scores_origin[1]))
all_scores_origin = model.evaluate(x_test, y_test)
print('all origin test_loss: %f, accuracy: %f' % (all_scores_origin[0], all_scores_origin[1]))
grad_guide = ForwardGradWrapper(model)
classes_prediction = grad_guide.predict_classes(x_test[: clean_samples_cap])
print('Crafting adversarial examples...')
successful_perturbations = 0
failed_perturbations = 0
sub_rate_list = []
NE_rate_list = []
start_cpu = time.clock()
adv_text_path = r'./fool_result/{}/{}/adv_{}.txt'.format(dataset, args.model, str(clean_samples_cap))
change_tuple_path = r'./fool_result/{}/{}/change_tuple_{}.txt'.format(dataset, args.model, str(clean_samples_cap))
file_1 = open(adv_text_path, "a")
file_2 = open(change_tuple_path, "a")
for index, text in enumerate(test_texts[: clean_samples_cap]):
sub_rate = 0
NE_rate = 0
if np.argmax(y_test[index]) == classes_prediction[index]:
# If the ground_true label is the same as the predicted label
adv_doc, adv_y, sub_rate, NE_rate, change_tuple_list = adversarial_paraphrase(input_text=text,
true_y=np.argmax(y_test[index]),
grad_guide=grad_guide,
tokenizer=tokenizer,
dataset=dataset,
level=args.level)
if adv_y != np.argmax(y_test[index]):
successful_perturbations += 1
print('{}. Successful example crafted.'.format(index))
else:
failed_perturbations += 1
print('{}. Failure.'.format(index))
text = adv_doc
sub_rate_list.append(sub_rate)
NE_rate_list.append(NE_rate)
file_2.write(str(index) + str(change_tuple_list) + '\n')
file_1.write(text + " sub_rate: " + str(sub_rate) + "; NE_rate: " + str(NE_rate) + "\n")
end_cpu = time.clock()
print('CPU second:', end_cpu - start_cpu)
mean_sub_rate = sum(sub_rate_list) / len(sub_rate_list)
mean_NE_rate = sum(NE_rate_list) / len(NE_rate_list)
print('mean substitution rate:', mean_sub_rate)
print('mean NE rate:', mean_NE_rate)
file_1.close()
file_2.close()
def fool_text_classifier_pytorch(model, dataset='imdb'):
clean_samples_cap = 100
print('clean_samples_cap:', clean_samples_cap)
# get tokenizer
tokenizer = get_tokenizer(opt)
# Read data set
x_test = y_test = None
test_texts = None
if dataset == 'imdb':
train_texts, train_labels, dev_texts, dev_labels, test_texts, test_labels = split_imdb_files(opt)
x_train, y_train, x_test, y_test = word_process(train_texts, train_labels, test_texts, test_labels, dataset)
elif dataset == 'agnews':
train_texts, train_labels, test_texts, test_labels = split_agnews_files()
x_train, y_train, x_test, y_test = word_process(train_texts, train_labels, test_texts, test_labels, dataset)
elif dataset == 'yahoo':
train_texts, train_labels, test_texts, test_labels = split_yahoo_files()
x_train, y_train, x_test, y_test = word_process(train_texts, train_labels, test_texts, test_labels, dataset)
grad_guide = ForwardGradWrapper_pytorch(model)
classes_prediction = grad_guide.predict_classes(x_test[: clean_samples_cap])
print('Crafting adversarial examples...')
successful_perturbations = 0
failed_perturbations = 0
sub_rate_list = []
NE_rate_list = []
start_cpu = time.clock()
adv_text_path = r'./fool_result/{}/adv_{}.txt'.format(dataset, str(clean_samples_cap))
change_tuple_path = r'./fool_result/{}/change_tuple_{}.txt'.format(dataset, str(clean_samples_cap))
file_1 = open(adv_text_path, "a")
file_2 = open(change_tuple_path, "a")
for index, text in enumerate(test_texts[: clean_samples_cap]):
sub_rate = 0
NE_rate = 0
if np.argmax(y_test[index]) == classes_prediction[index]:
# If the ground_true label is the same as the predicted label
adv_doc, adv_y, sub_rate, NE_rate, change_tuple_list = adversarial_paraphrase(input_text=text,
true_y=np.argmax(y_test[index]),
grad_guide=grad_guide,
tokenizer=tokenizer,
dataset=dataset,
level='word')
if adv_y != np.argmax(y_test[index]):
successful_perturbations += 1
print('{}. Successful example crafted.'.format(index))
else:
failed_perturbations += 1
print('{}. Failure.'.format(index))
text = adv_doc
sub_rate_list.append(sub_rate)
NE_rate_list.append(NE_rate)
file_2.write(str(index) + str(change_tuple_list) + '\n')
file_1.write(text + " sub_rate: " + str(sub_rate) + "; NE_rate: " + str(NE_rate) + "\n")
end_cpu = time.clock()
print('CPU second:', end_cpu - start_cpu)
mean_sub_rate = sum(sub_rate_list) / len(sub_rate_list)
mean_NE_rate = sum(NE_rate_list) / len(NE_rate_list)
print('mean substitution rate:', mean_sub_rate)
print('mean NE rate:', mean_NE_rate)
file_1.close()
file_2.close()
if __name__ == '__main__':
args = parser.parse_args()
fool_text_classifier()
| 46.606195 | 122 | 0.619102 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,548 | 0.146967 |
31c871b146933705ca94093543636c2b4a72c392 | 22,970 | py | Python | test_training_data.py | miermans/gym-2048 | 39f2cf375ef936284677a97b373aa2b97c8e45fc | [
"MIT"
]
| null | null | null | test_training_data.py | miermans/gym-2048 | 39f2cf375ef936284677a97b373aa2b97c8e45fc | [
"MIT"
]
| 2 | 2021-05-26T20:24:09.000Z | 2021-05-27T08:44:54.000Z | test_training_data.py | miermans/gym-2048 | 39f2cf375ef936284677a97b373aa2b97c8e45fc | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
from __future__ import absolute_import
import numpy as np
import os
import pytest
import tempfile
import training_data
class TestTrainingData():
def test_add(self):
td = training_data.training_data()
assert np.array_equal(td.get_x(), np.empty([0, 4, 4], dtype=np.int))
assert np.array_equal(td.get_y_digit(), np.empty([0, 1], dtype=np.int))
assert np.allclose(td.get_reward(), np.empty([0, 1], dtype=np.float))
assert np.array_equal(td.get_next_x(), np.empty([0, 4, 4], dtype=np.int))
assert np.array_equal(td.get_done(), np.empty([0, 1], dtype=np.bool))
td.add(np.ones([1, 4, 4]), 1, 4, np.zeros([1, 4, 4]), True)
assert np.array_equal(td.get_x(), np.ones([1, 4, 4], dtype=np.int))
assert np.array_equal(td.get_y_digit(), np.array([[1]], dtype=np.int))
assert np.allclose(td.get_reward(), np.array([[4]], dtype=np.float))
assert np.array_equal(td.get_next_x(), np.zeros([1, 4, 4], dtype=np.int))
assert np.array_equal(td.get_done(), np.array([[1]], dtype=np.bool))
def test_get_x_stacked(self):
td = training_data.training_data()
td.add(np.full([4, 4], 2), 0, 4, np.zeros([4, 4]))
td.add(np.full([4, 4], 8), 1, 8, np.ones([4, 4]))
td.add(np.full([4, 4], 2048), 1, 8, np.ones([4, 4]))
expected_x_stacked = np.array([
[
[[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
[[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
[[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
[[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
],
[
[[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
[[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
[[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
[[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
],
[
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]],
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]],
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]],
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0]]
]
], dtype=np.int)
assert np.array_equal(td.get_x_stacked(), expected_x_stacked)
def test_get_y_one_hot(self):
td = training_data.training_data()
td.add(np.ones([4, 4]), 0, 4, np.zeros([4, 4]))
td.add(np.zeros([4, 4]), 1, 8, np.ones([4, 4]))
td.add(np.zeros([4, 4]), 3, 8, np.ones([4, 4]))
td.add(np.zeros([4, 4]), 2, 8, np.ones([4, 4]))
expected_y_one_hot = np.array([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 0, 1],
[0, 0, 1, 0]
], dtype=np.int)
assert np.array_equal(td.get_y_one_hot(), expected_y_one_hot)
def test_get_total_reward(self):
td = training_data.training_data()
td.add(np.ones([4, 4]), 0, 4, np.zeros([4, 4]))
td.add(np.zeros([4, 4]), 1, 8, np.ones([4, 4]))
td.add(np.zeros([4, 4]), 3, 16, np.ones([4, 4]))
td.add(np.zeros([4, 4]), 2, 32, np.ones([4, 4]))
assert td.get_total_reward() == 60
def test_get_highest_tile(self):
td = training_data.training_data()
td.add(np.full((4, 4), 1), 0, 4, np.full((4, 4), 2))
td.add(np.full((4, 4), 2), 0, 4, np.full((4, 4), 4))
assert td.get_highest_tile() == 4
def test_get_n(self):
td = training_data.training_data()
td.add(np.ones([4, 4]), 1, 4, np.zeros([4, 4]))
td.add(np.zeros([4, 4]), 2, 8, np.ones([4, 4]))
(state, action, reward, next_state, done) = td.get_n(1)
assert np.array_equal(state, np.zeros([4, 4], dtype=np.int))
assert action == 2
assert reward == pytest.approx(8.)
assert np.array_equal(next_state, np.ones([4, 4], dtype=np.int))
def test_hflip(self):
td = training_data.training_data()
board1 = np.array([[1, 1, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
board2 = np.array([[0, 0, 0, 0],
[2, 4, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
td.add(board1, 1, 2, board2)
td.add(board2, 2, 0, board1)
td.hflip()
expected_x = np.array([
[[0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[0, 0, 0, 0], [0, 0, 4, 2], [0, 0, 0, 0], [0, 0, 0, 0]]
], dtype=np.int)
expected_y_digit = np.array([
[3],
[2]
], dtype=np.int)
expected_reward = np.array([
[2],
[0],
], dtype=np.float)
expected_next_x = np.array([
[[0, 0, 0, 0], [0, 0, 4, 2], [0, 0, 0, 0], [0, 0, 0, 0]],
[[0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
], dtype=np.int)
assert np.array_equal(td.get_x(), expected_x)
assert np.array_equal(td.get_y_digit(), expected_y_digit)
assert np.allclose(td.get_reward(), expected_reward)
assert np.allclose(td.get_next_x(), expected_next_x)
def test_rotate(self):
td = training_data.training_data()
board1 = np.array([[1, 1, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
board2 = np.array([[0, 0, 0, 0],
[2, 4, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
td.add(board1, 1, 2, board2)
td.add(board2, 2, 0, board1)
td.rotate(3)
expected_x = np.array([
[[0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 4, 0, 0], [0, 2, 0, 0]]
], dtype=np.int)
expected_y_digit = np.array([
[0],
[1]
], dtype=np.int)
expected_reward = np.array([
[2],
[0],
], dtype=np.float)
expected_next_x = np.array([
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 4, 0, 0], [0, 2, 0, 0]],
[[0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]]
], dtype=np.int)
assert np.array_equal(td.get_x(), expected_x)
assert np.array_equal(td.get_y_digit(), expected_y_digit)
assert np.allclose(td.get_reward(), expected_reward)
assert np.array_equal(td.get_next_x(), expected_next_x)
def test_augment(self):
td = training_data.training_data()
initial_board = np.array([[1, 1, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
next_board = np.array([[0, 0, 0, 2],
[0, 2, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
td.add(initial_board, 1, 4, next_board)
td.augment()
assert td.size() == 8
expected_x = np.array([
[[1, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[0, 0, 0, 1], [0, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 0, 1]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 1]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [1, 1, 0, 0]],
[[0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 0], [1, 0, 0, 0]],
[[1, 0, 0, 0], [1, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
], dtype=np.int)
expected_y_digit = np.array([
[1],
[3],
[2],
[0],
[3],
[1],
[0],
[2]
], dtype=np.int)
expected_reward = np.array([
[4],
[4],
[4],
[4],
[4],
[4],
[4],
[4]
], dtype=np.float)
expected_next_x = np.array([
[[0, 0, 0, 2], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], # Original
[[2, 0, 0, 0], [0, 0, 2, 0], [0, 0, 0, 0], [0, 0, 0, 0]], # Hflip'd
[[0, 0, 0, 0], [0, 0, 2, 0], [0, 0, 0, 0], [0, 0, 0, 2]], # Original, rotated 90 degrees
[[0, 0, 0, 2], [0, 0, 0, 0], [0, 0, 2, 0], [0, 0, 0, 0]], # Hflip, rotated 90 degrees
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 2, 0], [2, 0, 0, 0]], # Original, rotated 180 degrees
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 2]], # Hflip, rotated 180 degrees
[[2, 0, 0, 0], [0, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0]], # Original, rotate 270 degrees
[[0, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [2, 0, 0, 0]] # Hflip, rotated 270 degrees
], dtype=np.int)
assert np.array_equal(td.get_x(), expected_x)
assert np.array_equal(td.get_y_digit(), expected_y_digit)
assert np.allclose(td.get_reward(), expected_reward)
assert np.array_equal(td.get_next_x(), expected_next_x)
def test_merge(self):
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 1, 16, np.zeros([1, 4, 4]))
td2 = training_data.training_data()
td2.add(np.zeros([1, 4, 4]), 2, 0, np.ones([1, 4, 4]))
td.merge(td2)
expected_x = np.array([
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
], dtype=np.int)
expected_y_digit = np.array([
[1],
[2]
], dtype=np.int)
expected_reward = np.array([
[16],
[0]
], dtype=np.float)
expected_next_x = np.array([
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]]
], dtype=np.int)
assert np.array_equal(td.get_x(), expected_x)
assert np.array_equal(td.get_y_digit(), expected_y_digit)
assert np.allclose(td.get_reward(), expected_reward)
assert np.array_equal(td.get_next_x(), expected_next_x)
def test_split(self):
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 1, 16, np.zeros([1, 4, 4]))
td2 = training_data.training_data()
td2.add(np.zeros([1, 4, 4]), 2, 0, np.ones([1, 4, 4]))
td.merge(td2)
a, b = td.split()
assert np.array_equal(a.get_x(), np.ones([1, 4, 4]))
assert np.array_equal(a.get_y_digit(), [[1]])
assert np.array_equal(a.get_reward(), [[16]])
assert np.array_equal(a.get_next_x(), np.zeros([1, 4, 4]))
assert np.array_equal(b.get_x(), np.zeros([1, 4, 4]))
assert np.array_equal(b.get_y_digit(), [[2]])
assert np.array_equal(b.get_reward(), [[0]])
assert np.array_equal(b.get_next_x(), np.ones([1, 4, 4]))
def test_sample(self):
td = training_data.training_data()
td.add(np.zeros([1, 4, 4]), 0, 0, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 1, 1, np.ones([1, 4, 4]))
sample = td.sample([1])
assert sample.size() == 1
assert sample.get_y_digit() in [[[0]], [[1]]]
if sample.get_y_digit() == 0:
assert np.array_equal(sample.get_x(), np.zeros([1, 4, 4]))
if sample.get_y_digit() == 1:
assert np.array_equal(sample.get_x(), np.ones([1, 4, 4]))
def test_size(self):
td = training_data.training_data()
assert td.size() == 0
td.add(np.ones([1, 4, 4]), 0, 4, np.zeros([1, 4, 4]))
assert td.size() == 1
def test_log2_rewards(self):
# Set up training data
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 0, 0, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 1, 2, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 2, 4, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 3, 16, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 0, 75, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 1, 2048, np.zeros([1, 4, 4]))
td.log2_rewards()
expected_reward = np.array([
[0], [1], [2], [4], [6.2288], [11]
], dtype=np.float)
assert np.allclose(td.get_reward(), expected_reward)
expected_action = np.array([
[0], [1], [2], [3], [0], [1]
], dtype=np.int)
assert np.allclose(td.get_y_digit(), expected_action)
def test_get_discounted_return(self):
# Set up training data
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 0, 4, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 1, 2, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 2, 16, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 3, 2, np.zeros([1, 4, 4]))
# Test using default gamma value of 0.9
td2 = td.copy()
discounted_return = td2.get_discounted_return()
expected_return = np.array([
[20.218], [18.02], [17.8], [2.0]
], dtype=np.float)
assert np.allclose(discounted_return, expected_return)
# Test using gamma value of 0, should have no effect on rewards
td2 = td.copy()
discounted_return = td2.get_discounted_return(gamma=0.0)
expected_return = np.array([
[4], [2], [16], [2]
], dtype=np.float)
assert np.allclose(discounted_return, expected_return)
# Test end of episode
td3 = training_data.training_data()
td3.add(np.ones([1, 4, 4]), 0, 4, np.zeros([1, 4, 4]), False)
td3.add(np.ones([1, 4, 4]), 1, 2, np.zeros([1, 4, 4]), True)
td3.add(np.ones([1, 4, 4]), 2, 16, np.zeros([1, 4, 4]), False)
td3.add(np.ones([1, 4, 4]), 3, 2, np.zeros([1, 4, 4]), True)
discounted_return = td3.get_discounted_return()
expected_return = np.array([
[5.8], [2.0], [17.8], [2.0]
], dtype=np.float)
assert np.allclose(discounted_return, expected_return)
def test_normalize_rewards(self):
# Test calculating mean and standard deviation
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 1, 4, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 2, 4, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 3, 8, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 0, 16, np.zeros([1, 4, 4]))
td.normalize_rewards()
expected_reward = np.array([
[-0.8165], [-0.8165], [0.], [1.633],
], dtype=np.float)
assert np.allclose(td.get_reward(), expected_reward)
# Test specifying mean and standard deviation
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 1, 4, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 2, 4, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 3, 8, np.zeros([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 0, 16, np.zeros([1, 4, 4]))
td.normalize_rewards(mean=8, sd=1)
expected_reward = np.array([
[-4.], [-4.], [0.], [8.],
], dtype=np.float)
assert np.allclose(td.get_reward(), expected_reward)
def test_normalize_boards(self):
# Test calculating mean and standard deviation
td = training_data.training_data()
td.add(np.full((1, 4, 4), 4), 1, 4, np.full((1, 4, 4), 8))
td.add(np.full((1, 4, 4), 8), 2, 4, np.full((1, 4, 4), 16))
td.add(np.full((1, 4, 4), 16), 3, 4, np.full((1, 4, 4), 32))
td.add(np.full((1, 4, 4), 32), 4, 4, np.full((1, 4, 4), 64))
td.normalize_boards()
mean = 15.
sd = 10.7238052947636
a = (4. - mean) / sd
b = (8. - mean) / sd
c = (16. - mean) / sd
d = (32. - mean) / sd
e = (64. - mean) / sd
expected_x = np.array([
[[a, a, a, a], [a, a, a, a], [a, a, a, a], [a, a, a, a]],
[[b, b, b, b], [b, b, b, b], [b, b, b, b], [b, b, b, b]],
[[c, c, c, c], [c, c, c, c], [c, c, c, c], [c, c, c, c]],
[[d, d, d, d], [d, d, d, d], [d, d, d, d], [d, d, d, d]]
], dtype=np.float)
assert np.allclose(td.get_x(), expected_x)
expected_next_x = np.array([
[[b, b, b, b], [b, b, b, b], [b, b, b, b], [b, b, b, b]],
[[c, c, c, c], [c, c, c, c], [c, c, c, c], [c, c, c, c]],
[[d, d, d, d], [d, d, d, d], [d, d, d, d], [d, d, d, d]],
[[e, e, e, e], [e, e, e, e], [e, e, e, e], [e, e, e, e]]
], dtype=np.float)
assert np.allclose(td.get_next_x(), expected_next_x)
def test_save_restore(self):
# Set up training data
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 0, 4, np.zeros([1, 4, 4]))
td.add(np.zeros([1, 4, 4]), 1, 2, np.ones([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 2, 16, np.zeros([1, 4, 4]))
td.add(np.zeros([1, 4, 4]), 3, 2, np.ones([1, 4, 4]))
temp_dir = tempfile.mkdtemp()
temp_filename = os.path.join(temp_dir, 'data.csv')
td.export_csv(temp_filename)
td2 = training_data.training_data()
td2.import_csv(temp_filename)
expected_x = np.array([
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
], dtype=np.int)
expected_y_digit = np.array([
[0],
[1],
[2],
[3]
], dtype=np.int)
expected_reward = np.array([
[4],
[2],
[16],
[2]
], dtype=np.float)
expected_next_x = np.array([
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]]
], dtype=np.int)
assert np.array_equal(td2.get_x(), expected_x)
assert np.array_equal(td2.get_y_digit(), expected_y_digit)
assert np.allclose(td2.get_reward(), expected_reward)
assert np.array_equal(td2.get_next_x(), expected_next_x)
os.remove(temp_filename)
os.rmdir(temp_dir)
def test_shuffle(self):
td = training_data.training_data()
n = 5
for i in range(n):
# Use "is odd" for done
td.add(np.full((1, 4, 4), i), i, i, np.full((1, 4, 4), i), (i % 2) == 1)
td.shuffle()
for i in range(n):
# Find where this has been shuffled too
index_of_val = np.where(td.get_y_digit() == i)[0].item(0)
# Check that all parts of this equal i
arrays = td.get_n(index_of_val)
for a in arrays:
if a.dtype is np.dtype(np.bool):
assert((a == ((i % 2) == 1)).all())
else:
assert((a == i).all())
def test_make_boards_unique(self):
td = training_data.training_data()
td.add(np.ones([1, 4, 4]), 0, 4, np.zeros([1, 4, 4]))
td.add(np.zeros([1, 4, 4]), 1, 2, np.ones([1, 4, 4]))
td.add(np.ones([1, 4, 4]), 2, 16, np.zeros([1, 4, 4]))
td.add(np.zeros([1, 4, 4]), 3, 2, np.ones([1, 4, 4]))
td.make_boards_unique()
expected_x = np.array([
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
], dtype=np.int)
expected_y_digit = np.array([
[0],
[1]
], dtype=np.int)
expected_reward = np.array([
[4],
[2]
], dtype=np.float)
expected_next_x = np.array([
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]],
[[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]]
], dtype=np.int)
assert np.array_equal(td.get_x(), expected_x)
assert np.array_equal(td.get_y_digit(), expected_y_digit)
assert np.allclose(td.get_reward(), expected_reward)
assert np.array_equal(td.get_next_x(), expected_next_x)
if __name__ == '__main__':
import pytest
pytest.main()
| 44.173077 | 101 | 0.428515 | 22,760 | 0.990858 | 0 | 0 | 0 | 0 | 0 | 0 | 660 | 0.028733 |
31c9193304e5d2e005f4e4e2afb188aa62cfb25b | 2,218 | py | Python | tests/test_db_exam_psd.py | awenhaowenchao/bee | cac7522f8994aa3067c6e0a1bb3613de5c577129 | [
"MIT"
]
| 4 | 2019-11-12T05:01:42.000Z | 2022-02-23T01:52:11.000Z | tests/test_db_exam_psd.py | awenhaowenchao/bee | cac7522f8994aa3067c6e0a1bb3613de5c577129 | [
"MIT"
]
| 6 | 2021-03-19T08:13:39.000Z | 2022-03-02T15:00:19.000Z | tests/test_db_exam_psd.py | awenhaowenchao/bee | cac7522f8994aa3067c6e0a1bb3613de5c577129 | [
"MIT"
]
| null | null | null | from datetime import datetime
from bee import Psd, CX, On, T
from bee import Model, IntegerField, StringField, DateTimeField, Equal, W, C
db_exam = Psd.open("exam")
# 1) sing table count search, SELECT COUNT(*) AS COUNT FROM t_teacher
with db_exam.connection() as conn:
teacher_count = db_exam.Select(*CX("COUNT(*)", "COUNT")).From("t_teacher").int()
print("total techer count is %s" % teacher_count)
# 2) sing table search, SELECT * FROM t_teacher
with db_exam.connection() as conn:
teachers = db_exam.Select(*CX("*")).From("t_teacher").list()
print(teachers)
# 3) sing table search, SELECT * FROM t_teacher convert values to model of Teacher
class Teacher(Model):
__table__ = 't_teacher'
id = IntegerField(primary_key=True)
name = StringField()
with db_exam.connection() as conn:
teachers = db_exam.Select(*CX("*")).From("t_teacher").list(Teacher)
print(teachers)
# 4) sing table search, SELECT * FROM t_teacher WHERE id=? convert values to model of Teacher
with db_exam.connection() as conn:
teachers = db_exam.Select(*CX("*")).From("t_teacher").Where(W().equal("id", 1004)).list(Teacher)
print(teachers)
# 5) tow table Join search, SELECT DISTINCT id,cid,score FROM t_student JOIN t_sc ON id=sid WHERE id=?
with db_exam.connection() as conn:
result = db_exam.Query(C("id", "cid", "score"), True)\
.From("t_student")\
.Join("t_sc", On("id", "sid"))\
.Where(Equal("id", 1001))\
.list()
print(result)
#or use alias mode like 'SELECT DISTINCT s.id,sc.cid,sc.score FROM t_student AS s JOIN t_sc AS sc ON s.id=sc.sid WHERE s.id=?'
with db_exam.connection() as conn:
result = db_exam.Query(C("s.id", "sc.cid", "sc.score"), True)\
.From(T("t_student", "s"))\
.Join(T("t_sc", "sc"), On("s.id", "sc.sid"))\
.Where(Equal("s.id", 1001))\
.list()
print(result)
# 6) with transaction
with db_exam.transaction():
# insert sql
# update sql
# raise exception
# update Sql
pass
# 7) sing table search, SELECT * FROM t_student limit 0, 5
with db_exam.connection() as conn:
students = db_exam.Select(*CX("*")).From("t_student").limit(1, 5).list()
print(students)
| 31.239437 | 126 | 0.654193 | 115 | 0.051849 | 0 | 0 | 0 | 0 | 0 | 0 | 896 | 0.403968 |
31cb176f4032d56f3c4634406ddd887dbecb2fe6 | 19,781 | py | Python | slybot/slybot/plugins/scrapely_annotations/builder.py | coolkunal64/ht | b7c52d5604dd75ea4086a6ff92eaa2db85bb145c | [
"BSD-3-Clause"
]
| null | null | null | slybot/slybot/plugins/scrapely_annotations/builder.py | coolkunal64/ht | b7c52d5604dd75ea4086a6ff92eaa2db85bb145c | [
"BSD-3-Clause"
]
| null | null | null | slybot/slybot/plugins/scrapely_annotations/builder.py | coolkunal64/ht | b7c52d5604dd75ea4086a6ff92eaa2db85bb145c | [
"BSD-3-Clause"
]
| null | null | null | import json
from scrapy import Selector
from scrapy.utils.spider import arg_to_iter
from scrapely.htmlpage import parse_html, HtmlTag, HtmlDataFragment
from collections import defaultdict
from itertools import tee, count, groupby
from operator import itemgetter
from slybot.utils import (serialize_tag, add_tagids, remove_tagids, TAGID,
OPEN_TAG, CLOSE_TAG, UNPAIRED_TAG, GENERATEDTAGID)
from .migration import _get_parent, short_guid
class Annotations(object):
def save_extraction_data(self, data, template, **options):
"""
data = {
extracts: [
{
annotations: {"content": "Title"},
id: "id-string",
required: [],
tagid: 12,
# All keys below are optional
variant: 0,
text-content: "name-of-text-content-field",
ignore: True,
ignore_beneath: True,
insert_after: True,
slice: [2, 16],
item_container: True,
container_id: "parent-id-string",
schema_id: "schema-id-string",
repeated: true,
siblings: 2,
field: "field-id-to-be-added-to-in-parent-container"
}
]
}
"""
annotation_data = _clean_annotation_data(data.get('extracts', []))
data['extracts'] = annotation_data
body = template.get('body') or 'original_body'
if body not in template:
if 'original_body' in template:
body = 'original_body'
else:
bodies = [k for k, v in template.items()
if v and k.endswith('_body')]
if bodies:
body = bodies[0]
html = template[body]
template['annotated_body'] = apply_annotations(
annotation_data, html, bool(options.get('legacy')))
return data
def _clean_annotation_data(data):
result = []
sticky_count, stickies = count(1), set()
for ann in data:
if ann.get('item_container'):
ann['annotations'] = {'#portia-content': '#dummy'}
ann['text-content'] = '#portia-content'
elif 'data' in ann:
modified_annotations = {}
grp = itemgetter('attribute')
for _id, value in ann['data'].items():
value['id'] = '%s|%s' % (ann['id'], _id)
sorted_annotations = sorted(ann['data'].values(), key=grp)
for attribute, annotations in groupby(sorted_annotations, grp):
modified_annotations[attribute] = list(annotations)
ann['annotations'] = modified_annotations
elif 'annotations' in ann:
filtered_annotations = {}
for k, v in ann['annotations'].items():
if not v:
continue
if v == '#sticky':
next_sticky = '_sticky%s' % next(sticky_count)
stickies.add(next_sticky)
v = next_sticky
filtered_annotations[k] = v
ann['annotations'] = filtered_annotations
ann['required'] = list((set(ann.get('required', [])) | stickies) &
set(filtered_annotations.values()))
elif "ignore" in ann or "ignore_beneath" in ann:
pass
else:
continue
result.append(ann)
return result
def _get_data_id(annotation):
"""Get id (a str) of an annotation."""
if isinstance(annotation, HtmlTag):
return annotation.attributes[TAGID]
def _gen_annotation_info(annotations, legacy=False):
data = {}
annotation_data = []
for annotation in arg_to_iter(annotations):
if 'annotations' in annotation:
annotation_data.append({
'id': annotation.get('id', short_guid()),
'annotations': annotation.get('annotations', {}),
'required': annotation.get('required', []),
'required_fields': annotation.get('required', []),
'variant': int(annotation.get('variant', 0)),
'generated': annotation.get('generated', False),
'text-content': annotation.get('text-content', 'content'),
'item_container': annotation.get('item_container', False),
'container_id': annotation.get('container_id'),
'schema_id': annotation.get('schema_id'),
'repeated': annotation.get('repeated'),
'siblings': annotation.get('siblings'),
'field': annotation.get('field'),
'selector': annotation.get('selector'),
'selection_mode': annotation.get('selection_mode'),
'min_jump': annotation.get('min_jump', -1),
'max_separator': annotation.get('max_separator', -1),
'xpath': annotation.get('xpath')
})
if 'ignore' in annotation or 'ignore_beneath' in annotation:
if annotation.get('ignore_beneath'):
data['data-scrapy-ignore-beneath'] = 'true'
elif annotation.get('ignore'):
data['data-scrapy-ignore'] = 'true'
if annotation_data:
if legacy:
annotation_data = annotation_data[0]
serialized = json.dumps(annotation_data).replace('"', '"')
data['data-scrapy-annotate'] = serialized
return data
def _get_generated_annotation(element, annotations, nodes, html_body, inserts,
legacy=False):
eid = insert_after_tag = _get_data_id(element)
text_strings = _get_text_nodes(nodes, html_body)
text_content = ''.join((s.lstrip() for s in text_strings))
pre_selected = []
for annotation in annotations:
start, end = _get_generated_slice(annotation)
pre_selected.append((text_content[0:start], text_content[start:end],
annotation))
tag_stack = [insert_after_tag]
next_text_node = ''
for i, node in enumerate(nodes):
if isinstance(node, HtmlTag):
if node.tag_type == OPEN_TAG:
tagid = node.attributes.get(TAGID, '').strip()
if tagid:
tag_stack.append(tagid)
elif node.tag_type == CLOSE_TAG and tag_stack:
insert_after_tag = tag_stack.pop()
elif (isinstance(node, HtmlDataFragment) and len(tag_stack) == 1):
text = html_body[node.start:node.end]
# This allows for a clean way to insert fragments up until the
# next tag in apply_annotations if we have already inserted a new
# generated tag
if not node.is_text_content and inserts.get(insert_after_tag):
inserts[insert_after_tag].append(text)
continue
removed = 0
inserted = False
for j, (pre, selected, annotation) in enumerate(pre_selected[:]):
if selected and selected in text:
previous, post = text.split(selected, 1)
if previous.strip() in pre:
pre_selected.pop(j - removed)
removed += 1
generated = _generate_elem(
annotation, selected, legacy)
# Next immediate text node will be returned and added
# to the new document. Other text nodes within this
# node will be added after other child nodes have been
# closed.
if (insert_after_tag == eid and
not annotation.get('insert_after')):
next_text_node += previous + generated
inserted = True
else:
inserts[insert_after_tag].extend([previous,
generated])
text = post
if inserted:
next_text_node += text
else:
inserts[insert_after_tag].append(text)
return next_text_node
def _get_text_nodes(nodes, html_body):
text = []
open_tags = 0
for node in nodes:
if isinstance(node, HtmlTag):
if node.tag_type == OPEN_TAG:
open_tags += 1
elif node.tag_type == CLOSE_TAG:
open_tags -= 1
elif (isinstance(node, HtmlDataFragment) and
node.is_text_content and open_tags == 0):
text.append(html_body[node.start:node.end])
return text
def _get_generated_slice(annotation):
annotation_slice = annotation.get('slice', [0])[:2]
if not annotation_slice:
annotation_slice = [0, 0]
elif len(annotation_slice) < 2:
annotation_slice.append(annotation_slice[0])
return annotation_slice
def _generate_elem(annotation, text, legacy=False):
sections = ['<ins']
annotation_info = _gen_annotation_info(annotation, legacy)
annotation_info[GENERATEDTAGID] = annotation.get('id')
attributes = []
for key, value in annotation_info.items():
attributes.append('%s="%s"' % (key, value))
sections.append(' '.join(attributes))
if len(sections) > 1:
sections[0] += ' '
sections.extend(['>', text, '</ins>'])
return ''.join(sections)
def _get_inner_nodes(target, open_tags=1, insert_after=False,
stop_on_next=False):
nodes = []
while open_tags > -0:
elem = next(target)
if isinstance(elem, HtmlTag):
if elem.tag_type == OPEN_TAG:
open_tags += 1
if stop_on_next and elem.attributes.get(TAGID) is not None:
return nodes
elif (stop_on_next and
elem.tag_type == UNPAIRED_TAG and
elem.attributes.get(TAGID) is not None):
return nodes
elif elem.tag_type == CLOSE_TAG:
open_tags -= 1
nodes.append(elem)
if insert_after:
return _get_inner_nodes(target, stop_on_next=True)
return nodes
def _add_element(element, output, html):
if '__added' not in element.attributes:
output.append(html[element.start:element.end])
element.attributes['__added'] = True
return element
def _annotation_key(a):
return a.get('generated', False) + sum(a.get('slice', []))
def _filter_annotations(annotations):
selector, tagid = [], []
for ann in annotations:
if ann:
if ann.get('selector'):
selector.append(ann)
elif ann.get('tagid') and (ann.get('annotations') or
ann.get('ignore')):
tagid.append(ann)
return selector, tagid
def _merge_annotations_by_selector(annotations):
def grouper(x):
return x.get('selector')
annotations.sort(key=grouper)
return [list(annos) for _, annos in groupby(annotations, key=grouper)]
def apply_selector_annotations(annotations, target_page):
page = Selector(text=target_page)
converted_annotations = []
tagid_selector_map = {}
added_repeated = {}
containers = {}
for annotation in annotations:
if annotation.get('item_container'):
containers[annotation['id']] = annotation
selector = annotation.get('selector')
tagid, elems = tagid_for_annotation(annotation, page)
if tagid is not None:
annotation['tagid'] = tagid
if selector:
tagid_selector_map[tagid] = selector
converted_annotations.append(annotation)
# Create container for repeated field annotation
if (annotation.get('repeated') and
not annotation.get('item_container') and
elems is not None and len(elems) and
len(annotation.get('annotations')) == 1):
repeated_parent = add_repeated_field(annotation, elems, page)
if repeated_parent:
converted_annotations.append(repeated_parent)
container_id = repeated_parent['container_id']
added_repeated[container_id] = repeated_parent
if added_repeated:
for container_id, child in added_repeated.items():
container = containers[container_id]
if container['tagid'] != child['tagid']:
continue
_, elems = tagid_for_annotation(container, page)
parent = elems[0].getparent()
container['tagid'] = int(parent.attrib.get('data-tagid', 1e9))
return _merge_annotations_by_selector(converted_annotations)
def tagid_for_annotation(annotation, page):
selector = annotation.get('selector')
if not selector:
return None, None
elems = []
while selector and not elems:
elems = [elem._root for elem in page.css(selector)]
selector = ' > '.join(selector.split(' > ')[1:])
if not elems:
return None, None
tagids = [int(e.attrib.get('data-tagid', 1e9)) for e in elems]
return min(tagids), elems
def add_repeated_field(annotation, elems, page):
parent = _get_parent(elems, page)
field = annotation['annotations'].values()[0][0]['field']
container_id = '%s#parent' % annotation['id']
if len(parent):
tagid = int(parent.attrib.get('data-tagid', 1e9))
parent_annotation = {
'item_container': True,
'id': container_id,
'annotations': {'#portia-content': '#dummy'},
'text-content': '#portia-content',
'container_id': annotation['container_id'],
'field': field,
'tagid': tagid
}
annotation['item_container'] = True
annotation['field'] = field
annotation['container_id'] = container_id
return parent_annotation
def apply_annotations(annotations, target_page, legacy=False):
selector_annotations, tagid_annotations = _filter_annotations(annotations)
inserts = defaultdict(list)
numbered_html = add_tagids(target_page)
if selector_annotations:
converted_annotations = apply_selector_annotations(
selector_annotations, numbered_html)
tagid_annotations += converted_annotations
target = iter(parse_html(numbered_html))
output, tag_stack = [], []
element = next(target)
last_id = 0
# XXX: A dummy element is added to the end so if the last annotation is
# generated it will be added to the output
filtered = defaultdict(list)
for grouped in tagid_annotations:
for ann in arg_to_iter(grouped):
filtered[ann['tagid']].append(ann)
dummy = [(1e9, [{}])]
sorted_annotations = sorted([(int(k), v) for k, v in filtered.items()] +
dummy)
try:
for aid, annotation_data in sorted_annotations:
# Move target until replacement/insertion point
while True:
while not isinstance(element, HtmlTag) or element.tag == 'ins':
output.append(numbered_html[element.start:element.end])
element = next(target)
if element.tag_type in {OPEN_TAG, UNPAIRED_TAG}:
last_id = element.attributes.get(TAGID)
tag_stack.append(last_id)
if element.tag_type in {CLOSE_TAG, UNPAIRED_TAG} and tag_stack:
if ('__added' not in element.attributes and
last_id is not None and aid is not None and
int(last_id) < int(aid)):
output.append(numbered_html[element.start:element.end])
element.attributes['__added'] = True
last_inserted = tag_stack.pop()
to_insert = inserts.pop(last_inserted, None)
if to_insert:
output.extend(to_insert)
# Skip all nodes up to the next HtmlTag as these
# have already been added
while True:
element = next(target)
try:
last_id = element.attributes.get(TAGID,
last_id)
except AttributeError:
pass
if isinstance(element, HtmlTag):
break
continue
if (last_id is not None and aid is not None and
int(last_id) < int(aid)):
if '__added' not in element.attributes:
output.append(numbered_html[element.start:element.end])
element.attributes['__added'] = True
element = next(target)
else:
break
generated = []
next_generated = []
regular_annotations = []
# Place generated annotations at the end and sort by slice
for annotation in sorted(annotation_data, key=_annotation_key):
if annotation.get('generated'):
if annotation.get('insert_after'):
next_generated.append(annotation)
else:
generated.append(annotation)
else:
regular_annotations.append(annotation)
# Add annotations data as required
if regular_annotations:
annotation_info = _gen_annotation_info(regular_annotations,
legacy)
for key, val in annotation_info.items():
element.attributes[key] = val
next_text_section = ''
if generated:
inner_data, target = tee(target)
nodes = _get_inner_nodes(inner_data)
next_text_section = _get_generated_annotation(
element, generated, nodes, numbered_html, inserts,
legacy)
if next_generated:
inner_data, target = tee(target)
open_tags = 0 if element.tag_type == UNPAIRED_TAG else 1
nodes = _get_inner_nodes(inner_data, open_tags=open_tags,
insert_after=True)
next_text_section = _get_generated_annotation(
element, next_generated, nodes, numbered_html, inserts,
legacy)
if '__added' not in element.attributes:
output.append(serialize_tag(element))
element.attributes['__added'] = True
# If an <ins> tag has been inserted we need to move forward
if next_text_section:
while True:
elem = next(target)
if (isinstance(elem, HtmlDataFragment) and
elem.is_text_content):
break
output.append(numbered_html[elem.start:elem.end])
output.append(next_text_section)
# Reached the end of the document
except StopIteration:
output.append(numbered_html[element.start:element.end])
else:
for element in target:
output.append(numbered_html[element.start:element.end])
return remove_tagids(''.join(output))
| 41.210417 | 79 | 0.553258 | 1,613 | 0.081543 | 0 | 0 | 0 | 0 | 0 | 0 | 3,202 | 0.161873 |
31ce1f02b533697e41ca279b2476dd124fe63eb7 | 9,840 | py | Python | scripts/slave/recipes/mojo.py | bopopescu/chromium-build | f8e42c70146c1b668421ee6358dc550a955770a3 | [
"BSD-3-Clause"
]
| null | null | null | scripts/slave/recipes/mojo.py | bopopescu/chromium-build | f8e42c70146c1b668421ee6358dc550a955770a3 | [
"BSD-3-Clause"
]
| null | null | null | scripts/slave/recipes/mojo.py | bopopescu/chromium-build | f8e42c70146c1b668421ee6358dc550a955770a3 | [
"BSD-3-Clause"
]
| 1 | 2020-07-22T09:16:32.000Z | 2020-07-22T09:16:32.000Z | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'adb',
'depot_tools/bot_update',
'depot_tools/gclient',
'goma',
'recipe_engine/context',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/step',
'recipe_engine/url',
'depot_tools/tryserver',
]
def _CheckoutSteps(api, buildername):
# Checkout mojo and its dependencies (specified in DEPS) using gclient
api.gclient.set_config('mojo')
if 'Android' in buildername:
api.gclient.apply_config('android')
api.bot_update.ensure_checkout()
api.gclient.runhooks()
def _BuildSteps(api, buildername, is_debug, is_official):
mojob_path = api.path['checkout'].join('mojo', 'tools', 'mojob.py')
args = []
gn_args = []
if 'Android' in buildername:
args += ['--android']
if 'ASan' in buildername:
args += ['--asan']
if api.tryserver.is_tryserver:
args += ['--dcheck_always_on']
env = {}
goma_dir = ''
if 'Win' not in buildername:
# Disable Goma on Windows as it makes the build much slower (> 1 hour vs
# 15 minutes). Try renabling once we have trybots and the cache would be
# warm.
goma_dir = api.goma.ensure_goma()
env['GOMA_SERVICE_ACCOUNT_JSON_FILE'] = api.goma.service_account_json_path
if is_debug:
build_type = "--debug"
elif is_official:
build_type = "--official"
else:
build_type = "--release"
if goma_dir:
env['GOMA_DIR'] = goma_dir
with api.context(env=env):
with api.context(cwd=api.path['checkout']):
api.python('mojob gn',
mojob_path,
args=['gn', build_type] + args + gn_args)
api.python('mojob build',
mojob_path,
args=['build', build_type] + args)
def _DeviceCheckStep(api):
known_devices_path = api.path.join(
api.path.expanduser('~'), '.android', 'known_devices.json')
# Device recovery.
args = [
'--known-devices-file', known_devices_path,
'--adb-path', api.adb.adb_path(),
'-v'
]
api.step(
'device_recovery',
[api.path['checkout'].join('third_party', 'catapult', 'devil',
'devil', 'android', 'tools',
'device_recovery.py')] + args,
infra_step=True)
# Device provisioning.
api.python(
'provision_device',
api.path['checkout'].join('third_party', 'catapult', 'devil',
'devil', 'android', 'tools',
'provision_devices.py'),
infra_step=True)
# Device Status.
try:
buildbot_file = '/home/chrome-bot/.adb_device_info'
args = [
'--json-output', api.json.output(),
'--known-devices-file', known_devices_path,
'--buildbot-path', buildbot_file,
'-v', '--overwrite-known-devices-files',
]
result = api.python(
'device_status',
api.path['checkout'].join('third_party', 'catapult', 'devil', 'devil',
'android', 'tools', 'device_status.py'),
args=args,
infra_step=True)
return result
except api.step.InfraFailure as f:
params = {
'summary': ('Device Offline on %s %s' %
(api.properties['mastername'], api.properties['bot_id'])),
'comment': ('Buildbot: %s\n(Please do not change any labels)' %
api.properties['buildername']),
'labels': 'Restrict-View-Google,OS-Android,Infra-Client,Infra-Labs',
}
link = ('https://code.google.com/p/chromium/issues/entry?%s' %
api.url.urlencode(params))
f.result.presentation.links.update({
'report a bug': link
})
raise
def _GetTestConfig(api):
buildername = api.properties.get('buildername')
test_config = {}
if 'Android' in buildername:
test_config['target_os'] = 'android'
elif 'Linux' in buildername:
test_config['target_os'] = 'linux'
elif 'Win' in buildername:
test_config['target_os'] = 'windows'
else:
raise NotImplementedError('Unknown platform') # pragma: no cover
test_config['is_debug'] = 'dbg' in buildername
if 'Official' in buildername:
# This is not reached, as we only have Android official builds.
raise NotImplementedError(
'Testing not supported for official builds') # pragma: no cover
if 'Perf' in buildername:
test_config['test_types'] = ['perf']
else:
test_config['test_types'] = ['default']
if 'ASan' in buildername:
test_config['sanitizer'] = 'asan'
test_config['master_name'] = api.properties.get('mastername')
test_config['builder_name'] = api.properties.get('buildername')
test_config['build_number'] = api.properties.get('buildnumber')
test_config['test_results_server'] = api.properties.get(
'test_results_server', 'test-results.appspot.com')
test_config['dcheck_always_on'] = api.tryserver.is_tryserver
return test_config
def _TestSteps(api):
get_test_list_path = api.path['checkout'].join('mojo', 'tools',
'get_test_list.py')
test_config = _GetTestConfig(api)
test_out = [{'name': u'Hello', 'command': ['world']}]
result = api.python('get_test_list', get_test_list_path,
args=[api.json.input(test_config), api.json.output()],
step_test_data=lambda: api.json.test_api.output(test_out))
test_list = result.json.output
with api.step.defer_results():
for entry in test_list:
name = str(entry['name']) # api.step() wants a non-Unicode string.
command = entry['command']
with api.context(cwd=api.path['checkout']):
api.step(name, command)
def _UploadShellAndApps(api, buildername):
upload_path = api.path['checkout'].join('mojo', 'tools', 'upload_binaries.py')
is_android = 'Android' in buildername
args = []
if is_android:
args.append('--android')
if 'Official' in buildername:
args.append('--official')
api.python('upload shell and app binaries', upload_path, args)
def RunSteps(api):
buildername = api.properties.get('buildername')
_CheckoutSteps(api, buildername)
is_debug = 'dbg' in buildername
is_official = 'Official' in buildername
_BuildSteps(api, buildername, is_debug, is_official)
is_linux = 'Linux' in buildername
is_win = 'Win' in buildername
is_android = 'Android' in buildername
is_tester = 'Tests' in buildername
is_try = api.tryserver.is_tryserver
is_asan = 'ASan' in buildername
is_perf = 'Perf' in buildername
if is_android and is_tester:
_DeviceCheckStep(api)
upload_binaries = ((is_linux or is_android)
and not is_debug and not is_try and not is_perf and not is_asan)
if not is_tester and not is_linux and not is_win:
# TODO(blundell): Eliminate this special case
# once there's an Android release tester bot.
if upload_binaries and is_android:
_UploadShellAndApps(api, buildername)
return
_TestSteps(api)
# TODO(blundell): Remove the "and not is_android" once there's an
# Android release tester bot and I've removed the logic uploading the
# shell on Android above.
if upload_binaries and not is_android:
_UploadShellAndApps(api, buildername)
def GenTests(api):
tests = [
['mojo_linux', 'Mojo Linux'],
['mojo_linux_dbg', 'Mojo Linux (dbg)'],
['mojo_linux_asan', 'Mojo Linux ASan'],
['mojo_linux_asan_dbg', 'Mojo Linux ASan (dbg)'],
['mojo_android_builder', 'Mojo Android Builder'],
['mojo_android_official', 'Mojo Android Official Builder'],
['mojo_android_dbg', 'Mojo Android (dbg)'],
['mojo_android_builder_tests_dbg', 'Mojo Android Builder Tests (dbg)'],
['mojo_win_dbg', 'Mojo Win (dbg)'],
['mojo_linux_perf', 'Mojo Linux Perf']
]
for test_name, buildername in tests:
test = api.test(test_name) + api.properties.generic(buildername=buildername)
if 'Android' in buildername and 'Tests' in buildername:
test += api.step_data("device_status", api.json.output([
{
"battery": {
"status": "5",
"scale": "100",
"temperature": "249",
"level": "100",
"AC powered": "false",
"health": "2",
"voltage": "4286",
"Wireless powered": "false",
"USB powered": "true",
"technology": "Li-ion",
"present": "true"
},
"wifi_ip": "",
"imei_slice": "Unknown",
"ro.build.id": "LRX21O",
"build_detail":
"google/razor/flo:5.0/LRX21O/1570415:userdebug/dev-keys",
"serial": "07a00ca4",
"ro.build.product": "flo",
"adb_status": "device",
"blacklisted": False,
"usb_status": True,
},
{
"adb_status": "offline",
"blacklisted": True,
"serial": "03e0363a003c6ad4",
"usb_status": False,
},
{
"adb_status": "unauthorized",
"blacklisted": True,
"serial": "03e0363a003c6ad5",
"usb_status": True,
},
{
"adb_status": "device",
"blacklisted": True,
"serial": "03e0363a003c6ad6",
"usb_status": True,
},
{}
]))
yield test
yield(api.test('mojo_linux_try') +
api.properties.tryserver(buildername="Mojo Linux Try"))
yield(api.test('mojo_android_builder_tests_dbg_fail_device_check') +
api.properties.tryserver(buildername="Mojo Android Builder Tests (dbg)") +
api.step_data("device_status", retcode=1))
| 32.582781 | 80 | 0.613821 | 0 | 0 | 2,489 | 0.252947 | 0 | 0 | 0 | 0 | 3,950 | 0.401423 |
31d0500f716a9df50359013b1a61c1658f74b81f | 3,101 | py | Python | lib/navigation/AreaFighting.py | sadnecc/pb | 3142764e350f92034eb671cfc3d1dae42dea0f0a | [
"MIT"
]
| null | null | null | lib/navigation/AreaFighting.py | sadnecc/pb | 3142764e350f92034eb671cfc3d1dae42dea0f0a | [
"MIT"
]
| null | null | null | lib/navigation/AreaFighting.py | sadnecc/pb | 3142764e350f92034eb671cfc3d1dae42dea0f0a | [
"MIT"
]
| 1 | 2021-05-30T11:15:02.000Z | 2021-05-30T11:15:02.000Z | # -*- coding:utf8 -*-
import random
import time
from lib.navigation.PathFinding import Pathfinding
from lib.control.Control import Control
from lib.unit.Player import Player
from lib.struct.CoordiPoint import CoordiPoint
# 区域打怪
class AreaFighting(Pathfinding):
# area_pos: 区域4角坐标。顺序为:左上,右上,左下,右下
def __init__(self, control: Control, player: Player, area_pos, move_type=0):
Pathfinding.__init__(self, control=control, player=player)
self.area_pos = area_pos
self.hander_area = open("tmp/logs/" + self.getFormatTime(False) + "_areafighting.log", 'a+')
self.start_pos = self.getNowPos()
self.move_type = move_type
# 回到区域内
def goto_area(self):
nowPos = self.getNowPos()
if not AreaFighting.pos_in_area(nowPos,self.area_pos) and self.player.getStatus()['combat'] == 0:
print(nowPos.toString())
print("not in area #################################################################################")
print("not in area #################################################################################", file=self.hander_area)
from lib.navigation.EnemyFinder import EnemyFinder
EnemyFinder(self.player,self.control).clear_target() #在区域外选中怪先取消掉,免得走回区域后又跑出来打这个怪
# 直接走向区域中心,没有多余路点
self.walk(
self.__get_center_of_area(),
move_type=self.move_type,
sleep=0.3,
precision=0.3,
last=3,
combat_exit=True
)
#self.player.not_combat_recover()
self.player.combat_recover()
return True
# 获取区域中心点坐标
# 计算方法:使用中点公式计算一i条对角线的中点即可
def __get_center_of_area(self):
left_top = self.area_pos["leftTop"]
right_bottom = self.area_pos["rightBottom"]
center = [(left_top[0] + right_bottom[0]) / 2, (left_top[1] + right_bottom[1]) / 2]
print("center:")
print(center)
return CoordiPoint(center[0], center[1])
# 判断给定坐标是否在区域内
# 向量叉积(顺时针方向)
# 四边形内的点都在顺时针(逆时针)向量的同一边,即夹角小于90o,向量积同向
# a = (B.x - A.x)*(y - A.y) - (B.y - A.y)*(x - A.x);
# b = (C.x - B.x)*(y - B.y) - (C.y - B.y)*(x - B.x);
# c = (D.x - C.x)*(y - C.y) - (D.y - C.y)*(x - C.x);
# d = (A.x - D.x)*(y - D.y) - (A.y - D.y)*(x - D.x);
@staticmethod
def pos_in_area(pos: CoordiPoint,area):
A = CoordiPoint(area["leftTop"][0], area["leftTop"][1])
B = CoordiPoint(area["rightTop"][0], area["rightTop"][1])
C = CoordiPoint(area["rightBottom"][0], area["rightBottom"][1])
D = CoordiPoint(area["leftBottom"][0], area["leftBottom"][1])
a = (B.x - A.x) * (pos.y - A.y) - (B.y - A.y) * (pos.x - A.x)
b = (C.x - B.x) * (pos.y - B.y) - (C.y - B.y) * (pos.x - B.x)
c = (D.x - C.x) * (pos.y - C.y) - (D.y - C.y) * (pos.x - C.x)
d = (A.x - D.x) * (pos.y - D.y) - (A.y - D.y) * (pos.x - D.x)
if (a > 0 and b > 0 and c > 0 and d > 0) or (a < 0 and b < 0 and c < 0 and d < 0):
return True
return False
| 43.676056 | 137 | 0.525637 | 3,179 | 0.930348 | 0 | 0 | 745 | 0.218028 | 0 | 0 | 1,136 | 0.332455 |
31d09d41c952173a6ae2b73dccad4ea1fbc25f01 | 722 | py | Python | compress.py | willemwouters/PhotoboothPi | 7ef65d1411af15ea51e23ea8ddbd598affd2680d | [
"Beerware"
]
| null | null | null | compress.py | willemwouters/PhotoboothPi | 7ef65d1411af15ea51e23ea8ddbd598affd2680d | [
"Beerware"
]
| null | null | null | compress.py | willemwouters/PhotoboothPi | 7ef65d1411af15ea51e23ea8ddbd598affd2680d | [
"Beerware"
]
| null | null | null | import os
import time
import sys
if(len(sys.argv) is 1):
path="/home/pi/storage/"
else:
path=sys.argv[1]
try:
arr=[]
for filename in os.listdir(path):
if("2018-09" in filename):
arr.append(filename)
for f in arr:
filen = os.path.splitext(f)[0]
if(("%s.h264" % filen) in arr) and (("%s.mp3" % filen) in arr and ("%s.mp4" % filen) not in arr):
if(("%s.h264" % filen) == f):
time.sleep(1)
os.system("ffmpeg -i %s -i %s -c:v copy -c:a aac -strict experimental %s" % (path + f, path + filen + ".mp3", path + filen + ".mp4"))
os.system("rm %s %s" % (path + filen + ".mp3", path + f))
except:
print "d" | 30.083333 | 149 | 0.50831 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 156 | 0.216066 |
31d152e1371b0f6b06f2bd25172cc000079294dd | 71 | py | Python | html/en/reference/graphs/sage/graphs/graph_plot-2.py | sagemath/documentation | 65dcf569b6e95bfae7c76b40a46af3a9f77479f4 | [
"Apache-2.0"
]
| 10 | 2015-05-17T10:52:08.000Z | 2022-03-28T12:15:09.000Z | html/en/reference/graphs/sage/graphs/graph_plot-2.py | sagemath/documentation | 65dcf569b6e95bfae7c76b40a46af3a9f77479f4 | [
"Apache-2.0"
]
| 19 | 2015-05-15T17:06:31.000Z | 2021-08-25T09:13:17.000Z | html/en/reference/graphs/sage/graphs/graph_plot-2.py | sagemath/documentation | 65dcf569b6e95bfae7c76b40a46af3a9f77479f4 | [
"Apache-2.0"
]
| 21 | 2015-12-15T21:19:29.000Z | 2022-01-03T14:24:20.000Z | petersen_spring = Graph(':I`ES@obGkqegW~')
sphinx_plot(petersen_spring) | 35.5 | 42 | 0.802817 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 17 | 0.239437 |
31d15ba780a95a70da6a42fd922fdf3f8a69aedc | 77 | py | Python | module00/ex05/kata00.py | MedAymenF/42AI-Python-bootcamp | 41af2221b95b305ee08ee8f582e68700f1a8c32b | [
"Apache-2.0"
]
| null | null | null | module00/ex05/kata00.py | MedAymenF/42AI-Python-bootcamp | 41af2221b95b305ee08ee8f582e68700f1a8c32b | [
"Apache-2.0"
]
| null | null | null | module00/ex05/kata00.py | MedAymenF/42AI-Python-bootcamp | 41af2221b95b305ee08ee8f582e68700f1a8c32b | [
"Apache-2.0"
]
| null | null | null | t = (19, 42, 21)
print(f"The {len(t)} numbers are: {t[0]}, {t[1]}, {t[2]}")
| 19.25 | 58 | 0.467532 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 51 | 0.662338 |
31d16107e52098b68243258cade721f1a3c378e4 | 3,002 | py | Python | examples/cochrane-simplification/log_regression/bow_newsela_lm_tokens.py | AshOlogn/transformers | c4b8c360d4aa78642f4a815ddd2ba9c9fa304c8d | [
"Apache-2.0"
]
| null | null | null | examples/cochrane-simplification/log_regression/bow_newsela_lm_tokens.py | AshOlogn/transformers | c4b8c360d4aa78642f4a815ddd2ba9c9fa304c8d | [
"Apache-2.0"
]
| null | null | null | examples/cochrane-simplification/log_regression/bow_newsela_lm_tokens.py | AshOlogn/transformers | c4b8c360d4aa78642f4a815ddd2ba9c9fa304c8d | [
"Apache-2.0"
]
| null | null | null | import json
import os
from os.path import join
from random import shuffle
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer
from sklearn.preprocessing import MinMaxScaler, normalize
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score, StratifiedKFold, train_test_split
from sklearn.metrics import accuracy_score
from transformers import BertTokenizer, BertConfig, BartTokenizer
def make_vector(text, tokenizer):
token_ids = tokenizer.encode(text)[1:-1]
count_vector = np.zeros(tokenizer.vocab_size, dtype=np.int16)
for ID in token_ids:
count_vector[ID] += 1
return count_vector
def dataloader(data_dir, batch_size=5000):
names = [x[:-6] for x in os.listdir(data_dir) if x[-5:] == '3.txt']
index = 0
while index < len(names):
cur_names = names[index:index+batch_size]
tuples = []
for name in cur_names:
hard = open(join(data_dir, f'{name}.0.txt')).read()
simple = open(join(data_dir, f'{name}.3.txt')).read()
tuples.append((hard, simple))
yield tuples
index += batch_size
def construct_dataset(tuples, tokenizer):
X = np.empty((2*len(tuples), tokenizer.vocab_size), dtype=np.int16)
y = np.empty(2*len(tuples), dtype=np.int16)
index = 0
for s,t in tuples:
X[index] = make_vector(s, tokenizer)
X[index+1] = make_vector(t, tokenizer)
y[index] = 0
y[index+1] = 1
index += 2
return X, y
def get_vocab(tokenizer):
tokens = [tokenizer.decode([i], clean_up_tokenization_spaces=False) for i in range(tokenizer.vocab_size)]
return tokens
def simple_term_counts(data_dir='data/newsela/articles'):
tokenizer = BartTokenizer.from_pretrained('facebook/bart-large-xsum')
model = LogisticRegression(max_iter=100)
for batch in dataloader(data_dir):
X, y = construct_dataset(batch, tokenizer)
#X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
#apply feature scaling
#X_train = normalize(X_train)
#X_test = normalize(X_test)
#model.fit(X_train, y_train)
#predictions = model.predict(X_test)
#print(accuracy_score(y_test, predictions))
X = normalize(X)
model.fit(X, y)
vocab = get_vocab(tokenizer)
weights = np.squeeze(model.coef_, axis=0).tolist()
sorted_weights = filter(lambda x: len(x[1].strip()) > 0, zip(range(tokenizer.vocab_size), vocab, weights))
sorted_weights = list(sorted(sorted_weights, key=lambda x: x[2]))
with open('data/logr_weights/bart_freq_newsela_ids.txt', 'w') as f:
for ID, word, weight in sorted_weights:
f.write(f'{ID} {weight}\n')
with open('data/logr_weights/bart_freq_newsela_tokens.txt', 'w') as f:
for ID, word, weight in sorted_weights:
f.write(f'{word} {weight}\n')
print(simple_term_counts())
| 31.93617 | 110 | 0.672552 | 0 | 0 | 492 | 0.163891 | 0 | 0 | 0 | 0 | 482 | 0.16056 |
31d1dcdc84557e4ef3daa2e742b3df53f7c45b0e | 1,569 | py | Python | fdrtd_server/exceptions.py | UNakade/server | f659524242d01fe67f9801ab41fabf46640ad590 | [
"MIT"
]
| null | null | null | fdrtd_server/exceptions.py | UNakade/server | f659524242d01fe67f9801ab41fabf46640ad590 | [
"MIT"
]
| null | null | null | fdrtd_server/exceptions.py | UNakade/server | f659524242d01fe67f9801ab41fabf46640ad590 | [
"MIT"
]
| null | null | null | import logging as _logging
def handle_exception(e):
if isinstance(e, ApiError):
_logging.exception(e.message)
return e.message, e.statuscode
_logging.exception(repr(e))
return None, 500
class ApiError(Exception):
def __init__(self, statuscode, message):
self.statuscode = statuscode
self.message = message
def __str__(self):
return self.message
class InternalServerError(ApiError):
def __init__(self, message):
super().__init__(500, f'internal server error: {message}')
class NotAvailable(ApiError):
def __init__(self, missing):
super().__init__(501, f'not implemented / not available: {missing}')
class MissingParameter(ApiError):
def __init__(self, missing):
super().__init__(400, f'missing parameter: {missing}')
class InvalidParameter(ApiError):
def __init__(self, parameter, invalid):
super().__init__(400, f'invalid parameter: {parameter} = {invalid}')
class InvalidIdentifier(ApiError):
def __init__(self, identifier, invalid):
super().__init__(404, f'invalid identifier: {identifier} = {invalid}')
class MicroserviceNotFound(ApiError):
def __init__(self, missing):
super().__init__(404, f'microservice not available: {missing}')
class FunctionNotFound(ApiError):
def __init__(self, missing):
super().__init__(404, f'function not available: {missing}')
class FunctionNotPublic(ApiError):
def __init__(self, missing):
super().__init__(403, f'function not public: {missing}')
| 22.414286 | 78 | 0.684512 | 1,324 | 0.84385 | 0 | 0 | 0 | 0 | 0 | 0 | 312 | 0.198853 |
31d2496126215c595f14a661c15b593c05970c11 | 3,081 | py | Python | TCU/usageexample/automationexample.py | p--q/TCU | 51e2569cdc7553344f2ce279107a39677c3b106e | [
"BSD-3-Clause"
]
| null | null | null | TCU/usageexample/automationexample.py | p--q/TCU | 51e2569cdc7553344f2ce279107a39677c3b106e | [
"BSD-3-Clause"
]
| null | null | null | TCU/usageexample/automationexample.py | p--q/TCU | 51e2569cdc7553344f2ce279107a39677c3b106e | [
"BSD-3-Clause"
]
| null | null | null | #!/opt/libreoffice5.4/program/python
# -*- coding: utf-8 -*-
import unohelper # オートメーションには必須(必須なのはuno)。
def macro():
ctx = XSCRIPTCONTEXT.getComponentContext() # コンポーネントコンテクストの取得。
smgr = ctx.getServiceManager() # サービスマネージャーの取得。
tcu = smgr.createInstanceWithContext("pq.Tcu", ctx) # サービス名か実装名でインスタンス化。
print("\n".join(tcu.treelines(ctx)))
g_exportedScripts = macro, #マクロセレクターに限定表示させる関数をタプルで指定。
if __name__ == "__main__": # オートメーションで実行するとき
def automation(): # オートメーションのためにglobalに出すのはこの関数のみにする。
import officehelper
from functools import wraps
import sys
from com.sun.star.beans import PropertyValue
from com.sun.star.script.provider import XScriptContext
def connectOffice(func): # funcの前後でOffice接続の処理
@wraps(func)
def wrapper(): # LibreOfficeをバックグラウンドで起動してコンポーネントテクストとサービスマネジャーを取得する。
try:
ctx = officehelper.bootstrap() # コンポーネントコンテクストの取得。
except:
print("Could not establish a connection with a running office.", file=sys.stderr)
sys.exit()
print("Connected to a running office ...")
smgr = ctx.getServiceManager() # サービスマネジャーの取得。
print("Using {} {}".format(*_getLOVersion(ctx, smgr))) # LibreOfficeのバージョンを出力。
return func(ctx, smgr) # 引数の関数の実行。
def _getLOVersion(ctx, smgr): # LibreOfficeの名前とバージョンを返す。
cp = smgr.createInstanceWithContext('com.sun.star.configuration.ConfigurationProvider', ctx)
node = PropertyValue(Name = 'nodepath', Value = 'org.openoffice.Setup/Product' ) # share/registry/main.xcd内のノードパス。
ca = cp.createInstanceWithArguments('com.sun.star.configuration.ConfigurationAccess', (node,))
return ca.getPropertyValues(('ooName', 'ooSetupVersion')) # LibreOfficeの名前とバージョンをタプルで返す。
return wrapper
@connectOffice # createXSCRIPTCONTEXTの引数にctxとsmgrを渡すデコレータ。
def createXSCRIPTCONTEXT(ctx, smgr): # XSCRIPTCONTEXTを生成。
class ScriptContext(unohelper.Base, XScriptContext):
def __init__(self, ctx):
self.ctx = ctx
def getComponentContext(self):
return self.ctx
def getDesktop(self):
return ctx.getByName('/singletons/com.sun.star.frame.theDesktop') # com.sun.star.frame.Desktopはdeprecatedになっている。
def getDocument(self):
return self.getDesktop().getCurrentComponent()
return ScriptContext(ctx)
XSCRIPTCONTEXT = createXSCRIPTCONTEXT() # XSCRIPTCONTEXTの取得。
doc = XSCRIPTCONTEXT.getDocument() # 現在開いているドキュメントを取得。
doctype = "scalc", "com.sun.star.sheet.SpreadsheetDocument" # Calcドキュメントを開くとき。
# doctype = "swriter", "com.sun.star.text.TextDocument" # Writerドキュメントを開くとき。
if (doc is None) or (not doc.supportsService(doctype[1])): # ドキュメントが取得できなかった時またはCalcドキュメントではない時
XSCRIPTCONTEXT.getDesktop().loadComponentFromURL("private:factory/{}".format(doctype[0]), "_blank", 0, ()) # ドキュメントを開く。ここでdocに代入してもドキュメントが開く前にmacro()が呼ばれてしまう。
flg = True
while flg:
doc = XSCRIPTCONTEXT.getDocument() # 現在開いているドキュメントを取得。
if doc is not None:
flg = (not doc.supportsService(doctype[1])) # ドキュメントタイプが確認できたらwhileを抜ける。
return XSCRIPTCONTEXT
XSCRIPTCONTEXT = automation() # XSCRIPTCONTEXTを取得。
macro() # マクロの実行。
| 50.508197 | 162 | 0.73937 | 397 | 0.099176 | 0 | 0 | 1,256 | 0.313765 | 0 | 0 | 2,188 | 0.54659 |
31d28cfa4763d607d589139656b5abdc86e64785 | 77 | py | Python | last_char.py | AkhilaSaiBejjarapu/Python | 238cc7692cf2e93eb585a03967b8d688ee3760f2 | [
"MIT"
]
| null | null | null | last_char.py | AkhilaSaiBejjarapu/Python | 238cc7692cf2e93eb585a03967b8d688ee3760f2 | [
"MIT"
]
| null | null | null | last_char.py | AkhilaSaiBejjarapu/Python | 238cc7692cf2e93eb585a03967b8d688ee3760f2 | [
"MIT"
]
| null | null | null | word=input()
last_letter=(len(word)-1)
result=word[last_letter]
print(result) | 19.25 | 25 | 0.779221 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
31d43ead09e1c7effc26eae228b072a20a8b0310 | 3,261 | py | Python | simple_retry/decorators.py | nicolasmota/retry_decorator | 65eab450e65fe8c08d07cd213628e655baa5ae55 | [
"MIT"
]
| 11 | 2018-03-06T17:09:50.000Z | 2018-10-26T04:31:50.000Z | simple_retry/decorators.py | nicolasmota/retry_decorator | 65eab450e65fe8c08d07cd213628e655baa5ae55 | [
"MIT"
]
| 9 | 2018-03-06T03:56:44.000Z | 2018-10-26T04:48:42.000Z | simple_retry/decorators.py | nicolasmota/retry_decorator | 65eab450e65fe8c08d07cd213628e655baa5ae55 | [
"MIT"
]
| 2 | 2018-03-15T03:11:14.000Z | 2018-07-07T17:11:06.000Z | import time
from functools import wraps
import asyncio
from simple_retry.simple_retry.helpers import (
format_retry_message,
has_retries_to_go,
log_message
)
def retry(Except, retries=5, delay=0, logger=None, level='info', multiple=1):
def deco_retry(function):
@wraps(function)
def f_retry(*args, **kwargs):
tries = 1
mdelay = delay
while has_retries_to_go(
tries_performed=tries,
retries_limit=retries
):
try:
return function(*args, **kwargs)
except Except as e:
log_message(
logger=logger,
level=level,
exception=e,
tries_performed=tries,
retries_limit=retries,
wait_delay_multiple=multiple
)
time.sleep(mdelay)
mdelay *= multiple
tries += 1
return function(*args, **kwargs)
return f_retry
return deco_retry
def coroutine_retry(
Except,
retries=5,
delay=0,
logger=None,
level='info',
multiple=1
):
def deco_retry(function):
@asyncio.coroutine
@wraps(function)
def f_retry(*args, **kwargs):
tries = 1
mdelay = delay
while has_retries_to_go(
tries_performed=tries,
retries_limit=retries
):
try:
return (yield from (function(*args, **kwargs)))
except Except as e:
log_message(
logger=logger,
level=level,
exception=e,
tries_performed=tries,
retries_limit=retries,
wait_delay_multiple=multiple
)
yield from (asyncio.sleep(mdelay))
mdelay *= multiple
tries += 1
return (yield from function(*args, **kwargs))
return f_retry
return deco_retry
def async_retry(
Except,
retries=5,
delay=0,
logger=None,
level='info',
multiple=1
):
def deco_retry(function):
@wraps(function)
async def f_retry(*args, **kwargs):
tries = 1
mdelay = delay
while has_retries_to_go(
tries_performed=tries,
retries_limit=retries
):
try:
return await (function(*args, **kwargs))
except Except as e:
log_message(
logger=logger,
level=level,
exception=e,
tries_performed=tries,
retries_limit=retries,
wait_delay_multiple=multiple
)
await (asyncio.sleep(mdelay))
mdelay *= multiple
tries += 1
return await (function(*args, **kwargs))
return f_retry
return deco_retry
| 26.512195 | 77 | 0.462435 | 0 | 0 | 1,074 | 0.329347 | 2,535 | 0.777369 | 819 | 0.25115 | 18 | 0.00552 |
31d44c5f099da57a280d3e04440215f00f79e111 | 153 | py | Python | environment.py | bopopescu/cbrc-devteam-blog | eb4f7977d112b1ee692dad60ed46802d2ee243f4 | [
"Apache-2.0"
]
| null | null | null | environment.py | bopopescu/cbrc-devteam-blog | eb4f7977d112b1ee692dad60ed46802d2ee243f4 | [
"Apache-2.0"
]
| null | null | null | environment.py | bopopescu/cbrc-devteam-blog | eb4f7977d112b1ee692dad60ed46802d2ee243f4 | [
"Apache-2.0"
]
| 1 | 2020-07-24T03:59:01.000Z | 2020-07-24T03:59:01.000Z | # application environment
import settings
import sys
sys.path.append(settings.app_home_dir)
sys.path.append(settings.app_settings["app_lib_dir"])
| 21.857143 | 54 | 0.797386 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.254902 |
31d4b6913b04eb19080a816c2290d803b8ff2f23 | 8,618 | py | Python | cogeo_mosaic/backends/base.py | drnextgis/cogeo-mosaic | 034d0124a2da894c2bb432b1c0cebba7f716edbd | [
"MIT"
]
| null | null | null | cogeo_mosaic/backends/base.py | drnextgis/cogeo-mosaic | 034d0124a2da894c2bb432b1c0cebba7f716edbd | [
"MIT"
]
| null | null | null | cogeo_mosaic/backends/base.py | drnextgis/cogeo-mosaic | 034d0124a2da894c2bb432b1c0cebba7f716edbd | [
"MIT"
]
| null | null | null | """cogeo_mosaic.backend.base: base Backend class."""
import abc
import itertools
from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union
import attr
import mercantile
from cachetools import TTLCache, cached
from cachetools.keys import hashkey
from morecantile import TileMatrixSet
from rio_tiler.constants import WEB_MERCATOR_TMS
from rio_tiler.errors import PointOutsideBounds
from rio_tiler.io import BaseReader, COGReader
from rio_tiler.models import ImageData
from rio_tiler.mosaic import mosaic_reader
from rio_tiler.tasks import multi_values
from cogeo_mosaic.backends.utils import find_quadkeys, get_hash
from cogeo_mosaic.cache import cache_config
from cogeo_mosaic.errors import NoAssetFoundError
from cogeo_mosaic.models import Info, Metadata
from cogeo_mosaic.mosaic import MosaicJSON
from cogeo_mosaic.utils import bbox_union
def _convert_to_mosaicjson(value: Union[Dict, MosaicJSON]):
if value is not None:
return MosaicJSON(**dict(value))
@attr.s
class BaseBackend(BaseReader):
"""Base Class for cogeo-mosaic backend storage.
Attributes:
path (str): mosaic path.
mosaic_def (MosaicJSON, optional): mosaicJSON document.
reader (rio_tiler.io.BaseReader): Dataset reader. Defaults to `rio_tiler.io.COGReader`.
reader_options (dict): Options to forward to the reader config.
tms (morecantile.TileMatrixSet, optional): TileMatrixSet grid definition. **READ ONLY attribute**. Defaults to `WebMercatorQuad`.
bbox (tuple): mosaic bounds (left, bottom, right, top). **READ ONLY attribute**. Defaults to `(-180, -90, 180, 90)`.
minzoom (int): mosaic Min zoom level. **READ ONLY attribute**. Defaults to `0`.
maxzoom (int): mosaic Max zoom level. **READ ONLY attribute**. Defaults to `30`
"""
path: str = attr.ib()
mosaic_def: MosaicJSON = attr.ib(default=None, converter=_convert_to_mosaicjson)
reader: Type[BaseReader] = attr.ib(default=COGReader)
reader_options: Dict = attr.ib(factory=dict)
# TMS is outside the init because mosaicJSON and cogeo-mosaic only
# works with WebMercator (mercantile) for now.
tms: TileMatrixSet = attr.ib(init=False, default=WEB_MERCATOR_TMS)
# default values for bounds and zoom
bounds: Tuple[float, float, float, float] = attr.ib(
init=False, default=(-180, -90, 180, 90)
)
minzoom: int = attr.ib(init=False, default=0)
maxzoom: int = attr.ib(init=False, default=30)
_backend_name: str
_file_byte_size: Optional[int] = 0
def __attrs_post_init__(self):
"""Post Init: if not passed in init, try to read from self.path."""
self.mosaic_def = self.mosaic_def or self._read()
self.minzoom = self.mosaic_def.minzoom
self.maxzoom = self.mosaic_def.maxzoom
self.bounds = self.mosaic_def.bounds
@abc.abstractmethod
def _read(self) -> MosaicJSON:
"""Fetch mosaic definition"""
@abc.abstractmethod
def write(self, overwrite: bool = True):
"""Upload new MosaicJSON to backend."""
def update(
self,
features: Sequence[Dict],
add_first: bool = True,
quiet: bool = False,
**kwargs,
):
"""Update existing MosaicJSON on backend."""
new_mosaic = MosaicJSON.from_features(
features,
self.mosaic_def.minzoom,
self.mosaic_def.maxzoom,
quadkey_zoom=self.quadkey_zoom,
quiet=quiet,
**kwargs,
)
for quadkey, new_assets in new_mosaic.tiles.items():
tile = mercantile.quadkey_to_tile(quadkey)
assets = self.assets_for_tile(*tile)
assets = [*new_assets, *assets] if add_first else [*assets, *new_assets]
# add custom sorting algorithm (e.g based on path name)
self.mosaic_def.tiles[quadkey] = assets
bounds = bbox_union(new_mosaic.bounds, self.mosaic_def.bounds)
self.mosaic_def._increase_version()
self.mosaic_def.bounds = bounds
self.mosaic_def.center = (
(bounds[0] + bounds[2]) / 2,
(bounds[1] + bounds[3]) / 2,
self.mosaic_def.minzoom,
)
self.bounds = bounds
self.write(overwrite=True)
def assets_for_tile(self, x: int, y: int, z: int) -> List[str]:
"""Retrieve assets for tile."""
return self.get_assets(x, y, z)
def assets_for_point(self, lng: float, lat: float) -> List[str]:
"""Retrieve assets for point."""
tile = mercantile.tile(lng, lat, self.quadkey_zoom)
return self.get_assets(tile.x, tile.y, tile.z)
@cached(
TTLCache(maxsize=cache_config.maxsize, ttl=cache_config.ttl),
key=lambda self, x, y, z: hashkey(self.path, x, y, z, self.mosaicid),
)
def get_assets(self, x: int, y: int, z: int) -> List[str]:
"""Find assets."""
mercator_tile = mercantile.Tile(x=x, y=y, z=z)
quadkeys = find_quadkeys(mercator_tile, self.quadkey_zoom)
return list(
itertools.chain.from_iterable(
[self.mosaic_def.tiles.get(qk, []) for qk in quadkeys]
)
)
def tile( # type: ignore
self, x: int, y: int, z: int, reverse: bool = False, **kwargs: Any,
) -> Tuple[ImageData, List[str]]:
"""Get Tile from multiple observation."""
mosaic_assets = self.assets_for_tile(x, y, z)
if not mosaic_assets:
raise NoAssetFoundError(f"No assets found for tile {z}-{x}-{y}")
if reverse:
mosaic_assets = list(reversed(mosaic_assets))
def _reader(asset: str, x: int, y: int, z: int, **kwargs: Any) -> ImageData:
with self.reader(asset, **self.reader_options) as src_dst:
return src_dst.tile(x, y, z, **kwargs)
return mosaic_reader(mosaic_assets, _reader, x, y, z, **kwargs)
def point(
self, lon: float, lat: float, reverse: bool = False, **kwargs: Any,
) -> List:
"""Get Point value from multiple observation."""
mosaic_assets = self.assets_for_point(lon, lat)
if not mosaic_assets:
raise NoAssetFoundError(f"No assets found for point ({lon},{lat})")
if reverse:
mosaic_assets = list(reversed(mosaic_assets))
def _reader(asset: str, lon: float, lat: float, **kwargs) -> Dict:
with self.reader(asset, **self.reader_options) as src_dst:
return src_dst.point(lon, lat, **kwargs)
if "allowed_exceptions" not in kwargs:
kwargs.update({"allowed_exceptions": (PointOutsideBounds,)})
return list(multi_values(mosaic_assets, _reader, lon, lat, **kwargs).items())
def info(self, quadkeys: bool = False) -> Info: # type: ignore
"""Mosaic info."""
return Info(
bounds=self.mosaic_def.bounds,
center=self.mosaic_def.center,
maxzoom=self.mosaic_def.maxzoom,
minzoom=self.mosaic_def.minzoom,
name=self.mosaic_def.name if self.mosaic_def.name else "mosaic",
quadkeys=[] if not quadkeys else self._quadkeys,
)
@property
def metadata(self) -> Metadata: # type: ignore
"""Retrieve Mosaic metadata
Returns
-------
MosaicJSON as dict without `tiles` key.
"""
return Metadata(**self.mosaic_def.dict())
@property
def center(self):
"""Return center from the mosaic definition."""
return self.mosaic_def.center
@property
def mosaicid(self) -> str:
"""Return sha224 id of the mosaicjson document."""
return get_hash(**self.mosaic_def.dict(exclude_none=True))
@property
def _quadkeys(self) -> List[str]:
"""Return the list of quadkey tiles."""
return list(self.mosaic_def.tiles)
@property
def quadkey_zoom(self) -> int:
"""Return Quadkey zoom property."""
return self.mosaic_def.quadkey_zoom or self.mosaic_def.minzoom
############################################################################
# Not Implemented methods
# BaseReader required those method to be implemented
def stats(self):
"""PlaceHolder for BaseReader.stats."""
raise NotImplementedError
def preview(self):
"""PlaceHolder for BaseReader.preview."""
raise NotImplementedError
def part(self):
"""PlaceHolder for BaseReader.part."""
raise NotImplementedError
def feature(self):
"""PlaceHolder for BaseReader.feature."""
raise NotImplementedError
| 36.058577 | 137 | 0.636343 | 7,618 | 0.883964 | 0 | 0 | 7,626 | 0.884892 | 0 | 0 | 2,174 | 0.252263 |
31d61f0a33b68e1cb755859a34a3948798308cb2 | 5,190 | py | Python | userge/core/methods/decorators/on_filters.py | wildyvpn-network/bot | 87459495000bd6004b8f62a9cb933c164da9ef29 | [
"MIT"
]
| null | null | null | userge/core/methods/decorators/on_filters.py | wildyvpn-network/bot | 87459495000bd6004b8f62a9cb933c164da9ef29 | [
"MIT"
]
| null | null | null | userge/core/methods/decorators/on_filters.py | wildyvpn-network/bot | 87459495000bd6004b8f62a9cb933c164da9ef29 | [
"MIT"
]
| null | null | null | # pylint: disable=missing-module-docstring
#
# Copyright (C) 2020 by UsergeTeam@Github, < https://github.com/UsergeTeam >.
#
# This file is part of < https://github.com/UsergeTeam/Userge > project,
# and is released under the "GNU v3.0 License Agreement".
# Please see < https://github.com/uaudith/Userge/blob/master/LICENSE >
#
# All rights reserved.
__all__ = ['OnFilters']
from pyrogram.filters import Filter as RawFilter
from ... import types
from . import RawDecorator
class OnFilters(RawDecorator): # pylint: disable=missing-class-docstring
def on_filters(self, # pylint: disable=arguments-differ
filters: RawFilter,
group: int = 0,
allow_private: bool = True,
allow_bots: bool = True,
allow_groups: bool = True,
allow_channels: bool = True,
only_admins: bool = False,
allow_via_bot: bool = True,
check_client: bool = True,
check_downpath: bool = False,
check_change_info_perm: bool = False,
check_edit_perm: bool = False,
check_delete_perm: bool = False,
check_restrict_perm: bool = False,
check_promote_perm: bool = False,
check_invite_perm: bool = False,
check_pin_perm: bool = False) -> RawDecorator._PYRORETTYPE:
"""\nDecorator for handling filters
Parameters:
filters (:obj:`~pyrogram.filters`):
Pass one or more filters to allow only a subset of
messages to be passed in your function.
group (``int``, *optional*):
The group identifier, defaults to 0.
allow_private (``bool``, *optional*):
If ``False``, prohibit private chats, defaults to True.
allow_bots (``bool``, *optional*):
If ``False``, prohibit bot chats, defaults to True.
allow_groups (``bool``, *optional*):
If ``False``, prohibit group chats, defaults to True.
allow_channels (``bool``, *optional*):
If ``False``, prohibit channel chats, defaults to True.
only_admins (``bool``, *optional*):
If ``True``, client should be an admin, defaults to False.
allow_via_bot (``bool``, *optional*):
If ``True``, allow this via your bot, defaults to True.
check_client (``bool``, *optional*):
If ``True``, check client is bot or not before execute, defaults to True.
check_downpath (``bool``, *optional*):
If ``True``, check downpath and make if not exist, defaults to False.
check_change_info_perm (``bool``, *optional*):
If ``True``, check user has change_info permission before execute,
defaults to False.
check_edit_perm (``bool``, *optional*):
If ``True``, check user has edit permission before execute,
defaults to False.
check_delete_perm (``bool``, *optional*):
If ``True``, check user has delete permission before execute,
defaults to False.
check_restrict_perm (``bool``, *optional*):
If ``True``, check user has restrict permission before execute,
defaults to False.
check_promote_perm (``bool``, *optional*):
If ``True``, check user has promote permission before execute,
defaults to False.
check_invite_perm (``bool``, *optional*):
If ``True``, check user has invite permission before execute,
defaults to False.
check_pin_perm (``bool``, *optional*):
If ``True``, check user has pin permission before execute,
defaults to False.
"""
return self._build_decorator(
types.raw.Filter.parse(client=self,
filters=filters,
group=group,
allow_private=allow_private,
allow_bots=allow_bots,
allow_groups=allow_groups,
allow_channels=allow_channels,
only_admins=only_admins,
allow_via_bot=allow_via_bot,
check_client=check_client,
check_downpath=check_downpath,
check_change_info_perm=check_change_info_perm,
check_edit_perm=check_edit_perm,
check_delete_perm=check_delete_perm,
check_restrict_perm=check_restrict_perm,
check_promote_perm=check_promote_perm,
check_invite_perm=check_invite_perm,
check_pin_perm=check_pin_perm))
| 43.613445 | 89 | 0.527746 | 4,710 | 0.907514 | 0 | 0 | 0 | 0 | 0 | 0 | 2,953 | 0.568979 |
31d79e6d0a59cc3302d9155c1c4c15215d0a9e1b | 1,387 | py | Python | pygromos/tests/test_submission/test_hpc_queuing_submission_scheduling.py | pultar/PyGromosTools | 3c104c560c2e654972a036e2060b120ade96f655 | [
"MIT"
]
| 13 | 2021-03-17T09:29:37.000Z | 2022-01-14T20:42:16.000Z | pygromos/tests/test_submission/test_hpc_queuing_submission_scheduling.py | pultar/PyGromosTools | 3c104c560c2e654972a036e2060b120ade96f655 | [
"MIT"
]
| 185 | 2021-03-03T14:24:55.000Z | 2022-03-31T18:39:29.000Z | pygromos/tests/test_submission/test_hpc_queuing_submission_scheduling.py | pultar/PyGromosTools | 3c104c560c2e654972a036e2060b120ade96f655 | [
"MIT"
]
| 13 | 2021-03-03T14:18:06.000Z | 2022-02-17T09:48:55.000Z | import unittest, tempfile
from pygromos.simulations.hpc_queuing.job_scheduling.schedulers import simulation_scheduler
from pygromos.data.simulation_parameters_templates import template_md
from pygromos.data.topology_templates import blank_topo_template
from pygromos.simulations.hpc_queuing.submission_systems import DUMMY
from pygromos.files.gromos_system.gromos_system import Gromos_System
from pygromos.tests.in_testfiles import in_test_file_path
from pygromos.tests.test_files import out_test_root_dir
class test_MD_scheduler(unittest.TestCase):
submissionSystem = DUMMY
def setUp(self) -> None:
self.tmp_test_dir = tempfile.mkdtemp(dir=out_test_root_dir, prefix="scheduling_Dummy_")
def test_do(self):
in_cnf = in_test_file_path+"/cnf/in_cnf1.cnf"
out_dir_path = self.tmp_test_dir
in_simSystem = Gromos_System(system_name="test_do", work_folder=out_dir_path,
in_top_path=blank_topo_template, in_cnf_path=in_cnf, in_imd_path=template_md,
in_gromosXX_bin_dir=None, in_gromosPP_bin_dir=None)
submission_system = self.submissionSystem()
simulation_scheduler.do(in_simSystem=in_simSystem, out_dir_path=out_dir_path,
submission_system=submission_system,
simulation_run_num=2, verbose= True)
| 46.233333 | 114 | 0.746215 | 876 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0 | 46 | 0.033165 |
31d7a9d787341b47673ced552899077d803f3aa3 | 1,934 | py | Python | tutorials.py | Xython/pattern-matching | 17ccdb68189353f1c63032013f5ef6f1ca4c0902 | [
"MIT"
]
| 20 | 2017-12-31T05:45:47.000Z | 2021-05-15T22:08:21.000Z | tutorials.py | Xython/Destruct.py | 17ccdb68189353f1c63032013f5ef6f1ca4c0902 | [
"MIT"
]
| null | null | null | tutorials.py | Xython/Destruct.py | 17ccdb68189353f1c63032013f5ef6f1ca4c0902 | [
"MIT"
]
| 1 | 2018-01-12T04:54:19.000Z | 2018-01-12T04:54:19.000Z | # -*- coding: utf-8 -*-
"""
Created on Sat Dec 30 17:03:01 2017
@author: misakawa
"""
from pattern_matching import Match, when, var, T, t, _, overwrite
from numpy.random import randint
@overwrite(var[(t == int) | (t == float)], var[(t == int) | (t == float)])
def add(a, b):
return a + b
@when(var[t == str], var[t == str])
def add(a, b):
return a + b
class Bound1:
pass
class Bound2:
pass
class Bound3(Bound1, Bound2):
def __repr__(self):
return "bound3"
class Bound4(Bound3):
pass
@when(_[(t != Bound3) & (t < Bound4)])
def add():
return 2
@when(_)
def add():
return 3
assert add(1, 1) == 2
assert add(Bound2()) == 2
assert add(Bound3()) == 3
@when(_[int], _[Bound1], var)
def add(u):
return u
assert add(1, Bound1(), 'last') == 'last'
def is_type(x):
return isinstance(x, type)
m = Match(1, 2, (3, int))
[a, b, c] = m.case(var[int], var, *var[tuple]).get
assert a == 1 and b == 2 and c == ((3, int), )
[c2] = m.case((_, _, (_, var.when(is_type)))).get
assert c2 == int
@overwrite(_ == None)
def summary():
return 0
@when([var[int], *(_ == [])], var)
def summary(head, res):
return head + res
@when([var[int], *var[list]], var)
def summary(head, tail, res):
return summary(tail, res + head)
@when(var[list])
def summary(lst):
return summary(lst, 0)
assert summary(list(range(100))) == 4950
@overwrite([var, *var])
def qsort(head, tail):
lowers = [i for i in tail if i < head]
highers = [i for i in tail if i >= head]
return qsort(lowers) + [head] + qsort(highers)
@when(var)
def qsort(lst):
return lst
qsort(randint(0, 500, size=(1200, )))
@when(_[t.when(lambda _: _ == int)])
def trait_test():
return 1
assert trait_test(1) == 1
class Population:
num: int = 1000
@when(var[t.when(lambda _: hasattr(_, 'num'))])
def trait_test(x):
return x.num
assert trait_test(Population()) == 1000
| 14.763359 | 74 | 0.588418 | 189 | 0.097725 | 0 | 0 | 988 | 0.510858 | 0 | 0 | 110 | 0.056877 |
31d8a178060a23e17a236c26a55e351c521d366e | 3,823 | py | Python | RepositoryBootstrap/EnvironmentDiffs.py | davidbrownell/v3-Common_Environment | 8f42f256e573cbd83cbf9813db9958025ddf12f2 | [
"BSL-1.0"
]
| null | null | null | RepositoryBootstrap/EnvironmentDiffs.py | davidbrownell/v3-Common_Environment | 8f42f256e573cbd83cbf9813db9958025ddf12f2 | [
"BSL-1.0"
]
| 1 | 2018-06-08T06:45:16.000Z | 2018-06-08T06:45:16.000Z | RepositoryBootstrap/EnvironmentDiffs.py | davidbrownell/v3-Common_Environment | 8f42f256e573cbd83cbf9813db9958025ddf12f2 | [
"BSL-1.0"
]
| 1 | 2018-06-08T04:15:17.000Z | 2018-06-08T04:15:17.000Z | # ----------------------------------------------------------------------
# |
# | EnvironmentDiffs.py
# |
# | David Brownell <[email protected]>
# | 2018-06-02 22:19:34
# |
# ----------------------------------------------------------------------
# |
# | Copyright David Brownell 2018-22.
# | Distributed under the Boost Software License, Version 1.0.
# | (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# |
# ----------------------------------------------------------------------
"""Displays changes made by an environment during activation."""
import json
import os
import sys
import textwrap
import six
import CommonEnvironment
from CommonEnvironment import CommandLine
from CommonEnvironment.Shell.All import CurrentShell
from RepositoryBootstrap import Constants
# ----------------------------------------------------------------------
_script_fullpath = CommonEnvironment.ThisFullpath()
_script_dir, _script_name = os.path.split(_script_fullpath)
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
@CommandLine.EntryPoint
@CommandLine.Constraints( output_stream=None,
)
def Before( decorate=False,
output_stream=sys.stdout,
):
_Display(GetOriginalEnvironment(), output_stream, decorate)
return 0
# ----------------------------------------------------------------------
@CommandLine.EntryPoint
@CommandLine.Constraints( output_stream=None,
)
def After( decorate=False,
output_stream=sys.stdout,
):
original_env = GetOriginalEnvironment()
# Compare to the current environment
this_env = dict(os.environ)
differences = {}
for k, v in six.iteritems(this_env):
if ( k not in original_env or
original_env[k] != v
):
differences[k] = v
_Display(differences, output_stream, decorate)
return 0
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
def GetOriginalEnvironment():
# Get the original environment
generated_dir = os.getenv(Constants.DE_REPO_GENERATED_NAME)
assert os.path.isdir(generated_dir), generated_dir
original_environment_filename = os.path.join(generated_dir, Constants.GENERATED_ACTIVATION_ORIGINAL_ENVIRONMENT_FILENAME)
assert os.path.isfile(original_environment_filename), original_environment_filename
with open(original_environment_filename) as f:
return json.load(f)
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
def _Display(content, output_stream, decorate):
if not isinstance(content, six.string_types):
content = json.dumps(content)
if decorate:
output_stream.write(textwrap.dedent(
"""\
//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
{}
//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
""").format(content))
else:
output_stream.write(content)
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
if __name__ == "__main__":
try: sys.exit(CommandLine.Main())
except KeyboardInterrupt: pass
| 36.409524 | 126 | 0.437876 | 0 | 0 | 0 | 0 | 813 | 0.21266 | 0 | 0 | 1,812 | 0.473973 |
31da9f9cf7a9feda53f89aeafcbeadfbe26ac626 | 7,837 | py | Python | tests/model/test_ocrd_mets.py | wrznr/pyocrd | 25c4dd8c60285b7877803e2b627d72c8c0a4ab1e | [
"Apache-2.0"
]
| null | null | null | tests/model/test_ocrd_mets.py | wrznr/pyocrd | 25c4dd8c60285b7877803e2b627d72c8c0a4ab1e | [
"Apache-2.0"
]
| null | null | null | tests/model/test_ocrd_mets.py | wrznr/pyocrd | 25c4dd8c60285b7877803e2b627d72c8c0a4ab1e | [
"Apache-2.0"
]
| null | null | null | from datetime import datetime
from os.path import join
from tests.base import TestCase, main, assets, copy_of_directory
from ocrd_utils import (
initLogging,
VERSION,
MIMETYPE_PAGE
)
from ocrd_models import OcrdMets
# pylint: disable=protected-access,deprecated-method,too-many-public-methods
class TestOcrdMets(TestCase):
def setUp(self):
self.mets = OcrdMets(filename=assets.url_of('SBB0000F29300010000/data/mets.xml'))
initLogging()
def test_unique_identifier(self):
self.assertEqual(self.mets.unique_identifier, 'http://resolver.staatsbibliothek-berlin.de/SBB0000F29300010000', 'Right identifier')
self.mets.unique_identifier = 'foo'
self.assertEqual(self.mets.unique_identifier, 'foo', 'Right identifier after change')
def test_unique_identifier_from_nothing(self):
mets = OcrdMets.empty_mets()
self.assertEqual(mets.unique_identifier, None, 'no identifier')
mets.unique_identifier = 'foo'
self.assertEqual(mets.unique_identifier, 'foo', 'Right identifier after change')
as_string = mets.to_xml().decode('utf-8')
self.assertIn('ocrd/core v%s' % VERSION, as_string)
self.assertIn('CREATEDATE="%d-%d-%02dT' % (
datetime.now().year,
datetime.now().month,
datetime.now().day,
), as_string)
def test_str(self):
mets = OcrdMets(content='<mets/>')
self.assertEqual(str(mets), 'OcrdMets[fileGrps=[],files=[]]')
def test_override_constructor_args(self):
id2file = {'foo': {}}
mets = OcrdMets(id2file, content='<mets/>')
self.assertEqual(mets._file_by_id, id2file)
def test_file_groups(self):
self.assertEqual(len(self.mets.file_groups), 17, '17 file groups')
def test_find_files(self):
self.assertEqual(len(self.mets.find_files()), 35, '35 files total')
self.assertEqual(len(self.mets.find_files(fileGrp='OCR-D-IMG')), 3, '3 files in "OCR-D-IMG"')
self.assertEqual(len(self.mets.find_files(pageId='PHYS_0001')), 17, '17 files for page "PHYS_0001"')
self.assertEqual(len(self.mets.find_files(pageId='PHYS_0001-NOTEXIST')), 0, '0 pages for "PHYS_0001-NOTEXIST"')
self.assertEqual(len(self.mets.find_files(mimetype='image/tiff')), 13, '13 image/tiff')
self.assertEqual(len(self.mets.find_files(mimetype=MIMETYPE_PAGE)), 20, '20 ' + MIMETYPE_PAGE)
self.assertEqual(len(self.mets.find_files(url='OCR-D-IMG/FILE_0005_IMAGE.tif')), 1, '1 xlink:href="OCR-D-IMG/FILE_0005_IMAGE.tif"')
def test_find_files_local_only(self):
self.assertEqual(len(self.mets.find_files(pageId='PHYS_0001', local_only=True)), 3, '3 local files for page "PHYS_0001"')
def test_physical_pages(self):
self.assertEqual(len(self.mets.physical_pages), 3, '3 physical pages')
def test_physical_pages_from_empty_mets(self):
mets = OcrdMets(content="<mets></mets>")
self.assertEqual(len(mets.physical_pages), 0, 'no physical page')
mets.add_file('OUTPUT', ID="foo123", pageId="foobar")
self.assertEqual(len(mets.physical_pages), 1, '1 physical page')
def test_add_group(self):
mets = OcrdMets.empty_mets()
self.assertEqual(len(mets.file_groups), 0, '0 file groups')
mets.add_file_group('TEST')
self.assertEqual(len(mets.file_groups), 1, '1 file groups')
mets.add_file_group('TEST')
self.assertEqual(len(mets.file_groups), 1, '1 file groups')
def test_add_file(self):
mets = OcrdMets.empty_mets()
self.assertEqual(len(mets.file_groups), 0, '0 file groups')
self.assertEqual(len(mets.find_files(fileGrp='OUTPUT')), 0, '0 files in "OUTPUT"')
f = mets.add_file('OUTPUT', ID="foo123", mimetype="bla/quux", pageId="foobar")
f2 = mets.add_file('OUTPUT', ID="foo1232", mimetype="bla/quux", pageId="foobar")
self.assertEqual(f.pageId, 'foobar', 'pageId set')
self.assertEqual(len(mets.file_groups), 1, '1 file groups')
self.assertEqual(len(mets.find_files(fileGrp='OUTPUT')), 2, '2 files in "OUTPUT"')
mets.set_physical_page_for_file('barfoo', f, order='300', orderlabel="page 300")
self.assertEqual(f.pageId, 'barfoo', 'pageId changed')
mets.set_physical_page_for_file('quux', f2, order='302', orderlabel="page 302")
self.assertEqual(f2.pageId, 'quux', 'pageId changed')
mets.set_physical_page_for_file('barfoo', f2, order='301', orderlabel="page 301")
self.assertEqual(f2.pageId, 'barfoo', 'pageId changed')
self.assertEqual(len(mets.file_groups), 1, '1 file group')
def test_add_file_ID_fail(self):
f = self.mets.add_file('OUTPUT', ID='best-id-ever', mimetype="beep/boop")
self.assertEqual(f.ID, 'best-id-ever', "ID kept")
with self.assertRaises(Exception) as cm:
self.mets.add_file('OUTPUT', ID='best-id-ever', mimetype="boop/beep")
self.assertEqual(str(cm.exception), "File with ID='best-id-ever' already exists")
f2 = self.mets.add_file('OUTPUT', ID='best-id-ever', mimetype="boop/beep", force=True)
self.assertEqual(f._el, f2._el)
def test_filegrp_from_file(self):
f = self.mets.find_files(fileGrp='OCR-D-IMG')[0]
self.assertEqual(f.fileGrp, 'OCR-D-IMG')
def test_add_file_no_id(self):
with self.assertRaisesRegex(Exception, "Must set ID of the mets:file"):
self.mets.add_file('FOO')
def test_add_file_no_pageid(self):
f = self.mets.add_file('OUTPUT', mimetype="bla/quux", ID="foo3")
self.assertEqual(f.pageId, None, 'No pageId')
def test_file_pageid(self):
f = self.mets.find_files()[0]
self.assertEqual(f.pageId, 'PHYS_0001')
f.pageId = 'foo'
self.assertEqual(f.pageId, 'foo')
def test_agent(self):
# Processor(workspace=self.workspace)
mets = self.mets
beforelen = len(mets.agents)
mets.add_agent('foo bar v0.0.1', 'OTHER', 'OTHER', 'YETOTHERSTILL')
# print(['%s'%x for x in mets.agents])
self.assertEqual(len(mets.agents), beforelen + 1)
def test_metshdr(self):
"""
Test whether metsHdr is created on-demand
"""
mets = OcrdMets(content="<mets></mets>")
self.assertFalse(mets._tree.getroot().getchildren())
mets.add_agent()
self.assertEqual(len(mets._tree.getroot().getchildren()), 1)
def test_nocontent_nofilename(self):
with self.assertRaisesRegex(Exception, "Must pass 'filename' or 'content' to"):
OcrdMets()
def test_encoding_entities(self):
mets = OcrdMets(content="""
<mets>
<metsHdr>
<agent>
<name>Őh śéé Áŕ</name>
<note>OCR-D</note>
</agent>
</metsHdr>
</mets>
""")
self.assertIn('Őh śéé Áŕ', mets.to_xml().decode('utf-8'))
def test_remove_file_group(self):
"""
Test removal of filegrp
"""
with copy_of_directory(assets.path_to('SBB0000F29300010000/data')) as tempdir:
mets = OcrdMets(filename=join(tempdir, 'mets.xml'))
self.assertEqual(len(mets.file_groups), 17)
self.assertEqual(len(mets.find_files()), 35)
# print()
# before = sorted([x.ID for x in mets.find_files()])
with self.assertRaisesRegex(Exception, "not empty"):
mets.remove_file_group('OCR-D-GT-ALTO')
mets.remove_file_group('OCR-D-GT-PAGE', recursive=True)
# print([x for x in before if x not in sorted([x.ID for x in mets.find_files()])])
self.assertEqual(len(mets.file_groups), 16)
self.assertEqual(len(mets.find_files()), 33)
if __name__ == '__main__':
main()
| 44.528409 | 139 | 0.643486 | 7,502 | 0.955791 | 0 | 0 | 0 | 0 | 0 | 0 | 2,215 | 0.282202 |
31db05913c960fafbf96871656aa566e21ebbd4d | 7,862 | py | Python | robo_gym/envs/ur/ur_avoidance_basic.py | psFournier/robo-gym | 0e67a36c0cbeac885c53b92de8f3f1f13e286c9a | [
"MIT"
]
| 236 | 2020-04-15T10:50:45.000Z | 2022-03-31T14:28:52.000Z | robo_gym/envs/ur/ur_avoidance_basic.py | psFournier/robo-gym | 0e67a36c0cbeac885c53b92de8f3f1f13e286c9a | [
"MIT"
]
| 36 | 2020-07-13T17:11:32.000Z | 2022-02-21T14:01:33.000Z | robo_gym/envs/ur/ur_avoidance_basic.py | psFournier/robo-gym | 0e67a36c0cbeac885c53b92de8f3f1f13e286c9a | [
"MIT"
]
| 51 | 2020-04-24T08:58:31.000Z | 2022-03-18T17:14:23.000Z | """
Environment for basic obstacle avoidance controlling a robotic arm from UR.
In this environment the obstacle is only moving up and down in a vertical line in front of the robot.
The goal is for the robot to stay within a predefined minimum distance to the moving obstacle.
When feasible the robot should continue to the original configuration,
otherwise wait for the obstacle to move away before proceeding
"""
import numpy as np
from typing import Tuple
from robo_gym_server_modules.robot_server.grpc_msgs.python import robot_server_pb2
from robo_gym.envs.simulation_wrapper import Simulation
from robo_gym.envs.ur.ur_base_avoidance_env import URBaseAvoidanceEnv
# base, shoulder, elbow, wrist_1, wrist_2, wrist_3
JOINT_POSITIONS = [-1.57, -1.31, -1.31, -2.18, 1.57, 0.0]
DEBUG = True
MINIMUM_DISTANCE = 0.3 # the distance [cm] the robot should keep to the obstacle
class BasicAvoidanceUR(URBaseAvoidanceEnv):
"""Universal Robots UR basic obstacle avoidance environment.
Args:
rs_address (str): Robot Server address. Formatted as 'ip:port'. Defaults to None.
fix_base (bool): Wether or not the base joint stays fixed or is moveable. Defaults to False.
fix_shoulder (bool): Wether or not the shoulder joint stays fixed or is moveable. Defaults to False.
fix_elbow (bool): Wether or not the elbow joint stays fixed or is moveable. Defaults to False.
fix_wrist_1 (bool): Wether or not the wrist 1 joint stays fixed or is moveable. Defaults to False.
fix_wrist_2 (bool): Wether or not the wrist 2 joint stays fixed or is moveable. Defaults to False.
fix_wrist_3 (bool): Wether or not the wrist 3 joint stays fixed or is moveable. Defaults to True.
ur_model (str): determines which ur model will be used in the environment. Defaults to 'ur5'.
include_polar_to_elbow (bool): determines wether or not the polar coordinates to the elbow joint are included in the state. Defaults to False.
Attributes:
ur (:obj:): Robot utilities object.
client (:obj:str): Robot Server client.
real_robot (bool): True if the environment is controlling a real robot.
"""
max_episode_steps = 1000
def _set_initial_robot_server_state(self, rs_state, fixed_object_position = None) -> robot_server_pb2.State:
if fixed_object_position:
state_msg = super()._set_initial_robot_server_state(rs_state=rs_state, fixed_object_position=fixed_object_position)
return state_msg
z_amplitude = np.random.default_rng().uniform(low=0.09, high=0.35)
z_frequency = 0.125
z_offset = np.random.default_rng().uniform(low=0.2, high=0.6)
string_params = {"object_0_function": "triangle_wave"}
float_params = {"object_0_x": 0.12,
"object_0_y": 0.34,
"object_0_z_amplitude": z_amplitude,
"object_0_z_frequency": z_frequency,
"object_0_z_offset": z_offset}
state = {}
state_msg = robot_server_pb2.State(state = state, float_params = float_params,
string_params = string_params, state_dict = rs_state)
return state_msg
def reset(self, joint_positions = JOINT_POSITIONS, fixed_object_position = None) -> np.array:
"""Environment reset.
Args:
joint_positions (list[6] or np.array[6]): robot joint positions in radians.
fixed_object_position (list[3]): x,y,z fixed position of object
"""
self.prev_action = np.zeros(6)
state = super().reset(joint_positions = joint_positions, fixed_object_position = fixed_object_position)
return state
def reward(self, rs_state, action) -> Tuple[float, bool, dict]:
env_state = self._robot_server_state_to_env_state(rs_state)
reward = 0
done = False
info = {}
# Reward weights
close_distance_weight = -2
delta_joint_weight = 1
action_usage_weight = 1
rapid_action_weight = -0.2
# Difference in joint position current vs. starting position
delta_joint_pos = env_state[9:15]
# Calculate distance to the obstacle
obstacle_coord = np.array([rs_state['object_0_to_ref_translation_x'], rs_state['object_0_to_ref_translation_y'], rs_state['object_0_to_ref_translation_z']])
ee_coord = np.array([rs_state['ee_to_ref_translation_x'], rs_state['ee_to_ref_translation_y'], rs_state['ee_to_ref_translation_z']])
forearm_coord = np.array([rs_state['forearm_to_ref_translation_x'], rs_state['forearm_to_ref_translation_y'], rs_state['forearm_to_ref_translation_z']])
distance_to_ee = np.linalg.norm(obstacle_coord - ee_coord)
distance_to_forearm = np.linalg.norm(obstacle_coord - forearm_coord)
distance_to_target = np.min([distance_to_ee, distance_to_forearm])
# Reward staying close to the predefined joint position
if abs(env_state[-6:]).sum() < 0.1 * action.size:
reward += delta_joint_weight * (1 - (abs(delta_joint_pos).sum()/(0.1 * action.size))) * (1/1000)
# Reward for not acting
if abs(action).sum() <= action.size:
reward += action_usage_weight * (1 - (np.square(action).sum()/action.size)) * (1/1000)
# Negative reward if actions change to rapidly between steps
for i in range(len(action)):
if abs(action[i] - self.prev_action[i]) > 0.5:
reward += rapid_action_weight * (1/1000)
# Negative reward if the obstacle is close than the predefined minimum distance
if distance_to_target < MINIMUM_DISTANCE:
reward += close_distance_weight * (1/self.max_episode_steps)
# Check if there is a collision
collision = True if rs_state['in_collision'] == 1 else False
if collision:
done = True
info['final_status'] = 'collision'
info['target_coord'] = obstacle_coord
self.last_position_on_success = []
if self.elapsed_steps >= self.max_episode_steps:
done = True
info['final_status'] = 'success'
info['target_coord'] = obstacle_coord
self.last_position_on_success = []
return reward, done, info
def step(self, action) -> Tuple[np.array, float, bool, dict]:
if type(action) == list: action = np.array(action)
state, reward, done, info = super().step(action)
self.prev_action = self.add_fixed_joints(action)
return state, reward, done, info
class BasicAvoidanceURSim(BasicAvoidanceUR, Simulation):
cmd = "roslaunch ur_robot_server ur_robot_server.launch \
world_name:=tabletop_sphere50.world \
reference_frame:=base_link \
max_velocity_scale_factor:=0.2 \
action_cycle_rate:=20 \
rviz_gui:=false \
gazebo_gui:=true \
objects_controller:=true \
rs_mode:=1moving2points \
n_objects:=1.0 \
object_0_model_name:=sphere50 \
object_0_frame:=target"
def __init__(self, ip=None, lower_bound_port=None, upper_bound_port=None, gui=False, ur_model='ur5', **kwargs):
self.cmd = self.cmd + ' ' + 'ur_model:=' + ur_model
Simulation.__init__(self, self.cmd, ip, lower_bound_port, upper_bound_port, gui, **kwargs)
BasicAvoidanceUR.__init__(self, rs_address=self.robot_server_ip, ur_model=ur_model, **kwargs)
class BasicAvoidanceURRob(BasicAvoidanceUR):
real_robot = True
# roslaunch ur_robot_server ur_robot_server.launch ur_model:=ur5 real_robot:=true rviz_gui:=true gui:=true reference_frame:=base max_velocity_scale_factor:=0.2 action_cycle_rate:=20 rs_mode:=moving | 47.077844 | 197 | 0.672602 | 6,778 | 0.862122 | 0 | 0 | 0 | 0 | 0 | 0 | 3,453 | 0.439201 |
31dbeeeb585ae91b3ec528faf0591108ed8cc73b | 848 | py | Python | hear_me_django_app/accounts/management/commands/initial_users.py | kamil1marczak/hear_me_django_app | 2a567c15acddbf6bf183c6c637a3785c2a9c9c5c | [
"MIT"
]
| null | null | null | hear_me_django_app/accounts/management/commands/initial_users.py | kamil1marczak/hear_me_django_app | 2a567c15acddbf6bf183c6c637a3785c2a9c9c5c | [
"MIT"
]
| null | null | null | hear_me_django_app/accounts/management/commands/initial_users.py | kamil1marczak/hear_me_django_app | 2a567c15acddbf6bf183c6c637a3785c2a9c9c5c | [
"MIT"
]
| null | null | null | from django.contrib.auth import get_user_model
from django.contrib.auth.hashers import make_password
from django.core.management.base import BaseCommand
from ._private import populate_user
User = get_user_model()
class Command(BaseCommand):
help = 'admin deployment'
def add_arguments(self, parser):
parser.add_argument('total', type=int, help='Indicates the number of users to be created')
def handle(self, *args, **kwargs):
total = kwargs['total']
populate_user(number=total)
obj, created = User.objects.get_or_create(name="root", password=make_password('Kamil100!'), is_superuser=True)
message = "Successfully populated database with initial users"
if created:
message += f" Superuser {obj.name} ha been created"
self.stdout.write(self.style.SUCCESS(message))
| 36.869565 | 118 | 0.714623 | 632 | 0.745283 | 0 | 0 | 0 | 0 | 0 | 0 | 186 | 0.21934 |
31dd0da78d51189eef9e478f249f06c8a43016ca | 1,789 | py | Python | config/constants.py | flopezag/fiware-tsc-dashboard | af80673707c9b2fb85c9f4aa12bce12a20ef4431 | [
"Apache-2.0"
]
| null | null | null | config/constants.py | flopezag/fiware-tsc-dashboard | af80673707c9b2fb85c9f4aa12bce12a20ef4431 | [
"Apache-2.0"
]
| 37 | 2017-02-23T09:08:58.000Z | 2019-08-13T09:34:40.000Z | config/constants.py | flopezag/fiware-tsc-dashboard | af80673707c9b2fb85c9f4aa12bce12a20ef4431 | [
"Apache-2.0"
]
| 2 | 2017-12-19T15:06:33.000Z | 2019-05-02T17:24:45.000Z | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
##
# Copyright 2017 FIWARE Foundation, e.V.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
##
__author__ = 'fla'
GOOGLE_ACCOUNTS_BASE_URL = 'https://accounts.google.com'
APPLICATION_NAME = 'TSC Enablers Dashboard'
CREDENTIAL_DIR = '.credentials'
CREDENTIAL_FILE = 'sheets.googleapis.com.json'
DB_NAME = 'enablers-dashboard.db'
DB_FOLDER = 'dbase'
LOG_FILE = 'tsc-dashboard.log'
# We need to add 16 rows in the number of enablers list corresponding to:
# - Title
# - Report date
# - Data sources updated on
# - Source
# - Units
# - Enabler Impl
# - INCUBATED
# - DEVELOPMENT
# - SUPPORT
# - DEPRECATED
# - And 6 extra blank rows between them
FIXED_ROWS = 16
# We keep the firsts row without change in the sheet (sheet title)
INITIAL_ROW = 2
# The number of columns to delete corresponds to:
# Source, Catalogue, ReadTheDocs, Docker, GitHub, Coverall, Academy, HelpDesk, Backlog, GitHub_Open_Issues,
# GitHub_Closed_Issues, GitHub_Adopters, GitHub_Adopters_Open_Issues, GitHub_Adopters_Closed_Issues,
# GitHub_Comits, GitHub_Forks, GitHub_Watchers, GitHub_Stars, Jira_WorkItem_Not_Closed, Jira_WorkItem_Closed
# + Extra 2 = 22
FIXED_COLUMNS = 22
# We start to delete from the initial column
INITIAL_COLUMN = 1
| 31.946429 | 108 | 0.755729 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,541 | 0.861375 |
31dd6e6741a804d90f5239811383ca0cdca9f19d | 12,218 | py | Python | tensornetwork/backends/backend_test.py | ashoknar/TensorNetwork | 82636b75a0c53b5447c84d9a4e85226fe0e6f43a | [
"Apache-2.0"
]
| null | null | null | tensornetwork/backends/backend_test.py | ashoknar/TensorNetwork | 82636b75a0c53b5447c84d9a4e85226fe0e6f43a | [
"Apache-2.0"
]
| null | null | null | tensornetwork/backends/backend_test.py | ashoknar/TensorNetwork | 82636b75a0c53b5447c84d9a4e85226fe0e6f43a | [
"Apache-2.0"
]
| null | null | null | """Tests for graphmode_tensornetwork."""
import builtins
import sys
import pytest
import numpy as np
from tensornetwork import connect, contract, Node
from tensornetwork.backends.base_backend import BaseBackend
from tensornetwork.backends import backend_factory
def clean_tensornetwork_modules():
for mod in list(sys.modules.keys()):
if mod.startswith('tensornetwork'):
sys.modules.pop(mod, None)
@pytest.fixture(autouse=True)
def clean_backend_import():
#never do this outside testing
clean_tensornetwork_modules()
yield # use as teardown
clean_tensornetwork_modules()
@pytest.fixture
def no_backend_dependency(monkeypatch):
import_orig = builtins.__import__
# pylint: disable=redefined-builtin
def mocked_import(name, globals, locals, fromlist, level):
if name in ['torch', 'tensorflow', 'jax']:
raise ImportError()
return import_orig(name, globals, locals, fromlist, level)
monkeypatch.setattr(builtins, '__import__', mocked_import)
# Nuke the cache.
backend_factory._INSTANTIATED_BACKENDS = dict()
@pytest.mark.usefixtures('no_backend_dependency')
def test_backend_pytorch_missing_cannot_initialize_backend():
#pylint: disable=import-outside-toplevel
with pytest.raises(ImportError):
# pylint: disable=import-outside-toplevel
from tensornetwork.backends.pytorch.pytorch_backend import PyTorchBackend
PyTorchBackend()
@pytest.mark.usefixtures('no_backend_dependency')
def test_backend_tensorflow_missing_cannot_initialize_backend():
#pylint: disable=import-outside-toplevel
with pytest.raises(ImportError):
# pylint: disable=import-outside-toplevel
from tensornetwork.backends.tensorflow.tensorflow_backend \
import TensorFlowBackend
TensorFlowBackend()
@pytest.mark.usefixtures('no_backend_dependency')
def test_backend_jax_missing_cannot_initialize_backend():
#pylint: disable=import-outside-toplevel
with pytest.raises(ImportError):
# pylint: disable=import-outside-toplevel
from tensornetwork.backends.jax.jax_backend import JaxBackend
JaxBackend()
@pytest.mark.usefixtures('no_backend_dependency')
def test_config_backend_missing_can_import_config():
#not sure why config is imported here?
#pylint: disable=import-outside-toplevel
#pylint: disable=unused-variable
import tensornetwork.config
with pytest.raises(ImportError):
#pylint: disable=import-outside-toplevel
#pylint: disable=unused-variable
import torch
with pytest.raises(ImportError):
#pylint: disable=import-outside-toplevel
#pylint: disable=unused-variable
import tensorflow as tf
with pytest.raises(ImportError):
#pylint: disable=import-outside-toplevel
#pylint: disable=unused-variable
import jax
@pytest.mark.usefixtures('no_backend_dependency')
def test_import_tensornetwork_without_backends():
#pylint: disable=import-outside-toplevel
#pylint: disable=unused-variable
#pylint: disable=reimported
import tensornetwork
#pylint: disable=import-outside-toplevel
import tensornetwork.backends.pytorch.pytorch_backend
#pylint: disable=import-outside-toplevel
import tensornetwork.backends.tensorflow.tensorflow_backend
#pylint: disable=import-outside-toplevel
import tensornetwork.backends.jax.jax_backend
#pylint: disable=import-outside-toplevel
import tensornetwork.backends.numpy.numpy_backend
with pytest.raises(ImportError):
#pylint: disable=import-outside-toplevel
#pylint: disable=unused-variable
import torch
with pytest.raises(ImportError):
#pylint: disable=unused-variable
#pylint: disable=import-outside-toplevel
import tensorflow as tf
with pytest.raises(ImportError):
#pylint: disable=unused-variable
#pylint: disable=import-outside-toplevel
import jax
@pytest.mark.usefixtures('no_backend_dependency')
def test_basic_numpy_network_without_backends():
#pylint: disable=import-outside-toplevel
#pylint: disable=reimported
#pylint: disable=unused-variable
import tensornetwork
a = Node(np.ones((10,)), backend="numpy")
b = Node(np.ones((10,)), backend="numpy")
edge = connect(a[0], b[0])
final_node = contract(edge)
assert final_node.tensor == np.array(10.)
with pytest.raises(ImportError):
#pylint: disable=unused-variable
#pylint: disable=import-outside-toplevel
import torch
with pytest.raises(ImportError):
#pylint: disable=unused-variable
#pylint: disable=import-outside-toplevel
import tensorflow as tf
with pytest.raises(ImportError):
#pylint: disable=unused-variable
#pylint: disable=import-outside-toplevel
import jax
@pytest.mark.usefixtures('no_backend_dependency')
def test_basic_network_without_backends_raises_error():
#pylint: disable=import-outside-toplevel
#pylint: disable=reimported
#pylint: disable=unused-variable
import tensornetwork
with pytest.raises(ImportError):
Node(np.ones((2, 2)), backend="jax")
with pytest.raises(ImportError):
Node(np.ones((2, 2)), backend="tensorflow")
with pytest.raises(ImportError):
Node(np.ones((2, 2)), backend="pytorch")
def test_base_backend_name():
backend = BaseBackend()
assert backend.name == "base backend"
def test_base_backend_tensordot_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.tensordot(np.ones((2, 2)), np.ones((2, 2)), axes=[[0], [0]])
def test_base_backend_reshape_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.reshape(np.ones((2, 2)), (4, 1))
def test_base_backend_transpose_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.transpose(np.ones((2, 2)), [0, 1])
def test_base_backend_slice_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.slice(np.ones((2, 2)), (0, 1), (1, 1))
def test_base_backend_svd_decompositon_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.svd_decomposition(np.ones((2, 2)), 0)
def test_base_backend_qr_decompositon_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.qr_decomposition(np.ones((2, 2)), 0)
def test_base_backend_rq_decompositon_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.rq_decomposition(np.ones((2, 2)), 0)
def test_base_backend_shape_concat_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.shape_concat([np.ones((2, 2)), np.ones((2, 2))], 0)
def test_base_backend_shape_tensor_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.shape_tensor(np.ones((2, 2)))
def test_base_backend_shape_tuple_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.shape_tuple(np.ones((2, 2)))
def test_base_backend_shape_prod_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.shape_prod(np.ones((2, 2)))
def test_base_backend_sqrt_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.sqrt(np.ones((2, 2)))
def test_base_backend_diag_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.diag(np.ones((2, 2)))
def test_base_backend_convert_to_tensor_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.convert_to_tensor(np.ones((2, 2)))
def test_base_backend_trace_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.trace(np.ones((2, 2)))
def test_base_backend_outer_product_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.outer_product(np.ones((2, 2)), np.ones((2, 2)))
def test_base_backend_einsul_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.einsum("ii", np.ones((2, 2)))
def test_base_backend_norm_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.norm(np.ones((2, 2)))
def test_base_backend_eye_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.eye(2, dtype=np.float64)
def test_base_backend_ones_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.ones((2, 2), dtype=np.float64)
def test_base_backend_zeros_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.zeros((2, 2), dtype=np.float64)
def test_base_backend_randn_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.randn((2, 2))
def test_base_backend_random_uniforl_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.random_uniform((2, 2))
def test_base_backend_conj_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.conj(np.ones((2, 2)))
def test_base_backend_eigh_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.eigh(np.ones((2, 2)))
def test_base_backend_eigs_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.eigs(np.ones((2, 2)))
def test_base_backend_eigs_lanczos_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.eigsh_lanczos(lambda x: x, np.ones((2)))
def test_base_backend_addition_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.addition(np.ones((2, 2)), np.ones((2, 2)))
def test_base_backend_subtraction_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.subtraction(np.ones((2, 2)), np.ones((2, 2)))
def test_base_backend_multiply_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.multiply(np.ones((2, 2)), np.ones((2, 2)))
def test_base_backend_divide_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.divide(np.ones((2, 2)), np.ones((2, 2)))
def test_base_backend_index_update_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.index_update(np.ones((2, 2)), np.ones((2, 2)), np.ones((2, 2)))
def test_base_backend_inv_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.inv(np.ones((2, 2)))
def test_base_backend_sin_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.sin(np.ones((2, 2)))
def test_base_backend_cos_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.cos(np.ones((2, 2)))
def test_base_backend_exp_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.exp(np.ones((2, 2)))
def test_base_backend_log_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.log(np.ones((2, 2)))
def test_base_backend_expm_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.expm(np.ones((2, 2)))
def test_base_backend_sparse_shape_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.sparse_shape(np.ones((2, 2)))
def test_base_backend_broadcast_right_multiplication_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.broadcast_right_multiplication(np.ones((2, 2)), np.ones((2, 2)))
def test_base_backend_broadcast_left_multiplication_not_implemented():
backend = BaseBackend()
with pytest.raises(NotImplementedError):
backend.broadcast_left_multiplication(np.ones((2, 2)), np.ones((2, 2)))
def test_backend_instantiation(backend):
backend1 = backend_factory.get_backend(backend)
backend2 = backend_factory.get_backend(backend)
assert backend1 is backend2
| 29.8 | 77 | 0.761581 | 0 | 0 | 151 | 0.012359 | 4,644 | 0.380095 | 0 | 0 | 1,867 | 0.152807 |
31dd79c83f754d036eb084c170cefc01374db92c | 633 | py | Python | src/GUI/Plotter.py | sbooeshaghi/pegasus | 32ca075b38a72a7955209657a8326ac749f658a3 | [
"BSD-2-Clause"
]
| 1 | 2021-08-31T13:30:25.000Z | 2021-08-31T13:30:25.000Z | src/GUI/Plotter.py | pachterlab/pegasus | 32ca075b38a72a7955209657a8326ac749f658a3 | [
"BSD-2-Clause"
]
| 1 | 2020-10-27T16:42:55.000Z | 2020-10-27T16:42:55.000Z | src/GUI/Plotter.py | pachterlab/pegasus | 32ca075b38a72a7955209657a8326ac749f658a3 | [
"BSD-2-Clause"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pyqtgraph as pg
import numpy as np
class CustomWidget(pg.GraphicsWindow):
pg.setConfigOption('background', 'w')
pg.setConfigOption('foreground', 'k')
def __init__(self, parent=None, **kargs):
pg.GraphicsWindow.__init__(self, **kargs)
self.setParent(parent)
self.setWindowTitle('pyqtgraph example: Scrolling Plots')
self.p = self.addPlot(labels = {'left':'Position', 'bottom':'Time'})
self.data = np.zeros(10)
self.curve = self.p.plot(self.data, pen='b')
if __name__ == '__main__':
w = CustomWidget()
w.show() | 31.65 | 77 | 0.63981 | 479 | 0.756714 | 0 | 0 | 0 | 0 | 0 | 0 | 153 | 0.241706 |
31de64f0189fb656e61e3cf8d36bbc5c08efed8c | 2,733 | py | Python | tests/test_collapsible.py | TehMillhouse/sphinxawesome-theme | 5130b8b4c2546ceaccf37353fa6a0bfb4526303c | [
"MIT"
]
| 17 | 2020-07-10T12:05:07.000Z | 2022-03-08T03:40:49.000Z | tests/test_collapsible.py | TehMillhouse/sphinxawesome-theme | 5130b8b4c2546ceaccf37353fa6a0bfb4526303c | [
"MIT"
]
| 475 | 2020-05-22T09:44:25.000Z | 2022-03-27T08:01:23.000Z | tests/test_collapsible.py | TehMillhouse/sphinxawesome-theme | 5130b8b4c2546ceaccf37353fa6a0bfb4526303c | [
"MIT"
]
| 10 | 2020-12-23T11:14:57.000Z | 2022-02-13T08:51:02.000Z | """Tests for collapsible definition lists.
When the option ``html_collapsible_definitions``
is ``True``, some HTML classes should be added
to some definition lists but not all of them.
"""
from pathlib import Path
import pytest
from sphinx.application import Sphinx
from .util import parse_html
@pytest.mark.sphinx(
"html",
testroot="collapsible",
confoverrides={"html_theme": "sphinxawesome_theme"},
freshenv=True,
)
def test_no_permalinks(app: Sphinx) -> None:
"""It tests that there are no permalinks."""
app.config.html_permalinks = False # type: ignore[attr-defined]
app.build()
tree = parse_html(Path(app.outdir) / "index.html")
dl = tree("dl")
assert len(dl) == 2
headerlinks = tree("a", class_="headerlink")
assert len(headerlinks) == 0
@pytest.mark.sphinx(
"html",
testroot="collapsible",
confoverrides={"html_theme": "sphinxawesome_theme"},
freshenv=True,
)
def test_no_collapsible_definitions(app: Sphinx) -> None:
"""By default, no classes should be added."""
app.build()
tree = parse_html(Path(app.outdir) / "index.html")
dl = tree("dl")
assert len(dl) == 2
assert str(dl[0]).replace("\n", "") == (
'<dl class="simple"><dt>term</dt><dd><p>definition</p></dd></dl>'
)
assert dl[1]["class"] == ["std", "option", "code-definition"]
dt, dd = (c for c in dl[1].children if c.strip is None)
assert dt.name == "dt"
assert "accordion" not in dt["class"]
assert dd.name == "dd"
assert "class" not in dd
expand_more_button = dt("button", class_="expand-more")
assert len(expand_more_button) == 0
@pytest.mark.sphinx(
"html",
testroot="collapsible",
confoverrides={"html_theme": "sphinxawesome_theme"},
freshenv=True,
)
def test_collapsible_definitions(app: Sphinx) -> None:
"""It tests the correct classes being added to the definition lists.
It should not add the classes to normal definition lists.
"""
# if specified in 'confoverrides', this returns a warning
app.config.html_collapsible_definitions = True # type: ignore[attr-defined]
app.build()
tree = parse_html(Path(app.outdir) / "index.html")
dl = tree("dl")
assert len(dl) == 2
assert str(dl[0]).replace("\n", "") == (
'<dl class="simple"><dt>term</dt><dd><p>definition</p></dd></dl>'
)
assert "code-definition" in dl[1]["class"]
dt, dd = (c for c in dl[1].children if c.strip is None)
assert dt.name == "dt"
assert dt["class"] == ["sig", "sig-object", "std", "accordion"]
assert dd.name == "dd"
assert dd["class"] == ["panel"]
expand_more_button = dt("button", class_="expand-more")
assert len(expand_more_button) == 1
| 30.032967 | 80 | 0.642883 | 0 | 0 | 0 | 0 | 2,425 | 0.887303 | 0 | 0 | 1,089 | 0.398463 |
31df4d7e972bd1519fc475be70b05e383b709299 | 1,618 | py | Python | Iris Network/Conclusion/task.py | jetbrains-academy/Machine-Learning-101 | 7b583dbff1e90115296dcaeac78ca88363c158c9 | [
"MIT"
]
| null | null | null | Iris Network/Conclusion/task.py | jetbrains-academy/Machine-Learning-101 | 7b583dbff1e90115296dcaeac78ca88363c158c9 | [
"MIT"
]
| 10 | 2021-11-22T16:51:52.000Z | 2022-02-14T12:57:57.000Z | Iris Network/Conclusion/task.py | jetbrains-academy/Machine-Learning-101 | 7b583dbff1e90115296dcaeac78ca88363c158c9 | [
"MIT"
]
| null | null | null | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from network import NN
from evaluate import accuracy
def read_data(fpath):
iris = pd.read_csv(fpath)
iris.loc[iris['species'] == 'virginica', 'species'] = 0
iris.loc[iris['species'] == 'versicolor', 'species'] = 1
iris.loc[iris['species'] == 'setosa', 'species'] = 2
iris = iris[iris['species'] != 2]
return iris[['petal_length', 'petal_width']].values, iris[['species']].values.astype('uint8')
def plot_data(X, y):
plt.scatter(X[:, 0], X[:, 1], c=y[:, 0], s=40, cmap=plt.cm.Spectral)
plt.title("IRIS DATA | Blue - Versicolor, Red - Virginica ")
plt.xlabel('Petal Length')
plt.ylabel('Petal Width')
plt.show()
def train_test_split(X, y, ratio=0.8):
indices = np.arange(X.shape[0])
np.random.shuffle(indices)
train_len = int(X.shape[0] * ratio)
return X[indices[:train_len]], y[indices[:train_len]], X[indices[train_len:]], y[indices[train_len:]]
if __name__ == '__main__':
X, y = read_data('iris.csv')
# comment the following line if you don't need the plot anymore
plot_data(X, y)
X_train, y_train, X_test, y_test = train_test_split(X, y, 0.7)
nn = NN(len(X[0]), 5, 1)
output = nn.feedforward(X_train)
print(output)
print(f'w1 before backward propagation: \n{nn.w1} \nw2 before backward propagation:\n{nn.w2}')
nn.backward(X_train, y_train, output)
print(f'w1 after backward propagation: \n{nn.w1} \nw2 after backward propagation:\n{nn.w2}')
nn.train(X_train, y_train)
print("Accuracy:")
print(accuracy(nn, X_test, y_test))
| 33.708333 | 105 | 0.65513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 479 | 0.296044 |
31e177fb5a84a661f6f3ed3c32e0ead9540dfcd1 | 1,160 | py | Python | agentless/crypto.py | tinyauth/agentless | 50f30dbb11007fd58c057a38c61783bff282603f | [
"Apache-2.0"
]
| null | null | null | agentless/crypto.py | tinyauth/agentless | 50f30dbb11007fd58c057a38c61783bff282603f | [
"Apache-2.0"
]
| null | null | null | agentless/crypto.py | tinyauth/agentless | 50f30dbb11007fd58c057a38c61783bff282603f | [
"Apache-2.0"
]
| null | null | null | from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import padding, rsa
backend = default_backend()
def generate_private_key():
key = rsa.generate_private_key(
public_exponent=65537,
key_size=4096,
backend=backend,
)
return key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption(),
).decode('utf-8')
def load_private_key(private_key_pem):
return serialization.load_pem_private_key(
private_key_pem,
password=None,
backend=backend,
)
def public_key_from_private_key(private_key):
public_key = private_key.public_key()
foo = public_key.public_bytes(
encoding=serialization.Encoding.OpenSSH,
format=serialization.PublicFormat.OpenSSH,
).decode('utf-8')
return foo
def ssh_sign_data(key, data):
return key.sign(
data,
padding=padding.PKCS1v15(),
algorithm=hashes.SHA1(),
)
| 25.777778 | 66 | 0.712931 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.012069 |
31e18d81d721e6fc0a0c74da919f35393478b123 | 324 | py | Python | test/programytest/storage/entities/test_nodes.py | cdoebler1/AIML2 | ee692ec5ea3794cd1bc4cc8ec2a6b5e5c20a0d6a | [
"MIT"
]
| 345 | 2016-11-23T22:37:04.000Z | 2022-03-30T20:44:44.000Z | test/programytest/storage/entities/test_nodes.py | MikeyBeez/program-y | 00d7a0c7d50062f18f0ab6f4a041068e119ef7f0 | [
"MIT"
]
| 275 | 2016-12-07T10:30:28.000Z | 2022-02-08T21:28:33.000Z | test/programytest/storage/entities/test_nodes.py | VProgramMist/modified-program-y | f32efcafafd773683b3fe30054d5485fe9002b7d | [
"MIT"
]
| 159 | 2016-11-28T18:59:30.000Z | 2022-03-20T18:02:44.000Z | import unittest
import unittest.mock
from programy.storage.entities.nodes import NodesStore
class NodesStoreTest(unittest.TestCase):
def test_load(self):
store = NodesStore()
with self.assertRaises(NotImplementedError):
collector = unittest.mock.Mock()
store.load(collector)
| 21.6 | 54 | 0.703704 | 227 | 0.700617 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
31e1ce88e4424fa367dbbc4289f23529ddd13fe8 | 1,939 | py | Python | sphinx/source/tutorial/exercises/stocks.py | minrk/bokeh | ae4366e508355afc06b5fc62f1ee399635ab909d | [
"BSD-3-Clause"
]
| null | null | null | sphinx/source/tutorial/exercises/stocks.py | minrk/bokeh | ae4366e508355afc06b5fc62f1ee399635ab909d | [
"BSD-3-Clause"
]
| null | null | null | sphinx/source/tutorial/exercises/stocks.py | minrk/bokeh | ae4366e508355afc06b5fc62f1ee399635ab909d | [
"BSD-3-Clause"
]
| null | null | null |
import numpy as np
import pandas as pd
from bokeh.plotting import *
# Here is some code to read in some stock data from the Yahoo Finance API
AAPL = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=AAPL&a=0&b=1&c=2000",
parse_dates=['Date'])
GOOG = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=GOOG&a=0&b=1&c=2000",
parse_dates=['Date'])
MSFT = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=MSFT&a=0&b=1&c=2000",
parse_dates=['Date'])
IBM = pd.read_csv(
"http://ichart.yahoo.com/table.csv?s=IBM&a=0&b=1&c=2000",
parse_dates=['Date'])
output_file("stocks.html", title="stocks.py example")
# EXERCISE: turn on plot hold
# EXERCISE: finish this line plot, and add more for the other stocks. Each one should
# have a legend, and its own color.
line(
AAPL['Date'], # x coordinates
AAPL['Adj Close'], # y coordinates
color='#A6CEE3', # set a color for the line
legend='AAPL', # attach a legend label
x_axis_type = "datetime", # NOTE: only needed on first
tools="pan,wheel_zoom,box_zoom,reset,previewsave" # NOTE: only needed on first
)
# EXERCISE: style the plot, set a title, lighten the gridlines, etc.
# EXERCISE: start a new figure
# Here is some code to compute the 30-day moving average for AAPL
aapl = AAPL['Adj Close']
aapl_dates = AAPL['Date']
window_size = 30
window = np.ones(window_size)/float(window_size)
aapl_avg = np.convolve(aapl, window, 'same')
# EXERCISE: plot a scatter of circles for the individual AAPL prices with legend
# 'close'. Remember to set the x axis type and tools on the first renderer.
# EXERCISE: plot a line of the AAPL moving average data with the legeng 'avg'
# EXERCISE: style the plot, set a title, lighten the gridlines, etc.
show() # open a browser
| 34.017544 | 85 | 0.638473 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,227 | 0.6328 |
31e29f3d6b52be28f77756b4ec61862d6adf938c | 1,828 | py | Python | nni/retiarii/converter/visualize.py | qfyin/nni | 59a1ccf8eba68b94974e84fc3834f38d851faf89 | [
"MIT"
]
| 3 | 2021-02-23T14:01:43.000Z | 2021-03-29T16:19:32.000Z | nni/retiarii/converter/visualize.py | qfyin/nni | 59a1ccf8eba68b94974e84fc3834f38d851faf89 | [
"MIT"
]
| 1 | 2021-01-17T08:53:56.000Z | 2021-01-17T08:53:56.000Z | nni/retiarii/converter/visualize.py | qfyin/nni | 59a1ccf8eba68b94974e84fc3834f38d851faf89 | [
"MIT"
]
| 1 | 2020-12-21T11:15:54.000Z | 2020-12-21T11:15:54.000Z | import graphviz
def convert_to_visualize(graph_ir, vgraph):
for name, graph in graph_ir.items():
if name == '_training_config':
continue
with vgraph.subgraph(name='cluster'+name) as subgraph:
subgraph.attr(color='blue')
cell_node = {}
ioput = {'_inputs': '{}-{}'.format(name, '_'.join(graph['inputs'])),
'_outputs': '{}-{}'.format(name, '_'.join(graph['outputs']))}
subgraph.node(ioput['_inputs'])
subgraph.node(ioput['_outputs'])
for node_name, node_value in graph['nodes'].items():
value = node_value['operation']
if value['type'] == '_cell':
cell_input_name = '{}-{}'.format(value['cell_name'], '_'.join(graph_ir[value['cell_name']]['inputs']))
cell_output_name = '{}-{}'.format(value['cell_name'], '_'.join(graph_ir[value['cell_name']]['outputs']))
cell_node[node_name] = (cell_input_name, cell_output_name)
print('cell: ', node_name, cell_input_name, cell_output_name)
else:
subgraph.node(node_name)
for edge in graph['edges']:
src = edge['head'][0]
if src == '_inputs':
src = ioput['_inputs']
elif src in cell_node:
src = cell_node[src][1]
dst = edge['tail'][0]
if dst == '_outputs':
dst = ioput['_outputs']
elif dst in cell_node:
dst = cell_node[dst][0]
subgraph.edge(src, dst)
def visualize_model(graph_ir):
vgraph = graphviz.Digraph('G', filename='vgraph', format='jpg')
convert_to_visualize(graph_ir, vgraph)
vgraph.render()
| 43.52381 | 124 | 0.516958 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 301 | 0.164661 |
31e44da249242c3967f376c2f079200c57cbe554 | 155 | py | Python | Python/Tests/TestData/TestDiscoverer/ConfigUnittest/Product/prefix_not_included.py | techkey/PTVS | 8355e67eedd8e915ca49bd38a2f36172696fd903 | [
"Apache-2.0"
]
| 404 | 2019-05-07T02:21:57.000Z | 2022-03-31T17:03:04.000Z | Python/Tests/TestData/TestDiscoverer/ConfigUnittest/Product/prefix_not_included.py | techkey/PTVS | 8355e67eedd8e915ca49bd38a2f36172696fd903 | [
"Apache-2.0"
]
| 1,672 | 2019-05-06T21:09:38.000Z | 2022-03-31T23:16:04.000Z | Python/Tests/TestData/TestDiscoverer/ConfigUnittest/Product/prefix_not_included.py | techkey/PTVS | 8355e67eedd8e915ca49bd38a2f36172696fd903 | [
"Apache-2.0"
]
| 186 | 2019-05-13T03:17:37.000Z | 2022-03-31T16:24:05.000Z | import unittest
class PrefixNotIncluded(unittest.TestCase):
def test_not_included(self):
pass
if __name__ == '__main__':
unittest.main()
| 17.222222 | 43 | 0.709677 | 89 | 0.574194 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 0.064516 |
31e59bd3f15670f0f52fb2ebf16c987e7332b1b1 | 885 | py | Python | customBackground.py | VisweshK/Jashmup | ca0cf639000734c5aea8583d9477af9a387f6d46 | [
"MIT"
]
| null | null | null | customBackground.py | VisweshK/Jashmup | ca0cf639000734c5aea8583d9477af9a387f6d46 | [
"MIT"
]
| null | null | null | customBackground.py | VisweshK/Jashmup | ca0cf639000734c5aea8583d9477af9a387f6d46 | [
"MIT"
]
| null | null | null | '''
This is the class to create a scrolling background.
Because the background was so large, it was made to be a .jpg.
'''
import pygame, os
class Background(pygame.sprite.Sprite):
# Initialize the sprite.
def __init__(self,disp):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load(os.path.join("images", "spacebackground.jpg"))
self.image = self.image.convert()
self.rect = self.image.get_rect()
self.dx = 10
self.reset()
# Constantly have the sprite move to the left.
# If the right side of the image moves beyond the right side of the screen, reset the image.
def update(self):
self.rect.left -= self.dx
if self.rect.right <= 800:
self.reset()
# Reset the image's left side to the left side of the screen.
def reset(self):
self.rect.left = 0
| 31.607143 | 96 | 0.638418 | 733 | 0.828249 | 0 | 0 | 0 | 0 | 0 | 0 | 382 | 0.431638 |
31e6ea9406db1015334a06a90ed69fe2df85ccfc | 1,705 | py | Python | src/python/squarepants/file_utils.py | ericzundel/mvn2pants | 59776864939515bc0cae28e1b89944ce55b98b21 | [
"Apache-2.0"
]
| 8 | 2015-04-14T22:37:56.000Z | 2021-01-20T19:46:40.000Z | src/python/squarepants/file_utils.py | ericzundel/mvn2pants | 59776864939515bc0cae28e1b89944ce55b98b21 | [
"Apache-2.0"
]
| 1 | 2016-01-13T23:19:14.000Z | 2016-01-22T22:47:48.000Z | src/python/squarepants/file_utils.py | ericzundel/mvn2pants | 59776864939515bc0cae28e1b89944ce55b98b21 | [
"Apache-2.0"
]
| 3 | 2015-12-13T08:35:34.000Z | 2018-08-01T17:44:59.000Z | import os
import shutil
from contextlib import contextmanager
from tempfile import mkdtemp, mktemp
@contextmanager
def temporary_dir():
"""Returns a temporary directory that gets cleaned up when the context manager exits."""
tempdir = mkdtemp()
try:
yield tempdir
finally:
shutil.rmtree(tempdir)
@contextmanager
def temporary_file():
"""Returns a temporary file that gets cleaned up when the context manager exits."""
tempfile = mktemp()
try:
yield tempfile
finally:
os.remove(tempfile)
@contextmanager
def frozen_dir(path):
"""Ensures that the contents of the directory are the same after exiting as before entering."""
with temporary_dir() as tempdir:
backup = os.path.join(tempdir, 'backup')
shutil.copytree(path, backup)
try:
yield path
finally:
shutil.rmtree(path, ignore_errors=True)
shutil.move(backup, path)
def file_pattern_exists_in_subdir(subdir, pattern):
"""Search for a file pattern recursively in a subdirectory
:param subdir: directory to search recursively
:param re.RegexObject pattern: compiled regular expression object from re.compile()
:return: True if a file with the named pattern exists in the subdirectory
:rtype: bool
"""
for (dirpath, dirnames, filenames) in os.walk(subdir):
for filename in filenames:
if pattern.match(filename):
return True
return False
def touch(fname, times=None, makedirs=False):
"""Creates the specified file at the named path (and optionally sets the time)."""
if makedirs:
directory = os.path.dirname(fname)
if not os.path.exists(directory):
os.makedirs(directory)
with open(fname, 'a'):
os.utime(fname, times)
| 26.230769 | 97 | 0.719062 | 0 | 0 | 736 | 0.431672 | 784 | 0.459824 | 0 | 0 | 650 | 0.381232 |
31e76ef0dddf511d5e363ce2b9c0502413fbe8c1 | 1,366 | py | Python | docs/DSDC/miniprez/miniprez/continuous_integration.py | thoppe/Presentation_Topics | e9aba07e9ab087b44e6044c6082ba8e873a9b4fd | [
"MIT"
]
| 2 | 2018-12-03T17:03:19.000Z | 2018-12-10T16:42:39.000Z | docs/DSDC/miniprez/miniprez/continuous_integration.py | thoppe/Presentation_Topics_in_NLP | e9aba07e9ab087b44e6044c6082ba8e873a9b4fd | [
"MIT"
]
| 1 | 2019-02-19T15:12:19.000Z | 2019-02-19T15:12:19.000Z | docs/DSDC/miniprez/miniprez/continuous_integration.py | thoppe/Presentation_Topics_in_NLP | e9aba07e9ab087b44e6044c6082ba8e873a9b4fd | [
"MIT"
]
| 1 | 2019-02-19T12:51:37.000Z | 2019-02-19T12:51:37.000Z | import asyncio
import os
from parser import miniprez_markdown, build_body
import logging
logger = logging.getLogger("miniprez")
async def file_watcher(target_file, sleep_time=0.5):
"""
Watchs a file. If modified, yield the filename.
Yield the filename once to start.
"""
# Yield the file first
yield target_file, 0
latest_modification_time = os.path.getmtime(target_file)
while True:
current_time = os.path.getmtime(target_file)
if current_time > latest_modification_time:
delta = current_time - latest_modification_time
latest_modification_time = current_time
yield target_file, delta
await asyncio.sleep(sleep_time)
async def parser_loop(f_markdown, sleep_time=0.5):
"""
Main event loop. If the target file is modified, or new start a build.
"""
async for f_target, dt in file_watcher(f_markdown, sleep_time):
build_html(f_target)
def build_html(f_target):
"""
Build the html from the markdown.
"""
f_html_output = f_target.replace(".md", ".html")
logger.info(f"Building {f_target} to {f_html_output}")
with open(f_target) as FIN:
markdown = FIN.read()
html = miniprez_markdown(markdown)
soup = build_body(html)
with open(f_html_output, "w") as FOUT:
FOUT.write(soup.prettify())
| 24.836364 | 74 | 0.678624 | 0 | 0 | 586 | 0.42899 | 0 | 0 | 824 | 0.603221 | 325 | 0.237921 |
31e9ecb8c0d331c1cbff40bfe30ea5db0aed7e97 | 3,943 | py | Python | rl_algorithms/dqn/linear.py | yonghangzhou/rl_algorithms | fe373bf77c9007e4e1d7134e1d610131125fa4b7 | [
"MIT"
]
| 1 | 2020-11-12T07:48:49.000Z | 2020-11-12T07:48:49.000Z | rl_algorithms/dqn/linear.py | yonghangzhou/rl_algorithms | fe373bf77c9007e4e1d7134e1d610131125fa4b7 | [
"MIT"
]
| null | null | null | rl_algorithms/dqn/linear.py | yonghangzhou/rl_algorithms | fe373bf77c9007e4e1d7134e1d610131125fa4b7 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""Linear module for dqn algorithms
- Author: Kyunghwan Kim
- Contact: [email protected]
"""
import math
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from rl_algorithms.common.helper_functions import numpy2floattensor
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class NoisyLinear(nn.Module):
"""Noisy linear module for NoisyNet.
References:
https://github.com/higgsfield/RL-Adventure/blob/master/5.noisy%20dqn.ipynb
https://github.com/Kaixhin/Rainbow/blob/master/model.py
Attributes:
in_features (int): input size of linear module
out_features (int): output size of linear module
std_init (float): initial std value
weight_mu (nn.Parameter): mean value weight parameter
weight_sigma (nn.Parameter): std value weight parameter
bias_mu (nn.Parameter): mean value bias parameter
bias_sigma (nn.Parameter): std value bias parameter
"""
def __init__(self, in_features: int, out_features: int, std_init: float = 0.5):
"""Initialize."""
super(NoisyLinear, self).__init__()
self.in_features = in_features
self.out_features = out_features
self.std_init = std_init
self.weight_mu = nn.Parameter(torch.Tensor(out_features, in_features))
self.weight_sigma = nn.Parameter(torch.Tensor(out_features, in_features))
self.register_buffer("weight_epsilon", torch.Tensor(out_features, in_features))
self.bias_mu = nn.Parameter(torch.Tensor(out_features))
self.bias_sigma = nn.Parameter(torch.Tensor(out_features))
self.register_buffer("bias_epsilon", torch.Tensor(out_features))
self.reset_parameters()
self.reset_noise()
def reset_parameters(self):
"""Reset trainable network parameters (factorized gaussian noise)."""
mu_range = 1 / math.sqrt(self.in_features)
self.weight_mu.data.uniform_(-mu_range, mu_range)
self.weight_sigma.data.fill_(self.std_init / math.sqrt(self.in_features))
self.bias_mu.data.uniform_(-mu_range, mu_range)
self.bias_sigma.data.fill_(self.std_init / math.sqrt(self.out_features))
@staticmethod
def scale_noise(size: int) -> torch.Tensor:
"""Set scale to make noise (factorized gaussian noise)."""
x = numpy2floattensor(np.random.normal(loc=0.0, scale=1.0, size=size), device)
return x.sign().mul(x.abs().sqrt())
def reset_noise(self):
"""Make new noise."""
epsilon_in = self.scale_noise(self.in_features)
epsilon_out = self.scale_noise(self.out_features)
# outer product
self.weight_epsilon.copy_(epsilon_out.ger(epsilon_in))
self.bias_epsilon.copy_(epsilon_out)
def forward(self, x: torch.Tensor) -> torch.Tensor:
"""Forward method implementation.
We don't use separate statements on train / eval mode.
It doesn't show remarkable difference of performance.
"""
return F.linear(
x,
self.weight_mu + self.weight_sigma * self.weight_epsilon,
self.bias_mu + self.bias_sigma * self.bias_epsilon,
)
class NoisyLinearConstructor:
"""Constructor class for changing hyper parameters of NoisyLinear.
Attributes:
std_init (float): initial std value
"""
def __init__(self, std_init: float = 0.5):
"""Initialize."""
self.std_init = std_init
def __call__(self, in_features: int, out_features: int) -> NoisyLinear:
"""Return NoisyLinear instance set hyper parameters"""
return NoisyLinear(in_features, out_features, self.std_init)
class NoisyMLPHandler:
"""Includes methods to handle noisy linear."""
def reset_noise(self):
"""Re-sample noise"""
for _, module in self.named_children():
module.reset_noise()
| 33.700855 | 87 | 0.673852 | 3,572 | 0.905909 | 0 | 0 | 260 | 0.06594 | 0 | 0 | 1,412 | 0.358103 |
31ea1b716a1b8a3e2fc957132ac8497e9ccd0dcb | 10,826 | py | Python | 2015/day7/2015-day7-part2.py | matt-the-ogre/advent-of-code | 7188089d4db4a99fa09ef8366137fe28d1c28205 | [
"MIT"
]
| 1 | 2021-12-03T18:17:54.000Z | 2021-12-03T18:17:54.000Z | 2015/day7/2015-day7-part2.py | matt-the-ogre/advent-of-code | 7188089d4db4a99fa09ef8366137fe28d1c28205 | [
"MIT"
]
| null | null | null | 2015/day7/2015-day7-part2.py | matt-the-ogre/advent-of-code | 7188089d4db4a99fa09ef8366137fe28d1c28205 | [
"MIT"
]
| null | null | null | # Advent of Code - 2015 - Day 7
# --- Day 7: Some Assembly Required ---
# This year, Santa brought little Bobby Tables a set of wires and bitwise logic gates! Unfortunately, little Bobby is a little under the recommended age range, and he needs help assembling the circuit.
# Each wire has an identifier (some lowercase letters) and can carry a 16-bit signal (a number from 0 to 65535). A signal is provided to each wire by a gate, another wire, or some specific value. Each wire can only get a signal from one source, but can provide its signal to multiple destinations. A gate provides no signal until all of its inputs have a signal.
# The included instructions booklet describes how to connect the parts together: x AND y -> z means to connect wires x and y to an AND gate, and then connect its output to wire z.
# For example:
# 123 -> x means that the signal 123 is provided to wire x.
# x AND y -> z means that the bitwise AND of wire x and wire y is provided to wire z.
# p LSHIFT 2 -> q means that the value from wire p is left-shifted by 2 and then provided to wire q.
# NOT e -> f means that the bitwise complement of the value from wire e is provided to wire f.
# Other possible gates include OR (bitwise OR) and RSHIFT (right-shift). If, for some reason, you'd like to emulate the circuit instead, almost all programming languages (for example, C, JavaScript, or Python) provide operators for these gates.
# For example, here is a simple circuit:
# 123 -> x
# 456 -> y
# x AND y -> d
# x OR y -> e
# x LSHIFT 2 -> f
# y RSHIFT 2 -> g
# NOT x -> h
# NOT y -> i
# After it is run, these are the signals on the wires:
# d: 72
# e: 507
# f: 492
# g: 114
# h: 65412
# i: 65079
# x: 123
# y: 456
# In little Bobby's kit's instructions booklet (provided as your puzzle input), what signal is ultimately provided to wire a?
import time, math
def createCircuitDict():
global circuitStrings
global circuitDict
# this function takes the string as input (circuitStrings) and converts them (parses them) into a dictionary (circuitDict)
for circuitLine in circuitStrings:
# the string "->" is the delimeter (sp?) between the left side (input) and the wire name (dictionary key)
leftSide = circuitLine[0 : circuitLine.find("->") - 1]
# if debug:
# print("leftSide:", leftSide)
rightSide = circuitLine[circuitLine.find("->") + 3 : ]
# if debug:
# print("rightSide:", rightSide)
# we set the outputValue to nan (not a number) as a way of checking if we have successfully evaluated the wires inputs or not: default = nan, not evaluated
outputValue = math.nan
# check for numeric input string -- this is easy, just make it the output
if leftSide.isnumeric():
leftSide = int(leftSide)
outputValue = leftSide # simple -- the input to this wire is also it's output
# check for duplicate wire names (dictionary keys) in the input string
if circuitDict.get(rightSide) != None:
print("Weird... dictionary key ", rightSide, "already exists. This shouldn't happen.")
circuitDict[rightSide] = {"input" : leftSide, "output" : outputValue}
def evaluateInput(circuit, operator):
global circuitDict
# if debug:
# print(circuit, operator)
# check left argument for circuit name or number
inputWire1 = circuitDict[circuit]["input"][: circuitDict[circuit]["input"].find(operator) - 1]
inputWire2 = circuitDict[circuit]["input"][circuitDict[circuit]["input"].find(operator) + len(operator) + 1 : ]
# if debug:
# print(circuit, "=", inputWire1, operator, inputWire2)
# look up the output of the inputWire
if inputWire1.isnumeric():
input1 = int(inputWire1)
else:
input1 = circuitDict[inputWire1]["output"]
if inputWire2.isnumeric():
input2 = int(inputWire2)
else:
input2 = circuitDict[inputWire2]["output"]
if math.isnan(input1):
# print("input wire 1 isn't calculated yet")
pass
elif math.isnan(input2):
# print("input wire 2 isn't calculated yet")
pass
else:
# do the bitwise complement on the input number and assign it to the output of this wire
if operator == "AND":
circuitDict[circuit]["output"] = input1 & input2
elif operator == "OR":
circuitDict[circuit]["output"] = input1 | input2
elif operator == "LSHIFT":
circuitDict[circuit]["output"] = input1 << input2
elif operator == "RSHIFT":
circuitDict[circuit]["output"] = input1 >> input2
else:
print("Unknown operator", operator)
# check for rollunder 0
# this occurs because we are using a signed integer for what should be an unsigned 16-bit integer
# TODO figure out if Python has an unsigned 16-bit integer type
if circuitDict[circuit]["output"] < 0:
# if debug:
# print("result under zero, fix it")
circuitDict[circuit]["output"] = 65535 + circuitDict[circuit]["output"]
def doConnection():
global circuitDict
unfinishedCount = len(circuitDict)
lowCount = unfinishedCount
while unfinishedCount:
unfinishedCount = len(circuitDict)
if debug:
print("lowCount", lowCount)
for circuit in circuitDict:
# if the output is not a number, evaluate the input
if math.isnan(circuitDict[circuit]["output"]):
# parse the left side
# we can have NOT, AND, OR, LSHIFT, and RSHIFT as possible commands
if "NOT" in circuitDict[circuit]["input"]:
# operation is logical NOT, invert the input line to be the output
inputWire1 = circuitDict[circuit]["input"][circuitDict[circuit]["input"].find("NOT")+4 : ]
# if debug:
# print(circuit, "= NOT", inputWire1)
# look up the output of the inputWire
if inputWire1.isnumeric():
input1 = int(inputWire1)
else:
input1 = circuitDict[inputWire1]["output"]
if math.isnan(input1):
# print("input wire isn't calculated yet")
pass
else:
# do the bitwise complement on the input number and assign it to the output of this wire
circuitDict[circuit]["output"] = ~input1
# check for rollunder 0
if circuitDict[circuit]["output"] < 0:
# if debug:
# print("result under zero, fix it")
circuitDict[circuit]["output"] = 65536 + circuitDict[circuit]["output"]
elif "AND" in circuitDict[circuit]["input"]:
evaluateInput(circuit, "AND")
elif "OR" in circuitDict[circuit]["input"]:
evaluateInput(circuit, "OR")
elif "LSHIFT" in circuitDict[circuit]["input"]:
evaluateInput(circuit, "LSHIFT")
elif "RSHIFT" in circuitDict[circuit]["input"]:
evaluateInput(circuit, "RSHIFT")
else:
# simplest case -- one input only!
# copy the input wire
# this could be improved by doing it only if the inputWire is resolved
inputWire1 = circuitDict[circuit]["input"]
if debug:
print("simplest case circuit", circuit, " inputWire", inputWire1)
circuitDict[circuit]["output"] = circuitDict[inputWire1]["output"]
else:
# this circuit is done, move on
# if debug:
# print("circuit",circuit,"is done with output ", circuitDict[circuit]["output"], "Break.")
pass
if math.isnan(circuitDict[circuit]["output"]) is False:
# this output is calculated, decrement the unfinished counter
unfinishedCount -= 1
if unfinishedCount < lowCount:
lowCount = unfinishedCount
# if debug:
# print("unfinishedCount", unfinishedCount)
startTime = time.perf_counter() # time in seconds (float)
debug = False
timing = True
unitTesting = False
# maybe a dictionary again?
# circuitStrings = {"a" : {"input" : 1, "output" : NaN}}
# parse the input text file to set up the circuitStrings inputs, then just roll through the dictionary to calculate the outputs
# how will I be sure that the output has been calculated to be the input for the next circuitStrings?
# can I assume the input file is "in order"? Probably not.
# does this mean some sort of recursion algorithm?
# maybe if I populate the outputs with 'NaN' (or Python equivalent) then check that it's not that before using it's output
# I can make it recurse through the inputs, calculating any that have fully realized inputs?
circuitStrings = []
circuitDict = {}
# unit tests, kind of
if unitTesting:
print("Unit Testing")
circuitStrings = ["123 -> x","456 -> y", "x AND y -> d", "x OR y -> e", "x LSHIFT 2 -> f", "y RSHIFT 2 -> g", "NOT x -> h", "NOT y -> i"]
else:
# read the input text file into a variable called presents
with open("2015/day7/input-part2.txt","r") as inputString:
circuitStrings = inputString.readlines()
# remove newlines
for i in range(0, len(circuitStrings)):
circuitStrings[i] = circuitStrings[i].rstrip()
# parse the input to create the dictionary
createCircuitDict()
doConnection()
# show the circuits
if debug:
for circuit in circuitDict:
print(circuit,":",circuitDict[circuit])
if unitTesting:
testPass = False
testPassOutput = {"d": {"output" : 72}, "e": {"output" : 507}, "f": {"output" : 492}, "g": {"output" : 114}, "h": {"output" : 65412}, "i": {"output" : 65079}, "x": {"output" : 123}, "y": {"output" : 456}}
for wire in testPassOutput:
testPassWire = testPassOutput[wire]["output"]
circuitWire = circuitDict[wire]["output"]
if debug:
print("wire", wire, "test:", testPassWire, "calc:", circuitWire)
testPass = testPassWire == circuitWire
if testPass is False:
break
print("testPass:", testPass)
else:
print(circuitDict["a"]["output"])
# this answer for my input is 46065 (part 1), 14134 (part 2)
endTime = time.perf_counter() # time in seconds (float)
if timing:
print("Execution took ", endTime - startTime, " seconds.")
| 42.289063 | 362 | 0.608627 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,647 | 0.521615 |
31ecb15b99e3ceb267fe3088d539b5b22c952d38 | 1,346 | py | Python | flink-ai-flow/examples/workflow_on_event/workflows/init/init.py | lisy09/flink-ai-extended | 011a5a332f7641f66086653e715d0596eab2e107 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
]
| null | null | null | flink-ai-flow/examples/workflow_on_event/workflows/init/init.py | lisy09/flink-ai-extended | 011a5a332f7641f66086653e715d0596eab2e107 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
]
| null | null | null | flink-ai-flow/examples/workflow_on_event/workflows/init/init.py | lisy09/flink-ai-extended | 011a5a332f7641f66086653e715d0596eab2e107 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
]
| null | null | null | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import ai_flow as af
hourly_data_dir = '/tmp/hourly_data'
process_result_base_path = '/tmp/hourly_processed'
daily_data_base_path = '/tmp/daily_data'
daily_result = '/tmp/daily_result'
def init():
af.register_dataset(name='hourly_data', uri=hourly_data_dir)
af.register_dataset(name='hourly_data_processed', uri=process_result_base_path)
af.register_dataset(name='daily_data', uri=daily_data_base_path)
af.register_dataset(name='daily_data_result', uri=daily_result)
if __name__ == '__main__':
af.init_ai_flow_context()
init()
| 35.421053 | 83 | 0.770431 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 925 | 0.687221 |
31ee3bc132db64859847221802dd7bff470b9ce3 | 977 | py | Python | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/bobcat/profiles/Profile_WiSUN.py | SiliconLabs/Gecko_SDK | 991121c706578c9a2135b6f75cc88856e8c64bdc | [
"Zlib"
]
| 82 | 2016-06-29T17:24:43.000Z | 2021-04-16T06:49:17.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/bobcat/profiles/Profile_WiSUN.py | SiliconLabs/Gecko_SDK | 991121c706578c9a2135b6f75cc88856e8c64bdc | [
"Zlib"
]
| 2 | 2017-02-13T10:07:17.000Z | 2017-03-22T21:28:26.000Z | platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/bobcat/profiles/Profile_WiSUN.py | SiliconLabs/Gecko_SDK | 991121c706578c9a2135b6f75cc88856e8c64bdc | [
"Zlib"
]
| 56 | 2016-08-02T10:50:50.000Z | 2021-07-19T08:57:34.000Z | from pyradioconfig.parts.ocelot.profiles.Profile_WiSUN import Profile_WiSUN_Ocelot
from pyradioconfig.parts.common.profiles.bobcat_regs import build_modem_regs_bobcat
from pyradioconfig.parts.common.profiles.profile_common import buildCrcOutputs, buildFecOutputs, buildFrameOutputs, \
buildWhiteOutputs
class Profile_WiSUN_Bobcat(Profile_WiSUN_Ocelot):
def __init__(self):
self._profileName = "WiSUN"
self._readable_name = "WiSUN Profile"
self._category = ""
self._description = "Profile used for WiSUN PHYs"
self._default = False
self._activation_logic = ""
self._family = "bobcat"
def build_register_profile_outputs(self, model, profile):
family = self._family
build_modem_regs_bobcat(model, profile, family)
buildFrameOutputs(model, profile, family)
buildCrcOutputs(model, profile, family)
buildWhiteOutputs(model, profile)
buildFecOutputs(model, profile) | 42.478261 | 117 | 0.73695 | 669 | 0.684749 | 0 | 0 | 0 | 0 | 0 | 0 | 63 | 0.064483 |
31ee781effe2a319a7f8d1c8b7b12faf33878337 | 1,846 | py | Python | tests/dgds_functions_test.py | openearth/hydro-engine-service | 8e7eea489ee241dad2d6d8152d1c30af8a09a8d1 | [
"MIT"
]
| 4 | 2019-02-15T13:53:01.000Z | 2021-12-13T09:53:02.000Z | tests/dgds_functions_test.py | openearth/hydro-engine-service | 8e7eea489ee241dad2d6d8152d1c30af8a09a8d1 | [
"MIT"
]
| 12 | 2018-12-19T08:30:29.000Z | 2021-04-21T12:59:59.000Z | tests/dgds_functions_test.py | openearth/hydro-engine-service | 8e7eea489ee241dad2d6d8152d1c30af8a09a8d1 | [
"MIT"
]
| 4 | 2018-10-17T23:48:21.000Z | 2020-08-05T18:36:14.000Z | import logging
import pytest
from . import auth
from hydroengine_service import dgds_functions
logger = logging.getLogger(__name__)
class TestDGDSFunctions:
@pytest.mark.parametrize('source, start_date, end_date, limit',
[
('projects/dgds-gee/bathymetry/gebco/2019', None, None, 10),
('projects/dgds-gee/glossis/currents', None, None, None),
('projects/dgds-gee/glossis/waterlevel', '2020-11-01', '2020-12-01', None),
('projects/dgds-gee/glossis/wind', '2020-11-01', '2020-11-10', 10),
('projects/dgds-gee/glossis/waveheight', None, None, None),
('projects/dgds-gee/gloffis/weather', None, None, 5),
('projects/dgds-gee/gloffis/hydro', None, None, 5),
('projects/dgds-gee/metocean/waves/percentiles', None, None, 5),
('projects/dgds-gee/chasm/waves', None, None, None),
('projects/dgds-gee/chasm/wind', None, None, None),
('projects/dgds-gee/crucial/evaporation_deficit', None, None, None),
('projects/dgds-gee/crucial/groundwater_declining_trend', None, None, None),
('projects/dgds-gee/msfd/chlorophyll', None, None, None)
])
def test_get_image_collection_info(self, source, start_date, end_date, limit):
image_date_list = dgds_functions.get_image_collection_info(source, start_date, end_date, limit)
assert len(image_date_list) >= 1
assert "imageId" in image_date_list[0]
assert "date" in image_date_list[0]
| 51.277778 | 109 | 0.538462 | 1,709 | 0.925785 | 0 | 0 | 1,679 | 0.909534 | 0 | 0 | 598 | 0.323944 |
31ee7dd58797f57d854758b0971c25c71826cd28 | 2,485 | py | Python | smol_opyt/logistic_problem.py | abelsiqueira/smol-opyt | 58901906eb3129f4aae9edc7893bba624c5a0686 | [
"MIT"
]
| null | null | null | smol_opyt/logistic_problem.py | abelsiqueira/smol-opyt | 58901906eb3129f4aae9edc7893bba624c5a0686 | [
"MIT"
]
| 5 | 2021-08-02T02:04:48.000Z | 2021-08-02T02:27:57.000Z | smol_opyt/logistic_problem.py | abelsiqueira/smol-opyt | 58901906eb3129f4aae9edc7893bba624c5a0686 | [
"MIT"
]
| null | null | null | from math import log
import numpy as np
from numpy import linalg as la
class LogisticProblem:
"""Class for the logistic regression method for classification."""
def __init__(self, feat_mtx, y):
"""Create a Logistic Problem with matrix `feat_mtx` n by p and vector `y` of 0s and 1s with size n.
A bias is added to the model as the first variable."""
self._feat_mtx = feat_mtx
self._y = y
p = feat_mtx.shape[1]
self.beta = np.zeros(p + 1)
def sigmoid(self, v):
"""Compute sigmoid(v) = 1 / (1 + exp(-v)"""
return 1 / (1 + np.exp(-v))
def predict(self, feat_mtx=None, beta=None):
if feat_mtx is None:
feat_mtx = self._feat_mtx
if beta is None:
beta = self.beta
return self.sigmoid(beta[0] + np.dot(feat_mtx, beta[1:]))
def cross_entropy(self, yhat):
"""Compute the cross entropy, given by
sum y[i] * log(yhat[i]) + (1 - y[i]) * log(1 - yhat[i])"""
n = len(self._y)
c = 0.0
for i in range(0, n):
c += self._y[i] * log(
yhat[i]) + (1 - self._y[i]) * log(1 - yhat[i])
return c
def cross_entropy_gradient(self, yhat):
"""Assuming yhat_i = sigmoid(x_i^T beta), returns
sum (y[i] - yhat) * x_i
"""
n = len(self._y)
p = len(self.beta)
g = np.zeros(p)
for i in range(0, n):
g = g + (self._y[i] - yhat[i]) * np.array(
[1.0, *self._feat_mtx[i, :]])
return g
def solve(self):
"""Solve the logistic regression problem"""
max_iter = 1000
iter_count = 0
yhat = self.predict()
loss = self.cross_entropy(yhat)
gradloss = self.cross_entropy_gradient(yhat)
while la.norm(gradloss) > 1e-6 and iter_count < max_iter:
alpha = 1.0
slope = la.norm(gradloss)**2
beta_new = self.beta + alpha * gradloss
yhat = self.predict(beta=beta_new)
loss_new = self.cross_entropy(yhat)
while loss_new < loss + 1e-4 * alpha * slope:
alpha = alpha / 2
beta_new = self.beta + alpha * gradloss
yhat = self.predict(beta=beta_new)
loss_new = self.cross_entropy(yhat)
self.beta = beta_new
loss = loss_new
gradloss = self.cross_entropy_gradient(yhat)
iter_count += 1
| 33.133333 | 107 | 0.534004 | 2,411 | 0.970221 | 0 | 0 | 0 | 0 | 0 | 0 | 522 | 0.21006 |
9ec42ebdeb8c357fae82c9abfd68ebde784ec5ba | 1,280 | py | Python | TeamClassificationUtils.py | Neerajj9/Computer-Vision-based-Offside-Detection-in-soccer | 744bfc636463f24c4f78f25684864c2ce4abb43f | [
"MIT"
]
| 8 | 2020-10-17T14:54:53.000Z | 2022-02-09T11:03:01.000Z | TeamClassificationUtils.py | Neerajj9/Computer-Vision-based-Offside-Detection-in-soccer | 744bfc636463f24c4f78f25684864c2ce4abb43f | [
"MIT"
]
| 4 | 2021-01-03T16:02:29.000Z | 2021-11-23T03:26:01.000Z | TeamClassificationUtils.py | Neerajj9/Computer-Vision-based-Offside-Detection-in-soccer | 744bfc636463f24c4f78f25684864c2ce4abb43f | [
"MIT"
]
| 2 | 2021-04-10T07:05:55.000Z | 2021-09-19T23:22:18.000Z | import numpy as np
# TODO : add code for referee
def get_team_classifications(teamColor1, teamColor2, refColor, keeper1Color, keeper2Color, pose_estimations):
for pose in pose_estimations:
if len(pose[1]) < 2:
pose.append('color not found')
continue
colorDiffs = {}
colorList = np.array(pose[1][0]) + np.array(pose[1][1])
colorList = np.divide(colorList, 2)
colorList = colorList.tolist()
diffTeam1 = list(abs(np.array(teamColor1) - np.array(colorList)))
colorDiffs['team1'] = diffTeam1
diffTeam2 = list(abs(np.array(teamColor2) - np.array(colorList)))
colorDiffs['team2'] = diffTeam2
diffRef = list(abs(np.array(refColor) - np.array(colorList)))
colorDiffs['ref'] = diffRef
diffKeep1 = list(abs(np.array(refColor) - np.array(colorList)))
colorDiffs['keep1'] = diffKeep1
diffKeep2 = list(abs(np.array(refColor) - np.array(colorList)))
colorDiffs['keep2'] = diffKeep2
for key in colorDiffs.keys():
colorDiffs[key] = sum(colorDiffs[key]) / len(colorDiffs[key])
colorDiffs = {k: v for k, v in sorted(colorDiffs.items(), key=lambda item: item[1])}
for key in colorDiffs.keys():
pose.append(key)
break
return pose_estimations | 33.684211 | 109 | 0.651563 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 79 | 0.061719 |
9ec50e4a84db3516536add2eb38a5493aef3c343 | 856 | py | Python | examples/PTSD/mpi_tmp/PTSD_cognet.py | zeroknowledgediscovery/cognet | 3acc2f05451ccbc228bf9c02e5d357b40b0c3e4f | [
"MIT"
]
| null | null | null | examples/PTSD/mpi_tmp/PTSD_cognet.py | zeroknowledgediscovery/cognet | 3acc2f05451ccbc228bf9c02e5d357b40b0c3e4f | [
"MIT"
]
| null | null | null | examples/PTSD/mpi_tmp/PTSD_cognet.py | zeroknowledgediscovery/cognet | 3acc2f05451ccbc228bf9c02e5d357b40b0c3e4f | [
"MIT"
]
| null | null | null | from mpi4py.futures import MPIPoolExecutor
import numpy as np
import pandas as pd
from quasinet.qnet import Qnet, qdistance, load_qnet, qdistance_matrix
from quasinet.qsampling import qsample, targeted_qsample
qnet=load_qnet('../results/PTSD_cognet_test.joblib')
w = 304
h = w
p_all = pd.read_csv("tmp_samples_as_strings.csv", header=None).values.astype(str)[:]
def distfunc(x,y):
d=qdistance(x,y,qnet,qnet)
return d
def dfunc_line(k):
line = np.zeros(w)
y = p_all[k]
for j in range(w):
if j > k:
x = p_all[j]
line[j] = distfunc(x, y)
return line
if __name__ == '__main__':
with MPIPoolExecutor() as executor:
result = executor.map(dfunc_line, range(h))
result = pd.DataFrame(result)
result = result.to_numpy()
result = pd.DataFrame(np.maximum(result, result.transpose()))
result.to_csv('tmp_distmatrix.csv',index=None,header=None) | 27.612903 | 84 | 0.73715 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.109813 |
9ec518765538fd6d2d3d18e0ed23d60b0ac69f7f | 58 | py | Python | tests/__init__.py | bio2bel/famplex | 3a1dfb0f3da3eb33c2b4de658cf02ffb6b5bebaa | [
"MIT"
]
| null | null | null | tests/__init__.py | bio2bel/famplex | 3a1dfb0f3da3eb33c2b4de658cf02ffb6b5bebaa | [
"MIT"
]
| 3 | 2018-07-24T14:32:41.000Z | 2018-08-10T11:17:49.000Z | tests/__init__.py | bio2bel/famplex | 3a1dfb0f3da3eb33c2b4de658cf02ffb6b5bebaa | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""Tests for Bio2BEL FamPlex."""
| 14.5 | 32 | 0.551724 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 55 | 0.948276 |
9ec5885a6003a25f321416770e39cf31583e933d | 4,778 | py | Python | dfainductor/algorithms/searchers.py | ctlab/DFA-Inductor-py | c9f0906101a4c83f125ab8c487dc2eac7a52d310 | [
"MIT"
]
| 2 | 2020-06-03T11:27:45.000Z | 2021-08-30T04:14:48.000Z | dfainductor/algorithms/searchers.py | ctlab/DFA-Inductor-py | c9f0906101a4c83f125ab8c487dc2eac7a52d310 | [
"MIT"
]
| 1 | 2021-07-14T18:43:58.000Z | 2021-07-14T18:43:58.000Z | dfainductor/algorithms/searchers.py | ctlab/DFA-Inductor-py | c9f0906101a4c83f125ab8c487dc2eac7a52d310 | [
"MIT"
]
| null | null | null | from typing import List
from pysat.solvers import Solver
from ..variables import VarPool
from .reductions import ClauseGenerator
from ..examples import BaseExamplesProvider
from ..logging_utils import *
from ..statistics import STATISTICS
from ..structures import APTA, DFA, InconsistencyGraph
class LSUS:
_solver: Solver
def __init__(self,
apta: APTA,
ig: InconsistencyGraph,
solver_name: str,
sb_strategy: str,
cegar_mode: str,
examples_provider: BaseExamplesProvider,
assumptions_mode: str) -> None:
self._apta = apta
self._ig = ig
self._solver_name = solver_name
self._sb_strategy = sb_strategy
self._cegar_mode = cegar_mode
self._examples_provider = examples_provider
self._assumptions_mode = assumptions_mode
self._var_pool: VarPool = VarPool()
self._clause_generator = ClauseGenerator(self._apta,
self._ig,
self._var_pool,
self._assumptions_mode,
self._sb_strategy)
def _try_to_synthesize_dfa(self, size: int, assumptions: List[int]) -> Optional[DFA]:
log_info('Vars in CNF: {0}'.format(self._solver.nof_vars()))
log_info('Clauses in CNF: {0}'.format(self._solver.nof_clauses()))
STATISTICS.start_solving_timer()
is_sat = self._solver.solve(assumptions=assumptions)
STATISTICS.stop_solving_timer()
if is_sat:
assignment = self._solver.get_model()
dfa = DFA()
for i in range(size):
dfa.add_state(
DFA.State.StateStatus.from_bool(assignment[self._var_pool.var('z', i) - 1] > 0)
)
for i in range(size):
for label in range(self._apta.alphabet_size):
for j in range(size):
if assignment[self._var_pool.var('y', i, label, j) - 1] > 0:
dfa.add_transition(i, self._apta.alphabet[label], j)
return dfa
else:
return None
def search(self, lower_bound: int, upper_bound: int) -> Optional[DFA]:
self._solver = Solver(self._solver_name)
log_info('Solver has been started.')
for size in range(lower_bound, upper_bound + 1):
if self._assumptions_mode == 'none' and size > lower_bound:
self._solver = Solver(self._solver_name)
log_info('Solver has been restarted.')
log_br()
log_info('Trying to build a DFA with {0} states.'.format(size))
STATISTICS.start_formula_timer()
if self._assumptions_mode != 'none' and size > lower_bound:
self._clause_generator.generate_with_new_size(self._solver, size - 1, size)
else:
self._clause_generator.generate(self._solver, size)
STATISTICS.stop_formula_timer()
assumptions = self._clause_generator.build_assumptions(size, self._solver)
while True:
dfa = self._try_to_synthesize_dfa(size, assumptions)
if dfa:
counter_examples = self._examples_provider.get_counter_examples(dfa)
if counter_examples:
log_info('An inconsistent DFA with {0} states is found.'.format(size))
log_info('Added {0} counterexamples.'.format(len(counter_examples)))
STATISTICS.start_apta_building_timer()
(new_nodes_from, changed_statuses) = self._apta.add_examples(counter_examples)
STATISTICS.stop_apta_building_timer()
STATISTICS.start_ig_building_timer()
self._ig.update(new_nodes_from)
STATISTICS.stop_ig_building_timer()
STATISTICS.start_formula_timer()
self._clause_generator.generate_with_new_counterexamples(self._solver, size,
new_nodes_from,
changed_statuses)
STATISTICS.stop_formula_timer()
continue
break
if not dfa:
log_info('Not found a DFA with {0} states.'.format(size))
else:
log_success('The DFA with {0} states is found!'.format(size))
return dfa
return None
| 43.834862 | 102 | 0.547928 | 4,479 | 0.937422 | 0 | 0 | 0 | 0 | 0 | 0 | 295 | 0.061741 |
9ec5b4570de1244cfecc950781db192eb22b2b73 | 22,697 | py | Python | lc_sqlalchemy_dbutils/manager.py | libcommon/sqlalchemy-dbutils-py | 39b2fb0fc51279a4d1c8a2b6fe250f8cff44d1b1 | [
"MIT"
]
| null | null | null | lc_sqlalchemy_dbutils/manager.py | libcommon/sqlalchemy-dbutils-py | 39b2fb0fc51279a4d1c8a2b6fe250f8cff44d1b1 | [
"MIT"
]
| null | null | null | lc_sqlalchemy_dbutils/manager.py | libcommon/sqlalchemy-dbutils-py | 39b2fb0fc51279a4d1c8a2b6fe250f8cff44d1b1 | [
"MIT"
]
| null | null | null | ## -*- coding: UTF8 -*-
## manager.py
## Copyright (c) 2020 libcommon
##
## Permission is hereby granted, free of charge, to any person obtaining a copy
## of this software and associated documentation files (the "Software"), to deal
## in the Software without restriction, including without limitation the rights
## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
## copies of the Software, and to permit persons to whom the Software is
## furnished to do so, subject to the following conditions:
##
## The above copyright notice and this permission notice shall be included in all
## copies or substantial portions of the Software.
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
## SOFTWARE.
from getpass import getpass
import os
from pathlib import Path
from typing import Any, Optional, Union
from sqlalchemy import create_engine as sqla_create_engine, MetaData
from sqlalchemy.engine import Engine
from sqlalchemy.engine.url import make_url, URL
from sqlalchemy.orm import scoped_session as ScopedSession, Session, sessionmaker as SessionMaker
from sqlalchemy.orm.query import Query
__author__ = "libcommon"
DBManagerSessionFactory = Union[ScopedSession, SessionMaker]
DBManagerSession = Union[ScopedSession, Session]
ConnectionURL = Union[str, URL]
class DBManager:
"""SQLAlchemy ORM database connection manager with
utility methods for connecting to, querying, performing/rolling back
transactions on, and deleting records from the database. Agnostic to
database backend and designed for use within a single process (not shared
by multiple processes.)
"""
__slots__ = ("_engine", "_scoped_sessions", "_session", "_session_factory", "connection_url", "metadata",)
@classmethod
def from_file(cls, config_path_str: str) -> "DBManager":
"""
Args:
config_path => path to file containing connection URL
Description:
Reads connection URL from config file and creates instance of class.
Will validate connection URL and if it doesn't have password, will prompt user.
Preconditions:
Connection URL must be a valid RFC1738 URL and must be the only content in the file.
Raises:
FileNotFoundError: if provided config_path isn't an existing file
ValueError: if validation (parsing) of connection URL fails
"""
# Ensure config_path is existing file
config_path = Path(config_path_str)
if not config_path.is_file():
raise FileNotFoundError(str(config_path))
# Read first line from file and use as connection URL
with open(str(config_path)) as config_file:
connection_url_str = config_file.read().strip()
# Parse connection URL into various components
try:
connection_url = make_url(connection_url_str)
except Exception as exc:
raise ValueError("Failed to parse URL from file ({})".format(exc))
# If is not SQLite file and password not provided, get password from user
if not ("sqlite" in connection_url.drivername or connection_url.password):
passwd = getpass("Enter database password: ")
connection_url.password = passwd
return cls(connection_url)
def __init__(self,
connection_url: ConnectionURL,
metadata: Optional[MetaData] = None,
scoped_sessions: bool = False):
if isinstance(connection_url, str):
connection_url = make_url(connection_url)
self.connection_url = connection_url
self.metadata = metadata
self._scoped_sessions = scoped_sessions
self._engine: Optional[Engine] = None
self._session: Optional[Session] = None
self._session_factory: Optional[DBManagerSessionFactory] = None
def create_engine(self, **kwargs) -> "DBManager":
"""
Args:
kwargs => passed to SQLAlchemy Engine constructor
Description:
Create SQLAlchemy Engine using self.connection_url.
See: https://docs.sqlalchemy.org/en/13/core/engines.html
Preconditions:
N/A
Raises:
RuntimeError: if self.engine is already set and persist is True
"""
# Ensure self._engine isn't already defined
# NOTE: Consider whether this implementation makes sense, or if it makes more sense
# to simply dispose of existing engine (with DEBUG log) before creating new one.
if self._engine:
raise RuntimeError("Cannot attach new Engine without removing existing one")
# Create SQLAlchemy Engine with connection URL
engine = sqla_create_engine(self.connection_url, **kwargs)
self._engine = engine
return self
def close_engine(self) -> "DBManager":
"""
Args:
N/A
Description:
Close and dispose of existing Engine and connection pool on
self._engine if defined.
Preconditions:
N/A
Raises:
N/A
"""
# If have active session, close it before engine
if self.session():
self.close_session()
# If self._engine defined
if self._engine:
# Dispose of existing connection pool
self._engine.dispose()
self._engine = None
return self
def with_metadata(self, metadata: MetaData) -> "DBManager":
"""
Args:
N/A
Description:
Setter for self.metadata using builder pattern.
Preconditions:
N/A
Raises:
N/A
"""
self.metadata = metadata
return self
def bootstrap_db(self) -> "DBManager":
"""
Args:
N/A
Description:
Create all tables defined in self.metadata.
See: https://docs.sqlalchemy.org/en/13/core/metadata.html
Preconditions:
N/A
Raises:
N/A
"""
if not self._engine:
raise RuntimeError("Cannot bootstrap database without an Engine")
if not self.metadata:
raise RuntimeError("Cannot bootstrap database with MetaData")
self.metadata.create_all(self._engine)
return self
def create_session_factory(self, **kwargs) -> "DBManager":
"""
Args:
kwargs => passed to SQLAlchemy sessionmaker constructor
Description:
Create SQLAlchemy scoped_session if self._scoped_sessions is True,
otherwise sessionmaker. All kwargs are passed to sessionmaker constructor.
This method should only be called _once_ by the DBManager. SQLAlchemy doesn't
recommend manually closing all sessions, and the mechanics for doing so have changed
across versions.
See: https://docs.sqlalchemy.org/en/13/orm/session_api.html#session-and-sessionmaker
and https://docs.sqlalchemy.org/en/13/orm/contextual.html#sqlalchemy.orm.scoping.scoped_session
and https://docs.sqlalchemy.org/en/13/orm/session_api.html#sqlalchemy.orm.session.sessionmaker.close_all
Preconditions:
N/A
Raises:
RuntimeError: if self._session_factory is already defined, or
if self._engine isn't defined
"""
# Ensure self._session_factory isn't already defined
if self._session_factory:
raise RuntimeError("Session factory already created")
# Ensure self._engine is defined
if not self._engine:
raise RuntimeError("Cannot create session factory without an Engine")
# Generate sessionmaker session factory
self._session_factory = SessionMaker(bind=self._engine, **kwargs)
# If scoped sessions, wrap in scoped_sessions factory
if self._scoped_sessions:
self._session_factory = ScopedSession(self._session_factory)
return self
def connect(self, bootstrap: bool = False) -> "DBManager":
"""
Args:
N/A
Description:
Create database engine and session factory (but _not_ active session).
gen_session must be called subsequently to create an active session.
If bootstrap specified, use self.metdata and self._engine to create all tables,
indexes, views, etc.
Preconditions:
N/A
Raises:
ValueError: if bootstrap and self.metadata isn't defined
"""
# Generate database engine if needed
if not self._engine:
self.create_engine()
# Bootstrap database if asked
if bootstrap:
self.bootstrap_db()
# Generate session factory if needed
if not self._session_factory:
self.create_session_factory()
return self
def gen_session(self, persist: bool = True) -> DBManagerSession:
"""
Args:
persist => whether to persist created session on self
Description:
Generate new database session. If persist is True, assign new session
to self._session. In this way, the DBManager can act simply as a factory for new sessions,
or as a more complete DB manager. Use the `session` method to access the active session.
See: https://docs.sqlalchemy.org/en/13/orm/session_basics.html#basics-of-using-a-session
Preconditions:
N/A
Raises:
RuntimeError: if self._session_factory hasn't been created yet, or
if self._session is already set and persist is True (for non-scoped sessions)
"""
# Ensure session factory has been created
if not self._session_factory:
raise RuntimeError("Session factory must be created before a session can be generated")
# If scoped sessions, return scoped session manager
if self._scoped_sessions:
return self._session_factory # type: ignore
# Otherwise, generate new session from session factory
session = self._session_factory()
# If persist session to self, ensure self.session isn't already defined
if persist:
if self._session:
raise RuntimeError("Cannot attach new Session without removing existing Session")
self._session = session
return session
def session(self) -> Optional[DBManagerSession]:
"""
Args:
N/A
Description:
Current session (if exists).
Preconditions:
N/A
Raises:
N/A
"""
# If scoped sessions, return scoped session manager
if self._scoped_sessions:
return self._session_factory # type: ignore
# Otherwise, return self._session
return self._session
def close_session(self) -> "DBManager":
"""
Args:
N/A
Description:
Close the current session.
Preconditions:
N/A
Raises:
N/A
"""
# If scoped sessions and session factory has been initialized,
# remove current session
if self._scoped_sessions and self._session_factory:
self._session_factory.remove() # type: ignore
# If session on self, close it
elif self._session:
self._session.close()
self._session = None
return self
def _assert_session(self) -> DBManagerSession:
"""
Args:
N/A
Description:
Raise ValueError if no existing session. If scoped_sessions
is True, then requires self._session_factory to be defined.
Otherwise, requires self._session to be defined (non-None).
Preconditions:
N/A
Raises:
ValueError: if self._session not defined
"""
session = self.session()
if not session:
raise RuntimeError("Must have active session")
return session
def query(self, model: Any, **kwargs) -> Query:
"""
Args:
model => model of table to query
kwargs => passed to query.filter method
Description:
Wrapper for Session.query, with option to build WHERE clause.
See: https://docs.sqlalchemy.org/en/13/orm/session_api.html#sqlalchemy.orm.session.Session.query
Preconditions:
record is instance of class whose parent class was created using SQLAlchemy's declarative_base.
Raises:
RuntimeError: if self._session isn't defined
"""
# Ensure active session
session = self._assert_session()
query = session.query(model)
for arg in kwargs:
query = query.filter(getattr(model, arg) == kwargs[arg])
return query
def add(self, record: Any, commit: bool = False) -> "DBManager":
"""
Args:
record => record to add to session
commit => whether to commit the transaction after adding record to session
Description:
Wrapper for Session.add, with option to commit the transaction.
See: https://docs.sqlalchemy.org/en/13/orm/session_api.html#sqlalchemy.orm.session.Session.add
Preconditions:
record is instance of class whose parent class was created using SQLAlchemy's declarative_base.
Raises:
RuntimeError: if self._session isn't defined
"""
# Ensure active session
session = self._assert_session()
# Add record to session
session.add(record)
# Commit if asked
if commit:
session.commit()
return self
def delete(self, record: Any, commit: bool = False) -> "DBManager":
"""
Args:
record => record to delete from session
commit => whether to commit the transaction after deleting record from session
Description:
Wrapper for Session.delete, with option to commit the transaction.
See: https://docs.sqlalchemy.org/en/13/orm/session_api.html#sqlalchemy.orm.session.Session.delete
Preconditions:
record is instance of class whose parent class was created using SQLAlchemy's declarative_base.
Raises:
RuntimeError: if self._session isn't defined
"""
# Ensure active session
session = self._assert_session()
# Delete record from session
session.delete(record)
# Commit if asked
if commit:
session.commit()
return self
def commit(self) -> "DBManager":
"""
Args:
N/A
Description:
Wrapper for Session.commit.
See: https://docs.sqlalchemy.org/en/13/orm/session_api.html#sqlalchemy.orm.session.Session.commit
Preconditions:
N/A
Raises:
RuntimeError: if self._session isn't defined
"""
# Ensure active session
session = self._assert_session()
session.commit()
return self
def rollback(self) -> "DBManager":
"""
Args:
N/A
Description:
Wrapper for Session.rollback.
See: https://docs.sqlalchemy.org/en/13/orm/session_api.html#sqlalchemy.orm.session.Session.rollback
Preconditions:
N/A
Raises:
RuntimeError: if self._session isn't defined
"""
# Ensure active session
session = self._assert_session()
session.rollback()
return self
if os.environ.get("ENVIRONMENT") == "TEST":
import unittest
from unittest.mock import patch, mock_open
from tests.common import BaseTable, User
class TestDBManager(unittest.TestCase):
"""Tests for DBManager API."""
def setUp(self):
self.connection_url_default = "postgresql://dbuser@pghost10/appdb"
self.connection_url_with_password = "postgresql://dbuser:kx%25jj5%2Fg@pghost10/appdb"
self.connection_url_sqlite = "sqlite://"
def test_from_file_invalid_filepath(self):
"""Test that invalid filepath to DBManager.from_file
raises FileNotFoundError.
"""
nonexistent_filepath = Path().cwd().joinpath("url_config.txt")
self.assertRaises(FileNotFoundError, DBManager.from_file, nonexistent_filepath)
def test_from_file_invalid_url(self):
"""Test that invalid URL in file passed to DBManager.from_file
raises ValueError.
"""
# |--| port is not number
connection_url = "postgresql+pg8000://dbuser:kx%25jj5%2Fg@pghost10:port/appdb"
with patch("{}.open".format(__name__), mock_open(read_data=connection_url)):
self.assertRaises(ValueError, DBManager.from_file, __file__)
def test_from_file_no_passwd_sqlite(self):
"""Test that if connection URL isn't for SQLite and no
password provided, prompts for password and updates
database connection URL.
"""
passwd = "passphrase"
with patch("{}.getpass".format(__name__), return_value=passwd), \
patch("{}.open".format(__name__), mock_open(read_data=self.connection_url_default)):
manager = DBManager.from_file(__file__)
self.assertEqual(passwd, manager.connection_url.password)
def test_create_engine_with_existing(self):
"""Test that engine creation raises RuntimeError when engine
is already set.
"""
manager = DBManager(self.connection_url_sqlite).create_engine()
self.assertRaises(RuntimeError, manager.create_engine)
def test_close_engine_with_existing(self):
"""Test that engine is set to None if already set."""
manager = DBManager(self.connection_url_sqlite).create_engine()
manager.close_engine()
self.assertIsNone(manager._engine)
def test_bootstrap_db(self):
"""Test that bootstrap_db raises RuntimeError without Engine and MetaData."""
manager = DBManager(self.connection_url_sqlite)
# Bootstrap database without Engine
self.assertRaises(RuntimeError, manager.bootstrap_db)
manager.create_engine()
# Bootstrap database without MetaData
self.assertRaises(RuntimeError, manager.bootstrap_db)
def test_create_session_factory_without_engine(self):
"""Test that session factory creation raises RuntimeError without Engine."""
manager = DBManager(self.connection_url_sqlite)
self.assertRaises(RuntimeError, manager.create_session_factory)
def test_create_session_factory_with_existing(self):
"""Test that session factory creation raises RuntimeError with
existing session factory.
"""
manager = DBManager(self.connection_url_sqlite).connect()
self.assertRaises(RuntimeError, manager.create_session_factory)
def test_gen_session_without_factory(self):
"""Test that session generation raises RuntimeError without session factory."""
manager = DBManager(self.connection_url_sqlite)
self.assertRaises(RuntimeError, manager.gen_session)
manager.create_engine()
self.assertRaises(RuntimeError, manager.gen_session)
def test_gen_session_non_scoped_persist(self):
"""Test that non-scoped session persists to self if persist is True."""
manager = DBManager(self.connection_url_sqlite).connect()
session = manager.gen_session(persist=True)
self.assertIsNotNone(session)
self.assertEqual(session, manager._session)
def test_close_session_with_existing(self):
"""Test that persisted session is set to None if already set."""
manager = DBManager(self.connection_url_sqlite).connect()
manager.gen_session(persist=True)
self.assertIsNotNone(manager._session)
manager.close_session()
self.assertIsNone(manager._session)
def test_session_methods_no_session(self):
"""Test that query, add, delete, commit, and rollback methods fail
without existing Session.
"""
manager = DBManager(self.connection_url_sqlite, metadata=BaseTable.metadata).connect()
user_record = User(first_name="Samuel", last_name="Jackson", email="[email protected]")
self.assertRaises(RuntimeError, manager.query, User)
self.assertRaises(RuntimeError, manager.commit)
self.assertRaises(RuntimeError, manager.rollback)
for method_name in ("add", "delete"):
with self.subTest(test_name=method_name):
self.assertRaises(RuntimeError, getattr(manager, method_name), user_record)
def test_query_where_clause_kwargs(self):
"""Test that kwargs supplied to query get properly passed to session.query.filter
to build WHERE clause.
"""
manager = DBManager(self.connection_url_sqlite, metadata=BaseTable.metadata).connect()
manager.gen_session(persist=True)
expected_query = ("SELECT \"user\".id, \"user\".first_name, \"user\".last_name, \"user\".email "
"FROM \"user\" "
"WHERE \"user\".first_name = 'Samuel' AND \"user\".email = '[email protected]'")
query_str = (str(manager
.query(User, first_name="Samuel", email="[email protected]")
.statement
.compile(compile_kwargs={"literal_binds": True}))
.replace("\n", ""))
self.assertEqual(expected_query, query_str)
| 40.821942 | 119 | 0.628233 | 20,815 | 0.917082 | 0 | 0 | 1,560 | 0.068732 | 0 | 0 | 12,622 | 0.556109 |
9ec6363df3d16f3e41bfd55d3ca8396d912ca17a | 160 | py | Python | mazeexperiment/__main__.py | NickAnderegg/rpacr-mazeexperiment | 3afe6afb10b4ad61a169645e59f2ad0d0f92f565 | [
"MIT"
]
| null | null | null | mazeexperiment/__main__.py | NickAnderegg/rpacr-mazeexperiment | 3afe6afb10b4ad61a169645e59f2ad0d0f92f565 | [
"MIT"
]
| null | null | null | mazeexperiment/__main__.py | NickAnderegg/rpacr-mazeexperiment | 3afe6afb10b4ad61a169645e59f2ad0d0f92f565 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""mazeexperiment.__main__: executed when mazeexperiment directory is called as script."""
from .mazeexperiment import main
main()
| 20 | 90 | 0.71875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.70625 |
9ec70101de03b36989296a10649d2dea72a92c80 | 1,608 | py | Python | kafka_demo_1/producer.py | Aguinore/udemy_kafka_demo | 5f8383e1381dba2ddc0fc656b3cdc66b98258aad | [
"MIT"
]
| null | null | null | kafka_demo_1/producer.py | Aguinore/udemy_kafka_demo | 5f8383e1381dba2ddc0fc656b3cdc66b98258aad | [
"MIT"
]
| null | null | null | kafka_demo_1/producer.py | Aguinore/udemy_kafka_demo | 5f8383e1381dba2ddc0fc656b3cdc66b98258aad | [
"MIT"
]
| null | null | null | from tweepy import StreamListener, OAuthHandler, Stream
from configs import Configs
import sys
class StdOutListener(StreamListener):
def __init__(self, kafka_producer, topic):
super().__init__()
self.kafka_producer = kafka_producer
self.topic = topic
""" A listener handles tweets that are received from the stream.
"""
def on_data(self, data):
self.kafka_producer.produce(topic=self.topic, value=data)
print(data)
return True
def on_error(self, status):
print(status)
def exit_gracefully(kafka_producer):
if kafka_producer is not None:
kafka_producer.flush(30)
print('kafka producer flushed')
sys.exit(0)
def create_twitter_client(kafka_producer, configs):
listener = StdOutListener(kafka_producer, configs.kafka_topic)
auth = OAuthHandler(configs.consumer_key, configs.consumer_secret)
auth.set_access_token(configs.access_token_key, configs.access_token_secret)
return Stream(auth, listener)
def create_kafka_producer():
# https://www.confluent.io/blog/introduction-to-apache-kafka-for-python-programmers/
from confluent_kafka import Producer
p = Producer({'bootstrap.servers': 'localhost:9092',
'acks': 'all',
'enable.idempotence': 'true',
'compression.type': 'snappy'})
return p
configs = Configs()
producer = None
try:
producer = create_kafka_producer()
client = create_twitter_client(producer, configs)
client.filter(track=configs.twitter_topics)
finally:
exit_gracefully(producer)
| 26.8 | 88 | 0.697139 | 452 | 0.281095 | 0 | 0 | 0 | 0 | 0 | 0 | 278 | 0.172886 |
9ec74c2b027410af0c055e866b7e76cb8dc5f04e | 1,717 | py | Python | demo/examples/stability/advection_d2q4.py | bgraille/pylbm | fd4419933e05b85be364232fddedfcb4f7275e1f | [
"BSD-3-Clause"
]
| 106 | 2016-09-13T07:19:17.000Z | 2022-03-19T13:41:55.000Z | demo/examples/stability/advection_d2q4.py | gouarin/pylbm | fd4419933e05b85be364232fddedfcb4f7275e1f | [
"BSD-3-Clause"
]
| 53 | 2017-09-18T04:51:19.000Z | 2022-01-19T21:36:23.000Z | demo/examples/stability/advection_d2q4.py | gouarin/pylbm | fd4419933e05b85be364232fddedfcb4f7275e1f | [
"BSD-3-Clause"
]
| 33 | 2016-06-17T13:21:17.000Z | 2021-11-11T16:57:46.000Z |
"""
Stability analysis of the
D2Q4 solver for the advection equation
d_t(u) + c_x d_x(u) + c_y d_y(u) = 0
"""
import sympy as sp
import pylbm
# pylint: disable=invalid-name
# symbolic variables
U, X, Y = sp.symbols('U, X, Y')
# symbolic parameters
LA, CX, CY = sp.symbols('lambda, cx, cy', constants=True)
S_1, S_2 = sp.symbols('s1, s2', constants=True)
# numerical parameters
la = 1. # velocity of the scheme
s_1, s_2 = 2., 1. # relaxation parameters
c_x, c_y = 0.5, 0.25 # velocity of the advection equation
dico = {
'dim': 2,
'scheme_velocity': LA,
'schemes': [
{
'velocities': [1, 2, 3, 4],
'conserved_moments': U,
'polynomials': [1, X, Y, X**2-Y**2],
'relaxation_parameters': [0, S_1, S_1, S_2],
'equilibrium': [
U,
CX*U, CY*U,
(CX**2-CY**2)*U
],
},
],
'parameters': {
LA: la,
S_1: s_1,
S_2: s_2,
CX: c_x,
CY: c_y,
},
'relative_velocity': [CX, CY],
}
scheme = pylbm.Scheme(dico)
stab = pylbm.Stability(scheme)
stab.visualize({
'parameters': {
CX: {
'range': [0, 1],
'init': c_x,
'step': 0.01,
},
CY: {
'range': [0, 1],
'init': c_y,
'step': 0.01,
},
S_1: {
'name': r"$s_1$",
'range': [0, 2],
'init': s_1,
'step': 0.01,
},
S_2: {
'name': r"$s_2$",
'range': [0, 2],
'init': s_2,
'step': 0.01,
},
},
'number_of_wave_vectors': 4096,
})
| 20.939024 | 58 | 0.438556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 606 | 0.352941 |
9ec7841a173dc4c19d7dac5f98e4c9ddedd5460c | 157 | py | Python | glimix_core/_util/_array.py | Horta/limix-inference | 1ba102fc544f8d307412d361b574da9d4c166f8e | [
"MIT"
]
| 7 | 2019-06-10T12:27:25.000Z | 2021-07-23T16:36:04.000Z | glimix_core/_util/_array.py | Horta/limix-inference | 1ba102fc544f8d307412d361b574da9d4c166f8e | [
"MIT"
]
| 12 | 2017-05-28T10:59:31.000Z | 2021-05-17T20:11:00.000Z | glimix_core/_util/_array.py | Horta/limix-inference | 1ba102fc544f8d307412d361b574da9d4c166f8e | [
"MIT"
]
| 5 | 2017-08-27T20:13:45.000Z | 2022-02-14T06:33:14.000Z | from numpy import reshape
def vec(x):
return reshape(x, (-1,) + x.shape[2:], order="F")
def unvec(x, shape):
return reshape(x, shape, order="F")
| 15.7 | 53 | 0.611465 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.038217 |
9ec859c40962ecf3e9c555e76fd3db0d87f04e0f | 3,386 | py | Python | src/tests/component/test_engine_manager.py | carbonblack/cbc-binary-toolkit | 92c90b80e3c3e0b5c2473ef2086d2ce2fb651db4 | [
"MIT"
]
| 8 | 2020-05-12T18:08:52.000Z | 2021-12-27T06:11:00.000Z | src/tests/component/test_engine_manager.py | carbonblack/cbc-binary-toolkit | 92c90b80e3c3e0b5c2473ef2086d2ce2fb651db4 | [
"MIT"
]
| 4 | 2020-05-13T16:07:49.000Z | 2020-06-30T18:47:14.000Z | src/tests/component/test_engine_manager.py | carbonblack/cbc-binary-toolkit | 92c90b80e3c3e0b5c2473ef2086d2ce2fb651db4 | [
"MIT"
]
| 3 | 2020-05-16T19:57:57.000Z | 2020-11-01T08:43:31.000Z | # -*- coding: utf-8 -*-
# *******************************************************
# Copyright (c) VMware, Inc. 2020-2021. All Rights Reserved.
# SPDX-License-Identifier: MIT
# *******************************************************
# *
# * DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
# * WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
# * EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
# * WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
# * NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""Unit tests for the analysis engine"""
import pytest
from cbc_binary_toolkit import InitializationError
from cbc_binary_toolkit.config import Config
from cbc_binary_toolkit.engine import LocalEngineManager
from cbc_binary_toolkit.schemas import EngineResponseSchema
from tests.component.engine_fixtures.mock_engine import MockLocalEngine
from tests.component.schema_fixtures.mock_data import VALID_BINARY_METADATA, MISSING_FIELDS_BINARY_METADATA
ENGINE_NAME = "MockEngine"
@pytest.fixture(scope="session")
def config():
"""Configuration for all the test cases in this module."""
return Config.load(f"""
id: cbc_binary_toolkit
version: 0.0.1
engine:
name: {ENGINE_NAME}
type: local
_provider: tests.component.engine_fixtures.mock_engine.MockLocalEngineFactory
Test: TestPassed
""")
# ==================================== Unit TESTS BELOW ====================================
def test_create_engine(config):
"""Test successful creation of MockLocalEngine"""
manager = LocalEngineManager(config)
assert isinstance(manager.create_engine(), MockLocalEngine)
def test_analyze(config):
"""Test analyze pass through"""
manager = LocalEngineManager(config)
assert EngineResponseSchema.validate(manager.analyze(VALID_BINARY_METADATA))
@pytest.mark.parametrize("input", [
MISSING_FIELDS_BINARY_METADATA,
{}
])
def test_analyze_invalid_schema(config, input):
"""Test analyze pass through"""
manager = LocalEngineManager(config)
result = manager.analyze(input)
if result["binary_hash"] is not None:
result = EngineResponseSchema.validate(result)
assert not result["success"]
@pytest.mark.parametrize("engine_config, exception", [
["""
id: cbc_binary_toolkit
engine:
name: {ENGINE_NAME}
type: unknown
num_threads: 1
_provider: tests.component.engine_fixtures.mock_engine.MockLocalEngineFactory
""", InitializationError],
["""
id: cbc_binary_toolkit
engine:
name: {ENGINE_NAME}
type: local
_provider: INVALID.INVALID
""", ImportError],
["""
id: cbc_binary_toolkit
engine:
name: {ENGINE_NAME}
type: local
_provider: cbc_binary_toolkit.engine.LocalEngineFactory
""", NotImplementedError],
[f"""
id: cbc_binary_toolkit
version: 0.0.1
engine:
name: {ENGINE_NAME}
type: local
_provider: tests.component.engine_fixtures.mock_engine.MockLocalEngineFactory
""", AssertionError]
])
def test_failed_init(engine_config, exception):
"""Test raised exceptions on init of LocalEngineManager"""
config = Config.load(engine_config)
with pytest.raises(exception):
LocalEngineManager(config)
| 30.781818 | 107 | 0.672475 | 0 | 0 | 0 | 0 | 1,854 | 0.547549 | 0 | 0 | 1,958 | 0.578263 |
9ec95a1a1ec287a29e316037c8a1f39e97c4bff8 | 97 | py | Python | funolympics/apps.py | codeema/Yokiyo | 2e710bca487ee393784c116b7db2db7337f73d40 | [
"MIT"
]
| null | null | null | funolympics/apps.py | codeema/Yokiyo | 2e710bca487ee393784c116b7db2db7337f73d40 | [
"MIT"
]
| 6 | 2020-05-20T15:29:55.000Z | 2021-09-08T02:02:43.000Z | funolympics/apps.py | codeema/Yokiyo | 2e710bca487ee393784c116b7db2db7337f73d40 | [
"MIT"
]
| null | null | null | from django.apps import AppConfig
class FunolympicsConfig(AppConfig):
name = 'funolympics'
| 16.166667 | 35 | 0.773196 | 60 | 0.618557 | 0 | 0 | 0 | 0 | 0 | 0 | 13 | 0.134021 |
9eca8cb06280c8af6786e7a410286dc58b44dac0 | 5,734 | py | Python | src/gt4sd/algorithms/generation/polymer_blocks/core.py | hhhsu0825/gt4sd-core | 4a1fe9da58d2f33bba2fba64604427e037ad7a46 | [
"MIT"
]
| null | null | null | src/gt4sd/algorithms/generation/polymer_blocks/core.py | hhhsu0825/gt4sd-core | 4a1fe9da58d2f33bba2fba64604427e037ad7a46 | [
"MIT"
]
| null | null | null | src/gt4sd/algorithms/generation/polymer_blocks/core.py | hhhsu0825/gt4sd-core | 4a1fe9da58d2f33bba2fba64604427e037ad7a46 | [
"MIT"
]
| null | null | null | """PaccMann vanilla generator trained on polymer building blocks (catalysts/monomers)."""
import logging
import os
from dataclasses import field
from typing import ClassVar, Dict, Optional, TypeVar
from ....domains.materials import SmallMolecule, validate_molecules
from ....exceptions import InvalidItem
from ....training_pipelines.core import TrainingPipelineArguments
from ....training_pipelines.paccmann.core import PaccMannSavingArguments
from ...core import AlgorithmConfiguration, GeneratorAlgorithm, Untargeted
from ...registry import ApplicationsRegistry
from .implementation import Generator
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
T = type(None)
S = TypeVar("S", bound=SmallMolecule)
class PolymerBlocks(GeneratorAlgorithm[S, T]):
def __init__(
self, configuration: AlgorithmConfiguration, target: Optional[T] = None
):
"""Polymer blocks generation.
Args:
configuration: domain and application
specification, defining types and validations.
target: unused since it is not a conditional generator.
Example:
An example for generating small molecules (SMILES) that resembles
monomers/catalysts for polymer synthesis::
configuration = PolymerBlocksGenerator()
polymer_blocks = PolymerBlocks(configuration=configuration)
items = list(polymer_blocks.sample(10))
print(items)
"""
configuration = self.validate_configuration(configuration)
# TODO there might also be a validation/check on the target input
super().__init__(
configuration=configuration,
target=None, # type:ignore
)
def get_generator(
self,
configuration: AlgorithmConfiguration[S, T],
target: Optional[T],
) -> Untargeted:
"""Get the function to sample batches via the Generator.
Args:
configuration: helps to set up the application.
target: context or condition for the generation. Unused in the algorithm.
Returns:
callable generating a batch of items.
"""
logger.info("ensure artifacts for the application are present.")
self.local_artifacts = configuration.ensure_artifacts()
implementation: Generator = configuration.get_conditional_generator( # type: ignore
self.local_artifacts
)
return implementation.sample
def validate_configuration(
self, configuration: AlgorithmConfiguration
) -> AlgorithmConfiguration:
# TODO raise InvalidAlgorithmConfiguration
assert isinstance(configuration, AlgorithmConfiguration)
return configuration
@ApplicationsRegistry.register_algorithm_application(PolymerBlocks)
class PolymerBlocksGenerator(AlgorithmConfiguration[SmallMolecule, None]):
"""Configuration to generate subunits of polymers."""
algorithm_type: ClassVar[str] = "generation"
domain: ClassVar[str] = "materials"
algorithm_version: str = "v0"
batch_size: int = field(
default=32,
metadata=dict(description="Batch size used for the generative model sampling."),
)
generated_length: int = field(
default=100,
metadata=dict(
description="Maximum length in tokens of the generated molcules (relates to the SMILES length)."
),
)
def get_target_description(self) -> Optional[Dict[str, str]]:
"""Get description of the target for generation.
Returns:
target description, returns None in case no target is used.
"""
return None
def get_conditional_generator(self, resources_path: str) -> Generator:
return Generator(
resources_path=resources_path,
generated_length=self.generated_length,
batch_size=self.batch_size,
)
def validate_item(self, item: str) -> SmallMolecule:
(
molecules,
_,
) = validate_molecules([item])
if molecules[0] is None:
raise InvalidItem(
title="InvalidSMILES",
detail=f'rdkit.Chem.MolFromSmiles returned None for "{item}"',
)
return SmallMolecule(item)
@classmethod
def get_filepath_mappings_for_training_pipeline_arguments(
cls, training_pipeline_arguments: TrainingPipelineArguments
) -> Dict[str, str]:
"""Ger filepath mappings for the given training pipeline arguments.
Args:
training_pipeline_arguments: training pipeline arguments.
Returns:
a mapping between artifacts' files and training pipeline's output files.
"""
if isinstance(training_pipeline_arguments, PaccMannSavingArguments):
return {
"smiles_language.pkl": os.path.join(
training_pipeline_arguments.model_path,
f"{training_pipeline_arguments.training_name}.lang",
),
"params.json": os.path.join(
training_pipeline_arguments.model_path,
training_pipeline_arguments.training_name,
"model_params.json",
),
"weights.pt": os.path.join(
training_pipeline_arguments.model_path,
training_pipeline_arguments.training_name,
"weights",
"best_rec.pt",
),
}
else:
return super().get_filepath_mappings_for_training_pipeline_arguments(
training_pipeline_arguments
)
| 35.614907 | 108 | 0.646495 | 4,923 | 0.858563 | 0 | 0 | 2,944 | 0.513429 | 0 | 0 | 2,021 | 0.352459 |
9ecbaf805798824811c8f44248c90470a6ab1527 | 4,458 | py | Python | src/form/panel/MultiPanel.py | kaorin/vmd_sizing | e609299a0acaa17bd34487314b05bab6af6819d8 | [
"MIT"
]
| 32 | 2019-05-05T13:08:51.000Z | 2022-03-11T07:13:27.000Z | src/form/panel/MultiPanel.py | kaorin/vmd_sizing | e609299a0acaa17bd34487314b05bab6af6819d8 | [
"MIT"
]
| 3 | 2019-07-13T03:06:15.000Z | 2021-11-03T10:30:15.000Z | src/form/panel/MultiPanel.py | kaorin/vmd_sizing | e609299a0acaa17bd34487314b05bab6af6819d8 | [
"MIT"
]
| 11 | 2019-07-15T17:49:09.000Z | 2022-03-20T10:40:27.000Z | # -*- coding: utf-8 -*-
#
import wx
import wx.lib.newevent
from form.panel.BasePanel import BasePanel
from form.parts.SizingFileSet import SizingFileSet
from module.MMath import MRect, MVector3D, MVector4D, MQuaternion, MMatrix4x4 # noqa
from utils import MFileUtils # noqa
from utils.MLogger import MLogger # noqa
logger = MLogger(__name__)
class MultiPanel(BasePanel):
def __init__(self, frame: wx.Frame, parent: wx.Notebook, tab_idx: int, file_hitories: dict):
super().__init__(frame, parent, tab_idx)
self.file_hitories = file_hitories
self.header_panel = wx.Panel(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL)
self.header_sizer = wx.BoxSizer(wx.VERTICAL)
self.description_txt = wx.StaticText(self.header_panel, wx.ID_ANY, "複数人数モーションなどを比率を合わせてサイジングする事ができます。2人目以降を指定して下さい。" \
+ "\n縮尺を強制的に変えてますので、足などが元モーションからズレる場合があります。" \
+ "\n間違えてファイルセットを追加してしまった場合は、4つのファイル欄をすべて空にしてください。", wx.DefaultPosition, wx.DefaultSize, 0)
self.header_sizer.Add(self.description_txt, 0, wx.ALL, 5)
self.btn_sizer = wx.BoxSizer(wx.HORIZONTAL)
# ファイルセットクリアボタン
self.clear_btn_ctrl = wx.Button(self.header_panel, wx.ID_ANY, u"ファイルセットクリア", wx.DefaultPosition, wx.DefaultSize, 0)
self.clear_btn_ctrl.SetToolTip(u"既に入力されたデータをすべて空にします。")
self.clear_btn_ctrl.Bind(wx.EVT_BUTTON, self.on_clear_set)
self.btn_sizer.Add(self.clear_btn_ctrl, 0, wx.ALL, 5)
# ファイルセットクリアボタン
self.add_btn_ctrl = wx.Button(self.header_panel, wx.ID_ANY, u"ファイルセット追加", wx.DefaultPosition, wx.DefaultSize, 0)
self.add_btn_ctrl.SetToolTip(u"サイジングに必要なファイルセットをパネルに追加します。")
self.add_btn_ctrl.Bind(wx.EVT_BUTTON, self.on_add_set)
self.btn_sizer.Add(self.add_btn_ctrl, 0, wx.ALL, 5)
self.header_sizer.Add(self.btn_sizer, 0, wx.ALIGN_RIGHT | wx.ALL, 5)
self.header_panel.SetSizer(self.header_sizer)
self.header_panel.Layout()
self.sizer.Add(self.header_panel, 0, wx.EXPAND | wx.ALL, 5)
# ファイルセット
self.file_set_list = []
# ファイルセット用基本Sizer
self.set_base_sizer = wx.BoxSizer(wx.VERTICAL)
self.scrolled_window = MultiFileSetScrolledWindow(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, \
wx.FULL_REPAINT_ON_RESIZE | wx.VSCROLL | wx.ALWAYS_SHOW_SB)
# self.scrolled_window.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_3DLIGHT))
# self.scrolled_window.SetBackgroundColour("BLUE")
self.scrolled_window.SetScrollRate(5, 5)
self.scrolled_window.set_file_set_list(self.file_set_list)
self.scrolled_window.SetSizer(self.set_base_sizer)
self.scrolled_window.Layout()
self.sizer.Add(self.scrolled_window, 1, wx.ALL | wx.EXPAND | wx.FIXED_MINSIZE, 5)
self.fit()
def on_add_set(self, event: wx.Event):
self.file_set_list.append(SizingFileSet(self.frame, self.scrolled_window, self.file_hitories, len(self.file_set_list) + 2))
self.set_base_sizer.Add(self.file_set_list[-1].set_sizer, 0, wx.ALL, 5)
self.set_base_sizer.Layout()
# スクロールバーの表示のためにサイズ調整
self.sizer.Layout()
# self.sizer.FitInside(self.scrolled_window)
if self.frame.arm_panel_ctrl.arm_alignment_finger_flg_ctrl.GetValue() and len(self.file_set_list) > 0:
self.frame.on_popup_finger_warning(event)
event.Skip()
def on_clear_set(self, event: wx.Event):
for file_set in self.file_set_list:
file_set.motion_vmd_file_ctrl.file_ctrl.SetPath("")
file_set.rep_model_file_ctrl.file_ctrl.SetPath("")
file_set.org_model_file_ctrl.file_ctrl.SetPath("")
file_set.output_vmd_file_ctrl.file_ctrl.SetPath("")
# フォーム無効化
def disable(self):
self.file_set.disable()
# フォーム無効化
def enable(self):
self.file_set.enable()
class MultiFileSetScrolledWindow(wx.ScrolledWindow):
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
def set_file_set_list(self, file_set_list):
self.file_set_list = file_set_list
def set_output_vmd_path(self, event, is_force=False):
for file_set in self.file_set_list:
file_set.set_output_vmd_path(event, is_force)
| 42.056604 | 136 | 0.680126 | 4,641 | 0.9282 | 0 | 0 | 0 | 0 | 0 | 0 | 1,093 | 0.2186 |
9ecd3fdffb0348d1335d2b0ee06d51e7c7681296 | 1,261 | py | Python | androgui.py | nawfling/androguard | 67b992ce0feeeb01bc69a99257916487689c3bcf | [
"Apache-2.0"
]
| 1 | 2019-03-29T19:24:23.000Z | 2019-03-29T19:24:23.000Z | androgui.py | adiltirur/malware_classification | 67b992ce0feeeb01bc69a99257916487689c3bcf | [
"Apache-2.0"
]
| null | null | null | androgui.py | adiltirur/malware_classification | 67b992ce0feeeb01bc69a99257916487689c3bcf | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env python
"""Androguard Gui"""
import argparse
import os
import sys
from androguard.core import androconf
from androguard.gui.mainwindow import MainWindow
from PyQt5 import QtWidgets, QtGui
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Androguard GUI")
parser.add_argument("-d", "--debug", action="store_true", default=False)
parser.add_argument("-i", "--input_file", default=None)
parser.add_argument("-p", "--input_plugin", default=None)
args = parser.parse_args()
if args.debug:
androconf.set_debug()
# We need that to save huge sessions when leaving and avoid
# RuntimeError: maximum recursion depth exceeded while pickling an object
# or
# RuntimeError: maximum recursion depth exceeded in cmp
# http://stackoverflow.com/questions/2134706/hitting-maximum-recursion-depth-using-pythons-pickle-cpickle
sys.setrecursionlimit(50000)
app = QtWidgets.QApplication(sys.argv)
app.setWindowIcon(QtGui.QIcon(os.path.join(androconf.CONF['data_prefix'], "androguard.ico")))
window = MainWindow(input_file=args.input_file,
input_plugin=args.input_plugin)
window.resize(1024, 768)
window.show()
sys.exit(app.exec_())
| 31.525 | 109 | 0.716891 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 455 | 0.360825 |
9ecd99d19c3e1460adaaef7fa6dcf5ae53718429 | 2,551 | py | Python | python-trunk/sfapi2/sflib/ZSI/wstools/XMLname.py | raychorn/svn_molten-magma | 8aa2ff2340707eecae6514943e86f5afba9cd54a | [
"CC0-1.0"
]
| null | null | null | python-trunk/sfapi2/sflib/ZSI/wstools/XMLname.py | raychorn/svn_molten-magma | 8aa2ff2340707eecae6514943e86f5afba9cd54a | [
"CC0-1.0"
]
| null | null | null | python-trunk/sfapi2/sflib/ZSI/wstools/XMLname.py | raychorn/svn_molten-magma | 8aa2ff2340707eecae6514943e86f5afba9cd54a | [
"CC0-1.0"
]
| null | null | null | """Translate strings to and from SOAP 1.2 XML name encoding
Implements rules for mapping application defined name to XML names
specified by the w3 SOAP working group for SOAP version 1.2 in
Appendix A of "SOAP Version 1.2 Part 2: Adjuncts", W3C Working Draft
17, December 2001, <http://www.w3.org/TR/soap12-part2/#namemap>
Also see <http://www.w3.org/2000/xp/Group/xmlp-issues>.
Author: Gregory R. Warnes <[email protected]>
Date:: 2002-04-25
Version 0.9.0
"""
ident = "$Id: XMLname.py 25 2006-05-24 18:12:14Z misha $"
from re import *
def _NCNameChar(x):
return x.isalpha() or x.isdigit() or x=="." or x=='-' or x=="_"
def _NCNameStartChar(x):
return x.isalpha() or x=="_"
def _toUnicodeHex(x):
hexval = hex(ord(x[0]))[2:]
hexlen = len(hexval)
# Make hexval have either 4 or 8 digits by prepending 0's
if (hexlen==1): hexval = "000" + hexval
elif (hexlen==2): hexval = "00" + hexval
elif (hexlen==3): hexval = "0" + hexval
elif (hexlen==4): hexval = "" + hexval
elif (hexlen==5): hexval = "000" + hexval
elif (hexlen==6): hexval = "00" + hexval
elif (hexlen==7): hexval = "0" + hexval
elif (hexlen==8): hexval = "" + hexval
else: raise Exception, "Illegal Value returned from hex(ord(x))"
return "_x"+ hexval + "_"
def _fromUnicodeHex(x):
return eval( r'u"\u'+x[2:-1]+'"' )
def toXMLname(string):
"""Convert string to a XML name."""
if string.find(':') != -1 :
(prefix, localname) = string.split(':',1)
else:
prefix = None
localname = string
T = unicode(localname)
N = len(localname)
X = [];
for i in range(N) :
if i< N-1 and T[i]==u'_' and T[i+1]==u'x':
X.append(u'_x005F_')
elif i==0 and N >= 3 and \
( T[0]==u'x' or T[0]==u'X' ) and \
( T[1]==u'm' or T[1]==u'M' ) and \
( T[2]==u'l' or T[2]==u'L' ):
X.append(u'_xFFFF_' + T[0])
elif (not _NCNameChar(T[i])) or (i==0 and not _NCNameStartChar(T[i])):
X.append(_toUnicodeHex(T[i]))
else:
X.append(T[i])
return u''.join(X)
def fromXMLname(string):
"""Convert XML name to unicode string."""
retval = sub(r'_xFFFF_','', string )
def fun( matchobj ):
return _fromUnicodeHex( matchobj.group(0) )
retval = sub(r'_x[0-9A-Za-z]+_', fun, retval )
return retval
| 28.662921 | 79 | 0.547236 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 867 | 0.339867 |
9ecf156b5761ad136db575bc3923db3ea214ba15 | 5,939 | py | Python | mmtbx/validation/regression/tst_restraints.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
]
| 155 | 2016-11-23T12:52:16.000Z | 2022-03-31T15:35:44.000Z | mmtbx/validation/regression/tst_restraints.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
]
| 590 | 2016-12-10T11:31:18.000Z | 2022-03-30T23:10:09.000Z | mmtbx/validation/regression/tst_restraints.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
]
| 115 | 2016-11-15T08:17:28.000Z | 2022-02-09T15:30:14.000Z |
from __future__ import absolute_import, division, print_function
from libtbx.utils import null_out
from libtbx import easy_pickle
from six.moves import cStringIO as StringIO
def run_validation(pdb_file, ignore_hd=True):
from mmtbx.validation import restraints
import mmtbx.command_line
cmdline = mmtbx.command_line.load_model_and_data(
args=[pdb_file],
master_phil=mmtbx.command_line.generic_simple_input_phil(),
process_pdb_file=True,
require_data=False,
out=null_out())
validation = restraints.combined(
pdb_hierarchy=cmdline.pdb_hierarchy,
xray_structure=cmdline.xray_structure,
geometry_restraints_manager=cmdline.geometry,
ignore_hd=ignore_hd)
return validation
def exercise_simple():
# extracted from 1lyz, with hydrogens from reduce
pdb_in = """
ATOM 1 N LYS A 1 3.296 9.888 10.739 1.00 7.00 N
ATOM 2 CA LYS A 1 2.439 10.217 9.791 1.00 6.00 C
ATOM 3 C LYS A 1 2.439 11.997 9.160 1.00 6.00 C
ATOM 4 O LYS A 1 2.637 12.656 10.107 1.00 8.00 O
ATOM 5 CB LYS A 1 0.659 10.086 8.844 1.00 6.00 C
ATOM 6 CG LYS A 1 0.198 10.415 8.086 1.00 6.00 C
ATOM 7 CD LYS A 1 -1.187 10.086 8.212 1.00 6.00 C
ATOM 8 CE LYS A 1 -2.175 10.086 7.264 1.00 6.00 C
ATOM 9 NZ LYS A 1 -3.527 9.869 7.288 1.00 7.00 N
ATOM 0 H1 LYS A 1 3.156 9.045 10.986 1.00 7.00 H
ATOM 0 H2 LYS A 1 4.127 9.972 10.431 1.00 7.00 H
ATOM 0 H3 LYS A 1 3.184 10.425 11.440 1.00 7.00 H
ATOM 0 HA LYS A 1 2.772 9.314 9.912 1.00 6.00 H
ATOM 0 HB2 LYS A 1 0.584 9.128 8.712 1.00 6.00 H
ATOM 0 HB3 LYS A 1 0.046 10.323 9.557 1.00 6.00 H
ATOM 0 HG2 LYS A 1 0.310 11.376 8.015 1.00 6.00 H
ATOM 0 HG3 LYS A 1 0.563 10.027 7.276 1.00 6.00 H
ATOM 0 HD2 LYS A 1 -1.193 9.186 8.573 1.00 6.00 H
ATOM 0 HD3 LYS A 1 -1.516 10.674 8.910 1.00 6.00 H
ATOM 0 HE2 LYS A 1 -2.097 10.964 6.860 1.00 6.00 H
ATOM 0 HE3 LYS A 1 -1.857 9.444 6.610 1.00 6.00 H
ATOM 0 HZ1 LYS A 1 -3.725 9.170 6.774 1.00 7.00 H
ATOM 0 HZ2 LYS A 1 -3.787 9.706 8.123 1.00 7.00 H
ATOM 0 HZ3 LYS A 1 -3.949 10.590 6.982 1.00 7.00 H
ATOM 10 N VAL A 2 2.637 12.722 7.707 1.00 7.00 N
ATOM 11 CA VAL A 2 2.307 14.172 7.580 1.00 6.00 C
ATOM 12 C VAL A 2 0.857 14.041 6.949 1.00 6.00 C
ATOM 13 O VAL A 2 0.659 13.843 5.875 1.00 8.00 O
ATOM 14 CB VAL A 2 3.625 14.172 6.759 1.00 6.00 C
ATOM 15 CG1 VAL A 2 3.494 15.491 6.317 1.00 6.00 C
ATOM 16 CG2 VAL A 2 4.746 13.843 7.580 1.00 6.00 C
ATOM 0 H VAL A 2 2.920 12.338 6.992 1.00 7.00 H
ATOM 0 HA VAL A 2 2.195 14.925 8.181 1.00 6.00 H
ATOM 0 HB VAL A 2 3.767 13.528 6.048 1.00 6.00 H
ATOM 0 HG11 VAL A 2 4.250 15.721 5.755 1.00 6.00 H
ATOM 0 HG12 VAL A 2 2.674 15.582 5.808 1.00 6.00 H
ATOM 0 HG13 VAL A 2 3.467 16.087 7.081 1.00 6.00 H
ATOM 0 HG21 VAL A 2 5.554 13.850 7.043 1.00 6.00 H
ATOM 0 HG22 VAL A 2 4.827 14.495 8.294 1.00 6.00 H
ATOM 0 HG23 VAL A 2 4.620 12.960 7.962 1.00 6.00 H
END
"""
pdb_file = "tst_validate_restraints_simple.pdb"
open(pdb_file, "w").write(pdb_in)
v1 = run_validation(pdb_file, ignore_hd=True)
out1 = StringIO()
v1.show(out=out1)
assert ("""
----------Chiral volumes----------
atoms ideal model delta sigma residual deviation
A 1 LYS CA
A 1 LYS N
A 1 LYS C
A 1 LYS CB 2.57 1.12 1.45 2.00e-01 5.25e+01 7.2*sigma
""" in "\n".join([ l.rstrip() for l in out1.getvalue().splitlines() ]))
s = easy_pickle.dumps(v1)
v1p = easy_pickle.loads(s)
out1p = StringIO()
v1p.show(out=out1p)
assert (out1.getvalue() == out1p.getvalue())
v2 = run_validation(pdb_file, ignore_hd=False)
out2 = StringIO()
v2.show(out=out2)
assert (out2.getvalue() != out1.getvalue())
assert ("""\
A 1 LYS HA 110.00 57.00 53.00 3.00e+00 3.12e+02 17.7*sigma
A 1 LYS N
A 1 LYS CA
""" in "\n".join([ l.rstrip() for l in out2.getvalue().splitlines() ]))
#
# C-alpha-only model (from 3b5d)
pdb_raw = """\
CRYST1 115.100 43.700 76.400 90.00 108.10 90.00 C 1 2 1 8
ATOM 1 CA TYR A 6 -7.551 -11.355 -17.946 1.00148.04 C
ATOM 2 CA LEU A 7 -8.052 -8.804 -20.730 1.00310.75 C
ATOM 3 CA GLY A 8 -10.874 -6.691 -19.353 1.00158.95 C
ATOM 4 CA GLY A 9 -9.359 -7.332 -15.966 1.00217.68 C
ATOM 5 CA ALA A 10 -5.806 -6.508 -16.946 1.00239.12 C
ATOM 6 CA ILE A 11 -7.024 -3.514 -18.905 1.00103.16 C
ATOM 7 CA LEU A 12 -10.023 -2.071 -17.056 1.00230.80 C
ATOM 8 CA ALA A 13 -7.313 -1.820 -14.420 1.00141.04 C
"""
pdb_file = "tst_validate_restraints_calpha.pdb"
open(pdb_file, "w").write(pdb_raw)
v1 = run_validation(pdb_file, ignore_hd=True)
if (__name__ == "__main__"):
exercise_simple()
print("OK")
| 51.198276 | 79 | 0.506651 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,453 | 0.74979 |
9ecfe7e3194f0f7656e10dd2b39c230900905bf9 | 887 | py | Python | Python/repeated-dna-sequences.py | sm2774us/leetcode_interview_prep_2021 | 33b41bea66c266b733372d9a8b9d2965cd88bf8c | [
"Fair"
]
| null | null | null | Python/repeated-dna-sequences.py | sm2774us/leetcode_interview_prep_2021 | 33b41bea66c266b733372d9a8b9d2965cd88bf8c | [
"Fair"
]
| null | null | null | Python/repeated-dna-sequences.py | sm2774us/leetcode_interview_prep_2021 | 33b41bea66c266b733372d9a8b9d2965cd88bf8c | [
"Fair"
]
| null | null | null | # Time: O(n)
# Space: O(n)
import collections
class Solution(object):
def findRepeatedDnaSequences(self, s):
"""
:type s: str
:rtype: List[str]
"""
dict, rolling_hash, res = {}, 0, []
for i in range(len(s)):
rolling_hash = ((rolling_hash << 3) & 0x3fffffff) | (ord(s[i]) & 7)
if rolling_hash not in dict:
dict[rolling_hash] = True
elif dict[rolling_hash]:
res.append(s[i - 9: i + 1])
dict[rolling_hash] = False
return res
def findRepeatedDnaSequences2(self, s):
"""
:type s: str
:rtype: List[str]
"""
l, r = [], []
if len(s) < 10: return []
for i in range(len(s) - 9):
l.extend([s[i:i + 10]])
return [k for k, v in collections.Counter(l).items() if v > 1]
| 25.342857 | 79 | 0.476888 | 835 | 0.941375 | 0 | 0 | 0 | 0 | 0 | 0 | 150 | 0.169109 |
9ecff0d2def72853bb2077007cb31a53e1e71834 | 231 | py | Python | recipe/app.py | Udayan-Coding/examples | 720515bf614f4edd08c734cc5a708d8a2618522d | [
"MIT"
]
| 1 | 2021-01-04T17:17:14.000Z | 2021-01-04T17:17:14.000Z | recipe/app.py | Udayan-Coding/examples | 720515bf614f4edd08c734cc5a708d8a2618522d | [
"MIT"
]
| null | null | null | recipe/app.py | Udayan-Coding/examples | 720515bf614f4edd08c734cc5a708d8a2618522d | [
"MIT"
]
| 1 | 2021-01-31T11:10:44.000Z | 2021-01-31T11:10:44.000Z | from flask import Flask, render_template, request
app = Flask(__name__)
@app.route("/")
def hello():
return render_template("index.html", name="WORLD!")
@app.route("/about")
def about():
return render_template("about.html")
| 19.25 | 53 | 0.709957 | 0 | 0 | 0 | 0 | 154 | 0.666667 | 0 | 0 | 43 | 0.186147 |
9ed032bb75772e44674a7c37bb30bc62c636bc41 | 3,695 | py | Python | step2.py | mosheliv/tfcollab1 | 50da5683fb40a50cb957aeca2d28bc9f72440813 | [
"MIT"
]
| null | null | null | step2.py | mosheliv/tfcollab1 | 50da5683fb40a50cb957aeca2d28bc9f72440813 | [
"MIT"
]
| null | null | null | step2.py | mosheliv/tfcollab1 | 50da5683fb40a50cb957aeca2d28bc9f72440813 | [
"MIT"
]
| null | null | null | """
Usage:
# From tensorflow/models/
# Create train data:
python generate_tfrecord.py --csv_input=data/train_labels.csv --output_path=train.record
# Create test data:
python generate_tfrecord.py --csv_input=data/test_labels.csv --output_path=test.record
"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import os
import io
import pandas as pd
import tensorflow as tf
from PIL import Image
from collections import namedtuple, OrderedDict
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def _bytes_list_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
def _float_list_feature(value):
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
def _int64_list_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
flags = tf.app.flags
flags.DEFINE_string('image_dir', '', 'Path to the image directory')
flags.DEFINE_string('csv_input', '', 'Path to the CSV input')
flags.DEFINE_string('output_path', '', 'Path to output TFRecord')
FLAGS = flags.FLAGS
# TO-DO replace this with label map
def class_text_to_int(row_label):
if row_label == 'Blackbird':
return 1
else:
None
def split(df, group):
data = namedtuple('data', ['filename', 'object'])
gb = df.groupby(group)
return [data(filename, gb.get_group(x)) for filename, x in zip(gb.groups.keys(), gb.groups)]
def create_tf_example(group, path):
with tf.gfile.GFile(os.path.join(path, '{}'.format(group.filename)), 'rb') as fid:
encoded_jpg = fid.read()
encoded_jpg_io = io.BytesIO(encoded_jpg)
image = Image.open(encoded_jpg_io)
width, height = image.size
filename = group.filename.encode('utf8')
image_format = b'jpg'
xmins = []
xmaxs = []
ymins = []
ymaxs = []
classes_text = []
classes = []
for index, row in group.object.iterrows():
xmins.append(row['xmin'])
xmaxs.append(row['xmax'])
ymins.append(row['ymin'])
ymaxs.append(row['ymax'])
classes_text.append(row['class'].encode('utf8'))
classes.append(class_text_to_int(row['class']))
tf_example = tf.train.Example(features=tf.train.Features(feature={
'image/height': _int64_feature(height),
'image/width': _int64_feature(width),
'image/filename': _bytes_feature(filename),
'image/source_id': _bytes_feature(filename),
'image/encoded': _bytes_feature(encoded_jpg),
'image/format': _bytes_feature(image_format),
'image/object/bbox/xmin': _float_list_feature(xmins),
'image/object/bbox/xmax': _float_list_feature(xmaxs),
'image/object/bbox/ymin': _float_list_feature(ymins),
'image/object/bbox/ymax': _float_list_feature(ymaxs),
'image/object/class/text': _bytes_list_feature(classes_text),
'image/object/class/label': _int64_list_feature(classes),
}))
return tf_example
def main(_):
writer = tf.python_io.TFRecordWriter(FLAGS.output_path)
path = FLAGS.image_dir
examples = pd.read_csv(FLAGS.csv_input)
print(examples.columns.values)
grouped = split(examples, 'filename')
for group in grouped:
tf_example = create_tf_example(group, path)
writer.write(tf_example.SerializeToString())
writer.close()
output_path = os.path.join(os.getcwd(), FLAGS.output_path)
print('Successfully created the TFRecords: {}'.format(output_path))
if __name__ == '__main__':
tf.app.run()
| 32.991071 | 96 | 0.700677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 818 | 0.22138 |
9ed0bf65b8f404e11c189c592c88427ef28a69fc | 685 | py | Python | lh_lib/sensors/esp32/touch.py | lh70/s-connect-python | 5a4ca17690ec700b36faf69ea744c514f532cc48 | [
"Apache-2.0"
]
| null | null | null | lh_lib/sensors/esp32/touch.py | lh70/s-connect-python | 5a4ca17690ec700b36faf69ea744c514f532cc48 | [
"Apache-2.0"
]
| null | null | null | lh_lib/sensors/esp32/touch.py | lh70/s-connect-python | 5a4ca17690ec700b36faf69ea744c514f532cc48 | [
"Apache-2.0"
]
| null | null | null | from machine import Pin
from lh_lib.sensors.sensor import AbstractSensor
class Touch(AbstractSensor):
"""
This represents a touch sensor with integrated Logic, where there is only one output pin,
which digitally represents the touched state.
pin:integer can be one of all available GPIO pins: 0-19, 21-23, 25-27, 32-39
it is NOT recommended to pick one of the following pins: (1, 3) -> serial, (6, 7, 8, 11, 16, 17) -> embedded flash
"""
def __init__(self, pin=35):
super().__init__()
self.pin = Pin(pin, Pin.IN)
"""
sets 0 for LOW and 1 for HIGH
"""
def update(self):
self.value = self.pin.value()
| 28.541667 | 130 | 0.636496 | 608 | 0.887591 | 0 | 0 | 0 | 0 | 0 | 0 | 413 | 0.60292 |
9ed2a743eca4dbe121cf458e5a0377ba7b5dca61 | 385 | py | Python | algorithms/python/118.py | viing937/leetcode | e21ca52c98bddf59e43522c0aace5e8cf84350eb | [
"MIT"
]
| 3 | 2016-10-01T10:15:09.000Z | 2017-07-09T02:53:36.000Z | algorithms/python/118.py | viing937/leetcode | e21ca52c98bddf59e43522c0aace5e8cf84350eb | [
"MIT"
]
| null | null | null | algorithms/python/118.py | viing937/leetcode | e21ca52c98bddf59e43522c0aace5e8cf84350eb | [
"MIT"
]
| null | null | null | class Solution:
def generate(self, numRows):
"""
:type numRows: int
:rtype: List[List[int]]
"""
if numRows == 0: return []
rls = [[1]]
for i in range(2, numRows+1):
row = [1] * i
for j in range(1, i-1):
row[j] = rls[-1][j-1] + rls[-1][j]
rls.append(row)
return rls
| 25.666667 | 50 | 0.415584 | 384 | 0.997403 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.192208 |
9ed2d77b6c8c12c27e466fb716c2e65ea3ea3aaa | 2,579 | py | Python | squeeze_and_excitation_networks/datasets/data_loader.py | younnggsuk/CV-Paper-Implementation | fecd67d3f216872976f9b38445ce1c1f9ef1ac02 | [
"MIT"
]
| 4 | 2021-06-03T13:56:51.000Z | 2021-11-05T06:22:25.000Z | densely_connected_convolutional_networks/datasets/data_loader.py | younnggsuk/CV-Paper-Implementation | fecd67d3f216872976f9b38445ce1c1f9ef1ac02 | [
"MIT"
]
| null | null | null | densely_connected_convolutional_networks/datasets/data_loader.py | younnggsuk/CV-Paper-Implementation | fecd67d3f216872976f9b38445ce1c1f9ef1ac02 | [
"MIT"
]
| 1 | 2022-03-28T09:34:03.000Z | 2022-03-28T09:34:03.000Z | import os
import cv2
import albumentations as A
from albumentations.pytorch import ToTensorV2
from torch.utils.data import Dataset, DataLoader
from sklearn.model_selection import train_test_split
__all__ = ['CatDogDataset', 'fetch_dataloader']
class CatDogDataset(Dataset):
def __init__(self, file_paths, labels, transform=None):
self.file_paths = file_paths
self.labels = labels
self.transform = transform
def __len__(self):
return len(self.file_paths)
def __getitem__(self, idx):
label = self.labels[idx]
file_path = self.file_paths[idx]
image = cv2.imread(file_path)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
if self.transform:
transformed = self.transform(image=image)
image = transformed["image"]
return image, label
def fetch_dataloader(types, data_dir, batch_size, num_workers):
dataloaders = {}
train_dir = os.path.join(data_dir, "train")
train_files = sorted(os.listdir(train_dir))
train_labels = []
for file in train_files:
if "cat" in file:
train_labels.append(0)
else:
train_labels.append(1)
train_file_paths = [os.path.join(train_dir, path) for path in train_files]
train_file_paths, val_file_paths, train_labels, val_labels = train_test_split(
train_file_paths, train_labels, stratify=train_labels, random_state=42
)
train_transform = A.Compose([
A.SmallestMaxSize(max_size=256),
A.HorizontalFlip(p=0.5),
A.RandomCrop(224, 224),
A.Normalize(),
ToTensorV2()
])
eval_transform = A.Compose([
A.SmallestMaxSize(max_size=256),
A.CenterCrop(224, 224),
A.Normalize(),
ToTensorV2()
])
for split in ['train', 'val', 'test']:
if split in types:
if split == 'train':
dl = DataLoader(CatDogDataset(train_file_paths,
train_labels,
train_transform),
batch_size, shuffle=True, num_workers=num_workers)
elif split == "val":
dl = DataLoader(CatDogDataset(val_file_paths,
val_labels,
eval_transform),
batch_size, shuffle=False, num_workers=num_workers)
dataloaders[split] = dl
return dataloaders | 29.643678 | 83 | 0.579294 | 649 | 0.251648 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.031795 |
9ed44c7c52a922019ce69deffde3525039c1362a | 4,203 | py | Python | seq2seq_utils.py | mumbihere/summarizer | c230115c7d2d3bb659e9a0e402266178743f8de6 | [
"MIT"
]
| null | null | null | seq2seq_utils.py | mumbihere/summarizer | c230115c7d2d3bb659e9a0e402266178743f8de6 | [
"MIT"
]
| null | null | null | seq2seq_utils.py | mumbihere/summarizer | c230115c7d2d3bb659e9a0e402266178743f8de6 | [
"MIT"
]
| null | null | null | from keras.preprocessing.text import text_to_word_sequence
from keras.models import Sequential
from keras.layers import Activation, TimeDistributed, Dense, RepeatVector, recurrent, Embedding
from keras.layers.recurrent import LSTM
from keras.optimizers import Adam, RMSprop
from nltk import FreqDist
import numpy as np
import os
import datetime
def load_data(source, dist, max_len, vocab_size):
# Reading raw text from source and destination files
f = open(source, 'r')
X_data = f.read()
f.close()
f = open(dist, 'r')
y_data = f.read()
f.close()
# Splitting raw text into array of sequences
X = [text_to_word_sequence(x)[::-1] for x, y in zip(X_data.split('\n'), y_data.split('\n')) if len(x) > 0 and len(y) > 0 and len(x) <= max_len and len(y) <= max_len]
y = [text_to_word_sequence(y) for x, y in zip(X_data.split('\n'), y_data.split('\n')) if len(x) > 0 and len(y) > 0 and len(x) <= max_len and len(y) <= max_len]
# Creating the vocabulary set with the most common words
dist = FreqDist(np.hstack(X))
X_vocab = dist.most_common(vocab_size-1)
dist = FreqDist(np.hstack(y))
y_vocab = dist.most_common(vocab_size-1)
# Creating an array of words from the vocabulary set, we will use this array as index-to-word dictionary
X_ix_to_word = [word[0] for word in X_vocab]
# Adding the word "ZERO" to the beginning of the array
X_ix_to_word.insert(0, 'ZERO')
# Adding the word 'UNK' to the end of the array (stands for UNKNOWN words)
X_ix_to_word.append('UNK')
# Creating the word-to-index dictionary from the array created above
X_word_to_ix = {word:ix for ix, word in enumerate(X_ix_to_word)}
# Converting each word to its index value
for i, sentence in enumerate(X):
for j, word in enumerate(sentence):
if word in X_word_to_ix:
X[i][j] = X_word_to_ix[word]
else:
X[i][j] = X_word_to_ix['UNK']
y_ix_to_word = [word[0] for word in y_vocab]
y_ix_to_word.insert(0, 'ZERO')
y_ix_to_word.append('UNK')
y_word_to_ix = {word:ix for ix, word in enumerate(y_ix_to_word)}
for i, sentence in enumerate(y):
for j, word in enumerate(sentence):
if word in y_word_to_ix:
y[i][j] = y_word_to_ix[word]
else:
y[i][j] = y_word_to_ix['UNK']
return (X, len(X_vocab)+2, X_word_to_ix, X_ix_to_word, y, len(y_vocab)+2, y_word_to_ix, y_ix_to_word)
def load_test_data(source, X_word_to_ix, max_len):
f = open(source, 'r')
X_data = f.read()
f.close()
X = [text_to_word_sequence(x)[::-1] for x in X_data.split('\n') if len(x) > 0 and len(x) <= max_len]
for i, sentence in enumerate(X):
for j, word in enumerate(sentence):
if word in X_word_to_ix:
X[i][j] = X_word_to_ix[word]
else:
X[i][j] = X_word_to_ix['UNK']
return X
def create_model(X_vocab_len, X_max_len, y_vocab_len, y_max_len, hidden_size, num_layers):
model = Sequential()
# Creating encoder network
model.add(Embedding(X_vocab_len, 1000, input_length=X_max_len, mask_zero=True))
model.add(LSTM(hidden_size))
model.add(RepeatVector(y_max_len))
# Creating decoder network
for _ in range(num_layers):
model.add(LSTM(hidden_size, return_sequences=True))
model.add(TimeDistributed(Dense(y_vocab_len)))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='rmsprop',
metrics=['accuracy'])
return model
def process_data(word_sentences, max_len, word_to_ix):
# Vectorizing each element in each sequence
sequences = np.zeros((len(word_sentences), max_len, len(word_to_ix)))
for i, sentence in enumerate(word_sentences):
for j, word in enumerate(sentence):
sequences[i, j, word] = 1.
return sequences
def find_checkpoint_file(folder):
checkpoint_file = [f for f in os.listdir(folder) if 'checkpoint' in f]
if len(checkpoint_file) == 0:
return []
modified_time = [os.path.getmtime(f) for f in checkpoint_file]
return checkpoint_file[np.argmax(modified_time)] | 39.650943 | 169 | 0.664287 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 720 | 0.171306 |
9ed4b01964cfce5140c8270d443eb2c516032d63 | 2,830 | py | Python | SAMAE/data/__init__.py | Lisa-pa/SAMAE | 8d52fd6f8c2634c82f2071233e9796ea322f6360 | [
"MIT"
]
| null | null | null | SAMAE/data/__init__.py | Lisa-pa/SAMAE | 8d52fd6f8c2634c82f2071233e9796ea322f6360 | [
"MIT"
]
| 4 | 2021-03-20T09:31:02.000Z | 2022-03-12T00:51:19.000Z | SAMAE/data/__init__.py | Lisa-pa/AponeurosesDetection | 8d52fd6f8c2634c82f2071233e9796ea322f6360 | [
"MIT"
]
| null | null | null |
"""Standard test images.
"""
import os
from skimage.io import imread
data_dir = os.path.abspath(os.path.dirname(__file__))
__all__ = ['data_dir', 'circle', 'skmuscimg']
def _load(f, as_gray=False):
"""Load an image file located in the data directory.
Parameters
----------
f : string
File name.
as_gray : bool, optional
Whether to convert the image to grayscale.
Returns
-------
img : ndarray
Image loaded from ``data_dir``.
"""
# importing io is quite slow since it scans all the backends
# we lazy import it here
return imread(f, as_gray=as_gray)
def circle():
"""Synthetic image of a circle
Returns
-------
circle : (xdim, ydim) bool ndarray
Circle image.
"""
return _load(os.path.join(data_dir, "circle.bmp"))
def skmuscimg():
"""Cropped US image of a musculoskeletal muscle
"""
return _load(os.path.join(data_dir, "skmuscle.jpg"))
def panoimg():
"""Panoramic US image of a musculoskeletal muscle
"""
return _load(os.path.join(data_dir, "panoramic_echo.jpg"))
def simpleimg():
"""Simple US image of a musculoskeletal muscle
"""
return _load(os.path.join(data_dir, "simple_echo.jpg"))
def downloadFromDropbox(tok, path2file):
"""Download an image from a Dropbox account.
Args:
tok (string): access token that connects to the wanted
app in Dropbox account
path2file (string): Path of the file to download, in the
app corresponding to the above token.
Output:
image (numpy.ndarray): 3-channel color image, with
coefficients' type == uint8
Example:
1) Register a new app in the App Console of your Dropbox
account. Set up parameters as you want.
2) In Dropbox>Applications>MyApp, import your data.
3) In the settings page of MyApp, generate a token and copy it.
It should look like a random string of letters and figures,
as below. (!!!This access token can be used to access your
account via the API. Don’t share your access token with anyone!!!)
> token = 'Q8yhHQ4wquAAAAAAAAABRPb9LYdKAr2WGcmhhJ8egiX4_Qak6YZwBw4GUpX9DVeb' //token not available anymore
> path = '/cropped_20181002_153426_image.jpg'
> dt = downloadFromDropbox(token, path);
"""
import dropbox
import numpy as np
import cv2
dbx = dropbox.Dropbox(tok)
try:
metadata, file = dbx.files_download(path2file)
except dropbox.exceptions.HttpError as err:
print('*** HTTP error', err)
return None
data = np.frombuffer(file.content, np.uint8)
image = cv2.imdecode(data, 1)
return image | 28.877551 | 114 | 0.621908 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,970 | 0.695621 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.