code | apis | extract_api
---|---|---|
from webserver import application
if __name__ == '__main__':
application.run() | [
"webserver.application.run"
] | [((63, 80), 'webserver.application.run', 'application.run', ([], {}), '()\n', (78, 80), False, 'from webserver import application\n')] |
from smartcard.System import readers
from smartcard.util import toHexString
from smartcard.ATR import ATR
from smartcard.CardType import AnyCardType
from smartcard.pcsc import PCSCExceptions
import sys
import logging
logging.basicConfig(
format='%(asctime)s\t%(levelname)s\t%(message)s')
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
r = None
try:
r = readers()
except PCSCExceptions.EstablishContextException:
logger.critical("Could not contact pcscd")
exit(1)
if len(r) < 1:
logger.critical("error: No readers available!")
exit(1)
logger.info("Available readers: " + str(r))
# TODO: let user pick reader
reader = r[0]
logger.info("Using: " + str(reader))
connection = reader.createConnection()
connection.connect()
# ACR magic numbers
ACR_MUTE = [0xFF, 0x00, 0x52, 0x00, 0x00]
ACR_UNMUTE = [0xFF, 0x00, 0x52, 0xFF, 0x00]
ACR_GETUID = [0xFF, 0xCA, 0x00, 0x00, 0x00]
ACR_FIRMVER = [0xFF, 0x00, 0x48, 0x00, 0x00]
# TODO: check where getuid and firmver belong
# General magic numbers
data_write_command = [0xff, 0xd6, 0x00] # Append blocknr, data len, data
# Desfire specific magic numbers
blocknr = 0x0
desfire_write_uid_command_size = 0x0e
desfire_write_uid_command = [0xff, 0x00, 0x00, blocknr, desfire_write_uid_command_size, 0xd4, 0x42, 0x90, 0xf0, 0xcc, 0xcc, 0x10]
desfire_backdoor_command_one = [0xff, 0xca, 0x00, 0x00, 0x00]
desfire_backdoor_command_two = [0xff, 0x00, 0x00, 0x00, 0x04, 0xd4, 0x4a, 0x01, 0x00]
# Classic specific magic numbers
#TODO
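# _write sends an APDU over the existing connection and checks the ISO 7816 status words:
# 0x90 0x00 means success, 0x63 0x00 is treated as a bad response.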
def _write(data):
data_as_hex = ' '.join(format(x, '02x') for x in data)
logger.debug("Writing data: " + str(data_as_hex))
returndata, sw1, sw2=connection.transmit(data)
logger.info("Got status words: %02X %02X" % (sw1, sw2))
if ((sw1, sw2) == (0x90, 0x0)):
return (True, returndata)
elif (sw1, sw2) == (0x63, 0x0):
logger.error("Got bad response")
return (False, None)
def get_uid():
status, retdata = _write(ACR_GETUID)
return retdata
def write_data_block(blocknr, userdata):
userdata_values = bytes.fromhex(userdata)
# Note, mfclassic only allows writing of 16 bytes at a time (that's one block)
assert len(userdata_values) == 16
write_command = data_write_command + [blocknr, len(userdata_values)]
for b in userdata_values:
write_command.append(b)
_write(write_command)
def write_uid_desfire(newuid: str):
uid_values = bytes.fromhex(newuid)
assert len(uid_values) == 7
logger.info("Setting uid to " + str(uid_values))
write_command = desfire_write_uid_command + [i for i in uid_values]
_write(desfire_backdoor_command_one)
_write(desfire_backdoor_command_two)
_write(write_command)
def main():
logger.error("Be careful! Somewhere along the line I borked my only 7B card. This current code is not known-good")
'''
write_uid_desfire("aabbccddeeff11")
uid = get_uid()
print("UID: " + ' '.join('{:02x}'.format(x) for x in uid))
'''
if __name__ == "__main__":
main() | [
"logging.basicConfig",
"smartcard.System.readers",
"logging.getLogger"
] | [((218, 287), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s\t%(levelname)s\t%(message)s"""'}), "(format='%(asctime)s\\t%(levelname)s\\t%(message)s')\n", (237, 287), False, 'import logging\n'), ((302, 329), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (319, 329), False, 'import logging\n'), ((381, 390), 'smartcard.System.readers', 'readers', ([], {}), '()\n', (388, 390), False, 'from smartcard.System import readers\n')] |
import pytest
from _pytest.monkeypatch import MonkeyPatch
from cx_core import ReleaseHoldController
from cx_core.controller import Controller
from pytest_mock import MockerFixture
from tests.test_utils import fake_fn
class FakeReleaseHoldController(ReleaseHoldController):
def hold_loop(self):
pass
def default_delay(self) -> int:
return 500
@pytest.fixture
def sut_before_init(mocker: MockerFixture) -> FakeReleaseHoldController:
controller = FakeReleaseHoldController() # type: ignore
controller.args = {}
mocker.patch.object(Controller, "init")
mocker.patch.object(controller, "sleep")
return controller
@pytest.fixture
@pytest.mark.asyncio
async def sut(sut_before_init: FakeReleaseHoldController) -> FakeReleaseHoldController:
await sut_before_init.init()
return sut_before_init
@pytest.mark.asyncio
async def test_init(sut_before_init: FakeReleaseHoldController, mocker: MockerFixture):
await sut_before_init.init()
assert sut_before_init.delay == 500
@pytest.mark.asyncio
async def test_release(sut: FakeReleaseHoldController):
sut.on_hold = True
await sut.release()
assert not sut.on_hold
@pytest.mark.asyncio
async def test_hold(
sut: FakeReleaseHoldController,
monkeypatch: MonkeyPatch,
mocker: MockerFixture,
):
monkeypatch.setattr(sut, "hold_loop", fake_fn(to_return=True, async_=True))
hold_loop_patch = mocker.patch.object(sut, "hold_loop")
await sut.hold()
hold_loop_patch.assert_called_once()
@pytest.mark.parametrize(
"action, on_hold_input, hold_release_toogle, continue_call",
[
("hold", False, False, True),
("hold", True, False, False),
("hold", False, True, True),
("hold", True, True, False),
("release", True, True, True),
("release", True, False, True),
("release", False, True, True),
("release", False, False, True),
],
)
@pytest.mark.asyncio
async def test_before_action(
sut: FakeReleaseHoldController,
action: str,
on_hold_input: bool,
hold_release_toogle: bool,
continue_call: bool,
):
sut.on_hold = on_hold_input
sut.hold_release_toggle = hold_release_toogle
output = await sut.before_action(action)
assert output == continue_call
| [
"pytest.mark.parametrize",
"tests.test_utils.fake_fn"
] | [((1529, 1885), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""action, on_hold_input, hold_release_toogle, continue_call"""', "[('hold', False, False, True), ('hold', True, False, False), ('hold', False,\n True, True), ('hold', True, True, False), ('release', True, True, True),\n ('release', True, False, True), ('release', False, True, True), (\n 'release', False, False, True)]"], {}), "(\n 'action, on_hold_input, hold_release_toogle, continue_call', [('hold', \n False, False, True), ('hold', True, False, False), ('hold', False, True,\n True), ('hold', True, True, False), ('release', True, True, True), (\n 'release', True, False, True), ('release', False, True, True), (\n 'release', False, False, True)])\n", (1552, 1885), False, 'import pytest\n'), ((1364, 1400), 'tests.test_utils.fake_fn', 'fake_fn', ([], {'to_return': '(True)', 'async_': '(True)'}), '(to_return=True, async_=True)\n', (1371, 1400), False, 'from tests.test_utils import fake_fn\n')] |
import ClientAPI
import MarsCommand
NamedColors = {}
NamedColorsOrig = {
'AliceBlue': ClientAPI.ColorEx.AliceBlue,
'AntiqueWhite': ClientAPI.ColorEx.AntiqueWhite,
'Aqua': ClientAPI.ColorEx.Aqua,
'Aquamarine': ClientAPI.ColorEx.Aquamarine,
'Azure': ClientAPI.ColorEx.Azure,
'Beige': ClientAPI.ColorEx.Beige,
'Bisque': ClientAPI.ColorEx.Bisque,
'Black': ClientAPI.ColorEx.Black,
'BlanchedAlmond': ClientAPI.ColorEx.BlanchedAlmond,
'Blue': ClientAPI.ColorEx.Blue,
'BlueViolet': ClientAPI.ColorEx.BlueViolet,
'Brown': ClientAPI.ColorEx.Brown,
'BurlyWood': ClientAPI.ColorEx.BurlyWood,
'CadetBlue': ClientAPI.ColorEx.CadetBlue,
'Chartreuse': ClientAPI.ColorEx.Chartreuse,
'Chocolate': ClientAPI.ColorEx.Chocolate,
'Coral': ClientAPI.ColorEx.Coral,
'CornflowerBlue': ClientAPI.ColorEx.CornflowerBlue,
'Cornsilk': ClientAPI.ColorEx.Cornsilk,
'Crimson': ClientAPI.ColorEx.Crimson,
'Cyan': ClientAPI.ColorEx.Cyan,
'DarkBlue': ClientAPI.ColorEx.DarkBlue,
'DarkCyan': ClientAPI.ColorEx.DarkCyan,
'DarkGoldenrod': ClientAPI.ColorEx.DarkGoldenrod,
'DarkGray': ClientAPI.ColorEx.DarkGray,
'DarkGreen': ClientAPI.ColorEx.DarkGreen,
'DarkKhaki': ClientAPI.ColorEx.DarkKhaki,
'DarkMagenta': ClientAPI.ColorEx.DarkMagenta,
'DarkOliveGreen': ClientAPI.ColorEx.DarkOliveGreen,
'DarkOrange': ClientAPI.ColorEx.DarkOrange,
'DarkOrchid': ClientAPI.ColorEx.DarkOrchid,
'DarkRed': ClientAPI.ColorEx.DarkRed,
'DarkSalmon': ClientAPI.ColorEx.DarkSalmon,
'DarkSeaGreen': ClientAPI.ColorEx.DarkSeaGreen,
'DarkSlateBlue': ClientAPI.ColorEx.DarkSlateBlue,
'DarkSlateGray': ClientAPI.ColorEx.DarkSlateGray,
'DarkTurquoise': ClientAPI.ColorEx.DarkTurquoise,
'DarkViolet': ClientAPI.ColorEx.DarkViolet,
'DeepPink': ClientAPI.ColorEx.DeepPink,
'DeepSkyBlue': ClientAPI.ColorEx.DeepSkyBlue,
'DimGray': ClientAPI.ColorEx.DimGray,
'DodgerBlue': ClientAPI.ColorEx.DodgerBlue,
'Firebrick': ClientAPI.ColorEx.Firebrick,
'FloralWhite': ClientAPI.ColorEx.FloralWhite,
'ForestGreen': ClientAPI.ColorEx.ForestGreen,
'Fuchsia': ClientAPI.ColorEx.Fuchsia,
'Gainsboro': ClientAPI.ColorEx.Gainsboro,
'GhostWhite': ClientAPI.ColorEx.GhostWhite,
'Gold': ClientAPI.ColorEx.Gold,
'Goldenrod': ClientAPI.ColorEx.Goldenrod,
'Gray': ClientAPI.ColorEx.Gray,
'Green': ClientAPI.ColorEx.Green,
'GreenYellow': ClientAPI.ColorEx.GreenYellow,
'Honeydew': ClientAPI.ColorEx.Honeydew,
'HotPink': ClientAPI.ColorEx.HotPink,
'IndianRed': ClientAPI.ColorEx.IndianRed,
'Indigo': ClientAPI.ColorEx.Indigo,
'Ivory': ClientAPI.ColorEx.Ivory,
'Khaki': ClientAPI.ColorEx.Khaki,
'Lavender': ClientAPI.ColorEx.Lavender,
'LavenderBlush': ClientAPI.ColorEx.LavenderBlush,
'LawnGreen': ClientAPI.ColorEx.LawnGreen,
'LemonChiffon': ClientAPI.ColorEx.LemonChiffon,
'LightBlue': ClientAPI.ColorEx.LightBlue,
'LightCoral': ClientAPI.ColorEx.LightCoral,
'LightCyan': ClientAPI.ColorEx.LightCyan,
'LightGoldenrodYellow': ClientAPI.ColorEx.LightGoldenrodYellow,
'LightGreen': ClientAPI.ColorEx.LightGreen,
'LightGray': ClientAPI.ColorEx.LightGray,
'LightPink': ClientAPI.ColorEx.LightPink,
'LightSalmon': ClientAPI.ColorEx.LightSalmon,
'LightSeaGreen': ClientAPI.ColorEx.LightSeaGreen,
'LightSkyBlue': ClientAPI.ColorEx.LightSkyBlue,
'LightSlateGray': ClientAPI.ColorEx.LightSlateGray,
'LightSteelBlue': ClientAPI.ColorEx.LightSteelBlue,
'LightYellow': ClientAPI.ColorEx.LightYellow,
'Lime': ClientAPI.ColorEx.Lime,
'LimeGreen': ClientAPI.ColorEx.LimeGreen,
'Linen': ClientAPI.ColorEx.Linen,
'Magenta': ClientAPI.ColorEx.Magenta,
'Maroon': ClientAPI.ColorEx.Maroon,
'MediumAquamarine': ClientAPI.ColorEx.MediumAquamarine,
'MediumBlue': ClientAPI.ColorEx.MediumBlue,
'MediumOrchid': ClientAPI.ColorEx.MediumOrchid,
'MediumPurple': ClientAPI.ColorEx.MediumPurple,
'MediumSeaGreen': ClientAPI.ColorEx.MediumSeaGreen,
'MediumSlateBlue': ClientAPI.ColorEx.MediumSlateBlue,
'MediumSpringGreen': ClientAPI.ColorEx.MediumSpringGreen,
'MediumTurquoise': ClientAPI.ColorEx.MediumTurquoise,
'MediumVioletRed': ClientAPI.ColorEx.MediumVioletRed,
'MidnightBlue': ClientAPI.ColorEx.MidnightBlue,
'MistyRose': ClientAPI.ColorEx.MistyRose,
'Moccasin': ClientAPI.ColorEx.Moccasin,
'NavajoWhite': ClientAPI.ColorEx.NavajoWhite,
'Navy': ClientAPI.ColorEx.Navy,
'OldLace': ClientAPI.ColorEx.OldLace,
'Olive': ClientAPI.ColorEx.Olive,
'OliveDrab': ClientAPI.ColorEx.OliveDrab,
'Orange': ClientAPI.ColorEx.Orange,
'OrangeRed': ClientAPI.ColorEx.OrangeRed,
'Orchid': ClientAPI.ColorEx.Orchid,
'PaleGoldenrod': ClientAPI.ColorEx.PaleGoldenrod,
'PaleGreen': ClientAPI.ColorEx.PaleGreen,
'PaleTurquoise': ClientAPI.ColorEx.PaleTurquoise,
'PaleVioletRed': ClientAPI.ColorEx.PaleVioletRed,
'PapayaWhip': ClientAPI.ColorEx.PapayaWhip,
'PeachPuff': ClientAPI.ColorEx.PeachPuff,
'Peru': ClientAPI.ColorEx.Peru,
'Pink': ClientAPI.ColorEx.Pink,
'Plum': ClientAPI.ColorEx.Plum,
'PowderBlue': ClientAPI.ColorEx.PowderBlue,
'Purple': ClientAPI.ColorEx.Purple,
'Red': ClientAPI.ColorEx.Red,
'RosyBrown': ClientAPI.ColorEx.RosyBrown,
'RoyalBlue': ClientAPI.ColorEx.RoyalBlue,
'SaddleBrown': ClientAPI.ColorEx.SaddleBrown,
'Salmon': ClientAPI.ColorEx.Salmon,
'SandyBrown': ClientAPI.ColorEx.SandyBrown,
'SeaGreen': ClientAPI.ColorEx.SeaGreen,
'SeaShell': ClientAPI.ColorEx.SeaShell,
'Sienna': ClientAPI.ColorEx.Sienna,
'Silver': ClientAPI.ColorEx.Silver,
'SkyBlue': ClientAPI.ColorEx.SkyBlue,
'SlateBlue': ClientAPI.ColorEx.SlateBlue,
'SlateGray': ClientAPI.ColorEx.SlateGray,
'Snow': ClientAPI.ColorEx.Snow,
'SpringGreen': ClientAPI.ColorEx.SpringGreen,
'SteelBlue': ClientAPI.ColorEx.SteelBlue,
'Tan': ClientAPI.ColorEx.Tan,
'Teal': ClientAPI.ColorEx.Teal,
'Thistle': ClientAPI.ColorEx.Thistle,
'Tomato': ClientAPI.ColorEx.Tomato,
'Turquoise': ClientAPI.ColorEx.Turquoise,
'Violet': ClientAPI.ColorEx.Violet,
'Wheat': ClientAPI.ColorEx.Wheat,
'White': ClientAPI.ColorEx.White,
'WhiteSmoke': ClientAPI.ColorEx.WhiteSmoke,
'Yellow': ClientAPI.ColorEx.Yellow,
'YellowGreen': ClientAPI.ColorEx.YellowGreen,
}
def DumpColorNames(args=""):
colorStr = ""
for colorName in NamedColors.keys():
colorStr = colorStr + '%s, ' % colorName
ClientAPI.Write(colorStr)
def ColorValue(args):
ClientAPI.Write(NamedColors[args.lower()].ToString())
def GetColor(args):
colorName = args.lower()
if colorName in NamedColors:
return NamedColors[colorName]
else:
return None
# make copies of the colors in the dictionary with all lower case names
for colorName in NamedColorsOrig.keys():
NamedColors[colorName.lower()] = NamedColorsOrig[colorName]
MarsCommand.RegisterCommandHandler("colornames", DumpColorNames)
MarsCommand.RegisterCommandHandler("colorvalue", ColorValue)
| [
"MarsCommand.RegisterCommandHandler",
"ClientAPI.Write"
] | [((7042, 7106), 'MarsCommand.RegisterCommandHandler', 'MarsCommand.RegisterCommandHandler', (['"""colornames"""', 'DumpColorNames'], {}), "('colornames', DumpColorNames)\n", (7076, 7106), False, 'import MarsCommand\n'), ((7107, 7167), 'MarsCommand.RegisterCommandHandler', 'MarsCommand.RegisterCommandHandler', (['"""colorvalue"""', 'ColorValue'], {}), "('colorvalue', ColorValue)\n", (7141, 7167), False, 'import MarsCommand\n'), ((6593, 6618), 'ClientAPI.Write', 'ClientAPI.Write', (['colorStr'], {}), '(colorStr)\n', (6608, 6618), False, 'import ClientAPI\n')] |
from PySide2.QtWidgets import QDialog, QVBoxLayout
from hexrd.ui.tree_views.base_dict_tree_item_model import (
BaseTreeItemModel, BaseDictTreeItemModel, BaseDictTreeView
)
from hexrd.ui.tree_views.tree_item import TreeItem
from hexrd.ui.tree_views.value_column_delegate import ValueColumnDelegate
# Global constants
KEY_COL = BaseTreeItemModel.KEY_COL
VALUE_COL = KEY_COL + 1
class DictTreeItemModel(BaseDictTreeItemModel):
def __init__(self, dictionary, parent=None):
super().__init__(dictionary, parent)
self.root_item = TreeItem(['key', 'value'])
self.rebuild_tree()
def recursive_add_tree_items(self, cur_config, cur_tree_item):
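        # Walk nested dicts/lists recursively; leaves are stored in the value column,
        # while blacklisted paths and keys starting with "_" are skipped.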
if isinstance(cur_config, dict):
keys = cur_config.keys()
elif isinstance(cur_config, list):
keys = range(len(cur_config))
else:
# This must be a value.
cur_tree_item.set_data(VALUE_COL, cur_config)
return
for key in keys:
path = self.path_to_value(cur_tree_item, 0) + [key]
if path in self.blacklisted_paths or str(key).startswith('_'):
continue
data = [key, None]
tree_item = self.add_tree_item(data, cur_tree_item)
self.recursive_add_tree_items(cur_config[key], tree_item)
def path_to_value(self, tree_item, column):
return self.path_to_item(tree_item)
class DictTreeView(BaseDictTreeView):
def __init__(self, dictionary, parent=None):
super().__init__(parent)
self.setModel(DictTreeItemModel(dictionary, parent=self))
self.setItemDelegateForColumn(
VALUE_COL, ValueColumnDelegate(self))
self.resizeColumnToContents(KEY_COL)
self.resizeColumnToContents(VALUE_COL)
self.header().resizeSection(KEY_COL, 200)
self.header().resizeSection(VALUE_COL, 200)
class DictTreeViewDialog(QDialog):
def __init__(self, dictionary, parent=None):
super().__init__(parent)
self.setLayout(QVBoxLayout(self))
self.tree_view = DictTreeView(dictionary, self)
self.layout().addWidget(self.tree_view)
self.resize(500, 500)
def expand_rows(self):
return self.tree_view.expand_rows()
@property
def editable(self):
return self.tree_view.editable
@editable.setter
def editable(self, v):
self.tree_view.editable = v
def set_single_selection_mode(self):
self.tree_view.set_single_selection_mode()
def set_multi_selection_mode(self):
self.tree_view.set_multi_selection_mode()
def set_extended_selection_mode(self):
self.tree_view.set_extended_selection_mode()
@property
def selected_items(self):
return self.tree_view.selected_items
| [
"hexrd.ui.tree_views.value_column_delegate.ValueColumnDelegate",
"hexrd.ui.tree_views.tree_item.TreeItem",
"PySide2.QtWidgets.QVBoxLayout"
] | [((554, 580), 'hexrd.ui.tree_views.tree_item.TreeItem', 'TreeItem', (["['key', 'value']"], {}), "(['key', 'value'])\n", (562, 580), False, 'from hexrd.ui.tree_views.tree_item import TreeItem\n'), ((1668, 1693), 'hexrd.ui.tree_views.value_column_delegate.ValueColumnDelegate', 'ValueColumnDelegate', (['self'], {}), '(self)\n', (1687, 1693), False, 'from hexrd.ui.tree_views.value_column_delegate import ValueColumnDelegate\n'), ((2035, 2052), 'PySide2.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (2046, 2052), False, 'from PySide2.QtWidgets import QDialog, QVBoxLayout\n')] |
#!/usr/bin/env python3
import os
import time
import sys
import argparse
from dotenv import load_dotenv
from harvester import *
'''
LAUNCH
'''
# ftp default values
server = None
port = 21
username = 'anonymous'
password = '<PASSWORD>'
# load ftp variables
load_dotenv() # loads environment vars from the '.env' file if present
server = os.getenv('FTP_SERVER')
port = os.getenv('FTP_PORT', port)
username = os.getenv('FTP_USERNAME', username)
password = os.getenv('FTP_PASSWORD', password)
# file names used by this script
listfile = 'harvestlist.txt'
cache_file = '.harvestcache'
# the default interval for backups
default_interval = 5
# list of files to look for
files = []
if not os.path.isfile(listfile):
print('ERROR: File \'' + listfile + '\' required, but not found in the current working directory')
sys.exit(1)
try:
with open(listfile, 'r') as f:
for one in f.readlines():
one = one.strip()
one = one.split('#')[0]
if one: # not empty
files.append(one)
except IOError as e:
print('ERROR: File \'' + listfile + '\': ' + str(e))
if server is None:
print('ERROR: Environment variable {} not specified'.format('FTP_SERVER'))
sys.exit(1)
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='\
FTP Harvest.\n\
If no arguments are provided, the default behavior is equivalent to:\n\
{} -lb 5\n\n\
Uses these environment variables:\n\
* FTP_SERVER\n\
* FTP_PORT (=21)\n\
* FTP_USERNAME (=anonymous)\n\
* FTP_PASSWORD (=anonymous)\n\
FTP_SERVER must be specified and be a valid FTP server address'.format(sys.argv[0]))
parser.add_argument('-l', '--load', action='store_true',
help='loads files specified in {} from the FTP server'.format(listfile))
parser.add_argument('-b', '--backup', type=int, metavar='INTERVAL', default=0,
help='periodically saves files in {} to the FTP server; INTERVAL is in minutes'.format(listfile))
parser.add_argument('-s', '--save', action='store_true',
help='sends current files to the FTP server')
result = parser.parse_args(sys.argv[1:])
# default behavior when no arguments are provided
if result.backup == 0 and not result.load and not result.save:
result.backup = default_interval
result.load = True
print('Invoking default behavior')
print('Connecting to FTP: {}'.format(server))
connection = FTPConnection(server, port, username, password)
harvester = Harvester(connection, files)
if result.save:
print('Sending files to the server...')
harvester.save()
print('Sending finished')
if result.load:
print('Loading from server...')
harvester.load()
if result.backup == 0:
        # caches file modification times so that when this program is
# run again with -b it can access this data
harvester.save_cache(cache_file)
print('Loading finished')
if result.backup > 0:
print('Started backup cycle (interval = {} min)'.format(result.backup))
if not result.load:
harvester.load_cache(cache_file)
while True:
time.sleep(result.backup * 60)
harvester.save()
| [
"os.getenv",
"time.sleep",
"dotenv.load_dotenv",
"os.path.isfile",
"sys.exit"
] | [((260, 273), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (271, 273), False, 'from dotenv import load_dotenv\n'), ((341, 364), 'os.getenv', 'os.getenv', (['"""FTP_SERVER"""'], {}), "('FTP_SERVER')\n", (350, 364), False, 'import os\n'), ((372, 399), 'os.getenv', 'os.getenv', (['"""FTP_PORT"""', 'port'], {}), "('FTP_PORT', port)\n", (381, 399), False, 'import os\n'), ((411, 446), 'os.getenv', 'os.getenv', (['"""FTP_USERNAME"""', 'username'], {}), "('FTP_USERNAME', username)\n", (420, 446), False, 'import os\n'), ((458, 493), 'os.getenv', 'os.getenv', (['"""FTP_PASSWORD"""', 'password'], {}), "('FTP_PASSWORD', password)\n", (467, 493), False, 'import os\n'), ((691, 715), 'os.path.isfile', 'os.path.isfile', (['listfile'], {}), '(listfile)\n', (705, 715), False, 'import os\n'), ((824, 835), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (832, 835), False, 'import sys\n'), ((1225, 1236), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1233, 1236), False, 'import sys\n'), ((3117, 3147), 'time.sleep', 'time.sleep', (['(result.backup * 60)'], {}), '(result.backup * 60)\n', (3127, 3147), False, 'import time\n')] |
"""Application config."""
from dataclasses import dataclass
import os
from typing import Optional
from .database import create_conn_config, ConnectionParameters
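# The application key is read from the APP_KEY environment variable, falling back to a Docker-style secrets file.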
def _get_app_key_from_file() -> Optional[str]:
try:
with open("/run/secrets/app_key", "r") as key_file:
return str(key_file.readline())
except FileNotFoundError:
return None
def get_app_key() -> str:
"""Get application key from environment."""
key = os.getenv("APP_KEY", _get_app_key_from_file())
if not key:
raise ValueError("An APP_KEY environment variable is required.")
return key
@dataclass
class Config:
"""Application configuration object."""
database: ConnectionParameters
jwt_key: str
def create_default_config() -> Config:
return Config(
database=create_conn_config(
user=os.getenv('DB_USER', 'test'),
password=os.getenv('DB_PASS', '<PASSWORD>'),
host=os.getenv('DB_HOST', 'localhost'),
port=int(os.getenv('DB_PORT', '5432')),
database=os.getenv('DB_NAME', 'dev')),
jwt_key=get_app_key())
| [
"os.getenv"
] | [((852, 880), 'os.getenv', 'os.getenv', (['"""DB_USER"""', '"""test"""'], {}), "('DB_USER', 'test')\n", (861, 880), False, 'import os\n'), ((903, 937), 'os.getenv', 'os.getenv', (['"""DB_PASS"""', '"""<PASSWORD>"""'], {}), "('DB_PASS', '<PASSWORD>')\n", (912, 937), False, 'import os\n'), ((956, 989), 'os.getenv', 'os.getenv', (['"""DB_HOST"""', '"""localhost"""'], {}), "('DB_HOST', 'localhost')\n", (965, 989), False, 'import os\n'), ((1064, 1091), 'os.getenv', 'os.getenv', (['"""DB_NAME"""', '"""dev"""'], {}), "('DB_NAME', 'dev')\n", (1073, 1091), False, 'import os\n'), ((1012, 1040), 'os.getenv', 'os.getenv', (['"""DB_PORT"""', '"""5432"""'], {}), "('DB_PORT', '5432')\n", (1021, 1040), False, 'import os\n')] |
from .logging import get_logger
logger = get_logger(__name__)
from asyncio import get_event_loop, gather, Task
try:
from asyncio import get_running_loop
except ImportError:
get_running_loop = get_event_loop
try:
from asyncio import create_task
except ImportError:
create_task = None
try:
from asyncio import run
except ImportError:
run = None
try:
from contextlib import AsyncExitStack
except ImportError:
AsyncExitStack = None
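# Minimal fallbacks for Python versions that lack asyncio.create_task, asyncio.run, or contextlib.AsyncExitStack.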
def create_task_polyfill(coro):
loop = get_running_loop()
task = loop.create_task(coro)
return task
def run_polyfill(main):
loop = get_running_loop()
try:
return loop.run_until_complete(main)
finally:
_cancel_all_tasks(loop)
loop.run_until_complete(loop.shutdown_asyncgens())
def _cancel_all_tasks(loop):
    to_cancel = Task.all_tasks(loop)
if to_cancel:
for task in to_cancel:
task.cancel()
loop.run_until_complete(gather(*to_cancel, loop=loop, return_exceptions=True))
class AsyncExitStackPolyfill:
def __init__(self):
self._stack = []
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
for cm in reversed(self._stack):
try:
await cm.__aexit__(exc_type, exc_val, exc_tb)
except BaseException as e:
logger.exception('cm.__aexit__ failed: %r; cm: %r', e, cm)
self._stack = None
async def enter_async_context(self, cm):
res = await cm.__aenter__()
self._stack.append(cm)
return res
if create_task is None:
create_task = create_task_polyfill
if run is None:
run = run_polyfill
if AsyncExitStack is None:
AsyncExitStack = AsyncExitStackPolyfill
| [
"asyncio.get_running_loop",
"asyncio.gather"
] | [((505, 523), 'asyncio.get_running_loop', 'get_running_loop', ([], {}), '()\n', (521, 523), False, 'from asyncio import get_running_loop\n'), ((611, 629), 'asyncio.get_running_loop', 'get_running_loop', ([], {}), '()\n', (627, 629), False, 'from asyncio import get_running_loop\n'), ((964, 1017), 'asyncio.gather', 'gather', (['*to_cancel'], {'loop': 'loop', 'return_exceptions': '(True)'}), '(*to_cancel, loop=loop, return_exceptions=True)\n', (970, 1017), False, 'from asyncio import get_event_loop, gather\n')] |
import typing
import sys
sys.setrecursionlimit(1 << 20)
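# find() below is recursive, so allow very deep parent chains.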
import dataclasses
@dataclasses.dataclass
class Node():
parent: int
size: int = 1
rank: int = 0
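# Disjoint-set union: find() compresses paths, unite() merges the smaller set into the larger (the rank field is unused).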
class UnionFind():
def __init__(
self,
n: int,
) -> typing.NoReturn:
self.__a = [Node(i) for i in range(n)]
def find(
self,
u: int,
) -> int:
a = self.__a
pu = a[u].parent
if pu == u: return u
pu = self.find(pu)
a[u].parent = pu
return pu
def groups(self) -> typing.List[
typing.List[int]
]:
n = len(self.__a)
g = [[] for _ in range(n)]
for u in range(n):
g[self.find(u)].append(u)
return [x for x in g if x]
def same(
self,
u: int,
v: int,
) -> bool:
return self.find(u) == self.find(v)
def size(
self,
u: int,
) -> int:
return self.__a[self.find(u)].size
def unite(
self,
u: int,
v: int,
) -> typing.NoReturn:
u = self.find(u)
v = self.find(v)
if u == v: return
a = self.__a
if a[u].size < a[v].size:
u, v = v, u
a[u].size += a[v].size
a[v].parent = u | [
"sys.setrecursionlimit"
] | [((25, 55), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(1 << 20)'], {}), '(1 << 20)\n', (46, 55), False, 'import sys\n')] |
# -*- coding: utf-8 -*-
#
desc = 'Scatter plot'
#phash = '5fd219cc653c85a5'
phash = '5f425b484525bf95'
def plot():
from matplotlib import pyplot as plt
from matplotlib import style
import numpy as np
fig = plt.figure()
with plt.style.context(('fivethirtyeight')):
np.random.seed(123)
plt.scatter(
np.linspace(0, 100, 101),
np.linspace(0, 100, 101) + 15 * np.random.rand(101)
)
return fig
| [
"numpy.random.rand",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.style.context",
"numpy.linspace",
"numpy.random.seed"
] | [((224, 236), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (234, 236), True, 'from matplotlib import pyplot as plt\n'), ((246, 282), 'matplotlib.pyplot.style.context', 'plt.style.context', (['"""fivethirtyeight"""'], {}), "('fivethirtyeight')\n", (263, 282), True, 'from matplotlib import pyplot as plt\n'), ((294, 313), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (308, 313), True, 'import numpy as np\n'), ((351, 375), 'numpy.linspace', 'np.linspace', (['(0)', '(100)', '(101)'], {}), '(0, 100, 101)\n', (362, 375), True, 'import numpy as np\n'), ((393, 417), 'numpy.linspace', 'np.linspace', (['(0)', '(100)', '(101)'], {}), '(0, 100, 101)\n', (404, 417), True, 'import numpy as np\n'), ((425, 444), 'numpy.random.rand', 'np.random.rand', (['(101)'], {}), '(101)\n', (439, 444), True, 'import numpy as np\n')] |
from csignal_tests import *
from array import array
import random
import struct
import socket
import ctypes
import unittest
class TestsBitPacker( unittest.TestCase ):
def test_get_bytes_add_bits( self ):
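    # Pack the same bytes as 4-bit, 2-bit, and then 1-bit chunks, checking that get_bytes returns every accumulated copy intact.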
bitPacker = python_bit_packer_initialize()
self.assertNotEquals( bitPacker, None )
data = "\x12\x34\x56\x78"
for datum in data:
byte = ord( datum )
self.assertEquals( bit_packer_add_bits( ( byte >> 4 ), 4, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( byte, 4, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
string = python_bit_packer_get_bytes( bitPacker )
self.assertNotEquals( string, None )
self.assertEquals( len( string ), len( data ) )
for index in range( len( string ) ):
self.assertEquals( string[ index ], data[ index ] )
for datum in data:
byte = ord( datum )
self.assertEquals( bit_packer_add_bits( ( byte >> 6 ), 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 4 ), 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 2 ), 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( byte, 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
string = python_bit_packer_get_bytes( bitPacker )
self.assertNotEquals( string, None )
self.assertEquals( len( string ), len( data ) * 2 )
for index in range( len( data ) ):
self.assertEquals( string[ index ], data[ index ] )
for index in range( len( data ) ):
self.assertEquals( string[ index + len( data ) ], data[ index ] )
for datum in data:
byte = ord( datum )
self.assertEquals( bit_packer_add_bits( ( byte >> 7 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 6 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 5 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 4 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 3 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 2 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 1 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( byte, 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
string = python_bit_packer_get_bytes( bitPacker )
self.assertNotEquals( string, None )
self.assertEquals( len( string ), len( data ) * 3 )
for index in range( len( data ) ):
self.assertEquals( string[ index ], data[ index ] )
for index in range( len( data ) ):
self.assertEquals( string[ index + len( data ) ], data[ index ] )
for index in range( len( data ) ):
self.assertEquals( string[ index + ( 2 * len( data ) ) ], data[ index ] )
self.assertEquals (
bit_packer_destroy( bitPacker ),
CPC_ERROR_CODE_NO_ERROR
)
def test_get_bytes( self ):
bitPacker = python_bit_packer_initialize()
self.assertNotEquals( bitPacker, None )
data = "Hello"
self.assertEquals (
python_bit_packer_add_bytes (
data,
bitPacker
),
CPC_ERROR_CODE_NO_ERROR
)
string = python_bit_packer_get_bytes( bitPacker )
self.assertNotEquals( string, None )
self.assertEquals( len( string ), len( data ) )
for index in range( len( string ) ):
self.assertEquals( string[ index ], data[ index ] )
self.assertEquals (
bit_packer_destroy( bitPacker ),
CPC_ERROR_CODE_NO_ERROR
)
bitPacker = python_bit_packer_initialize()
data = struct.pack( "I", 1722 )
self.assertEquals (
python_bit_packer_add_bytes (
data,
bitPacker
),
CPC_ERROR_CODE_NO_ERROR
)
string = python_bit_packer_get_bytes( bitPacker )
self.assertNotEquals( string, None )
self.assertEquals( len( string ), len( data ) )
for index in range( len( string ) ):
self.assertEquals( string[ index ], data[ index ] )
self.assertEquals (
bit_packer_destroy( bitPacker ),
CPC_ERROR_CODE_NO_ERROR
)
bitPacker = python_bit_packer_initialize()
data = ""
for index in range( 100 ):
value = 32767 * random.normalvariate( 0, 1 )
value = socket.htonl( struct.unpack( "I", struct.pack( "i", int( value ) ) )[ 0 ] )
if( 0 > value ):
value = struct.pack( "i", value )
value = struct.unpack( "I", value )[ 0 ]
value = struct.pack( "I", value )
data = data + value
self.assertEquals (
python_bit_packer_add_bytes (
data,
bitPacker
),
CPC_ERROR_CODE_NO_ERROR
)
string = python_bit_packer_get_bytes( bitPacker )
self.assertNotEquals( string, None )
self.assertEquals( len( string ), len( data ) )
for index in range( len( string ) ):
self.assertEquals( string[ index ], data[ index ] )
self.assertEquals (
bit_packer_destroy( bitPacker ),
CPC_ERROR_CODE_NO_ERROR
)
def test_bit_packer_negative( self ):
self.assertNotEquals( bit_packer_destroy( None ), CPC_ERROR_CODE_NO_ERROR )
dataBuffer = array( 'B', "1" )
self.assertNotEquals( bit_packer_add_bits( dataBuffer[ 0 ], 8, None ), CPC_ERROR_CODE_NO_ERROR )
self.assertNotEquals( python_bit_packer_add_bytes( "1", None ), CPC_ERROR_CODE_NO_ERROR )
self.assertNotEquals( python_bit_packer_add_bytes( 1, None ), CPC_ERROR_CODE_NO_ERROR )
bitPacker = python_bit_packer_initialize()
self.assertNotEquals( bitPacker, None )
self.assertNotEquals( bit_packer_add_bits( dataBuffer[ 0 ], 12, None ), CPC_ERROR_CODE_NO_ERROR )
self.assertNotEquals( python_bit_packer_add_bytes( None, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_destroy( bitPacker ), CPC_ERROR_CODE_NO_ERROR )
def test_add_bits( self ):
bitPacker = python_bit_packer_initialize()
self.assertNotEquals( bitPacker, None )
dataBuffer = array( 'B', "\x12\x34\x56\x78" )
for byte in dataBuffer:
self.assertEquals( bit_packer_add_bits( ( byte >> 4 ), 4, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( byte, 4, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
for byte in dataBuffer:
self.assertEquals( bit_packer_add_bits( ( byte >> 6 ), 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 4 ), 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 2 ), 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( byte, 2, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
for byte in dataBuffer:
self.assertEquals( bit_packer_add_bits( ( byte >> 7 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 6 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 5 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 4 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 3 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 2 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( ( byte >> 1 ), 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals( bit_packer_add_bits( byte, 1, bitPacker ), CPC_ERROR_CODE_NO_ERROR )
self.assertEquals (
bit_packer_destroy( bitPacker ),
CPC_ERROR_CODE_NO_ERROR
)
def test_add_bytes( self ):
bitPacker = python_bit_packer_initialize()
self.assertNotEquals( bitPacker, None )
data = "Hello"
self.assertEquals (
python_bit_packer_add_bytes (
data,
bitPacker
),
CPC_ERROR_CODE_NO_ERROR
)
data = struct.pack( "I", 1722 )
self.assertEquals (
python_bit_packer_add_bytes (
data,
bitPacker
),
CPC_ERROR_CODE_NO_ERROR
)
data = ""
for index in range( 100 ):
value = 32767 * random.normalvariate( 0, 1 )
value = struct.pack( "i", int( value ) )
data = data + value
self.assertEquals (
python_bit_packer_add_bytes (
data,
bitPacker
),
CPC_ERROR_CODE_NO_ERROR
)
self.assertEquals (
bit_packer_destroy( bitPacker ),
CPC_ERROR_CODE_NO_ERROR
)
def test_initialize_destroy( self ):
bitPacker = python_bit_packer_initialize()
self.assertNotEquals( bitPacker, None )
self.assertEquals (
bit_packer_destroy( bitPacker ),
CPC_ERROR_CODE_NO_ERROR
)
if __name__ == '__main__':
cpc_log_set_log_level( CPC_LOG_LEVEL_ERROR )
csignal_initialize()
unittest.main()
csignal_terminate()
| [
"random.normalvariate",
"array.array",
"struct.pack",
"struct.unpack",
"unittest.main"
] | [((9398, 9413), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9411, 9413), False, 'import unittest\n'), ((3869, 3891), 'struct.pack', 'struct.pack', (['"""I"""', '(1722)'], {}), "('I', 1722)\n", (3880, 3891), False, 'import struct\n'), ((5550, 5565), 'array.array', 'array', (['"""B"""', '"""1"""'], {}), "('B', '1')\n", (5555, 5565), False, 'from array import array\n'), ((6377, 6398), 'array.array', 'array', (['"""B"""', '"""\x124Vx"""'], {}), "('B', '\\x124Vx')\n", (6382, 6398), False, 'from array import array\n'), ((8367, 8389), 'struct.pack', 'struct.pack', (['"""I"""', '(1722)'], {}), "('I', 1722)\n", (8378, 8389), False, 'import struct\n'), ((4809, 4832), 'struct.pack', 'struct.pack', (['"""I"""', 'value'], {}), "('I', value)\n", (4820, 4832), False, 'import struct\n'), ((4560, 4586), 'random.normalvariate', 'random.normalvariate', (['(0)', '(1)'], {}), '(0, 1)\n', (4580, 4586), False, 'import random\n'), ((4719, 4742), 'struct.pack', 'struct.pack', (['"""i"""', 'value'], {}), "('i', value)\n", (4730, 4742), False, 'import struct\n'), ((8643, 8669), 'random.normalvariate', 'random.normalvariate', (['(0)', '(1)'], {}), '(0, 1)\n', (8663, 8669), False, 'import random\n'), ((4761, 4786), 'struct.unpack', 'struct.unpack', (['"""I"""', 'value'], {}), "('I', value)\n", (4774, 4786), False, 'import struct\n')] |
#!/usr/bin/env python
#
# Cloudlet Infrastructure for Mobile Computing
# - Task Assistance
#
# Author:
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# Copyright (C) 2011-2013 Carnegie Mellon University
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import multiprocessing
import os
import pprint
import Queue
import socket
import struct
import sys
import threading
import time
from base64 import b64decode, b64encode
import cv2
import numpy as np
from gabrieltool.statemachine import runner, wca_state_machine_pb2, fsm
import gabriel
import gabriel.proxy
LOG = gabriel.logging.getLogger(__name__)
def raw2cv_image(raw_data, gray_scale=False):
img_array = np.asarray(bytearray(raw_data), dtype=np.int8)
if gray_scale:
cv_image = cv2.imdecode(img_array, 0)
else:
cv_image = cv2.imdecode(img_array, -1)
return cv_image
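# Cognitive engine that feeds each received frame to a gabrieltool state machine and returns its guidance (speech/image) as JSON.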
class CookingProxy(gabriel.proxy.CognitiveProcessThread):
def __init__(self, fsm_path, image_queue, output_queue, engine_id, log_flag=True):
super(CookingProxy, self).__init__(image_queue, output_queue, engine_id)
self.log_flag = log_flag
self._fsm = None
with open(fsm_path, 'rb') as f:
self._fsm = fsm.StateMachine.from_bytes(f.read())
self._fsm_runner = runner.Runner(self._fsm)
def terminate(self):
super(CookingProxy, self).terminate()
def handle(self, header, data):
LOG.info("received new image")
# status success is needed
header['status'] = "success"
# default
result = {}
img = raw2cv_image(data)
inst = self._fsm_runner.feed(img)
# gotcha: the Gabriel client expects the absence of 'speech' and 'image'
# keys when there is no such feedback
if inst.audio:
result['speech'] = inst.audio
if inst.image:
result['image'] = b64encode(inst.image)
LOG.info('Current State: {}'.format(self._fsm_runner.current_state))
return json.dumps(result)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Gabriel FSM Cognitive Engine')
parser.add_argument('--fsm_path', action="store", dest="fsm_path", help='Path to the fsm', required=True)
args, unknown = parser.parse_known_args()
settings = gabriel.util.process_command_line(unknown)
ip_addr, port = gabriel.network.get_registry_server_address(settings.address)
service_list = gabriel.network.get_service_list(ip_addr, port)
LOG.info("Gabriel Server :")
LOG.info(pprint.pformat(service_list))
video_ip = service_list.get(gabriel.ServiceMeta.VIDEO_TCP_STREAMING_IP)
video_port = service_list.get(gabriel.ServiceMeta.VIDEO_TCP_STREAMING_PORT)
ucomm_ip = service_list.get(gabriel.ServiceMeta.UCOMM_SERVER_IP)
ucomm_port = service_list.get(gabriel.ServiceMeta.UCOMM_SERVER_PORT)
# image receiving thread
image_queue = Queue.Queue(gabriel.Const.APP_LEVEL_TOKEN_SIZE)
print("TOKEN SIZE OF OFFLOADING ENGINE: %d" % gabriel.Const.APP_LEVEL_TOKEN_SIZE)
video_streaming = gabriel.proxy.SensorReceiveClient((video_ip, video_port), image_queue)
video_streaming.start()
video_streaming.isDaemon = True
# app proxy
result_queue = multiprocessing.Queue()
app_proxy = CookingProxy(args.fsm_path, image_queue, result_queue, engine_id="Sandwich")
app_proxy.start()
app_proxy.isDaemon = True
# result pub/sub
result_pub = gabriel.proxy.ResultPublishClient((ucomm_ip, ucomm_port), result_queue)
result_pub.start()
result_pub.isDaemon = True
try:
while True:
time.sleep(1)
except Exception as e:
pass
except KeyboardInterrupt as e:
sys.stdout.write("user exits\n")
finally:
if video_streaming is not None:
video_streaming.terminate()
if app_proxy is not None:
app_proxy.terminate()
result_pub.terminate()
| [
"gabriel.logging.getLogger",
"gabriel.proxy.SensorReceiveClient",
"argparse.ArgumentParser",
"gabriel.network.get_service_list",
"base64.b64encode",
"json.dumps",
"pprint.pformat",
"gabriel.proxy.ResultPublishClient",
"time.sleep",
"cv2.imdecode",
"gabrieltool.statemachine.runner.Runner",
"gabriel.util.process_command_line",
"multiprocessing.Queue",
"Queue.Queue",
"gabriel.network.get_registry_server_address",
"sys.stdout.write"
] | [((1104, 1139), 'gabriel.logging.getLogger', 'gabriel.logging.getLogger', (['__name__'], {}), '(__name__)\n', (1129, 1139), False, 'import gabriel\n'), ((2608, 2675), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Gabriel FSM Cognitive Engine"""'}), "(description='Gabriel FSM Cognitive Engine')\n", (2631, 2675), False, 'import argparse\n'), ((2848, 2890), 'gabriel.util.process_command_line', 'gabriel.util.process_command_line', (['unknown'], {}), '(unknown)\n', (2881, 2890), False, 'import gabriel\n'), ((2912, 2973), 'gabriel.network.get_registry_server_address', 'gabriel.network.get_registry_server_address', (['settings.address'], {}), '(settings.address)\n', (2955, 2973), False, 'import gabriel\n'), ((2993, 3040), 'gabriel.network.get_service_list', 'gabriel.network.get_service_list', (['ip_addr', 'port'], {}), '(ip_addr, port)\n', (3025, 3040), False, 'import gabriel\n'), ((3464, 3511), 'Queue.Queue', 'Queue.Queue', (['gabriel.Const.APP_LEVEL_TOKEN_SIZE'], {}), '(gabriel.Const.APP_LEVEL_TOKEN_SIZE)\n', (3475, 3511), False, 'import Queue\n'), ((3620, 3690), 'gabriel.proxy.SensorReceiveClient', 'gabriel.proxy.SensorReceiveClient', (['(video_ip, video_port)', 'image_queue'], {}), '((video_ip, video_port), image_queue)\n', (3653, 3690), False, 'import gabriel\n'), ((3791, 3814), 'multiprocessing.Queue', 'multiprocessing.Queue', ([], {}), '()\n', (3812, 3814), False, 'import multiprocessing\n'), ((4000, 4071), 'gabriel.proxy.ResultPublishClient', 'gabriel.proxy.ResultPublishClient', (['(ucomm_ip, ucomm_port)', 'result_queue'], {}), '((ucomm_ip, ucomm_port), result_queue)\n', (4033, 4071), False, 'import gabriel\n'), ((1289, 1315), 'cv2.imdecode', 'cv2.imdecode', (['img_array', '(0)'], {}), '(img_array, 0)\n', (1301, 1315), False, 'import cv2\n'), ((1345, 1372), 'cv2.imdecode', 'cv2.imdecode', (['img_array', '(-1)'], {}), '(img_array, -1)\n', (1357, 1372), False, 'import cv2\n'), ((1808, 1832), 'gabrieltool.statemachine.runner.Runner', 'runner.Runner', (['self._fsm'], {}), '(self._fsm)\n', (1821, 1832), False, 'from gabrieltool.statemachine import runner, wca_state_machine_pb2, fsm\n'), ((2527, 2545), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (2537, 2545), False, 'import json\n'), ((3087, 3115), 'pprint.pformat', 'pprint.pformat', (['service_list'], {}), '(service_list)\n', (3101, 3115), False, 'import pprint\n'), ((2413, 2434), 'base64.b64encode', 'b64encode', (['inst.image'], {}), '(inst.image)\n', (2422, 2434), False, 'from base64 import b64decode, b64encode\n'), ((4168, 4181), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4178, 4181), False, 'import time\n'), ((4265, 4297), 'sys.stdout.write', 'sys.stdout.write', (['"""user exits\n"""'], {}), "('user exits\\n')\n", (4281, 4297), False, 'import sys\n')] |
import os
from io import BytesIO as BaseBytesIO
from uuid import uuid4
from django.test import TestCase as BaseTestCase
from django_webtest import WebTest as BaseWebTest
from elastic_panel import panel
from feedhq import es
from feedhq.utils import get_redis_connection
from rache import job_key
from requests import Response
TEST_DATA = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data')
_old_pretty_json = panel._pretty_json
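# Wrap elastic_panel's pretty-printer so it always returns bytes.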
def _pretty_json(data):
pretty = _old_pretty_json(data)
if isinstance(pretty, str):
pretty = pretty.encode()
return pretty
panel._pretty_json = _pretty_json
class BytesIO(BaseBytesIO):
def read(self, *args, **kwargs):
kwargs.pop('decode_content', None)
return super().read(*args, **kwargs)
def data_file(name):
return os.path.join(TEST_DATA, name)
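# Build a canned requests.Response for tests, optionally with a fake redirect recorded in its history.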
def responses(code, path=None, redirection=None, data=None,
url=None,
headers=None):
if headers is None:
headers = {'Content-Type': 'text/xml'}
response = Response()
response.status_code = code
if path is not None and redirection is None:
with open(data_file(path), 'rb') as f:
response.raw = BytesIO(f.read())
elif data is not None:
response._content = data.encode('utf-8')
if redirection is not None:
temp = Response()
temp.status_code = 301 if 'permanent' in redirection else 302
temp.url = path
response.history.append(temp)
response.url = redirection
headers['location'] = path
if url is None:
if redirection is not None:
url = redirection
else:
url = 'https://example.com/{}'.format(str(uuid4()))
response.url = url
response.headers = headers
return response
def resolve_url(url, *args, **kwargs):
response = Response()
response.status_code = 200
response.url = url
return response
class ESTests(object):
def counts(self, user, **kwargs):
es_entries = es.manager.user(user)
for name, filters in kwargs.items():
es_entries = es_entries.query_aggregate(name, **filters)
results = es_entries.fetch(per_page=0)['aggregations']['entries']
return {name: results[name]['doc_count'] for name in kwargs}
class TestCase(ESTests, BaseTestCase):
def tearDown(self): # noqa
"""Clean up the rache:* redis keys"""
get_redis_connection().flushdb()
setUp = tearDown
class WebTest(ESTests, BaseWebTest):
pass
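# Update fields of a rache job hash in Redis; passing None for a value deletes that field.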
def patch_job(name, **kwargs):
redis = get_redis_connection()
for key, value in list(kwargs.items()):
if value is None:
redis.hdel(job_key(name), key)
kwargs.pop(key)
redis.hmset(job_key(name), kwargs)
| [
"rache.job_key",
"feedhq.utils.get_redis_connection",
"feedhq.es.manager.user",
"requests.Response",
"os.path.join",
"uuid.uuid4",
"os.path.dirname"
] | [((815, 844), 'os.path.join', 'os.path.join', (['TEST_DATA', 'name'], {}), '(TEST_DATA, name)\n', (827, 844), False, 'import os\n'), ((1046, 1056), 'requests.Response', 'Response', ([], {}), '()\n', (1054, 1056), False, 'from requests import Response\n'), ((1860, 1870), 'requests.Response', 'Response', ([], {}), '()\n', (1868, 1870), False, 'from requests import Response\n'), ((2582, 2604), 'feedhq.utils.get_redis_connection', 'get_redis_connection', ([], {}), '()\n', (2602, 2604), False, 'from feedhq.utils import get_redis_connection\n'), ((370, 395), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (385, 395), False, 'import os\n'), ((1353, 1363), 'requests.Response', 'Response', ([], {}), '()\n', (1361, 1363), False, 'from requests import Response\n'), ((2029, 2050), 'feedhq.es.manager.user', 'es.manager.user', (['user'], {}), '(user)\n', (2044, 2050), False, 'from feedhq import es\n'), ((2762, 2775), 'rache.job_key', 'job_key', (['name'], {}), '(name)\n', (2769, 2775), False, 'from rache import job_key\n'), ((2435, 2457), 'feedhq.utils.get_redis_connection', 'get_redis_connection', ([], {}), '()\n', (2455, 2457), False, 'from feedhq.utils import get_redis_connection\n'), ((2698, 2711), 'rache.job_key', 'job_key', (['name'], {}), '(name)\n', (2705, 2711), False, 'from rache import job_key\n'), ((1720, 1727), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1725, 1727), False, 'from uuid import uuid4\n')] |
'''tzinfo timezone information for Portugal.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Portugal(DstTzInfo):
'''Portugal timezone definition. See datetime.tzinfo for details'''
zone = 'Portugal'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1912,1,1,0,36,32),
d(1916,6,17,23,0,0),
d(1916,11,1,0,0,0),
d(1917,2,28,23,0,0),
d(1917,10,14,23,0,0),
d(1918,3,1,23,0,0),
d(1918,10,14,23,0,0),
d(1919,2,28,23,0,0),
d(1919,10,14,23,0,0),
d(1920,2,29,23,0,0),
d(1920,10,14,23,0,0),
d(1921,2,28,23,0,0),
d(1921,10,14,23,0,0),
d(1924,4,16,23,0,0),
d(1924,10,14,23,0,0),
d(1926,4,17,23,0,0),
d(1926,10,2,23,0,0),
d(1927,4,9,23,0,0),
d(1927,10,1,23,0,0),
d(1928,4,14,23,0,0),
d(1928,10,6,23,0,0),
d(1929,4,20,23,0,0),
d(1929,10,5,23,0,0),
d(1931,4,18,23,0,0),
d(1931,10,3,23,0,0),
d(1932,4,2,23,0,0),
d(1932,10,1,23,0,0),
d(1934,4,7,23,0,0),
d(1934,10,6,23,0,0),
d(1935,3,30,23,0,0),
d(1935,10,5,23,0,0),
d(1936,4,18,23,0,0),
d(1936,10,3,23,0,0),
d(1937,4,3,23,0,0),
d(1937,10,2,23,0,0),
d(1938,3,26,23,0,0),
d(1938,10,1,23,0,0),
d(1939,4,15,23,0,0),
d(1939,11,18,23,0,0),
d(1940,2,24,23,0,0),
d(1940,10,5,23,0,0),
d(1941,4,5,23,0,0),
d(1941,10,5,23,0,0),
d(1942,3,14,23,0,0),
d(1942,4,25,22,0,0),
d(1942,8,15,22,0,0),
d(1942,10,24,23,0,0),
d(1943,3,13,23,0,0),
d(1943,4,17,22,0,0),
d(1943,8,28,22,0,0),
d(1943,10,30,23,0,0),
d(1944,3,11,23,0,0),
d(1944,4,22,22,0,0),
d(1944,8,26,22,0,0),
d(1944,10,28,23,0,0),
d(1945,3,10,23,0,0),
d(1945,4,21,22,0,0),
d(1945,8,25,22,0,0),
d(1945,10,27,23,0,0),
d(1946,4,6,23,0,0),
d(1946,10,5,23,0,0),
d(1947,4,6,2,0,0),
d(1947,10,5,2,0,0),
d(1948,4,4,2,0,0),
d(1948,10,3,2,0,0),
d(1949,4,3,2,0,0),
d(1949,10,2,2,0,0),
d(1951,4,1,2,0,0),
d(1951,10,7,2,0,0),
d(1952,4,6,2,0,0),
d(1952,10,5,2,0,0),
d(1953,4,5,2,0,0),
d(1953,10,4,2,0,0),
d(1954,4,4,2,0,0),
d(1954,10,3,2,0,0),
d(1955,4,3,2,0,0),
d(1955,10,2,2,0,0),
d(1956,4,1,2,0,0),
d(1956,10,7,2,0,0),
d(1957,4,7,2,0,0),
d(1957,10,6,2,0,0),
d(1958,4,6,2,0,0),
d(1958,10,5,2,0,0),
d(1959,4,5,2,0,0),
d(1959,10,4,2,0,0),
d(1960,4,3,2,0,0),
d(1960,10,2,2,0,0),
d(1961,4,2,2,0,0),
d(1961,10,1,2,0,0),
d(1962,4,1,2,0,0),
d(1962,10,7,2,0,0),
d(1963,4,7,2,0,0),
d(1963,10,6,2,0,0),
d(1964,4,5,2,0,0),
d(1964,10,4,2,0,0),
d(1965,4,4,2,0,0),
d(1965,10,3,2,0,0),
d(1966,4,3,2,0,0),
d(1976,9,26,0,0,0),
d(1977,3,27,0,0,0),
d(1977,9,25,0,0,0),
d(1978,4,2,0,0,0),
d(1978,10,1,0,0,0),
d(1979,4,1,0,0,0),
d(1979,9,30,1,0,0),
d(1980,3,30,0,0,0),
d(1980,9,28,1,0,0),
d(1981,3,29,1,0,0),
d(1981,9,27,1,0,0),
d(1982,3,28,1,0,0),
d(1982,9,26,1,0,0),
d(1983,3,27,2,0,0),
d(1983,9,25,1,0,0),
d(1984,3,25,1,0,0),
d(1984,9,30,1,0,0),
d(1985,3,31,1,0,0),
d(1985,9,29,1,0,0),
d(1986,3,30,1,0,0),
d(1986,9,28,1,0,0),
d(1987,3,29,1,0,0),
d(1987,9,27,1,0,0),
d(1988,3,27,1,0,0),
d(1988,9,25,1,0,0),
d(1989,3,26,1,0,0),
d(1989,9,24,1,0,0),
d(1990,3,25,1,0,0),
d(1990,9,30,1,0,0),
d(1991,3,31,1,0,0),
d(1991,9,29,1,0,0),
d(1992,3,29,1,0,0),
d(1992,9,27,1,0,0),
d(1993,3,28,1,0,0),
d(1993,9,26,1,0,0),
d(1994,3,27,1,0,0),
d(1994,9,25,1,0,0),
d(1995,3,26,1,0,0),
d(1995,9,24,1,0,0),
d(1996,3,31,1,0,0),
d(1996,10,27,1,0,0),
d(1997,3,30,1,0,0),
d(1997,10,26,1,0,0),
d(1998,3,29,1,0,0),
d(1998,10,25,1,0,0),
d(1999,3,28,1,0,0),
d(1999,10,31,1,0,0),
d(2000,3,26,1,0,0),
d(2000,10,29,1,0,0),
d(2001,3,25,1,0,0),
d(2001,10,28,1,0,0),
d(2002,3,31,1,0,0),
d(2002,10,27,1,0,0),
d(2003,3,30,1,0,0),
d(2003,10,26,1,0,0),
d(2004,3,28,1,0,0),
d(2004,10,31,1,0,0),
d(2005,3,27,1,0,0),
d(2005,10,30,1,0,0),
d(2006,3,26,1,0,0),
d(2006,10,29,1,0,0),
d(2007,3,25,1,0,0),
d(2007,10,28,1,0,0),
d(2008,3,30,1,0,0),
d(2008,10,26,1,0,0),
d(2009,3,29,1,0,0),
d(2009,10,25,1,0,0),
d(2010,3,28,1,0,0),
d(2010,10,31,1,0,0),
d(2011,3,27,1,0,0),
d(2011,10,30,1,0,0),
d(2012,3,25,1,0,0),
d(2012,10,28,1,0,0),
d(2013,3,31,1,0,0),
d(2013,10,27,1,0,0),
d(2014,3,30,1,0,0),
d(2014,10,26,1,0,0),
d(2015,3,29,1,0,0),
d(2015,10,25,1,0,0),
d(2016,3,27,1,0,0),
d(2016,10,30,1,0,0),
d(2017,3,26,1,0,0),
d(2017,10,29,1,0,0),
d(2018,3,25,1,0,0),
d(2018,10,28,1,0,0),
d(2019,3,31,1,0,0),
d(2019,10,27,1,0,0),
d(2020,3,29,1,0,0),
d(2020,10,25,1,0,0),
d(2021,3,28,1,0,0),
d(2021,10,31,1,0,0),
d(2022,3,27,1,0,0),
d(2022,10,30,1,0,0),
d(2023,3,26,1,0,0),
d(2023,10,29,1,0,0),
d(2024,3,31,1,0,0),
d(2024,10,27,1,0,0),
d(2025,3,30,1,0,0),
d(2025,10,26,1,0,0),
d(2026,3,29,1,0,0),
d(2026,10,25,1,0,0),
d(2027,3,28,1,0,0),
d(2027,10,31,1,0,0),
d(2028,3,26,1,0,0),
d(2028,10,29,1,0,0),
d(2029,3,25,1,0,0),
d(2029,10,28,1,0,0),
d(2030,3,31,1,0,0),
d(2030,10,27,1,0,0),
d(2031,3,30,1,0,0),
d(2031,10,26,1,0,0),
d(2032,3,28,1,0,0),
d(2032,10,31,1,0,0),
d(2033,3,27,1,0,0),
d(2033,10,30,1,0,0),
d(2034,3,26,1,0,0),
d(2034,10,29,1,0,0),
d(2035,3,25,1,0,0),
d(2035,10,28,1,0,0),
d(2036,3,30,1,0,0),
d(2036,10,26,1,0,0),
d(2037,3,29,1,0,0),
d(2037,10,25,1,0,0),
]
_transition_info = [
i(-2220,0,'LMT'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(7200,7200,'WEMT'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(7200,7200,'WEMT'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(7200,7200,'WEMT'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(7200,7200,'WEMT'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,0,'CET'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(7200,3600,'CEST'),
i(3600,0,'CET'),
i(3600,0,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
i(3600,3600,'WEST'),
i(0,0,'WET'),
]
Portugal = Portugal()
| [
"pytz.tzinfo.memorized_ttinfo",
"pytz.tzinfo.memorized_datetime"
] | [((330, 349), 'pytz.tzinfo.memorized_datetime', 'd', (['(1)', '(1)', '(1)', '(0)', '(0)', '(0)'], {}), '(1, 1, 1, 0, 0, 0)\n', (331, 349), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((346, 370), 'pytz.tzinfo.memorized_datetime', 'd', (['(1912)', '(1)', '(1)', '(0)', '(36)', '(32)'], {}), '(1912, 1, 1, 0, 36, 32)\n', (347, 370), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((367, 391), 'pytz.tzinfo.memorized_datetime', 'd', (['(1916)', '(6)', '(17)', '(23)', '(0)', '(0)'], {}), '(1916, 6, 17, 23, 0, 0)\n', (368, 391), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((388, 411), 'pytz.tzinfo.memorized_datetime', 'd', (['(1916)', '(11)', '(1)', '(0)', '(0)', '(0)'], {}), '(1916, 11, 1, 0, 0, 0)\n', (389, 411), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((408, 432), 'pytz.tzinfo.memorized_datetime', 'd', (['(1917)', '(2)', '(28)', '(23)', '(0)', '(0)'], {}), '(1917, 2, 28, 23, 0, 0)\n', (409, 432), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((429, 454), 'pytz.tzinfo.memorized_datetime', 'd', (['(1917)', '(10)', '(14)', '(23)', '(0)', '(0)'], {}), '(1917, 10, 14, 23, 0, 0)\n', (430, 454), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((451, 474), 'pytz.tzinfo.memorized_datetime', 'd', (['(1918)', '(3)', '(1)', '(23)', '(0)', '(0)'], {}), '(1918, 3, 1, 23, 0, 0)\n', (452, 474), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((471, 496), 'pytz.tzinfo.memorized_datetime', 'd', (['(1918)', '(10)', '(14)', '(23)', '(0)', '(0)'], {}), '(1918, 10, 14, 23, 0, 0)\n', (472, 496), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((493, 517), 'pytz.tzinfo.memorized_datetime', 'd', (['(1919)', '(2)', '(28)', '(23)', '(0)', '(0)'], {}), '(1919, 2, 28, 23, 0, 0)\n', (494, 517), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((514, 539), 'pytz.tzinfo.memorized_datetime', 'd', (['(1919)', '(10)', '(14)', '(23)', '(0)', '(0)'], {}), '(1919, 10, 14, 23, 0, 0)\n', (515, 539), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((536, 560), 'pytz.tzinfo.memorized_datetime', 'd', (['(1920)', '(2)', '(29)', '(23)', '(0)', '(0)'], {}), '(1920, 2, 29, 23, 0, 0)\n', (537, 560), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((557, 582), 'pytz.tzinfo.memorized_datetime', 'd', (['(1920)', '(10)', '(14)', '(23)', '(0)', '(0)'], {}), '(1920, 10, 14, 23, 0, 0)\n', (558, 582), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((579, 603), 'pytz.tzinfo.memorized_datetime', 'd', (['(1921)', '(2)', '(28)', '(23)', '(0)', '(0)'], {}), '(1921, 2, 28, 23, 0, 0)\n', (580, 603), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((600, 625), 'pytz.tzinfo.memorized_datetime', 'd', (['(1921)', '(10)', '(14)', '(23)', '(0)', '(0)'], {}), '(1921, 10, 14, 23, 0, 0)\n', (601, 625), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((622, 646), 'pytz.tzinfo.memorized_datetime', 'd', (['(1924)', '(4)', '(16)', '(23)', '(0)', '(0)'], {}), '(1924, 4, 16, 23, 0, 0)\n', (623, 646), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((643, 668), 'pytz.tzinfo.memorized_datetime', 'd', (['(1924)', '(10)', '(14)', '(23)', '(0)', '(0)'], {}), '(1924, 10, 14, 23, 0, 0)\n', (644, 668), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((665, 689), 'pytz.tzinfo.memorized_datetime', 'd', (['(1926)', '(4)', '(17)', '(23)', '(0)', '(0)'], {}), '(1926, 4, 17, 23, 0, 0)\n', (666, 689), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((686, 710), 
'pytz.tzinfo.memorized_datetime', 'd', (['(1926)', '(10)', '(2)', '(23)', '(0)', '(0)'], {}), '(1926, 10, 2, 23, 0, 0)\n', (687, 710), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((707, 730), 'pytz.tzinfo.memorized_datetime', 'd', (['(1927)', '(4)', '(9)', '(23)', '(0)', '(0)'], {}), '(1927, 4, 9, 23, 0, 0)\n', (708, 730), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((727, 751), 'pytz.tzinfo.memorized_datetime', 'd', (['(1927)', '(10)', '(1)', '(23)', '(0)', '(0)'], {}), '(1927, 10, 1, 23, 0, 0)\n', (728, 751), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((748, 772), 'pytz.tzinfo.memorized_datetime', 'd', (['(1928)', '(4)', '(14)', '(23)', '(0)', '(0)'], {}), '(1928, 4, 14, 23, 0, 0)\n', (749, 772), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((769, 793), 'pytz.tzinfo.memorized_datetime', 'd', (['(1928)', '(10)', '(6)', '(23)', '(0)', '(0)'], {}), '(1928, 10, 6, 23, 0, 0)\n', (770, 793), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((790, 814), 'pytz.tzinfo.memorized_datetime', 'd', (['(1929)', '(4)', '(20)', '(23)', '(0)', '(0)'], {}), '(1929, 4, 20, 23, 0, 0)\n', (791, 814), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((811, 835), 'pytz.tzinfo.memorized_datetime', 'd', (['(1929)', '(10)', '(5)', '(23)', '(0)', '(0)'], {}), '(1929, 10, 5, 23, 0, 0)\n', (812, 835), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((832, 856), 'pytz.tzinfo.memorized_datetime', 'd', (['(1931)', '(4)', '(18)', '(23)', '(0)', '(0)'], {}), '(1931, 4, 18, 23, 0, 0)\n', (833, 856), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((853, 877), 'pytz.tzinfo.memorized_datetime', 'd', (['(1931)', '(10)', '(3)', '(23)', '(0)', '(0)'], {}), '(1931, 10, 3, 23, 0, 0)\n', (854, 877), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((874, 897), 'pytz.tzinfo.memorized_datetime', 'd', (['(1932)', '(4)', '(2)', '(23)', '(0)', '(0)'], {}), '(1932, 4, 2, 23, 0, 0)\n', (875, 897), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((894, 918), 'pytz.tzinfo.memorized_datetime', 'd', (['(1932)', '(10)', '(1)', '(23)', '(0)', '(0)'], {}), '(1932, 10, 1, 23, 0, 0)\n', (895, 918), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((915, 938), 'pytz.tzinfo.memorized_datetime', 'd', (['(1934)', '(4)', '(7)', '(23)', '(0)', '(0)'], {}), '(1934, 4, 7, 23, 0, 0)\n', (916, 938), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((935, 959), 'pytz.tzinfo.memorized_datetime', 'd', (['(1934)', '(10)', '(6)', '(23)', '(0)', '(0)'], {}), '(1934, 10, 6, 23, 0, 0)\n', (936, 959), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((956, 980), 'pytz.tzinfo.memorized_datetime', 'd', (['(1935)', '(3)', '(30)', '(23)', '(0)', '(0)'], {}), '(1935, 3, 30, 23, 0, 0)\n', (957, 980), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((977, 1001), 'pytz.tzinfo.memorized_datetime', 'd', (['(1935)', '(10)', '(5)', '(23)', '(0)', '(0)'], {}), '(1935, 10, 5, 23, 0, 0)\n', (978, 1001), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((998, 1022), 'pytz.tzinfo.memorized_datetime', 'd', (['(1936)', '(4)', '(18)', '(23)', '(0)', '(0)'], {}), '(1936, 4, 18, 23, 0, 0)\n', (999, 1022), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1019, 1043), 'pytz.tzinfo.memorized_datetime', 'd', (['(1936)', '(10)', '(3)', '(23)', '(0)', '(0)'], {}), '(1936, 10, 3, 23, 0, 0)\n', (1020, 1043), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1040, 1063), 
'pytz.tzinfo.memorized_datetime', 'd', (['(1937)', '(4)', '(3)', '(23)', '(0)', '(0)'], {}), '(1937, 4, 3, 23, 0, 0)\n', (1041, 1063), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1060, 1084), 'pytz.tzinfo.memorized_datetime', 'd', (['(1937)', '(10)', '(2)', '(23)', '(0)', '(0)'], {}), '(1937, 10, 2, 23, 0, 0)\n', (1061, 1084), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1081, 1105), 'pytz.tzinfo.memorized_datetime', 'd', (['(1938)', '(3)', '(26)', '(23)', '(0)', '(0)'], {}), '(1938, 3, 26, 23, 0, 0)\n', (1082, 1105), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1102, 1126), 'pytz.tzinfo.memorized_datetime', 'd', (['(1938)', '(10)', '(1)', '(23)', '(0)', '(0)'], {}), '(1938, 10, 1, 23, 0, 0)\n', (1103, 1126), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1123, 1147), 'pytz.tzinfo.memorized_datetime', 'd', (['(1939)', '(4)', '(15)', '(23)', '(0)', '(0)'], {}), '(1939, 4, 15, 23, 0, 0)\n', (1124, 1147), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1144, 1169), 'pytz.tzinfo.memorized_datetime', 'd', (['(1939)', '(11)', '(18)', '(23)', '(0)', '(0)'], {}), '(1939, 11, 18, 23, 0, 0)\n', (1145, 1169), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1166, 1190), 'pytz.tzinfo.memorized_datetime', 'd', (['(1940)', '(2)', '(24)', '(23)', '(0)', '(0)'], {}), '(1940, 2, 24, 23, 0, 0)\n', (1167, 1190), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1187, 1211), 'pytz.tzinfo.memorized_datetime', 'd', (['(1940)', '(10)', '(5)', '(23)', '(0)', '(0)'], {}), '(1940, 10, 5, 23, 0, 0)\n', (1188, 1211), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1208, 1231), 'pytz.tzinfo.memorized_datetime', 'd', (['(1941)', '(4)', '(5)', '(23)', '(0)', '(0)'], {}), '(1941, 4, 5, 23, 0, 0)\n', (1209, 1231), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1228, 1252), 'pytz.tzinfo.memorized_datetime', 'd', (['(1941)', '(10)', '(5)', '(23)', '(0)', '(0)'], {}), '(1941, 10, 5, 23, 0, 0)\n', (1229, 1252), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1249, 1273), 'pytz.tzinfo.memorized_datetime', 'd', (['(1942)', '(3)', '(14)', '(23)', '(0)', '(0)'], {}), '(1942, 3, 14, 23, 0, 0)\n', (1250, 1273), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1270, 1294), 'pytz.tzinfo.memorized_datetime', 'd', (['(1942)', '(4)', '(25)', '(22)', '(0)', '(0)'], {}), '(1942, 4, 25, 22, 0, 0)\n', (1271, 1294), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1291, 1315), 'pytz.tzinfo.memorized_datetime', 'd', (['(1942)', '(8)', '(15)', '(22)', '(0)', '(0)'], {}), '(1942, 8, 15, 22, 0, 0)\n', (1292, 1315), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1312, 1337), 'pytz.tzinfo.memorized_datetime', 'd', (['(1942)', '(10)', '(24)', '(23)', '(0)', '(0)'], {}), '(1942, 10, 24, 23, 0, 0)\n', (1313, 1337), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1334, 1358), 'pytz.tzinfo.memorized_datetime', 'd', (['(1943)', '(3)', '(13)', '(23)', '(0)', '(0)'], {}), '(1943, 3, 13, 23, 0, 0)\n', (1335, 1358), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1355, 1379), 'pytz.tzinfo.memorized_datetime', 'd', (['(1943)', '(4)', '(17)', '(22)', '(0)', '(0)'], {}), '(1943, 4, 17, 22, 0, 0)\n', (1356, 1379), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1376, 1400), 'pytz.tzinfo.memorized_datetime', 'd', (['(1943)', '(8)', '(28)', '(22)', '(0)', '(0)'], {}), '(1943, 8, 28, 22, 0, 0)\n', (1377, 1400), True, 'from pytz.tzinfo 
import memorized_datetime as d\n'), ((1397, 1422), 'pytz.tzinfo.memorized_datetime', 'd', (['(1943)', '(10)', '(30)', '(23)', '(0)', '(0)'], {}), '(1943, 10, 30, 23, 0, 0)\n', (1398, 1422), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1419, 1443), 'pytz.tzinfo.memorized_datetime', 'd', (['(1944)', '(3)', '(11)', '(23)', '(0)', '(0)'], {}), '(1944, 3, 11, 23, 0, 0)\n', (1420, 1443), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1440, 1464), 'pytz.tzinfo.memorized_datetime', 'd', (['(1944)', '(4)', '(22)', '(22)', '(0)', '(0)'], {}), '(1944, 4, 22, 22, 0, 0)\n', (1441, 1464), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1461, 1485), 'pytz.tzinfo.memorized_datetime', 'd', (['(1944)', '(8)', '(26)', '(22)', '(0)', '(0)'], {}), '(1944, 8, 26, 22, 0, 0)\n', (1462, 1485), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1482, 1507), 'pytz.tzinfo.memorized_datetime', 'd', (['(1944)', '(10)', '(28)', '(23)', '(0)', '(0)'], {}), '(1944, 10, 28, 23, 0, 0)\n', (1483, 1507), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1504, 1528), 'pytz.tzinfo.memorized_datetime', 'd', (['(1945)', '(3)', '(10)', '(23)', '(0)', '(0)'], {}), '(1945, 3, 10, 23, 0, 0)\n', (1505, 1528), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1525, 1549), 'pytz.tzinfo.memorized_datetime', 'd', (['(1945)', '(4)', '(21)', '(22)', '(0)', '(0)'], {}), '(1945, 4, 21, 22, 0, 0)\n', (1526, 1549), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1546, 1570), 'pytz.tzinfo.memorized_datetime', 'd', (['(1945)', '(8)', '(25)', '(22)', '(0)', '(0)'], {}), '(1945, 8, 25, 22, 0, 0)\n', (1547, 1570), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1567, 1592), 'pytz.tzinfo.memorized_datetime', 'd', (['(1945)', '(10)', '(27)', '(23)', '(0)', '(0)'], {}), '(1945, 10, 27, 23, 0, 0)\n', (1568, 1592), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1589, 1612), 'pytz.tzinfo.memorized_datetime', 'd', (['(1946)', '(4)', '(6)', '(23)', '(0)', '(0)'], {}), '(1946, 4, 6, 23, 0, 0)\n', (1590, 1612), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1609, 1633), 'pytz.tzinfo.memorized_datetime', 'd', (['(1946)', '(10)', '(5)', '(23)', '(0)', '(0)'], {}), '(1946, 10, 5, 23, 0, 0)\n', (1610, 1633), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1630, 1652), 'pytz.tzinfo.memorized_datetime', 'd', (['(1947)', '(4)', '(6)', '(2)', '(0)', '(0)'], {}), '(1947, 4, 6, 2, 0, 0)\n', (1631, 1652), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1649, 1672), 'pytz.tzinfo.memorized_datetime', 'd', (['(1947)', '(10)', '(5)', '(2)', '(0)', '(0)'], {}), '(1947, 10, 5, 2, 0, 0)\n', (1650, 1672), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1669, 1691), 'pytz.tzinfo.memorized_datetime', 'd', (['(1948)', '(4)', '(4)', '(2)', '(0)', '(0)'], {}), '(1948, 4, 4, 2, 0, 0)\n', (1670, 1691), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1688, 1711), 'pytz.tzinfo.memorized_datetime', 'd', (['(1948)', '(10)', '(3)', '(2)', '(0)', '(0)'], {}), '(1948, 10, 3, 2, 0, 0)\n', (1689, 1711), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1708, 1730), 'pytz.tzinfo.memorized_datetime', 'd', (['(1949)', '(4)', '(3)', '(2)', '(0)', '(0)'], {}), '(1949, 4, 3, 2, 0, 0)\n', (1709, 1730), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1727, 1750), 'pytz.tzinfo.memorized_datetime', 'd', (['(1949)', '(10)', '(2)', '(2)', '(0)', '(0)'], {}), '(1949, 10, 2, 2, 0, 0)\n', 
(1728, 1750), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1747, 1769), 'pytz.tzinfo.memorized_datetime', 'd', (['(1951)', '(4)', '(1)', '(2)', '(0)', '(0)'], {}), '(1951, 4, 1, 2, 0, 0)\n', (1748, 1769), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1766, 1789), 'pytz.tzinfo.memorized_datetime', 'd', (['(1951)', '(10)', '(7)', '(2)', '(0)', '(0)'], {}), '(1951, 10, 7, 2, 0, 0)\n', (1767, 1789), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1786, 1808), 'pytz.tzinfo.memorized_datetime', 'd', (['(1952)', '(4)', '(6)', '(2)', '(0)', '(0)'], {}), '(1952, 4, 6, 2, 0, 0)\n', (1787, 1808), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1805, 1828), 'pytz.tzinfo.memorized_datetime', 'd', (['(1952)', '(10)', '(5)', '(2)', '(0)', '(0)'], {}), '(1952, 10, 5, 2, 0, 0)\n', (1806, 1828), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1825, 1847), 'pytz.tzinfo.memorized_datetime', 'd', (['(1953)', '(4)', '(5)', '(2)', '(0)', '(0)'], {}), '(1953, 4, 5, 2, 0, 0)\n', (1826, 1847), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1844, 1867), 'pytz.tzinfo.memorized_datetime', 'd', (['(1953)', '(10)', '(4)', '(2)', '(0)', '(0)'], {}), '(1953, 10, 4, 2, 0, 0)\n', (1845, 1867), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1864, 1886), 'pytz.tzinfo.memorized_datetime', 'd', (['(1954)', '(4)', '(4)', '(2)', '(0)', '(0)'], {}), '(1954, 4, 4, 2, 0, 0)\n', (1865, 1886), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1883, 1906), 'pytz.tzinfo.memorized_datetime', 'd', (['(1954)', '(10)', '(3)', '(2)', '(0)', '(0)'], {}), '(1954, 10, 3, 2, 0, 0)\n', (1884, 1906), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1903, 1925), 'pytz.tzinfo.memorized_datetime', 'd', (['(1955)', '(4)', '(3)', '(2)', '(0)', '(0)'], {}), '(1955, 4, 3, 2, 0, 0)\n', (1904, 1925), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1922, 1945), 'pytz.tzinfo.memorized_datetime', 'd', (['(1955)', '(10)', '(2)', '(2)', '(0)', '(0)'], {}), '(1955, 10, 2, 2, 0, 0)\n', (1923, 1945), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1942, 1964), 'pytz.tzinfo.memorized_datetime', 'd', (['(1956)', '(4)', '(1)', '(2)', '(0)', '(0)'], {}), '(1956, 4, 1, 2, 0, 0)\n', (1943, 1964), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1961, 1984), 'pytz.tzinfo.memorized_datetime', 'd', (['(1956)', '(10)', '(7)', '(2)', '(0)', '(0)'], {}), '(1956, 10, 7, 2, 0, 0)\n', (1962, 1984), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((1981, 2003), 'pytz.tzinfo.memorized_datetime', 'd', (['(1957)', '(4)', '(7)', '(2)', '(0)', '(0)'], {}), '(1957, 4, 7, 2, 0, 0)\n', (1982, 2003), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2000, 2023), 'pytz.tzinfo.memorized_datetime', 'd', (['(1957)', '(10)', '(6)', '(2)', '(0)', '(0)'], {}), '(1957, 10, 6, 2, 0, 0)\n', (2001, 2023), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2020, 2042), 'pytz.tzinfo.memorized_datetime', 'd', (['(1958)', '(4)', '(6)', '(2)', '(0)', '(0)'], {}), '(1958, 4, 6, 2, 0, 0)\n', (2021, 2042), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2039, 2062), 'pytz.tzinfo.memorized_datetime', 'd', (['(1958)', '(10)', '(5)', '(2)', '(0)', '(0)'], {}), '(1958, 10, 5, 2, 0, 0)\n', (2040, 2062), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2059, 2081), 'pytz.tzinfo.memorized_datetime', 'd', (['(1959)', '(4)', '(5)', '(2)', '(0)', '(0)'], {}), '(1959, 4, 5, 2, 0, 0)\n', 
(2060, 2081), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2078, 2101), 'pytz.tzinfo.memorized_datetime', 'd', (['(1959)', '(10)', '(4)', '(2)', '(0)', '(0)'], {}), '(1959, 10, 4, 2, 0, 0)\n', (2079, 2101), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2098, 2120), 'pytz.tzinfo.memorized_datetime', 'd', (['(1960)', '(4)', '(3)', '(2)', '(0)', '(0)'], {}), '(1960, 4, 3, 2, 0, 0)\n', (2099, 2120), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2117, 2140), 'pytz.tzinfo.memorized_datetime', 'd', (['(1960)', '(10)', '(2)', '(2)', '(0)', '(0)'], {}), '(1960, 10, 2, 2, 0, 0)\n', (2118, 2140), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2137, 2159), 'pytz.tzinfo.memorized_datetime', 'd', (['(1961)', '(4)', '(2)', '(2)', '(0)', '(0)'], {}), '(1961, 4, 2, 2, 0, 0)\n', (2138, 2159), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2156, 2179), 'pytz.tzinfo.memorized_datetime', 'd', (['(1961)', '(10)', '(1)', '(2)', '(0)', '(0)'], {}), '(1961, 10, 1, 2, 0, 0)\n', (2157, 2179), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2176, 2198), 'pytz.tzinfo.memorized_datetime', 'd', (['(1962)', '(4)', '(1)', '(2)', '(0)', '(0)'], {}), '(1962, 4, 1, 2, 0, 0)\n', (2177, 2198), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2195, 2218), 'pytz.tzinfo.memorized_datetime', 'd', (['(1962)', '(10)', '(7)', '(2)', '(0)', '(0)'], {}), '(1962, 10, 7, 2, 0, 0)\n', (2196, 2218), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2215, 2237), 'pytz.tzinfo.memorized_datetime', 'd', (['(1963)', '(4)', '(7)', '(2)', '(0)', '(0)'], {}), '(1963, 4, 7, 2, 0, 0)\n', (2216, 2237), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2234, 2257), 'pytz.tzinfo.memorized_datetime', 'd', (['(1963)', '(10)', '(6)', '(2)', '(0)', '(0)'], {}), '(1963, 10, 6, 2, 0, 0)\n', (2235, 2257), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2254, 2276), 'pytz.tzinfo.memorized_datetime', 'd', (['(1964)', '(4)', '(5)', '(2)', '(0)', '(0)'], {}), '(1964, 4, 5, 2, 0, 0)\n', (2255, 2276), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2273, 2296), 'pytz.tzinfo.memorized_datetime', 'd', (['(1964)', '(10)', '(4)', '(2)', '(0)', '(0)'], {}), '(1964, 10, 4, 2, 0, 0)\n', (2274, 2296), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2293, 2315), 'pytz.tzinfo.memorized_datetime', 'd', (['(1965)', '(4)', '(4)', '(2)', '(0)', '(0)'], {}), '(1965, 4, 4, 2, 0, 0)\n', (2294, 2315), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2312, 2335), 'pytz.tzinfo.memorized_datetime', 'd', (['(1965)', '(10)', '(3)', '(2)', '(0)', '(0)'], {}), '(1965, 10, 3, 2, 0, 0)\n', (2313, 2335), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2332, 2354), 'pytz.tzinfo.memorized_datetime', 'd', (['(1966)', '(4)', '(3)', '(2)', '(0)', '(0)'], {}), '(1966, 4, 3, 2, 0, 0)\n', (2333, 2354), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2351, 2374), 'pytz.tzinfo.memorized_datetime', 'd', (['(1976)', '(9)', '(26)', '(0)', '(0)', '(0)'], {}), '(1976, 9, 26, 0, 0, 0)\n', (2352, 2374), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2371, 2394), 'pytz.tzinfo.memorized_datetime', 'd', (['(1977)', '(3)', '(27)', '(0)', '(0)', '(0)'], {}), '(1977, 3, 27, 0, 0, 0)\n', (2372, 2394), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2391, 2414), 'pytz.tzinfo.memorized_datetime', 'd', (['(1977)', '(9)', '(25)', '(0)', '(0)', '(0)'], {}), '(1977, 9, 25, 0, 0, 
0)\n', (2392, 2414), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2411, 2433), 'pytz.tzinfo.memorized_datetime', 'd', (['(1978)', '(4)', '(2)', '(0)', '(0)', '(0)'], {}), '(1978, 4, 2, 0, 0, 0)\n', (2412, 2433), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2430, 2453), 'pytz.tzinfo.memorized_datetime', 'd', (['(1978)', '(10)', '(1)', '(0)', '(0)', '(0)'], {}), '(1978, 10, 1, 0, 0, 0)\n', (2431, 2453), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2450, 2472), 'pytz.tzinfo.memorized_datetime', 'd', (['(1979)', '(4)', '(1)', '(0)', '(0)', '(0)'], {}), '(1979, 4, 1, 0, 0, 0)\n', (2451, 2472), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2469, 2492), 'pytz.tzinfo.memorized_datetime', 'd', (['(1979)', '(9)', '(30)', '(1)', '(0)', '(0)'], {}), '(1979, 9, 30, 1, 0, 0)\n', (2470, 2492), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2489, 2512), 'pytz.tzinfo.memorized_datetime', 'd', (['(1980)', '(3)', '(30)', '(0)', '(0)', '(0)'], {}), '(1980, 3, 30, 0, 0, 0)\n', (2490, 2512), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2509, 2532), 'pytz.tzinfo.memorized_datetime', 'd', (['(1980)', '(9)', '(28)', '(1)', '(0)', '(0)'], {}), '(1980, 9, 28, 1, 0, 0)\n', (2510, 2532), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2529, 2552), 'pytz.tzinfo.memorized_datetime', 'd', (['(1981)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(1981, 3, 29, 1, 0, 0)\n', (2530, 2552), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2549, 2572), 'pytz.tzinfo.memorized_datetime', 'd', (['(1981)', '(9)', '(27)', '(1)', '(0)', '(0)'], {}), '(1981, 9, 27, 1, 0, 0)\n', (2550, 2572), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2569, 2592), 'pytz.tzinfo.memorized_datetime', 'd', (['(1982)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(1982, 3, 28, 1, 0, 0)\n', (2570, 2592), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2589, 2612), 'pytz.tzinfo.memorized_datetime', 'd', (['(1982)', '(9)', '(26)', '(1)', '(0)', '(0)'], {}), '(1982, 9, 26, 1, 0, 0)\n', (2590, 2612), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2609, 2632), 'pytz.tzinfo.memorized_datetime', 'd', (['(1983)', '(3)', '(27)', '(2)', '(0)', '(0)'], {}), '(1983, 3, 27, 2, 0, 0)\n', (2610, 2632), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2629, 2652), 'pytz.tzinfo.memorized_datetime', 'd', (['(1983)', '(9)', '(25)', '(1)', '(0)', '(0)'], {}), '(1983, 9, 25, 1, 0, 0)\n', (2630, 2652), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2649, 2672), 'pytz.tzinfo.memorized_datetime', 'd', (['(1984)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(1984, 3, 25, 1, 0, 0)\n', (2650, 2672), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2669, 2692), 'pytz.tzinfo.memorized_datetime', 'd', (['(1984)', '(9)', '(30)', '(1)', '(0)', '(0)'], {}), '(1984, 9, 30, 1, 0, 0)\n', (2670, 2692), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2689, 2712), 'pytz.tzinfo.memorized_datetime', 'd', (['(1985)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(1985, 3, 31, 1, 0, 0)\n', (2690, 2712), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2709, 2732), 'pytz.tzinfo.memorized_datetime', 'd', (['(1985)', '(9)', '(29)', '(1)', '(0)', '(0)'], {}), '(1985, 9, 29, 1, 0, 0)\n', (2710, 2732), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2729, 2752), 'pytz.tzinfo.memorized_datetime', 'd', (['(1986)', '(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(1986, 
3, 30, 1, 0, 0)\n', (2730, 2752), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2749, 2772), 'pytz.tzinfo.memorized_datetime', 'd', (['(1986)', '(9)', '(28)', '(1)', '(0)', '(0)'], {}), '(1986, 9, 28, 1, 0, 0)\n', (2750, 2772), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2769, 2792), 'pytz.tzinfo.memorized_datetime', 'd', (['(1987)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(1987, 3, 29, 1, 0, 0)\n', (2770, 2792), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2789, 2812), 'pytz.tzinfo.memorized_datetime', 'd', (['(1987)', '(9)', '(27)', '(1)', '(0)', '(0)'], {}), '(1987, 9, 27, 1, 0, 0)\n', (2790, 2812), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2809, 2832), 'pytz.tzinfo.memorized_datetime', 'd', (['(1988)', '(3)', '(27)', '(1)', '(0)', '(0)'], {}), '(1988, 3, 27, 1, 0, 0)\n', (2810, 2832), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2829, 2852), 'pytz.tzinfo.memorized_datetime', 'd', (['(1988)', '(9)', '(25)', '(1)', '(0)', '(0)'], {}), '(1988, 9, 25, 1, 0, 0)\n', (2830, 2852), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2849, 2872), 'pytz.tzinfo.memorized_datetime', 'd', (['(1989)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(1989, 3, 26, 1, 0, 0)\n', (2850, 2872), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2869, 2892), 'pytz.tzinfo.memorized_datetime', 'd', (['(1989)', '(9)', '(24)', '(1)', '(0)', '(0)'], {}), '(1989, 9, 24, 1, 0, 0)\n', (2870, 2892), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2889, 2912), 'pytz.tzinfo.memorized_datetime', 'd', (['(1990)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(1990, 3, 25, 1, 0, 0)\n', (2890, 2912), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2909, 2932), 'pytz.tzinfo.memorized_datetime', 'd', (['(1990)', '(9)', '(30)', '(1)', '(0)', '(0)'], {}), '(1990, 9, 30, 1, 0, 0)\n', (2910, 2932), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2929, 2952), 'pytz.tzinfo.memorized_datetime', 'd', (['(1991)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(1991, 3, 31, 1, 0, 0)\n', (2930, 2952), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2949, 2972), 'pytz.tzinfo.memorized_datetime', 'd', (['(1991)', '(9)', '(29)', '(1)', '(0)', '(0)'], {}), '(1991, 9, 29, 1, 0, 0)\n', (2950, 2972), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2969, 2992), 'pytz.tzinfo.memorized_datetime', 'd', (['(1992)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(1992, 3, 29, 1, 0, 0)\n', (2970, 2992), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((2989, 3012), 'pytz.tzinfo.memorized_datetime', 'd', (['(1992)', '(9)', '(27)', '(1)', '(0)', '(0)'], {}), '(1992, 9, 27, 1, 0, 0)\n', (2990, 3012), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3009, 3032), 'pytz.tzinfo.memorized_datetime', 'd', (['(1993)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(1993, 3, 28, 1, 0, 0)\n', (3010, 3032), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3029, 3052), 'pytz.tzinfo.memorized_datetime', 'd', (['(1993)', '(9)', '(26)', '(1)', '(0)', '(0)'], {}), '(1993, 9, 26, 1, 0, 0)\n', (3030, 3052), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3049, 3072), 'pytz.tzinfo.memorized_datetime', 'd', (['(1994)', '(3)', '(27)', '(1)', '(0)', '(0)'], {}), '(1994, 3, 27, 1, 0, 0)\n', (3050, 3072), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3069, 3092), 'pytz.tzinfo.memorized_datetime', 'd', (['(1994)', '(9)', '(25)', '(1)', '(0)', 
'(0)'], {}), '(1994, 9, 25, 1, 0, 0)\n', (3070, 3092), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3089, 3112), 'pytz.tzinfo.memorized_datetime', 'd', (['(1995)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(1995, 3, 26, 1, 0, 0)\n', (3090, 3112), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3109, 3132), 'pytz.tzinfo.memorized_datetime', 'd', (['(1995)', '(9)', '(24)', '(1)', '(0)', '(0)'], {}), '(1995, 9, 24, 1, 0, 0)\n', (3110, 3132), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3129, 3152), 'pytz.tzinfo.memorized_datetime', 'd', (['(1996)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(1996, 3, 31, 1, 0, 0)\n', (3130, 3152), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3149, 3173), 'pytz.tzinfo.memorized_datetime', 'd', (['(1996)', '(10)', '(27)', '(1)', '(0)', '(0)'], {}), '(1996, 10, 27, 1, 0, 0)\n', (3150, 3173), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3170, 3193), 'pytz.tzinfo.memorized_datetime', 'd', (['(1997)', '(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(1997, 3, 30, 1, 0, 0)\n', (3171, 3193), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3190, 3214), 'pytz.tzinfo.memorized_datetime', 'd', (['(1997)', '(10)', '(26)', '(1)', '(0)', '(0)'], {}), '(1997, 10, 26, 1, 0, 0)\n', (3191, 3214), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3211, 3234), 'pytz.tzinfo.memorized_datetime', 'd', (['(1998)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(1998, 3, 29, 1, 0, 0)\n', (3212, 3234), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3231, 3255), 'pytz.tzinfo.memorized_datetime', 'd', (['(1998)', '(10)', '(25)', '(1)', '(0)', '(0)'], {}), '(1998, 10, 25, 1, 0, 0)\n', (3232, 3255), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3252, 3275), 'pytz.tzinfo.memorized_datetime', 'd', (['(1999)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(1999, 3, 28, 1, 0, 0)\n', (3253, 3275), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3272, 3296), 'pytz.tzinfo.memorized_datetime', 'd', (['(1999)', '(10)', '(31)', '(1)', '(0)', '(0)'], {}), '(1999, 10, 31, 1, 0, 0)\n', (3273, 3296), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3293, 3316), 'pytz.tzinfo.memorized_datetime', 'd', (['(2000)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(2000, 3, 26, 1, 0, 0)\n', (3294, 3316), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3313, 3337), 'pytz.tzinfo.memorized_datetime', 'd', (['(2000)', '(10)', '(29)', '(1)', '(0)', '(0)'], {}), '(2000, 10, 29, 1, 0, 0)\n', (3314, 3337), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3334, 3357), 'pytz.tzinfo.memorized_datetime', 'd', (['(2001)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(2001, 3, 25, 1, 0, 0)\n', (3335, 3357), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3354, 3378), 'pytz.tzinfo.memorized_datetime', 'd', (['(2001)', '(10)', '(28)', '(1)', '(0)', '(0)'], {}), '(2001, 10, 28, 1, 0, 0)\n', (3355, 3378), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3375, 3398), 'pytz.tzinfo.memorized_datetime', 'd', (['(2002)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(2002, 3, 31, 1, 0, 0)\n', (3376, 3398), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3395, 3419), 'pytz.tzinfo.memorized_datetime', 'd', (['(2002)', '(10)', '(27)', '(1)', '(0)', '(0)'], {}), '(2002, 10, 27, 1, 0, 0)\n', (3396, 3419), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3416, 3439), 'pytz.tzinfo.memorized_datetime', 'd', (['(2003)', 
'(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(2003, 3, 30, 1, 0, 0)\n', (3417, 3439), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3436, 3460), 'pytz.tzinfo.memorized_datetime', 'd', (['(2003)', '(10)', '(26)', '(1)', '(0)', '(0)'], {}), '(2003, 10, 26, 1, 0, 0)\n', (3437, 3460), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3457, 3480), 'pytz.tzinfo.memorized_datetime', 'd', (['(2004)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(2004, 3, 28, 1, 0, 0)\n', (3458, 3480), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3477, 3501), 'pytz.tzinfo.memorized_datetime', 'd', (['(2004)', '(10)', '(31)', '(1)', '(0)', '(0)'], {}), '(2004, 10, 31, 1, 0, 0)\n', (3478, 3501), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3498, 3521), 'pytz.tzinfo.memorized_datetime', 'd', (['(2005)', '(3)', '(27)', '(1)', '(0)', '(0)'], {}), '(2005, 3, 27, 1, 0, 0)\n', (3499, 3521), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3518, 3542), 'pytz.tzinfo.memorized_datetime', 'd', (['(2005)', '(10)', '(30)', '(1)', '(0)', '(0)'], {}), '(2005, 10, 30, 1, 0, 0)\n', (3519, 3542), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3539, 3562), 'pytz.tzinfo.memorized_datetime', 'd', (['(2006)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(2006, 3, 26, 1, 0, 0)\n', (3540, 3562), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3559, 3583), 'pytz.tzinfo.memorized_datetime', 'd', (['(2006)', '(10)', '(29)', '(1)', '(0)', '(0)'], {}), '(2006, 10, 29, 1, 0, 0)\n', (3560, 3583), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3580, 3603), 'pytz.tzinfo.memorized_datetime', 'd', (['(2007)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(2007, 3, 25, 1, 0, 0)\n', (3581, 3603), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3600, 3624), 'pytz.tzinfo.memorized_datetime', 'd', (['(2007)', '(10)', '(28)', '(1)', '(0)', '(0)'], {}), '(2007, 10, 28, 1, 0, 0)\n', (3601, 3624), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3621, 3644), 'pytz.tzinfo.memorized_datetime', 'd', (['(2008)', '(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(2008, 3, 30, 1, 0, 0)\n', (3622, 3644), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3641, 3665), 'pytz.tzinfo.memorized_datetime', 'd', (['(2008)', '(10)', '(26)', '(1)', '(0)', '(0)'], {}), '(2008, 10, 26, 1, 0, 0)\n', (3642, 3665), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3662, 3685), 'pytz.tzinfo.memorized_datetime', 'd', (['(2009)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(2009, 3, 29, 1, 0, 0)\n', (3663, 3685), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3682, 3706), 'pytz.tzinfo.memorized_datetime', 'd', (['(2009)', '(10)', '(25)', '(1)', '(0)', '(0)'], {}), '(2009, 10, 25, 1, 0, 0)\n', (3683, 3706), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3703, 3726), 'pytz.tzinfo.memorized_datetime', 'd', (['(2010)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(2010, 3, 28, 1, 0, 0)\n', (3704, 3726), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3723, 3747), 'pytz.tzinfo.memorized_datetime', 'd', (['(2010)', '(10)', '(31)', '(1)', '(0)', '(0)'], {}), '(2010, 10, 31, 1, 0, 0)\n', (3724, 3747), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3744, 3767), 'pytz.tzinfo.memorized_datetime', 'd', (['(2011)', '(3)', '(27)', '(1)', '(0)', '(0)'], {}), '(2011, 3, 27, 1, 0, 0)\n', (3745, 3767), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3764, 3788), 
'pytz.tzinfo.memorized_datetime', 'd', (['(2011)', '(10)', '(30)', '(1)', '(0)', '(0)'], {}), '(2011, 10, 30, 1, 0, 0)\n', (3765, 3788), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3785, 3808), 'pytz.tzinfo.memorized_datetime', 'd', (['(2012)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(2012, 3, 25, 1, 0, 0)\n', (3786, 3808), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3805, 3829), 'pytz.tzinfo.memorized_datetime', 'd', (['(2012)', '(10)', '(28)', '(1)', '(0)', '(0)'], {}), '(2012, 10, 28, 1, 0, 0)\n', (3806, 3829), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3826, 3849), 'pytz.tzinfo.memorized_datetime', 'd', (['(2013)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(2013, 3, 31, 1, 0, 0)\n', (3827, 3849), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3846, 3870), 'pytz.tzinfo.memorized_datetime', 'd', (['(2013)', '(10)', '(27)', '(1)', '(0)', '(0)'], {}), '(2013, 10, 27, 1, 0, 0)\n', (3847, 3870), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3867, 3890), 'pytz.tzinfo.memorized_datetime', 'd', (['(2014)', '(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(2014, 3, 30, 1, 0, 0)\n', (3868, 3890), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3887, 3911), 'pytz.tzinfo.memorized_datetime', 'd', (['(2014)', '(10)', '(26)', '(1)', '(0)', '(0)'], {}), '(2014, 10, 26, 1, 0, 0)\n', (3888, 3911), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3908, 3931), 'pytz.tzinfo.memorized_datetime', 'd', (['(2015)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(2015, 3, 29, 1, 0, 0)\n', (3909, 3931), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3928, 3952), 'pytz.tzinfo.memorized_datetime', 'd', (['(2015)', '(10)', '(25)', '(1)', '(0)', '(0)'], {}), '(2015, 10, 25, 1, 0, 0)\n', (3929, 3952), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3949, 3972), 'pytz.tzinfo.memorized_datetime', 'd', (['(2016)', '(3)', '(27)', '(1)', '(0)', '(0)'], {}), '(2016, 3, 27, 1, 0, 0)\n', (3950, 3972), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3969, 3993), 'pytz.tzinfo.memorized_datetime', 'd', (['(2016)', '(10)', '(30)', '(1)', '(0)', '(0)'], {}), '(2016, 10, 30, 1, 0, 0)\n', (3970, 3993), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((3990, 4013), 'pytz.tzinfo.memorized_datetime', 'd', (['(2017)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(2017, 3, 26, 1, 0, 0)\n', (3991, 4013), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4010, 4034), 'pytz.tzinfo.memorized_datetime', 'd', (['(2017)', '(10)', '(29)', '(1)', '(0)', '(0)'], {}), '(2017, 10, 29, 1, 0, 0)\n', (4011, 4034), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4031, 4054), 'pytz.tzinfo.memorized_datetime', 'd', (['(2018)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(2018, 3, 25, 1, 0, 0)\n', (4032, 4054), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4051, 4075), 'pytz.tzinfo.memorized_datetime', 'd', (['(2018)', '(10)', '(28)', '(1)', '(0)', '(0)'], {}), '(2018, 10, 28, 1, 0, 0)\n', (4052, 4075), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4072, 4095), 'pytz.tzinfo.memorized_datetime', 'd', (['(2019)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(2019, 3, 31, 1, 0, 0)\n', (4073, 4095), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4092, 4116), 'pytz.tzinfo.memorized_datetime', 'd', (['(2019)', '(10)', '(27)', '(1)', '(0)', '(0)'], {}), '(2019, 10, 27, 1, 0, 0)\n', (4093, 4116), True, 'from pytz.tzinfo import 
memorized_datetime as d\n'), ((4113, 4136), 'pytz.tzinfo.memorized_datetime', 'd', (['(2020)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(2020, 3, 29, 1, 0, 0)\n', (4114, 4136), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4133, 4157), 'pytz.tzinfo.memorized_datetime', 'd', (['(2020)', '(10)', '(25)', '(1)', '(0)', '(0)'], {}), '(2020, 10, 25, 1, 0, 0)\n', (4134, 4157), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4154, 4177), 'pytz.tzinfo.memorized_datetime', 'd', (['(2021)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(2021, 3, 28, 1, 0, 0)\n', (4155, 4177), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4174, 4198), 'pytz.tzinfo.memorized_datetime', 'd', (['(2021)', '(10)', '(31)', '(1)', '(0)', '(0)'], {}), '(2021, 10, 31, 1, 0, 0)\n', (4175, 4198), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4195, 4218), 'pytz.tzinfo.memorized_datetime', 'd', (['(2022)', '(3)', '(27)', '(1)', '(0)', '(0)'], {}), '(2022, 3, 27, 1, 0, 0)\n', (4196, 4218), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4215, 4239), 'pytz.tzinfo.memorized_datetime', 'd', (['(2022)', '(10)', '(30)', '(1)', '(0)', '(0)'], {}), '(2022, 10, 30, 1, 0, 0)\n', (4216, 4239), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4236, 4259), 'pytz.tzinfo.memorized_datetime', 'd', (['(2023)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(2023, 3, 26, 1, 0, 0)\n', (4237, 4259), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4256, 4280), 'pytz.tzinfo.memorized_datetime', 'd', (['(2023)', '(10)', '(29)', '(1)', '(0)', '(0)'], {}), '(2023, 10, 29, 1, 0, 0)\n', (4257, 4280), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4277, 4300), 'pytz.tzinfo.memorized_datetime', 'd', (['(2024)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(2024, 3, 31, 1, 0, 0)\n', (4278, 4300), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4297, 4321), 'pytz.tzinfo.memorized_datetime', 'd', (['(2024)', '(10)', '(27)', '(1)', '(0)', '(0)'], {}), '(2024, 10, 27, 1, 0, 0)\n', (4298, 4321), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4318, 4341), 'pytz.tzinfo.memorized_datetime', 'd', (['(2025)', '(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(2025, 3, 30, 1, 0, 0)\n', (4319, 4341), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4338, 4362), 'pytz.tzinfo.memorized_datetime', 'd', (['(2025)', '(10)', '(26)', '(1)', '(0)', '(0)'], {}), '(2025, 10, 26, 1, 0, 0)\n', (4339, 4362), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4359, 4382), 'pytz.tzinfo.memorized_datetime', 'd', (['(2026)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(2026, 3, 29, 1, 0, 0)\n', (4360, 4382), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4379, 4403), 'pytz.tzinfo.memorized_datetime', 'd', (['(2026)', '(10)', '(25)', '(1)', '(0)', '(0)'], {}), '(2026, 10, 25, 1, 0, 0)\n', (4380, 4403), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4400, 4423), 'pytz.tzinfo.memorized_datetime', 'd', (['(2027)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(2027, 3, 28, 1, 0, 0)\n', (4401, 4423), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4420, 4444), 'pytz.tzinfo.memorized_datetime', 'd', (['(2027)', '(10)', '(31)', '(1)', '(0)', '(0)'], {}), '(2027, 10, 31, 1, 0, 0)\n', (4421, 4444), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4441, 4464), 'pytz.tzinfo.memorized_datetime', 'd', (['(2028)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(2028, 3, 26, 1, 0, 0)\n', (4442, 4464), 
True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4461, 4485), 'pytz.tzinfo.memorized_datetime', 'd', (['(2028)', '(10)', '(29)', '(1)', '(0)', '(0)'], {}), '(2028, 10, 29, 1, 0, 0)\n', (4462, 4485), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4482, 4505), 'pytz.tzinfo.memorized_datetime', 'd', (['(2029)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(2029, 3, 25, 1, 0, 0)\n', (4483, 4505), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4502, 4526), 'pytz.tzinfo.memorized_datetime', 'd', (['(2029)', '(10)', '(28)', '(1)', '(0)', '(0)'], {}), '(2029, 10, 28, 1, 0, 0)\n', (4503, 4526), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4523, 4546), 'pytz.tzinfo.memorized_datetime', 'd', (['(2030)', '(3)', '(31)', '(1)', '(0)', '(0)'], {}), '(2030, 3, 31, 1, 0, 0)\n', (4524, 4546), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4543, 4567), 'pytz.tzinfo.memorized_datetime', 'd', (['(2030)', '(10)', '(27)', '(1)', '(0)', '(0)'], {}), '(2030, 10, 27, 1, 0, 0)\n', (4544, 4567), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4564, 4587), 'pytz.tzinfo.memorized_datetime', 'd', (['(2031)', '(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(2031, 3, 30, 1, 0, 0)\n', (4565, 4587), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4584, 4608), 'pytz.tzinfo.memorized_datetime', 'd', (['(2031)', '(10)', '(26)', '(1)', '(0)', '(0)'], {}), '(2031, 10, 26, 1, 0, 0)\n', (4585, 4608), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4605, 4628), 'pytz.tzinfo.memorized_datetime', 'd', (['(2032)', '(3)', '(28)', '(1)', '(0)', '(0)'], {}), '(2032, 3, 28, 1, 0, 0)\n', (4606, 4628), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4625, 4649), 'pytz.tzinfo.memorized_datetime', 'd', (['(2032)', '(10)', '(31)', '(1)', '(0)', '(0)'], {}), '(2032, 10, 31, 1, 0, 0)\n', (4626, 4649), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4646, 4669), 'pytz.tzinfo.memorized_datetime', 'd', (['(2033)', '(3)', '(27)', '(1)', '(0)', '(0)'], {}), '(2033, 3, 27, 1, 0, 0)\n', (4647, 4669), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4666, 4690), 'pytz.tzinfo.memorized_datetime', 'd', (['(2033)', '(10)', '(30)', '(1)', '(0)', '(0)'], {}), '(2033, 10, 30, 1, 0, 0)\n', (4667, 4690), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4687, 4710), 'pytz.tzinfo.memorized_datetime', 'd', (['(2034)', '(3)', '(26)', '(1)', '(0)', '(0)'], {}), '(2034, 3, 26, 1, 0, 0)\n', (4688, 4710), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4707, 4731), 'pytz.tzinfo.memorized_datetime', 'd', (['(2034)', '(10)', '(29)', '(1)', '(0)', '(0)'], {}), '(2034, 10, 29, 1, 0, 0)\n', (4708, 4731), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4728, 4751), 'pytz.tzinfo.memorized_datetime', 'd', (['(2035)', '(3)', '(25)', '(1)', '(0)', '(0)'], {}), '(2035, 3, 25, 1, 0, 0)\n', (4729, 4751), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4748, 4772), 'pytz.tzinfo.memorized_datetime', 'd', (['(2035)', '(10)', '(28)', '(1)', '(0)', '(0)'], {}), '(2035, 10, 28, 1, 0, 0)\n', (4749, 4772), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4769, 4792), 'pytz.tzinfo.memorized_datetime', 'd', (['(2036)', '(3)', '(30)', '(1)', '(0)', '(0)'], {}), '(2036, 3, 30, 1, 0, 0)\n', (4770, 4792), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4789, 4813), 'pytz.tzinfo.memorized_datetime', 'd', (['(2036)', '(10)', '(26)', '(1)', '(0)', '(0)'], {}), '(2036, 
10, 26, 1, 0, 0)\n', (4790, 4813), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4810, 4833), 'pytz.tzinfo.memorized_datetime', 'd', (['(2037)', '(3)', '(29)', '(1)', '(0)', '(0)'], {}), '(2037, 3, 29, 1, 0, 0)\n', (4811, 4833), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4830, 4854), 'pytz.tzinfo.memorized_datetime', 'd', (['(2037)', '(10)', '(25)', '(1)', '(0)', '(0)'], {}), '(2037, 10, 25, 1, 0, 0)\n', (4831, 4854), True, 'from pytz.tzinfo import memorized_datetime as d\n'), ((4887, 4905), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(-2220)', '(0)', '"""LMT"""'], {}), "(-2220, 0, 'LMT')\n", (4888, 4905), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((4905, 4919), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (4906, 4919), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((4919, 4940), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (4920, 4940), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((4940, 4954), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (4941, 4954), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((4954, 4975), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (4955, 4975), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((4975, 4989), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (4976, 4989), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((4989, 5010), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (4990, 5010), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5010, 5024), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5011, 5024), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5024, 5045), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5025, 5045), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5045, 5059), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5046, 5059), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5059, 5080), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5060, 5080), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5080, 5094), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5081, 5094), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5094, 5115), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5095, 5115), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5115, 5129), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5116, 5129), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5129, 5150), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5130, 5150), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5150, 5164), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5151, 5164), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5164, 5185), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', 
'"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5165, 5185), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5185, 5199), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5186, 5199), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5199, 5220), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5200, 5220), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5220, 5234), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5221, 5234), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5234, 5255), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5235, 5255), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5255, 5269), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5256, 5269), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5269, 5290), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5270, 5290), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5290, 5304), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5291, 5304), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5304, 5325), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5305, 5325), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5325, 5339), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5326, 5339), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5339, 5360), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5340, 5360), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5360, 5374), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5361, 5374), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5374, 5395), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5375, 5395), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5395, 5409), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5396, 5409), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5409, 5430), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5410, 5430), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5430, 5444), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5431, 5444), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5444, 5465), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5445, 5465), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5465, 5479), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5466, 5479), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5479, 5500), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5480, 5500), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5500, 5514), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", 
(5501, 5514), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5514, 5535), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5515, 5535), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5535, 5549), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5536, 5549), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5549, 5570), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5550, 5570), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5570, 5584), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5571, 5584), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5584, 5605), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5585, 5605), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5605, 5619), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5606, 5619), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5619, 5640), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5620, 5640), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5640, 5654), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5641, 5654), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5654, 5675), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5655, 5675), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5675, 5696), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(7200)', '(7200)', '"""WEMT"""'], {}), "(7200, 7200, 'WEMT')\n", (5676, 5696), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5696, 5717), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5697, 5717), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5717, 5731), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5718, 5731), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5731, 5752), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5732, 5752), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5752, 5773), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(7200)', '(7200)', '"""WEMT"""'], {}), "(7200, 7200, 'WEMT')\n", (5753, 5773), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5773, 5794), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5774, 5794), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5794, 5808), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5795, 5808), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5808, 5829), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5809, 5829), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5829, 5850), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(7200)', '(7200)', '"""WEMT"""'], {}), "(7200, 7200, 'WEMT')\n", (5830, 5850), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5850, 5871), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 
'WEST')\n", (5851, 5871), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5871, 5885), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5872, 5885), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5885, 5906), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5886, 5906), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5906, 5927), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(7200)', '(7200)', '"""WEMT"""'], {}), "(7200, 7200, 'WEMT')\n", (5907, 5927), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5927, 5948), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5928, 5948), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5948, 5962), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5949, 5962), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5962, 5983), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5963, 5983), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5983, 5997), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (5984, 5997), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((5997, 6018), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (5998, 6018), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6018, 6032), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6019, 6032), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6032, 6053), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6033, 6053), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6053, 6067), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6054, 6067), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6067, 6088), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6068, 6088), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6088, 6102), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6089, 6102), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6102, 6123), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6103, 6123), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6123, 6137), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6124, 6137), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6137, 6158), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6138, 6158), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6158, 6172), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6159, 6172), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6172, 6193), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6173, 6193), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6193, 6207), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6194, 6207), True, 
'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6207, 6228), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6208, 6228), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6228, 6242), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6229, 6242), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6242, 6263), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6243, 6263), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6263, 6277), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6264, 6277), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6277, 6298), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6278, 6298), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6298, 6312), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6299, 6312), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6312, 6333), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6313, 6333), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6333, 6347), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6334, 6347), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6347, 6368), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6348, 6368), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6368, 6382), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6369, 6382), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6382, 6403), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6383, 6403), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6403, 6417), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6404, 6417), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6417, 6438), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6418, 6438), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6438, 6452), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6439, 6452), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6452, 6473), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6453, 6473), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6473, 6487), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6474, 6487), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6487, 6508), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6488, 6508), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6508, 6522), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6509, 6522), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6522, 6543), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6523, 6543), True, 'from pytz.tzinfo import 
memorized_ttinfo as i\n'), ((6543, 6557), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6544, 6557), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6557, 6578), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6558, 6578), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6578, 6592), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6579, 6592), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6592, 6613), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6593, 6613), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6613, 6627), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6614, 6627), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6627, 6644), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(0)', '"""CET"""'], {}), "(3600, 0, 'CET')\n", (6628, 6644), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6644, 6658), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6645, 6658), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6658, 6679), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6659, 6679), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6679, 6693), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6680, 6693), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6693, 6714), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6694, 6714), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6714, 6728), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6715, 6728), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6728, 6749), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6729, 6749), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6749, 6763), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6750, 6763), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6763, 6784), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6764, 6784), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6784, 6798), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6785, 6798), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6798, 6819), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6799, 6819), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6819, 6833), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6820, 6833), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6833, 6854), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6834, 6854), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6854, 6868), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6855, 6868), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6868, 6889), 
'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6869, 6889), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6889, 6903), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6890, 6903), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6903, 6924), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6904, 6924), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6924, 6938), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6925, 6938), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6938, 6959), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6939, 6959), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6959, 6973), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6960, 6973), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6973, 6994), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (6974, 6994), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((6994, 7008), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (6995, 7008), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7008, 7029), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7009, 7029), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7029, 7043), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7030, 7043), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7043, 7064), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7044, 7064), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7064, 7078), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7065, 7078), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7078, 7099), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7079, 7099), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7099, 7113), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7100, 7113), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7113, 7134), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7114, 7134), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7134, 7148), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7135, 7148), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7148, 7169), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7149, 7169), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7169, 7183), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7170, 7183), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7183, 7204), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7184, 7204), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7204, 7221), 'pytz.tzinfo.memorized_ttinfo', 
'i', (['(3600)', '(0)', '"""CET"""'], {}), "(3600, 0, 'CET')\n", (7205, 7221), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7221, 7242), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(7200)', '(3600)', '"""CEST"""'], {}), "(7200, 3600, 'CEST')\n", (7222, 7242), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7242, 7259), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(0)', '"""CET"""'], {}), "(3600, 0, 'CET')\n", (7243, 7259), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7259, 7280), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(7200)', '(3600)', '"""CEST"""'], {}), "(7200, 3600, 'CEST')\n", (7260, 7280), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7280, 7297), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(0)', '"""CET"""'], {}), "(3600, 0, 'CET')\n", (7281, 7297), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7297, 7318), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(7200)', '(3600)', '"""CEST"""'], {}), "(7200, 3600, 'CEST')\n", (7298, 7318), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7318, 7335), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(0)', '"""CET"""'], {}), "(3600, 0, 'CET')\n", (7319, 7335), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7335, 7353), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(0)', '"""WEST"""'], {}), "(3600, 0, 'WEST')\n", (7336, 7353), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7353, 7367), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7354, 7367), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7367, 7388), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7368, 7388), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7388, 7402), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7389, 7402), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7402, 7423), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7403, 7423), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7423, 7437), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7424, 7437), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7437, 7458), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7438, 7458), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7458, 7472), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7459, 7472), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7472, 7493), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7473, 7493), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7493, 7507), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7494, 7507), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7507, 7528), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7508, 7528), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7528, 7542), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7529, 7542), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7542, 7563), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', 
'"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7543, 7563), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7563, 7577), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7564, 7577), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7577, 7598), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7578, 7598), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7598, 7612), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7599, 7612), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7612, 7633), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7613, 7633), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7633, 7647), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7634, 7647), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7647, 7668), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7648, 7668), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7668, 7682), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7669, 7682), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7682, 7703), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7683, 7703), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7703, 7717), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7704, 7717), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7717, 7738), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7718, 7738), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7738, 7752), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7739, 7752), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7752, 7773), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7753, 7773), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7773, 7787), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7774, 7787), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7787, 7808), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7788, 7808), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7808, 7822), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7809, 7822), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7822, 7843), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7823, 7843), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7843, 7857), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7844, 7857), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7857, 7878), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7858, 7878), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7878, 7892), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", 
(7879, 7892), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7892, 7913), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7893, 7913), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7913, 7927), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7914, 7927), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7927, 7948), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7928, 7948), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7948, 7962), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7949, 7962), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7962, 7983), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7963, 7983), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7983, 7997), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (7984, 7997), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((7997, 8018), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (7998, 8018), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8018, 8032), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8019, 8032), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8032, 8053), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8033, 8053), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8053, 8067), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8054, 8067), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8067, 8088), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8068, 8088), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8088, 8102), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8089, 8102), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8102, 8123), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8103, 8123), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8123, 8137), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8124, 8137), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8137, 8158), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8138, 8158), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8158, 8172), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8159, 8172), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8172, 8193), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8173, 8193), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8193, 8207), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8194, 8207), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8207, 8228), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8208, 8228), True, 'from 
pytz.tzinfo import memorized_ttinfo as i\n'), ((8228, 8242), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8229, 8242), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8242, 8263), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8243, 8263), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8263, 8277), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8264, 8277), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8277, 8298), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8278, 8298), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8298, 8312), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8299, 8312), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8312, 8333), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8313, 8333), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8333, 8347), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8334, 8347), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8347, 8368), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8348, 8368), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8368, 8382), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8369, 8382), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8382, 8403), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8383, 8403), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8403, 8417), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8404, 8417), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8417, 8438), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8418, 8438), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8438, 8452), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8439, 8452), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8452, 8473), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8453, 8473), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8473, 8487), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8474, 8487), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8487, 8508), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8488, 8508), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8508, 8522), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8509, 8522), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8522, 8543), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8523, 8543), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8543, 8557), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8544, 8557), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), 
((8557, 8578), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8558, 8578), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8578, 8592), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8579, 8592), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8592, 8613), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8593, 8613), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8613, 8627), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8614, 8627), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8627, 8648), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8628, 8648), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8648, 8662), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8649, 8662), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8662, 8683), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8663, 8683), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8683, 8697), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8684, 8697), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8697, 8718), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8698, 8718), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8718, 8732), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8719, 8732), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8732, 8753), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8733, 8753), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8753, 8767), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8754, 8767), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8767, 8788), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(3600)', '(3600)', '"""WEST"""'], {}), "(3600, 3600, 'WEST')\n", (8768, 8788), True, 'from pytz.tzinfo import memorized_ttinfo as i\n'), ((8788, 8802), 'pytz.tzinfo.memorized_ttinfo', 'i', (['(0)', '(0)', '"""WET"""'], {}), "(0, 0, 'WET')\n", (8789, 8802), True, 'from pytz.tzinfo import memorized_ttinfo as i\n')] |
from logging import getLogger
import luigi
import numpy as np
import gokart
from redshells.model import FeatureAggregationSimilarityModel
from redshells.model.feature_aggregation_similarity_model import FeatureAggregationSimilarityDataset
logger = getLogger(__name__)
class TrainFeatureAggregationSimilarityModel(gokart.TaskOnKart):
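    """Train a FeatureAggregationSimilarityModel on the dataset produced by ``dataset_task``."""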
dataset_task = gokart.TaskInstanceParameter(description='An instance of task which outputs `FeatureAggregationSimilarityDataset`.')
embedding_size = luigi.IntParameter() # type: int
learning_rate = luigi.FloatParameter() # type: float
batch_size = luigi.IntParameter() # type: int
epoch_size = luigi.IntParameter() # type: int
test_size_rate = luigi.FloatParameter() # type: float
early_stopping_patience = luigi.IntParameter() # type: int
max_data_size = luigi.IntParameter() # type: int
output_file_path = luigi.Parameter(default='model/feature_aggregation)similarity_model.pkl') # type: str
def requires(self):
return self.dataset_task
def output(self):
return self.make_target(self.output_file_path)
def run(self):
dataset = self.load() # type: FeatureAggregationSimilarityDataset
feature_size = dataset.x_item_features.shape[1]
item_size = max(np.max(dataset.x_item_indices), np.max(dataset.y_item_indices))
max_feature_index = max(np.max(dataset.x_item_features), np.max(dataset.y_item_features))
logger.debug(f'embedding_size={self.embedding_size},'
f'learning_rate={self.learning_rate},'
f'feature_size={feature_size},'
f'item_size={item_size},'
f'max_feature_index={max_feature_index}')
model = FeatureAggregationSimilarityModel(
embedding_size=self.embedding_size,
learning_rate=self.learning_rate,
feature_size=feature_size,
item_size=item_size,
max_feature_index=max_feature_index)
model.fit(
dataset=dataset.get(size=self.max_data_size, batch_size=self.batch_size),
epoch_size=self.epoch_size,
test_size_rate=self.test_size_rate,
early_stopping_patience=self.early_stopping_patience)
self.dump(model)
| [
"logging.getLogger",
"luigi.FloatParameter",
"luigi.IntParameter",
"numpy.max",
"redshells.model.FeatureAggregationSimilarityModel",
"gokart.TaskInstanceParameter",
"luigi.Parameter"
] | [((252, 271), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (261, 271), False, 'from logging import getLogger\n'), ((358, 479), 'gokart.TaskInstanceParameter', 'gokart.TaskInstanceParameter', ([], {'description': '"""An instance of task which outputs `FeatureAggregationSimilarityDataset`."""'}), "(description=\n 'An instance of task which outputs `FeatureAggregationSimilarityDataset`.')\n", (386, 479), False, 'import gokart\n'), ((496, 516), 'luigi.IntParameter', 'luigi.IntParameter', ([], {}), '()\n', (514, 516), False, 'import luigi\n'), ((550, 572), 'luigi.FloatParameter', 'luigi.FloatParameter', ([], {}), '()\n', (570, 572), False, 'import luigi\n'), ((605, 625), 'luigi.IntParameter', 'luigi.IntParameter', ([], {}), '()\n', (623, 625), False, 'import luigi\n'), ((656, 676), 'luigi.IntParameter', 'luigi.IntParameter', ([], {}), '()\n', (674, 676), False, 'import luigi\n'), ((711, 733), 'luigi.FloatParameter', 'luigi.FloatParameter', ([], {}), '()\n', (731, 733), False, 'import luigi\n'), ((779, 799), 'luigi.IntParameter', 'luigi.IntParameter', ([], {}), '()\n', (797, 799), False, 'import luigi\n'), ((833, 853), 'luigi.IntParameter', 'luigi.IntParameter', ([], {}), '()\n', (851, 853), False, 'import luigi\n'), ((890, 963), 'luigi.Parameter', 'luigi.Parameter', ([], {'default': '"""model/feature_aggregation)similarity_model.pkl"""'}), "(default='model/feature_aggregation)similarity_model.pkl')\n", (905, 963), False, 'import luigi\n'), ((1753, 1950), 'redshells.model.FeatureAggregationSimilarityModel', 'FeatureAggregationSimilarityModel', ([], {'embedding_size': 'self.embedding_size', 'learning_rate': 'self.learning_rate', 'feature_size': 'feature_size', 'item_size': 'item_size', 'max_feature_index': 'max_feature_index'}), '(embedding_size=self.embedding_size,\n learning_rate=self.learning_rate, feature_size=feature_size, item_size=\n item_size, max_feature_index=max_feature_index)\n', (1786, 1950), False, 'from redshells.model import FeatureAggregationSimilarityModel\n'), ((1288, 1318), 'numpy.max', 'np.max', (['dataset.x_item_indices'], {}), '(dataset.x_item_indices)\n', (1294, 1318), True, 'import numpy as np\n'), ((1320, 1350), 'numpy.max', 'np.max', (['dataset.y_item_indices'], {}), '(dataset.y_item_indices)\n', (1326, 1350), True, 'import numpy as np\n'), ((1384, 1415), 'numpy.max', 'np.max', (['dataset.x_item_features'], {}), '(dataset.x_item_features)\n', (1390, 1415), True, 'import numpy as np\n'), ((1417, 1448), 'numpy.max', 'np.max', (['dataset.y_item_features'], {}), '(dataset.y_item_features)\n', (1423, 1448), True, 'import numpy as np\n')] |
from typing import Iterable
from knx_stack.definition.layer.application.a_group_value_response.con import Msg
from knx_stack.decode.layer.application import a_group_value
def decode(state: "knx_stack.State", msg: "knx_stack.Msg") -> Iterable[Msg]:
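    # Decode every group-value payload carried by the message and wrap each one
    # in a response Msg that also carries the status of the current L_Data frame.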
group_values = a_group_value.decode(state, msg)
group_values_response = [
Msg(asap=group_value.asap, dpt=group_value.dpt, status=state.ldata.status)
for group_value in group_values
]
return group_values_response
| [
"knx_stack.decode.layer.application.a_group_value.decode",
"knx_stack.definition.layer.application.a_group_value_response.con.Msg"
] | [((269, 301), 'knx_stack.decode.layer.application.a_group_value.decode', 'a_group_value.decode', (['state', 'msg'], {}), '(state, msg)\n', (289, 301), False, 'from knx_stack.decode.layer.application import a_group_value\n'), ((340, 414), 'knx_stack.definition.layer.application.a_group_value_response.con.Msg', 'Msg', ([], {'asap': 'group_value.asap', 'dpt': 'group_value.dpt', 'status': 'state.ldata.status'}), '(asap=group_value.asap, dpt=group_value.dpt, status=state.ldata.status)\n', (343, 414), False, 'from knx_stack.definition.layer.application.a_group_value_response.con import Msg\n')] |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from StringIO import StringIO
from appengine_wrappers import webapp
from appengine_wrappers import memcache
from appengine_wrappers import urlfetch
from branch_utility import BranchUtility
from server_instance import ServerInstance
import svn_constants
import time
# The default channel to serve docs for if no channel is specified.
_DEFAULT_CHANNEL = 'stable'
class Handler(webapp.RequestHandler):
def __init__(self, request, response):
super(Handler, self).__init__(request, response)
def _HandleGet(self, path):
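    # Resolve the channel prefix and canonical path, then delegate rendering to
    # that channel's ServerInstance; the default channel is served without a prefix.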
channel_name, real_path = BranchUtility.SplitChannelNameFromPath(path)
if channel_name == _DEFAULT_CHANNEL:
self.redirect('/%s' % real_path)
return
if channel_name is None:
channel_name = _DEFAULT_CHANNEL
# TODO(kalman): Check if |path| is a directory and serve path/index.html
# rather than special-casing apps/extensions.
if real_path.strip('/') == 'apps':
real_path = 'apps/index.html'
if real_path.strip('/') == 'extensions':
real_path = 'extensions/index.html'
server_instance = ServerInstance.GetOrCreate(channel_name)
canonical_path = server_instance.path_canonicalizer.Canonicalize(real_path)
if real_path != canonical_path:
self.redirect(canonical_path)
return
ServerInstance.GetOrCreate(channel_name).Get(real_path,
self.request,
self.response)
def _HandleCron(self, path):
# Cron strategy:
#
# Find all public template files and static files, and render them. Most of
# the time these won't have changed since the last cron run, so it's a
# little wasteful, but hopefully rendering is really fast (if it isn't we
# have a problem).
class MockResponse(object):
def __init__(self):
self.status = 200
self.out = StringIO()
self.headers = {}
def set_status(self, status):
self.status = status
def clear(self, *args):
pass
class MockRequest(object):
def __init__(self, path):
self.headers = {}
self.path = path
self.url = '//localhost/%s' % path
channel = path.split('/')[-1]
logging.info('cron/%s: starting' % channel)
server_instance = ServerInstance.GetOrCreate(channel)
def run_cron_for_dir(d):
error = None
start_time = time.time()
files = [f for f in server_instance.content_cache.GetFromFileListing(d)
if not f.endswith('/')]
for f in files:
try:
server_instance.Get(f, MockRequest(f), MockResponse())
except error:
logging.error('cron/%s: error rendering %s/%s: %s' % (
channel, d, f, error))
logging.info('cron/%s: rendering %s files in %s took %s seconds' % (
channel, len(files), d, time.time() - start_time))
return error
# Don't use "or" since we want to evaluate everything no matter what.
was_error = any((run_cron_for_dir(svn_constants.PUBLIC_TEMPLATE_PATH),
run_cron_for_dir(svn_constants.STATIC_PATH)))
if was_error:
self.response.status = 500
self.response.out.write('Failure')
else:
self.response.status = 200
self.response.out.write('Success')
logging.info('cron/%s: finished' % channel)
def _RedirectSpecialCases(self, path):
google_dev_url = 'http://developer.google.com/chrome'
if path == '/' or path == '/index.html':
self.redirect(google_dev_url)
return True
if path == '/apps.html':
self.redirect('/apps/about_apps.html')
return True
return False
def _RedirectFromCodeDotGoogleDotCom(self, path):
if (not self.request.url.startswith(('http://code.google.com',
'https://code.google.com'))):
return False
new_url = 'http://developer.chrome.com/'
# switch to https if necessary
if (self.request.url.startswith('https')):
new_url = new_url.replace('http', 'https', 1)
path = path.split('/')
if len(path) > 0 and path[0] == 'chrome':
path.pop(0)
for channel in BranchUtility.GetAllBranchNames():
if channel in path:
position = path.index(channel)
path.pop(position)
path.insert(0, channel)
new_url += '/'.join(path)
self.redirect(new_url)
return True
def get(self):
path = self.request.path
if self._RedirectSpecialCases(path):
return
if path.startswith('/cron'):
self._HandleCron(path)
return
# Redirect paths like "directory" to "directory/". This is so relative
# file paths will know to treat this as a directory.
if os.path.splitext(path)[1] == '' and path[-1] != '/':
self.redirect(path + '/')
return
path = path.strip('/')
if self._RedirectFromCodeDotGoogleDotCom(path):
return
self._HandleGet(path)
| [
"StringIO.StringIO",
"server_instance.ServerInstance.GetOrCreate",
"os.path.splitext",
"logging.info",
"branch_utility.BranchUtility.SplitChannelNameFromPath",
"branch_utility.BranchUtility.GetAllBranchNames",
"time.time",
"logging.error"
] | [((749, 793), 'branch_utility.BranchUtility.SplitChannelNameFromPath', 'BranchUtility.SplitChannelNameFromPath', (['path'], {}), '(path)\n', (787, 793), False, 'from branch_utility import BranchUtility\n'), ((1269, 1309), 'server_instance.ServerInstance.GetOrCreate', 'ServerInstance.GetOrCreate', (['channel_name'], {}), '(channel_name)\n', (1295, 1309), False, 'from server_instance import ServerInstance\n'), ((2424, 2467), 'logging.info', 'logging.info', (["('cron/%s: starting' % channel)"], {}), "('cron/%s: starting' % channel)\n", (2436, 2467), False, 'import logging\n'), ((2491, 2526), 'server_instance.ServerInstance.GetOrCreate', 'ServerInstance.GetOrCreate', (['channel'], {}), '(channel)\n', (2517, 2526), False, 'from server_instance import ServerInstance\n'), ((3502, 3545), 'logging.info', 'logging.info', (["('cron/%s: finished' % channel)"], {}), "('cron/%s: finished' % channel)\n", (3514, 3545), False, 'import logging\n'), ((4358, 4391), 'branch_utility.BranchUtility.GetAllBranchNames', 'BranchUtility.GetAllBranchNames', ([], {}), '()\n', (4389, 4391), False, 'from branch_utility import BranchUtility\n'), ((2595, 2606), 'time.time', 'time.time', ([], {}), '()\n', (2604, 2606), False, 'import time\n'), ((1481, 1521), 'server_instance.ServerInstance.GetOrCreate', 'ServerInstance.GetOrCreate', (['channel_name'], {}), '(channel_name)\n', (1507, 1521), False, 'from server_instance import ServerInstance\n'), ((2082, 2092), 'StringIO.StringIO', 'StringIO', ([], {}), '()\n', (2090, 2092), False, 'from StringIO import StringIO\n'), ((4907, 4929), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (4923, 4929), False, 'import os\n'), ((2856, 2932), 'logging.error', 'logging.error', (["('cron/%s: error rendering %s/%s: %s' % (channel, d, f, error))"], {}), "('cron/%s: error rendering %s/%s: %s' % (channel, d, f, error))\n", (2869, 2932), False, 'import logging\n'), ((3057, 3068), 'time.time', 'time.time', ([], {}), '()\n', (3066, 3068), False, 'import time\n')] |
from django import template
from posts.helpers import parse_id_from_url
register = template.Library()
def zip_lists(a, b):
return zip(a, b)
def return_first(a):
return a.first()
def parse_user_id(value):
id = parse_id_from_url(value)
return id
register.filter('zip', zip_lists)
register.filter('first', return_first)
register.filter('parse_id', parse_user_id)
| [
"posts.helpers.parse_id_from_url",
"django.template.Library"
] | [((84, 102), 'django.template.Library', 'template.Library', ([], {}), '()\n', (100, 102), False, 'from django import template\n'), ((227, 251), 'posts.helpers.parse_id_from_url', 'parse_id_from_url', (['value'], {}), '(value)\n', (244, 251), False, 'from posts.helpers import parse_id_from_url\n')] |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'Spyonweb API wrapper',
'author': '<NAME>',
'url': 'https://github.com/krmaxwell/spyonweb',
'download_url': 'https://github.com/krmaxwell/spyonweb',
'author_email': '<EMAIL>',
'version': '0.1',
'install_requires': ['nose', 'pre-commit', 'requests', 'bottle', 'TRX'],
'packages': ['spyonweb'],
'scripts': [],
'name': 'spyonweb'
}
setup(**config)
| [
"distutils.core.setup"
] | [((492, 507), 'distutils.core.setup', 'setup', ([], {}), '(**config)\n', (497, 507), False, 'from distutils.core import setup\n')] |
import pickle
import sqlite3
import numpy as np
import os
from vectorizer import vect
def update_model(db_path,model,batch_size=10000):
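    # Stream labelled reviews out of the SQLite table in batches and update the
    # classifier incrementally with partial_fit.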
conn = sqlite3.connect(db_path)
c = conn.cursor()
c.execute('SELECT * from review_db')
results = c.fetchmany(batch_size)
while results:
data = np.array(results)
X = data[:,0]
y = data[:,1].astype(int)
classes = np.array([0,1])
X_train = vect.transform(X)
        model.partial_fit(X_train, y, classes=classes)
results = c.fetchmany(batch_size)
conn.close()
return None
cur_dir = os.path.dirname(__file__)
clf = pickle.load(open(os.path.join(cur_dir,
'pkl_objects',
'classifier.pkl'), 'rb'))
db = os.path.join(cur_dir,'reviews.sqlite')
update_model(db_path=db,model=clf,batch_size=10000)
pickle.dump(clf,open(os.path.join(cur_dir,
'pkl_objects','classifier.pkl'),'wb'),
protocol=4) | [
"sqlite3.connect",
"os.path.join",
"os.path.dirname",
"numpy.array",
"vectorizer.vect.transform"
] | [((531, 556), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (546, 556), False, 'import os\n'), ((651, 690), 'os.path.join', 'os.path.join', (['cur_dir', '"""reviews.sqlite"""'], {}), "(cur_dir, 'reviews.sqlite')\n", (663, 690), False, 'import os\n'), ((147, 171), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (162, 171), False, 'import sqlite3\n'), ((290, 307), 'numpy.array', 'np.array', (['results'], {}), '(results)\n', (298, 307), True, 'import numpy as np\n'), ((365, 381), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (373, 381), True, 'import numpy as np\n'), ((393, 410), 'vectorizer.vect.transform', 'vect.transform', (['X'], {}), '(X)\n', (407, 410), False, 'from vectorizer import vect\n'), ((580, 634), 'os.path.join', 'os.path.join', (['cur_dir', '"""pkl_objects"""', '"""classifier.pkl"""'], {}), "(cur_dir, 'pkl_objects', 'classifier.pkl')\n", (592, 634), False, 'import os\n'), ((763, 817), 'os.path.join', 'os.path.join', (['cur_dir', '"""pkl_objects"""', '"""classifier.pkl"""'], {}), "(cur_dir, 'pkl_objects', 'classifier.pkl')\n", (775, 817), False, 'import os\n')] |
import sys
try:
n = int(sys.argv[1])
except:
n = 100
import sys # provides getsizeof function
data = []
for k in range(n): # NOTE: must fix choice of n
a = len(data) # number of elements
b = sys.getsizeof(data) # actual size in bytes
print('Length: {0:3d}; Size in bytes: {1:4d}'.format(a, b))
data.append(None) # increase length by one
| [
"sys.getsizeof"
] | [((279, 298), 'sys.getsizeof', 'sys.getsizeof', (['data'], {}), '(data)\n', (292, 298), False, 'import sys\n')] |
import numpy as np
import clumpy
from clumpy import datasets
from clumpy import plots
from clumpy.gmeans import make_new_clusterer
import hdbscan
X = datasets.fetch_10kdiabetes_embedding()
cluster_centers = []
clusterer = hdbscan.HDBSCAN(min_cluster_size=100).fit(X)
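# Seed one centre per HDBSCAN cluster with the mean of its members; label -1 is noise and is skipped.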
for label in np.unique(clusterer.labels_):
if label == -1:
continue
    members = X[clusterer.labels_ == label]
    cluster_centers.append(members.mean(axis=0))
clusterer = make_new_clusterer(X, np.array(cluster_centers))
#plots.plot_clusters(X, clusterer.labels_, clusterer.cluster_centers_)
print('fitting rules')
importances = clumpy.rules.ova_forest_importance(X, clusterer.labels_, top_k=1)
| [
"numpy.unique",
"clumpy.rules.ova_forest_importance",
"numpy.array",
"clumpy.datasets.fetch_10kdiabetes_embedding",
"hdbscan.HDBSCAN"
] | [((151, 189), 'clumpy.datasets.fetch_10kdiabetes_embedding', 'datasets.fetch_10kdiabetes_embedding', ([], {}), '()\n', (187, 189), False, 'from clumpy import datasets\n'), ((283, 311), 'numpy.unique', 'np.unique', (['clusterer.labels_'], {}), '(clusterer.labels_)\n', (292, 311), True, 'import numpy as np\n'), ((625, 690), 'clumpy.rules.ova_forest_importance', 'clumpy.rules.ova_forest_importance', (['X', 'clusterer.labels_'], {'top_k': '(1)'}), '(X, clusterer.labels_, top_k=1)\n', (659, 690), False, 'import clumpy\n'), ((489, 514), 'numpy.array', 'np.array', (['cluster_centers'], {}), '(cluster_centers)\n', (497, 514), True, 'import numpy as np\n'), ((225, 262), 'hdbscan.HDBSCAN', 'hdbscan.HDBSCAN', ([], {'min_cluster_size': '(100)'}), '(min_cluster_size=100)\n', (240, 262), False, 'import hdbscan\n')] |
import numpy as np
import cv2
import glob
import sys
import os
VIDEO_MODE = False
IMAGE_FILE = ""
VIDEO_FOLDER = "./"
FRAMES = 100
# To run, place this script inside the application's run folder
#scp -P4422 <EMAIL>:/home/modfreitas/parsec/parsec-3.0/ext/splash2x/apps/volrend/run/saida__* ~/Documentos/PCD/saidas
if(len(sys.argv) == 2 and sys.argv[1]=="-v"):
VIDEO_MODE = True
elif(len(sys.argv) == 3 and sys.argv[1]=="-v"):
VIDEO_MODE = True
VIDEO_FOLDER = sys.argv[2]
elif(len(sys.argv) == 5 and sys.argv[1]=="-v" and sys.argv[3]=="-f"):
VIDEO_MODE = True
VIDEO_FOLDER = sys.argv[2]
try:
FRAMES = int(sys.argv[4])
except:
print("Numero inválido de frames")
exit(1)
elif(len(sys.argv) == 2):
IMAGE_FILE = sys.argv[1]
else:
print("USO:")
print("Para geracao imagens: ArquivoTXT")
print("Para geracao video: -v [pastaArquivosTXT] -f [frames]")
exit(1)
if(VIDEO_MODE):
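    # Each frame .txt stores one image row per line as '|'-separated grey values; rows are
    # zero-padded into a 380x380 frame and then letterboxed to 640x480 before being written to the AVI.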
files = [f for f in os.listdir(VIDEO_FOLDER) if f.endswith(".txt")]
writer = cv2.VideoWriter('output.avi', cv2.VideoWriter_fourcc(*"FMP4"), 20.0, (640,480))
if(len(files)==0):
print("Não foram achado arquivos de textos nesta pasta")
exit(1)
for frameid ,file in enumerate(files):
img = np.zeros((380, 380), dtype=np.uint8)
i = 0
pad_r = -1
pad_l = -1
pad_t = -1
try:
f = open(VIDEO_FOLDER+"/"+file, "r")
for line in f.readlines():
if(line == "\n" or line == ""):
continue
r = np.array(line.replace("\n", "0").split("|"), dtype=np.uint8)
if(pad_l == -1):
pad_l = pad_t = int((380-len(r))/2)
pad_r = int(np.ceil((380-len(r))/2))
img[i+pad_t, :] = np.pad(r, pad_width=(pad_l, pad_r), constant_values=(0, 0))
i += 1
if(frameid > FRAMES):
                break
img = np.pad(img, ((int((480-380)/2), int((480-380)/2)), (int((640-380)/2), int((640-380)/2))), 'constant', constant_values=((0, 0), (0, 0)))
torgb = cv2.cvtColor(img,cv2.COLOR_GRAY2RGB)
print(f"Quadro {frameid}, {file}")
writer.write(torgb)
f.close()
except:
print("Arquivo de texto não existente: "+VIDEO_FOLDER)
exit(1)
print('Saída: output.avi')
    writer.release()
cv2.destroyAllWindows()
else:
img = np.zeros((380, 380), dtype=np.uint8)
try:
f = open(IMAGE_FILE, "r")
i = 0
pad_r = -1
pad_l = -1
pad_t = -1
for line in f.readlines():
if(line == "\n" or line == ""):
continue
r = np.array(line.replace("\n", "0").split("|"), dtype=np.uint8)
if(pad_l == -1):
pad_l = pad_t = int((380-len(r))/2)
pad_r = int(np.ceil((380-len(r))/2))
img[i+pad_t, :] = np.pad(r, pad_width=(pad_l, pad_r), constant_values=(0, 0))
i += 1
print("Saída: {}.png".format(IMAGE_FILE.split("/")[-1].split(".")[0]))
cv2.imwrite(IMAGE_FILE.split("/")[-1].split(".")[0] + '.png', img)
cv2.destroyAllWindows()
f.close()
except:
print("Arquivo não existente")
exit(1)
| [
"os.listdir",
"numpy.zeros",
"cv2.destroyAllWindows",
"cv2.cvtColor",
"cv2.VideoWriter_fourcc",
"numpy.pad"
] | [((2489, 2512), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (2510, 2512), False, 'import cv2\n'), ((2530, 2566), 'numpy.zeros', 'np.zeros', (['(380, 380)'], {'dtype': 'np.uint8'}), '((380, 380), dtype=np.uint8)\n', (2538, 2566), True, 'import numpy as np\n'), ((1045, 1076), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'FMP4'"], {}), "(*'FMP4')\n", (1067, 1076), False, 'import cv2\n'), ((1257, 1293), 'numpy.zeros', 'np.zeros', (['(380, 380)'], {'dtype': 'np.uint8'}), '((380, 380), dtype=np.uint8)\n', (1265, 1293), True, 'import numpy as np\n'), ((3350, 3373), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3371, 3373), False, 'import cv2\n'), ((954, 978), 'os.listdir', 'os.listdir', (['VIDEO_FOLDER'], {}), '(VIDEO_FOLDER)\n', (964, 978), False, 'import os\n'), ((2175, 2212), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_GRAY2RGB'], {}), '(img, cv2.COLOR_GRAY2RGB)\n', (2187, 2212), False, 'import cv2\n'), ((3108, 3167), 'numpy.pad', 'np.pad', (['r'], {'pad_width': '(pad_l, pad_r)', 'constant_values': '(0, 0)'}), '(r, pad_width=(pad_l, pad_r), constant_values=(0, 0))\n', (3114, 3167), True, 'import numpy as np\n'), ((1821, 1880), 'numpy.pad', 'np.pad', (['r'], {'pad_width': '(pad_l, pad_r)', 'constant_values': '(0, 0)'}), '(r, pad_width=(pad_l, pad_r), constant_values=(0, 0))\n', (1827, 1880), True, 'import numpy as np\n')] |
from repro.model.preactresnet import PreActBlock, PreActResNet
def build(input_size, num_classes):
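    # Four stages with [3, 4, 6, 3] pre-activation basic blocks (the ResNet-34-style depth).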
return PreActResNet(PreActBlock, [3,4,6,3], input_size=input_size, num_classes=num_classes)
| [
"repro.model.preactresnet.PreActResNet"
] | [((112, 204), 'repro.model.preactresnet.PreActResNet', 'PreActResNet', (['PreActBlock', '[3, 4, 6, 3]'], {'input_size': 'input_size', 'num_classes': 'num_classes'}), '(PreActBlock, [3, 4, 6, 3], input_size=input_size, num_classes=\n num_classes)\n', (124, 204), False, 'from repro.model.preactresnet import PreActBlock, PreActResNet\n')] |
import pandas
filename = "data.xlsx"
sheet = "stats_104102"
book = pandas.read_excel(filename, sheetname=sheet, header=1)
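# header=1 takes the second spreadsheet row as the column header; `sheetname` is the
# older pandas keyword (newer releases spell it `sheet_name`).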
book = book.sort_values(by=2015, ascending=False)
print(book)
| [
"pandas.read_excel"
] | [((69, 123), 'pandas.read_excel', 'pandas.read_excel', (['filename'], {'sheetname': 'sheet', 'header': '(1)'}), '(filename, sheetname=sheet, header=1)\n', (86, 123), False, 'import pandas\n')] |
#!/usr/bin/env python
"""Management Scripts."""
from flask_script import Manager
from app import app
manager = Manager(app)
if __name__ == '__main__':
manager.run()
| [
"flask_script.Manager"
] | [((114, 126), 'flask_script.Manager', 'Manager', (['app'], {}), '(app)\n', (121, 126), False, 'from flask_script import Manager\n')] |
"""
"""
## python imports
from random import randint
## source.python imports
from effects.base import TempEntity
from engines.sound import StreamSound
from engines.precache import Model
from entities.entity import Entity
from filters.players import PlayerIter
from listeners.tick import Repeat
from weapons.manager import weapon_manager
## warcraft.package imports
from warcraft.commands.messages import send_wcs_saytext_by_index
from warcraft.players import player_dict
from warcraft.race import Race
from warcraft.registration import events, clientcommands
from warcraft.skill import Skill
from warcraft.utility import classproperty, CooldownDict
## __all__ declaration
__all__ = ("OrcishHorde", )
## OrcishHorde declaration
chain_sound = StreamSound('source-python/warcraft/chain_lightning.wav', download=True)
root_sound = StreamSound('source-python/warcraft/root.mp3', download=True)
class OrcishHorde(Race):
image = "https://liquipedia.net/commons/images/thumb/7/76/Orcrace.png/200px-Orcrace.png"
@classproperty
def description(cls):
return 'Recoded Orcish Horde. (Kryptonite)'
@classproperty
def max_level(cls):
return 99
@classproperty
def requirement_sort_key(cls):
return 3
@OrcishHorde.add_skill
class BloodFury(Skill):
laser = Model('sprites/lgtning.vmt', True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.can_crit = False
self.counter = 0
self.repeater = None
self.beam = TempEntity('BeamPoints', alpha=255, red=0, green=255, blue=0,
life_time=1.0, model_index=self.laser.index,
start_width=3, end_width=3, frame_rate=255,
halo_index=self.laser.index)
@classproperty
def description(cls):
return 'You are able to hit vital points, causing major damage.'
@classproperty
def max_level(cls):
return 8
_msg_a = '{{GREEN}}Critical strike {{PALE_GREEN}}on {{RED}}{name} {{PALE_GREEN}}caused {{DULL_RED}}vital damage!'
@events('player_spawn', 'skill_level_up')
def _on_spawn_start_repeat(self, player, **kwargs):
self.can_crit = True
self.counter = 0
self.repeater = Repeat(self._on_game_tick, kwargs={}, cancel_on_level_end=True)
self.repeater.start(0.1)
@events('player_death')
def _on_death_stop_repeat(self, player, **kwargs):
if self.repeater:
self.repeater.stop()
@events('player_pre_attack')
def _on_player_pre_attack(self, attacker, victim, info, **kwargs):
if self.level == 0:
return
if self.can_crit:
info.damage *= 1 + 0.2 * self.level
send_wcs_saytext_by_index(self._msg_a.format(name=victim.name), attacker.index)
weapon = attacker.active_weapon
if weapon and weapon.weapon_name.split("_")[-1] not in weapon_manager.projectiles:
start_location = weapon.origin.copy()
start_location.z += 40
end_location = attacker.get_view_coordinates()
self.beam.create(start_point=start_location, end_point=end_location)
self.can_crit = False
self.counter = 0
def _on_game_tick(self):
self.counter += 1
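        # The repeater ticks every 0.1 s, so the next crit becomes available after roughly (256 - 2 * level) / 10 seconds.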
if self.counter == 256 - (self.level * 2):
self.can_crit = True
@OrcishHorde.add_skill
class EarthgrabTotem(Skill):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.model = Model('sprites/blueflare1.vmt', True)
self.model._precache()
self.effect = TempEntity('BeamRingPoint', start_radius=120,
end_radius=0, model_index=self.model.index, halo_index=self.model.index,
life_time=1.5, amplitude=10, red=10, green=255, blue=10, alpha=245, flags=0,
start_width=6, end_width=6)
if not root_sound.is_precached:
root_sound.precache()
@classproperty
def description(cls):
return 'Root your enemies to the ground, 16-24% chance.'
@classproperty
def max_level(cls):
return 8
_msg_a = '{{GREEN}}Rooted {{RED}}{name} {{PALE_GREEN}}to the ground.'
_msg_b = '{{PALE_GREEN}}You have been {{GREEN}}rooted {{PALE_GREEN}}to the ground by {{RED}}{name}.'
@events('player_pre_attack')
def _on_player_pre_attack(self, attacker, victim, **kwargs):
if self.level == 0:
return
if randint(1, 100) <= 16 + self.level and not victim.stuck:
victim.stuck = True
victim.delay(1.5, victim.__setattr__, args=('stuck', False))
send_wcs_saytext_by_index(self._msg_a.format(name=victim.name), attacker.index)
send_wcs_saytext_by_index(self._msg_b.format(name=attacker.name), victim.index)
root_sound.index = victim.index
root_sound.origin = victim.origin
root_sound.play()
self.effect.create(center=victim.origin)
self.effect.create(center=victim.origin, start_radius=80)
@OrcishHorde.add_skill
class Reincarnation(Skill):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.weapons = []
self.location = None
@classproperty
def description(cls):
        return 'Upon death, the shamans will resurrect you in your old location, 25-33% chance.'
@classproperty
def max_level(cls):
return 8
_msg_a = '{ORANGE}Respawning {PALE_GREEN}in {GREEN}1 {PALE_GREEN}second.'
def _force_drop_weapons(self, player):
for index in player.weapon_indexes(not_filters='knife'):
entity = Entity(index)
player.drop_weapon(entity.pointer, None, None)
@events('player_pre_victim')
def _on_pre_death_obtain_weapons(self, victim, **kwargs):
self.weapons = [Entity(index).class_name for index in victim.weapon_indexes(
not_filters='knife')
]
self.location = victim.origin.copy()
self.location.z += 1
@events('player_death')
def _on_death_respawn(self, player, **kwargs):
if self.level == 0:
return
if randint(1, 101) <= 25 + self.level:
player.delay(1.5, player.spawn)
player.delay(2, self._force_drop_weapons, args=(player, ))
for weapon in self.weapons:
player.delay(3, player.give_named_item, args=(weapon, ))
if self.location:
player.delay(2.2, player.teleport, args=(self.location, ))
send_wcs_saytext_by_index(self._msg_a, player.index)
@OrcishHorde.add_skill
class ChainLightning(Skill):
laser = Model('sprites/lgtning.vmt', True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.cooldowns = CooldownDict()
self.beam = TempEntity('BeamPoints', alpha=255, red=255, green=200, blue=200,
life_time=1.0, start_width=15, end_width=15, frame_rate=255)
self.laser = Model('sprites/lgtning.vmt')
self.laser._precache()
@classproperty
def description(cls):
return 'You channel a lightning rod which ricochets from player to player.'
@classproperty
def max_level(cls):
return 8
@classmethod
def is_available(cls, player):
return player.race.level > 8
_msg_a = '{GREEN}Chain Lightning {RED}hit enemies{PALE_GREEN}!'
_msg_c = '{{GREEN}}Chain Lightning {{PALE_GREEN}}is on cooldown for {{DULL_RED}}{time:0.1f} {{PALE_GREEN}}seconds.'
_msg_f = '{GREEN}Chain Lightning {PALE_GREEN}found {DULL_RED}no enemies{PALE_GREEN}!'
def _find_closest_player(self, player, team_index, length=99999, exclusions=[]):
_target = None
for target in player_dict.values():
if target.dead or target.team_index == team_index or target in exclusions or target.ultimate_immune:
continue
_distance = player.origin.get_distance(target.origin)
if _distance < length:
_target = target
length = _distance
return _target
def _find_chain_players(self, player, length, count):
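        # Starting from the player, repeatedly pick the nearest enemy not already chained until `count` targets are collected.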
_last_target = player
team_index = player.team_index
_targets = []
while count > 0:
if not _last_target:
break
_target = self._find_closest_player(_last_target, team_index, length, _targets)
_targets.append(_target)
_last_target = _target
count -= 1
return _targets
@events('player_spawn')
def _on_player_spawn_reset(self, player, **kwargs):
self.cooldowns['ultimate'] = 4
@clientcommands('ultimate')
def _on_player_ultimate(self, player, **kwargs):
if self.level == 0:
return
_cooldown = self.cooldowns['ultimate']
if _cooldown <= 0:
last_target = player
targets = self._find_chain_players(player, 500, 3)
if targets[0] == None:
send_wcs_saytext_by_index(self._msg_f, player.index)
return
for target in targets:
if not target:
continue
target.take_damage(20+5*self.level, attacker_index=player.index, skip_hooks=True)
location1 = last_target.origin.copy()
location2 = target.origin.copy()
location1.z += 40
location2.z += 40
self.beam.create(start_point=location1, end_point=location2, halo=self.laser, model=self.laser)
last_target = target
chain_sound.index = player.index
chain_sound.origin = player.origin
chain_sound.play()
send_wcs_saytext_by_index(self._msg_a, player.index)
self.cooldowns['ultimate'] = 20
else:
send_wcs_saytext_by_index(self._msg_c.format(time=_cooldown), player.index) | [
"warcraft.players.player_dict.values",
"listeners.tick.Repeat",
"engines.sound.StreamSound",
"entities.entity.Entity",
"warcraft.commands.messages.send_wcs_saytext_by_index",
"warcraft.utility.CooldownDict",
"warcraft.registration.clientcommands",
"warcraft.registration.events",
"effects.base.TempEntity",
"random.randint",
"engines.precache.Model"
] | [((753, 825), 'engines.sound.StreamSound', 'StreamSound', (['"""source-python/warcraft/chain_lightning.wav"""'], {'download': '(True)'}), "('source-python/warcraft/chain_lightning.wav', download=True)\n", (764, 825), False, 'from engines.sound import StreamSound\n'), ((839, 900), 'engines.sound.StreamSound', 'StreamSound', (['"""source-python/warcraft/root.mp3"""'], {'download': '(True)'}), "('source-python/warcraft/root.mp3', download=True)\n", (850, 900), False, 'from engines.sound import StreamSound\n'), ((1317, 1351), 'engines.precache.Model', 'Model', (['"""sprites/lgtning.vmt"""', '(True)'], {}), "('sprites/lgtning.vmt', True)\n", (1322, 1351), False, 'from engines.precache import Model\n'), ((2061, 2101), 'warcraft.registration.events', 'events', (['"""player_spawn"""', '"""skill_level_up"""'], {}), "('player_spawn', 'skill_level_up')\n", (2067, 2101), False, 'from warcraft.registration import events, clientcommands\n'), ((2340, 2362), 'warcraft.registration.events', 'events', (['"""player_death"""'], {}), "('player_death')\n", (2346, 2362), False, 'from warcraft.registration import events, clientcommands\n'), ((2483, 2510), 'warcraft.registration.events', 'events', (['"""player_pre_attack"""'], {}), "('player_pre_attack')\n", (2489, 2510), False, 'from warcraft.registration import events, clientcommands\n'), ((4336, 4363), 'warcraft.registration.events', 'events', (['"""player_pre_attack"""'], {}), "('player_pre_attack')\n", (4342, 4363), False, 'from warcraft.registration import events, clientcommands\n'), ((5787, 5814), 'warcraft.registration.events', 'events', (['"""player_pre_victim"""'], {}), "('player_pre_victim')\n", (5793, 5814), False, 'from warcraft.registration import events, clientcommands\n'), ((6093, 6115), 'warcraft.registration.events', 'events', (['"""player_death"""'], {}), "('player_death')\n", (6099, 6115), False, 'from warcraft.registration import events, clientcommands\n'), ((6739, 6773), 'engines.precache.Model', 'Model', (['"""sprites/lgtning.vmt"""', '(True)'], {}), "('sprites/lgtning.vmt', True)\n", (6744, 6773), False, 'from engines.precache import Model\n'), ((8629, 8651), 'warcraft.registration.events', 'events', (['"""player_spawn"""'], {}), "('player_spawn')\n", (8635, 8651), False, 'from warcraft.registration import events, clientcommands\n'), ((8753, 8779), 'warcraft.registration.clientcommands', 'clientcommands', (['"""ultimate"""'], {}), "('ultimate')\n", (8767, 8779), False, 'from warcraft.registration import events, clientcommands\n'), ((1540, 1728), 'effects.base.TempEntity', 'TempEntity', (['"""BeamPoints"""'], {'alpha': '(255)', 'red': '(0)', 'green': '(255)', 'blue': '(0)', 'life_time': '(1.0)', 'model_index': 'self.laser.index', 'start_width': '(3)', 'end_width': '(3)', 'frame_rate': '(255)', 'halo_index': 'self.laser.index'}), "('BeamPoints', alpha=255, red=0, green=255, blue=0, life_time=1.0,\n model_index=self.laser.index, start_width=3, end_width=3, frame_rate=\n 255, halo_index=self.laser.index)\n", (1550, 1728), False, 'from effects.base import TempEntity\n'), ((2237, 2300), 'listeners.tick.Repeat', 'Repeat', (['self._on_game_tick'], {'kwargs': '{}', 'cancel_on_level_end': '(True)'}), '(self._on_game_tick, kwargs={}, cancel_on_level_end=True)\n', (2243, 2300), False, 'from listeners.tick import Repeat\n'), ((3552, 3589), 'engines.precache.Model', 'Model', (['"""sprites/blueflare1.vmt"""', '(True)'], {}), "('sprites/blueflare1.vmt', True)\n", (3557, 3589), False, 'from engines.precache import Model\n'), ((3643, 3880), 
'effects.base.TempEntity', 'TempEntity', (['"""BeamRingPoint"""'], {'start_radius': '(120)', 'end_radius': '(0)', 'model_index': 'self.model.index', 'halo_index': 'self.model.index', 'life_time': '(1.5)', 'amplitude': '(10)', 'red': '(10)', 'green': '(255)', 'blue': '(10)', 'alpha': '(245)', 'flags': '(0)', 'start_width': '(6)', 'end_width': '(6)'}), "('BeamRingPoint', start_radius=120, end_radius=0, model_index=\n self.model.index, halo_index=self.model.index, life_time=1.5, amplitude\n =10, red=10, green=255, blue=10, alpha=245, flags=0, start_width=6,\n end_width=6)\n", (3653, 3880), False, 'from effects.base import TempEntity\n'), ((6883, 6897), 'warcraft.utility.CooldownDict', 'CooldownDict', ([], {}), '()\n', (6895, 6897), False, 'from warcraft.utility import classproperty, CooldownDict\n'), ((6918, 7049), 'effects.base.TempEntity', 'TempEntity', (['"""BeamPoints"""'], {'alpha': '(255)', 'red': '(255)', 'green': '(200)', 'blue': '(200)', 'life_time': '(1.0)', 'start_width': '(15)', 'end_width': '(15)', 'frame_rate': '(255)'}), "('BeamPoints', alpha=255, red=255, green=200, blue=200, life_time\n =1.0, start_width=15, end_width=15, frame_rate=255)\n", (6928, 7049), False, 'from effects.base import TempEntity\n'), ((7078, 7106), 'engines.precache.Model', 'Model', (['"""sprites/lgtning.vmt"""'], {}), "('sprites/lgtning.vmt')\n", (7083, 7106), False, 'from engines.precache import Model\n'), ((7829, 7849), 'warcraft.players.player_dict.values', 'player_dict.values', ([], {}), '()\n', (7847, 7849), False, 'from warcraft.players import player_dict\n'), ((5708, 5721), 'entities.entity.Entity', 'Entity', (['index'], {}), '(index)\n', (5714, 5721), False, 'from entities.entity import Entity\n'), ((6238, 6253), 'random.randint', 'randint', (['(1)', '(101)'], {}), '(1, 101)\n', (6245, 6253), False, 'from random import randint\n'), ((6620, 6672), 'warcraft.commands.messages.send_wcs_saytext_by_index', 'send_wcs_saytext_by_index', (['self._msg_a', 'player.index'], {}), '(self._msg_a, player.index)\n', (6645, 6672), False, 'from warcraft.commands.messages import send_wcs_saytext_by_index\n'), ((9842, 9894), 'warcraft.commands.messages.send_wcs_saytext_by_index', 'send_wcs_saytext_by_index', (['self._msg_a', 'player.index'], {}), '(self._msg_a, player.index)\n', (9867, 9894), False, 'from warcraft.commands.messages import send_wcs_saytext_by_index\n'), ((4500, 4515), 'random.randint', 'randint', (['(1)', '(100)'], {}), '(1, 100)\n', (4507, 4515), False, 'from random import randint\n'), ((5901, 5914), 'entities.entity.Entity', 'Entity', (['index'], {}), '(index)\n', (5907, 5914), False, 'from entities.entity import Entity\n'), ((9115, 9167), 'warcraft.commands.messages.send_wcs_saytext_by_index', 'send_wcs_saytext_by_index', (['self._msg_f', 'player.index'], {}), '(self._msg_f, player.index)\n', (9140, 9167), False, 'from warcraft.commands.messages import send_wcs_saytext_by_index\n')] |
import logging
import os
import signal
import time
from game_runner import GameRunner
# from snake_game_executor import SnakeGameExecutor
log = logging.getLogger("simulation_controller")
class SimulationController(object):
def __init__(self, args):
self.args = args
self.run_counter = 0
self.main_pid = None
self.execution_stopped = False
def initial_batch(self):
raise NotImplementedError()
def create_batch_from_results(self, results):
""" Create the next batch and return an estimate of the work
done in the previous batch.
:param results: the list of all the results for the previous batch
:return: (new_batch, estimate_work_previous_batch)
"""
raise NotImplementedError()
def simulation_interrupted(self):
pass
def run_simulation(self):
if self.execution_stopped:
log.warning("Can't train: the execution has been stopped.")
return
# Set our own signal handler for SIGINT
signal.signal(signal.SIGINT, self.__signal_handler)
self.main_pid = os.getpid()
global_start_time = time.time()
with GameRunner() as executor:
# with SnakeGameExecutor(self.args) as executor:
batch = self.initial_batch()
while len(batch) > 0:
start_time = time.time()
batch_size = len(batch)
log.info('Running new batch: %d jobs.', batch_size)
self.run_counter += batch_size
result_generator = executor.run_batch(batch)
if result_generator:
batch = self.create_batch_from_results(result_generator)
batch_duration = time.time() - start_time
log.info('Batch: %d simulations in %g sec:\n => %g sim/sec',
batch_size,
batch_duration,
batch_size / batch_duration)
else:
time.sleep(2)
if self.execution_stopped:
                    self.simulation_interrupted()
break
simulation_duration = time.time() - global_start_time
log.info('Ran %d simulations in %g sec.',
self.run_counter, simulation_duration)
# Unregister the signal handler
signal.signal(signal.SIGINT, signal.SIG_DFL)
def __signal_handler(self, sig, frame):
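        # Processes spawned by the executor inherit this handler; only the main process should react to Ctrl+C.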
if os.getpid() != self.main_pid:
return
if sig == signal.SIGINT:
if not self.execution_stopped:
log.critical('SIGINT received, stopping...')
self.execution_stopped = True
# Unregister the signal handler
signal.signal(signal.SIGINT, signal.SIG_DFL)
| [
"logging.getLogger",
"signal.signal",
"time.sleep",
"game_runner.GameRunner",
"os.getpid",
"time.time"
] | [((146, 188), 'logging.getLogger', 'logging.getLogger', (['"""simulation_controller"""'], {}), "('simulation_controller')\n", (163, 188), False, 'import logging\n'), ((1050, 1101), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'self.__signal_handler'], {}), '(signal.SIGINT, self.__signal_handler)\n', (1063, 1101), False, 'import signal\n'), ((1126, 1137), 'os.getpid', 'os.getpid', ([], {}), '()\n', (1135, 1137), False, 'import os\n'), ((1167, 1178), 'time.time', 'time.time', ([], {}), '()\n', (1176, 1178), False, 'import time\n'), ((2405, 2449), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.SIG_DFL'], {}), '(signal.SIGINT, signal.SIG_DFL)\n', (2418, 2449), False, 'import signal\n'), ((1192, 1204), 'game_runner.GameRunner', 'GameRunner', ([], {}), '()\n', (1202, 1204), False, 'from game_runner import GameRunner\n'), ((2218, 2229), 'time.time', 'time.time', ([], {}), '()\n', (2227, 2229), False, 'import time\n'), ((2506, 2517), 'os.getpid', 'os.getpid', ([], {}), '()\n', (2515, 2517), False, 'import os\n'), ((1380, 1391), 'time.time', 'time.time', ([], {}), '()\n', (1389, 1391), False, 'import time\n'), ((2804, 2848), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal.SIG_DFL'], {}), '(signal.SIGINT, signal.SIG_DFL)\n', (2817, 2848), False, 'import signal\n'), ((2055, 2068), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2065, 2068), False, 'import time\n'), ((1763, 1774), 'time.time', 'time.time', ([], {}), '()\n', (1772, 1774), False, 'import time\n')] |
"""This is a home-rolled class to simplify usage of jinja2 templates.
You should be able to follow the example templates in /static/templates and their use in main.py,
and never have to touch this class"""
import os
import jinja2
# This constant references the /static/template folder from the path that core.py lives in.
# (.. to go back 1 folder)
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__) + "/../static/template"),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
def respond(template_file, params=None):
if params is None:
params = {}
# params can be a good place to automatically inject GAE's login/logout variables
# or user/session variables.
tmpl = JINJA_ENVIRONMENT.get_template(template_file)
return tmpl.render(params)
| [
"os.path.dirname"
] | [((430, 455), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (445, 455), False, 'import os\n')] |
#! usr/bin/python3
# -*- coding: utf-8 -*-
#
# Flicket - copyright <NAME>: <EMAIL>
import json
import plotly
from application.flicket.models.flicket_models import FlicketDomain
from application.flicket.models.flicket_models import FlicketInstitute
from application.flicket.models.flicket_models import FlicketStatus, FlicketRequestStage
from application.flicket.models.flicket_models import FlicketTicket
def count_domain_tickets(domain, stage):
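    # Count the tickets in the given domain that are currently at the given request stage.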
query = (
FlicketTicket.query.join(FlicketDomain)
.join(FlicketRequestStage)
.filter(FlicketDomain.domain == domain)
.filter(FlicketRequestStage.request_stage == stage)
)
return query.count()
def create_pie_chart_dict():
"""
:return:
"""
stages = FlicketRequestStage.query
domains = FlicketDomain.query
graphs = []
for domain in domains:
graph_title = domain.domain
graph_labels = []
graph_values = []
for stage in stages:
graph_labels.append(stage.request_stage)
graph_values.append(count_domain_tickets(graph_title, stage.request_stage))
# append graphs if have values.
if any(graph_values):
graphs.append(
dict(
data=[
dict(
labels=graph_labels,
values=graph_values,
type="pie",
marker=dict(
colors=[
"darkorange",
"darkgreen",
"green",
"lightgreen",
]
),
sort=False,
)
],
layout=dict(
title=graph_title,
autosize=True,
margin=dict(b=0, t=40, l=0, r=0),
height=400,
),
)
)
ids = [f"Graph {i}" for i, _ in enumerate(graphs)]
graph_json = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
return ids, graph_json
| [
"application.flicket.models.flicket_models.FlicketTicket.query.join",
"json.dumps"
] | [((2211, 2265), 'json.dumps', 'json.dumps', (['graphs'], {'cls': 'plotly.utils.PlotlyJSONEncoder'}), '(graphs, cls=plotly.utils.PlotlyJSONEncoder)\n', (2221, 2265), False, 'import json\n'), ((475, 514), 'application.flicket.models.flicket_models.FlicketTicket.query.join', 'FlicketTicket.query.join', (['FlicketDomain'], {}), '(FlicketDomain)\n', (499, 514), False, 'from application.flicket.models.flicket_models import FlicketTicket\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('market_data', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='orderhistory',
options={'verbose_name': 'Uncompressed History Data', 'verbose_name_plural': 'Uncompressed History Data'},
),
migrations.AlterUniqueTogether(
name='itemregionstat',
unique_together=set([('mapregion', 'invtype')]),
),
migrations.AlterUniqueTogether(
name='itemregionstathistory',
unique_together=set([('mapregion', 'invtype', 'date')]),
),
migrations.AlterUniqueTogether(
name='orderhistory',
unique_together=set([('mapregion', 'invtype', 'date')]),
),
migrations.AlterIndexTogether(
name='orders',
index_together=set([('mapregion', 'invtype', 'is_active')]),
),
]
| [
"django.db.migrations.AlterModelOptions"
] | [((244, 408), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""orderhistory"""', 'options': "{'verbose_name': 'Uncompressed History Data', 'verbose_name_plural':\n 'Uncompressed History Data'}"}), "(name='orderhistory', options={'verbose_name':\n 'Uncompressed History Data', 'verbose_name_plural':\n 'Uncompressed History Data'})\n", (272, 408), False, 'from django.db import models, migrations\n')] |
"""Module for registering model fields for translation, for use by django-modeltranslation."""
from modeltranslation.translator import TranslationOptions
from modeltranslation.decorators import register
from home.translation_helper import add_language_content_panels
from .models import AboutPage, AboutSubPage, CaseStudyIndexPage, CaseStudyPage, HistoryPage, PeoplePage
@register(AboutPage)
class AboutPageTR(TranslationOptions):
"""Class declaring which fields of the AboutPage model to translate."""
fields = AboutPage.translation_fields
add_language_content_panels(AboutPage)
@register(AboutSubPage)
class AboutSubPageTR(TranslationOptions):
"""Class declaring which fields of the AboutSubPage model to translate."""
fields = AboutSubPage.translation_fields
add_language_content_panels(AboutSubPage)
@register(CaseStudyIndexPage)
class CaseStudyIndexPageTR(TranslationOptions):
"""Class declaring which fields of the CaseStudyIndexPage model to translate."""
fields = CaseStudyIndexPage.translation_fields
add_language_content_panels(CaseStudyIndexPage)
@register(CaseStudyPage)
class CaseStudyPageTR(TranslationOptions):
"""Class declaring which fields of the CaseStudyPage model to translate."""
fields = CaseStudyPage.translation_fields
add_language_content_panels(CaseStudyPage)
@register(HistoryPage)
class HistoryPageTR(TranslationOptions):
"""Class declaring which fields of the HistoryPage model to translate."""
fields = HistoryPage.translation_fields
add_language_content_panels(HistoryPage)
@register(PeoplePage)
class PeoplePageTR(TranslationOptions):
"""Class declaring which fields of the PeoplePage model to translate."""
fields = PeoplePage.translation_fields
add_language_content_panels(PeoplePage)
| [
"modeltranslation.decorators.register",
"home.translation_helper.add_language_content_panels"
] | [((376, 395), 'modeltranslation.decorators.register', 'register', (['AboutPage'], {}), '(AboutPage)\n', (384, 395), False, 'from modeltranslation.decorators import register\n'), ((556, 594), 'home.translation_helper.add_language_content_panels', 'add_language_content_panels', (['AboutPage'], {}), '(AboutPage)\n', (583, 594), False, 'from home.translation_helper import add_language_content_panels\n'), ((598, 620), 'modeltranslation.decorators.register', 'register', (['AboutSubPage'], {}), '(AboutSubPage)\n', (606, 620), False, 'from modeltranslation.decorators import register\n'), ((790, 831), 'home.translation_helper.add_language_content_panels', 'add_language_content_panels', (['AboutSubPage'], {}), '(AboutSubPage)\n', (817, 831), False, 'from home.translation_helper import add_language_content_panels\n'), ((835, 863), 'modeltranslation.decorators.register', 'register', (['CaseStudyIndexPage'], {}), '(CaseStudyIndexPage)\n', (843, 863), False, 'from modeltranslation.decorators import register\n'), ((1051, 1098), 'home.translation_helper.add_language_content_panels', 'add_language_content_panels', (['CaseStudyIndexPage'], {}), '(CaseStudyIndexPage)\n', (1078, 1098), False, 'from home.translation_helper import add_language_content_panels\n'), ((1102, 1125), 'modeltranslation.decorators.register', 'register', (['CaseStudyPage'], {}), '(CaseStudyPage)\n', (1110, 1125), False, 'from modeltranslation.decorators import register\n'), ((1298, 1340), 'home.translation_helper.add_language_content_panels', 'add_language_content_panels', (['CaseStudyPage'], {}), '(CaseStudyPage)\n', (1325, 1340), False, 'from home.translation_helper import add_language_content_panels\n'), ((1344, 1365), 'modeltranslation.decorators.register', 'register', (['HistoryPage'], {}), '(HistoryPage)\n', (1352, 1365), False, 'from modeltranslation.decorators import register\n'), ((1532, 1572), 'home.translation_helper.add_language_content_panels', 'add_language_content_panels', (['HistoryPage'], {}), '(HistoryPage)\n', (1559, 1572), False, 'from home.translation_helper import add_language_content_panels\n'), ((1576, 1596), 'modeltranslation.decorators.register', 'register', (['PeoplePage'], {}), '(PeoplePage)\n', (1584, 1596), False, 'from modeltranslation.decorators import register\n'), ((1760, 1799), 'home.translation_helper.add_language_content_panels', 'add_language_content_panels', (['PeoplePage'], {}), '(PeoplePage)\n', (1787, 1799), False, 'from home.translation_helper import add_language_content_panels\n')] |
from __future__ import unicode_literals
import warnings
from django.contrib.sitemaps.views import x_robots_tag
from django.contrib.sites.shortcuts import get_current_site
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.http import Http404
from django.template.response import TemplateResponse
import six
from .sitemaps import DisplayableSitemap
class DisplayableSitemap(DisplayableSitemap):
"""
Sitemap class for Django's sitemaps framework that returns
all published items for models that subclass ``Displayable``.
"""
@x_robots_tag
def sitemap(request, sitemaps, section=None,
template_name='sitemap_mobile.xml', content_type='application/xml',
mimetype=None):
if mimetype:
warnings.warn("The mimetype keyword argument is deprecated, use "
"content_type instead", DeprecationWarning, stacklevel=2)
content_type = mimetype
req_protocol = 'https' if request.is_secure() else 'http'
req_site = get_current_site(request)
if section is not None:
if section not in sitemaps:
raise Http404("No sitemap available for section: %r" % section)
maps = [sitemaps[section]]
else:
maps = list(six.itervalues(sitemaps))
page = request.GET.get("p", 1)
urls = []
for site in maps:
try:
if callable(site):
site = site()
urls.extend(site.get_urls(page=page, site=req_site,
protocol=req_protocol))
except EmptyPage:
raise Http404("Page %s empty" % page)
except PageNotAnInteger:
raise Http404("No page '%s'" % page)
return TemplateResponse(request, template_name, {'urlset': urls},
content_type=content_type)
| [
"django.template.response.TemplateResponse",
"six.itervalues",
"warnings.warn",
"django.http.Http404",
"django.contrib.sites.shortcuts.get_current_site"
] | [((1045, 1070), 'django.contrib.sites.shortcuts.get_current_site', 'get_current_site', (['request'], {}), '(request)\n', (1061, 1070), False, 'from django.contrib.sites.shortcuts import get_current_site\n'), ((1820, 1910), 'django.template.response.TemplateResponse', 'TemplateResponse', (['request', 'template_name', "{'urlset': urls}"], {'content_type': 'content_type'}), "(request, template_name, {'urlset': urls}, content_type=\n content_type)\n", (1836, 1910), False, 'from django.template.response import TemplateResponse\n'), ((783, 912), 'warnings.warn', 'warnings.warn', (['"""The mimetype keyword argument is deprecated, use content_type instead"""', 'DeprecationWarning'], {'stacklevel': '(2)'}), "(\n 'The mimetype keyword argument is deprecated, use content_type instead',\n DeprecationWarning, stacklevel=2)\n", (796, 912), False, 'import warnings\n'), ((1166, 1223), 'django.http.Http404', 'Http404', (["('No sitemap available for section: %r' % section)"], {}), "('No sitemap available for section: %r' % section)\n", (1173, 1223), False, 'from django.http import Http404\n'), ((1301, 1325), 'six.itervalues', 'six.itervalues', (['sitemaps'], {}), '(sitemaps)\n', (1315, 1325), False, 'import six\n'), ((1683, 1714), 'django.http.Http404', 'Http404', (["('Page %s empty' % page)"], {}), "('Page %s empty' % page)\n", (1690, 1714), False, 'from django.http import Http404\n'), ((1774, 1804), 'django.http.Http404', 'Http404', (['("No page \'%s\'" % page)'], {}), '("No page \'%s\'" % page)\n', (1781, 1804), False, 'from django.http import Http404\n')] |
# bot.py
import logging
import sys
import time
import traceback
from typing import Callable, Dict
from urllib import parse
from flask import Flask, request
from db import *
from helper import *
from message import Message
# Flask App
app = Flask(__name__)
logging.basicConfig(filename='guide.log', level=logging.INFO)
def command_guide(message: Message):
title = message.text
found, guide = get_guide(message.group_id, title)
if found:
content, picture_url = guide
if picture_url:
send_message(message.group_id, content, picture_url=picture_url)
else:
send_message(message.group_id, content)
else:
msg = f"Could not find a guide titled: {title}"
if len(guide) > 0:
msg += "\nPerhaps you meant:\n\t" + \
"\n\t".join([g[0] for g in guide])
send_message(message.group_id, msg)
def command_create(message: Message):
group_id = message.group_id
title = message.read_next()
found, ignore = get_guide(group_id, title)
if found:
send_message(group_id, "The guide already exists")
else:
if "?latex " in message.text:
content, expr = message.text.split("?latex ", 1)
try:
picture_url = save_image(compile_latex(expr))
create_guide(group_id, title, content, picture_url)
send_message(group_id, picture_url=picture_url)
except:
send_message(group_id,
"Oops an error occured while parsing :(\n"
"If you want to parse a formula, then surround the "
"expression with $")
else:
create_guide(group_id, title, message.text, message.picture_url)
def command_delete(message: Message):
delete_guide(message.group_id, message.text)
def command_edit(message: Message):
group_id = message.group_id
title = message.read_next()
found, ignore = get_guide(group_id, title)
if not found:
send_message(group_id, "The guide does not exist")
else:
if "?latex " in message.text:
content, expr = message.text.split("?latex ", 1)
try:
picture_url = save_image(compile_latex(expr))
edit_guide(group_id, title, content, picture_url)
send_message(group_id, picture_url=picture_url)
except:
send_message(group_id,
"Oops an error occured while parsing :(\n"
"If you want to parse a formula, then surround the "
"expression with $")
else:
edit_guide(group_id, title, message.text, message.picture_url)
def command_search(message: Message):
guides = search_guide(message.group_id, message.text)
if len(guides) > 0:
msg = "Found the following guides:\n\t" + \
"\n\t".join([g[0] for g in guides])
else:
msg = "Could not find any guides :("
send_message(message.group_id, msg)
def command_latex(message: Message):
try:
latex = compile_latex(message.text)
image_url = save_image(latex)
send_message(message.group_id,
picture_url=image_url)
except:
print(traceback.format_exc(), flush=True)
send_message(message.group_id,
"Oops an error occured while parsing :(\n"
"If you want to parse a formula, then surround the "
"expression with $")
def command_google(message: Message):
text = message.text
query = "lmgtfy.app/?q=" + parse.quote_plus(text)
send_message(message.group_id, query)
def command_help(message: Message):
help_msg = """𝗚𝘂𝗶𝗱𝗲 bot created by JP
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
?𝗴𝘂𝗶𝗱𝗲 <name>
?𝗰𝗿𝗲𝗮𝘁𝗲 <name | "phrase"> <content>
?𝗲𝗱𝗶𝘁 <name | "phrase"> <content>
?𝗱𝗲𝗹𝗲𝘁𝗲 <name>
?𝘀𝗲𝗮𝗿𝗰𝗵 <name>
?𝗹𝗮𝘁𝗲𝘅 <latex formula>
▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄▄"""
send_message(message.group_id, help_msg)
commands: Dict[str, Callable] = {
'guide': command_guide,
'create': command_create,
'delete': command_delete,
'edit': command_edit,
'search': command_search,
'latex': command_latex,
'google': command_google,
'help': command_help,
}
@app.route("/", methods=['POST'])
def webhook():
try:
if request is None:
return "OK", 200
time.sleep(0.5)
message = Message(request)
if message.from_user():
if message.check_prefix(prefix):
command_name = message.read_next(check_quote=False)
if command_name in commands:
commands[command_name](message)
return "OK", 200
except:
print(sys.exc_info())
return "Error", 500
| [
"logging.basicConfig",
"traceback.format_exc",
"message.Message",
"flask.Flask",
"time.sleep",
"urllib.parse.quote_plus"
] | [((233, 248), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (238, 248), False, 'from flask import Flask, request\n'), ((250, 311), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""guide.log"""', 'level': 'logging.INFO'}), "(filename='guide.log', level=logging.INFO)\n", (269, 311), False, 'import logging\n'), ((3691, 3713), 'urllib.parse.quote_plus', 'parse.quote_plus', (['text'], {}), '(text)\n', (3707, 3713), False, 'from urllib import parse\n'), ((4450, 4465), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (4460, 4465), False, 'import time\n'), ((4484, 4500), 'message.Message', 'Message', (['request'], {}), '(request)\n', (4491, 4500), False, 'from message import Message\n'), ((3341, 3363), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3361, 3363), False, 'import traceback\n')] |
from PIL import Image
import PIL.ExifTags as ExifTags
def get_gps(fname):
# open img files
im = Image.open(fname)
# Get exif information in dictionary form
exif = {
ExifTags.TAGS[k]: v
for k, v in im._getexif().items()
if k in ExifTags.TAGS
}
# get gps info
gps_tags = exif["GPSInfo"]
gps = {
ExifTags.GPSTAGS.get(t, t): gps_tags[t]
for t in gps_tags
}
# Get latitude and longitude information
def conv_deg(v):
# Converting fractions to degrees
d = float(v[0])
m = float(v[1])
s = float(v[2])
return d + (m / 60.0) + (s / 3600.0)
lat = conv_deg(gps["GPSLatitude"])
lat_ref = gps["GPSLatitudeRef"]
if lat_ref != "N": lat = 0 - lat
lon = conv_deg(gps["GPSLongitude"])
lon_ref = gps["GPSLongitudeRef"]
if lon_ref != "E": lon = 0 - lon
return lat, lon
if __name__ == "__main__":
try:
lat, lon = get_gps("./img/IMG_6269.jpg")
print(lat, lon)
except:
print("No GPS info") | [
"PIL.Image.open",
"PIL.ExifTags.GPSTAGS.get"
] | [((105, 122), 'PIL.Image.open', 'Image.open', (['fname'], {}), '(fname)\n', (115, 122), False, 'from PIL import Image\n'), ((360, 386), 'PIL.ExifTags.GPSTAGS.get', 'ExifTags.GPSTAGS.get', (['t', 't'], {}), '(t, t)\n', (380, 386), True, 'import PIL.ExifTags as ExifTags\n')] |
# Generated by Django 3.2.5 on 2021-07-23 19:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('jogos', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('arbitragem', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='grupoarbitragem',
name='admins_grupo',
field=models.ManyToManyField(related_name='admins_grupo', to=settings.AUTH_USER_MODEL, verbose_name='Administradores do Grupo'),
),
migrations.AddField(
model_name='grupoarbitragem',
name='membros_grupo',
field=models.ManyToManyField(related_name='membros_grupo', to=settings.AUTH_USER_MODEL, verbose_name='Membros do Grupo'),
),
migrations.AddField(
model_name='grupoarbitragem',
name='requisicao_pendente',
field=models.ManyToManyField(related_name='requisicao_pendente', to=settings.AUTH_USER_MODEL, verbose_name='Usuários com Requisição Pendente'),
),
migrations.AddField(
model_name='escala',
name='arbitro',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL, verbose_name='Árbitro'),
),
migrations.AddField(
model_name='escala',
name='partida',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jogos.partida', verbose_name='Partida'),
),
migrations.AddField(
model_name='escala',
name='posicao',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='arbitragem.posicaoescala', verbose_name='Posição'),
),
]
| [
"django.db.migrations.swappable_dependency",
"django.db.models.ManyToManyField",
"django.db.models.ForeignKey"
] | [((282, 339), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (313, 339), False, 'from django.db import migrations, models\n'), ((529, 655), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""admins_grupo"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Administradores do Grupo"""'}), "(related_name='admins_grupo', to=settings.\n AUTH_USER_MODEL, verbose_name='Administradores do Grupo')\n", (551, 655), False, 'from django.db import migrations, models\n'), ((786, 905), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""membros_grupo"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Membros do Grupo"""'}), "(related_name='membros_grupo', to=settings.\n AUTH_USER_MODEL, verbose_name='Membros do Grupo')\n", (808, 905), False, 'from django.db import migrations, models\n'), ((1042, 1183), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""requisicao_pendente"""', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Usuários com Requisição Pendente"""'}), "(related_name='requisicao_pendente', to=settings.\n AUTH_USER_MODEL, verbose_name='Usuários com Requisição Pendente')\n", (1064, 1183), False, 'from django.db import migrations, models\n'), ((1299, 1419), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'to': 'settings.AUTH_USER_MODEL', 'verbose_name': '"""Árbitro"""'}), "(on_delete=django.db.models.deletion.PROTECT, to=settings.\n AUTH_USER_MODEL, verbose_name='Árbitro')\n", (1316, 1419), False, 'from django.db import migrations, models\n'), ((1535, 1646), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""jogos.partida"""', 'verbose_name': '"""Partida"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'jogos.partida', verbose_name='Partida')\n", (1552, 1646), False, 'from django.db import migrations, models\n'), ((1762, 1884), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""arbitragem.posicaoescala"""', 'verbose_name': '"""Posição"""'}), "(on_delete=django.db.models.deletion.PROTECT, to=\n 'arbitragem.posicaoescala', verbose_name='Posição')\n", (1779, 1884), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
"""
Created on Wed July 08 13:12:15 2020
@author: kunal
"""
from networkx.generators import line
from align.schema import graph, instance
from align.schema.graph import Graph
from collections import Counter
from itertools import combinations
from align.schema import SubCircuit, Instance
from .util import compare_two_nodes, reduced_SD_neighbors
from ..schema.types import set_context
from ..schema import constraint
import pprint
import logging
logger = logging.getLogger(__name__)
class process_arrays:
"""
Improves placement for circuits with arrays such as DAC, ADC, equalizer
Creates a hierarchy for repeated elements
"""
def __init__(self, ckt, match_pairs):
"""
Args:
ckt_data (dict): all subckt graph, names and port
design_setup (dict): information from setup file
library (list): list of library elements in dict format
existing_generator (list): list of names of existing generators
"""
self.dl = ckt.parent
self.ckt = ckt
self.graph = Graph(ckt)
self.stop_points = list()
self.condition = True
self.is_digital = False
for const in ckt.constraints:
if isinstance(const, constraint.PowerPorts) or\
isinstance(const, constraint.GroundPorts) or \
isinstance(const, constraint.ClockPorts):
self.stop_points.extend(const.ports)
elif isinstance(const, constraint.IdentifyArray):
self.condition = const.isTrue
elif isinstance(const, constraint.IsDigital):
self.is_digital = const.isTrue
self.match_pairs = {k: v for k, v in match_pairs.items() if len(v) > 1}
self.name = ckt.name
self.iconst = ckt.constraints
self.hier_sp = set()
self.align_block_const = dict()
self.new_hier_instances = dict()
self._filter_start_points_from_match_pairs()
self._check_array_start_points(self.stop_points)
def _filter_start_points_from_match_pairs(self):
for k, pair in self.match_pairs.items():
logger.debug(f"all pairs from {k}:{pair}")
if "start_point" in pair.keys():
if pair["start_point"] and isinstance(pair["start_point"][0], str):
# Check later for CTDTDSM
self.hier_sp.update(pair["start_point"])
del pair["start_point"]
logger.debug(f"New symmetrical start points {pair}")
logger.debug(f"updated match pairs: {pprint.pformat(self.match_pairs, indent=4)}")
def _check_array_start_points(self, traversed):
logger.debug(f"new hier start points: {self.hier_sp}")
for sp in sorted(self.hier_sp):
logger.debug(
f"Searching arrays from:{sp}\
traversed: {traversed} \
existing match pairs: {pprint.pformat(self.match_pairs, indent=4)}"
)
if sp not in self.graph.nodes():
logger.debug(f"{sp} not found in graph {self.graph.nodes()}")
continue
array = self.find_array(sp, traversed)
if array:
logger.debug(f"found array instances {array}")
logger.debug(f"updated match pairs: {pprint.pformat(self.match_pairs, indent=4)}")
def find_array(self, start_node: str, traversed: list):
"""
Creates array hierarchies starting from input node
Parameters
----------
node : str
node with high fanout.
traversed : list
DESCRIPTION.
"""
if not self.condition:
logger.info(f"auto-array generation set to false")
return
elif self.is_digital:
            logger.info(f"can't identify array in digital ckt {self.name}")
return
node_hier = {}
lvl1 = list(set(self.graph.neighbors(start_node)) - set(traversed))
node_hier[start_node] = self.matching_groups(start_node, lvl1)
logger.debug(f"new hierarchy points {node_hier} from {start_node}")
if len(node_hier[start_node]) == 0:
return
for group in sorted(node_hier[start_node], key=lambda group: len(group)):
if len(group) > 0:
templates = {}
match_grps = {}
for el in sorted(group):
match_grps[el] = [el]
templates[start_node] = list()
visited = group + self.stop_points + [el] + [start_node]
array = match_grps.copy()
self.trace_template(match_grps, visited, templates[start_node], array)
logger.debug(f"similar groups final from {start_node}:{array}")
# converts results to a 2D/1D list
return self.process_results(start_node, array)
def process_results(self, start_node, array):
if not array:
logger.debug(f"no symmetry from {start_node}")
return
array_2D = list()
for inst_list in array.values():
array_2D.append([inst for inst in inst_list \
if self.ckt.get_element(inst)])
if len(array_2D[0])==1:
self.align_block_const[start_node] = [inst[0] for inst in array_2D]
return self.align_block_const[start_node]
else:
self.new_hier_instances[start_node] = array_2D
return array_2D
def matching_groups(self, node, lvl1: list):
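        # Partition the neighbours of `node` into groups of mutually similar elements that connect to `node` through the same pin.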
similar_groups = list()
logger.debug(f"creating groups for all neighbors: {lvl1}")
# TODO: modify this best case complexity from n*(n-1) to n complexity
for l1_node1, l1_node2 in combinations(lvl1, 2):
if compare_two_nodes(self.graph, l1_node1, l1_node2) and \
self.graph.get_edge_data(node, l1_node1)['pin'] == \
self.graph.get_edge_data(node, l1_node2)['pin']:
found_flag = 0
logger.debug(f"similar groups {similar_groups}")
for index, sublist in enumerate(similar_groups):
if l1_node1 in sublist and l1_node2 in sublist:
found_flag = 1
break
if l1_node1 in sublist:
similar_groups[index].append(l1_node2)
found_flag = 1
break
elif l1_node2 in sublist:
similar_groups[index].append(l1_node1)
found_flag = 1
break
if found_flag == 0:
similar_groups.append([l1_node1, l1_node2])
return similar_groups
def trace_template(self, match_grps, visited, template, array):
next_match = {}
traversed = visited.copy()
logger.debug(f"tracing groups {match_grps} visited {visited}")
for source, groups in match_grps.items():
next_match[source] = list()
for node in groups:
nbrs = set(self.graph.neighbors(node)) - set(traversed)
lvl1 = [nbr for nbr in nbrs if reduced_SD_neighbors(self.graph, node, nbr)]
# logger.debug(f"lvl1 {lvl1} {set(self.graph.neighbors(node))} {traversed}")
next_match[source].extend(lvl1)
visited += lvl1
if not next_match[source]:
del next_match[source]
if next_match and self.match_branches(next_match):
for source in array.keys():
if source in next_match.keys():
array[source] += next_match[source]
template += next_match[list(next_match.keys())[0]]
logger.debug(f"found matching lvl {template}, {match_grps}")
if self.check_non_convergence(next_match):
self.trace_template(next_match, visited, template, array)
def match_branches(self, nodes_dict):
logger.debug(f"matching next lvl branches {nodes_dict} stop points: {self.stop_points}")
nbr_values = {}
for node, nbrs in nodes_dict.items():
super_list = list()
for nbr in nbrs:
if self.graph._is_element(self.graph.nodes[nbr]):
inst = self.graph.element(nbr)
# logger.debug(f"instance {inst}")
# super_list.append(inst.model)
super_list.append(inst.abstract_name)
else:
super_list.append("net")
logger.debug(f"all probable neighbors from {node} {super_list}")
nbr_values[node] = Counter(super_list)
logger.debug(f"all nbr properties {nbr_values}")
_, main = nbr_values.popitem()
for node, val in nbr_values.items():
if val == main:
continue
else:
return False
return True
def check_non_convergence(self, match: dict):
vals = list()
for val in match.values():
common_node = set(val).intersection(vals)
common_element = [node for node in common_node if self.graph._is_element(node)]
if common_element:
logger.debug(f"{common_element} already existing , ending further array search")
return False
else:
vals += val
logger.debug("not converging level")
return True
def add_align_block_const(self):
logger.debug(f"AlignBlock const: {self.align_block_const}")
for key, inst_list in self.align_block_const.items():
logger.debug(f"align instances: {inst_list}")
h_blocks = [inst for inst in inst_list \
if inst in self.graph]
if len(h_blocks) > 0:
with set_context(self.iconst):
self.iconst.append(constraint.Align(line="h_center", instances=h_blocks))
# del self.match_pairs[key]
logger.debug(f"AlignBlock const update {self.iconst}")
# hier_keys = [key for key, value in self.match_pairs.items() if "name" in value.keys()]
# for key in hier_keys:
# del self.match_pairs[key]
# return hier_keys
def add_new_array_hier(self):
logger.debug(f"New hierarchy instances: {self.new_hier_instances}")
sub_hier_elements = set()
for key, array_2D in self.new_hier_instances.items():
logger.debug(f"new hier instances: {array_2D}")
all_inst = [inst for template in array_2D for inst in template
if inst in self.graph and inst not in sub_hier_elements]
            # Filter repeated elements across the array of objects
repeated_elements = set([inst for inst, count in Counter(all_inst).items() if count>1])
all_inst = set(all_inst) - repeated_elements
array_2D = [list(set(array_1D) - repeated_elements) for array_1D in array_2D]
sub_hier_elements.update(all_inst)
if len(all_inst) <=1:
logger.debug(f"not enough elements to create a hierarchy")
continue
new_array_hier_name = "ARRAY_HIER_" + key
create_new_hiearchy(self.dl, self.name, new_array_hier_name, all_inst)
all_template_names = list()
for template in array_2D:
template_name = self.get_new_subckt_name("ARRAY_TEMPLATE")
create_new_hiearchy(self.dl, new_array_hier_name, template_name, template)
all_template_names.append(template_name)
self.add_array_placement_constraints(new_array_hier_name, all_template_names)
def add_array_placement_constraints(self, hier, modules):
#TODO make it sizing aware
# array placement constraint
arre_hier_const = self.dl.find(hier).constraints
with set_context(arre_hier_const):
instances = ['X_'+module for module in modules]
arre_hier_const.append(constraint.Align(line="h_center", instances=instances))
arre_hier_const.append(constraint.SameTemplate(instances=instances))
# template placement constraint
# for template in modules:
# template_module = self.dl.find(template)
# all_inst = [inst.name for inst in template_module.elements]
# with set_context(template_module.constraints):
# template_module.constraints.append(constraint.Align(line="v_center", instances=all_inst))
def get_new_subckt_name(self, name):
count =1
new_name = name
while self.ckt.parent.find(new_name):
new_name = name + str(count)
count +=1
return new_name
def create_new_hiearchy(dl, parent_name, child_name, elements, pins_map=None):
parent = dl.find(parent_name)
# Create a subckt and add to library
logger.info(f"adding new array hierarchy {child_name} elements {elements}")
if not pins_map:
pins_map = {}
G = Graph(parent)
logger.debug(f"{parent.elements}")
for ele in elements:
if parent.get_element(ele):
pins_map.update({net:net for net in G.neighbors(ele)})
logger.debug(f"pins {pins_map} {elements} {parent.pins}")
pins_map = { net:net for net in pins_map.keys()
if net in parent.pins or
(set(G.neighbors(net))-set(elements))
}
if not pins_map:
logger.error(f"can't create module with no pins")
return
logger.debug(f"new subckt pins : {pins_map}")
assert not dl.find(child_name), f"subcircuit {child_name} already existing"
with set_context(dl):
logger.debug(pins_map.keys)
child = SubCircuit(name=child_name, pins=list(pins_map.keys()))
dl.append(child)
# Add all instances of groupblock to new subckt
pes = list()
with set_context(child.elements):
for ele in elements:
pe = parent.get_element(ele)
if pe:
pes.append(pe)
child.elements.append(pe)
# Transfer global constraints
with set_context(child.constraints):
for const in list(parent.constraints):
if any(
isinstance(const, x)
for x in [
constraint.HorizontalDistance,
constraint.VerticalDistance,
constraint.BlockDistance,
constraint.CompactPlacement,
]
):
child.constraints.append(const)
# Remove elements from subckt then add new_subckt instance
inst_name = "X_"+child_name
with set_context(parent.elements):
for pe in pes:
parent.elements.remove(pe)
X1 = Instance(
name=inst_name,
model=child_name,
pins=pins_map,
generator=child_name,
abstract_name = child_name
)
parent.elements.append(X1) | [
"logging.getLogger",
"align.schema.graph.Graph",
"pprint.pformat",
"collections.Counter",
"itertools.combinations",
"align.schema.Instance"
] | [((483, 510), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (500, 510), False, 'import logging\n'), ((1094, 1104), 'align.schema.graph.Graph', 'Graph', (['ckt'], {}), '(ckt)\n', (1099, 1104), False, 'from align.schema.graph import Graph\n'), ((5761, 5782), 'itertools.combinations', 'combinations', (['lvl1', '(2)'], {}), '(lvl1, 2)\n', (5773, 5782), False, 'from itertools import combinations\n'), ((13080, 13093), 'align.schema.graph.Graph', 'Graph', (['parent'], {}), '(parent)\n', (13085, 13093), False, 'from align.schema.graph import Graph\n'), ((14844, 14954), 'align.schema.Instance', 'Instance', ([], {'name': 'inst_name', 'model': 'child_name', 'pins': 'pins_map', 'generator': 'child_name', 'abstract_name': 'child_name'}), '(name=inst_name, model=child_name, pins=pins_map, generator=\n child_name, abstract_name=child_name)\n', (14852, 14954), False, 'from align.schema import SubCircuit, Instance\n'), ((8705, 8724), 'collections.Counter', 'Counter', (['super_list'], {}), '(super_list)\n', (8712, 8724), False, 'from collections import Counter\n'), ((2601, 2643), 'pprint.pformat', 'pprint.pformat', (['self.match_pairs'], {'indent': '(4)'}), '(self.match_pairs, indent=4)\n', (2615, 2643), False, 'import pprint\n'), ((3343, 3385), 'pprint.pformat', 'pprint.pformat', (['self.match_pairs'], {'indent': '(4)'}), '(self.match_pairs, indent=4)\n', (3357, 3385), False, 'import pprint\n'), ((2972, 3014), 'pprint.pformat', 'pprint.pformat', (['self.match_pairs'], {'indent': '(4)'}), '(self.match_pairs, indent=4)\n', (2986, 3014), False, 'import pprint\n'), ((10838, 10855), 'collections.Counter', 'Counter', (['all_inst'], {}), '(all_inst)\n', (10845, 10855), False, 'from collections import Counter\n')] |
"""
Problem:
1.4 Palindrome Permutation: Given a string, write a function to check if it is a permutation of a
palindrome. A palindrome is a word or phrase that is the same forwards and backwards. A permutation
is a rearrangement of letters. The palindrome does not need to be limited to just dictionary words.
EXAMPLE
Input: Tact Coa
Output: True (permutations: "taco cat", "atco eta", etc.)
Hints: #106, #121, #134, #136
--
Questions:
- Do we ignore case sensitive letters?
A: I think we should convert to lowercase.
- Should spaces be ignored?
A: I would remove all of them
- Is an empty texting a valid palindrome?
A: Yes
--
Algorithm:
Examples:
'': True
'a': True
'ab': False
'bb': True
'bba': True
'Tact Coa': True
'aaabbb' = 'ababab': False
We can have only one letter with an odd count. So, we can check for it and
if we find more than one, we can return False.
"""
from collections import Counter
def palindrome_permutation(text):
if text == "":
return True
text = text.lower().replace(" ", "")
letter_count = Counter(text)
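    # A string can be permuted into a palindrome only if at most one character has an odd count (the optional middle character).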
odd_count_letters = 0
for key in letter_count:
if letter_count[key] % 2 != 0:
odd_count_letters += 1
if odd_count_letters > 1:
return False
return True
def test(text, expected_answer):
answer = palindrome_permutation(text)
if answer != expected_answer:
raise Exception(
f"Answer {answer} is wrong. Expected answer is {expected_answer}"
)
if __name__ == "__main__":
test("", True)
test("a", True)
test("ab", False)
test("bb", True)
test("bba", True)
test("Tact Coa", True)
test("aaabbb", False)
print("All tests passed!")
| [
"collections.Counter"
] | [((1057, 1070), 'collections.Counter', 'Counter', (['text'], {}), '(text)\n', (1064, 1070), False, 'from collections import Counter\n')] |
# -----------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# -----------------------------------------------------------------------------
import configparser
import unittest
from unittest import mock
from azdev.operations.style import _config_file_path
class TestConfigFilePath(unittest.TestCase):
def test_unsupported_code_style_checker(self):
with self.assertRaises(ValueError):
_config_file_path(style_type="unknown")
def test_pylint_config_without_setup(self):
mocked_config = configparser.ConfigParser()
mocked_config.add_section("cli")
mocked_config.set("cli", "repo_path", "")
mocked_config.add_section("ext")
mocked_config.set("ext", "repo_paths", "")
with mock.patch("azdev.operations.style.get_azdev_config", return_value=mocked_config):
r = _config_file_path(style_type="pylint")
self.assertTrue(r[0].endswith("/config_files/cli_pylintrc"))
self.assertTrue(r[1].endswith("/config_files/ext_pylintrc"))
def test_pylint_config_with_partially_setup(self):
cli_repo_path = "~/Azure/azure-cli"
mocked_config = configparser.ConfigParser()
mocked_config.add_section("cli")
mocked_config.set("cli", "repo_path", cli_repo_path)
mocked_config.add_section("ext")
mocked_config.set("ext", "repo_paths", "")
with mock.patch("azdev.operations.style.get_azdev_config", return_value=mocked_config):
r = _config_file_path(style_type="pylint")
self.assertEqual(r[0], cli_repo_path + "/pylintrc")
self.assertTrue(r[1].endswith("/config_files/ext_pylintrc"))
def test_pylint_config_with_all_setup(self):
cli_repo_path = "~/Azure/azure-cli"
ext_repo_path = "~/Azure/azure-cli-extensions"
mocked_config = configparser.ConfigParser()
mocked_config.add_section("cli")
mocked_config.set("cli", "repo_path", cli_repo_path)
mocked_config.add_section("ext")
mocked_config.set("ext", "repo_paths", ext_repo_path)
with mock.patch("azdev.operations.style.get_azdev_config", return_value=mocked_config):
r = _config_file_path()
self.assertEqual(r[0], cli_repo_path + "/pylintrc")
self.assertTrue(r[1], "/pylintrc")
    def test_flake8_config_without_setup(self):
mocked_config = configparser.ConfigParser()
mocked_config.add_section("cli")
mocked_config.set("cli", "repo_path", "")
mocked_config.add_section("ext")
mocked_config.set("ext", "repo_paths", "")
with mock.patch("azdev.operations.style.get_azdev_config", return_value=mocked_config):
r = _config_file_path(style_type="flake8")
self.assertTrue(r[0].endswith("/config_files/cli.flake8"))
self.assertTrue(r[1].endswith("/config_files/ext.flake8"))
def test_flake8_config_with_partially_setup(self):
ext_repo_path = "~/Azure/azure-cli-extensions"
mocked_config = configparser.ConfigParser()
mocked_config.add_section("cli")
mocked_config.set("cli", "repo_path", "")
mocked_config.add_section("ext")
mocked_config.set("ext", "repo_paths", ext_repo_path)
with mock.patch("azdev.operations.style.get_azdev_config", return_value=mocked_config):
r = _config_file_path(style_type="flake8")
self.assertTrue(r[0].endswith("/config_files/cli.flake8"))
self.assertTrue(r[1].endswith(ext_repo_path + "/.flake8"))
    def test_flake8_config_with_all_setup(self):
cli_repo_path = "~/Azure/azure-cli"
ext_repo_path = "~/Azure/azure-cli-extensions"
mocked_config = configparser.ConfigParser()
mocked_config.add_section("cli")
mocked_config.set("cli", "repo_path", cli_repo_path)
mocked_config.add_section("ext")
mocked_config.set("ext", "repo_paths", ext_repo_path)
with mock.patch("azdev.operations.style.get_azdev_config", return_value=mocked_config):
r = _config_file_path(style_type="flake8")
self.assertTrue(r[0].endswith(cli_repo_path + "/.flake8"))
self.assertTrue(r[1].endswith(ext_repo_path + "/.flake8"))
| [
"unittest.mock.patch",
"configparser.ConfigParser",
"azdev.operations.style._config_file_path"
] | [((701, 728), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (726, 728), False, 'import configparser\n'), ((1334, 1361), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1359, 1361), False, 'import configparser\n'), ((2018, 2045), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (2043, 2045), False, 'import configparser\n'), ((2568, 2595), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (2593, 2595), False, 'import configparser\n'), ((3209, 3236), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (3234, 3236), False, 'import configparser\n'), ((3899, 3926), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (3924, 3926), False, 'import configparser\n'), ((588, 627), 'azdev.operations.style._config_file_path', '_config_file_path', ([], {'style_type': '"""unknown"""'}), "(style_type='unknown')\n", (605, 627), False, 'from azdev.operations.style import _config_file_path\n'), ((926, 1012), 'unittest.mock.patch', 'mock.patch', (['"""azdev.operations.style.get_azdev_config"""'], {'return_value': 'mocked_config'}), "('azdev.operations.style.get_azdev_config', return_value=\n mocked_config)\n", (936, 1012), False, 'from unittest import mock\n'), ((1025, 1063), 'azdev.operations.style._config_file_path', '_config_file_path', ([], {'style_type': '"""pylint"""'}), "(style_type='pylint')\n", (1042, 1063), False, 'from azdev.operations.style import _config_file_path\n'), ((1570, 1656), 'unittest.mock.patch', 'mock.patch', (['"""azdev.operations.style.get_azdev_config"""'], {'return_value': 'mocked_config'}), "('azdev.operations.style.get_azdev_config', return_value=\n mocked_config)\n", (1580, 1656), False, 'from unittest import mock\n'), ((1669, 1707), 'azdev.operations.style._config_file_path', '_config_file_path', ([], {'style_type': '"""pylint"""'}), "(style_type='pylint')\n", (1686, 1707), False, 'from azdev.operations.style import _config_file_path\n'), ((2265, 2351), 'unittest.mock.patch', 'mock.patch', (['"""azdev.operations.style.get_azdev_config"""'], {'return_value': 'mocked_config'}), "('azdev.operations.style.get_azdev_config', return_value=\n mocked_config)\n", (2275, 2351), False, 'from unittest import mock\n'), ((2364, 2383), 'azdev.operations.style._config_file_path', '_config_file_path', ([], {}), '()\n', (2381, 2383), False, 'from azdev.operations.style import _config_file_path\n'), ((2793, 2879), 'unittest.mock.patch', 'mock.patch', (['"""azdev.operations.style.get_azdev_config"""'], {'return_value': 'mocked_config'}), "('azdev.operations.style.get_azdev_config', return_value=\n mocked_config)\n", (2803, 2879), False, 'from unittest import mock\n'), ((2892, 2930), 'azdev.operations.style._config_file_path', '_config_file_path', ([], {'style_type': '"""flake8"""'}), "(style_type='flake8')\n", (2909, 2930), False, 'from azdev.operations.style import _config_file_path\n'), ((3445, 3531), 'unittest.mock.patch', 'mock.patch', (['"""azdev.operations.style.get_azdev_config"""'], {'return_value': 'mocked_config'}), "('azdev.operations.style.get_azdev_config', return_value=\n mocked_config)\n", (3455, 3531), False, 'from unittest import mock\n'), ((3544, 3582), 'azdev.operations.style._config_file_path', '_config_file_path', ([], {'style_type': '"""flake8"""'}), "(style_type='flake8')\n", (3561, 3582), False, 'from azdev.operations.style import _config_file_path\n'), ((4146, 4232), 'unittest.mock.patch', 'mock.patch', 
(['"""azdev.operations.style.get_azdev_config"""'], {'return_value': 'mocked_config'}), "('azdev.operations.style.get_azdev_config', return_value=\n mocked_config)\n", (4156, 4232), False, 'from unittest import mock\n'), ((4245, 4283), 'azdev.operations.style._config_file_path', '_config_file_path', ([], {'style_type': '"""flake8"""'}), "(style_type='flake8')\n", (4262, 4283), False, 'from azdev.operations.style import _config_file_path\n')] |
import tensorflow as tf
import tensorflow_probability as tfp
class HSCIC:
def __init__(self,
amplitude = 1.0,
length_scale = 0.1,
regularization = 0.01,
**kwargs):
# kernel model
self.kernel = tfp.math.psd_kernels.ExponentiatedQuadratic(
amplitude = amplitude,
length_scale = length_scale
)
self.regularization = regularization
    # compute the HSCIC loss for batches Y, A, X, H
def __call__(self,
Y : tf.Tensor,
A : tf.Tensor,
X : tf.Tensor,
H : tf.Tensor):
# reshape arrays
if tf.rank(A) == 1 : A = tf.reshape(A, [tf.shape(A)[0], 1])
if tf.rank(X) == 1 : X = tf.reshape(X, [tf.shape(X)[0], 1])
if tf.rank(Y) == 1 : Y = tf.reshape(Y, [tf.shape(Y)[0], 1])
if tf.rank(H) == 1 : H = tf.reshape(H, [tf.shape(H)[0], 1])
A = tf.cast(A, dtype='float32')
X = tf.cast(X, dtype='float32')
Y = tf.cast(Y, dtype='float32')
H = tf.cast(H, dtype='float32')
X = tf.concat([X, H], axis = 1)
# get Kernel matrices
gram_A = self.kernel.matrix(A, A)
gram_X = self.kernel.matrix(X, X)
gram_Y = self.kernel.matrix(Y, Y)
gram_A = tf.cast(gram_A, dtype='float32')
gram_X = tf.cast(gram_X, dtype='float32')
gram_Y = tf.cast(gram_Y, dtype='float32')
# get HSCIC loss
res = tf.map_fn(fn=lambda X: self.inner_loss(X, gram_A, gram_X, gram_Y),
elems=gram_X)
res = tf.math.reduce_mean(res)
return res
    # inner loss for a single row of the gram matrix of X
def inner_loss(self, X, gram_A, gram_X, gram_Y) :
# get number of samples and make matrix W
n_samples = tf.cast(tf.shape(gram_Y)[0], dtype = 'float32')
identity = tf.eye(n_samples, dtype = 'float32')
W = gram_X + n_samples * self.regularization * identity
# solve linear system
if tf.rank(X) == 1: X = tf.reshape(X, [tf.shape(X)[0], 1])
f = tf.linalg.solve(tf.transpose(W), X)
fT = tf.transpose(f)
# get distributions
res = tf.einsum('ij,jk,kl', fT, gram_A * gram_Y, f)
M = tf.einsum('ij,jk', gram_A, f)
N = tf.einsum('ij,jk', gram_Y, f)
res = res - 2 * tf.einsum('ij,jk', fT, M * N)
P = tf.einsum('ij,jk,kl', fT, gram_A, f)
Q = tf.einsum('ij,jk,kl', fT, gram_Y, f)
res = res + P * Q
return res | [
"tensorflow.eye",
"tensorflow.shape",
"tensorflow.transpose",
"tensorflow.rank",
"tensorflow.einsum",
"tensorflow.concat",
"tensorflow.math.reduce_mean",
"tensorflow_probability.math.psd_kernels.ExponentiatedQuadratic",
"tensorflow.cast"
] | [((287, 382), 'tensorflow_probability.math.psd_kernels.ExponentiatedQuadratic', 'tfp.math.psd_kernels.ExponentiatedQuadratic', ([], {'amplitude': 'amplitude', 'length_scale': 'length_scale'}), '(amplitude=amplitude,\n length_scale=length_scale)\n', (330, 382), True, 'import tensorflow_probability as tfp\n'), ((966, 993), 'tensorflow.cast', 'tf.cast', (['A'], {'dtype': '"""float32"""'}), "(A, dtype='float32')\n", (973, 993), True, 'import tensorflow as tf\n'), ((1006, 1033), 'tensorflow.cast', 'tf.cast', (['X'], {'dtype': '"""float32"""'}), "(X, dtype='float32')\n", (1013, 1033), True, 'import tensorflow as tf\n'), ((1046, 1073), 'tensorflow.cast', 'tf.cast', (['Y'], {'dtype': '"""float32"""'}), "(Y, dtype='float32')\n", (1053, 1073), True, 'import tensorflow as tf\n'), ((1086, 1113), 'tensorflow.cast', 'tf.cast', (['H'], {'dtype': '"""float32"""'}), "(H, dtype='float32')\n", (1093, 1113), True, 'import tensorflow as tf\n'), ((1127, 1152), 'tensorflow.concat', 'tf.concat', (['[X, H]'], {'axis': '(1)'}), '([X, H], axis=1)\n', (1136, 1152), True, 'import tensorflow as tf\n'), ((1332, 1364), 'tensorflow.cast', 'tf.cast', (['gram_A'], {'dtype': '"""float32"""'}), "(gram_A, dtype='float32')\n", (1339, 1364), True, 'import tensorflow as tf\n'), ((1382, 1414), 'tensorflow.cast', 'tf.cast', (['gram_X'], {'dtype': '"""float32"""'}), "(gram_X, dtype='float32')\n", (1389, 1414), True, 'import tensorflow as tf\n'), ((1432, 1464), 'tensorflow.cast', 'tf.cast', (['gram_Y'], {'dtype': '"""float32"""'}), "(gram_Y, dtype='float32')\n", (1439, 1464), True, 'import tensorflow as tf\n'), ((1624, 1648), 'tensorflow.math.reduce_mean', 'tf.math.reduce_mean', (['res'], {}), '(res)\n', (1643, 1648), True, 'import tensorflow as tf\n'), ((1904, 1938), 'tensorflow.eye', 'tf.eye', (['n_samples'], {'dtype': '"""float32"""'}), "(n_samples, dtype='float32')\n", (1910, 1938), True, 'import tensorflow as tf\n'), ((2164, 2179), 'tensorflow.transpose', 'tf.transpose', (['f'], {}), '(f)\n', (2176, 2179), True, 'import tensorflow as tf\n'), ((2223, 2268), 'tensorflow.einsum', 'tf.einsum', (['"""ij,jk,kl"""', 'fT', '(gram_A * gram_Y)', 'f'], {}), "('ij,jk,kl', fT, gram_A * gram_Y, f)\n", (2232, 2268), True, 'import tensorflow as tf\n'), ((2281, 2310), 'tensorflow.einsum', 'tf.einsum', (['"""ij,jk"""', 'gram_A', 'f'], {}), "('ij,jk', gram_A, f)\n", (2290, 2310), True, 'import tensorflow as tf\n'), ((2323, 2352), 'tensorflow.einsum', 'tf.einsum', (['"""ij,jk"""', 'gram_Y', 'f'], {}), "('ij,jk', gram_Y, f)\n", (2332, 2352), True, 'import tensorflow as tf\n'), ((2419, 2455), 'tensorflow.einsum', 'tf.einsum', (['"""ij,jk,kl"""', 'fT', 'gram_A', 'f'], {}), "('ij,jk,kl', fT, gram_A, f)\n", (2428, 2455), True, 'import tensorflow as tf\n'), ((2468, 2504), 'tensorflow.einsum', 'tf.einsum', (['"""ij,jk,kl"""', 'fT', 'gram_Y', 'f'], {}), "('ij,jk,kl', fT, gram_Y, f)\n", (2477, 2504), True, 'import tensorflow as tf\n'), ((693, 703), 'tensorflow.rank', 'tf.rank', (['A'], {}), '(A)\n', (700, 703), True, 'import tensorflow as tf\n'), ((761, 771), 'tensorflow.rank', 'tf.rank', (['X'], {}), '(X)\n', (768, 771), True, 'import tensorflow as tf\n'), ((829, 839), 'tensorflow.rank', 'tf.rank', (['Y'], {}), '(Y)\n', (836, 839), True, 'import tensorflow as tf\n'), ((897, 907), 'tensorflow.rank', 'tf.rank', (['H'], {}), '(H)\n', (904, 907), True, 'import tensorflow as tf\n'), ((2047, 2057), 'tensorflow.rank', 'tf.rank', (['X'], {}), '(X)\n', (2054, 2057), True, 'import tensorflow as tf\n'), ((2131, 2146), 'tensorflow.transpose', 'tf.transpose', 
(['W'], {}), '(W)\n', (2143, 2146), True, 'import tensorflow as tf\n'), ((1844, 1860), 'tensorflow.shape', 'tf.shape', (['gram_Y'], {}), '(gram_Y)\n', (1852, 1860), True, 'import tensorflow as tf\n'), ((2377, 2406), 'tensorflow.einsum', 'tf.einsum', (['"""ij,jk"""', 'fT', '(M * N)'], {}), "('ij,jk', fT, M * N)\n", (2386, 2406), True, 'import tensorflow as tf\n'), ((730, 741), 'tensorflow.shape', 'tf.shape', (['A'], {}), '(A)\n', (738, 741), True, 'import tensorflow as tf\n'), ((798, 809), 'tensorflow.shape', 'tf.shape', (['X'], {}), '(X)\n', (806, 809), True, 'import tensorflow as tf\n'), ((866, 877), 'tensorflow.shape', 'tf.shape', (['Y'], {}), '(Y)\n', (874, 877), True, 'import tensorflow as tf\n'), ((934, 945), 'tensorflow.shape', 'tf.shape', (['H'], {}), '(H)\n', (942, 945), True, 'import tensorflow as tf\n'), ((2083, 2094), 'tensorflow.shape', 'tf.shape', (['X'], {}), '(X)\n', (2091, 2094), True, 'import tensorflow as tf\n')] |
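A minimal usage sketch of the HSCIC class defined above, assuming the class is in scope and TensorFlow / TensorFlow Probability are installed; the tensor shapes and values below are illustrative assumptions only.
import tensorflow as tf
# toy batch of 32 samples: outcome Y, treatment A, covariate X, extra covariate H
hscic = HSCIC(amplitude=1.0, length_scale=0.1, regularization=0.01)
Y = tf.random.normal([32, 1])
A = tf.random.normal([32, 1])
X = tf.random.normal([32, 1])
H = tf.random.normal([32, 1])
loss = hscic(Y, A, X, H)  # scalar HSCIC penalty
print(float(loss))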
import json
import sys
from direct.showbase.ShowBase import ShowBase
import panda3d.core as p3d
import cefpanda
p3d.load_prc_file_data('', 'win-size 1280 720')
class Game(ShowBase):
def __init__(self):
ShowBase.__init__(self)
self.accept('escape', sys.exit)
# Setup ui
self.ui = cefpanda.CEFPanda()
self.ui.set_js_function('update_options', self.update_options)
self.ui.load_file('ui/main.html')
options = json.dumps(self.get_options())
self.ui.exec_js_func('ui_update_options', options)
def get_options(self):
winprops = self.win.get_properties()
disp_info = self.pipe.get_display_information()
options = {
'selected_resolution': '{} x {}'.format(winprops.get_x_size(), winprops.get_y_size()),
'resolutions': sorted(list({
'{} x {}'.format(
disp_info.get_display_mode_width(i),
disp_info.get_display_mode_height(i)
)
for i in range(disp_info.get_total_display_modes())
}), key=lambda x: -int(x.split(' x ')[1])),
'fullscreen': winprops.get_fullscreen(),
}
return options
def update_options(self, options):
winprops = p3d.WindowProperties()
resx, resy = [int(i) for i in options['selected_resolution'].split(' x ')]
winprops.set_size(resx, resy)
winprops.set_fullscreen(options['fullscreen'])
self.win.request_properties(winprops)
if __name__ == '__main__':
APP = Game()
APP.run()
| [
"direct.showbase.ShowBase.ShowBase.__init__",
"panda3d.core.load_prc_file_data",
"cefpanda.CEFPanda",
"panda3d.core.WindowProperties"
] | [((115, 162), 'panda3d.core.load_prc_file_data', 'p3d.load_prc_file_data', (['""""""', '"""win-size 1280 720"""'], {}), "('', 'win-size 1280 720')\n", (137, 162), True, 'import panda3d.core as p3d\n'), ((219, 242), 'direct.showbase.ShowBase.ShowBase.__init__', 'ShowBase.__init__', (['self'], {}), '(self)\n', (236, 242), False, 'from direct.showbase.ShowBase import ShowBase\n'), ((322, 341), 'cefpanda.CEFPanda', 'cefpanda.CEFPanda', ([], {}), '()\n', (339, 341), False, 'import cefpanda\n'), ((1289, 1311), 'panda3d.core.WindowProperties', 'p3d.WindowProperties', ([], {}), '()\n', (1309, 1311), True, 'import panda3d.core as p3d\n')] |
from rdflib.graph import Graph, ConjunctiveGraph
from rdflib import Literal
from rdflib import RDF
from rdflib import RDFS
from rdflib import URIRef
from rdflib import Namespace
from rdflib import plugin
from rdflib.store import Store
from rdflib.term import BNode
import rdflib
import delphin.mrs
import delphin.variable
import delphin.predicate
# some useful namespaces
MRS = Namespace("http://www.delph-in.net/schema/mrs#")
ERG = Namespace("http://www.delph-in.net/schema/erg#")
DELPH = Namespace("http://www.delph-in.net/schema/")
POS = Namespace("http://www.delph-in.net/schema/pos#")
def mrs_to_rdf(m:delphin.mrs._mrs.MRS,
MRSI:rdflib.term.URIRef,
defaultGraph:rdflib.graph.ConjunctiveGraph=None) -> rdflib.graph.ConjunctiveGraph:
"""
Takes a PyDelphin MRS object "m" and serializes it into a named RDF graph inside a store.
Args:
m: a PyDelphin MRS instance to be converted into RDF format
MRSI: URI of the MRS instance being converted
defaultGraph : the conjunctive graph representing the profile graph. If not given, creates one.
    In-place function that alters the conjunctive graph with the serialized MRS and returns that graph as well.
    If no conjunctive graph is given, a new one is created and returned.
"""
# Making the arguments behave well:
if defaultGraph is None:
defaultGraph = ConjunctiveGraph()
# MRS graph:
mrsGraph = Graph(store=defaultGraph.store, identifier=MRSI)
mrsGraph.add((MRSI, RDF.type, MRS.mrs))
# Creating the prefix of the MRS elements and relevant namespaces
insprefix = Namespace(MRSI + '#')
VARS = Namespace(insprefix + "variable-")
RELS = Namespace(insprefix + "EP-")
PREDS = Namespace(insprefix + "predicate-")
SORTINFO = Namespace(insprefix + "sortinfo-")
HCONS = Namespace(insprefix + "hcons-")
ICONS = Namespace(insprefix + "icons-")
# Adding top and index
mrsGraph.add((MRSI, RDF.type, MRS.mrs))
mrsGraph.add((MRSI, DELPH['hasTop'], VARS[m.top]))
mrsGraph.add((MRSI, DELPH['hasIndex'], VARS[m.index]))
# ALTERNATIVE: (BNode, DELPH['hasTop'], VARS[m.top])
# Populating the graphs
_vars_to_rdf(m, mrsGraph, VARS, SORTINFO)
_rels_to_rdf(m, mrsGraph, defaultGraph, MRSI, RELS, PREDS, VARS)
_hcons_to_rdf(m, mrsGraph, defaultGraph, MRSI, HCONS, VARS)
_icons_to_rdf(m, mrsGraph, defaultGraph, MRSI, ICONS, VARS)
return defaultGraph
def _vars_to_rdf(m, mrsGraph, VARS, SORTINFO):
"""
    Converts the variables of an MRS to the RDF graph
Args:
m: a delphin mrs instance to be converted into RDF format
mrsGraph: a rdflib Graph where the MRS triples will be put.
VARS: the URI namespace dedicated to variables.
SORTINFO: the URI namespace dedicated to the sortinfo (morphosemantic information).
"""
for v in m.variables.items():
if delphin.variable.is_valid(v[0]):
# typing variables
if delphin.variable.type(v[0]) != 'h':
mrsGraph.add((VARS[v[0]], RDF.type, DELPH[delphin.variable.type(v[0])]))
else :
mrsGraph.add((VARS[v[0]], RDF.type, MRS['h']))
# variable properties:
if v[1] != {}:
mrsGraph.add((SORTINFO[v[0]], RDF.type, DELPH.SortInfo))
mrsGraph.add((VARS[v[0]], DELPH.hasSortInfo, SORTINFO[v[0]]))
for props in v[1].items():
mrsGraph.add((SORTINFO[v[0]], ERG[props[0].lower()], Literal(props[1])))
# it wouldn't be harmful to reassure that the property is defined in ERG, but it'll be like that for now.
else: # very rare event, should it be removed?
print("Invalid variable name")
def _rels_to_rdf(m, mrsGraph, defaultGraph, MRSI, RELS, PREDS, VARS):
"""
    Converts the EPs of an MRS to the RDF graph
Args:
m: a delphin mrs instance to be converted into RDF format
mrsGraph: a rdflib Graph where the MRS triples will be put.
defaultGraph: the conjunctive graph of the profile
MRSI: the node of the MRS instance being converted
RELS: the URI namespace dedicated to EPs
PREDS: the URI namespace dedicated to predicates
VARS: the URI namespace dedicated to variables
"""
for rel in range(len(m.rels)):
mrs_rel = m.rels[rel]
        EPNode = RELS[f"{rel}"]  # maybe labeling EPs differently would be better because they aren't ordered.
predNode = PREDS[f"{rel}"]
mrsGraph.add((MRSI, MRS.hasEP, EPNode))
mrsGraph.add((EPNode, RDF.type, MRS.ElementaryPredication))
mrsGraph.add((EPNode, MRS.hasLabel, VARS[mrs_rel.label]))
# graph.add((rdf_rel, MRS.var, VARS[mrs_rel.iv])) #not needed because ARG0 is already being included at the end of function
splittedPredicate = delphin.predicate.split(delphin.predicate.normalize(mrs_rel.predicate))
if delphin.predicate.is_surface(mrs_rel.predicate):
mrsGraph.add((predNode, RDF.type, DELPH.SurfacePredicate))
elif delphin.predicate.is_abstract(mrs_rel.predicate):
mrsGraph.add((predNode, RDF.type, DELPH.AbstractPredicate))
else: #not(delphin.predicate.is_valid(mrs_rel.predicate))
print("{} is an invalid predicate.".format(mrs_rel.predicate)) #revise; maybe something stronger.
mrsGraph.add((predNode, RDF.type, DELPH.Predicate)) #revise
mrsGraph.add((EPNode, DELPH.hasPredicate, predNode))
mrsGraph.add((predNode, DELPH.predText, Literal(delphin.predicate.normalize(mrs_rel.predicate))))
mrsGraph.add((EPNode, RDFS.label, Literal(f"{delphin.predicate.normalize(mrs_rel.predicate)}<{mrs_rel.cfrom},{mrs_rel.cto}>")))
if splittedPredicate[0] is not None: #here, lemma = name by now.
mrsGraph.add((predNode, DELPH.hasLemma, Literal(splittedPredicate[0])))
if splittedPredicate[1] is not None:
mrsGraph.add((predNode, DELPH.hasPos, POS[splittedPredicate[1]]))
if splittedPredicate[2] is not None:
mrsGraph.add((predNode, DELPH.hasSense, Literal(splittedPredicate[2])))
#lnk:
if mrs_rel.cfrom is not None:
mrsGraph.add((EPNode, DELPH.cfrom, Literal(mrs_rel.cfrom))) #integer
if mrs_rel.cto is not None:
mrsGraph.add((EPNode, DELPH.cto, Literal(mrs_rel.cto))) #integer
# parse arguments
for hole, arg in mrs_rel.args.items():
# mrs variables as arguments
if hole.lower() != "carg" :
mrsGraph.add((EPNode, MRS[hole.lower()], VARS[arg]))
else :
mrsGraph.add((EPNode, DELPH.carg, Literal(arg)))
def _hcons_to_rdf(m, mrsGraph, defaultGraph, MRSI, HCONS, VARS):
"""
Describes handle constraints "HCONS" in an MRS-RDF format
Args:
m: a delphin mrs instance to be converted into RDF format
mrsGraph: a rdflib Graph where the MRS triples will be put.
defaultGraph: the conjunctive graph of the profile
MRSI: the node of the MRS instance being converted
HCONS: the URI namespace dedicated to handle constraints
VARS: the URI namespace dedicated to variables
"""
for id_hcons in range(len(m.hcons)):
mrs_hcons = m.hcons[id_hcons]
HCONSNode = HCONS[f"{id_hcons}"]
# adds hcons to graphs
mrsGraph.add((MRSI, MRS.hasHcons, HCONSNode))
mrsGraph.add((HCONSNode, RDF.type, MRS[mrs_hcons.relation.capitalize()]))
mrsGraph.add((HCONSNode, MRS.highHcons, VARS[mrs_hcons.hi]))
mrsGraph.add((HCONSNode, MRS.lowHcons, VARS[mrs_hcons.lo]))
def _icons_to_rdf(m, mrsGraph, defaultGraph, MRSI, ICONS, VARS):
"""
Describes individual constraints "ICONS" in MRS-RDF format
Args:
m: a delphin mrs instance to be converted into RDF format
mrsGraph: a rdflib Graph where the MRS triples will be put.
defaultGraph: the conjunctive graph of the profile
MRSI: the node of the MRS instance being converted
ICONS: the URI namespace dedicated to individual constraints
VARS: the URI namespace dedicated to variables
"""
for id_icons in range(len(m.icons)):
mrs_icons = m.icons[id_icons]
ICONSNode = ICONS[f"{id_icons}"]
# adds icons to graphs
mrsGraph.add((MRSI, MRS.hasIcons, ICONSNode))
mrsGraph.add((ICONSNode, RDF.type, ERG[mrs_icons.relation]))
mrsGraph.add((ICONSNode, MRS.leftIcons, VARS[mrs_icons.left])) # should be revisited
mrsGraph.add((ICONSNode, MRS.rightIcons, VARS[mrs_icons.right])) # should be revisited
# by now, the ICONSs seems to be grammar-specific
# and this relation must be defined in ERG as an icons.
# As we don't have an exhaustive list of the possible icons in ERG (and any other grammar),
# we'll create on the final graph those icons. This is provisory
defaultGraph.add((ERG[mrs_icons.relation], RDF.type, RDFS.Class))
defaultGraph.add((ERG[mrs_icons.relation], RDFS.subClassOf, MRS.Icons))
| [
"rdflib.Namespace",
"rdflib.graph.ConjunctiveGraph",
"rdflib.graph.Graph",
"rdflib.Literal"
] | [((381, 429), 'rdflib.Namespace', 'Namespace', (['"""http://www.delph-in.net/schema/mrs#"""'], {}), "('http://www.delph-in.net/schema/mrs#')\n", (390, 429), False, 'from rdflib import Namespace\n'), ((436, 484), 'rdflib.Namespace', 'Namespace', (['"""http://www.delph-in.net/schema/erg#"""'], {}), "('http://www.delph-in.net/schema/erg#')\n", (445, 484), False, 'from rdflib import Namespace\n'), ((493, 537), 'rdflib.Namespace', 'Namespace', (['"""http://www.delph-in.net/schema/"""'], {}), "('http://www.delph-in.net/schema/')\n", (502, 537), False, 'from rdflib import Namespace\n'), ((544, 592), 'rdflib.Namespace', 'Namespace', (['"""http://www.delph-in.net/schema/pos#"""'], {}), "('http://www.delph-in.net/schema/pos#')\n", (553, 592), False, 'from rdflib import Namespace\n'), ((1468, 1516), 'rdflib.graph.Graph', 'Graph', ([], {'store': 'defaultGraph.store', 'identifier': 'MRSI'}), '(store=defaultGraph.store, identifier=MRSI)\n', (1473, 1516), False, 'from rdflib.graph import Graph, ConjunctiveGraph\n'), ((1653, 1674), 'rdflib.Namespace', 'Namespace', (["(MRSI + '#')"], {}), "(MRSI + '#')\n", (1662, 1674), False, 'from rdflib import Namespace\n'), ((1686, 1720), 'rdflib.Namespace', 'Namespace', (["(insprefix + 'variable-')"], {}), "(insprefix + 'variable-')\n", (1695, 1720), False, 'from rdflib import Namespace\n'), ((1732, 1760), 'rdflib.Namespace', 'Namespace', (["(insprefix + 'EP-')"], {}), "(insprefix + 'EP-')\n", (1741, 1760), False, 'from rdflib import Namespace\n'), ((1773, 1808), 'rdflib.Namespace', 'Namespace', (["(insprefix + 'predicate-')"], {}), "(insprefix + 'predicate-')\n", (1782, 1808), False, 'from rdflib import Namespace\n'), ((1824, 1858), 'rdflib.Namespace', 'Namespace', (["(insprefix + 'sortinfo-')"], {}), "(insprefix + 'sortinfo-')\n", (1833, 1858), False, 'from rdflib import Namespace\n'), ((1871, 1902), 'rdflib.Namespace', 'Namespace', (["(insprefix + 'hcons-')"], {}), "(insprefix + 'hcons-')\n", (1880, 1902), False, 'from rdflib import Namespace\n'), ((1915, 1946), 'rdflib.Namespace', 'Namespace', (["(insprefix + 'icons-')"], {}), "(insprefix + 'icons-')\n", (1924, 1946), False, 'from rdflib import Namespace\n'), ((1416, 1434), 'rdflib.graph.ConjunctiveGraph', 'ConjunctiveGraph', ([], {}), '()\n', (1432, 1434), False, 'from rdflib.graph import Graph, ConjunctiveGraph\n'), ((5941, 5970), 'rdflib.Literal', 'Literal', (['splittedPredicate[0]'], {}), '(splittedPredicate[0])\n', (5948, 5970), False, 'from rdflib import Literal\n'), ((6215, 6244), 'rdflib.Literal', 'Literal', (['splittedPredicate[2]'], {}), '(splittedPredicate[2])\n', (6222, 6244), False, 'from rdflib import Literal\n'), ((6346, 6368), 'rdflib.Literal', 'Literal', (['mrs_rel.cfrom'], {}), '(mrs_rel.cfrom)\n', (6353, 6368), False, 'from rdflib import Literal\n'), ((6461, 6481), 'rdflib.Literal', 'Literal', (['mrs_rel.cto'], {}), '(mrs_rel.cto)\n', (6468, 6481), False, 'from rdflib import Literal\n'), ((6791, 6803), 'rdflib.Literal', 'Literal', (['arg'], {}), '(arg)\n', (6798, 6803), False, 'from rdflib import Literal\n'), ((3564, 3581), 'rdflib.Literal', 'Literal', (['props[1]'], {}), '(props[1])\n', (3571, 3581), False, 'from rdflib import Literal\n')] |
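A hedged usage sketch for mrs_to_rdf above; the SimpleMRS codec import and the input file name are assumptions (PyDelphin 1.x ships delphin.codecs.simplemrs, and "sentence.mrs" is a hypothetical file holding one SimpleMRS serialization).
from delphin.codecs import simplemrs
from rdflib import URIRef
m = simplemrs.decode(open("sentence.mrs").read())  # parse one MRS
graph = mrs_to_rdf(m, URIRef("http://example.org/profile#mrs-0"))
print(graph.serialize(format="trig"))  # the named MRS graph plus any grammar-level triples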
import os
import sys
import numpy as np
import pandas as pd
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.neural_network import MLPClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import GridSearchCV
from imblearn.over_sampling import SMOTE
from imblearn.under_sampling import RandomUnderSampler
lib_path = os.path.abspath(os.path.join('Ml models'))
sys.path.append(lib_path)
from features_processing import features_processing
from grid_search_function import grid_search_function
if __name__ == '__main__':
train_original = pd.read_csv('Data/titanic_train_READY.csv')
train_original.set_index('PassengerId',inplace=True)
train = train_original.copy()
target = 'Survived'
[scaler, train] = features_processing(train, target, normalization=True)
# Choose all predictors except target
predictors = [x for x in train.columns if x not in [target]]
# # Oversampling with SMOTE
# X_res, y_res = SMOTE(kind='regular').fit_sample(train[predictors], train[target])
#
# train_res = pd.DataFrame(X_res, columns=train[predictors].columns)
# train_res.loc[:,'Survived'] = y_res
# train = train_res
# Random undersampling
rus = RandomUnderSampler(random_state=10)
X_res, y_res = rus.fit_sample(train[predictors], train[target])
train_res = pd.DataFrame(X_res, columns=train[predictors].columns)
train_res.loc[:,'Survived'] = y_res
train = train_res
param_test1 = {#'C':[0.5, 1, 2, 3, 5, 10, 20, 30, 35, 40, 50, 60],
'C': np.arange(0.1,2.1,0.05),
'penalty': ['l1', 'l2']
}
gsearch1 = GridSearchCV(estimator = LogisticRegression(
random_state=10,
max_iter=1000,
solver='saga'
),
param_grid = param_test1,
scoring='accuracy',n_jobs=-1,iid=False, cv=5)
# gsearch1.fit(train[predictors], train[target])
# clf = gsearch1
gsearch_chosen = LogisticRegression(
random_state=10,
max_iter=1000,
solver='saga',
C=1.1,
penalty='l1'
)
# gsearch_chosen.fit(train[predictors], train[target])
# # Print results
# print("Best parameters set found on development set:")
# print(clf.best_params_)
#
# print("Grid scores on development set:")
# means = clf.cv_results_['mean_test_score']
# stds = clf.cv_results_['std_test_score']
#
# for mean, std, params in zip(means, stds, clf.cv_results_['params']):
# print("%0.3f (+/-%0.03f) for %r"
# % (mean, std * 2, params))
# Predicting result for submission
submitting = False
if submitting:
        # Loading test data
test = pd.read_csv('Data/titanic_test_READY.csv')
test.set_index('PassengerId',inplace=True)
[scaler, test_X] = features_processing(test, target, normalization=True, training=False, scaler=scaler)
y_predicted = gsearch_chosen.predict(test_X)
y_predicted_df = pd.DataFrame(y_predicted, columns={'Survived'}, index=test_X.index)
# y_predicted_df.sort_index(inplace=True)
y_predicted_df.to_csv('Kaggle submissions/titanic_submission2_accuracy_undersampled_LogR2.csv', sep=',', encoding='utf-8')
| [
"pandas.read_csv",
"numpy.arange",
"os.path.join",
"sklearn.linear_model.LogisticRegression",
"features_processing.features_processing",
"pandas.DataFrame",
"sys.path.append",
"imblearn.under_sampling.RandomUnderSampler"
] | [((418, 443), 'sys.path.append', 'sys.path.append', (['lib_path'], {}), '(lib_path)\n', (433, 443), False, 'import sys\n'), ((391, 416), 'os.path.join', 'os.path.join', (['"""Ml models"""'], {}), "('Ml models')\n", (403, 416), False, 'import os\n'), ((602, 645), 'pandas.read_csv', 'pd.read_csv', (['"""Data/titanic_train_READY.csv"""'], {}), "('Data/titanic_train_READY.csv')\n", (613, 645), True, 'import pandas as pd\n'), ((784, 838), 'features_processing.features_processing', 'features_processing', (['train', 'target'], {'normalization': '(True)'}), '(train, target, normalization=True)\n', (803, 838), False, 'from features_processing import features_processing\n'), ((1250, 1285), 'imblearn.under_sampling.RandomUnderSampler', 'RandomUnderSampler', ([], {'random_state': '(10)'}), '(random_state=10)\n', (1268, 1285), False, 'from imblearn.under_sampling import RandomUnderSampler\n'), ((1371, 1425), 'pandas.DataFrame', 'pd.DataFrame', (['X_res'], {'columns': 'train[predictors].columns'}), '(X_res, columns=train[predictors].columns)\n', (1383, 1425), True, 'import pandas as pd\n'), ((2223, 2313), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(10)', 'max_iter': '(1000)', 'solver': '"""saga"""', 'C': '(1.1)', 'penalty': '"""l1"""'}), "(random_state=10, max_iter=1000, solver='saga', C=1.1,\n penalty='l1')\n", (2241, 2313), False, 'from sklearn.linear_model import LogisticRegression\n'), ((1586, 1611), 'numpy.arange', 'np.arange', (['(0.1)', '(2.1)', '(0.05)'], {}), '(0.1, 2.1, 0.05)\n', (1595, 1611), True, 'import numpy as np\n'), ((3141, 3183), 'pandas.read_csv', 'pd.read_csv', (['"""Data/titanic_test_READY.csv"""'], {}), "('Data/titanic_test_READY.csv')\n", (3152, 3183), True, 'import pandas as pd\n'), ((3262, 3350), 'features_processing.features_processing', 'features_processing', (['test', 'target'], {'normalization': '(True)', 'training': '(False)', 'scaler': 'scaler'}), '(test, target, normalization=True, training=False,\n scaler=scaler)\n', (3281, 3350), False, 'from features_processing import features_processing\n'), ((3426, 3493), 'pandas.DataFrame', 'pd.DataFrame', (['y_predicted'], {'columns': "{'Survived'}", 'index': 'test_X.index'}), "(y_predicted, columns={'Survived'}, index=test_X.index)\n", (3438, 3493), True, 'import pandas as pd\n'), ((1715, 1780), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(10)', 'max_iter': '(1000)', 'solver': '"""saga"""'}), "(random_state=10, max_iter=1000, solver='saga')\n", (1733, 1780), False, 'from sklearn.linear_model import LogisticRegression\n')] |
from PyQt5.Qt import QObject
from PyQt5.QtWidgets import QWidget
from PyQt5.Qt import Qt
from PyQt5.Qt import QVBoxLayout
from mc.tools.DockTitleBarWidget import DockTitleBarWidget
from PyQt5.Qt import QActionGroup
from PyQt5.Qt import QKeySequence
from PyQt5.Qt import QAction
from mc.common.globalvars import gVar
from .BookmarksSideBar import BookmarksSideBar
from .HistorySideBar import HistorySideBar
class SideBar(QWidget):
def __init__(self, manager, window):
super().__init__(window)
self._window = window # BrowserWindow
self._layout = None # QVBoxLayout
self._titleBar = None # DockTitleBarWidget
self._manager = manager # SideBarManager
self.setObjectName('sidebar')
self.setAttribute(Qt.WA_DeleteOnClose)
self._layout = QVBoxLayout(self)
self._layout.setContentsMargins(0, 0, 0, 0)
self._layout.setSpacing(0)
self.setLayout(self._layout)
self._titleBar = DockTitleBarWidget('', self)
self._layout.addWidget(self._titleBar)
def showBookmarks(self):
self._titleBar.setTitle(_('Bookmarks'))
bar = BookmarksSideBar(self._window)
self.setWidget(bar)
def showHistory(self):
self._titleBar.setTitle(_('History'))
bar = HistorySideBar(self._window)
self.setWidget(bar)
def setTitle(self, title):
self._titleBar.setTitle(title)
def setWidget(self, widget):
if self._layout.count() == 2:
self._layout.removeItem(self._layout.itemAt(1))
if widget:
self._layout.addWidget(widget)
# Q_SLOTS
def close(self):
self._manager.closeSideBar()
p = self.parentWidget()
if p:
p.setFocus()
super().close()
class SideBarManager(QObject):
# for plugins only
_s_sidebars = {} # QHash<QString, QPointer<SideBarInterface>>
def __init__(self, parent):
'''
@param: parent BrowserWindow
'''
super().__init__(parent)
self._window = parent
self._sideBar = None # QPointer<SideBar>
self._menu = None # QMenu
self._activeBar = ''
def activeSideBar(self):
'''
@return: QString
'''
return self._activeBar
def createMenu(self, menu):
'''
@param: menu QMenu
'''
self._window.removeActions(menu.actions())
menu.clear()
group = QActionGroup(menu)
act = menu.addAction(_('Bookmarks'), self._slotShowSideBar)
act.setCheckable(True)
act.setShortcut(QKeySequence('Ctrl+Shift+B'))
act.setData('Bookmarks')
act.setChecked(self._activeBar == 'Bookmarks')
group.addAction(act)
act = menu.addAction(_('History'), self._slotShowSideBar)
act.setCheckable(True)
act.setShortcut(QKeySequence('Ctrl+H'))
act.setData('History')
act.setChecked(self._activeBar == 'History')
group.addAction(act)
for key, sidebar in self._s_sidebars.items():
if not sidebar: continue
# QAction
act = sidebar.createMenuAction()
act.setData(key)
act.setChecked(self._activeBar == key)
act.triggered.connect(self._slotShowSideBar)
menu.addAction(act)
group.addAction(act)
self._window.addActions(menu.actions())
def showSideBar(self, id_, toggle=True):
'''
@param: id_ QString
'''
if not id_ or id_ == 'None':
return
if not self._sideBar:
self._sideBar = self._window.addSideBar()
def destroyedFunc():
self._activeBar = ''
self._window.setSideBarNone()
# TODO: needed?
self._window.saveSideBarSettings()
self._sideBar = None
self._sideBar.destroyed.connect(destroyedFunc)
if id_ == self._activeBar:
if not toggle:
return
self._sideBar.close()
self._activeBar = ''
self._window.saveSideBarSettings()
return
if id_ == 'Bookmarks':
self._sideBar.showBookmarks()
elif id_ == 'History':
self._sideBar.showHistory()
else:
sidebar = self._s_sidebars.get(id_)
if not sidebar:
self._sideBar.close()
return
self._sideBar.setTitle(sidebar.title())
self._sideBar.setWidget(sidebar.createSideBarWidget(self._window))
self._activeBar = id_
self._window.saveSideBarSettings()
def sideBarRemoved(self, id_):
'''
@param: id_ QString
'''
if self._activeBar == id_ and self._sideBar:
self._sideBar.setWidget(None)
self._sideBar.close()
def closeSideBar(self):
if gVar.app.isClosing():
return
self._activeBar = ''
self._window.saveSideBarSettings()
def addSidebar(self, id_, interface):
'''
@param: id_ QString
@param: interface SideBarInterface
'''
self._s_sidebars[id_] = interface
def removeSidebar(self, interface):
'''
@param: interface SideBarInterface
'''
id_ = ''
for key, interf in self._s_sidebars.items():
if interface == interf:
id_ = key
break
else:
return
self._s_sidebars.pop(id_)
for window in gVar.app.windows():
window.sideBarManager().sideBarRemoved(id_)
# Q_SLOTS
def _slotShowSideBar(self):
act = self.sender()
if isinstance(act, QAction):
self.showSideBar(act.data())
| [
"PyQt5.Qt.QActionGroup",
"mc.tools.DockTitleBarWidget.DockTitleBarWidget",
"PyQt5.Qt.QVBoxLayout",
"PyQt5.Qt.QKeySequence",
"mc.common.globalvars.gVar.app.windows",
"mc.common.globalvars.gVar.app.isClosing"
] | [((807, 824), 'PyQt5.Qt.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (818, 824), False, 'from PyQt5.Qt import QVBoxLayout\n'), ((975, 1003), 'mc.tools.DockTitleBarWidget.DockTitleBarWidget', 'DockTitleBarWidget', (['""""""', 'self'], {}), "('', self)\n", (993, 1003), False, 'from mc.tools.DockTitleBarWidget import DockTitleBarWidget\n'), ((2458, 2476), 'PyQt5.Qt.QActionGroup', 'QActionGroup', (['menu'], {}), '(menu)\n', (2470, 2476), False, 'from PyQt5.Qt import QActionGroup\n'), ((4930, 4950), 'mc.common.globalvars.gVar.app.isClosing', 'gVar.app.isClosing', ([], {}), '()\n', (4948, 4950), False, 'from mc.common.globalvars import gVar\n'), ((5577, 5595), 'mc.common.globalvars.gVar.app.windows', 'gVar.app.windows', ([], {}), '()\n', (5593, 5595), False, 'from mc.common.globalvars import gVar\n'), ((2601, 2629), 'PyQt5.Qt.QKeySequence', 'QKeySequence', (['"""Ctrl+Shift+B"""'], {}), "('Ctrl+Shift+B')\n", (2613, 2629), False, 'from PyQt5.Qt import QKeySequence\n'), ((2870, 2892), 'PyQt5.Qt.QKeySequence', 'QKeySequence', (['"""Ctrl+H"""'], {}), "('Ctrl+H')\n", (2882, 2892), False, 'from PyQt5.Qt import QKeySequence\n')] |
from django.db import models
class Author(models.Model):
name = models.CharField(max_length=64)
def __str__(self):
return self.name
class Book(models.Model):
name = models.CharField(max_length=64)
author = models.ForeignKey(
Author,
on_delete=models.PROTECT,
related_name="books"
)
def __str__(self):
return self.name
| [
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((70, 101), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)'}), '(max_length=64)\n', (86, 101), False, 'from django.db import models\n'), ((190, 221), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)'}), '(max_length=64)\n', (206, 221), False, 'from django.db import models\n'), ((235, 308), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Author'], {'on_delete': 'models.PROTECT', 'related_name': '"""books"""'}), "(Author, on_delete=models.PROTECT, related_name='books')\n", (252, 308), False, 'from django.db import models\n')] |
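A hypothetical shell-style sketch of the models above, showing the reverse accessor created by related_name and the effect of on_delete=models.PROTECT; it assumes the app is installed and migrated.
from django.db.models import ProtectedError
author = Author.objects.create(name="Ursula K. Le Guin")
Book.objects.create(name="The Dispossessed", author=author)
print(author.books.count())  # reverse accessor from related_name="books"
try:
    author.delete()  # PROTECT blocks deletion while books still reference the author
except ProtectedError:
    print("delete or reassign the books first")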
import pymongo
import time
def get_db():
retries = 5
while True:
try:
client = pymongo.MongoClient(host="datastore", port=27017)
db = client["chat_db"]
return db
except pymongo.errors.ConnectionFailure as e:
if retries == 0:
raise e
retries -= 1
time.sleep(0.5)
db = get_db()
users_collection = db["Users"]
rooms_collection = db["Rooms"]
messages_collection = db["Messages"]
| [
"pymongo.MongoClient",
"time.sleep"
] | [((108, 157), 'pymongo.MongoClient', 'pymongo.MongoClient', ([], {'host': '"""datastore"""', 'port': '(27017)'}), "(host='datastore', port=27017)\n", (127, 157), False, 'import pymongo\n'), ((359, 374), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (369, 374), False, 'import time\n')] |
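A minimal usage sketch of the collections defined above; the document fields are assumptions, and a MongoDB instance must be reachable at datastore:27017.
messages_collection.insert_one({"room": "general", "user": "alice", "text": "hi"})
for msg in messages_collection.find({"room": "general"}):
    print(msg["user"], msg["text"])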
from ..builder import PIPELINES
import numpy as np
import nibabel as nib
@PIPELINES.register_module()
class LoadImageFromNIIFile(object):
"""Load an image from file.
Required keys are "img_prefix" and "img_info" (a dict that must contain the
key "filename"). Added or updated keys are "filename", "img", "img_shape",
"ori_shape" (same as `img_shape`), "pad_shape" (same as `img_shape`),
"scale_factor" (1.0) and "img_norm_cfg" (means=0 and stds=1).
Args:
to_float32 (bool): Whether to convert the loaded image to a float32
numpy array. If set to False, the loaded image is an uint8 array.
Defaults to False.
color_type (str): The flag argument for :func:`mmcv.imfrombytes`.
Defaults to 'color'.
file_client_args (dict): Arguments to instantiate a FileClient.
See :class:`mmcv.fileio.FileClient` for details.
Defaults to ``dict(backend='disk')``.
imdecode_backend (str): Backend for :func:`mmcv.imdecode`. Default:
'cv2'
"""
def __init__(self):
pass
def __call__(self, results):
"""Call functions to load image and get image meta information.
Args:
results (dict): Result dict from :obj:`mmseg.CustomDataset`.
Returns:
dict: The dict contains loaded image and meta information.
"""
# filename = None
# img = None
if isinstance(results["img_info"]['filename'], list) or \
isinstance(results["img_info"]['filename'], tuple):
filename = []
img = []
for img_path in results["img_info"]['filename']:
filename.append(img_path)
results['img_affine_matrix'] = []
if "nii.gz" in img_path:
img_nii = nib.load(img_path)
# img_np = np.squeeze(img_nii.get_fdata(dtype=np.float32))
results['img_affine_matrix'].append(img_nii.affine)
if len(img) != 0:
assert img_nii.shape == img[0].shape, "different mode must have same image shape"
img.append(img_nii)
else:
print("[ERROR] Unspported 3D image format")
raise ValueError
results['img_shape'] = img[0].shape
results['ori_shape'] = img[0].shape
results['pad_shape'] = img[0].shape
results["num_mode"] = len(img)
else:
img_nii = nib.load(results["img_info"]['filename'])
# img_np = np.squeeze(img_nii.get_fdata(dtype=np.float32))
img = img_nii
filename = results['img_info']['filename']
results['img_affine_matrix'] = img_nii.affine
results['img_shape'] = img.shape
results['ori_shape'] = img.shape
results['pad_shape'] = img.shape
results["num_mode"] = 1
results['filename'] = filename
results['ori_filename'] = results['img_info']['filename']
results['img'] = img
try:
results['image_id'] = results['img_info']['image_id']
except:
results['image_id'] = None
results['scale_factor'] = 1.0
return results
def __repr__(self):
repr_str = self.__class__.__name__
# repr_str += f'(to_float32={self.to_float32},'
# repr_str += f"color_type='{self.color_type}',"
# repr_str += f"imdecode_backend='{self.imdecode_backend}')"
return repr_str
@PIPELINES.register_module()
class LoadAnnotationsFromNIIFile(object):
"""Load annotations for semantic segmentation.
Args:
reduce_zero_label (bool): Whether reduce all label value by 1.
Usually used for datasets where 0 is background label.
Default: False.
file_client_args (dict): Arguments to instantiate a FileClient.
See :class:`mmcv.fileio.FileClient` for details.
Defaults to ``dict(backend='disk')``.
imdecode_backend (str): Backend for :func:`mmcv.imdecode`. Default:
'pillow'
"""
def __init__(self, reduce_zero_label=False):
self.reduce_zero_label = reduce_zero_label
# self.ignore_black_area = ignore_black_area
# self.file_client_args = file_client_args.copy()
# self.file_client = None
# self.imdecode_backend = imdecode_backend
def __call__(self, results):
"""Call function to load multiple types annotations.
Args:
results (dict): Result dict from :obj:`mmseg.CustomDataset`.
Returns:
dict: The dict contains loaded semantic segmentation annotations.
"""
filename = results['ann_info']['seg_map']
img_nii = nib.load(filename)
results["seg_affine_matrix"] = img_nii.affine
# gt_semantic_seg = np.squeeze(img_nii.get_fdata(dtype=np.float32))
# if results.get('label_map', None) is not None:
# for old_id, new_id in results['label_map'].items():
# gt_semantic_seg[gt_semantic_seg == old_id] = new_id
# # reduce zero_label
# if self.reduce_zero_label:
# # avoid using underflow conversion
# gt_semantic_seg[gt_semantic_seg == 0] = 255
# gt_semantic_seg = gt_semantic_seg - 1
# gt_semantic_seg[gt_semantic_seg == 254] = 255
results['gt_semantic_seg'] = img_nii
results['seg_fields'].append('gt_semantic_seg')
return results
def __repr__(self):
repr_str = self.__class__.__name__
repr_str += f'(reduce_zero_label={self.reduce_zero_label},'
return repr_str | [
"nibabel.load"
] | [((4847, 4865), 'nibabel.load', 'nib.load', (['filename'], {}), '(filename)\n', (4855, 4865), True, 'import nibabel as nib\n'), ((2574, 2615), 'nibabel.load', 'nib.load', (["results['img_info']['filename']"], {}), "(results['img_info']['filename'])\n", (2582, 2615), True, 'import nibabel as nib\n'), ((1874, 1892), 'nibabel.load', 'nib.load', (['img_path'], {}), '(img_path)\n', (1882, 1892), True, 'import nibabel as nib\n')] |
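A sketch of how the two transforms above would typically be wired into an mmseg-style pipeline config; the surrounding dataset config and any further transforms are assumptions, not taken from this file.
train_pipeline = [
    dict(type='LoadImageFromNIIFile'),
    dict(type='LoadAnnotationsFromNIIFile', reduce_zero_label=False),
    # further transforms (crop / flip / normalize) would follow here
]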
from acme import Product
import random as rand
ADJECTIVES = ['Awesome', 'Shiny', 'Impressive', 'Portable', 'Improved']
NOUNS = ['Anvil', 'Catapult', 'Disguise', 'Mousetrap', '???']
def generate_products(n: int=30) -> list:
products = []
for _ in range(n):
name = rand.choice(ADJECTIVES) + ' ' + rand.choice(NOUNS)
price = rand.randint(5, 100)
weight = rand.randint(5, 100)
flammability = rand.uniform(0, 2.5)
products.append(Product(name, price, weight, flammability))
return products
def inventory_report(prods: list) -> None:
names = set()
total_price, total_weight, total_flam = 0,0,0
for prod in prods:
names.add(prod.name)
total_price += prod.price
total_weight += prod.weight
total_flam += prod.flammability
print(f'Number of Unique Names: {len(names)}')
print(f'Average Price: {total_price / len(prods)}')
print(f'Average Weight: {total_weight / len(prods)}')
print(f'Average Flammability: {total_flam / len(prods)}')
if __name__ == '__main__':
inventory_report(generate_products())
| [
"acme.Product",
"random.uniform",
"random.choice",
"random.randint"
] | [((356, 376), 'random.randint', 'rand.randint', (['(5)', '(100)'], {}), '(5, 100)\n', (368, 376), True, 'import random as rand\n'), ((395, 415), 'random.randint', 'rand.randint', (['(5)', '(100)'], {}), '(5, 100)\n', (407, 415), True, 'import random as rand\n'), ((440, 460), 'random.uniform', 'rand.uniform', (['(0)', '(2.5)'], {}), '(0, 2.5)\n', (452, 460), True, 'import random as rand\n'), ((320, 338), 'random.choice', 'rand.choice', (['NOUNS'], {}), '(NOUNS)\n', (331, 338), True, 'import random as rand\n'), ((488, 530), 'acme.Product', 'Product', (['name', 'price', 'weight', 'flammability'], {}), '(name, price, weight, flammability)\n', (495, 530), False, 'from acme import Product\n'), ((288, 311), 'random.choice', 'rand.choice', (['ADJECTIVES'], {}), '(ADJECTIVES)\n', (299, 311), True, 'import random as rand\n')] |
import numpy as np
#np.random.seed(10)  # random seed
t = np.random.randint(0,20,(3,4))
print(t)
# create an array of the given shape filled with uniformly distributed floats in [0, 1)
t1 = np.random.rand(2,2)
print(t1)
# create an array of standard-normal random floats (mean 0, standard deviation 1)
t2 = np.random.randn(4,5)
print(t2)
# draw an array from a uniform distribution over [0, 20)
t3 = np.random.uniform(0,20,(3,4))
print(t3)
# draw samples from a normal distribution centred at loc (the mean) with standard deviation scale
t4 = np.random.normal(14,3,(2,3))
print(t4)
print("*"*100)
t5 = np.arange(6).reshape(2,3)
t5[[0,1],:] = t5[[1,0],:]  # swap rows
print(t5)
t6 = np.arange(6,12).reshape(2,3)
t6[:,[1,2]] = t6[:,[2,1]]  # swap columns
print(t6)
print("*"*50)
t7 = np.vstack((t5,t6))
print(t7)
print("*"*25)
t8 = np.hstack((t5,t6))
print(t8)
print("*"*50)
print(np.argmax(t7, axis=0))  # argmax returns positions (indices), not values
print(np.argmin(t8, axis=1))
print("*"*50)
p = np.tile(np.arange(0, 10, 2), (5, 1))
print(p)
print("*"*50)
value_sum = p.sum(axis=0)
print(value_sum)
value_mean = p.mean(axis=1)
print(value_mean)
value_median = np.median(p, axis=1)
print(value_median)
print(p.max(axis=0))
print(p.min(axis=1))
value_p = np.ptp(p, axis=1)
print(value_p)
value_std = p.std(axis=1)
print(value_std)
print("*"*50)
q1 = p<4
q2 = p[p<4]
print(q1)
print(q2)
print("*"*50)
r = np.where(p<4, 0, 1)
print(r)
print("*"*50)
s = np.clip(p, 2, 6)
print(s)
| [
"numpy.random.normal",
"numpy.ptp",
"numpy.clip",
"numpy.median",
"numpy.random.rand",
"numpy.hstack",
"numpy.where",
"numpy.argmax",
"numpy.random.randint",
"numpy.vstack",
"numpy.random.uniform",
"numpy.argmin",
"numpy.random.randn",
"numpy.arange"
] | [((52, 84), 'numpy.random.randint', 'np.random.randint', (['(0)', '(20)', '(3, 4)'], {}), '(0, 20, (3, 4))\n', (69, 84), True, 'import numpy as np\n'), ((129, 149), 'numpy.random.rand', 'np.random.rand', (['(2)', '(2)'], {}), '(2, 2)\n', (143, 149), True, 'import numpy as np\n'), ((200, 221), 'numpy.random.randn', 'np.random.randn', (['(4)', '(5)'], {}), '(4, 5)\n', (215, 221), True, 'import numpy as np\n'), ((250, 282), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(20)', '(3, 4)'], {}), '(0, 20, (3, 4))\n', (267, 282), True, 'import numpy as np\n'), ((340, 371), 'numpy.random.normal', 'np.random.normal', (['(14)', '(3)', '(2, 3)'], {}), '(14, 3, (2, 3))\n', (356, 371), True, 'import numpy as np\n'), ((568, 587), 'numpy.vstack', 'np.vstack', (['(t5, t6)'], {}), '((t5, t6))\n', (577, 587), True, 'import numpy as np\n'), ((616, 635), 'numpy.hstack', 'np.hstack', (['(t5, t6)'], {}), '((t5, t6))\n', (625, 635), True, 'import numpy as np\n'), ((917, 937), 'numpy.median', 'np.median', (['p'], {'axis': '(1)'}), '(p, axis=1)\n', (926, 937), True, 'import numpy as np\n'), ((1010, 1027), 'numpy.ptp', 'np.ptp', (['p'], {'axis': '(1)'}), '(p, axis=1)\n', (1016, 1027), True, 'import numpy as np\n'), ((1161, 1182), 'numpy.where', 'np.where', (['(p < 4)', '(0)', '(1)'], {}), '(p < 4, 0, 1)\n', (1169, 1182), True, 'import numpy as np\n'), ((1209, 1225), 'numpy.clip', 'np.clip', (['p', '(2)', '(6)'], {}), '(p, 2, 6)\n', (1216, 1225), True, 'import numpy as np\n'), ((666, 687), 'numpy.argmax', 'np.argmax', (['t7'], {'axis': '(0)'}), '(t7, axis=0)\n', (675, 687), True, 'import numpy as np\n'), ((711, 732), 'numpy.argmin', 'np.argmin', (['t8'], {'axis': '(1)'}), '(t8, axis=1)\n', (720, 732), True, 'import numpy as np\n'), ((761, 780), 'numpy.arange', 'np.arange', (['(0)', '(10)', '(2)'], {}), '(0, 10, 2)\n', (770, 780), True, 'import numpy as np\n'), ((400, 412), 'numpy.arange', 'np.arange', (['(6)'], {}), '(6)\n', (409, 412), True, 'import numpy as np\n'), ((475, 491), 'numpy.arange', 'np.arange', (['(6)', '(12)'], {}), '(6, 12)\n', (484, 491), True, 'import numpy as np\n')] |
from mainPackage.Functions import RigidDomainFinder
from GraphPackage.Graph_Config import List_Colors
if __name__ == '__main__':
rf = RigidDomainFinder()
# VMD likewise xyz format
PredictedLabels, ProtG = rf.segment_by_xyzFormat('./data/lysozyme.xyz')
print(PredictedLabels) | [
"mainPackage.Functions.RigidDomainFinder"
] | [((138, 157), 'mainPackage.Functions.RigidDomainFinder', 'RigidDomainFinder', ([], {}), '()\n', (155, 157), False, 'from mainPackage.Functions import RigidDomainFinder\n')] |
from setuptools import setup
from setuptools import find_packages
package_name = 'ros2_annotation_api'
setup(
name=package_name,
version='0.0.1',
packages=find_packages(exclude=['test']),
data_files=[
('share/' + package_name, ['package.xml']),
],
py_modules=[
'ros2_annotation_api.ros'
],
install_requires=['setuptools'],
author='<NAME>',
keywords=['ROS'],
zip_safe=True,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development',
],
license='Apache License, Version 2.0'
)
| [
"setuptools.find_packages"
] | [((169, 200), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['test']"}), "(exclude=['test'])\n", (182, 200), False, 'from setuptools import find_packages\n')] |
import bisect, collections, copy, heapq, itertools, math, string, sys, queue, time, random
input = lambda: sys.stdin.readline().rstrip()
sys.setrecursionlimit(10**7)
def I():
return input()
def IS():
return input().split()
def II():
return int(input())
def IIS():
return map(int, input().split())
def LIIS():
return list(map(int, input().split()))
def Base_n_to_10(X, n):
out = 0
for i in range(1, len(str(X)) + 1):
out += int(X[-i]) * (n**(i - 1))
return out #int out
def Base_10_to_n(X, n):
if (X // n):
return Base_10_to_n(X // n, n) + str(X % n)
return str(X % n)
INF = 10**18
MOD = 10**9 + 7
sys.setrecursionlimit(10**8)
##############################################################################
n, q = IIS()
path = [[] for i in range(n)]
for i in range(n - 1):
a, b = IIS()
path[a - 1].append(b - 1)
path[b - 1].append(a - 1)
used = [False for i in range(n)]
st = set()
li = [0 for i in range(n)]
def EulerTour(n, X, i0):
done = [0] * n
    Q = [~i0, i0]  # push the root onto the stack
    ET = []
    while Q:
        i = Q.pop()
        if i >= 0:  # pre-order step (entering the node)
            done[i] = 1
            ET.append(i)
            for a in X[i][::-1]:
                if done[a]: continue
                Q.append(~a)  # push the post-order step onto the stack
                Q.append(a)  # push the pre-order step onto the stack
        else:  # post-order step (leaving the node)
            ET.append(~i)
ET.append(~i)
return ET
li2 = [-1 for i in range(n)]
li3 = [0 for i in range(n)]
li = EulerTour(n, path, 0)
for i in range(n * 2):
if li2[li[i]] == -1:
li2[li[i]] = i
else:
li3[li[i]] = i
for i in range(q):
c, d = IIS()
if min(abs(li3[c - 1] - li2[d - 1]), abs(li2[c - 1] - li3[d - 1])) % 2:
print("Town")
else:
print("Road")
| [
"sys.stdin.readline",
"sys.setrecursionlimit"
] | [((138, 168), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 7)'], {}), '(10 ** 7)\n', (159, 168), False, 'import bisect, collections, copy, heapq, itertools, math, string, sys, queue, time, random\n'), ((671, 701), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10 ** 8)'], {}), '(10 ** 8)\n', (692, 701), False, 'import bisect, collections, copy, heapq, itertools, math, string, sys, queue, time, random\n'), ((108, 128), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (126, 128), False, 'import bisect, collections, copy, heapq, itertools, math, string, sys, queue, time, random\n')] |
from flask import Flask
from app.settings.config import config_dict
from utils import contants
import os, sys
BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, BASE_PATH + '/common')
def create_flask_app(type):
"""
    Internal helper that builds the Flask app
    :param type: configuration key for the app object
    :return: the app object configured accordingly
"""
app = Flask(__name__)
app.config.from_object(config_dict[type])
app.config.from_envvar(contants.EXTRA_EVN_CONFIG,silent=True)
return app
def create_app(type):
"""
    Factory method called from outside to build the app
    :param type: configuration type
    :return: app
    """
    # 1. call the internal helper to build the app
    app = create_flask_app(type)
    # 2. register extension initialization components
    # 3. register blueprint initialization components
return app
| [
"os.path.abspath",
"sys.path.insert",
"flask.Flask"
] | [((189, 230), 'sys.path.insert', 'sys.path.insert', (['(0)', "(BASE_PATH + '/common')"], {}), "(0, BASE_PATH + '/common')\n", (204, 230), False, 'import os, sys\n'), ((365, 380), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (370, 380), False, 'from flask import Flask\n'), ((160, 185), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (175, 185), False, 'import os, sys\n')] |
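A hypothetical entry point using the factory above; 'dev' is an assumed key of config_dict, not taken from this file.
app = create_app('dev')
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)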
from setuptools import setup, find_packages
from os import path
# Copy README.md to project description
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="houston-client",
packages=find_packages(),
description="Houston Python Client",
long_description=long_description,
long_description_content_type="text/markdown",
version="1.1.1",
url="https://github.com/datasparq-intelligent-products/houston-python",
author="<NAME>, <NAME> & <NAME>",
license="MIT",
author_email="<EMAIL>",
install_requires=[
"requests>=2.18.0,<3.0.0dev",
"requests[security]>=2.18.0,<3.0.0dev"],
extras_require={
"gcp": [
"google-cloud-pubsub>=1.2.0,<2.0.0"],
"azure": [
"azure-eventgrid>=1.2.0,<2.0.0",
"azure-mgmt-eventgrid>=2.2.0,<3.0.0"]},
keywords=["houston"],
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
] | [((135, 157), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (147, 157), False, 'from os import path\n'), ((169, 207), 'os.path.join', 'path.join', (['this_directory', '"""README.md"""'], {}), "(this_directory, 'README.md')\n", (178, 207), False, 'from os import path\n'), ((313, 328), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (326, 328), False, 'from setuptools import setup, find_packages\n')] |
import re
from inquirer import errors
def isbn_validation(answers, current):
if not re.match(r'\d{11}', current):
raise errors.ValidationError('', reason='Wrong ISBN!')
return True
def num_validation(answers, current):
if not re.match(r'\d+', current):
raise errors.ValidationError('', reason='Wrong number!')
return True
#def clippings_validation(answers, current):
# if not re.match(r'.*My Clippings\.txt', current):
# raise errors.ValidationError('', reason='Rename it to My Clippings.txt')
#
# return True
| [
"re.match",
"inquirer.errors.ValidationError"
] | [((89, 117), 're.match', 're.match', (['"""\\\\d{11}"""', 'current'], {}), "('\\\\d{11}', current)\n", (97, 117), False, 'import re\n'), ((133, 181), 'inquirer.errors.ValidationError', 'errors.ValidationError', (['""""""'], {'reason': '"""Wrong ISBN!"""'}), "('', reason='Wrong ISBN!')\n", (155, 181), False, 'from inquirer import errors\n'), ((248, 273), 're.match', 're.match', (['"""\\\\d+"""', 'current'], {}), "('\\\\d+', current)\n", (256, 273), False, 'import re\n'), ((289, 339), 'inquirer.errors.ValidationError', 'errors.ValidationError', (['""""""'], {'reason': '"""Wrong number!"""'}), "('', reason='Wrong number!')\n", (311, 339), False, 'from inquirer import errors\n')] |
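A usage sketch with the inquirer prompt API, assuming the validators above are importable and the inquirer package is installed; field names and messages are illustrative.
import inquirer
questions = [
    inquirer.Text("isbn", message="ISBN (11 digits)", validate=isbn_validation),
    inquirer.Text("copies", message="Number of copies", validate=num_validation),
]
answers = inquirer.prompt(questions)
print(answers)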
from alpineunittest import AlpineTestCase
from alpine.apiclient import APIClient
from future.datasource import *
class TestDataSource(AlpineTestCase):
def setUp(self):
super(TestDataSource, self).setUp()
global db_datasource_id
global hadoop_datasource_id
global ds
        # To pass the tests, we need a Hadoop data source named "API_Test_Hadoop"
        # and a GPDB data source named "API_Test_GPDB" created
gpdb_datasource_name = "API_Test_GPDB"
hadoop_datasource_name = "API_Test_Hadoop"
database_name = "miner_demo"
alpine_session = APIClient(self.host, self.port)
alpine_session.login(self.username, self.password)
        ds = DataSource(alpine_session.base_url, alpine_session.session, alpine_session.token)
ds.delete_db_data_source_if_exists(gpdb_datasource_name)
datasource = ds.add_greenplum_data_source(gpdb_datasource_name, "Test Greenplum", "10.10.0.151",
5432,
"miner_demo", "miner_demo", "miner_demo")
# Creating a Hadoop Datasource for test get/update functions
ds.delete_hadoop_data_source_if_exists(hadoop_datasource_name)
additional_parameters = [
{"key": "mapreduce.jobhistory.address", "value": "awscdh57singlenode.alpinenow.local:10020"},
{"key": "mapreduce.jobhistory.webapp.address", "value": "awscdh57singlenode.alpinenow.local:19888"},
{"key": "yarn.app.mapreduce.am.staging-dir", "value": "/tmp"},
{"key": "yarn.resourcemanager.admin.address", "value": "awscdh57singlenode.alpinenow.local:8033"},
{"key": "yarn.resourcemanager.resource-tracker.address",
"value": "awscdh57singlenode.alpinenow.local:8031"},
{"key": "yarn.resourcemanager.scheduler.address", "value": "awscdh57singlenode.alpinenow.local:8030"}
]
datasource_hadoop = ds.add_hadoop_data_source("Cloudera CDH5.4-5.7", hadoop_datasource_name,
"Test Cloudera",
"awscdh57singlenode.alpinenow.local", 8020,
"awscdh57singlenode.alpinenow.local", 8032,
"yarn", "hadoop", additional_parameters
)
# Creating a Database Datasource for test get/update functions
ds.delete_db_data_source_if_exists("Test_GP")
datasource = ds.add_greenplum_data_source("Test_GP", "Test Greenplum", "10.10.0.151", 5432,
"miner_demo", "miner_demo", "miner_demo")
db_datasource_id = datasource['id']
# Creating a Hadoop Datasource for test get/update functions
ds.delete_hadoop_data_source_if_exists("Test_Cloudera")
additional_parameters = [
{"key": "mapreduce.jobhistory.address", "value": "awscdh57singlenode.alpinenow.local:10020"},
{"key": "mapreduce.jobhistory.webapp.address", "value": "awscdh57singlenode.alpinenow.local:19888"},
{"key": "yarn.app.mapreduce.am.staging-dir", "value": "/tmp"},
{"key": "yarn.resourcemanager.admin.address", "value": "awscdh57singlenode.alpinenow.local:8033"},
{"key": "yarn.resourcemanager.resource-tracker.address",
"value": "awscdh57singlenode.alpinenow.local:8031"},
{"key": "yarn.resourcemanager.scheduler.address", "value": "awscdh57singlenode.alpinenow.local:8030"}
]
datasource_hadoop = ds.add_hadoop_data_source("Cloudera CDH5.4-5.7", "Test_Cloudera", "Test Cloudera",
"awscdh57singlenode.alpinenow.local", 8020,
"awscdh57singlenode.alpinenow.local", 8032,
"yarn", "hadoop", additional_parameters
)
hadoop_datasource_id = datasource_hadoop['id']
hadoop_datasource_id = alpine_session.datasource.get_id("Test_Cloudera")
def tearDown(self):
# Drop the datasources created in setup
alpine_session = APIClient(self.host, self.port)
alpine_session.login(self.username, self.password)
ds = DataSource(alpine_session.base_url, alpine_session.session, alpine_session.token)
ds.delete_db_data_source_if_exists("Test_GP")
ds.delete_db_data_source_if_exists("Test_Cloudera")
def test_add_greenplum_data_source(self):
ds.delete_db_data_source_if_exists("Test_GP_add")
datasource = ds.add_greenplum_data_source("Test_GP_add", "Test Greenplum", "10.10.0.151", 5432,
"miner_demo","miner_demo", "miner_demo")
self.assertEqual(datasource['name'], "Test_GP_add")
def test_add_postgres_data_source(self):
self.fail()
def test_add_hawq_data_source(self):
self.fail()
def test_add_oracle_data_source(self):
self.fail()
def test_add_jdbc_data_source(self):
self.fail()
def test_add_jdbc_hive_data_source(self):
self.fail()
def test_add_hadoop_data_source(self):
alpine_session = APIClient(self.host, self.port)
alpine_session.login(self.username, self.password)
ds = DataSource(alpine_session.base_url, alpine_session.session, alpine_session.token)
ds.delete_hadoop_data_source_if_exists("Test_Cloudera_add")
additional_parameters = [
{"key": "mapreduce.jobhistory.address", "value": "awscdh57singlenode.alpinenow.local:10020"},
{"key": "mapreduce.jobhistory.webapp.address", "value": "awscdh57singlenode.alpinenow.local:19888"},
{"key": "yarn.app.mapreduce.am.staging-dir", "value": "/tmp"},
{"key": "yarn.resourcemanager.admin.address", "value": "awscdh57singlenode.alpinenow.local:8033"},
{"key": "yarn.resourcemanager.resource-tracker.address", "value": "awscdh57singlenode.alpinenow.local:8031"},
{"key": "yarn.resourcemanager.scheduler.address", "value": "awscdh57singlenode.alpinenow.local:8030"}
]
datasource = ds.add_hadoop_data_source("Cloudera CDH5.4-5.7", "Test_Cloudera_add","Test Cloudera",
"awscdh57singlenode.alpinenow.local", 8020,
"awscdh57singlenode.alpinenow.local", 8032,
"yarn", "hadoop",additional_parameters
)
self.assertEqual(datasource['name'], "Test_Cloudera_add")
def test_add_hadoop_hive_data_source(self):
self.fail()
def test_delete_db_data_source(self):
alpine_session = APIClient(self.host, self.port)
alpine_session.login(self.username, self.password)
ds.delete_db_data_source_if_exists("Test_GP_delete")
ds.add_greenplum_data_source("Test_GP_delete", "Test Greenplum", "10.10.0.151", 5432,
"miner_demo", "miner_demo", "miner_demo")
response = ds.delete_db_data_source("Test_GP_delete")
self.assertEqual(response.status_code, 200)
try:
alpine_session.datasource.get("Test_GP_delete","Database")
except DataSourceNotFoundException:
pass
else:
self.fail("Failed to Delete the Datasource {0}".format("Test_GP_delete"))
def test_delete_hadoop_data_source(self):
alpine_session = APIClient(self.host, self.port)
alpine_session.login(self.username, self.password)
ds.delete_hadoop_data_source_if_exists("Test_Cloudera_delete")
additional_parameters = [
{"key": "mapreduce.jobhistory.address", "value": "awscdh57singlenode.alpinenow.local:10020"},
{"key": "mapreduce.jobhistory.webapp.address", "value": "awscdh57singlenode.alpinenow.local:19888"},
{"key": "yarn.app.mapreduce.am.staging-dir", "value": "/tmp"},
{"key": "yarn.resourcemanager.admin.address", "value": "awscdh57singlenode.alpinenow.local:8033"},
{"key": "yarn.resourcemanager.resource-tracker.address",
"value": "awscdh57singlenode.alpinenow.local:8031"},
{"key": "yarn.resourcemanager.scheduler.address", "value": "awscdh57singlenode.alpinenow.local:8030"}
]
ds.add_hadoop_data_source("Cloudera CDH5.4-5.7", "Test_Cloudera_delete", "Test Cloudera",
"awscdh57singlenode.alpinenow.local", 8020,
"awscdh57singlenode.alpinenow.local", 8032,
"yarn", "hadoop", additional_parameters
)
response = ds.delete_hadoop_data_source("Test_Cloudera_delete")
self.assertEqual(response.status_code, 200)
try:
alpine_session.datasource.get("Test_Cloudera_delete", "Hadoop")
except DataSourceNotFoundException:
pass
else:
self.fail("Failed to Delete the Datasource {0}".format("Test_Cloudera_delete"))
def test_get_db_data_source_username(self):
self.fail()
def test_enable_db_data_source(self):
self.fail()
def test_disable_db_data_source(self):
self.fail()
def test_enable_hdfs_data_source(self):
self.fail()
def test_disable_hdfs_data_source(self):
self.fail()
def test_add_user_to_datasource(self):
self.fail()
def test_get_users_on_a_data_source(self):
self.fail()
def test_remove_user_from_datasource(self):
self.fail()
def test_change_owner_of_datasource(self):
self.fail()
| [
"alpine.apiclient.APIClient"
] | [((616, 647), 'alpine.apiclient.APIClient', 'APIClient', (['self.host', 'self.port'], {}), '(self.host, self.port)\n', (625, 647), False, 'from alpine.apiclient import APIClient\n'), ((4440, 4471), 'alpine.apiclient.APIClient', 'APIClient', (['self.host', 'self.port'], {}), '(self.host, self.port)\n', (4449, 4471), False, 'from alpine.apiclient import APIClient\n'), ((5507, 5538), 'alpine.apiclient.APIClient', 'APIClient', (['self.host', 'self.port'], {}), '(self.host, self.port)\n', (5516, 5538), False, 'from alpine.apiclient import APIClient\n'), ((7138, 7169), 'alpine.apiclient.APIClient', 'APIClient', (['self.host', 'self.port'], {}), '(self.host, self.port)\n', (7147, 7169), False, 'from alpine.apiclient import APIClient\n'), ((7923, 7954), 'alpine.apiclient.APIClient', 'APIClient', (['self.host', 'self.port'], {}), '(self.host, self.port)\n', (7932, 7954), False, 'from alpine.apiclient import APIClient\n')] |
from datetime import date
from matplotlib.pyplot import hist
class Contract(object):
def __init__(self, date, customer, contract_type):
self.__date = date
self.__customer = customer
self.__type = contract_type
@property
def date(self):
return self.__date
@date.setter
def date(self, date):
self.__date = date
@property
def customer(self):
return self.__customer
@customer.setter
def customer(self, customer):
self.__customer = customer
@property
def contract_type(self):
return self.__type
@contract_type.setter
def contract_type(self, contract_type):
self.__type = contract_type
def advance(self):
if self.__type == 'NEW':
self.__type = 'IN PROGRESS'
elif self.__type == 'IN PROGRESS':
self.__type = 'DEAL'
elif self.__type == 'DEAL':
self.__type = 'DONE'
def save_state(self):
return State(Contract(date=self.__date, customer=self.__customer, contract_type=self.__type))
def restore_state(self, state):
self.__customer = state.contract.customer
self.__date = state.contract.date
self.__type = state.contract.contract_type
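# Note: the three classes here form a small Memento pattern -- Contract is the
# originator, State (below) is the memento holding a frozen copy of the
# contract, and History is the caretaker that stores the snapshots; the
# __main__ block at the bottom shows saving states and rolling one back with
# restore_state().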
class State(object):
def __init__(self, contract):
self.__contract = contract
@property
def contract(self):
return self.__contract
class History(object):
def __init__(self):
self.__saved_states = []
def get_state(self, index):
return self.__saved_states[index]
def add_state(self, state):
self.__saved_states.append(state)
if __name__ == '__main__':
history = History()
contract = Contract(date=date.today(),
customer='<NAME>', contract_type='NEW')
contract.advance()
history.add_state(contract.save_state())
contract.advance()
contract.customer = '<NAME>'
history.add_state(contract.save_state())
contract.advance()
history.add_state(contract.save_state())
contract.advance()
history.add_state(contract.save_state())
print(contract.contract_type)
contract.restore_state(history.get_state(1))
print(contract.contract_type)
print(contract.customer)
| [
"datetime.date.today"
] | [((1745, 1757), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1755, 1757), False, 'from datetime import date\n')] |
#!/usr/bin/env python
import argparse
import logging
import simple_websocket
from filestreams import FilePublisher, FileSource
from edfstreams import EdfFileSource
from proxy import SubscriberProxy
from wsm import WebSocketMessenger
def file_source(file_path, file_format):
if file_format.upper() == 'EDF':
return EdfFileSource(file_path)
else:
return FileSource(file_path)
def main(file_path, file_format, endpoint):
logging.info(f"main(): file_path: {file_path} endpoint: {endpoint}")
try:
count = 0
ws = simple_websocket.Client(endpoint)
messenger = WebSocketMessenger(ws)
publisher = FilePublisher(file_source=file_source(file_path, file_format))
subscriber = SubscriberProxy(publisher, messenger=messenger)
logging.debug("main() starting receive loop...")
while True:
message = ws.receive()
count += 1
logging.debug(f"main() ws.receive({count}) message: {message}")
subscriber.event(message)
except (simple_websocket.ConnectionClosed, simple_websocket.ws.ConnectionClosed):
pass
except (KeyboardInterrupt, EOFError):
ws.close()
parser = argparse.ArgumentParser()
parser.add_argument('--input', type=str)
parser.add_argument('--format', type=str, default='text')
parser.add_argument('--endpoint', type=str)
parser.add_argument('--log-file', type=str, default='publish-file.log')
parser.add_argument('--log-level', type=str, default='DEBUG')
args = parser.parse_args()
# initialize logging
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s',
filename=args.log_file,
level=getattr(logging, args.log_level.upper()))
main(args.input, args.format, args.endpoint)
| [
"wsm.WebSocketMessenger",
"proxy.SubscriberProxy",
"simple_websocket.Client",
"logging.debug",
"argparse.ArgumentParser",
"edfstreams.EdfFileSource",
"filestreams.FileSource",
"logging.info"
] | [((1206, 1231), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1229, 1231), False, 'import argparse\n'), ((449, 517), 'logging.info', 'logging.info', (['f"""main(): file_path: {file_path} endpoint: {endpoint}"""'], {}), "(f'main(): file_path: {file_path} endpoint: {endpoint}')\n", (461, 517), False, 'import logging\n'), ((328, 352), 'edfstreams.EdfFileSource', 'EdfFileSource', (['file_path'], {}), '(file_path)\n', (341, 352), False, 'from edfstreams import EdfFileSource\n'), ((378, 399), 'filestreams.FileSource', 'FileSource', (['file_path'], {}), '(file_path)\n', (388, 399), False, 'from filestreams import FilePublisher, FileSource\n'), ((558, 591), 'simple_websocket.Client', 'simple_websocket.Client', (['endpoint'], {}), '(endpoint)\n', (581, 591), False, 'import simple_websocket\n'), ((612, 634), 'wsm.WebSocketMessenger', 'WebSocketMessenger', (['ws'], {}), '(ws)\n', (630, 634), False, 'from wsm import WebSocketMessenger\n'), ((739, 786), 'proxy.SubscriberProxy', 'SubscriberProxy', (['publisher'], {'messenger': 'messenger'}), '(publisher, messenger=messenger)\n', (754, 786), False, 'from proxy import SubscriberProxy\n'), ((795, 843), 'logging.debug', 'logging.debug', (['"""main() starting receive loop..."""'], {}), "('main() starting receive loop...')\n", (808, 843), False, 'import logging\n'), ((934, 997), 'logging.debug', 'logging.debug', (['f"""main() ws.receive({count}) message: {message}"""'], {}), "(f'main() ws.receive({count}) message: {message}')\n", (947, 997), False, 'import logging\n')] |
from django.views import generic
from django.db.models import get_model
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from oscar.apps.dashboard.catalogue import forms
from oscar.core.loading import get_classes
ProductForm, CategoryForm, StockRecordForm, StockAlertSearchForm, ProductCategoryFormSet, ProductImageFormSet = get_classes(
'dashboard.catalogue.forms', ('ProductForm', 'CategoryForm', 'StockRecordForm',
'StockAlertSearchForm',
'ProductCategoryFormSet',
'ProductImageFormSet'))
Product = get_model('catalogue', 'Product')
Category = get_model('catalogue', 'Category')
ProductCategory = get_model('catalogue', 'ProductCategory')
ProductClass = get_model('catalogue', 'ProductClass')
StockRecord = get_model('partner', 'StockRecord')
StockAlert = get_model('partner', 'StockAlert')
class ProductListView(generic.ListView):
template_name = 'dashboard/catalogue/product_list.html'
model = Product
context_object_name = 'products'
form_class = forms.ProductSearchForm
description_template = _(u'Products %(upc_filter)s %(title_filter)s')
paginate_by = 20
def get_context_data(self, **kwargs):
ctx = super(ProductListView, self).get_context_data(**kwargs)
ctx['product_classes'] = ProductClass.objects.all()
ctx['form'] = self.form
ctx['queryset_description'] = self.description
return ctx
def get_queryset(self):
"""
Build the queryset for this list and also update the title that
describes the queryset
"""
description_ctx = {'upc_filter': '',
'title_filter': ''}
queryset = self.model.objects.all().order_by('-date_created').prefetch_related(
'product_class', 'stockrecord__partner')
self.form = self.form_class(self.request.GET)
if not self.form.is_valid():
self.description = self.description_template % description_ctx
return queryset
data = self.form.cleaned_data
if data['upc']:
queryset = queryset.filter(upc=data['upc'])
description_ctx['upc_filter'] = _(" including an item with UPC '%s'") % data['upc']
if data['title']:
queryset = queryset.filter(title__icontains=data['title']).distinct()
description_ctx['title_filter'] = _(" including an item with title matching '%s'") % data['title']
self.description = self.description_template % description_ctx
return queryset
class ProductCreateRedirectView(generic.RedirectView):
def get_redirect_url(self, **kwargs):
product_class_id = self.request.GET.get('product_class', None)
if not product_class_id or not product_class_id.isdigit():
messages.error(self.request, _("Please choose a product class"))
return reverse('dashboard:catalogue-product-list')
try:
product_class = ProductClass.objects.get(id=product_class_id)
except ProductClass.DoesNotExist:
messages.error(self.request, _("Please choose a product class"))
return reverse('dashboard:catalogue-product-list')
else:
return reverse('dashboard:catalogue-product-create',
kwargs={'product_class_id': product_class.id})
class ProductCreateView(generic.CreateView):
template_name = 'dashboard/catalogue/product_update.html'
model = Product
context_object_name = 'product'
form_class = ProductForm
def get_context_data(self, **kwargs):
ctx = super(ProductCreateView, self).get_context_data(**kwargs)
pclass = self.get_product_class()
if 'stockrecord_form' not in ctx:
ctx['stockrecord_form'] = StockRecordForm(pclass)
if 'category_formset' not in ctx:
ctx['category_formset'] = ProductCategoryFormSet()
if 'image_formset' not in ctx:
ctx['image_formset'] = ProductImageFormSet()
ctx['title'] = _('Create new %s product') % pclass.name
ctx['product_class'] = pclass
return ctx
def get_product_class(self):
return ProductClass.objects.get(id=self.kwargs['product_class_id'])
def get_form_kwargs(self):
kwargs = super(ProductCreateView, self).get_form_kwargs()
kwargs['product_class'] = self.get_product_class()
return kwargs
def is_stockrecord_submitted(self):
return len(self.request.POST.get('partner', '')) > 0
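    # Note: the stock record is treated as optional on this form -- it is only
    # validated and saved when the POST data carries a non-empty "partner"
    # value, which is how is_stockrecord_submitted() decides whether the user
    # actually filled that form in.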
def get_stockrecord_form(self):
pclass = self.get_product_class()
if self.is_stockrecord_submitted():
return StockRecordForm(pclass, self.request.POST)
return StockRecordForm(pclass)
def form_invalid(self, form):
category_formset = ProductCategoryFormSet(self.request.POST)
image_formset = ProductImageFormSet(self.request.POST, self.request.FILES)
messages.error(self.request,
_("Your submitted data was not valid - please "
"correct the below errors"))
ctx = self.get_context_data(form=form,
stockrecord_form=self.get_stockrecord_form(),
category_formset=category_formset,
image_formset=image_formset)
return self.render_to_response(ctx)
def form_valid(self, form):
product = form.save()
category_formset = ProductCategoryFormSet(self.request.POST,
instance=product)
image_formset = ProductImageFormSet(self.request.POST,
self.request.FILES,
instance=product)
stockrecord_form = self.get_stockrecord_form()
if self.is_stockrecord_submitted():
is_valid = all([stockrecord_form.is_valid(),
category_formset.is_valid(),
image_formset.is_valid()])
else:
is_valid = all([category_formset.is_valid(),
image_formset.is_valid()])
if is_valid:
if self.is_stockrecord_submitted():
# Save stock record
stockrecord = stockrecord_form.save(commit=False)
stockrecord.product = product
stockrecord.save()
# Save formsets
category_formset.save()
image_formset.save()
return HttpResponseRedirect(self.get_success_url(product))
messages.error(self.request,
_("Your submitted data was not valid - please "
"correct the below errors"))
# Delete product as its relations were not valid
product.delete()
ctx = self.get_context_data(form=form,
stockrecord_form=stockrecord_form,
category_formset=category_formset,
image_formset=image_formset)
return self.render_to_response(ctx)
def get_success_url(self, product):
messages.success(self.request, _("Created product '%s'") % product.title)
return reverse('dashboard:catalogue-product-list')
class ProductUpdateView(generic.UpdateView):
template_name = 'dashboard/catalogue/product_update.html'
model = Product
context_object_name = 'product'
form_class = ProductForm
def get_context_data(self, **kwargs):
ctx = super(ProductUpdateView, self).get_context_data(**kwargs)
if 'stockrecord_form' not in ctx:
ctx['stockrecord_form'] = self.get_stockrecord_form()
if 'category_formset' not in ctx:
ctx['category_formset'] = ProductCategoryFormSet(instance=self.object)
if 'image_formset' not in ctx:
ctx['image_formset'] = ProductImageFormSet(instance=self.object)
ctx['title'] = _('Update product')
return ctx
def get_form_kwargs(self):
kwargs = super(ProductUpdateView, self).get_form_kwargs()
kwargs['product_class'] = self.object.product_class
return kwargs
def is_stockrecord_submitted(self):
return len(self.request.POST.get('partner', '')) > 0
def get_stockrecord_form(self):
"""
        Get the ``StockRecordForm`` prepopulated with POST
data if available. If the product in this view has a
stock record it will be passed into the form as
``instance``.
"""
stockrecord = None
if self.object.has_stockrecord:
stockrecord = self.object.stockrecord
if not self.is_stockrecord_submitted():
return StockRecordForm(self.object.product_class,
instance=stockrecord)
return StockRecordForm(
self.object.product_class,
self.request.POST,
instance=stockrecord)
def form_invalid(self, form):
stockrecord_form = self.get_stockrecord_form()
category_formset = ProductCategoryFormSet(self.request.POST,
instance=self.object)
image_formset = ProductImageFormSet(self.request.POST,
self.request.FILES,
instance=self.object)
ctx = self.get_context_data(form=form,
stockrecord_form=stockrecord_form,
category_formset=category_formset,
image_formset=image_formset)
return self.render_to_response(ctx)
def form_valid(self, form):
stockrecord_form = self.get_stockrecord_form()
category_formset = ProductCategoryFormSet(self.request.POST,
instance=self.object)
image_formset = ProductImageFormSet(self.request.POST,
self.request.FILES,
instance=self.object)
if self.is_stockrecord_submitted():
is_valid = all([stockrecord_form.is_valid(),
category_formset.is_valid(),
image_formset.is_valid()])
else:
is_valid = all([category_formset.is_valid(),
image_formset.is_valid()])
if is_valid:
form.save()
if self.is_stockrecord_submitted():
stockrecord = stockrecord_form.save(commit=False)
stockrecord.product = self.object
stockrecord.save()
category_formset.save()
image_formset.save()
return HttpResponseRedirect(self.get_success_url())
ctx = self.get_context_data(form=form,
stockrecord_form=stockrecord_form,
category_formset=category_formset,
image_formset=image_formset)
return self.render_to_response(ctx)
def get_success_url(self):
messages.success(self.request, _("Updated product '%s'") %
self.object.title)
return reverse('dashboard:catalogue-product-list')
class StockAlertListView(generic.ListView):
template_name = 'dashboard/catalogue/stockalert_list.html'
model = StockAlert
context_object_name = 'alerts'
paginate_by = 20
def get_context_data(self, **kwargs):
ctx = super(StockAlertListView, self).get_context_data(**kwargs)
ctx['form'] = self.form
ctx['description'] = self.description
return ctx
def get_queryset(self):
if 'status' in self.request.GET:
self.form = StockAlertSearchForm(self.request.GET)
if self.form.is_valid():
status = self.form.cleaned_data['status']
self.description = _('Alerts with status "%s"') % status
return self.model.objects.filter(status=status)
else:
self.description = _('All alerts')
self.form = StockAlertSearchForm()
return self.model.objects.all()
class CategoryListView(generic.TemplateView):
template_name = 'dashboard/catalogue/category_list.html'
def get_context_data(self, *args, **kwargs):
ctx = super(CategoryListView, self).get_context_data(*args, **kwargs)
ctx['child_categories'] = Category.get_root_nodes()
return ctx
class CategoryDetailListView(generic.DetailView):
template_name = 'dashboard/catalogue/category_list.html'
model = Category
context_object_name = 'category'
def get_context_data(self, *args, **kwargs):
ctx = super(CategoryDetailListView, self).get_context_data(*args, **kwargs)
ctx['child_categories'] = self.object.get_children()
ctx['ancestors'] = self.object.get_ancestors()
return ctx
class CategoryListMixin(object):
def get_success_url(self):
parent = self.object.get_parent()
if parent is None:
return reverse("dashboard:catalogue-category-list")
else:
return reverse("dashboard:catalogue-category-detail-list",
args=(parent.pk,))
class CategoryCreateView(CategoryListMixin, generic.CreateView):
template_name = 'dashboard/catalogue/category_form.html'
model = Category
form_class = CategoryForm
def get_context_data(self, **kwargs):
ctx = super(CategoryCreateView, self).get_context_data(**kwargs)
ctx['title'] = "Add a new category"
return ctx
def get_success_url(self):
messages.info(self.request, "Category created successfully")
return super(CategoryCreateView, self).get_success_url()
class CategoryUpdateView(CategoryListMixin, generic.UpdateView):
template_name = 'dashboard/catalogue/category_form.html'
model = Category
form_class = CategoryForm
def get_context_data(self, **kwargs):
ctx = super(CategoryUpdateView, self).get_context_data(**kwargs)
ctx['title'] = "Update category '%s'" % self.object.name
return ctx
def get_success_url(self):
messages.info(self.request, "Category updated successfully")
return super(CategoryUpdateView, self).get_success_url()
class CategoryDeleteView(CategoryListMixin, generic.DeleteView):
template_name = 'dashboard/catalogue/category_delete.html'
model = Category
def get_context_data(self, *args, **kwargs):
ctx = super(CategoryDeleteView, self).get_context_data(*args, **kwargs)
ctx['parent'] = self.object.get_parent()
return ctx
def get_success_url(self):
messages.info(self.request, "Category deleted successfully")
return super(CategoryDeleteView, self).get_success_url()
| [
"django.utils.translation.ugettext_lazy",
"django.db.models.get_model",
"oscar.core.loading.get_classes",
"django.contrib.messages.info",
"django.core.urlresolvers.reverse"
] | [((460, 633), 'oscar.core.loading.get_classes', 'get_classes', (['"""dashboard.catalogue.forms"""', "('ProductForm', 'CategoryForm', 'StockRecordForm', 'StockAlertSearchForm',\n 'ProductCategoryFormSet', 'ProductImageFormSet')"], {}), "('dashboard.catalogue.forms', ('ProductForm', 'CategoryForm',\n 'StockRecordForm', 'StockAlertSearchForm', 'ProductCategoryFormSet',\n 'ProductImageFormSet'))\n", (471, 633), False, 'from oscar.core.loading import get_classes\n'), ((743, 776), 'django.db.models.get_model', 'get_model', (['"""catalogue"""', '"""Product"""'], {}), "('catalogue', 'Product')\n", (752, 776), False, 'from django.db.models import get_model\n'), ((788, 822), 'django.db.models.get_model', 'get_model', (['"""catalogue"""', '"""Category"""'], {}), "('catalogue', 'Category')\n", (797, 822), False, 'from django.db.models import get_model\n'), ((841, 882), 'django.db.models.get_model', 'get_model', (['"""catalogue"""', '"""ProductCategory"""'], {}), "('catalogue', 'ProductCategory')\n", (850, 882), False, 'from django.db.models import get_model\n'), ((898, 936), 'django.db.models.get_model', 'get_model', (['"""catalogue"""', '"""ProductClass"""'], {}), "('catalogue', 'ProductClass')\n", (907, 936), False, 'from django.db.models import get_model\n'), ((951, 986), 'django.db.models.get_model', 'get_model', (['"""partner"""', '"""StockRecord"""'], {}), "('partner', 'StockRecord')\n", (960, 986), False, 'from django.db.models import get_model\n'), ((1000, 1034), 'django.db.models.get_model', 'get_model', (['"""partner"""', '"""StockAlert"""'], {}), "('partner', 'StockAlert')\n", (1009, 1034), False, 'from django.db.models import get_model\n'), ((1263, 1309), 'django.utils.translation.ugettext_lazy', '_', (['u"""Products %(upc_filter)s %(title_filter)s"""'], {}), "(u'Products %(upc_filter)s %(title_filter)s')\n", (1264, 1309), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7464, 7507), 'django.core.urlresolvers.reverse', 'reverse', (['"""dashboard:catalogue-product-list"""'], {}), "('dashboard:catalogue-product-list')\n", (7471, 7507), False, 'from django.core.urlresolvers import reverse\n'), ((8189, 8208), 'django.utils.translation.ugettext_lazy', '_', (['"""Update product"""'], {}), "('Update product')\n", (8190, 8208), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11512, 11555), 'django.core.urlresolvers.reverse', 'reverse', (['"""dashboard:catalogue-product-list"""'], {}), "('dashboard:catalogue-product-list')\n", (11519, 11555), False, 'from django.core.urlresolvers import reverse\n'), ((13956, 14016), 'django.contrib.messages.info', 'messages.info', (['self.request', '"""Category created successfully"""'], {}), "(self.request, 'Category created successfully')\n", (13969, 14016), False, 'from django.contrib import messages\n'), ((14501, 14561), 'django.contrib.messages.info', 'messages.info', (['self.request', '"""Category updated successfully"""'], {}), "(self.request, 'Category updated successfully')\n", (14514, 14561), False, 'from django.contrib import messages\n'), ((15016, 15076), 'django.contrib.messages.info', 'messages.info', (['self.request', '"""Category deleted successfully"""'], {}), "(self.request, 'Category deleted successfully')\n", (15029, 15076), False, 'from django.contrib import messages\n'), ((3059, 3102), 'django.core.urlresolvers.reverse', 'reverse', (['"""dashboard:catalogue-product-list"""'], {}), "('dashboard:catalogue-product-list')\n", (3066, 3102), False, 'from django.core.urlresolvers import reverse\n'), 
((3405, 3501), 'django.core.urlresolvers.reverse', 'reverse', (['"""dashboard:catalogue-product-create"""'], {'kwargs': "{'product_class_id': product_class.id}"}), "('dashboard:catalogue-product-create', kwargs={'product_class_id':\n product_class.id})\n", (3412, 3501), False, 'from django.core.urlresolvers import reverse\n'), ((4204, 4230), 'django.utils.translation.ugettext_lazy', '_', (['"""Create new %s product"""'], {}), "('Create new %s product')\n", (4205, 4230), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5166, 5238), 'django.utils.translation.ugettext_lazy', '_', (['"""Your submitted data was not valid - please correct the below errors"""'], {}), "('Your submitted data was not valid - please correct the below errors')\n", (5167, 5238), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6843, 6915), 'django.utils.translation.ugettext_lazy', '_', (['"""Your submitted data was not valid - please correct the below errors"""'], {}), "('Your submitted data was not valid - please correct the below errors')\n", (6844, 6915), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12367, 12382), 'django.utils.translation.ugettext_lazy', '_', (['"""All alerts"""'], {}), "('All alerts')\n", (12368, 12382), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((13381, 13425), 'django.core.urlresolvers.reverse', 'reverse', (['"""dashboard:catalogue-category-list"""'], {}), "('dashboard:catalogue-category-list')\n", (13388, 13425), False, 'from django.core.urlresolvers import reverse\n'), ((13459, 13529), 'django.core.urlresolvers.reverse', 'reverse', (['"""dashboard:catalogue-category-detail-list"""'], {'args': '(parent.pk,)'}), "('dashboard:catalogue-category-detail-list', args=(parent.pk,))\n", (13466, 13529), False, 'from django.core.urlresolvers import reverse\n'), ((2357, 2394), 'django.utils.translation.ugettext_lazy', '_', (['""" including an item with UPC \'%s\'"""'], {}), '(" including an item with UPC \'%s\'")\n', (2358, 2394), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2564, 2612), 'django.utils.translation.ugettext_lazy', '_', (['""" including an item with title matching \'%s\'"""'], {}), '(" including an item with title matching \'%s\'")\n', (2565, 2612), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3004, 3038), 'django.utils.translation.ugettext_lazy', '_', (['"""Please choose a product class"""'], {}), "('Please choose a product class')\n", (3005, 3038), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3328, 3371), 'django.core.urlresolvers.reverse', 'reverse', (['"""dashboard:catalogue-product-list"""'], {}), "('dashboard:catalogue-product-list')\n", (3335, 3371), False, 'from django.core.urlresolvers import reverse\n'), ((7406, 7431), 'django.utils.translation.ugettext_lazy', '_', (['"""Created product \'%s\'"""'], {}), '("Created product \'%s\'")\n', (7407, 7431), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((11425, 11450), 'django.utils.translation.ugettext_lazy', '_', (['"""Updated product \'%s\'"""'], {}), '("Updated product \'%s\'")\n', (11426, 11450), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3273, 3307), 'django.utils.translation.ugettext_lazy', '_', (['"""Please choose a product class"""'], {}), "('Please choose a product class')\n", (3274, 3307), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((12220, 12248), 'django.utils.translation.ugettext_lazy', 
'_', (['"""Alerts with status "%s\\""""'], {}), '(\'Alerts with status "%s"\')\n', (12221, 12248), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
#!/usr/bin/python3
#
# Author: <NAME>, (C) Aalto University.
# Only for student use on the Aalto course CS-E4800/CS-EJ4801.
# Do not redistribute.
#
import time
import queue
import itertools
from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid
from BFS import breadthFirstSearch
from Astar import ASTAR
grid3I= ["........",
"........",
"........",
"........",
"........",
"32......",
"14......"]
grid3G= ["........",
"........",
"........",
"..31....",
"..24....",
"........",
"........"]
#plan,cost = breadthFirstSearch(MAPPGridState(init3,xsize=xs3,ysize=ys3,walls=w3),
# lambda state: (state.agents == goal3))
print("CORRECT RESULT: optimal cost is 16.0")
print("RUNTIME ESTIMATE: < 5 seconds")
init3,xs3,ys3,w3 = createMAPPgrid(grid3I)
goal3,xs3,ys3,w3 = createMAPPgrid(grid3G)
plan,cost = ASTAR(MAPPGridState(init3,xsize=xs3,ysize=ys3,walls=w3),
lambda state: (state.agents == goal3), # goal test
MAPPdistance(goal3)) # function: distance to goal
for s in plan:
s.show()
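# As used throughout this file, ASTAR(initial_state, goal_test, heuristic)
# returns a (plan, cost) pair: `plan` is the sequence of MAPPGridState objects
# from the start state to one whose agents match the goal, and `cost` is the
# total path cost (compare with the "CORRECT RESULT" lines printed above).
# MAPPdistance(goal) supplies the distance-to-goal heuristic in every call.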
grid1I= ["...#.........",
"...#.........",
"...#.........",
"...########..",
"..12......34.",
"...###..###..",
"...######....",
"........#....",
"........#...."]
grid1G= ["...#.........",
"...#.........",
"...#.........",
"...########..",
"...34.....21.",
"...###..###..",
"...######....",
"........#....",
"........#...."]
print("CORRECT RESULT: optimal cost is 34.0")
print("RUNTIME ESTIMATE: < 15 seconds")
init1,xs1,ys1,w1 = createMAPPgrid(grid1I)
goal1,xs1,ys1,w1 = createMAPPgrid(grid1G)
plan,cost = ASTAR(MAPPGridState(init1,xsize=xs1,ysize=ys1,walls=w1),
lambda state: (state.agents == goal1), # goal test
MAPPdistance(goal1)) # function: distance to goal
for s in plan:
s.show()
grid0I= ["...........",
"...........",
"..12.......",
"..34.......",
"...........",
"...........",
"..........."]
grid0G= ["...........",
"...........",
"...........",
"...........",
"...........",
"........12.",
"........34"]
print("CORRECT RESULT: optimal cost is 36.0")
print("RUNTIME ESTIMATE: < 40 seconds")
init0,xs0,ys0,w0 = createMAPPgrid(grid0I)
goal0,xs0,ys0,w0 = createMAPPgrid(grid0G)
plan,cost = ASTAR(MAPPGridState(init0,xsize=xs0,ysize=ys0,walls=w0),
lambda state: (state.agents == goal0), # goal test
MAPPdistance(goal0)) # function: distance to goal
for s in plan:
s.show()
grid2I= ["..1#....",
"..2#....",
"........",
"...#3...",
"...#4...",
"...#...."]
grid2G= ["...#1...",
"...#2...",
"........",
"..3#....",
"..4#....",
"...#...."]
init2,xs,ys,w = createMAPPgrid(grid1I)
goal2,xs,ys,w = createMAPPgrid(grid1G)
print("CORRECT RESULT: optimal cost is 24.0")
print("RUNTIME ESTIMATE: < 5 minutes")
init2,xs2,ys2,w2 = createMAPPgrid(grid2I)
goal2,xs2,ys2,w2 = createMAPPgrid(grid2G)
plan,cost = ASTAR(MAPPGridState(init2,xsize=xs2,ysize=ys2,walls=w2),
lambda state: (state.agents == goal2), # goal test
MAPPdistance(goal2)) # function: distance to goal
for s in plan:
s.show()
| [
"MAPP.MAPPGridState",
"MAPP.createMAPPgrid",
"MAPP.MAPPdistance"
] | [((884, 906), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid3I'], {}), '(grid3I)\n', (898, 906), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((926, 948), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid3G'], {}), '(grid3G)\n', (940, 948), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((1759, 1781), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid1I'], {}), '(grid1I)\n', (1773, 1781), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((1801, 1823), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid1G'], {}), '(grid1G)\n', (1815, 1823), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((2501, 2523), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid0I'], {}), '(grid0I)\n', (2515, 2523), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((2543, 2565), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid0G'], {}), '(grid0G)\n', (2557, 2565), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((3070, 3092), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid1I'], {}), '(grid1I)\n', (3084, 3092), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((3109, 3131), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid1G'], {}), '(grid1G)\n', (3123, 3131), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((3237, 3259), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid2I'], {}), '(grid2I)\n', (3251, 3259), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((3279, 3301), 'MAPP.createMAPPgrid', 'createMAPPgrid', (['grid2G'], {}), '(grid2G)\n', (3293, 3301), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((967, 1019), 'MAPP.MAPPGridState', 'MAPPGridState', (['init3'], {'xsize': 'xs3', 'ysize': 'ys3', 'walls': 'w3'}), '(init3, xsize=xs3, ysize=ys3, walls=w3)\n', (980, 1019), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((1105, 1124), 'MAPP.MAPPdistance', 'MAPPdistance', (['goal3'], {}), '(goal3)\n', (1117, 1124), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((1842, 1894), 'MAPP.MAPPGridState', 'MAPPGridState', (['init1'], {'xsize': 'xs1', 'ysize': 'ys1', 'walls': 'w1'}), '(init1, xsize=xs1, ysize=ys1, walls=w1)\n', (1855, 1894), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((1980, 1999), 'MAPP.MAPPdistance', 'MAPPdistance', (['goal1'], {}), '(goal1)\n', (1992, 1999), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((2584, 2636), 'MAPP.MAPPGridState', 'MAPPGridState', (['init0'], {'xsize': 'xs0', 'ysize': 'ys0', 'walls': 'w0'}), '(init0, xsize=xs0, ysize=ys0, walls=w0)\n', (2597, 2636), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((2722, 2741), 'MAPP.MAPPdistance', 'MAPPdistance', (['goal0'], {}), '(goal0)\n', (2734, 2741), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((3320, 3372), 'MAPP.MAPPGridState', 'MAPPGridState', (['init2'], {'xsize': 'xs2', 'ysize': 'ys2', 'walls': 'w2'}), '(init2, xsize=xs2, ysize=ys2, walls=w2)\n', (3333, 3372), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n'), ((3458, 3477), 'MAPP.MAPPdistance', 
'MAPPdistance', (['goal2'], {}), '(goal2)\n', (3470, 3477), False, 'from MAPP import MAPPGridState, MAPPdistance, MAPPdistance0, createMAPPgrid\n')] |
from . import views
from django.urls import path
app_name = 'app'
urlpatterns = [
path('', views.app, name='app'),
]
| [
"django.urls.path"
] | [((88, 119), 'django.urls.path', 'path', (['""""""', 'views.app'], {'name': '"""app"""'}), "('', views.app, name='app')\n", (92, 119), False, 'from django.urls import path\n')] |
import unittest
from cloudsplaining.shared.validation import check_authorization_details_schema
import os
import json
class FindExcessiveWildcardsTestCase(unittest.TestCase):
def test_check_authorization_details_schema(self):
"""test_scanning.validate.check_authorization_details_schema"""
example_authz_details_file = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
os.path.pardir,
"files",
"example-authz-details.json",
)
)
with open(example_authz_details_file, "r") as json_file:
cfg = json.load(json_file)
decision = check_authorization_details_schema(cfg)
self.assertTrue(decision)
| [
"json.load",
"os.path.dirname",
"cloudsplaining.shared.validation.check_authorization_details_schema"
] | [((637, 657), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (646, 657), False, 'import json\n'), ((681, 720), 'cloudsplaining.shared.validation.check_authorization_details_schema', 'check_authorization_details_schema', (['cfg'], {}), '(cfg)\n', (715, 720), False, 'from cloudsplaining.shared.validation import check_authorization_details_schema\n'), ((400, 425), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (415, 425), False, 'import os\n')] |
import asyncio
import functools
from prompt_toolkit.mouse_events import MouseEventType
from prompt_toolkit.layout.containers import Container
from prompt_toolkit.layout.screen import Point
from prompt_toolkit.layout.dimension import LayoutDimension
from pymux.screen import BetterScreen
from pymux.stream import BetterStream
from ..lib.ssh import _SSHInteractiveHandler
from .tab import Tab
def create_interactive_tab(tosh, session):
tab = Vt100Tab(tosh, session)
session.switch_handler(functools.partial(_SSHInteractiveHandler, tab))
return tab
class Vt100Tab(Tab):
def __init__(self, tosh, session):
super().__init__(tosh)
self.title = 'SSH'
self._session = session
self._screen = BetterScreen(20, 80, self.write_to_ssh)
self._stream = BetterStream(self._screen)
self._stream.attach(self._screen)
self.layout = Vt100Window(self._screen, self)
def paste(self, event):
self.write_to_ssh(event.data)
def write_to_ssh(self, data):
self._session.channel.write(data)
def write_to_screen(self, data):
self._stream.feed(data)
self._tosh.refresh()
def set_size(self, w, h):
self._session.channel.change_terminal_size(w, h)
self._screen.resize(h, w)
class Vt100Window(Container):
"""
Container that holds the VT100 control.
"""
def __init__(self, screen, tab):
self.screen = screen
self._tab = tab
self._scroll_pos = 0
def reset(self):
pass
def preferred_width(self, cli, max_available_width):
return LayoutDimension()
def preferred_height(self, cli, width, max_available_height):
return LayoutDimension()
def _mouse_handler(self, cli, mouse_event):
if mouse_event.event_type == MouseEventType.SCROLL_DOWN:
self._scroll_pos = min(0, self._scroll_pos + 3)
elif mouse_event.event_type == MouseEventType.SCROLL_UP:
max_scroll = min(max(0, self.screen.max_y - self.screen.lines), self.screen.get_history_limit())
self._scroll_pos = max(-max_scroll, self._scroll_pos - 3)
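    # _scroll_pos never goes above zero: 0 means "follow the live output", and
    # scrolling up makes it negative, clamped so it cannot point past the
    # history that BetterScreen actually retains (max_scroll above).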
def write_to_screen(self, cli, screen, mouse_handlers, write_position):
"""
Write window to screen. This renders the user control, the margins and
copies everything over to the absolute position at the given screen.
"""
self._tab.set_size(write_position.width, write_position.height)
xmin = write_position.xpos
xmax = xmin + write_position.width
ymin = write_position.ypos
ymax = ymin + write_position.height
mouse_handlers.set_mouse_handler_for_range(xmin, xmax, ymin, ymax, self._mouse_handler)
# Render UserControl.
temp_screen = self.screen.pt_screen
# Write body to screen.
self._copy_body(cli, temp_screen, screen, write_position, write_position.width)
def _copy_body(self, cli, temp_screen, new_screen, write_position, width):
"""
Copy characters from the temp screen that we got from the `UIControl`
to the real screen.
"""
xpos = write_position.xpos
ypos = write_position.ypos
height = write_position.height
temp_buffer = temp_screen.data_buffer
new_buffer = new_screen.data_buffer
temp_screen_height = temp_screen.height
vertical_scroll = self.screen.line_offset
y = 0
# Now copy the region we need to the real screen.
for y in range(0, height):
# We keep local row variables. (Don't look up the row in the dict
# for each iteration of the nested loop.)
new_row = new_buffer[y + ypos]
if y >= temp_screen_height and y >= write_position.height:
# Break out of for loop when we pass after the last row of the
# temp screen. (We use the 'y' position for calculation of new
# screen's height.)
break
else:
temp_row = temp_buffer[y + vertical_scroll + self._scroll_pos]
# Copy row content, except for transparent tokens.
# (This is useful in case of floats.)
for x in range(0, width):
new_row[x + xpos] = temp_row[x]
new_screen.cursor_position = Point(
y=temp_screen.cursor_position.y + ypos - vertical_scroll,
x=temp_screen.cursor_position.x + xpos)
new_screen.show_cursor = temp_screen.show_cursor and self._scroll_pos == 0
# Update height of the output screen. (new_screen.write_data is not
# called, so the screen is not aware of its height.)
new_screen.height = max(new_screen.height, ypos + y + 1)
def walk(self, cli):
# Only yield self. A window doesn't have children.
yield self
| [
"prompt_toolkit.layout.screen.Point",
"prompt_toolkit.layout.dimension.LayoutDimension",
"pymux.screen.BetterScreen",
"pymux.stream.BetterStream",
"functools.partial"
] | [((499, 545), 'functools.partial', 'functools.partial', (['_SSHInteractiveHandler', 'tab'], {}), '(_SSHInteractiveHandler, tab)\n', (516, 545), False, 'import functools\n'), ((738, 777), 'pymux.screen.BetterScreen', 'BetterScreen', (['(20)', '(80)', 'self.write_to_ssh'], {}), '(20, 80, self.write_to_ssh)\n', (750, 777), False, 'from pymux.screen import BetterScreen\n'), ((801, 827), 'pymux.stream.BetterStream', 'BetterStream', (['self._screen'], {}), '(self._screen)\n', (813, 827), False, 'from pymux.stream import BetterStream\n'), ((1608, 1625), 'prompt_toolkit.layout.dimension.LayoutDimension', 'LayoutDimension', ([], {}), '()\n', (1623, 1625), False, 'from prompt_toolkit.layout.dimension import LayoutDimension\n'), ((1708, 1725), 'prompt_toolkit.layout.dimension.LayoutDimension', 'LayoutDimension', ([], {}), '()\n', (1723, 1725), False, 'from prompt_toolkit.layout.dimension import LayoutDimension\n'), ((4354, 4462), 'prompt_toolkit.layout.screen.Point', 'Point', ([], {'y': '(temp_screen.cursor_position.y + ypos - vertical_scroll)', 'x': '(temp_screen.cursor_position.x + xpos)'}), '(y=temp_screen.cursor_position.y + ypos - vertical_scroll, x=\n temp_screen.cursor_position.x + xpos)\n', (4359, 4462), False, 'from prompt_toolkit.layout.screen import Point\n')] |
# -*- coding: utf-8 -*-
import sys
import getpass
from .models import Entry, Group
from .crypto import AESCipher
def read_string(prompt="[^] Input: "):
''' Read a string from stdin
    :param prompt: The string to display before reading (without \n)
'''
print("%s" % prompt, end='', flush=True)
inp = sys.stdin.readline().splitlines()
if len(inp) == 0:
return ''
return inp[0]
def list_entries():
''' Print all the entries owned by you on the console grouped by groups '''
print("[^] Listing Groups and entries!\n")
groups = {}
for entry in Entry.list(params={'embedded': {'group': 1}}):
if entry.group:
if entry.group.name not in groups:
groups[entry.group.name] = []
groups[entry.group.name].append(entry)
for group in Group.list():
if group.name not in groups:
groups[group.name] = []
for group in groups:
print("├── %s" % group)
if len(groups.get(group)) == 0:
print("[^] Empty !")
for entry in groups.get(group):
print("├ ├── %s - %s" % (entry._id, entry.name))
print("\n[+] Done!")
return True
def create_group():
''' It creates a group in the API '''
name = read_string(prompt="[^] Group name: ")
print("[^] Creating group '%s'" % name)
group = Group()
group.icon = None
group.name = name
return group.save()
def _find_group():
found = False
while not found:
group = Group.find_one(params={'where': {'name': read_string('[^] Group Name: ')}})
if group is not None:
found = True
else:
print("Group not found ! try again !")
return group
def _find_entry():
found = False
while not found:
entry = Entry.find_one(params={
'where': {
'_id': read_string('[+] Entry ID: ')
},
'embedded': {
'group': 1
}
})
if entry is not None:
found = True
else:
print("Entry not found ! try again !")
return entry
def _read_password(prompt="[^] Password: "):
''' Read password from stdin '''
print("%s" % prompt, end='', flush=True)
return getpass.getpass()
def _read_passwords():
''' Read the two passwords from stdin '''
ok = False
while not ok:
value1 = _read_password(prompt="[^] Password to store: ")
value2 = _read_password(prompt="[^] Repeat password: ")
if value1 == value2 and len(value1) > 0:
ok = True
else:
print("[-] Passwords incorrects, try again")
return value1
def create_entry():
''' It creates an entry in the API '''
entry = Entry()
entry.icon = None
entry.name = read_string("[^] Entry name: ")
entry.username = read_string("[^] Username: ")
entry.group = _find_group()._id
entry.url = read_string("[^] Url: ")
aes = AESCipher(key=_read_password(prompt="[^] Master password: "))
entry.value = aes.encrypt(_read_passwords())
return entry.save()
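# Note: the plaintext password never reaches the API -- create_entry() stores
# only the AES-encrypted value, keyed by the master password typed here, and
# get_entry() below decrypts it again with the same master password.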
def delete_group():
''' It deletes a group from the eve api '''
name = read_string(prompt='[+] Group name: ')
group = Group.find_one(params={'where': {'name': name}})
status = False
if group is not None:
status = Group.delete(identifier=group._id, etag=group._etag)
if not status:
print("[-] Failed to delete group %s:'%s'" % (group._id, group.name))
else:
print("[-] Group by name '%s' was not found" % name)
return status
def delete_entry():
''' Delete a stored entry '''
entry = _find_entry()
print("[^] Deleting entry '%s'" % entry.name)
return Entry.delete(identifier=entry._id, etag=entry._etag)
def get_entry():
''' Retrieve entry password '''
entry = _find_entry()
aes = AESCipher(key=_read_password(prompt="[+] Master password"))
password = aes.decrypt(entry.value)
print("[^] Here is your entry: ")
print("[^]\t Name: %s" % entry.name)
print("[^]\t URL: %s" % entry.url)
print("[^]\t Username: %s" % entry.username)
print("[^]\t Password: %s" % password)
if entry.group is not None:
print("[^]\t Group: %s" % entry.group.name)
return True
| [
"sys.stdin.readline",
"getpass.getpass"
] | [((2287, 2304), 'getpass.getpass', 'getpass.getpass', ([], {}), '()\n', (2302, 2304), False, 'import getpass\n'), ((329, 349), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (347, 349), False, 'import sys\n')] |
import sys
import os
import time
import shutil
from glob import glob
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from pathlib import Path
import cv2
import numpy as np
import torch
import torch.backends.cudnn as cudnn
from models.experimental import attempt_load
from utils.datasets import LoadStreams, LoadImages
from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, \
increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER
from utils.plots import Annotator, colors
from utils.torch_utils import load_classifier, select_device, time_sync
from utils.augmentations import letterbox
### unused ("trash") imports kept so that pyinstaller bundles these packages ###
import seaborn
import yaml
import PIL
import scipy
import utils
import models
##########################################
dir = os.getcwd()
FILE = Path(__file__).absolute()
sys.path.append(FILE.parents[0].as_posix()) # add yolov5/ to path
IMG_FORMATS = ['bmp', 'jpg', 'jpeg', 'png', 'tif', 'tiff', 'dng', 'webp', 'mpo'] # acceptable image suffixes
VID_FORMATS = ['mov', 'avi', 'mp4', 'mpg', 'mpeg', 'm4v', 'wmv', 'mkv'] # acceptable video suffixes
FORMATS = IMG_FORMATS + VID_FORMATS
# class LoadImages:
# # YOLOv5 image/video dataloader, i.e. `python detect.py --source image.jpg/vid.mp4`
# def __init__(self, path, img_size=640, stride=32, auto=True):
# p = str(Path(path).resolve()) # os-agnostic absolute path
# if '*' in p:
# files = sorted(glob(p, recursive=True)) # glob
# elif os.path.isdir(p):
# files = sorted(glob(os.path.join(p, '*.*'))) # dir
# elif os.path.isfile(p):
# files = [p] # files
# else:
# raise Exception(f'ERROR: {p} does not exist')
#
# images = [x for x in files if x.split('.')[-1].lower() in IMG_FORMATS]
# videos = [x for x in files if x.split('.')[-1].lower() in VID_FORMATS]
# ni, nv = len(images), len(videos)
#
# self.img_size = img_size
# self.stride = stride
# self.files = images + videos
# self.nf = ni + nv # number of files
# self.video_flag = [False] * ni + [True] * nv
# self.mode = 'image'
# self.auto = auto
# if any(videos):
# self.new_video(videos[0]) # new video
# else:
# self.cap = None
# assert self.nf > 0, f'No images or videos found in {p}. ' \
# f'Supported formats are:\nimages: {IMG_FORMATS}\nvideos: {VID_FORMATS}'
#
# def __iter__(self):
# self.count = 0
# return self
#
# def __next__(self):
# if self.count == self.nf:
# raise StopIteration
# path = self.files[self.count]
#
# if self.video_flag[self.count]:
# # Read video
# self.mode = 'video'
# ret_val, img0 = self.cap.read()
# if not ret_val:
# self.count += 1
# self.cap.release()
# if self.count == self.nf: # last video
# raise StopIteration
# else:
# path = self.files[self.count]
# self.new_video(path)
# ret_val, img0 = self.cap.read()
#
# self.frame += 1
# s = f'video {self.count + 1}/{self.nf} ({self.frame}/{self.frames}) {path}: '
#
# else:
# # Read image
# self.count += 1
#
# # 변경부분
# ff = np.fromfile(path, np.uint8) #
# img0 = cv2.imdecode(ff, cv2.IMREAD_COLOR) #
# # img0 = cv2.imread(path) # BGR
#
# assert img0 is not None, f'Image Not Found {path}'
# s = f'image {self.count}/{self.nf} {path}: '
#
# # Padded resize
# img = letterbox(img0, self.img_size, stride=self.stride, auto=self.auto)[0]
#
# # Convert
# img = img.transpose((2, 0, 1))[::-1] # HWC to CHW, BGR to RGB
# img = np.ascontiguousarray(img)
#
# return path, img, img0, self.cap, s
#
# def new_video(self, path):
# self.frame = 0
# self.cap = cv2.VideoCapture(path)
# self.frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))
#
# def __len__(self):
# return self.nf # number of files
class DetThread(QThread):  # thread definition
    send_img = pyqtSignal(np.ndarray)  # processed-image signal
    send_raw = pyqtSignal(np.ndarray)  # original-image signal
    send_statistic = pyqtSignal(dict)  # detection-result signal
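    # Assumed signal contract, based only on the names/types above and on how
    # run() builds its per-frame data (the GUI side is outside this file):
    #   send_img       -> annotated frame (np.ndarray) for the result view
    #   send_raw       -> untouched source frame (np.ndarray)
    #   send_statistic -> {class_name: count} dict like statistic_dic in run()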
def __init__(self):
super(DetThread, self).__init__()
self.weights = './yolov5s.pt'
self.source = '0'
self.conf_thres = 0.25
@torch.no_grad() # detect.py
def run(self,
weights=dir + '/yolov5s.pt', # model.pt path(s)
source=dir + '/data/images', # file/dir/URL/glob, 0 for webcam
imgsz=640, # inference size (pixels)
conf_thres=0.25, # confidence threshold
iou_thres=0.45, # NMS IOU threshold
max_det=1000, # maximum detections per image
device='', # cuda device, i.e. 0 or 0,1,2,3 or cpu
view_img=False, # show results
save_txt=False, # save results to *.txt
save_conf=False, # save confidences in --save-txt labels
save_crop=False, # save cropped prediction boxes
nosave=False, # do not save images/videos
classes=None, # filter by class: --class 0, or --class 0 2 3
agnostic_nms=False, # class-agnostic NMS
augment=False, # augmented inference
visualize=False, # visualize features
update=False, # update all models
project=dir + '/runs/detect', # save results to project/name
name='exp', # save results to project/name
exist_ok=False, # existing project/name ok, do not increment
line_thickness=3, # bounding box thickness (pixels)
hide_labels=False, # hide labels
hide_conf=False, # hide confidences
half=False, # use FP16 half-precision inference
dnn=False, # use OpenCV DNN for ONNX inference
unknown=True,
):
source = str(source)
save_img = not nosave and not source.endswith('.txt') # save inference images
webcam = source.isnumeric() or source.endswith('.txt') or source.lower().startswith(
('rtsp://', 'rtmp://', 'http://', 'https://'))
# Directories
save_dir = increment_path(Path(project) / name, exist_ok=exist_ok) # increment run
(save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True) # make dir
# Initialize
device = select_device(device)
half &= device.type != 'cpu' # half precision only supported on CUDA
# Load model
w = str(weights[0] if isinstance(weights, list) else weights)
classify, suffix, suffixes = False, Path(w).suffix.lower(), ['.pt', '.onnx', '.tflite', '.pb', '']
check_suffix(w, suffixes) # check weights have acceptable suffix
pt, onnx, tflite, pb, saved_model = (suffix == x for x in suffixes) # backend booleans
stride, names = 64, [f'class{i}' for i in range(1000)] # assign defaults
if pt:
model = torch.jit.load(w) if 'torchscript' in w else attempt_load(weights, map_location=device)
stride = int(model.stride.max()) # model stride
names = model.module.names if hasattr(model, 'module') else model.names # get class names
if unknown:
names.append('Unknown')
if half:
model.half() # to FP16
if classify: # second-stage classifier
modelc = load_classifier(name='resnet50', n=2) # initialize
modelc.load_state_dict(torch.load('resnet50.pt', map_location=device)['model']).to(device).eval()
elif onnx:
if dnn:
check_requirements(('opencv-python>=4.5.4',))
net = cv2.dnn.readNetFromONNX(w)
else:
check_requirements(('onnx', 'onnxruntime-gpu' if torch.has_cuda else 'onnxruntime'))
import onnxruntime
session = onnxruntime.InferenceSession(w, None)
else: # TensorFlow models
check_requirements(('tensorflow>=2.4.1',))
import tensorflow as tf
if pb: # https://www.tensorflow.org/guide/migrate#a_graphpb_or_graphpbtxt
def wrap_frozen_graph(gd, inputs, outputs):
x = tf.compat.v1.wrap_function(lambda: tf.compat.v1.import_graph_def(gd, name=""),
[]) # wrapped import
return x.prune(tf.nest.map_structure(x.graph.as_graph_element, inputs),
tf.nest.map_structure(x.graph.as_graph_element, outputs))
graph_def = tf.Graph().as_graph_def()
graph_def.ParseFromString(open(w, 'rb').read())
frozen_func = wrap_frozen_graph(gd=graph_def, inputs="x:0", outputs="Identity:0")
elif saved_model:
model = tf.keras.models.load_model(w)
elif tflite:
interpreter = tf.lite.Interpreter(model_path=w) # load TFLite model
interpreter.allocate_tensors() # allocate
input_details = interpreter.get_input_details() # inputs
output_details = interpreter.get_output_details() # outputs
int8 = input_details[0]['dtype'] == np.uint8 # is TFLite quantized uint8 model
imgsz = check_img_size(imgsz, s=stride) # check image size
# Dataloader
if webcam:
view_img = check_imshow()
cudnn.benchmark = True # set True to speed up constant image size inference
dataset = LoadStreams(source, img_size=imgsz, stride=stride, auto=pt)
bs = len(dataset) # batch_size
else:
dataset = LoadImages(source, img_size=imgsz, stride=stride, auto=pt)
bs = 1 # batch_size
vid_path, vid_writer = [None] * bs, [None] * bs
# Run inference
if pt and device.type != 'cpu':
model(torch.zeros(1, 3, *imgsz).to(device).type_as(next(model.parameters()))) # run once
dt, seen = [0.0, 0.0, 0.0], 0
temp = ""
# for name in names[:-1]:
# exec(f'{name}_list = set([])')
goranilist = set([])
wildboarlist = set([])
humanlist = set([])
for path, img, im0s, vid_cap, s in dataset:
            if temp != path:  # new input file/stream; reset the per-file counters
# for name in names[:-1]:
# exec(f'{name}_cnt = 0')
cnt_gorani = 0
cnt_wildboar = 0
cnt_human = 0
temp = path
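            # Per-class detection counter for the current frame; emitted to the GUI via send_statistic below.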
statistic_dic = {name: 0 for name in names}
t1 = time_sync()
if onnx:
img = img.astype('float32')
else:
img = torch.from_numpy(img).to(device)
img = img.half() if half else img.float() # uint8 to fp16/32
img /= 255.0 # 0 - 255 to 0.0 - 1.0
if len(img.shape) == 3:
img = img[None] # expand for batch dim
t2 = time_sync()
dt[0] += t2 - t1
# Inference
if pt:
visualize = increment_path(save_dir / Path(path).stem, mkdir=True) if visualize else False
pred = model(img, augment=augment, visualize=visualize)[0]
elif onnx:
if dnn:
net.setInput(img)
pred = torch.tensor(net.forward())
else:
pred = torch.tensor(
session.run([session.get_outputs()[0].name], {session.get_inputs()[0].name: img}))
else: # tensorflow model (tflite, pb, saved_model)
imn = img.permute(0, 2, 3, 1).cpu().numpy() # image in numpy
if pb:
pred = frozen_func(x=tf.constant(imn)).numpy()
elif saved_model:
pred = model(imn, training=False).numpy()
elif tflite:
if int8:
scale, zero_point = input_details[0]['quantization']
imn = (imn / scale + zero_point).astype(np.uint8) # de-scale
interpreter.set_tensor(input_details[0]['index'], imn)
interpreter.invoke()
pred = interpreter.get_tensor(output_details[0]['index'])
if int8:
scale, zero_point = output_details[0]['quantization']
pred = (pred.astype(np.float32) - zero_point) * scale # re-scale
pred[..., 0] *= imgsz[1] # x
pred[..., 1] *= imgsz[0] # y
pred[..., 2] *= imgsz[1] # w
pred[..., 3] *= imgsz[0] # h
pred = torch.tensor(pred)
t3 = time_sync()
dt[1] += t3 - t2
# NMS
pred = non_max_suppression(pred, conf_thres, iou_thres, classes, agnostic_nms, max_det=max_det,
unknown=unknown)
dt[2] += time_sync() - t3
# Second-stage classifier (optional)
if classify:
pred = apply_classifier(pred, modelc, img, im0s)
# Process predictions
for i, det in enumerate(pred): # per image
seen += 1
if webcam: # batch_size >= 1
p, im0, frame = path[i], im0s[i].copy(), dataset.count
s += f'{i}: '
else:
p, im0, frame = path, im0s.copy(), getattr(dataset, 'frame', 0)
p = Path(p) # to Path
save_path = str(save_dir / p.name) # img.jpg
txt_path = str(save_dir / 'labels' / p.stem) + (
'' if dataset.mode == 'image' else f'_{frame}') # img.txt
s += '%gx%g ' % img.shape[2:] # print string
gn = torch.tensor(im0.shape)[[1, 0, 1, 0]] # normalization gain whwh
imc = im0.copy() if save_crop else im0 # for save_crop
annotator = Annotator(im0, line_width=line_thickness, example=str(names))
imr = im0.copy()
if len(det):
# Rescale boxes from img_size to im0 size
det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round()
# Print results
for c in det[:, -1].unique():
n = (det[:, -1] == c).sum() # detections per class
s += f"{n} {names[int(c)]}{'s' * (n > 1)}, " # add to string
# Write results
for *xyxy, conf, cls in reversed(det):
if save_txt: # Write to file
xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh
line = (cls, *xywh, conf) if save_conf else (cls, *xywh) # label format
with open(txt_path + '.txt', 'a') as f:
f.write(('%g ' * len(line)).rstrip() % line + '\n')
if save_img or save_crop or view_img: # Add bbox to image
c = int(cls) # integer class
statistic_dic[names[c]] += 1
label = None if hide_labels else (names[c] if hide_conf else f'{names[c]} {conf:.2f}')
annotator.box_label(xyxy, label, color=colors(c, True))
if save_crop:
save_one_box(xyxy, imc, file=save_dir / 'crops' / names[c] / f'{p.stem}.jpg', BGR=True)
# Print time (inference-only)
# LOGGER.info(f'{s}Done. ({t3 - t2:.3f}s)')
print(f'{s}Done. ({t3 - t2:.3f}s)')
# Stream results
im0 = annotator.result()
if view_img:
p = str(p).split('\\')[-1]
# img_array = np.fromfile(str(p), np.uint8) #한글명 디코딩
# img = cv2.imdecode(img_array, cv2.IMREAD_COLOR)
# cv2.imshow(p, im0)
# cv2.imshow('raw', imr)
# cv2.moveWindow(p, 50, 50)
cv2.waitKey(1) # 1 millisecond
# Save results (image with detections)
if save_img:
if dataset.mode == 'image':
cv2.imwrite(save_path, im0)
else: # 'video' or 'stream'
if vid_path[i] != save_path: # new video
vid_path[i] = save_path
if isinstance(vid_writer[i], cv2.VideoWriter):
vid_writer[i].release() # release previous video writer
if vid_cap: # video
fps = vid_cap.get(cv2.CAP_PROP_FPS)
w = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH))
h = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
else: # stream
fps, w, h = 30, im0.shape[1], im0.shape[0]
save_path += '.mp4'
vid_writer[i] = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h))
vid_writer[i].write(im0)
# time.sleep(0.025)
            self.send_img.emit(im0)  # processed (annotated) frame
            self.send_raw.emit(imr if isinstance(im0s, np.ndarray) else imr[0])  # original frame
            self.send_statistic.emit(statistic_dic)  # per-class detection counts
#
# # 파일분리 (하드코딩)
# if 'Wildboar' in s:
# cnt_wildboar = cnt_wildboar + 1
# elif 'Deer' in s:
# cnt_gorani = cnt_gorani + 1
# elif 'human' in s:
# cnt_human = cnt_human + 1
#
# # if not webcam:
# ext = path.split('.')[-1].lower() # 파일 확장자 분리
#
# if ext in FORMATS: # 사진 리스트 분리
# if 'Deer' in s:
# goranilist.add(path)
# elif 'Wildboar' in s:
# wildboarlist.add(path)
# elif 'human' in s:
# humanlist.add(path)
#
# # if ext in VID_FORMATS: # 동영상 리스트 분리
# # if cnt_gorani >= 1:
# # goranilist.add(path)
# # elif cnt_wildboar >= 1:
# # wildboarlist.add(path)
# # elif cnt_human >= 1:
# # humanlist.add(path)
# print(goranilist)
# print(wildboarlist)
#
# if len(wildboarlist) > 0: # 파일 옮기기
# to = './Wildboar'
# if not os.path.isdir(to):
# os.mkdir(to)
# for i in wildboarlist:
# try:
# shutil.move(i, to)
# except:
# pass
#
# if len(goranilist) > 0:
# to = './Deer'
# if not os.path.isdir(to):
# os.mkdir(to)
# for i in goranilist:
# try:
# shutil.move(i, to)
# except:
# pass
#
# if len(humanlist) > 0:
# to = './human'
# if not os.path.isdir(to):
# os.mkdir(to)
# for i in humanlist:
# shutil.move(i, to)
cv2.destroyAllWindows()
# Print results
t = tuple(x / seen * 1E3 for x in dt) # speeds per image
# LOGGER.info(f'Speed: %.1fms pre-process, %.1fms inference, %.1fms NMS per image at shape {(1, 3, *imgsz)}' % t)
if save_txt or save_img:
s = f"\n{len(list(save_dir.glob('labels/*.txt')))} labels saved to {save_dir / 'labels'}" if save_txt else ''
LOGGER.info(f"Results saved to {colorstr('bold', save_dir)}{s}")
if update:
strip_optimizer(weights) # update model (to fix SourceChangeWarning)
# if cnt_gorani > 0 and cnt_gorani > cnt_wildboar:
# from_ = save_path
# to_ = './gorani'
# shutil.move(from_, to_)
class MyApp(QMainWindow):  # main window definition
def __init__(self):
super().__init__()
self.initUI()
def initUI(self):
        self.setWindowTitle('Detect Yolov5')  # tool title
        self.setWindowIcon(QIcon('logo2.png'))  # logo image
        self.statusBar()  # status bar
        # connect the central QWidget
self.cent_widget = CentWidget()
self.setCentralWidget(self.cent_widget.tabs)
self.show()
class CentWidget(QWidget):  # central widget definition
def __init__(self):
super().__init__()
self.det_thread = DetThread()
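        # Wire the worker-thread signals: processed frame, raw frame, and per-class statistics.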
self.det_thread.send_img.connect(lambda x: self.show_image(x, self.img2))
self.det_thread.send_raw.connect(lambda x: self.show_image(x, self.img1))
self.det_thread.send_statistic.connect(self.show_statistic)
griddir = QGridLayout()
gridopt = QGridLayout()
font = QFont()
font.setBold(True)
font.setPointSize(10)
# weights : model.pt path(s)
        self.lbl_weight = QLabel(dir + '/last.pt', self)  # fixed default value
self.lbl_weight.setFont(font)
self.lbl_weight.setStyleSheet('background-color: #FFFFFF')
self.lbl_weight.setStatusTip('Set model.pt path .pt파일의 경로를 설정합니다.')
self.lbl_weight.setToolTip('Set model.pt path\n.pt파일의 경로를 설정합니다.')
btn_weight = QPushButton('Weights', self)
btn_weight.setFont(font)
btn_weight.setStatusTip('Set model.pt path .pt파일의 경로를 설정합니다.')
btn_weight.setToolTip('Set model.pt path\n.pt파일의 경로를 설정합니다.')
btn_weight.clicked.connect(self.weights)
# source : file/dir/URL/glob, 0 for webcam
# self.lbl_source = QLabel(dir, self)
        self.lbl_source = QLabel('D:/project/NationalPark_upgrade/PYQT5/yolov5_master/sample')  # fixed default value
self.lbl_source.setFont(font)
self.lbl_source.setStyleSheet('background-color: #FFFFFF')
self.lbl_source.setStatusTip("Set foldername to detect 분류/감지할 '폴더'를 설정합니다. *경로상에 한글이 들어가면 오류가 발생할 수 있음!")
self.lbl_source.setToolTip("Set foldername to detect\n분류/감지할 '폴더'를 설정합니다.\n*경로상에 한글이 들어가면 오류가 발생할 수 있음!")
btn_source = QPushButton('Source', self)
btn_source.setFont(font)
btn_source.setStatusTip("Set foldername to detect 분류/감지할 '폴더'를 설정합니다. *경로상에 한글이 들어가면 오류가 발생할 수 있음!")
btn_source.setToolTip("Set foldername to detect\n분류/감지할 '폴더'를 설정합니다.\n*경로상에 한글이 들어가면 오류가 발생할 수 있음!")
btn_source.clicked.connect(self.source)
# imgsz : inference size (pixels)
self.lbl_imgsz = QLabel(self)
self.lbl_imgsz.setNum(480)
self.lbl_imgsz.setFont(font)
self.lbl_imgsz.setStyleSheet('background-color: #FFFFFF')
self.lbl_imgsz.setStatusTip('Set inference size (pixels) 1~1280')
self.lbl_imgsz.setToolTip('Set inference size (pixels)\n1~1280')
btn_imgsz = QPushButton('Image Size', self)
btn_imgsz.setFont(font)
# menu = QMenu(self)
# menu.addAction('160')
# menu.addAction('320')
# menu.addAction('480')
# menu.addAction('640')
# menu.addAction('960')
# menu.addAction('1280')
# menu..connect(self.imgsz)
# cb = QComboBox(self)
btn_imgsz.setStatusTip('Set inference size (pixels) 1~1280')
btn_imgsz.setToolTip('Set inference size (pixels)\n1~1280')
# btn_imgsz.setMenu(menu)
# btn_imgsz.menu().connect(self.imgsz)
# menu.activeAction(self.imgsz)#.connect(self.imgsz)
btn_imgsz.clicked.connect(self.imgsz)
# conf_thres : confidence threshold
self.lbl_conf = QLabel(self)
self.lbl_conf.setText('70%')
self.setFont(font)
self.lbl_conf.setStyleSheet('background-color: #FFFFFF')
btn_conf = QPushButton('Conf-Thres', self)
btn_conf.setFont(font)
btn_conf.setStatusTip('Set confidence(%) threshold 1% ~ 99%')
btn_conf.setToolTip('Set confidence(%) threshold\n1% ~ 99%')
btn_conf.clicked.connect(self.conf)
# iou_thres : NMS IOU threshold
self.lbl_iou = QLabel(self)
self.lbl_iou.setText('25%')
self.setFont(font)
self.lbl_iou.setStyleSheet('background-color: #FFFFFF')
btn_iou = QPushButton('Iou-Thres', self)
btn_iou.setFont(font)
btn_iou.setStatusTip('NMS IOU(%) threshold 1% ~ 99%')
btn_iou.setToolTip('NMS IOU(%) threshold\n1% ~ 99%')
btn_iou.clicked.connect(self.iou)
# max_det : maximum detections per image
self.lbl_mxd = QLabel(self)
self.lbl_mxd.setNum(100)
self.setFont(font)
self.lbl_mxd.setStyleSheet('background-color: #FFFFFF')
btn_mxd = QPushButton('Max-Det', self)
btn_mxd.setFont(font)
btn_mxd.setStatusTip('maximum detections per image recommend set under 100')
btn_mxd.setToolTip('maximum detections per image\nrecommend set under 100')
btn_mxd.clicked.connect(self.det_num)
# project : save results to project/name
self.lbl_prj = QLabel(dir + '\\runs\\detect', self)
self.lbl_prj.setFont(font)
self.lbl_prj.setStyleSheet('background-color: #FFFFFF')
btn_prj = QPushButton('Project', self)
btn_prj.setFont(font)
btn_prj.setStatusTip('Save results to Project/Name')
btn_prj.setToolTip('Save results to Project/Name')
btn_prj.clicked.connect(self.project)
# name : save results to project/name
self.lbl_name = QLabel('exp', self)
self.lbl_name.setFont(font)
self.lbl_name.setStyleSheet('background-color: #FFFFFF')
btn_name = QPushButton('Name', self)
btn_name.setFont(font)
btn_name.setStatusTip('Save results to Project/Name')
btn_name.setToolTip('Save results to Project/Name')
btn_name.clicked.connect(self.name)
# line_thickness : bounding box thickness (pixels)
self.lbl_ltk = QLabel(self)
self.lbl_ltk.setNum(3)
self.setFont(font)
self.lbl_ltk.setStyleSheet('background-color: #FFFFFF')
btn_ltk = QPushButton('Thickness', self)
btn_ltk.setFont(font)
btn_ltk.setStatusTip('Bbox thickness (pixels) Bbox굵기(pixels)를 설정합니다.')
btn_ltk.setToolTip('Bbox thickness (pixels)\nBbox굵기(pixels)를 설정합니다.')
btn_ltk.clicked.connect(self.ltk)
btn_start = QPushButton('Start', self)
btn_start.setFont(font)
btn_start.clicked.connect(self.run)
self.chk_cam = QCheckBox('Webcam')
self.chk_cam.setFont(font)
self.chk_cam.setChecked(False)
self.chk_cam.toggled.connect(self.webcam)
self.lbl_rst = QLabel()
# self.lbl_rst.setEnabled(False)
# font = QFont()
# font.setFamily("Agency FB")
# font.setPointSize(11)
# font.setStyleStrategy(QFont.PreferDefault)
# self.lbl_rst.setFont(font)
# self.lbl_rst.setAcceptDrops(False)
# self.lbl_rst.setAutoFillBackground(False)
self.lbl_rst.setText('인식결과물')
self.lbl_dict = QListWidget()
# font = QFont()
# font.setPointSize(9)
# self.lbl_dict.setFont(font)
# self.lbl_dict.setStyleSheet("background:transparent")
# self.lbl_dict.setFrameShadow(QFrame.Plain)
# self.lbl_dict.setProperty("showDropIndicator", True)
# self.lbl_dict.setObjectName("listWidget")
self.lbl_raw = QLabel()
self.lbl_raw.setText('원본이미지')
self.lbl_prc = QLabel()
self.lbl_prc.setText('처리이미지')
self.img1 = QLabel()
self.img2 = QLabel()
self.sliconf = QSlider(Qt.Horizontal, self)
self.sliconf.setRange(1, 99)
self.sliconf.setSingleStep(1)
self.sliconf.setValue(45)
self.sliconf.valueChanged.connect(self.conf_chg)
self.sliiou = QSlider(Qt.Horizontal, self)
self.sliiou.setRange(1, 99)
self.sliiou.setSingleStep(1)
self.sliiou.setValue(25)
self.sliiou.valueChanged.connect(self.iou_chg)
# girddir
griddir.addWidget(btn_weight, 0, 0, 1, 10)
griddir.addWidget(btn_source, 1, 0, 1, 10)
griddir.addWidget(btn_prj, 2, 0, 1, 10)
griddir.addWidget(btn_name, 3, 0, 1, 10)
griddir.addWidget(self.lbl_weight, 0, 10, 1, 10)
griddir.addWidget(self.lbl_source, 1, 10, 1, 10)
griddir.addWidget(self.chk_cam, 1, 20, 1, 2)
griddir.addWidget(self.lbl_prj, 2, 10, 1, 10)
griddir.addWidget(self.lbl_name, 3, 10, 1, 10)
griddir.addWidget(self.img1, 5, 0, 10, 10)
griddir.addWidget(self.img2, 5, 10, 10, 10)
griddir.addWidget(self.lbl_raw, 4, 0, 1, 10)
griddir.addWidget(self.lbl_prc, 4, 10, 1, 10)
griddir.addWidget(self.lbl_rst, 4, 20, 1, 2)
griddir.addWidget(self.lbl_dict, 5, 20, 10, 2)
        griddir.addWidget(btn_start, 15, 8, 1, 6)  # image distortion appeared after moving this into the Directory tab
# gridopt
gridopt.addWidget(btn_imgsz, 0, 0, 1, 10)
gridopt.addWidget(self.lbl_imgsz, 0, 10, 1, 3)
gridopt.addWidget(btn_conf, 1, 0, 1, 10)
gridopt.addWidget(self.lbl_conf, 1, 10, 1, 3)
gridopt.addWidget(self.sliconf, 1, 13, 1, 17)
gridopt.addWidget(btn_iou, 2, 0, 1, 10)
gridopt.addWidget(self.lbl_iou, 2, 10, 1, 3)
gridopt.addWidget(self.sliiou, 2, 13, 1, 17)
gridopt.addWidget(btn_mxd, 3, 0, 1, 10)
gridopt.addWidget(self.lbl_mxd, 3, 10, 1, 3)
gridopt.addWidget(btn_ltk, 4, 0, 1, 10)
gridopt.addWidget(self.lbl_ltk, 4, 10, 1, 3)
gridopt.addWidget(self.saveoptions(), 5, 0, 10, 10)
gridopt.addWidget(self.visualize(), 5, 10, 10, 10)
gridopt.addWidget(self.category(), 5, 20, 10, 10)
directory = QWidget()
directory.setLayout(griddir)
options = QWidget()
options.setLayout(gridopt)
self.tabs = QTabWidget(self)
self.tabs.addTab(directory, 'Directory')
self.tabs.addTab(options, 'Options')
def saveoptions(self):
groupbox = QGroupBox('Save Options')
vbox = QVBoxLayout()
vbox.addWidget(self.save())
vbox.addWidget(self.savetxt())
self.chk_savecrop = QCheckBox('Save Crop')
self.chk_savecrop.setStatusTip('If check, save cropped prediction boxes 체크시 프레임별 예측된 bbox의 사진을 카테고리별로 저장합니다')
self.chk_savecrop.setToolTip('If check, save cropped prediction boxes\n체크시 프레임별 예측된 bbox의 사진을 카테고리별로 저장합니다')
vbox.addWidget(self.chk_savecrop)
groupbox.setLayout(vbox)
return groupbox
def save(self):
self.groupbox1 = QGroupBox('Save')
self.groupbox1.setCheckable(True)
self.groupbox1.setChecked(False)
self.groupbox1.setStatusTip('If check off, do not save images/videos 체크해제시 처리된 이미지나 동영상을 저장하지 않습니다.')
self.groupbox1.setToolTip('If check off, do not save images/videos\n체크해제시 처리된 이미지나 동영상을 저장하지 않습니다.')
vbox = QVBoxLayout()
self.chk_exok = QCheckBox('Exist Ok')
self.chk_exok.setStatusTip(
'If check, existing project/name ok, do not increment 체크시 디렉토리를 추가로 생성하지 않습니다. 처리된 영상/이미지의 파일명이 같다면 기존파일을 덮어씁니다.')
self.chk_exok.setToolTip(
'If check, existing project/name ok, do not increment\n체크시 디렉토리를 추가로 생성하지 않습니다.\n처리된 영상/이미지의 파일명이 같다면 기존파일을 덮어씁니다.')
self.groupbox1.toggled.connect(self.signal1)
vbox.addWidget(self.chk_exok)
self.groupbox1.setLayout(vbox)
return self.groupbox1
def savetxt(self):
self.groupbox2 = QGroupBox('Save txt')
self.groupbox2.setCheckable(True)
self.groupbox2.setChecked(False)
self.groupbox2.setStatusTip('If check, save results to *.txt 체크해제시 인식 결과(카테고리, bbox좌표)를 txt파일로 저장합니다.')
self.groupbox2.setToolTip('If check, save results to *.txt\n체크해제시 인식 결과(카테고리, bbox좌표)를 txt파일로 저장합니다.')
vbox = QVBoxLayout()
self.chk_saveconf = QCheckBox('Save Conf')
self.chk_saveconf.setStatusTip('If check, save confidences in --save-txt labels 체크시 신뢰도값을 txt파일에 추가합니다.')
self.chk_saveconf.setToolTip('If check, save confidences in --save-txt labels\n체크시 신뢰도값을 txt파일에 추가합니다.')
self.groupbox2.toggled.connect(self.signal2)
vbox.addWidget(self.chk_saveconf)
self.groupbox2.setLayout(vbox)
return self.groupbox2
# def developermode(self): # 개발자모드
# self.groupbox3 = QGroupBox('Developer Options')
# self.groupbox3.setCheckable(True)
# self.groupbox3.setChecked(False)
# vbox = QVBoxLayout()
# chk1 = QCheckBox('Agnostic_nms')
# vbox.addWidget(chk1)
# chk2 = QCheckBox('Augment')
# vbox.addWidget(chk2)
# chk3 = QCheckBox('Update')
# vbox.addWidget(chk3)
# chk4 = QCheckBox('Half')
# vbox.addWidget(chk4)
# chk5 = QCheckBox('Dnn')
# vbox.addWidget(chk5)
# chk6 = QCheckBox('Visualize')
# vbox.addWidget(chk6)
# chk1.setStatusTip('If check, class-agnostic NMS')
# chk1.setToolTip('If check, class-agnostic NMS')
# chk2.setStatusTip('If check, augmented inference')
# chk2.setToolTip('If check, augmented inference')
# chk3.setStatusTip('If check, update all models')
# chk3.setToolTip('If check, update all models')
# chk4.setStatusTip('If check, use FP16 haself.lf-precision inference')
# chk4.setToolTip('If check, use FP16 half-precision inference')
# chk5.setStatusTip('If check, use OpenCV DNN for ONNX inference')
# chk5.setToolTip('If check, use OpenCV DNN for ONNX inference')
# chk6.setStatusTip('If check, visualize features')
# chk6.setToolTip('If check, visualize features')
# self.groupbox3.toggled.connect(self.signal3)
# self.groupbox3.setLayout(vbox)
# return self.groupbox3
def category(self):
gbx = QGroupBox('Category Filter')
gbx.setStatusTip('If check off, do not classify specific animal 체크 해제시 해당 동물을 분류하지 않습니다.')
gbx.setToolTip('If check off, do not classify specific animal\n체크 해제시 해당 동물을 분류하지 않습니다.')
self.chkcat1 = QCheckBox('WildBoar')
self.chkcat1.setStatusTip('멧돼지')
self.chkcat1.setToolTip('멧돼지')
self.chkcat1.setChecked(True)
self.chkcat2 = QCheckBox('WaterDeer')
self.chkcat2.setStatusTip('고라니')
self.chkcat2.setToolTip('고라니')
self.chkcat2.setChecked(True)
self.chkcat3 = QCheckBox('HalfMoonBear')
self.chkcat3.setStatusTip('반달가슴곰')
self.chkcat3.setToolTip('반달가슴곰')
self.chkcat3.setChecked(True)
self.chkcat4 = QCheckBox('Goral')
self.chkcat4.setStatusTip('산양')
self.chkcat4.setToolTip('산양')
self.chkcat4.setChecked(True)
vbox = QVBoxLayout()
vbox.addWidget(self.chkcat1)
vbox.addWidget(self.chkcat2)
vbox.addWidget(self.chkcat3)
vbox.addWidget(self.chkcat4)
gbx.setLayout(vbox)
return gbx
def visualize(self):
gbx = QGroupBox('Visualize Options')
self.chk_view = QCheckBox('View Image')
self.chk_view.setChecked(True)
self.chk_view.setStatusTip('If check, show results 체크시 영상을 직접 화면에 띄웁니다.')
self.chk_view.setToolTip('If check, show results\n체크시 영상을 직접 화면에 띄웁니다.')
self.chk_hlbl = QCheckBox('Hide Labels')
self.chk_hlbl.setStatusTip('If check, hide labels 체크시 처리된 영상이 분류된 종 이름을 띄우지 않습니다.')
self.chk_hlbl.setToolTip('If check, hide labels\n 체크시 처리된 영상이 분류된 종 이름을 띄우지 않습니다.')
self.chk_hconf = QCheckBox('Hide Conf')
self.chk_hconf.setStatusTip('If check, hide confidences 체크시 처리된 영상이 confidence값을 띄우지 않습니다.')
self.chk_hconf.setToolTip('If check, hide confidences\n체크시 처리된 영상이 confidence값을 띄우지 않습니다.')
vbox = QVBoxLayout()
vbox.addWidget(self.chk_view)
vbox.addWidget(self.chk_hlbl)
vbox.addWidget(self.chk_hconf)
gbx.setLayout(vbox)
return gbx
def weights(self):
fname = QFileDialog.getOpenFileName(self, 'Weights', filter='*.pt')
self.lbl_weight.setText(str(fname[0]))
self.lbl_weight.adjustSize()
def source(self):
fname = QFileDialog.getExistingDirectory(self, 'Source')
self.lbl_source.setText(str(fname))
self.lbl_source.adjustSize()
def webcam(self):
if self.chk_cam.isChecked():
self.lbl_source.setNum(0)
def imgsz(self, num):
# self.lbl_imgsz.setNum(int(num))
# self.lbl_imgsz.adjustSize()
dlg = QInputDialog(self)
dlg.setInputMode(QInputDialog.IntInput)
dlg.setWindowTitle('Image Size')
dlg.setLabelText("inference size (pixels)\n1~1280")
dlg.resize(500, 100)
dlg.setIntRange(1, 1280)
dlg.setIntValue(int(self.lbl_imgsz.text()))
ok = dlg.exec_()
num = dlg.intValue()
if ok:
self.lbl_imgsz.setNum(num)
self.lbl_imgsz.adjustSize()
def conf(self):
dlg = QInputDialog(self)
dlg.setInputMode(QInputDialog.IntInput)
dlg.setWindowTitle('Conf Thres')
dlg.setLabelText("confidence(%) threshold\n1% ~ 99%")
dlg.resize(500, 100)
dlg.setIntRange(1, 99)
dlg.setIntValue(int(self.lbl_conf.text()[:-1]))
ok = dlg.exec_()
num = dlg.intValue()
if ok:
self.lbl_conf.setText(str(num) + '%')
self.lbl_conf.adjustSize()
self.sliconf.setValue(num)
def conf_chg(self):
self.lbl_conf.setText(str(self.sliconf.value()) + '%')
def iou(self):
dlg = QInputDialog(self)
dlg.setInputMode(QInputDialog.IntInput)
dlg.setWindowTitle('Iou Thres')
dlg.setLabelText("NMS IOU(%) threshold\n1%~99%")
dlg.resize(500, 100)
dlg.setIntRange(1, 99)
dlg.setIntValue(int(self.lbl_iou.text()[:-1]))
ok = dlg.exec_()
num = dlg.intValue()
if ok:
self.lbl_iou.setText(str(num) + '%')
self.lbl_iou.adjustSize()
self.sliiou.setValue(num)
def iou_chg(self):
self.lbl_iou.setText(str(self.sliiou.value()) + '%')
def det_num(self):
dlg = QInputDialog(self)
dlg.setInputMode(QInputDialog.IntInput)
dlg.setWindowTitle('Max detection')
dlg.setLabelText("maximum detections per image\n1~9999\nrecommend set under 100")
dlg.resize(500, 100)
dlg.setIntRange(1, 9999)
dlg.setIntValue(int(self.lbl_mxd.text()))
ok = dlg.exec_()
num = dlg.intValue()
if ok:
self.lbl_mxd.setNum(num)
self.lbl_mxd.adjustSize()
def project(self):
fname = QFileDialog.getExistingDirectory(self, 'Project')
self.lbl_prj.setText(str(fname))
self.lbl_prj.adjustSize()
def name(self):
dlg = QInputDialog(self)
dlg.setInputMode(QInputDialog.TextInput)
dlg.setWindowTitle('Name')
dlg.setLabelText(
f"save results to Project/Name\n저장할 폴더명\n현재 경로 : {self.lbl_prj.text()}\\{self.lbl_name.text()}")
dlg.setTextValue(self.lbl_name.text())
dlg.resize(700, 100)
ok = dlg.exec_()
text = dlg.textValue()
if ok:
self.lbl_name.setText(str(text))
self.lbl_name.adjustSize()
def ltk(self):
dlg = QInputDialog(self)
dlg.setInputMode(QInputDialog.IntInput)
dlg.setWindowTitle('Line Thickness')
dlg.setLabelText("Bbox Line Thickness (Pixels)\n1~10")
dlg.resize(500, 100)
dlg.setIntRange(1, 10)
dlg.setIntValue(int(self.lbl_ltk.text()))
ok = dlg.exec_()
num = dlg.intValue()
if ok:
self.lbl_ltk.setNum(num)
self.lbl_ltk.adjustSize()
def signal1(self):
if not self.groupbox1.isChecked():
self.chk_exok.setCheckState(False)
def signal2(self):
if not self.groupbox2.isChecked():
self.chk_saveconf.setCheckState(False)
# def signal3(self): # password
# if self.groupbox3.isChecked():
# dlg = QInputDialog(self)
# dlg.setInputMode(QInputDialog.TextInput)
# dlg.setWindowTitle('Enter Password')
# dlg.setLabelText("Made by BigleaderTeam")
# dlg.resize(700, 100)
# ok = dlg.exec_()
# text = dlg.textValue()
# if ok:
# if not str(text) == 'rlacksdlf':
# self.groupbox3.setChecked(False)
# else:
# self.groupbox3.setChecked(False)
def run(self):
li = []
if self.chkcat1.isChecked():
li.append(1)
if self.chkcat2.isChecked():
li.append(2)
if self.chkcat3.isChecked():
li.append(3)
if self.chkcat4.isChecked():
li.append(4)
self.det_thread.run(
weights=self.lbl_weight.text(),
source=self.lbl_source.text(),
imgsz=[int(self.lbl_imgsz.text()), int(self.lbl_imgsz.text())],
conf_thres=float(int(self.lbl_conf.text()[:-1]) / 100),
iou_thres=float(int(self.lbl_iou.text()[:-1]) / 100),
max_det=int(self.lbl_mxd.text()),
device='',
view_img=self.chk_view.isChecked(),
save_txt=self.groupbox2.isChecked(),
save_conf=self.chk_saveconf.isChecked(),
save_crop=self.chk_savecrop.isChecked(),
nosave=not self.groupbox1.isChecked(),
classes=None, # if len(li) == 0 else li,
agnostic_nms=False,
augment=False,
visualize=False,
update=False,
project=self.lbl_prj.text(),
name=self.lbl_name.text(),
exist_ok=self.chk_exok.isChecked(),
line_thickness=int(self.lbl_ltk.text()),
hide_labels=self.chk_hlbl.isChecked(),
hide_conf=self.chk_hconf.isChecked(),
half=False,
dnn=False
)
@staticmethod
def show_image(img_src, label):
try:
ih, iw, _ = img_src.shape
w = label.geometry().width()
h = label.geometry().height()
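            # Scale the frame to fit the label while preserving its aspect ratio.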
if iw > ih:
scal = w / iw
nw = w
nh = int(scal * ih)
img_src_ = cv2.resize(img_src, (nw, nh))
else:
scal = h / ih
nw = int(scal * iw)
nh = h
img_src_ = cv2.resize(img_src, (nw, nh))
frame = cv2.cvtColor(img_src_, cv2.COLOR_BGR2RGB)
img = QImage(frame.data, frame.shape[1], frame.shape[0], frame.shape[2] * frame.shape[1],
QImage.Format_RGB888)
label.setPixmap(QPixmap.fromImage(img))
except Exception as e:
print(repr(e))
def show_statistic(self, statistic_dic):
try:
self.lbl_dict.clear()
statistic_dic = sorted(statistic_dic.items(), key=lambda x: x[1], reverse=True)
statistic_dic = [i for i in statistic_dic if i[1] > 0]
results = [str(i[0]) + ':' + str(i[1]) for i in statistic_dic]
self.lbl_dict.addItems(results)
except Exception as e:
print(repr(e))
# class GroupBox(QGroupBox): # reverse gbx
# def paintEvent(self, event):
# painter = QStylePainter(self)
# option = QStyleOptionGroupBox()
# self.initStyleOption(option)
# if self.isCheckable():
# option.state &= ~QStyle.State_Off & ~QStyle.State_On
# option.state |= (
# QStyle.State_Off
# if self.isChecked()
# else QStyle.State_On
# )
# painter.drawComplexControl(QStyle.CC_GroupBox, option)
if __name__ == '__main__':
app = QApplication(sys.argv)
screen = app.primaryScreen()
size = screen.size()
w = size.width()
h = size.height()
ex = MyApp()
ex.setGeometry(int(0.1 * w), int(0.1 * h), int(0.8 * w), int(0.8 * h))
ex.showMaximized()
sys.exit(app.exec_())
| [
"utils.general.check_img_size",
"utils.general.save_one_box",
"torch.from_numpy",
"cv2.destroyAllWindows",
"utils.datasets.LoadStreams",
"utils.torch_utils.load_classifier",
"tensorflow.keras.models.load_model",
"utils.general.colorstr",
"tensorflow.lite.Interpreter",
"tensorflow.Graph",
"pathlib.Path",
"utils.general.apply_classifier",
"torch.jit.load",
"tensorflow.compat.v1.import_graph_def",
"utils.general.strip_optimizer",
"utils.torch_utils.time_sync",
"cv2.dnn.readNetFromONNX",
"utils.general.scale_coords",
"cv2.VideoWriter_fourcc",
"cv2.waitKey",
"utils.general.check_suffix",
"utils.plots.colors",
"cv2.cvtColor",
"cv2.resize",
"tensorflow.nest.map_structure",
"cv2.imwrite",
"utils.general.check_imshow",
"models.experimental.attempt_load",
"utils.torch_utils.select_device",
"torch.load",
"onnxruntime.InferenceSession",
"utils.datasets.LoadImages",
"os.getcwd",
"torch.tensor",
"utils.general.non_max_suppression",
"tensorflow.constant",
"torch.no_grad",
"torch.zeros",
"utils.general.check_requirements"
] | [((944, 955), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (953, 955), False, 'import os\n'), ((4905, 4920), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4918, 4920), False, 'import torch\n'), ((964, 978), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (968, 978), False, 'from pathlib import Path\n'), ((7015, 7036), 'utils.torch_utils.select_device', 'select_device', (['device'], {}), '(device)\n', (7028, 7036), False, 'from utils.torch_utils import load_classifier, select_device, time_sync\n'), ((7328, 7353), 'utils.general.check_suffix', 'check_suffix', (['w', 'suffixes'], {}), '(w, suffixes)\n', (7340, 7353), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((10005, 10036), 'utils.general.check_img_size', 'check_img_size', (['imgsz'], {'s': 'stride'}), '(imgsz, s=stride)\n', (10019, 10036), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((20452, 20475), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (20473, 20475), False, 'import cv2\n'), ((10125, 10139), 'utils.general.check_imshow', 'check_imshow', ([], {}), '()\n', (10137, 10139), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((10253, 10312), 'utils.datasets.LoadStreams', 'LoadStreams', (['source'], {'img_size': 'imgsz', 'stride': 'stride', 'auto': 'pt'}), '(source, img_size=imgsz, stride=stride, auto=pt)\n', (10264, 10312), False, 'from utils.datasets import LoadStreams, LoadImages\n'), ((10396, 10454), 'utils.datasets.LoadImages', 'LoadImages', (['source'], {'img_size': 'imgsz', 'stride': 'stride', 'auto': 'pt'}), '(source, img_size=imgsz, stride=stride, auto=pt)\n', (10406, 10454), False, 'from utils.datasets import LoadStreams, LoadImages\n'), ((11323, 11334), 'utils.torch_utils.time_sync', 'time_sync', ([], {}), '()\n', (11332, 11334), False, 'from utils.torch_utils import load_classifier, select_device, time_sync\n'), ((11718, 11729), 'utils.torch_utils.time_sync', 'time_sync', ([], {}), '()\n', (11727, 11729), False, 'from utils.torch_utils import load_classifier, select_device, time_sync\n'), ((13513, 13524), 'utils.torch_utils.time_sync', 'time_sync', ([], {}), '()\n', (13522, 13524), False, 'from utils.torch_utils import load_classifier, select_device, time_sync\n'), ((13596, 13705), 'utils.general.non_max_suppression', 'non_max_suppression', (['pred', 'conf_thres', 'iou_thres', 'classes', 'agnostic_nms'], {'max_det': 'max_det', 'unknown': 'unknown'}), '(pred, conf_thres, iou_thres, classes, agnostic_nms,\n max_det=max_det, unknown=unknown)\n', (13615, 13705), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((20959, 20983), 'utils.general.strip_optimizer', 'strip_optimizer', (['weights'], {}), '(weights)\n', (20974, 20983), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, 
check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((44335, 44376), 'cv2.cvtColor', 'cv2.cvtColor', (['img_src_', 'cv2.COLOR_BGR2RGB'], {}), '(img_src_, cv2.COLOR_BGR2RGB)\n', (44347, 44376), False, 'import cv2\n'), ((6811, 6824), 'pathlib.Path', 'Path', (['project'], {}), '(project)\n', (6815, 6824), False, 'from pathlib import Path\n'), ((7611, 7628), 'torch.jit.load', 'torch.jit.load', (['w'], {}), '(w)\n', (7625, 7628), False, 'import torch\n'), ((7656, 7698), 'models.experimental.attempt_load', 'attempt_load', (['weights'], {'map_location': 'device'}), '(weights, map_location=device)\n', (7668, 7698), False, 'from models.experimental import attempt_load\n'), ((8073, 8110), 'utils.torch_utils.load_classifier', 'load_classifier', ([], {'name': '"""resnet50"""', 'n': '(2)'}), "(name='resnet50', n=2)\n", (8088, 8110), False, 'from utils.torch_utils import load_classifier, select_device, time_sync\n'), ((8665, 8707), 'utils.general.check_requirements', 'check_requirements', (["('tensorflow>=2.4.1',)"], {}), "(('tensorflow>=2.4.1',))\n", (8683, 8707), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((13764, 13775), 'utils.torch_utils.time_sync', 'time_sync', ([], {}), '()\n', (13773, 13775), False, 'from utils.torch_utils import load_classifier, select_device, time_sync\n'), ((13883, 13924), 'utils.general.apply_classifier', 'apply_classifier', (['pred', 'modelc', 'img', 'im0s'], {}), '(pred, modelc, img, im0s)\n', (13899, 13924), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((14335, 14342), 'pathlib.Path', 'Path', (['p'], {}), '(p)\n', (14339, 14342), False, 'from pathlib import Path\n'), ((44111, 44140), 'cv2.resize', 'cv2.resize', (['img_src', '(nw, nh)'], {}), '(img_src, (nw, nh))\n', (44121, 44140), False, 'import cv2\n'), ((44282, 44311), 'cv2.resize', 'cv2.resize', (['img_src', '(nw, nh)'], {}), '(img_src, (nw, nh))\n', (44292, 44311), False, 'import cv2\n'), ((8298, 8343), 'utils.general.check_requirements', 'check_requirements', (["('opencv-python>=4.5.4',)"], {}), "(('opencv-python>=4.5.4',))\n", (8316, 8343), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((8367, 8393), 'cv2.dnn.readNetFromONNX', 'cv2.dnn.readNetFromONNX', (['w'], {}), '(w)\n', (8390, 8393), False, 'import cv2\n'), ((8430, 8518), 'utils.general.check_requirements', 'check_requirements', (["('onnx', 'onnxruntime-gpu' if torch.has_cuda else 'onnxruntime')"], {}), "(('onnx', 'onnxruntime-gpu' if torch.has_cuda else\n 'onnxruntime'))\n", (8448, 8518), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((8578, 8615), 'onnxruntime.InferenceSession', 'onnxruntime.InferenceSession', (['w', 'None'], {}), '(w, None)\n', (8606, 8615), False, 'import 
onnxruntime\n'), ((13476, 13494), 'torch.tensor', 'torch.tensor', (['pred'], {}), '(pred)\n', (13488, 13494), False, 'import torch\n'), ((14648, 14671), 'torch.tensor', 'torch.tensor', (['im0.shape'], {}), '(im0.shape)\n', (14660, 14671), False, 'import torch\n'), ((17073, 17087), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (17084, 17087), False, 'import cv2\n'), ((7256, 7263), 'pathlib.Path', 'Path', (['w'], {}), '(w)\n', (7260, 7263), False, 'from pathlib import Path\n'), ((9536, 9565), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['w'], {}), '(w)\n', (9562, 9565), True, 'import tensorflow as tf\n'), ((11444, 11465), 'torch.from_numpy', 'torch.from_numpy', (['img'], {}), '(img)\n', (11460, 11465), False, 'import torch\n'), ((17267, 17294), 'cv2.imwrite', 'cv2.imwrite', (['save_path', 'im0'], {}), '(save_path, im0)\n', (17278, 17294), False, 'import cv2\n'), ((20893, 20919), 'utils.general.colorstr', 'colorstr', (['"""bold"""', 'save_dir'], {}), "('bold', save_dir)\n", (20901, 20919), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((9108, 9163), 'tensorflow.nest.map_structure', 'tf.nest.map_structure', (['x.graph.as_graph_element', 'inputs'], {}), '(x.graph.as_graph_element, inputs)\n', (9129, 9163), True, 'import tensorflow as tf\n'), ((9201, 9257), 'tensorflow.nest.map_structure', 'tf.nest.map_structure', (['x.graph.as_graph_element', 'outputs'], {}), '(x.graph.as_graph_element, outputs)\n', (9222, 9257), True, 'import tensorflow as tf\n'), ((9290, 9300), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (9298, 9300), True, 'import tensorflow as tf\n'), ((9623, 9656), 'tensorflow.lite.Interpreter', 'tf.lite.Interpreter', ([], {'model_path': 'w'}), '(model_path=w)\n', (9642, 9656), True, 'import tensorflow as tf\n'), ((15038, 15088), 'utils.general.scale_coords', 'scale_coords', (['img.shape[2:]', 'det[:, :4]', 'im0.shape'], {}), '(img.shape[2:], det[:, :4], im0.shape)\n', (15050, 15088), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((8954, 8996), 'tensorflow.compat.v1.import_graph_def', 'tf.compat.v1.import_graph_def', (['gd'], {'name': '""""""'}), "(gd, name='')\n", (8983, 8996), True, 'import tensorflow as tf\n'), ((10633, 10658), 'torch.zeros', 'torch.zeros', (['(1)', '(3)', '*imgsz'], {}), '(1, 3, *imgsz)\n', (10644, 10658), False, 'import torch\n'), ((11862, 11872), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (11866, 11872), False, 'from pathlib import Path\n'), ((16362, 16453), 'utils.general.save_one_box', 'save_one_box', (['xyxy', 'imc'], {'file': "(save_dir / 'crops' / names[c] / f'{p.stem}.jpg')", 'BGR': '(True)'}), "(xyxy, imc, file=save_dir / 'crops' / names[c] /\n f'{p.stem}.jpg', BGR=True)\n", (16374, 16453), False, 'from utils.general import apply_classifier, check_img_size, check_imshow, check_requirements, check_suffix, colorstr, increment_path, non_max_suppression, print_args, save_one_box, scale_coords, strip_optimizer, xyxy2xywh, LOGGER\n'), ((18157, 18188), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'mp4v'"], {}), "(*'mp4v')\n", (18179, 18188), False, 'import cv2\n'), ((16269, 16284), 'utils.plots.colors', 'colors', (['c', 
'(True)'], {}), '(c, True)\n', (16275, 16284), False, 'from utils.plots import Annotator, colors\n'), ((8165, 8211), 'torch.load', 'torch.load', (['"""resnet50.pt"""'], {'map_location': 'device'}), "('resnet50.pt', map_location=device)\n", (8175, 8211), False, 'import torch\n'), ((12518, 12534), 'tensorflow.constant', 'tf.constant', (['imn'], {}), '(imn)\n', (12529, 12534), True, 'import tensorflow as tf\n'), ((15552, 15570), 'torch.tensor', 'torch.tensor', (['xyxy'], {}), '(xyxy)\n', (15564, 15570), False, 'import torch\n')] |
"""
Abero
(c) 2020 <NAME>.
File analysis tool
"""
import os
import re
import sys
import zipfile
from statistics import mean
from itertools import groupby
from arkivist import Arkivist
def analyze(directory, extension="txt", threshold=80, template=None, skipnames=0, group=0, unzip=0, convert=0, reset=0):
if not check_path(directory):
print(f"\nAberoError: The directory was not found: {directory}")
sys.exit(0)
if not check_path(f"{directory}/abero"):
os.makedirs(f"{directory}/abero")
if not isinstance(directory, str):
print("\nAberoError: 'directory' parameter must be a string.")
return
if not isinstance(extension, str):
print("\nAberoWarning: 'extension' parameter must be a string.")
text_based = -1
extensions_text = ["html", "xml", "css", "svg", "json", "c", "cpp", "h", "cs", "js", "py",
"java", "rb", "pl", "php", "sh", "txt", "tex", "markdown", "asciidoc",
"rtf", "ps", "ini", "cfg", "rc", "reg", "csv", "tsv"]
extensions_binary = ["jpg", "png", "gif", "bmp", "tiff", "psd", "mp4", "mkv", "avi", "mov",
"mpg", "vob", "mp3", "aac", "wav", "flac", "ogg", "mka", "wma", "pdf", "doc",
"xls", "ppt", "docx", "odt", "zip", "rar", "7z", "tar", "iso", "mdb", "accde",
"frm", "sqlite", "exe", "dll", "so", "class"]
if extension in extensions_text:
text_based = 1
elif extension in extensions_binary:
text_based = 0
print(f"AberoWarning: The `{extension}` is a binary file, a separate analyzer will be used.")
else:
print(f"AberoWarning: The `{extension}` is currently unsupported for analysis," \
"errors might be encountered during execution.")
threshold = validate(threshold, 1, 100, 80)
skipnames = validate(skipnames, 0, 1, 0)
group = validate(group, 0, 1, 0)
unzip = validate(unzip, 0, 1, 0)
convert = validate(convert, 0, 1, 0)
reset = validate(reset, 0, 1, 0)
template_filename = ""
if isinstance(template, str):
if not check_path(template):
print("\nAberoWarning: Template file was not found.")
template = None
if unzip == 1:
print("\nUnzipping files:")
for filename in get_filenames(directory, "zip"):
print(f" - {filename}")
extract(f"{directory}/{filename}", f"{directory}")
if convert == 1:
print("\nConverting notebooks:")
for filename in get_filenames(directory, "ipynb"):
print(f" - {filename}")
ipynb2py(f"{directory}/{filename}")
print("\nAnalyzing files...")
prev = ""
if template is not None:
template_filename = template.split("\\")[-1:][0]
analysis = Arkivist(f"{directory}/abero/analysis.json")
if reset == 1:
analysis.clear()
filenames = get_filenames(f"{directory}", extension)
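    # Pairwise-compare every file against all others; the template file (if any) is stored under the "template" key.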
for index, filename in enumerate(filenames):
if template_filename != filename:
duplicates = {}
filepath = f"{directory}/{filename}"
for compare in filenames:
uid = compare
if template_filename == compare:
uid = "template"
if filename != compare:
skip = False
if skipnames == 1:
cid = None
common = common_string(filename, compare)
if common in compare:
if len(common) >= 10:
skip = True
if group == 1:
skip = True
rcommon = common_string(filename, compare, reverse=True)
if rcommon != "":
if rcommon in compare:
skip = False
if not skip:
if text_based == 1:
result = similarity(filepath, f"{directory}/{compare}")
else:
result = bin_diff(filepath, f"{directory}/{compare}")
duplicates.update({uid: result})
metadata = analysis.get(filename, {})
metadata.update({"text": text_based})
metadata.update({"duplicates": duplicates})
analysis.set(filename, metadata)
print("\nGenerating report...")
count = 0
analysis.reload()
for filename, metadata in analysis.show().items():
count += 1
print(f"\n File #{count}: {filename}")
originality = 0
duplicates = metadata.get("duplicates", {})
control = duplicates.get("template", {})
if metadata.get("text", -1) == 1:
duplication = [0]
control_avg = average(list(control.values()), threshold)
for uid, result in duplicates.items():
if uid != "template":
avg = average(list(result.values()), threshold)
avg = avg - control_avg
if avg > 0:
duplication.append(avg)
print(f" - {avg:.2f}% {uid}")
originality = (100 - max(duplication))
else:
duplication = [0]
control_matches = duplicates.get("template", {}).get("matches", [])
for uid, result in duplicates.items():
if uid != "template":
duplicated = []
matches = result.get("matches", [])
duplicated = [x for x in matches if x not in control_matches]
file_length = result.get("size", 1)
avg = (len(duplicated) / file_length) * 100
if avg > 0:
duplication.append(avg)
                        print(f"      - {avg:.2f}% {uid}")
originality = 100 - max(duplication)
print(f" * Originality Rating: {originality:.2f}%")
statistics = metadata.get("statistics", {})
statistics.update({"originality": originality})
metadata.update({"statistics": statistics})
analysis.set(filename, metadata)
def average(results, threshold):
if len(results) == 0:
return 0
values = []
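    # Similarity rates below the threshold are zeroed so weak matches do not inflate the mean.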
for value in results:
rate = value.get("1", 0)
if rate < threshold:
rate = 0
values.append(rate)
return round(mean(values), 2)
def similarity(original, compare):
""" Compare original file to other files """
duplicates = {}
data1, data2 = "", ""
with open(original, "r", encoding="utf-8") as file1:
try:
data1 = file1.read()
except:
data1 = ""
with open(compare, "r", encoding="utf-8") as file2:
try:
data2 = file2.read()
except:
data2 = ""
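    # For every line of the original file, find the best-matching line of the compared file (0-100%).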
for line1 in data1.split("\n"):
rate = 0
words1 = pad(line1).split(" ")
words1 = [i for i in words1 if i.strip() != ""]
# words1.sort()
if len(set(words1)) > 0:
line = ""
for line2 in data2.split("\n"):
words2 = pad(line2).split(" ")
words2 = [i for i in words2 if i.strip() != ""]
if len(set(words2)) > 0:
if line1.strip() == line2.strip():
rate = 100
line = line2
else:
diff1 = difference(words2, words1)
diff2 = difference(words1, words2)
words3 = []
words3.extend(words1)
words3.extend(words2)
remain = len(diff1) + len(diff2)
if remain == 0:
rate = 100
line = line2
else:
temp = ((len(words3) - remain) / len(words3)) * 100
if temp > rate:
rate = temp
line = line2
duplicates.update({line1: {"0": line, "1": rate}})
return duplicates
def bin_diff(original, compare):
# https://stackoverflow.com/a/15798718/4943299
matches = []
with open(original, "rb") as file1:
content1 = file1.read()
with open(compare, "rb") as file2:
content2 = file2.read()
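    # Group consecutive byte positions where both files contain the same byte; each run is stored as (offset, length).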
for k, g in groupby(range(min(len(content1), len(content2))), key=lambda i: content1[i] == content2[i]):
if k:
pos = next(g)
length = len(list(g)) + 1
matches.append((pos, length))
return dict({"size": len(content1), "matches": matches})
def pad(string):
""" Decongest statements """
padded = string.replace("\r", "").replace("\t", " ")
symbols = ["#", "%", "*", ")", "+", "-", "=",
"{", "}", "]", "\"", "'", "<", ">" ]
for item in symbols:
padded = padded.replace(item, f" {item} ")
return padded.replace("(", "( ")
def difference(diff, words):
paired = []
diff = [i for i in diff]
pairs = {"(": ")", "{": "}", "[": "]", "\"": "\"", "'": "'" }
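    # Remove words present in both lists; opening brackets/quotes queue their closing pair so matched delimiters are discounted too.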
for item in words:
if item in diff:
if item in paired:
paired.remove(item)
diff.remove(item)
else:
if len(item) > 1 and item in diff:
diff.remove(item)
for pair in pairs.keys():
if pair in item and not ("\\\"" in item) and not ("\\\'" in item):
paired.append(pairs.get(pair, ""))
if item in diff:
diff.remove(item)
else:
if item in diff:
diff.remove(item)
return diff
def ipynb2py(filepath):
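    # Convert a Jupyter notebook into a plain .py script so it can be analyzed like any other source file.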
script = []
ipynb = Arkivist(filepath).show()
for cell in ipynb.get("cells", []):
cell_type = cell.get("cell_type", "")
outputs = cell.get("outputs", [])
source = cell.get("source", [])
        if cell_type == "code" and len(outputs) > 0:
            for line in source:
                script.append(line)
        else:
            # Non-code cells (and code cells without outputs) are kept as comments.
            for line in source:
                script.append(f"# {line}")
new_filepath = filepath.replace(".ipynb", ".py")
with open(new_filepath, "w+", encoding="utf-8") as file:
        file.write("\n".join(script))
def validate(value, minimum, maximum, fallback):
if not isinstance(value, int):
print("KymeraWarning: Parameter must be an integer.")
value = int(fallback)
if not (minimum <= value <= maximum):
print(f"AberoWarning: Parameter must be an integer between {minimum}-{maximum}.")
value = int(fallback)
return value
def extract(path, destination):
try:
with zipfile.ZipFile(path, "r") as zip:
zip.extractall(destination)
except:
print(f"\nAberoWarning: Error in processing ZIP file: {path}")
pass
def common_string(original, compare, reverse=False):
if reverse:
extension = list(original.split("."))[-1]
original = "".join(list(reversed(original.replace(f".{extension}", "").strip())))
compare = "".join(list(reversed(compare.replace(f".{extension}", "").strip())))
common = []
limit = min((len(original), len(compare)))
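    # Walk both names from the start and stop at the first mismatching character.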
for i in range(0, limit):
if original[i] != compare[i]:
break
common.append(original[i])
if reverse:
return "".join(list(reversed(common)))
return "".join(common)
def defaults(value, minimum, maximum, fallback):
if value is not None:
if not (minimum <= value <= maximum):
return fallback
return value
return fallback
# file/folder io
def get_folders(source):
return [f.name for f in os.scandir(source) if f.is_dir()]
def check_path(path):
if path.strip() == "":
return False
return os.path.exists(path)
def get_filenames(path, extension):
filenames = []
for filepath in os.listdir(path):
if filepath.split(".")[-1].lower() == extension:
filenames.append(filepath)
return filenames | [
"statistics.mean",
"os.path.exists",
"os.listdir",
"os.makedirs",
"zipfile.ZipFile",
"os.scandir",
"arkivist.Arkivist",
"sys.exit"
] | [((2861, 2905), 'arkivist.Arkivist', 'Arkivist', (['f"""{directory}/abero/analysis.json"""'], {}), "(f'{directory}/abero/analysis.json')\n", (2869, 2905), False, 'from arkivist import Arkivist\n'), ((12148, 12168), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (12162, 12168), False, 'import os\n'), ((12245, 12261), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (12255, 12261), False, 'import os\n'), ((437, 448), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (445, 448), False, 'import sys\n'), ((507, 540), 'os.makedirs', 'os.makedirs', (['f"""{directory}/abero"""'], {}), "(f'{directory}/abero')\n", (518, 540), False, 'import os\n'), ((6579, 6591), 'statistics.mean', 'mean', (['values'], {}), '(values)\n', (6583, 6591), False, 'from statistics import mean\n'), ((10077, 10095), 'arkivist.Arkivist', 'Arkivist', (['filepath'], {}), '(filepath)\n', (10085, 10095), False, 'from arkivist import Arkivist\n'), ((11026, 11052), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path', '"""r"""'], {}), "(path, 'r')\n", (11041, 11052), False, 'import zipfile\n'), ((12032, 12050), 'os.scandir', 'os.scandir', (['source'], {}), '(source)\n', (12042, 12050), False, 'import os\n')] |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for TPU Embeddings mid level API on TPU."""
from absl.testing import parameterized
from tensorflow.python.compat import v2_compat
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.eager import def_function
from tensorflow.python.framework import config
from tensorflow.python.platform import test
from tensorflow.python.tpu import tpu_embedding_v2
from tensorflow.python.tpu import tpu_embedding_v2_utils
from tensorflow.python.tpu.tests import tpu_embedding_base_test
class TPUEmbeddingTest(tpu_embedding_base_test.TPUEmbeddingBaseTest):
def test_tables_with_same_name(self):
with self.assertRaisesRegex(
ValueError, 'Multiple tables with name table found.'):
with self._get_strategy().scope():
tpu_embedding_v2.TPUEmbedding(
(tpu_embedding_v2_utils.FeatureConfig(
table=tpu_embedding_v2_utils.TableConfig(
name='table',
vocabulary_size=4,
dim=2,
initializer=self.initializer,),
name='watched'),
tpu_embedding_v2_utils.FeatureConfig(
table=tpu_embedding_v2_utils.TableConfig(
name='table',
vocabulary_size=4,
dim=2,
initializer=self.initializer),
name='favorited')),
tpu_embedding_v2_utils.SGD(learning_rate=0.1))
def test_pass_non_tensor_to_apply_gradients(self):
strategy, mid_level_api, _ = self._create_strategy_and_mid_level('sgd')
# We aren't going to actually run anything, so the batch_size here does not
# matter.
mid_level_api.build(64)
# Test pass non tensor to apply_gradients.
@def_function.function
def test_apply_1():
mid_level_api.apply_gradients((1, 2, 3))
with self.assertRaisesRegex(ValueError, 'found non-tensor type'):
strategy.run(test_apply_1)
# Test pass different structure to apply_gradients.
@def_function.function
def test_apply_2():
# This should be a tuple as feature_config is a tuple of 3 configs.
mid_level_api.apply_gradients([1, 2, 3])
with self.assertRaisesRegex(
TypeError, 'The two structures don\'t have the same nested structure.'):
strategy.run(test_apply_2)
def test_enqueue_weight_for_dense_tensor(self):
strategy, mid_level_api, _ = self._create_strategy_and_mid_level('sgd')
dataset = self._create_dense_dataset(strategy, include_weights=True)
dense_iter = iter(
strategy.experimental_distribute_dataset(
dataset,
options=distribute_lib.InputOptions(
experimental_fetch_to_device=False)))
@def_function.function
def test_fn():
def step():
return mid_level_api.dequeue()
features, weights = next(dense_iter)
mid_level_api.enqueue(features, weights=weights, training=False)
return strategy.run(step)
with self.assertRaisesRegex(ValueError, 'Weight specified for dense input'):
test_fn()
def test_enqueue_wrong_weight_type_for_sparse_and_ragged_tensor(self):
self.skip_if_oss()
strategy, mid_level_api, _ = self._create_strategy_and_mid_level('sgd')
sparse = self._create_sparse_dataset(strategy, include_weights=True)
ragged = self._create_ragged_dataset(strategy, include_weights=True)
sparse_iter = iter(
strategy.experimental_distribute_dataset(
sparse,
options=distribute_lib.InputOptions(
experimental_fetch_to_device=False)))
ragged_iter = iter(
strategy.experimental_distribute_dataset(
ragged,
options=distribute_lib.InputOptions(
experimental_fetch_to_device=False)))
@def_function.function
def test_sparse_fn():
def step():
return mid_level_api.dequeue()
features, _ = next(sparse_iter)
_, weights = next(ragged_iter)
mid_level_api.enqueue(features, weights=weights, training=False)
return strategy.run(step)
with self.assertRaisesRegex(
ValueError, 'which does not match type input which is SparseTensor.'):
test_sparse_fn()
@def_function.function
def test_ragged_fn():
def step():
return mid_level_api.dequeue()
_, weights = next(sparse_iter)
features, _ = next(ragged_iter)
mid_level_api.enqueue(features, weights=weights, training=False)
return strategy.run(step)
with self.assertRaisesRegex(
ValueError, 'which does not match type input which is RaggedTensor.'):
test_ragged_fn()
def test_enqueue_incorrect_structure_for_features_and_weights(self):
self.skip_if_oss()
strategy, mid_level_api, _ = self._create_strategy_and_mid_level('sgd')
sparse = self._create_sparse_dataset(strategy, include_weights=True)
sparse_iter = iter(
strategy.experimental_distribute_dataset(
sparse,
options=distribute_lib.InputOptions(
experimental_fetch_to_device=False)))
@def_function.function
def test_features_fn():
def step():
return mid_level_api.dequeue()
features = next(sparse_iter)
features = (features[0],)
mid_level_api.enqueue(features, training=False)
return strategy.run(step)
# The error here is raised from nest.assert_same_structure
with self.assertRaises(ValueError):
test_features_fn()
@def_function.function
def test_weights_fn():
def step():
return mid_level_api.dequeue()
features, weights = next(sparse_iter)
weights = (weights[0],)
mid_level_api.enqueue(features, weights=weights, training=False)
return strategy.run(step)
# The error here is raised from nest.assert_same_structure
with self.assertRaises(ValueError):
test_weights_fn()
def test_enqueue_cpu_tensor(self):
strategy, mid_level_api, _ = self._create_strategy_and_mid_level('sgd')
dataset = self._create_dense_dataset(strategy)
dense_iter = iter(strategy.experimental_distribute_dataset(dataset))
@def_function.function
def test_fn():
def get_activations():
return mid_level_api.dequeue()
features = next(dense_iter)
mid_level_api.enqueue(features, training=False)
activations = strategy.run(get_activations)
return activations
with self.assertRaisesRegex(ValueError, 'which is on a TPU input device'):
test_fn()
@parameterized.parameters([True, False])
def test_enqueue_cpu_tensor_with_outside_compilation(self, use_mlir):
if use_mlir:
config.enable_mlir_bridge()
strategy, mid_level_api, _ = self._create_strategy_and_mid_level('sgd')
dataset = self._create_dense_dataset(strategy)
dense_iter = iter(strategy.experimental_distribute_dataset(dataset))
@def_function.function
def test_fn():
def get_activations(features):
mid_level_api.enqueue(features, training=False)
return mid_level_api.dequeue()
activations = strategy.run(get_activations, args=(next(dense_iter),))
return activations
with self.assertRaisesRegex(ValueError, 'which is on a TPU input device'):
test_fn()
if __name__ == '__main__':
v2_compat.enable_v2_behavior()
test.main()
| [
"tensorflow.python.framework.config.enable_mlir_bridge",
"tensorflow.python.compat.v2_compat.enable_v2_behavior",
"tensorflow.python.tpu.tpu_embedding_v2_utils.SGD",
"tensorflow.python.tpu.tpu_embedding_v2_utils.TableConfig",
"tensorflow.python.distribute.distribute_lib.InputOptions",
"absl.testing.parameterized.parameters",
"tensorflow.python.platform.test.main"
] | [((7207, 7246), 'absl.testing.parameterized.parameters', 'parameterized.parameters', (['[True, False]'], {}), '([True, False])\n', (7231, 7246), False, 'from absl.testing import parameterized\n'), ((7980, 8010), 'tensorflow.python.compat.v2_compat.enable_v2_behavior', 'v2_compat.enable_v2_behavior', ([], {}), '()\n', (8008, 8010), False, 'from tensorflow.python.compat import v2_compat\n'), ((8013, 8024), 'tensorflow.python.platform.test.main', 'test.main', ([], {}), '()\n', (8022, 8024), False, 'from tensorflow.python.platform import test\n'), ((7343, 7370), 'tensorflow.python.framework.config.enable_mlir_bridge', 'config.enable_mlir_bridge', ([], {}), '()\n', (7368, 7370), False, 'from tensorflow.python.framework import config\n'), ((2094, 2139), 'tensorflow.python.tpu.tpu_embedding_v2_utils.SGD', 'tpu_embedding_v2_utils.SGD', ([], {'learning_rate': '(0.1)'}), '(learning_rate=0.1)\n', (2120, 2139), False, 'from tensorflow.python.tpu import tpu_embedding_v2_utils\n'), ((3335, 3398), 'tensorflow.python.distribute.distribute_lib.InputOptions', 'distribute_lib.InputOptions', ([], {'experimental_fetch_to_device': '(False)'}), '(experimental_fetch_to_device=False)\n', (3362, 3398), False, 'from tensorflow.python.distribute import distribute_lib\n'), ((4201, 4264), 'tensorflow.python.distribute.distribute_lib.InputOptions', 'distribute_lib.InputOptions', ([], {'experimental_fetch_to_device': '(False)'}), '(experimental_fetch_to_device=False)\n', (4228, 4264), False, 'from tensorflow.python.distribute import distribute_lib\n'), ((4398, 4461), 'tensorflow.python.distribute.distribute_lib.InputOptions', 'distribute_lib.InputOptions', ([], {'experimental_fetch_to_device': '(False)'}), '(experimental_fetch_to_device=False)\n', (4425, 4461), False, 'from tensorflow.python.distribute import distribute_lib\n'), ((5692, 5755), 'tensorflow.python.distribute.distribute_lib.InputOptions', 'distribute_lib.InputOptions', ([], {'experimental_fetch_to_device': '(False)'}), '(experimental_fetch_to_device=False)\n', (5719, 5755), False, 'from tensorflow.python.distribute import distribute_lib\n'), ((1559, 1667), 'tensorflow.python.tpu.tpu_embedding_v2_utils.TableConfig', 'tpu_embedding_v2_utils.TableConfig', ([], {'name': '"""table"""', 'vocabulary_size': '(4)', 'dim': '(2)', 'initializer': 'self.initializer'}), "(name='table', vocabulary_size=4, dim=2,\n initializer=self.initializer)\n", (1593, 1667), False, 'from tensorflow.python.tpu import tpu_embedding_v2_utils\n'), ((1854, 1962), 'tensorflow.python.tpu.tpu_embedding_v2_utils.TableConfig', 'tpu_embedding_v2_utils.TableConfig', ([], {'name': '"""table"""', 'vocabulary_size': '(4)', 'dim': '(2)', 'initializer': 'self.initializer'}), "(name='table', vocabulary_size=4, dim=2,\n initializer=self.initializer)\n", (1888, 1962), False, 'from tensorflow.python.tpu import tpu_embedding_v2_utils\n')] |
import requests
import json
import re
import shutil
import os
import base64
from util import *
def __handle__error(j):
if 'error' in j:
        if 'error_summary' in j:
            raise Exception(j['error_summary'])
else:
raise Exception(j['error'])
def __get_page(token,cursor):
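    # Dropbox paginates folder listings: when a list_folder response sets 'has_more',
    # the returned cursor is POSTed to /2/files/list_folder/continue for the next page
    # (__yield__files below recurses back into this generator).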
url = "https://api.dropboxapi.com/2/files/list_folder/continue"
headers = {
"Authorization": "Bearer {}".format(token),
"Content-Type": "application/json"
}
data = {
"cursor": cursor
}
r = requests.post(url, headers=headers, data=json.dumps(data))
if r.ok:
j = r.json()
__handle__error(j)
yield from __yield__files(j ,token)
def __yield__files(j,token):
for e in j['entries']:
if e['.tag'] == 'file':
if e['is_downloadable']:
yield {
'name': e['name'],
'id': e['id'],
'id_stripped': re.sub('id: ','',e['id']),
'path': e['path_lower'],
'content_hash': e['content_hash']
}
else:
print("Warning file {} isn't downloadable".format(e['name']))
if j['has_more']:
yield from __get_page(token,j['cursor'])
def getFileMeta(token):
url = "https://api.dropboxapi.com/2/files/list_folder"
headers = {
"Authorization": "Bearer {}".format(token),
"Content-Type": "application/json"
}
data = {
"path": "/Photography/Published",
"recursive": False,
"include_media_info": False,
"include_deleted": False,
"limit": 200
}
r = requests.post(url, headers=headers, data=json.dumps(data))
if r.ok:
j = r.json()
__handle__error(j)
yield from __yield__files(j ,token)
else:
print(r)
print(r.text)
print(r.json())
raise Exception('Failed to request file information')
def downloadFile(dropbox,token):
url = "https://content.dropboxapi.com/2/files/download"
headers = {
"Authorization": "Bearer {}".format(token),
"Dropbox-API-Arg": "{\"path\":\""+dropbox['id']+"\"}"
}
r = requests.post(url, headers=headers,stream=True)
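    # stream=True keeps the file body out of memory; the raw response stream is
    # copied straight to disk with shutil.copyfileobj below.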
if r.ok:
r.raw.decode_content=True
match = re.match("([^/\\\\]+)\.([jJ][pP][gG])",dropbox['name'])
if match:
filename = "img/raw/{}.jpg".format(match[1])
if match[2] != "jpg":
print("Renamed {} to {}.jpg".format(dropbox['name'],match[1]))
with open(filename,'wb') as f:
shutil.copyfileobj(r.raw,f)
print("Downloaded {} ({})".format(dropbox['name'],sizeof_fmt(os.path.getsize(filename))))
else:
raise Exception("File extension of ({}) ins't valid, expected .jpg".format(dropbox['name']))
else:
print("Failed to download {} from dropbox".format(dropbox['name']))
print(r)
print(r.text)
print(r.json())
raise Exception('Failed to request file information')
def downloadSitedata(dropbox,token):
url = "https://content.dropboxapi.com/2/files/download"
headers = {
"Authorization": "Bearer {}".format(token),
"Dropbox-API-Arg": "{\"path\":\""+dropbox['id']+"\"}"
}
r = requests.post(url, headers=headers,stream=True)
if r.ok:
r.raw.decode_content=True
match = re.match("sitedata\.json",dropbox['name'])
if match:
filename = "api/sitedata.json"
with open(filename,'wb') as f:
shutil.copyfileobj(r.raw,f)
print("Downloaded {} ({})".format(dropbox['name'],sizeof_fmt(os.path.getsize(filename))))
else:
raise Exception("Expected sitedata.json not {}".format(dropbox['name']))
else:
print("Failed to download {} from dropbox".format(dropbox['name']))
print(r)
print(r.text)
print(r.json())
raise Exception('Failed to request file information') | [
"os.path.getsize",
"requests.post",
"shutil.copyfileobj",
"json.dumps",
"re.match",
"re.sub"
] | [((2271, 2319), 'requests.post', 'requests.post', (['url'], {'headers': 'headers', 'stream': '(True)'}), '(url, headers=headers, stream=True)\n', (2284, 2319), False, 'import requests\n'), ((3388, 3436), 'requests.post', 'requests.post', (['url'], {'headers': 'headers', 'stream': '(True)'}), '(url, headers=headers, stream=True)\n', (3401, 3436), False, 'import requests\n'), ((2382, 2439), 're.match', 're.match', (['"""([^/\\\\\\\\]+)\\\\.([jJ][pP][gG])"""', "dropbox['name']"], {}), "('([^/\\\\\\\\]+)\\\\.([jJ][pP][gG])', dropbox['name'])\n", (2390, 2439), False, 'import re\n'), ((3499, 3543), 're.match', 're.match', (['"""sitedata\\\\.json"""', "dropbox['name']"], {}), "('sitedata\\\\.json', dropbox['name'])\n", (3507, 3543), False, 'import re\n'), ((585, 601), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (595, 601), False, 'import json\n'), ((1769, 1785), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1779, 1785), False, 'import json\n'), ((2686, 2714), 'shutil.copyfileobj', 'shutil.copyfileobj', (['r.raw', 'f'], {}), '(r.raw, f)\n', (2704, 2714), False, 'import shutil\n'), ((3663, 3691), 'shutil.copyfileobj', 'shutil.copyfileobj', (['r.raw', 'f'], {}), '(r.raw, f)\n', (3681, 3691), False, 'import shutil\n'), ((996, 1023), 're.sub', 're.sub', (['"""id: """', '""""""', "e['id']"], {}), "('id: ', '', e['id'])\n", (1002, 1023), False, 'import re\n'), ((2787, 2812), 'os.path.getsize', 'os.path.getsize', (['filename'], {}), '(filename)\n', (2802, 2812), False, 'import os\n'), ((3764, 3789), 'os.path.getsize', 'os.path.getsize', (['filename'], {}), '(filename)\n', (3779, 3789), False, 'import os\n')] |
import tensorflow as tf
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch
def smooth_l1(y_true, y_pred):
HUBER_DELTA = 1.0
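	# Huber / smooth-L1: 0.5*x**2 when |x| < HUBER_DELTA, HUBER_DELTA*(|x| - 0.5*HUBER_DELTA) otherwise.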
x = abs(y_true - y_pred)
x = switch(x < HUBER_DELTA, 0.5*x**2, HUBER_DELTA*(x - 0.5*HUBER_DELTA))
return x
def balanced_l1(y_true, y_pred):
'''
https://arxiv.org/pdf/1904.02701.pdf
'''
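	# Balanced L1 (Libra R-CNN): for |x| < 1 the loss is
	#   (alpha/b)*(b*|x| + 1)*ln(b*|x| + 1) - alpha*|x|
	# and gamma*|x| + C otherwise; the (alpha*x + alpha/b) factor below is the same
	# expression with alpha/b distributed, since x = |y_true - y_pred| here.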
alpha = 0.5
gamma = 1.5
b = 19.085
C = 0
x = abs(y_true - y_pred)
x = switch(x < 1.0, (alpha*x + alpha/b)*tf.math.log(b*x + 1) - alpha*x, gamma*x + C)
return x
def rpn_loss(y_true, y_pred):
'''
Arguments
y_true: (batch_size, h, w, 6k)
y_pred: (batch_size, h, w, 6k)
Return
loss
'''
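	# Along the last axis, the first 2K channels are the per-anchor object/background
	# scores and the remaining 4K channels are the box-delta targets/predictions.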
K = y_pred.shape[3]//6
true_clz_4dtensor = y_true[:, :, :, :2*K] # (batch_size, h, w, 2k)
true_bbe_4dtensor = y_true[:, :, :, 2*K:] # (batch_size, h, w, 4k)
pred_clz_4dtensor = y_pred[:, :, :, :2*K] # (batch_size, h, w, 2k)
pred_bbe_4dtensor = y_pred[:, :, :, 2*K:] # (batch_size, h, w, 4k)
true_clz_2dtensor = tf.reshape(tensor=true_clz_4dtensor, shape=[-1, 2]) # (h*w*k, 2)
true_bbe_2dtensor = tf.reshape(tensor=true_bbe_4dtensor, shape=[-1, 4]) # (h*w*k, 4)
pred_clz_2dtensor = tf.reshape(tensor=pred_clz_4dtensor, shape=[-1, 2]) # (h*w*k, 2)
pred_bbe_2dtensor = tf.reshape(tensor=pred_bbe_4dtensor, shape=[-1, 4]) # (h*w*k, 4)
# add small value when output is zeros, avoid log(0) = -inf
pred_clz_2dtensor = tf.where(
condition=tf.math.equal(x=pred_clz_2dtensor, y=0.0),
x=0.00001,
y=pred_clz_2dtensor)
LAMBDA = 1.0
L_clz = categorical_crossentropy(target=true_clz_2dtensor, output=pred_clz_2dtensor) # (h*w*k)
L_bbe = balanced_l1(true_bbe_2dtensor, pred_bbe_2dtensor) # (h*w*k, 4)
L_bbe = sum(x=L_bbe, axis=-1) # (h*w*k)
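	# The regression term is masked by channel 0 of the one-hot class target, so only
	# anchors marked positive there (assumed encoding) contribute to the box loss.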
L = mean(L_clz) + LAMBDA*mean(true_clz_2dtensor[:, 0]*L_bbe)
return L
def det_loss(y_true, y_pred):
'''
Arguments
y_true: (batch_size, num_of_rois, num_of_classes+4)
y_pred: (batch_size, num_of_rois, num_of_classes+4)
Return
loss
'''
num_of_classes = y_pred.shape[2]-4
true_clz_3dtensor = y_true[:, :, :num_of_classes] # (batch_size, num_of_rois, num_of_classes)
true_bbe_3dtensor = y_true[:, :, num_of_classes:] # (batch_size, num_of_rois, 4)
pred_clz_3dtensor = y_pred[:, :, :num_of_classes] # (batch_size, num_of_rois, num_of_classes)
pred_bbe_3dtensor = y_pred[:, :, num_of_classes:] # (batch_size, num_of_rois, 4)
true_clz_2dtensor = tf.reshape(tensor=true_clz_3dtensor, shape=[-1, num_of_classes]) # (num_of_rois, num_of_classes)
true_bbe_2dtensor = tf.reshape(tensor=true_bbe_3dtensor, shape=[-1, 4]) # (num_of_rois, 4)
pred_clz_2dtensor = tf.reshape(tensor=pred_clz_3dtensor, shape=[-1, num_of_classes]) # (num_of_rois, num_of_classes)
pred_bbe_2dtensor = tf.reshape(tensor=pred_bbe_3dtensor, shape=[-1, 4]) # (num_of_rois, 4)
# add small value when output is zeros, avoid log(0) = -inf
pred_clz_2dtensor = tf.where(
condition=tf.math.equal(x=pred_clz_2dtensor, y=0.0),
x=0.0001,
y=pred_clz_2dtensor)
valid_3dtensor = tf.math.reduce_max(input_tensor=true_clz_3dtensor, axis=2, keepdims=True) # (batch_size, num_of_rois, 1)
valid_1dtensor = tf.reshape(tensor=valid_3dtensor, shape=[-1]) # (num_of_rois)
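	# valid_1dtensor is 1 for RoIs that carry a class label and 0 for all-zero (padded)
	# rows, so padded RoIs are assumed to contribute nothing to the box-regression term.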
LAMBDA = 1.0
L_clz = categorical_crossentropy(target=true_clz_2dtensor, output=pred_clz_2dtensor) # (num_of_rois)
L_bbe = balanced_l1(true_bbe_2dtensor, pred_bbe_2dtensor) # (num_of_rois, 4)
L_bbe = sum(x=L_bbe, axis=-1) # (num_of_rois)
L = mean(L_clz) + LAMBDA*mean(valid_1dtensor*L_bbe)
return L
def dumpy_loss(y_true, y_pred):
'''
	For inference, a loss is not needed.
'''
return 0.0
def build_train_maskrcnn_non_fpn(ishape, anchor_4dtensor, classes, max_num_of_rois,
nsm_iou_threshold, nsm_score_threshold, unified_roi_size, rpn_head_dim, fc_denses,
block_settings, base_block_trainable=True):
'''
Arguments
ishape:
anchor_4dtensor:
classes:
max_num_of_rois:
nsm_iou_threshold:
nsm_score_threshold:
unified_roi_size:
rpn_head_dim:
fc_denses:
block_settings:
base_block_trainable:
Return
rpn_model:
detection_model:
'''
from net_blocks import non_fpn, rpn, classifier_net_non_fpn
num_of_classes = len(classes)
k = anchor_4dtensor.shape[2]
input_tensor = Input(shape=ishape)
roi_tensor = Input(shape=(max_num_of_rois, 4))
fmap_tensor = non_fpn(
input_tensor=input_tensor,
block_settings=block_settings,
trainable=base_block_trainable)
rpn_clzbbe_tensor = rpn(input_tensor=fmap_tensor, k=k, f=rpn_head_dim)
clzbbe_tensor = classifier_net_non_fpn(
input_tensor=fmap_tensor,
ishape=ishape,
roi_tensor=roi_tensor,
unified_roi_size=unified_roi_size,
num_of_classes=num_of_classes,
fc_denses=fc_denses)
rpn_model = Model(
inputs=input_tensor,
outputs=rpn_clzbbe_tensor,
name='RPN')
detection_model = Model(
inputs=[input_tensor, roi_tensor],
outputs=clzbbe_tensor,
name='DETECTION')
rpn_model.compile(
optimizer=Adam(lr=0.001),
loss=rpn_loss)
detection_model.compile(
optimizer=Adam(lr=0.001),
loss=det_loss)
return rpn_model, detection_model
def build_inference_maskrcnn_non_fpn(ishape, anchor_4dtensor, classes, max_num_of_rois,
nsm_iou_threshold, nsm_score_threshold, unified_roi_size, rpn_head_dim, fc_denses,
block_settings, base_block_trainable=False):
'''
Arguments
ishape:
anchor_4dtensor:
classes:
max_num_of_rois:
nsm_iou_threshold:
nsm_score_threshold:
unified_roi_size:
rpn_head_dim:
fc_denses:
block_settings:
base_block_trainable:
Return
rpn_model:
detection_model:
'''
from net_blocks import non_fpn, rpn, nsm_block_non_fpn, classifier_net_non_fpn, output_block
num_of_classes = len(classes)
k = anchor_4dtensor.shape[2]
input_tensor = Input(shape=ishape)
fmap_tensor = non_fpn(
input_tensor=input_tensor,
block_settings=block_settings,
trainable=base_block_trainable)
rpn_clzbbe_tensor = rpn(input_tensor=fmap_tensor, k=k, f=rpn_head_dim)
roi_tensor = nsm_block_non_fpn(
input_tensor=rpn_clzbbe_tensor,
ishape=ishape,
num_of_rois=max_num_of_rois,
nsm_iou_threshold=nsm_iou_threshold,
nsm_score_threshold=nsm_score_threshold,
anchor_4dtensor=anchor_4dtensor)
clzbbe_tensor = classifier_net_non_fpn(
input_tensor=fmap_tensor,
ishape=ishape,
roi_tensor=roi_tensor,
unified_roi_size=unified_roi_size,
num_of_classes=num_of_classes,
fc_denses=fc_denses)
output_tensor = output_block(
input_tensor=clzbbe_tensor,
roi_tensor=roi_tensor,
num_of_rois=max_num_of_rois,
ishape=ishape)
rpn_model = Model(inputs=input_tensor, outputs=roi_tensor, name='RPN')
detection_model = Model(inputs=input_tensor, outputs=output_tensor, name='DETECTION')
rpn_model.compile(optimizer=Adam(lr=0.001), loss=dumpy_loss)
detection_model.compile(optimizer=Adam(lr=0.001), loss=dumpy_loss)
return rpn_model, detection_model
def build_train_maskrcnn_fpn(ishape, anchor_4dtensors, classes, max_num_of_rois,
nsm_iou_threshold, nsm_score_threshold, unified_roi_size, k0, top_down_pyramid_size,
rpn_head_dim, fc_denses, block_settings, base_block_trainable=True):
'''
Arguments
ishape:
anchor_4dtensors:
classes:
max_num_of_rois:
nsm_iou_threshold:
nsm_score_threshold:
unified_roi_size:
k0:
top_down_pyramid_size:
rpn_head_dim:
fc_denses:
block_settings:
base_block_trainable:
Return
rpn_model:
detection_model:
'''
from net_blocks import fpn, rpn, classifier_net_fpn
num_of_classes = len(classes)
k1 = anchor_4dtensors[0].shape[2]
	k2 = anchor_4dtensors[1].shape[2]
	k3 = anchor_4dtensors[2].shape[2]
	k4 = anchor_4dtensors[3].shape[2]
input_tensor = Input(shape=ishape)
roi_tensor = Input(shape=(max_num_of_rois, 4))
P2, P3, P4, P5 = fpn(
input_tensor=input_tensor,
block_settings=block_settings,
top_down_pyramid_size=top_down_pyramid_size,
trainable=base_block_trainable)
lvl1_rpn_clzbbe_tensor = rpn(input_tensor=P2, k=k1, f=rpn_head_dim)
lvl2_rpn_clzbbe_tensor = rpn(input_tensor=P3, k=k2, f=rpn_head_dim)
lvl3_rpn_clzbbe_tensor = rpn(input_tensor=P4, k=k3, f=rpn_head_dim)
lvl4_rpn_clzbbe_tensor = rpn(input_tensor=P5, k=k4, f=rpn_head_dim)
clzbbe_tensor = classifier_net_fpn(
input_tensors=[P2, P3, P4, P5],
ishape=ishape,
roi_tensor=roi_tensor,
unified_roi_size=unified_roi_size,
k0=k0,
num_of_classes=num_of_classes,
fc_denses=fc_denses)
rpn_model = Model(
inputs=input_tensor,
outputs=[
lvl1_rpn_clzbbe_tensor,
lvl2_rpn_clzbbe_tensor,
lvl3_rpn_clzbbe_tensor,
lvl4_rpn_clzbbe_tensor],
name='RPN')
detection_model = Model(
inputs=[input_tensor, roi_tensor],
outputs=clzbbe_tensor,
name='DETECTION')
rpn_model.compile(
optimizer=Adam(lr=0.001),
loss=[
rpn_loss,
rpn_loss,
rpn_loss,
rpn_loss])
detection_model.compile(
optimizer=Adam(lr=0.001),
loss=det_loss)
return rpn_model, detection_model
def build_inference_maskrcnn_fpn(ishape, anchor_4dtensors, classes, max_num_of_rois,
nsm_iou_threshold, nsm_score_threshold, unified_roi_size, k0, top_down_pyramid_size,
rpn_head_dim, fc_denses, block_settings, base_block_trainable=False):
'''
Arguments
ishape:
anchor_4dtensors:
classes:
max_num_of_rois:
nsm_iou_threshold:
nsm_score_threshold:
unified_roi_size:
k0:
top_down_pyramid_size:
rpn_head_dim:
fc_denses:
block_settings:
base_block_trainable:
Return
rpn_model:
detection_model:
'''
from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block
num_of_classes = len(classes)
k1 = anchor_4dtensors[0].shape[2]
k2 = anchor_4dtensors[1].shape[2]
k3 = anchor_4dtensors[2].shape[2]
k4 = anchor_4dtensors[3].shape[2]
input_tensor = Input(shape=ishape)
P2, P3, P4, P5 = fpn(
input_tensor=input_tensor,
block_settings=block_settings,
top_down_pyramid_size=top_down_pyramid_size,
trainable=base_block_trainable)
lvl1_rpn_clzbbe_tensor = rpn(input_tensor=P2, k=k1, f=rpn_head_dim)
lvl2_rpn_clzbbe_tensor = rpn(input_tensor=P3, k=k2, f=rpn_head_dim)
lvl3_rpn_clzbbe_tensor = rpn(input_tensor=P4, k=k3, f=rpn_head_dim)
lvl4_rpn_clzbbe_tensor = rpn(input_tensor=P5, k=k4, f=rpn_head_dim)
roi_tensor = nsm_block_fpn(
input_tensors=[
lvl1_rpn_clzbbe_tensor,
lvl2_rpn_clzbbe_tensor,
lvl3_rpn_clzbbe_tensor,
lvl4_rpn_clzbbe_tensor],
ishape=ishape,
num_of_rois=max_num_of_rois,
nsm_iou_threshold=nsm_iou_threshold,
nsm_score_threshold=nsm_score_threshold,
anchor_4dtensors=anchor_4dtensors)
clzbbe_tensor = classifier_net_fpn(
input_tensors=[P2, P3, P4, P5],
ishape=ishape,
roi_tensor=roi_tensor,
unified_roi_size=unified_roi_size,
k0=k0,
num_of_classes=num_of_classes,
fc_denses=fc_denses)
output_tensor = output_block(
input_tensor=clzbbe_tensor,
roi_tensor=roi_tensor,
num_of_rois=max_num_of_rois,
ishape=ishape)
rpn_model = Model(inputs=input_tensor, outputs=roi_tensor, name='RPN')
detection_model = Model(inputs=input_tensor, outputs=output_tensor, name='DETECTION')
rpn_model.compile(optimizer=Adam(lr=0.001), loss=dumpy_loss)
detection_model.compile(optimizer=Adam(lr=0.001), loss=dumpy_loss)
return rpn_model, detection_model
| [
"net_blocks.classifier_net_non_fpn",
"net_blocks.classifier_net_fpn",
"tensorflow.math.log",
"tensorflow.keras.backend.categorical_crossentropy",
"net_blocks.rpn",
"tensorflow.keras.layers.Input",
"tensorflow.keras.backend.mean",
"net_blocks.nsm_block_non_fpn",
"net_blocks.nsm_block_fpn",
"tensorflow.keras.backend.switch",
"tensorflow.math.reduce_max",
"tensorflow.math.equal",
"tensorflow.keras.models.Model",
"net_blocks.fpn",
"net_blocks.output_block",
"tensorflow.reshape",
"tensorflow.keras.backend.sum",
"net_blocks.non_fpn",
"tensorflow.keras.optimizers.Adam",
"tensorflow.keras.backend.abs"
] | [((298, 318), 'tensorflow.keras.backend.abs', 'abs', (['(y_true - y_pred)'], {}), '(y_true - y_pred)\n', (301, 318), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((324, 400), 'tensorflow.keras.backend.switch', 'switch', (['(x < HUBER_DELTA)', '(0.5 * x ** 2)', '(HUBER_DELTA * (x - 0.5 * HUBER_DELTA))'], {}), '(x < HUBER_DELTA, 0.5 * x ** 2, HUBER_DELTA * (x - 0.5 * HUBER_DELTA))\n', (330, 400), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((538, 558), 'tensorflow.keras.backend.abs', 'abs', (['(y_true - y_pred)'], {}), '(y_true - y_pred)\n', (541, 558), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((1109, 1160), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'true_clz_4dtensor', 'shape': '[-1, 2]'}), '(tensor=true_clz_4dtensor, shape=[-1, 2])\n', (1119, 1160), True, 'import tensorflow as tf\n'), ((1195, 1246), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'true_bbe_4dtensor', 'shape': '[-1, 4]'}), '(tensor=true_bbe_4dtensor, shape=[-1, 4])\n', (1205, 1246), True, 'import tensorflow as tf\n'), ((1281, 1332), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'pred_clz_4dtensor', 'shape': '[-1, 2]'}), '(tensor=pred_clz_4dtensor, shape=[-1, 2])\n', (1291, 1332), True, 'import tensorflow as tf\n'), ((1367, 1418), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'pred_bbe_4dtensor', 'shape': '[-1, 4]'}), '(tensor=pred_bbe_4dtensor, shape=[-1, 4])\n', (1377, 1418), True, 'import tensorflow as tf\n'), ((1640, 1716), 'tensorflow.keras.backend.categorical_crossentropy', 'categorical_crossentropy', ([], {'target': 'true_clz_2dtensor', 'output': 'pred_clz_2dtensor'}), '(target=true_clz_2dtensor, output=pred_clz_2dtensor)\n', (1664, 1716), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((1808, 1829), 'tensorflow.keras.backend.sum', 'sum', ([], {'x': 'L_bbe', 'axis': '(-1)'}), '(x=L_bbe, axis=-1)\n', (1811, 1829), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((2502, 2566), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'true_clz_3dtensor', 'shape': '[-1, num_of_classes]'}), '(tensor=true_clz_3dtensor, shape=[-1, num_of_classes])\n', (2512, 2566), True, 'import tensorflow as tf\n'), ((2620, 2671), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'true_bbe_3dtensor', 'shape': '[-1, 4]'}), '(tensor=true_bbe_3dtensor, shape=[-1, 4])\n', (2630, 2671), True, 'import tensorflow as tf\n'), ((2712, 2776), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'pred_clz_3dtensor', 'shape': '[-1, num_of_classes]'}), '(tensor=pred_clz_3dtensor, shape=[-1, num_of_classes])\n', (2722, 2776), True, 'import tensorflow as tf\n'), ((2830, 2881), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'pred_bbe_3dtensor', 'shape': '[-1, 4]'}), '(tensor=pred_bbe_3dtensor, shape=[-1, 4])\n', (2840, 2881), True, 'import tensorflow as tf\n'), ((3104, 3177), 'tensorflow.math.reduce_max', 'tf.math.reduce_max', ([], {'input_tensor': 'true_clz_3dtensor', 'axis': '(2)', 'keepdims': '(True)'}), '(input_tensor=true_clz_3dtensor, axis=2, keepdims=True)\n', (3122, 3177), True, 'import tensorflow as tf\n'), ((3227, 3272), 'tensorflow.reshape', 'tf.reshape', ([], {'tensor': 'valid_3dtensor', 'shape': '[-1]'}), '(tensor=valid_3dtensor, shape=[-1])\n', (3237, 3272), True, 'import tensorflow as tf\n'), ((3313, 3389), 
'tensorflow.keras.backend.categorical_crossentropy', 'categorical_crossentropy', ([], {'target': 'true_clz_2dtensor', 'output': 'pred_clz_2dtensor'}), '(target=true_clz_2dtensor, output=pred_clz_2dtensor)\n', (3337, 3389), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((3493, 3514), 'tensorflow.keras.backend.sum', 'sum', ([], {'x': 'L_bbe', 'axis': '(-1)'}), '(x=L_bbe, axis=-1)\n', (3496, 3514), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((4294, 4313), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'ishape'}), '(shape=ishape)\n', (4299, 4313), False, 'from tensorflow.keras.layers import Input\n'), ((4328, 4361), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': '(max_num_of_rois, 4)'}), '(shape=(max_num_of_rois, 4))\n', (4333, 4361), False, 'from tensorflow.keras.layers import Input\n'), ((4378, 4480), 'net_blocks.non_fpn', 'non_fpn', ([], {'input_tensor': 'input_tensor', 'block_settings': 'block_settings', 'trainable': 'base_block_trainable'}), '(input_tensor=input_tensor, block_settings=block_settings, trainable\n =base_block_trainable)\n', (4385, 4480), False, 'from net_blocks import non_fpn, rpn, nsm_block_non_fpn, classifier_net_non_fpn, output_block\n'), ((4507, 4557), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'fmap_tensor', 'k': 'k', 'f': 'rpn_head_dim'}), '(input_tensor=fmap_tensor, k=k, f=rpn_head_dim)\n', (4510, 4557), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((4576, 4759), 'net_blocks.classifier_net_non_fpn', 'classifier_net_non_fpn', ([], {'input_tensor': 'fmap_tensor', 'ishape': 'ishape', 'roi_tensor': 'roi_tensor', 'unified_roi_size': 'unified_roi_size', 'num_of_classes': 'num_of_classes', 'fc_denses': 'fc_denses'}), '(input_tensor=fmap_tensor, ishape=ishape, roi_tensor=\n roi_tensor, unified_roi_size=unified_roi_size, num_of_classes=\n num_of_classes, fc_denses=fc_denses)\n', (4598, 4759), False, 'from net_blocks import non_fpn, rpn, nsm_block_non_fpn, classifier_net_non_fpn, output_block\n'), ((4782, 4847), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'input_tensor', 'outputs': 'rpn_clzbbe_tensor', 'name': '"""RPN"""'}), "(inputs=input_tensor, outputs=rpn_clzbbe_tensor, name='RPN')\n", (4787, 4847), False, 'from tensorflow.keras.models import Model\n'), ((4877, 4963), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': '[input_tensor, roi_tensor]', 'outputs': 'clzbbe_tensor', 'name': '"""DETECTION"""'}), "(inputs=[input_tensor, roi_tensor], outputs=clzbbe_tensor, name=\n 'DETECTION')\n", (4882, 4963), False, 'from tensorflow.keras.models import Model\n'), ((5792, 5811), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'ishape'}), '(shape=ishape)\n', (5797, 5811), False, 'from tensorflow.keras.layers import Input\n'), ((5827, 5929), 'net_blocks.non_fpn', 'non_fpn', ([], {'input_tensor': 'input_tensor', 'block_settings': 'block_settings', 'trainable': 'base_block_trainable'}), '(input_tensor=input_tensor, block_settings=block_settings, trainable\n =base_block_trainable)\n', (5834, 5929), False, 'from net_blocks import non_fpn, rpn, nsm_block_non_fpn, classifier_net_non_fpn, output_block\n'), ((5956, 6006), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'fmap_tensor', 'k': 'k', 'f': 'rpn_head_dim'}), '(input_tensor=fmap_tensor, k=k, f=rpn_head_dim)\n', (5959, 6006), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((6022, 
6234), 'net_blocks.nsm_block_non_fpn', 'nsm_block_non_fpn', ([], {'input_tensor': 'rpn_clzbbe_tensor', 'ishape': 'ishape', 'num_of_rois': 'max_num_of_rois', 'nsm_iou_threshold': 'nsm_iou_threshold', 'nsm_score_threshold': 'nsm_score_threshold', 'anchor_4dtensor': 'anchor_4dtensor'}), '(input_tensor=rpn_clzbbe_tensor, ishape=ishape,\n num_of_rois=max_num_of_rois, nsm_iou_threshold=nsm_iou_threshold,\n nsm_score_threshold=nsm_score_threshold, anchor_4dtensor=anchor_4dtensor)\n', (6039, 6234), False, 'from net_blocks import non_fpn, rpn, nsm_block_non_fpn, classifier_net_non_fpn, output_block\n'), ((6262, 6445), 'net_blocks.classifier_net_non_fpn', 'classifier_net_non_fpn', ([], {'input_tensor': 'fmap_tensor', 'ishape': 'ishape', 'roi_tensor': 'roi_tensor', 'unified_roi_size': 'unified_roi_size', 'num_of_classes': 'num_of_classes', 'fc_denses': 'fc_denses'}), '(input_tensor=fmap_tensor, ishape=ishape, roi_tensor=\n roi_tensor, unified_roi_size=unified_roi_size, num_of_classes=\n num_of_classes, fc_denses=fc_denses)\n', (6284, 6445), False, 'from net_blocks import non_fpn, rpn, nsm_block_non_fpn, classifier_net_non_fpn, output_block\n'), ((6472, 6584), 'net_blocks.output_block', 'output_block', ([], {'input_tensor': 'clzbbe_tensor', 'roi_tensor': 'roi_tensor', 'num_of_rois': 'max_num_of_rois', 'ishape': 'ishape'}), '(input_tensor=clzbbe_tensor, roi_tensor=roi_tensor, num_of_rois\n =max_num_of_rois, ishape=ishape)\n', (6484, 6584), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((6606, 6664), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'input_tensor', 'outputs': 'roi_tensor', 'name': '"""RPN"""'}), "(inputs=input_tensor, outputs=roi_tensor, name='RPN')\n", (6611, 6664), False, 'from tensorflow.keras.models import Model\n'), ((6684, 6751), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'input_tensor', 'outputs': 'output_tensor', 'name': '"""DETECTION"""'}), "(inputs=input_tensor, outputs=output_tensor, name='DETECTION')\n", (6689, 6751), False, 'from tensorflow.keras.models import Model\n'), ((7689, 7708), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'ishape'}), '(shape=ishape)\n', (7694, 7708), False, 'from tensorflow.keras.layers import Input\n'), ((7723, 7756), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': '(max_num_of_rois, 4)'}), '(shape=(max_num_of_rois, 4))\n', (7728, 7756), False, 'from tensorflow.keras.layers import Input\n'), ((7775, 7922), 'net_blocks.fpn', 'fpn', ([], {'input_tensor': 'input_tensor', 'block_settings': 'block_settings', 'top_down_pyramid_size': 'top_down_pyramid_size', 'trainable': 'base_block_trainable'}), '(input_tensor=input_tensor, block_settings=block_settings,\n top_down_pyramid_size=top_down_pyramid_size, trainable=base_block_trainable\n )\n', (7778, 7922), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((7953, 7995), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P2', 'k': 'k1', 'f': 'rpn_head_dim'}), '(input_tensor=P2, k=k1, f=rpn_head_dim)\n', (7956, 7995), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((8022, 8064), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P3', 'k': 'k2', 'f': 'rpn_head_dim'}), '(input_tensor=P3, k=k2, f=rpn_head_dim)\n', (8025, 8064), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((8091, 8133), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P4', 'k': 'k3', 'f': 'rpn_head_dim'}), '(input_tensor=P4, 
k=k3, f=rpn_head_dim)\n', (8094, 8133), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((8160, 8202), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P5', 'k': 'k4', 'f': 'rpn_head_dim'}), '(input_tensor=P5, k=k4, f=rpn_head_dim)\n', (8163, 8202), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((8221, 8411), 'net_blocks.classifier_net_fpn', 'classifier_net_fpn', ([], {'input_tensors': '[P2, P3, P4, P5]', 'ishape': 'ishape', 'roi_tensor': 'roi_tensor', 'unified_roi_size': 'unified_roi_size', 'k0': 'k0', 'num_of_classes': 'num_of_classes', 'fc_denses': 'fc_denses'}), '(input_tensors=[P2, P3, P4, P5], ishape=ishape,\n roi_tensor=roi_tensor, unified_roi_size=unified_roi_size, k0=k0,\n num_of_classes=num_of_classes, fc_denses=fc_denses)\n', (8239, 8411), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((8439, 8591), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'input_tensor', 'outputs': '[lvl1_rpn_clzbbe_tensor, lvl2_rpn_clzbbe_tensor, lvl3_rpn_clzbbe_tensor,\n lvl4_rpn_clzbbe_tensor]', 'name': '"""RPN"""'}), "(inputs=input_tensor, outputs=[lvl1_rpn_clzbbe_tensor,\n lvl2_rpn_clzbbe_tensor, lvl3_rpn_clzbbe_tensor, lvl4_rpn_clzbbe_tensor],\n name='RPN')\n", (8444, 8591), False, 'from tensorflow.keras.models import Model\n'), ((8626, 8712), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': '[input_tensor, roi_tensor]', 'outputs': 'clzbbe_tensor', 'name': '"""DETECTION"""'}), "(inputs=[input_tensor, roi_tensor], outputs=clzbbe_tensor, name=\n 'DETECTION')\n", (8631, 8712), False, 'from tensorflow.keras.models import Model\n'), ((9740, 9759), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'ishape'}), '(shape=ishape)\n', (9745, 9759), False, 'from tensorflow.keras.layers import Input\n'), ((9778, 9925), 'net_blocks.fpn', 'fpn', ([], {'input_tensor': 'input_tensor', 'block_settings': 'block_settings', 'top_down_pyramid_size': 'top_down_pyramid_size', 'trainable': 'base_block_trainable'}), '(input_tensor=input_tensor, block_settings=block_settings,\n top_down_pyramid_size=top_down_pyramid_size, trainable=base_block_trainable\n )\n', (9781, 9925), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((9956, 9998), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P2', 'k': 'k1', 'f': 'rpn_head_dim'}), '(input_tensor=P2, k=k1, f=rpn_head_dim)\n', (9959, 9998), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((10025, 10067), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P3', 'k': 'k2', 'f': 'rpn_head_dim'}), '(input_tensor=P3, k=k2, f=rpn_head_dim)\n', (10028, 10067), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((10094, 10136), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P4', 'k': 'k3', 'f': 'rpn_head_dim'}), '(input_tensor=P4, k=k3, f=rpn_head_dim)\n', (10097, 10136), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((10163, 10205), 'net_blocks.rpn', 'rpn', ([], {'input_tensor': 'P5', 'k': 'k4', 'f': 'rpn_head_dim'}), '(input_tensor=P5, k=k4, f=rpn_head_dim)\n', (10166, 10205), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((10221, 10515), 'net_blocks.nsm_block_fpn', 'nsm_block_fpn', ([], {'input_tensors': '[lvl1_rpn_clzbbe_tensor, lvl2_rpn_clzbbe_tensor, lvl3_rpn_clzbbe_tensor,\n lvl4_rpn_clzbbe_tensor]', 
'ishape': 'ishape', 'num_of_rois': 'max_num_of_rois', 'nsm_iou_threshold': 'nsm_iou_threshold', 'nsm_score_threshold': 'nsm_score_threshold', 'anchor_4dtensors': 'anchor_4dtensors'}), '(input_tensors=[lvl1_rpn_clzbbe_tensor, lvl2_rpn_clzbbe_tensor,\n lvl3_rpn_clzbbe_tensor, lvl4_rpn_clzbbe_tensor], ishape=ishape,\n num_of_rois=max_num_of_rois, nsm_iou_threshold=nsm_iou_threshold,\n nsm_score_threshold=nsm_score_threshold, anchor_4dtensors=anchor_4dtensors)\n', (10234, 10515), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((10552, 10742), 'net_blocks.classifier_net_fpn', 'classifier_net_fpn', ([], {'input_tensors': '[P2, P3, P4, P5]', 'ishape': 'ishape', 'roi_tensor': 'roi_tensor', 'unified_roi_size': 'unified_roi_size', 'k0': 'k0', 'num_of_classes': 'num_of_classes', 'fc_denses': 'fc_denses'}), '(input_tensors=[P2, P3, P4, P5], ishape=ishape,\n roi_tensor=roi_tensor, unified_roi_size=unified_roi_size, k0=k0,\n num_of_classes=num_of_classes, fc_denses=fc_denses)\n', (10570, 10742), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((10774, 10886), 'net_blocks.output_block', 'output_block', ([], {'input_tensor': 'clzbbe_tensor', 'roi_tensor': 'roi_tensor', 'num_of_rois': 'max_num_of_rois', 'ishape': 'ishape'}), '(input_tensor=clzbbe_tensor, roi_tensor=roi_tensor, num_of_rois\n =max_num_of_rois, ishape=ishape)\n', (10786, 10886), False, 'from net_blocks import fpn, rpn, nsm_block_fpn, classifier_net_fpn, output_block\n'), ((10908, 10966), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'input_tensor', 'outputs': 'roi_tensor', 'name': '"""RPN"""'}), "(inputs=input_tensor, outputs=roi_tensor, name='RPN')\n", (10913, 10966), False, 'from tensorflow.keras.models import Model\n'), ((10986, 11053), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'input_tensor', 'outputs': 'output_tensor', 'name': '"""DETECTION"""'}), "(inputs=input_tensor, outputs=output_tensor, name='DETECTION')\n", (10991, 11053), False, 'from tensorflow.keras.models import Model\n'), ((1845, 1856), 'tensorflow.keras.backend.mean', 'mean', (['L_clz'], {}), '(L_clz)\n', (1849, 1856), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((3536, 3547), 'tensorflow.keras.backend.mean', 'mean', (['L_clz'], {}), '(L_clz)\n', (3540, 3547), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((1537, 1578), 'tensorflow.math.equal', 'tf.math.equal', ([], {'x': 'pred_clz_2dtensor', 'y': '(0.0)'}), '(x=pred_clz_2dtensor, y=0.0)\n', (1550, 1578), True, 'import tensorflow as tf\n'), ((1866, 1903), 'tensorflow.keras.backend.mean', 'mean', (['(true_clz_2dtensor[:, 0] * L_bbe)'], {}), '(true_clz_2dtensor[:, 0] * L_bbe)\n', (1870, 1903), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((3006, 3047), 'tensorflow.math.equal', 'tf.math.equal', ([], {'x': 'pred_clz_2dtensor', 'y': '(0.0)'}), '(x=pred_clz_2dtensor, y=0.0)\n', (3019, 3047), True, 'import tensorflow as tf\n'), ((3557, 3585), 'tensorflow.keras.backend.mean', 'mean', (['(valid_1dtensor * L_bbe)'], {}), '(valid_1dtensor * L_bbe)\n', (3561, 3585), False, 'from tensorflow.keras.backend import mean, sum, categorical_crossentropy, abs, switch\n'), ((5001, 5015), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (5005, 5015), False, 'from tensorflow.keras.optimizers import Adam\n'), ((5073, 5087), 
'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (5077, 5087), False, 'from tensorflow.keras.optimizers import Adam\n'), ((6782, 6796), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (6786, 6796), False, 'from tensorflow.keras.optimizers import Adam\n'), ((6850, 6864), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (6854, 6864), False, 'from tensorflow.keras.optimizers import Adam\n'), ((8750, 8764), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (8754, 8764), False, 'from tensorflow.keras.optimizers import Adam\n'), ((8867, 8881), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (8871, 8881), False, 'from tensorflow.keras.optimizers import Adam\n'), ((11084, 11098), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (11088, 11098), False, 'from tensorflow.keras.optimizers import Adam\n'), ((11152, 11166), 'tensorflow.keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.001)'}), '(lr=0.001)\n', (11156, 11166), False, 'from tensorflow.keras.optimizers import Adam\n'), ((600, 622), 'tensorflow.math.log', 'tf.math.log', (['(b * x + 1)'], {}), '(b * x + 1)\n', (611, 622), True, 'import tensorflow as tf\n')] |
# Themes are containers which are composed of:
# Theme Components:
# are containers within a theme that can specify an item type the theme colors/styles target
# Theme Colors:
# items that are added to a theme component and set colors
# Theme Styles:
# items that are added to a theme component and set styles
# The theme can be:
# bound as the default theme. This will have a global effect across all windows and propagate.
# bound to a container. This will propagate to its children if applicable theme components are in the theme.
#     bound to an item type, an item container, or a specific item if applicable theme components are in the theme.
# Theme components must have a specified item type. This can either be mvAll for all items or a specific item type.
# Style and color items have a named constant and will apply that constant to their component's named item type.
# Style and color items must have a named category. Constants contain their category in the name.
import dearpygui.dearpygui as dpg
def theme():
with dpg.window(label="Tutorial", pos=(20, 50), width=275, height=225) as win1:
t1 = dpg.add_input_text(default_value="some text")
t2 = dpg.add_input_text(default_value="some text")
with dpg.child_window(height=100):
t3 = dpg.add_input_text(default_value="some text")
dpg.add_input_int()
dpg.add_input_text(default_value="some text")
with dpg.window(label="Tutorial", pos=(320, 50), width=275, height=225) as win2:
dpg.add_input_text(default_value="some text")
dpg.add_input_int()
with dpg.theme() as global_theme:
with dpg.theme_component(dpg.mvAll):
dpg.add_theme_color(dpg.mvThemeCol_FrameBg, (255, 140, 23), category=dpg.mvThemeCat_Core)
dpg.add_theme_style(dpg.mvStyleVar_FrameRounding, 5, category=dpg.mvThemeCat_Core)
with dpg.theme_component(dpg.mvInputInt):
dpg.add_theme_color(dpg.mvThemeCol_FrameBg, (140, 255, 23), category=dpg.mvThemeCat_Core)
dpg.add_theme_style(dpg.mvStyleVar_FrameRounding, 5, category=dpg.mvThemeCat_Core)
dpg.bind_theme(global_theme)
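    # A theme can also be bound more narrowly, e.g. dpg.bind_item_theme(t1, global_theme)
    # applies it to a single item instead of globally (t1 is the first input created above).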
# control style
dpg.show_style_editor()
dpg.create_viewport(title='Custom Title', width=800, height=600)
dpg.setup_dearpygui()
dpg.show_viewport()
dpg.start_dearpygui()
if __name__ == '__main__':
dpg.create_context()
theme()
dpg.destroy_context() | [
"dearpygui.dearpygui.show_viewport",
"dearpygui.dearpygui.add_input_text",
"dearpygui.dearpygui.create_viewport",
"dearpygui.dearpygui.add_input_int",
"dearpygui.dearpygui.create_context",
"dearpygui.dearpygui.show_style_editor",
"dearpygui.dearpygui.destroy_context",
"dearpygui.dearpygui.add_theme_color",
"dearpygui.dearpygui.theme",
"dearpygui.dearpygui.bind_theme",
"dearpygui.dearpygui.setup_dearpygui",
"dearpygui.dearpygui.window",
"dearpygui.dearpygui.add_theme_style",
"dearpygui.dearpygui.child_window",
"dearpygui.dearpygui.start_dearpygui",
"dearpygui.dearpygui.theme_component"
] | [((2126, 2154), 'dearpygui.dearpygui.bind_theme', 'dpg.bind_theme', (['global_theme'], {}), '(global_theme)\n', (2140, 2154), True, 'import dearpygui.dearpygui as dpg\n'), ((2180, 2203), 'dearpygui.dearpygui.show_style_editor', 'dpg.show_style_editor', ([], {}), '()\n', (2201, 2203), True, 'import dearpygui.dearpygui as dpg\n'), ((2209, 2273), 'dearpygui.dearpygui.create_viewport', 'dpg.create_viewport', ([], {'title': '"""Custom Title"""', 'width': '(800)', 'height': '(600)'}), "(title='Custom Title', width=800, height=600)\n", (2228, 2273), True, 'import dearpygui.dearpygui as dpg\n'), ((2278, 2299), 'dearpygui.dearpygui.setup_dearpygui', 'dpg.setup_dearpygui', ([], {}), '()\n', (2297, 2299), True, 'import dearpygui.dearpygui as dpg\n'), ((2304, 2323), 'dearpygui.dearpygui.show_viewport', 'dpg.show_viewport', ([], {}), '()\n', (2321, 2323), True, 'import dearpygui.dearpygui as dpg\n'), ((2328, 2349), 'dearpygui.dearpygui.start_dearpygui', 'dpg.start_dearpygui', ([], {}), '()\n', (2347, 2349), True, 'import dearpygui.dearpygui as dpg\n'), ((2383, 2403), 'dearpygui.dearpygui.create_context', 'dpg.create_context', ([], {}), '()\n', (2401, 2403), True, 'import dearpygui.dearpygui as dpg\n'), ((2420, 2441), 'dearpygui.dearpygui.destroy_context', 'dpg.destroy_context', ([], {}), '()\n', (2439, 2441), True, 'import dearpygui.dearpygui as dpg\n'), ((1038, 1103), 'dearpygui.dearpygui.window', 'dpg.window', ([], {'label': '"""Tutorial"""', 'pos': '(20, 50)', 'width': '(275)', 'height': '(225)'}), "(label='Tutorial', pos=(20, 50), width=275, height=225)\n", (1048, 1103), True, 'import dearpygui.dearpygui as dpg\n'), ((1126, 1171), 'dearpygui.dearpygui.add_input_text', 'dpg.add_input_text', ([], {'default_value': '"""some text"""'}), "(default_value='some text')\n", (1144, 1171), True, 'import dearpygui.dearpygui as dpg\n'), ((1185, 1230), 'dearpygui.dearpygui.add_input_text', 'dpg.add_input_text', ([], {'default_value': '"""some text"""'}), "(default_value='some text')\n", (1203, 1230), True, 'import dearpygui.dearpygui as dpg\n'), ((1377, 1422), 'dearpygui.dearpygui.add_input_text', 'dpg.add_input_text', ([], {'default_value': '"""some text"""'}), "(default_value='some text')\n", (1395, 1422), True, 'import dearpygui.dearpygui as dpg\n'), ((1433, 1499), 'dearpygui.dearpygui.window', 'dpg.window', ([], {'label': '"""Tutorial"""', 'pos': '(320, 50)', 'width': '(275)', 'height': '(225)'}), "(label='Tutorial', pos=(320, 50), width=275, height=225)\n", (1443, 1499), True, 'import dearpygui.dearpygui as dpg\n'), ((1517, 1562), 'dearpygui.dearpygui.add_input_text', 'dpg.add_input_text', ([], {'default_value': '"""some text"""'}), "(default_value='some text')\n", (1535, 1562), True, 'import dearpygui.dearpygui as dpg\n'), ((1571, 1590), 'dearpygui.dearpygui.add_input_int', 'dpg.add_input_int', ([], {}), '()\n', (1588, 1590), True, 'import dearpygui.dearpygui as dpg\n'), ((1601, 1612), 'dearpygui.dearpygui.theme', 'dpg.theme', ([], {}), '()\n', (1610, 1612), True, 'import dearpygui.dearpygui as dpg\n'), ((1244, 1272), 'dearpygui.dearpygui.child_window', 'dpg.child_window', ([], {'height': '(100)'}), '(height=100)\n', (1260, 1272), True, 'import dearpygui.dearpygui as dpg\n'), ((1291, 1336), 'dearpygui.dearpygui.add_input_text', 'dpg.add_input_text', ([], {'default_value': '"""some text"""'}), "(default_value='some text')\n", (1309, 1336), True, 'import dearpygui.dearpygui as dpg\n'), ((1349, 1368), 'dearpygui.dearpygui.add_input_int', 'dpg.add_input_int', ([], {}), '()\n', (1366, 1368), True, 'import 
dearpygui.dearpygui as dpg\n'), ((1644, 1674), 'dearpygui.dearpygui.theme_component', 'dpg.theme_component', (['dpg.mvAll'], {}), '(dpg.mvAll)\n', (1663, 1674), True, 'import dearpygui.dearpygui as dpg\n'), ((1688, 1782), 'dearpygui.dearpygui.add_theme_color', 'dpg.add_theme_color', (['dpg.mvThemeCol_FrameBg', '(255, 140, 23)'], {'category': 'dpg.mvThemeCat_Core'}), '(dpg.mvThemeCol_FrameBg, (255, 140, 23), category=dpg.\n mvThemeCat_Core)\n', (1707, 1782), True, 'import dearpygui.dearpygui as dpg\n'), ((1790, 1877), 'dearpygui.dearpygui.add_theme_style', 'dpg.add_theme_style', (['dpg.mvStyleVar_FrameRounding', '(5)'], {'category': 'dpg.mvThemeCat_Core'}), '(dpg.mvStyleVar_FrameRounding, 5, category=dpg.\n mvThemeCat_Core)\n', (1809, 1877), True, 'import dearpygui.dearpygui as dpg\n'), ((1887, 1922), 'dearpygui.dearpygui.theme_component', 'dpg.theme_component', (['dpg.mvInputInt'], {}), '(dpg.mvInputInt)\n', (1906, 1922), True, 'import dearpygui.dearpygui as dpg\n'), ((1936, 2030), 'dearpygui.dearpygui.add_theme_color', 'dpg.add_theme_color', (['dpg.mvThemeCol_FrameBg', '(140, 255, 23)'], {'category': 'dpg.mvThemeCat_Core'}), '(dpg.mvThemeCol_FrameBg, (140, 255, 23), category=dpg.\n mvThemeCat_Core)\n', (1955, 2030), True, 'import dearpygui.dearpygui as dpg\n'), ((2038, 2125), 'dearpygui.dearpygui.add_theme_style', 'dpg.add_theme_style', (['dpg.mvStyleVar_FrameRounding', '(5)'], {'category': 'dpg.mvThemeCat_Core'}), '(dpg.mvStyleVar_FrameRounding, 5, category=dpg.\n mvThemeCat_Core)\n', (2057, 2125), True, 'import dearpygui.dearpygui as dpg\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
:::==== :::==== ::: :::= ===
::: === ::: === ::: :::=====
=== === === === === ========
=== === === === === === ====
====== ======= === === ===
Developer: Chris "cmaddy" Maddalena
Version: 2.0.0 "Huginn"
Description: Observation, Detection, and Investigation of Networks
ODIN was designed to assist with OSINT automation for penetration testing clients and
their networks, covering both IP-based infrastructure and the social, people side. Provide a client's name and
some domains to gather information from sources like RDAP, DNS, Shodan, and
so much more.
ODIN is made possible through the help, input, and work provided by others. Therefore,
this project is entirely open source and available to all to use/modify.
"""
import os
import click
from multiprocess import Process,Manager
from lib import reporter,asciis,verification,htmlreporter,grapher,helpers
VERSION = "2.0.0"
CODENAME = "HUGINN"
def setup_reports(client):
"""Function to create a reports directory structure for the target organization."""
if not os.path.exists("reports/{}".format(client)):
try:
os.makedirs("reports/{}".format(client))
os.makedirs("reports/{}/screenshots".format(client))
os.makedirs("reports/{}/file_downloads".format(client))
os.makedirs("reports/{}/html_report".format(client))
except OSError as error:
click.secho("[!] Could not create the reports directory!",fg="red")
click.secho("L.. Details: {}".format(error),fg="red")
# Setup a class for CLICK
class AliasedGroup(click.Group):
"""Allows commands to be called by their first unique character."""
def get_command(self,ctx,cmd_name):
"""
Allows commands to be called by their first unique character
:param ctx: Context information from click
:param cmd_name: Calling command name
:return:
"""
command = click.Group.get_command(self,ctx,cmd_name)
if command is not None:
return command
matches = [x for x in self.list_commands(ctx)
if x.startswith(cmd_name)]
if not matches:
return None
elif len(matches) == 1:
return click.Group.get_command(self,ctx,matches[0])
ctx.fail("Too many matches: %s" % ", ".join(sorted(matches)))
# That's right, we support -h and --help! Not using -h for an argument like 'host'! ;D
CONTEXT_SETTINGS = dict(help_option_names=['-h','--help'],max_content_width=200)
@click.group(cls=AliasedGroup,context_settings=CONTEXT_SETTINGS)
# Note: The following function descriptors will look weird and some will contain '\n' in spots.
# This is necessary for CLICK. These are displayed with the help info and need to be written
# just like we want them to be displayed in the user's terminal. Whitespace really matters.
def odin():
"""
Welcome to ODIN! To use ODIN, select a module you wish to run. Functions are split into modules
to support a few different use cases.\n
Run 'odin.py <MODULE> --help' for more information on a specific module.
"""
# Everything starts here
pass
# The OSINT module -- This is the primary module that does all the stuff
# Basic, required arguments
@odin.command(name='osint',short_help="The full OSINT suite of tools will be run (see README).")
@click.option('-o','--organization',help='The target client, such as "ABC Company," to use for \
report titles and searches for domains and cloud storage buckets.',required=True)
@click.option('-d','--domain',help="The target's primary domain, such as example.com. Use \
whatever the target uses for email and their main website. Provide additional domains in a scope \
file using --scope-file.",required=True)
# Optional arguments
@click.option('-sf','--scope-file',type=click.Path(exists=True,readable=True,\
resolve_path=True),help="A text file containing additional domain names you want to include. IP \
addresses can also be provided, if necessary. List each one on a new line.",required=False)
@click.option('--whoxy-limit',default=10,help="The maximum number of domains discovered via \
reverse WHOIS that ODIN will resolve and use when searching services like Censys and Shodan. \
You may get hundreds of results from reverse WHOIS, so this is intended to save time and \
API credits. Default is 10 domains; setting it much above 20 or 30 is not recommended. \
It is preferable to perform a search using a tool like Vincent Yiu's DomLink and then provide \
the newly discovered domains in your scope file with --scope-file.")
@click.option('--typo',is_flag=True,help="Generate a list of lookalike domain names for the \
provided domain (--domain), check if they have been registered, and then check those domains \
against URLVoid and Cymon.io to see if the domains or associated IP addresses have been \
flagged as malicious.")
# File searching arguments
@click.option('--files',is_flag=True,help="Use this option to use Google to search for files \
under the provided domain (--domain), download files, and extract metadata.")
@click.option('-e','--ext',default="all",help="File extensions to look for with --files. \
Default is 'all' or you can pick from key, pdf, doc, docx, xls, xlsx, and ppt.")
# Cloud-related arguments
@click.option('-w','--aws',help="A list of additional keywords to be used when searching for \
cloud storage buckets.",type=click.Path(exists=True,readable=True,resolve_path=True))
@click.option('-wf','--aws-fixes',help="A list of strings to be added to the start and end of \
the cloud storage bucket names.",type=click.Path(exists=True,readable=True,resolve_path=True))
# Reporting-related arguments
@click.option('--html',is_flag=True,help="Create an HTML report at the end for easy browsing.")
@click.option('--graph',is_flag=True,help="Create a Neo4j graph database from the completed \
SQLite3 database.")
@click.option('--nuke',is_flag=True,help="Clear the Neo4j project before converting the \
database. This is only used with --graph.")
@click.option('--screenshots',is_flag=True,help="Attempt to take screenshots of discovered \
web services.")
@click.option('--unsafe',is_flag=True,help="Adding this flag will spawn the headless Chrome \
browser with the --no-sandbox command line flag. This is NOT recommended for any users who are \
NOT running ODIN on a Kali Linux VM as root. Chrome will not run as the root user on Kali \
without this option.")
# Pass the above arguments on to your osint function
@click.pass_context
def osint(self,organization,domain,files,ext,scope_file,aws,aws_fixes,html,
screenshots,graph,nuke,whoxy_limit,typo,unsafe):
"""
The OSINT toolkit:
    This is ODIN's primary module. ODIN will take the target organization, domain, and other data
provided and hunt for information. On the human side, ODIN looks for employee names,
email addresses, and social media profiles. Names and emails are cross-referenced with
HaveIBeenPwned, Twitter's API, and search engines to collect additional information.
ODIN also uses various tools and APIs to collect information on the provided IP addresses
and domain names, including things like DNS and IP address history.
View the wiki for the full details, reporting information, and lists of API keys.
Note: If providing any IP addresses in a scope file, acceptable IP addresses/ranges include:
* Single Address: 8.8.8.8
* Basic CIDR: 172.16.58.3/24
* Nmap-friendly Range: 8.8.8.8-10
* Underscores? OK: 8.8.8.8_172.16.17.32
"""
click.clear()
click.secho(asciis.print_art(),fg="magenta")
click.secho("\tRelease v{}, {}".format(VERSION,CODENAME),fg="magenta")
click.secho("[+] OSINT Module Selected: ODIN will run all recon modules.",fg="green")
# Perform prep work for reporting
setup_reports(organization)
report_path = "reports/{}/".format(organization)
output_report = report_path + "OSINT_DB.db"
if __name__ == "__main__":
# Create manager server to handle variables shared between jobs
manager = Manager()
ip_list = manager.list()
domain_list = manager.list()
rev_domain_list = manager.list()
# Create reporter object and generate lists of everything, just IP addresses, and just domains
browser = helpers.setup_headless_chrome(unsafe)
report = reporter.Reporter(organization,report_path,output_report,browser)
report.create_tables()
scope,ip_list,domain_list = report.prepare_scope(ip_list,domain_list,scope_file,domain)
# Create some jobs and put Python to work!
# Job queue 1 is for the initial phase
jobs = []
# Job queue 2 is used for jobs using data from job queue 1
more_jobs = []
# Job queue 3 is used for jobs that take a while and use the progress bar, i.e. AWS enum
even_more_jobs = []
# Phase 1 jobs
company_info = Process(name="Company Info Collector",
target=report.create_company_info_table,
args=(domain,))
jobs.append(company_info)
employee_report = Process(name="<NAME>",
target=report.create_people_table,
args=(domain_list,rev_domain_list,organization))
jobs.append(employee_report)
domain_report = Process(name="Domain and IP Hunter",
target=report.create_domain_report_table,
args=(organization,scope,ip_list,domain_list,rev_domain_list,whoxy_limit))
jobs.append(domain_report)
# Phase 2 jobs
shodan_report = Process(name="<NAME>",
target=report.create_shodan_table,
args=(ip_list,domain_list))
more_jobs.append(shodan_report)
if typo:
lookalike_report = Process(name="Lookalike Domain Reviewer",
target=report.create_lookalike_table,
args=(organization,domain))
more_jobs.append(lookalike_report)
if screenshots:
take_screenshots = Process(name="Screenshot Snapper",
target=report.capture_web_snapshots,
args=(report_path,browser))
more_jobs.append(take_screenshots)
if files:
files_report = Process(name="File Hunter",
target=report.create_metadata_table,
args=(domain,ext,report_path))
more_jobs.append(files_report)
# Phase 3 jobs
cloud_report = Process(name="<NAME>",
target=report.create_cloud_table,
args=(organization,domain,aws,aws_fixes))
even_more_jobs.append(cloud_report)
# Process the lists of jobs in phases, starting with phase 1
click.secho("[+] Beginning initial discovery phase! This could take some time...",fg="green")
for job in jobs:
click.secho("[+] Starting new process: {}".format(job.name),fg="green")
job.start()
for job in jobs:
job.join()
# Wait for phase 1 and then begin phase 2 jobs
click.secho("[+] Initial discovery is complete! Proceeding with additional queries...",fg="green")
for job in more_jobs:
click.secho("[+] Starting new process: {}".format(job.name),fg="green")
job.start()
for job in more_jobs:
job.join()
# Wait for phase 2 and then begin phase 3 jobs
click.secho("[+] Final phase: checking the cloud and web services...",fg="green")
for job in even_more_jobs:
click.secho("[+] Starting new process: {}".format(job.name),fg="green")
job.start()
for job in even_more_jobs:
job.join()
        # All jobs are done, so close out the SQLite3 database connection
report.close_out_reporting()
click.secho("[+] Job's done! Your results are in {} and can be viewed and queried with \
any SQLite browser.".format(output_report),fg="green")
# Perform additional tasks depending on the user's command line options
if graph:
graph_reporter = grapher.Grapher(output_report)
click.secho("[+] Loading ODIN database file {} for conversion to Neo4j".format(output_report),fg="green")
if nuke:
if click.confirm(click.style("[!] You set the --nuke option. This wipes out all nodes for a \
fresh start. Proceed?",fg="red"),default=True):
try:
graph_reporter.clear_neo4j_database()
click.secho("[+] Database successfully wiped!\n",fg="green")
except Exception as error:
click.secho("[!] Failed to clear the database! Check the Neo4j console and \
your configuration and try running grapher.py again.",fg="red")
click.secho("L.. Details: {}".format(error),fg="red")
else:
click.secho("[!] You can convert your database to a graph database later. \
Run lib/grapher.py with the appropriate options.",fg="red")
try:
graph_reporter.convert()
except Exception as error:
click.secho("[!] Failed to convert the database! Check the Neo4j console and \
your configuration and try running grapher.py again.",fg="red")
click.secho("L.. Details: {}".format(error),fg="red")
if html:
click.secho("\n[+] Creating the HTML report using {}.".format(output_report),fg="green")
try:
html_reporter = htmlreporter.HTMLReporter(organization,report_path + "/html_report/",output_report)
html_reporter.generate_full_report()
except Exception as error:
click.secho("[!] Failed to create the HTML report!",fg="red")
click.secho("L.. Details: {}".format(error),fg="red")
# The VERIFY module -- No OSINT, just a way to check ownership of a list of IPs
@odin.command(name='verify',short_help="This module assists with verifying ownership of a list \
of IP addresses. This returns a csv file with SSL cert, WHOIS, and other data for verification.")
@click.option('-o','--organization',help='The target client, such as "ABC Company," to use for \
report titles and some keyword searches.',required=True)
@click.option('-sf','--scope-file',help="Name of the file with your IP addresses.",\
type=click.Path(exists=True,readable=True,resolve_path=True),required=True)
@click.option('-r','--report',default="Verification.csv",help="Output file (CSV) for the \
findings.")
# Pass the above arguments on to your verify function
@click.pass_context
def verify(self,organization,scope_file,report):
"""
The Verify module:
Uses reverse DNS, ARIN, and SSL/TLS certificate information to help you verify ownership of a
list of IP addresses.
This is only for verifying IP addresses. Domains may not have public ownership information
available. Compare the IP ownership information from ARIN and certificate information to what
you know about the presumed owner to determine ownership.
Acceptable IP addresses/ranges include:
* Single Address: 8.8.8.8
* Basic CIDR: 172.16.58.3/24
* Nmap-friendly Range: 8.8.8.8-10
* Underscores? OK: 8.8.8.8_8.8.8.10
"""
click.secho(asciis.print_art(),fg="magenta")
click.secho("\tRelease v{}, {}".format(VERSION,CODENAME),fg="magenta")
click.secho("[+] Scope Verification Module Selected: ODIN will attempt to verify who owns \
the provided IP addresses.",fg="green")
setup_reports(organization)
report_path = "reports/{}/{}".format(organization,report)
expanded_scope = []
results = {}
try:
verification.prepare_scope(scope_file,expanded_scope)
verification.perform_whois(expanded_scope,results)
verification.print_output(results,report_path)
except Exception as error:
click.secho("[!] Verification failed!",fg="red")
click.secho("L.. Details: {}".format(error),fg="red")
click.secho("[+] Job's done! Your identity report is in {}.".format(report_path),fg="green")
if __name__ == "__main__":
odin()
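
# Illustrative sketch (not part of ODIN itself): the three job queues above all follow
# the same pattern -- start every Process in a phase, then join them all before the
# next phase begins. Factored out, that pattern is simply:
def run_phase(phase_jobs):
    """Start all Process objects of one phase and block until every one has finished."""
    for job in phase_jobs:
        job.start()
    for job in phase_jobs:
        job.join()
# e.g. run_phase(jobs); run_phase(more_jobs); run_phase(even_more_jobs) reproduces the
# sequential phase behaviour written out inline above.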
| [
"multiprocess.Process",
"lib.asciis.print_art",
"lib.verification.prepare_scope",
"lib.htmlreporter.HTMLReporter",
"click.secho",
"click.group",
"click.option",
"lib.helpers.setup_headless_chrome",
"lib.reporter.Reporter",
"lib.verification.print_output",
"click.style",
"lib.grapher.Grapher",
"click.Path",
"lib.verification.perform_whois",
"multiprocess.Manager",
"click.Group.get_command",
"click.clear"
] | [((2662, 2726), 'click.group', 'click.group', ([], {'cls': 'AliasedGroup', 'context_settings': 'CONTEXT_SETTINGS'}), '(cls=AliasedGroup, context_settings=CONTEXT_SETTINGS)\n', (2673, 2726), False, 'import click\n'), ((3484, 3672), 'click.option', 'click.option', (['"""-o"""', '"""--organization"""'], {'help': '"""The target client, such as "ABC Company," to use for report titles and searches for domains and cloud storage buckets."""', 'required': '(True)'}), '(\'-o\', \'--organization\', help=\n \'The target client, such as "ABC Company," to use for report titles and searches for domains and cloud storage buckets.\'\n , required=True)\n', (3496, 3672), False, 'import click\n'), ((3663, 3902), 'click.option', 'click.option', (['"""-d"""', '"""--domain"""'], {'help': '"""The target\'s primary domain, such as example.com. Use whatever the target uses for email and their main website. Provide additional domains in a scope file using --scope-file."""', 'required': '(True)'}), '(\'-d\', \'--domain\', help=\n "The target\'s primary domain, such as example.com. Use whatever the target uses for email and their main website. Provide additional domains in a scope file using --scope-file."\n , required=True)\n', (3675, 3902), False, 'import click\n'), ((4185, 4723), 'click.option', 'click.option', (['"""--whoxy-limit"""'], {'default': '(10)', 'help': '"""The maximum number of domains discovered via reverse WHOIS that ODIN will resolve and use when searching services like Censys and Shodan. You may get hundreds of results from reverse WHOIS, so this is intended to save time and API credits. Default is 10 domains and setting it above maybe 20 or 30 is not recommended. It is preferable to perform a search using a tool like Vincent Yiu\'s DomLink and then provide the newly discovered domains in your scope file with --scope-file."""'}), '(\'--whoxy-limit\', default=10, help=\n "The maximum number of domains discovered via reverse WHOIS that ODIN will resolve and use when searching services like Censys and Shodan. You may get hundreds of results from reverse WHOIS, so this is intended to save time and API credits. Default is 10 domains and setting it above maybe 20 or 30 is not recommended. 
It is preferable to perform a search using a tool like Vincent Yiu\'s DomLink and then provide the newly discovered domains in your scope file with --scope-file."\n )\n', (4197, 4723), False, 'import click\n'), ((4723, 5030), 'click.option', 'click.option', (['"""--typo"""'], {'is_flag': '(True)', 'help': '"""Generate a list of lookalike domain names for the provided domain (--domain), check if they have been registered, and then check those domains against URLVoid and Cymon.io to see if the domains or associated IP addresses have been flagged as malicious."""'}), "('--typo', is_flag=True, help=\n 'Generate a list of lookalike domain names for the provided domain (--domain), check if they have been registered, and then check those domains against URLVoid and Cymon.io to see if the domains or associated IP addresses have been flagged as malicious.'\n )\n", (4735, 5030), False, 'import click\n'), ((5053, 5234), 'click.option', 'click.option', (['"""--files"""'], {'is_flag': '(True)', 'help': '"""Use this option to use Google to search for files under the provided domain (--domain), download files, and extract metadata."""'}), "('--files', is_flag=True, help=\n 'Use this option to use Google to search for files under the provided domain (--domain), download files, and extract metadata.'\n )\n", (5065, 5234), False, 'import click\n'), ((5226, 5406), 'click.option', 'click.option', (['"""-e"""', '"""--ext"""'], {'default': '"""all"""', 'help': '"""File extensions to look for with --file. Default is \'all\' or you can pick from key, pdf, doc, docx, xls, xlsx, and ppt."""'}), '(\'-e\', \'--ext\', default=\'all\', help=\n "File extensions to look for with --file. Default is \'all\' or you can pick from key, pdf, doc, docx, xls, xlsx, and ppt."\n )\n', (5238, 5406), False, 'import click\n'), ((5825, 5926), 'click.option', 'click.option', (['"""--html"""'], {'is_flag': '(True)', 'help': '"""Create an HTML report at the end for easy browsing."""'}), "('--html', is_flag=True, help=\n 'Create an HTML report at the end for easy browsing.')\n", (5837, 5926), False, 'import click\n'), ((5921, 6038), 'click.option', 'click.option', (['"""--graph"""'], {'is_flag': '(True)', 'help': '"""Create a Neo4j graph database from the completed SQLite3 database."""'}), "('--graph', is_flag=True, help=\n 'Create a Neo4j graph database from the completed SQLite3 database.')\n", (5933, 6038), False, 'import click\n'), ((6035, 6177), 'click.option', 'click.option', (['"""--nuke"""'], {'is_flag': '(True)', 'help': '"""Clear the Neo4j project before converting the database. This is only used with --graph."""'}), "('--nuke', is_flag=True, help=\n 'Clear the Neo4j project before converting the database. This is only used with --graph.'\n )\n", (6047, 6177), False, 'import click\n'), ((6169, 6281), 'click.option', 'click.option', (['"""--screenshots"""'], {'is_flag': '(True)', 'help': '"""Attempt to take screenshots of discovered web services."""'}), "('--screenshots', is_flag=True, help=\n 'Attempt to take screenshots of discovered web services.')\n", (6181, 6281), False, 'import click\n'), ((6278, 6588), 'click.option', 'click.option', (['"""--unsafe"""'], {'is_flag': '(True)', 'help': '"""Adding this flag will spawn the headless Chrome browser with the --no-sandbox command line flag. This is NOT recommended for any users who are NOT running ODIN on a Kali Linux VM as root. 
Chrome will not run as the root user on Kali without this option."""'}), "('--unsafe', is_flag=True, help=\n 'Adding this flag will spawn the headless Chrome browser with the --no-sandbox command line flag. This is NOT recommended for any users who are NOT running ODIN on a Kali Linux VM as root. Chrome will not run as the root user on Kali without this option.'\n )\n", (6290, 6588), False, 'import click\n'), ((14606, 14769), 'click.option', 'click.option', (['"""-o"""', '"""--organization"""'], {'help': '"""The target client, such as "ABC Company," to use for report titles and some keyword searches."""', 'required': '(True)'}), '(\'-o\', \'--organization\', help=\n \'The target client, such as "ABC Company," to use for report titles and some keyword searches.\'\n , required=True)\n', (14618, 14769), False, 'import click\n'), ((14935, 15042), 'click.option', 'click.option', (['"""-r"""', '"""--report"""'], {'default': '"""Verification.csv"""', 'help': '"""Output file (CSV) for the findings."""'}), "('-r', '--report', default='Verification.csv', help=\n 'Output file (CSV) for the findings.')\n", (14947, 15042), False, 'import click\n'), ((7686, 7699), 'click.clear', 'click.clear', ([], {}), '()\n', (7697, 7699), False, 'import click\n'), ((7828, 7918), 'click.secho', 'click.secho', (['"""[+] OSINT Module Selected: ODIN will run all recon modules."""'], {'fg': '"""green"""'}), "('[+] OSINT Module Selected: ODIN will run all recon modules.',\n fg='green')\n", (7839, 7918), False, 'import click\n'), ((15889, 16029), 'click.secho', 'click.secho', (['"""[+] Scope Verification Module Selected: ODIN will attempt to verify who owns the provided IP addresses."""'], {'fg': '"""green"""'}), "(\n '[+] Scope Verification Module Selected: ODIN will attempt to verify who owns the provided IP addresses.'\n , fg='green')\n", (15900, 16029), False, 'import click\n'), ((2075, 2119), 'click.Group.get_command', 'click.Group.get_command', (['self', 'ctx', 'cmd_name'], {}), '(self, ctx, cmd_name)\n', (2098, 2119), False, 'import click\n'), ((7716, 7734), 'lib.asciis.print_art', 'asciis.print_art', ([], {}), '()\n', (7732, 7734), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((8206, 8215), 'multiprocess.Manager', 'Manager', ([], {}), '()\n', (8213, 8215), False, 'from multiprocess import Process, Manager\n'), ((8448, 8485), 'lib.helpers.setup_headless_chrome', 'helpers.setup_headless_chrome', (['unsafe'], {}), '(unsafe)\n', (8477, 8485), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((8503, 8571), 'lib.reporter.Reporter', 'reporter.Reporter', (['organization', 'report_path', 'output_report', 'browser'], {}), '(organization, report_path, output_report, browser)\n', (8520, 8571), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((9073, 9173), 'multiprocess.Process', 'Process', ([], {'name': '"""Company Info Collector"""', 'target': 'report.create_company_info_table', 'args': '(domain,)'}), "(name='Company Info Collector', target=report.\n create_company_info_table, args=(domain,))\n", (9080, 9173), False, 'from multiprocess import Process, Manager\n'), ((9291, 9403), 'multiprocess.Process', 'Process', ([], {'name': '"""<NAME>"""', 'target': 'report.create_people_table', 'args': '(domain_list, rev_domain_list, organization)'}), "(name='<NAME>', target=report.create_people_table, args=(domain_list,\n rev_domain_list, organization))\n", (9298, 9403), False, 'from multiprocess import 
Process, Manager\n'), ((9527, 9694), 'multiprocess.Process', 'Process', ([], {'name': '"""Domain and IP Hunter"""', 'target': 'report.create_domain_report_table', 'args': '(organization, scope, ip_list, domain_list, rev_domain_list, whoxy_limit)'}), "(name='Domain and IP Hunter', target=report.\n create_domain_report_table, args=(organization, scope, ip_list,\n domain_list, rev_domain_list, whoxy_limit))\n", (9534, 9694), False, 'from multiprocess import Process, Manager\n'), ((9827, 9917), 'multiprocess.Process', 'Process', ([], {'name': '"""<NAME>"""', 'target': 'report.create_shodan_table', 'args': '(ip_list, domain_list)'}), "(name='<NAME>', target=report.create_shodan_table, args=(ip_list,\n domain_list))\n", (9834, 9917), False, 'from multiprocess import Process, Manager\n'), ((10876, 10981), 'multiprocess.Process', 'Process', ([], {'name': '"""<NAME>"""', 'target': 'report.create_cloud_table', 'args': '(organization, domain, aws, aws_fixes)'}), "(name='<NAME>', target=report.create_cloud_table, args=(organization,\n domain, aws, aws_fixes))\n", (10883, 10981), False, 'from multiprocess import Process, Manager\n'), ((11158, 11261), 'click.secho', 'click.secho', (['"""[+] Beginning initial discovery phase! This could take some time..."""'], {'fg': '"""green"""'}), "(\n '[+] Beginning initial discovery phase! This could take some time...',\n fg='green')\n", (11169, 11261), False, 'import click\n'), ((11496, 11604), 'click.secho', 'click.secho', (['"""[+] Initial discovery is complete! Proceeding with additional queries..."""'], {'fg': '"""green"""'}), "(\n '[+] Initial discovery is complete! Proceeding with additional queries...',\n fg='green')\n", (11507, 11604), False, 'import click\n'), ((11849, 11936), 'click.secho', 'click.secho', (['"""[+] Final phase: checking the cloud and web services..."""'], {'fg': '"""green"""'}), "('[+] Final phase: checking the cloud and web services...', fg=\n 'green')\n", (11860, 11936), False, 'import click\n'), ((3955, 4012), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'readable': '(True)', 'resolve_path': '(True)'}), '(exists=True, readable=True, resolve_path=True)\n', (3965, 4012), False, 'import click\n'), ((5546, 5603), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'readable': '(True)', 'resolve_path': '(True)'}), '(exists=True, readable=True, resolve_path=True)\n', (5556, 5603), False, 'import click\n'), ((5737, 5794), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'readable': '(True)', 'resolve_path': '(True)'}), '(exists=True, readable=True, resolve_path=True)\n', (5747, 5794), False, 'import click\n'), ((15777, 15795), 'lib.asciis.print_art', 'asciis.print_art', ([], {}), '()\n', (15793, 15795), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((16173, 16227), 'lib.verification.prepare_scope', 'verification.prepare_scope', (['scope_file', 'expanded_scope'], {}), '(scope_file, expanded_scope)\n', (16199, 16227), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((16235, 16286), 'lib.verification.perform_whois', 'verification.perform_whois', (['expanded_scope', 'results'], {}), '(expanded_scope, results)\n', (16261, 16286), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((16294, 16341), 'lib.verification.print_output', 'verification.print_output', (['results', 'report_path'], {}), '(results, report_path)\n', (16319, 16341), False, 'from lib import reporter, asciis, verification, htmlreporter, 
grapher, helpers\n'), ((14863, 14920), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'readable': '(True)', 'resolve_path': '(True)'}), '(exists=True, readable=True, resolve_path=True)\n', (14873, 14920), False, 'import click\n'), ((10065, 10178), 'multiprocess.Process', 'Process', ([], {'name': '"""Lookalike Domain Reviewer"""', 'target': 'report.create_lookalike_table', 'args': '(organization, domain)'}), "(name='Lookalike Domain Reviewer', target=report.\n create_lookalike_table, args=(organization, domain))\n", (10072, 10178), False, 'from multiprocess import Process, Manager\n'), ((10351, 10455), 'multiprocess.Process', 'Process', ([], {'name': '"""Screenshot Snapper"""', 'target': 'report.capture_web_snapshots', 'args': '(report_path, browser)'}), "(name='Screenshot Snapper', target=report.capture_web_snapshots,\n args=(report_path, browser))\n", (10358, 10455), False, 'from multiprocess import Process, Manager\n'), ((10621, 10723), 'multiprocess.Process', 'Process', ([], {'name': '"""File Hunter"""', 'target': 'report.create_metadata_table', 'args': '(domain, ext, report_path)'}), "(name='File Hunter', target=report.create_metadata_table, args=(\n domain, ext, report_path))\n", (10628, 10723), False, 'from multiprocess import Process, Manager\n'), ((12522, 12552), 'lib.grapher.Grapher', 'grapher.Grapher', (['output_report'], {}), '(output_report)\n', (12537, 12552), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((16380, 16429), 'click.secho', 'click.secho', (['"""[!] Verification failed!"""'], {'fg': '"""red"""'}), "('[!] Verification failed!', fg='red')\n", (16391, 16429), False, 'import click\n'), ((1530, 1598), 'click.secho', 'click.secho', (['"""[!] Could not create the reports directory!"""'], {'fg': '"""red"""'}), "('[!] Could not create the reports directory!', fg='red')\n", (1541, 1598), False, 'import click\n'), ((2376, 2422), 'click.Group.get_command', 'click.Group.get_command', (['self', 'ctx', 'matches[0]'], {}), '(self, ctx, matches[0])\n', (2399, 2422), False, 'import click\n'), ((14003, 14092), 'lib.htmlreporter.HTMLReporter', 'htmlreporter.HTMLReporter', (['organization', "(report_path + '/html_report/')", 'output_report'], {}), "(organization, report_path + '/html_report/',\n output_report)\n", (14028, 14092), False, 'from lib import reporter, asciis, verification, htmlreporter, grapher, helpers\n'), ((12725, 12843), 'click.style', 'click.style', (['"""[!] You set the --nuke option. This wipes out all nodes for a fresh start. Proceed?"""'], {'fg': '"""red"""'}), "(\n '[!] You set the --nuke option. This wipes out all nodes for a fresh start. Proceed?'\n , fg='red')\n", (12736, 12843), False, 'import click\n'), ((13354, 13498), 'click.secho', 'click.secho', (['"""[!] You can convert your database to a graph database later. Run lib/grapher.py with the appropriate options."""'], {'fg': '"""red"""'}), "(\n '[!] You can convert your database to a graph database later. Run lib/grapher.py with the appropriate options.'\n , fg='red')\n", (13365, 13498), False, 'import click\n'), ((14195, 14257), 'click.secho', 'click.secho', (['"""[!] Failed to create the HTML report!"""'], {'fg': '"""red"""'}), "('[!] 
Failed to create the HTML report!', fg='red')\n", (14206, 14257), False, 'import click\n'), ((12961, 13022), 'click.secho', 'click.secho', (['"""[+] Database successfully wiped!\n"""'], {'fg': '"""green"""'}), "('[+] Database successfully wiped!\\n', fg='green')\n", (12972, 13022), False, 'import click\n'), ((13619, 13770), 'click.secho', 'click.secho', (['"""[!] Failed to convert the database! Check the Neo4j console and your configuration and try running grapher.py again."""'], {'fg': '"""red"""'}), "(\n '[!] Failed to convert the database! Check the Neo4j console and your configuration and try running grapher.py again.'\n , fg='red')\n", (13630, 13770), False, 'import click\n'), ((13093, 13242), 'click.secho', 'click.secho', (['"""[!] Failed to clear the database! Check the Neo4j console and your configuration and try running grapher.py again."""'], {'fg': '"""red"""'}), "(\n '[!] Failed to clear the database! Check the Neo4j console and your configuration and try running grapher.py again.'\n , fg='red')\n", (13104, 13242), False, 'import click\n')] |
import numpy as np
import keras.backend as K
K.set_image_data_format('channels_last')
class DataConfig(object):
"""Input frame configuration and data augmentation setup."""
def __init__(self, crop_resolution=(256, 256), image_channels=(3,),
angles=[0], fixed_angle=0,
scales=[1], fixed_scale=1,
trans_x=[0], fixed_trans_x=0,
trans_y=[0], fixed_trans_y=0,
hflips=[0, 1], fixed_hflip=0,
chpower=0.01*np.array(range(90, 110+1, 2)), fixed_chpower=1,
geoocclusion=None, fixed_geoocclusion=None,
subsampling=[1], fixed_subsampling=1):
self.crop_resolution = crop_resolution
self.image_channels = image_channels
if K.image_data_format() == 'channels_last':
self.input_shape = crop_resolution + image_channels
else:
self.input_shape = image_channels + crop_resolution
self.angles = angles
self.fixed_angle = fixed_angle
self.scales = scales
self.fixed_scale = fixed_scale
self.trans_x = trans_x
self.trans_y = trans_y
self.fixed_trans_x = fixed_trans_x
self.fixed_trans_y = fixed_trans_y
self.hflips = hflips
self.fixed_hflip = fixed_hflip
self.chpower = chpower
self.fixed_chpower = fixed_chpower
self.geoocclusion = geoocclusion
self.fixed_geoocclusion = fixed_geoocclusion
self.subsampling = subsampling
self.fixed_subsampling = fixed_subsampling
def get_fixed_config(self):
return {'angle': self.fixed_angle,
'scale': self.fixed_scale,
'transx': self.fixed_trans_x,
'transy': self.fixed_trans_y,
'hflip': self.fixed_hflip,
'chpower': self.fixed_chpower,
'geoocclusion': self.fixed_geoocclusion,
'subspl': self.fixed_subsampling}
def random_data_generator(self):
angle = DataConfig._getrand(self.angles)
scale = DataConfig._getrand(self.scales)
trans_x = DataConfig._getrand(self.trans_x)
trans_y = DataConfig._getrand(self.trans_y)
hflip = DataConfig._getrand(self.hflips)
chpower = (DataConfig._getrand(self.chpower),
DataConfig._getrand(self.chpower),
DataConfig._getrand(self.chpower))
geoocclusion = self.__get_random_geoocclusion()
subsampling = DataConfig._getrand(self.subsampling)
return {'angle': angle,
'scale': scale,
'transx': trans_x,
'transy': trans_y,
'hflip': hflip,
'chpower': chpower,
'geoocclusion': geoocclusion,
'subspl': subsampling}
def __get_random_geoocclusion(self):
if self.geoocclusion is not None:
w = int(DataConfig._getrand(self.geoocclusion) / 2)
h = int(DataConfig._getrand(self.geoocclusion) / 2)
xmin = w + 1
xmax = self.crop_resolution[0] - xmin
ymin = h + 1
ymax = self.crop_resolution[1] - ymin
x = DataConfig._getrand(range(xmin, xmax, 5))
y = DataConfig._getrand(range(ymin, ymax, 5))
bbox = (x-w, y-h, x+w, y+h)
return bbox
else:
return None
@staticmethod
def _getrand(x):
return x[np.random.randint(0, len(x))]
# Data generation and configuration setup
mpii_sp_dataconf = DataConfig(
crop_resolution=(256, 256),
angles=np.array(range(-40, 40+1, 5)),
scales=np.array([0.7, 1., 1.3]),
)
pennaction_dataconf = DataConfig(
crop_resolution=(256, 256),
angles=np.array(range(-30, 30+1, 5)),
scales=np.array([0.7, 1.0, 1.3, 2.5]),
trans_x=np.array(range(-40, 40+1, 5)),
trans_y=np.array(range(-40, 40+1, 5)),
subsampling=[1, 2]
)
pennaction_ar_dataconf = DataConfig(
crop_resolution=(256, 256),
angles=np.array(range(-30, 30+1, 5)),
scales=np.array([0.7, 1.0, 1.3]),
trans_x=np.array(range(-40, 40+1, 5)),
trans_y=np.array(range(-40, 40+1, 5)),
subsampling=[1, 2, 3],
fixed_subsampling=2
)
human36m_dataconf = DataConfig(
crop_resolution=(256, 256),
angles=np.array(range(-10, 10+1, 5)),
scales=np.array([0.8, 1.0, 1.2]),
geoocclusion=np.array(range(20, 90)),
)
ntu_ar_dataconf = DataConfig(
crop_resolution=(256, 256),
angles=[0],
scales=np.array([0.7, 1.0, 1.3]),
trans_x=range(-40, 40+1, 5),
trans_y=range(-40, 40+1, 5),
subsampling=[1, 2, 3],
fixed_subsampling=2
)
ntu_pe_dataconf = DataConfig(
crop_resolution=(256, 256),
angles=np.array(range(-15, 15+1, 5)),
scales=np.array([0.8, 1.0, 1.2, 2.0]),
trans_x=np.array(range(-40, 40+1, 5)),
trans_y=np.array(range(-10, 10+1, 5)),
subsampling=[1, 2, 4]
)
# Aliases.
mpii_dataconf = mpii_sp_dataconf
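
# Illustrative usage (not from the original file): a config object yields one random
# augmentation-parameter dict per training sample and a deterministic counterpart for
# evaluation; both dicts share the same keys.
if __name__ == '__main__':
    aug = mpii_sp_dataconf.random_data_generator()   # e.g. {'angle': -15, 'scale': 1.3, ...}
    fixed = mpii_sp_dataconf.get_fixed_config()      # fixed values used at test time
    assert set(aug.keys()) == set(fixed.keys())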
| [
"keras.backend.set_image_data_format",
"numpy.array",
"keras.backend.image_data_format"
] | [((46, 86), 'keras.backend.set_image_data_format', 'K.set_image_data_format', (['"""channels_last"""'], {}), "('channels_last')\n", (69, 86), True, 'import keras.backend as K\n'), ((3635, 3660), 'numpy.array', 'np.array', (['[0.7, 1.0, 1.3]'], {}), '([0.7, 1.0, 1.3])\n', (3643, 3660), True, 'import numpy as np\n'), ((3803, 3833), 'numpy.array', 'np.array', (['[0.7, 1.0, 1.3, 2.5]'], {}), '([0.7, 1.0, 1.3, 2.5])\n', (3811, 3833), True, 'import numpy as np\n'), ((4101, 4126), 'numpy.array', 'np.array', (['[0.7, 1.0, 1.3]'], {}), '([0.7, 1.0, 1.3])\n', (4109, 4126), True, 'import numpy as np\n'), ((4421, 4446), 'numpy.array', 'np.array', (['[0.8, 1.0, 1.2]'], {}), '([0.8, 1.0, 1.2])\n', (4429, 4446), True, 'import numpy as np\n'), ((4606, 4631), 'numpy.array', 'np.array', (['[0.7, 1.0, 1.3]'], {}), '([0.7, 1.0, 1.3])\n', (4614, 4631), True, 'import numpy as np\n'), ((4904, 4934), 'numpy.array', 'np.array', (['[0.8, 1.0, 1.2, 2.0]'], {}), '([0.8, 1.0, 1.2, 2.0])\n', (4912, 4934), True, 'import numpy as np\n'), ((740, 761), 'keras.backend.image_data_format', 'K.image_data_format', ([], {}), '()\n', (759, 761), True, 'import keras.backend as K\n')] |
'''Test CIFAR10 with PyTorch.'''
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torchvision
import torchvision.transforms as transforms
import os
import argparse
import numpy as np
import foolbox as fb
import pdb
from nearest_neighbor import nnclass, create_cifar_coreset_tensor
from utils import model_picker
parser = argparse.ArgumentParser(description='PyTorch CIFAR10 Testing')
parser.add_argument('--model_name', default='googlenet', type=str, help='Model name')
parser.add_argument('--cifar_ckpt_dir', default='/vulcanscratch/psando/cifar_model_ckpts/',
help='resume from checkpoint')
parser.add_argument('--cifar_dir', default='./data',
help='location of the cifar-10 dataset')
parser.add_argument('--workers', default=1, type=int, help='number of data loading workers')
parser.add_argument('--batch_size', default=128, type=int, help='input batch size')
parser.add_argument('--runs', default=1, type=int, help='number of runs')
parser.add_argument('--no_progress_bar', action='store_true', help='whether to show progress bar')
parser.add_argument('--no_download_data', action='store_true', help='whether to download data')
# Adversarial attack settings
parser.add_argument('--adversarial', action='store_true', help='Whether or not to perform adversarial attack during testing')
parser.add_argument('--attack_iters', type=int, default=20, help='Number of iterations for the attack')
parser.add_argument('--epsilon', type=float, default=8., help='Epsilon (default=8) for the attack. Script will divide by 255.')
args = parser.parse_args()
device = 'cuda' if torch.cuda.is_available() else 'cpu'
print(f'==> Using device {device}..')
# Data
print('==> Preparing data..')
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
# Dataset mean and std for normalization
dm = torch.tensor([0.4914, 0.4822, 0.4465])[None, :, None, None].to(device)
ds = torch.tensor([0.2023, 0.1994, 0.2010])[None, :, None, None].to(device)
trainset = torchvision.datasets.CIFAR10(
root=args.cifar_dir, train=True, download=(not args.no_download_data), transform=transform_test)
testset = torchvision.datasets.CIFAR10(
root=args.cifar_dir, train=False, download=(not args.no_download_data), transform=transform_test)
testloader = torch.utils.data.DataLoader(
testset, batch_size=args.batch_size, shuffle=False, num_workers=args.workers)
classes = ('plane', 'car', 'bird', 'cat', 'deer',
'dog', 'frog', 'horse', 'ship', 'truck')
# Model
print('==> Building model..')
net = model_picker(args.model_name)
net = net.to(device)
if device == 'cuda':
net = torch.nn.DataParallel(net)
cudnn.benchmark = True
if args.cifar_ckpt_dir:
# Load checkpoint.
print('==> Resuming from checkpoint..')
assert os.path.isdir(args.cifar_ckpt_dir), 'Error: no checkpoint directory found!'
checkpoint = torch.load(os.path.join(args.cifar_ckpt_dir, f'{args.model_name}.pt'))
net.load_state_dict(checkpoint['model'])
criterion = nn.CrossEntropyLoss()
def test(epoch, adversarial=False, epsilon=(8./255), attack_iters=10):
global best_acc
net.eval()
test_loss = 0
attack_success = 0
standard_correct = 0
coreset_correct = 0
total = 0
    # Build the coreset embedding matrix and its nearest-neighbour classifier
coreset_matrix, coreset_target = create_cifar_coreset_tensor(net, trainset)
nnc = nnclass(coreset_matrix, coreset_target)
if adversarial:
# no preprocessing since data from testloader is already normalized
# no bounds since adversarial examples are in normalized range
fmodel = fb.models.PyTorchModel(net, bounds=(-np.inf, np.inf), preprocessing=None)
attack = fb.attacks.LinfPGD(abs_stepsize=(epsilon / attack_iters * 2.5), steps=attack_iters, random_start=False)
print(f'LinfPGD Attack Parameters: epsilon={epsilon} iters={attack_iters}')
for batch_idx, (inputs, targets) in enumerate(testloader):
inputs, targets = inputs.to(device), targets.to(device)
if adversarial:
_, inputs, success = attack(fmodel, inputs, targets, epsilons=epsilon)
attack_success += success.sum().item()
with torch.no_grad():
outputs = net(inputs)
loss = criterion(outputs, targets)
test_loss += loss.item()
_, predicted = outputs.max(1)
total += targets.size(0)
standard_correct += predicted.eq(targets).sum().item()
# Compute coreset accuracy
embedded = net(inputs, last_layer=True)
outputs_nn = nnc.classify(embedded)
coreset_correct += outputs_nn.eq(targets).sum().item()
if not args.no_progress_bar:
from utils import progress_bar
progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)'
% (test_loss/(batch_idx+1), 100.*standard_correct/total, standard_correct, total))
print('\n')
acc = 100.*standard_correct/total
print(f'[Epoch {epoch}] Test Accuracy: {acc:.2f} %')
coreset_acc = 100.*coreset_correct/total
print(f'[Epoch {epoch}] Coreset Accuracy: {coreset_acc:.2f} %')
if adversarial:
attack_success_rate = 100.*attack_success/total
print(f'[Epoch {epoch}] Attack Success: {attack_success_rate:.2f} %')
return acc
acc = test(0, adversarial=args.adversarial, epsilon=(args.epsilon / 255), attack_iters=args.attack_iters)
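
# `nnclass` is imported from nearest_neighbor and its implementation is not shown in
# this file. A minimal sketch of the idea -- assuming a 1-nearest-neighbour classifier
# over class-labelled embeddings under L2 distance (an assumption, not the actual code):
class SimpleNearestNeighborClassifier:
    def __init__(self, coreset_matrix, coreset_targets):
        self.coreset_matrix = coreset_matrix    # shape: (num_coreset, embed_dim)
        self.coreset_targets = coreset_targets  # shape: (num_coreset,)

    def classify(self, embedded):
        # pairwise L2 distances between query embeddings and coreset embeddings
        dists = torch.cdist(embedded, self.coreset_matrix)
        nearest = dists.argmin(dim=1)
        return self.coreset_targets[nearest]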
| [
"utils.model_picker",
"torch.nn.CrossEntropyLoss",
"argparse.ArgumentParser",
"nearest_neighbor.nnclass",
"torch.nn.DataParallel",
"os.path.join",
"foolbox.models.PyTorchModel",
"foolbox.attacks.LinfPGD",
"torch.tensor",
"torchvision.datasets.CIFAR10",
"torch.cuda.is_available",
"os.path.isdir",
"nearest_neighbor.create_cifar_coreset_tensor",
"torch.utils.data.DataLoader",
"torchvision.transforms.Normalize",
"torch.no_grad",
"torchvision.transforms.ToTensor"
] | [((415, 477), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch CIFAR10 Testing"""'}), "(description='PyTorch CIFAR10 Testing')\n", (438, 477), False, 'import argparse\n'), ((2161, 2288), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': 'args.cifar_dir', 'train': '(True)', 'download': '(not args.no_download_data)', 'transform': 'transform_test'}), '(root=args.cifar_dir, train=True, download=not\n args.no_download_data, transform=transform_test)\n', (2189, 2288), False, 'import torchvision\n'), ((2302, 2430), 'torchvision.datasets.CIFAR10', 'torchvision.datasets.CIFAR10', ([], {'root': 'args.cifar_dir', 'train': '(False)', 'download': '(not args.no_download_data)', 'transform': 'transform_test'}), '(root=args.cifar_dir, train=False, download=not\n args.no_download_data, transform=transform_test)\n', (2330, 2430), False, 'import torchvision\n'), ((2447, 2557), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['testset'], {'batch_size': 'args.batch_size', 'shuffle': '(False)', 'num_workers': 'args.workers'}), '(testset, batch_size=args.batch_size, shuffle=\n False, num_workers=args.workers)\n', (2474, 2557), False, 'import torch\n'), ((2706, 2735), 'utils.model_picker', 'model_picker', (['args.model_name'], {}), '(args.model_name)\n', (2718, 2735), False, 'from utils import model_picker\n'), ((3167, 3188), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (3186, 3188), True, 'import torch.nn as nn\n'), ((1704, 1729), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1727, 1729), False, 'import torch\n'), ((2788, 2814), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['net'], {}), '(net)\n', (2809, 2814), False, 'import torch\n'), ((2945, 2979), 'os.path.isdir', 'os.path.isdir', (['args.cifar_ckpt_dir'], {}), '(args.cifar_ckpt_dir)\n', (2958, 2979), False, 'import os\n'), ((3467, 3509), 'nearest_neighbor.create_cifar_coreset_tensor', 'create_cifar_coreset_tensor', (['net', 'trainset'], {}), '(net, trainset)\n', (3494, 3509), False, 'from nearest_neighbor import nnclass, create_cifar_coreset_tensor\n'), ((3520, 3559), 'nearest_neighbor.nnclass', 'nnclass', (['coreset_matrix', 'coreset_target'], {}), '(coreset_matrix, coreset_target)\n', (3527, 3559), False, 'from nearest_neighbor import nnclass, create_cifar_coreset_tensor\n'), ((1859, 1880), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1878, 1880), True, 'import torchvision.transforms as transforms\n'), ((1886, 1957), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['(0.4914, 0.4822, 0.4465)', '(0.2023, 0.1994, 0.201)'], {}), '((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.201))\n', (1906, 1957), True, 'import torchvision.transforms as transforms\n'), ((3049, 3107), 'os.path.join', 'os.path.join', (['args.cifar_ckpt_dir', 'f"""{args.model_name}.pt"""'], {}), "(args.cifar_ckpt_dir, f'{args.model_name}.pt')\n", (3061, 3107), False, 'import os\n'), ((3745, 3818), 'foolbox.models.PyTorchModel', 'fb.models.PyTorchModel', (['net'], {'bounds': '(-np.inf, np.inf)', 'preprocessing': 'None'}), '(net, bounds=(-np.inf, np.inf), preprocessing=None)\n', (3767, 3818), True, 'import foolbox as fb\n'), ((3837, 3943), 'foolbox.attacks.LinfPGD', 'fb.attacks.LinfPGD', ([], {'abs_stepsize': '(epsilon / attack_iters * 2.5)', 'steps': 'attack_iters', 'random_start': '(False)'}), '(abs_stepsize=epsilon / attack_iters * 2.5, steps=\n attack_iters, random_start=False)\n', (3855, 3943), True, 
'import foolbox as fb\n'), ((2010, 2048), 'torch.tensor', 'torch.tensor', (['[0.4914, 0.4822, 0.4465]'], {}), '([0.4914, 0.4822, 0.4465])\n', (2022, 2048), False, 'import torch\n'), ((2082, 2119), 'torch.tensor', 'torch.tensor', (['[0.2023, 0.1994, 0.201]'], {}), '([0.2023, 0.1994, 0.201])\n', (2094, 2119), False, 'import torch\n'), ((4330, 4345), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4343, 4345), False, 'import torch\n')] |
from unittest.mock import create_autospec, call, MagicMock
import pytest
from dynamixel_adapter import DynamixelAdapter
from mayday_robot import LegFactory
from motor_state import MotorState
class TestLeg:
@pytest.fixture()
def mock_leg(self):
self.mock_adapter = create_autospec(DynamixelAdapter)
leg_factory = LegFactory()
leg = leg_factory.create_basic(1, self.mock_adapter, 'left')
return leg
def test_given_position__when_set_joint_positions__then_calls_write_goal_position_on_all_joints(self, mock_leg):
joint_positions = (2, 3, 5)
mock_leg.set_joint_positions(joint_positions)
for joint, position in zip(mock_leg.joints, joint_positions):
assert call.write_goal_position(joint.id, position) in joint.adapter.method_calls
def test_given_mock_joint_positions__when_get_joint_positions__then_returns_those(self, mock_leg):
joint_positions = [3, 5, 7]
motor_states = [MotorState(pos) for pos in joint_positions]
self.mock_adapter.read_state = MagicMock(side_effect=motor_states)
actual = mock_leg.get_joint_positions()
assert actual == joint_positions
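
# Background note (standard unittest.mock behaviour, not project-specific): when
# `side_effect` is given an iterable, each call to the mock returns the next item,
# which is what makes read_state() hand back one MotorState per joint above.
def _side_effect_demo():
    mock_read_state = MagicMock(side_effect=[1, 2, 3])
    assert [mock_read_state(), mock_read_state(), mock_read_state()] == [1, 2, 3]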
| [
"mayday_robot.LegFactory",
"unittest.mock.call.write_goal_position",
"unittest.mock.create_autospec",
"unittest.mock.MagicMock",
"pytest.fixture",
"motor_state.MotorState"
] | [((215, 231), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (229, 231), False, 'import pytest\n'), ((284, 317), 'unittest.mock.create_autospec', 'create_autospec', (['DynamixelAdapter'], {}), '(DynamixelAdapter)\n', (299, 317), False, 'from unittest.mock import create_autospec, call, MagicMock\n'), ((340, 352), 'mayday_robot.LegFactory', 'LegFactory', ([], {}), '()\n', (350, 352), False, 'from mayday_robot import LegFactory\n'), ((1063, 1098), 'unittest.mock.MagicMock', 'MagicMock', ([], {'side_effect': 'motor_states'}), '(side_effect=motor_states)\n', (1072, 1098), False, 'from unittest.mock import create_autospec, call, MagicMock\n'), ((980, 995), 'motor_state.MotorState', 'MotorState', (['pos'], {}), '(pos)\n', (990, 995), False, 'from motor_state import MotorState\n'), ((741, 785), 'unittest.mock.call.write_goal_position', 'call.write_goal_position', (['joint.id', 'position'], {}), '(joint.id, position)\n', (765, 785), False, 'from unittest.mock import create_autospec, call, MagicMock\n')] |
from __future__ import print_function
import argparse
import datetime
from utils.load_data.data_loader_instances import load_dataset
from utils.utils import importing_model
import torch
import math
import os
from utils.utils import save_model, load_model
from utils.optimizer import AdamNormGrad
import time
from utils.training import train_one_epoch
from utils.evaluation import evaluate_loss, final_evaluation
import random
def str2bool(v):
if isinstance(v, bool):
return v
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
parser = argparse.ArgumentParser(description='VAE+VampPrior')
parser.add_argument('--batch_size', type=int, default=100, metavar='BStrain',
help='input batch size for training (default: 100)')
parser.add_argument('--test_batch_size', type=int, default=100, metavar='BStest',
help='input batch size for testing (default: 100)')
parser.add_argument('--epochs', type=int, default=2000, metavar='E',
help='number of epochs to train (default: 2000)')
parser.add_argument('--lr', type=float, default=0.0005, metavar='LR',
help='learning rate (default: 0.0005)')
parser.add_argument('--early_stopping_epochs', type=int, default=50, metavar='ES',
help='number of epochs for early stopping')
parser.add_argument('--z1_size', type=int, default=40, metavar='M1',
help='latent size')
parser.add_argument('--z2_size', type=int, default=40, metavar='M2',
help='latent size')
parser.add_argument('--input_size', type=int, default=[1, 28, 28], metavar='D',
help='input size')
parser.add_argument('--number_components', type=int, default=50000, metavar='NC',
help='number of pseudo-inputs')
parser.add_argument('--pseudoinputs_mean', type=float, default=-0.05, metavar='PM',
help='mean for init pseudo-inputs')
parser.add_argument('--pseudoinputs_std', type=float, default=0.01, metavar='PS',
help='std for init pseudo-inputs')
parser.add_argument('--use_training_data_init', action='store_true', default=False,
help='initialize pseudo-inputs with randomly chosen training data')
parser.add_argument('--model_name', type=str, default='vae', metavar='MN',
help='model name: vae, hvae_2level, convhvae_2level')
parser.add_argument('--prior', type=str, default='vampprior', metavar='P',
help='prior: standard, vampprior, exemplar_prior')
parser.add_argument('--input_type', type=str, default='binary', metavar='IT',
help='type of the input: binary, gray, continuous, pca')
parser.add_argument('--S', type=int, default=5000, metavar='SLL',
help='number of samples used for approximating log-likelihood,'
'i.e. number of samples in IWAE')
parser.add_argument('--MB', type=int, default=100, metavar='MBLL',
help='size of a mini-batch used for approximating log-likelihood')
parser.add_argument('--use_whole_train', type=str2bool, default=False,
                    help='use the whole training set at test time')
parser.add_argument('--dataset_name', type=str, default='freyfaces', metavar='DN',
help='name of the dataset: static_mnist, dynamic_mnist, omniglot, caltech101silhouettes,'
' histopathologyGray, freyfaces, cifar10')
parser.add_argument('--dynamic_binarization', action='store_true', default=False,
help='allow dynamic binarization')
parser.add_argument('--seed', type=int, default=14, metavar='S',
help='random seed (default: 14)')
parser.add_argument('--no_mask', action='store_true', default=False, help='no leave one out')
parser.add_argument('--parent_dir', type=str, default='')
parser.add_argument('--same_variational_var', type=str2bool, default=False,
                    help='use same variance for different dimensions')
parser.add_argument('--model_signature', type=str, default='', help='load from this directory and continue training')
parser.add_argument('--warmup', type=int, default=100, metavar='WU',
                    help='number of epochs for warm-up')
parser.add_argument('--slurm_task_id', type=str, default='')
parser.add_argument('--slurm_job_id', type=str, default='')
parser.add_argument('--approximate_prior', type=str2bool, default=False)
parser.add_argument('--just_evaluate', type=str2bool, default=False)
parser.add_argument('--no_attention', type=str2bool, default=False)
parser.add_argument('--approximate_k', type=int, default=10)
parser.add_argument('--hidden_size', type=int, default=300)
parser.add_argument('--base_dir', type=str, default='snapshots/')
parser.add_argument('--continuous', type=str2bool, default=False)
parser.add_argument('--use_logit', type=str2bool, default=False)
parser.add_argument('--lambd', type=float, default=1e-4)
parser.add_argument('--bottleneck', type=int, default=6)
parser.add_argument('--training_set_size', type=int, default=50000)
def initial_or_load(checkpoint_path_load, model, optimizer, dir):
if os.path.exists(checkpoint_path_load):
model_loaded_str = "******model is loaded*********"
print(model_loaded_str)
with open(dir + 'whole_log.txt', 'a') as f:
print(model_loaded_str, file=f)
checkpoint = load_model(checkpoint_path_load, model, optimizer)
begin_epoch = checkpoint['epoch']
best_loss = checkpoint['best_loss']
e = checkpoint['e']
else:
torch.manual_seed(args.seed)
if args.device=='cuda':
torch.cuda.manual_seed(args.seed)
random.seed(args.seed)
begin_epoch = 1
best_loss = math.inf
e = 0
return begin_epoch, best_loss, e
def save_loss_files(folder, train_loss_history,
train_re_history, train_kl_history, val_loss_history, val_re_history, val_kl_history):
torch.save(train_loss_history, folder + '.train_loss')
torch.save(train_re_history, folder + '.train_re')
torch.save(train_kl_history, folder + '.train_kl')
torch.save(val_loss_history, folder + '.val_loss')
torch.save(val_re_history, folder + '.val_re')
torch.save(val_kl_history, folder + '.val_kl')
def run_density_estimation(args, train_loader_input, val_loader_input, test_loader_input, model, optimizer, dir, model_name='vae'):
torch.save(args, dir + args.model_name + '.config')
train_loss_history, train_re_history, train_kl_history, val_loss_history, val_re_history, val_kl_history, \
time_history = [], [], [], [], [], [], []
checkpoint_path_save = os.path.join(dir, 'checkpoint_temp.pth')
checkpoint_path_load = os.path.join(dir, 'checkpoint.pth')
best_model_path_load = os.path.join(dir, 'checkpoint_best.pth')
decayed = False
time_history = []
# with torch.autograd.detect_anomaly():
begin_epoch, best_loss, e = initial_or_load(checkpoint_path_load, model, optimizer, dir)
if args.just_evaluate is False:
for epoch in range(begin_epoch, args.epochs + 1):
time_start = time.time()
train_loss_epoch, train_re_epoch, train_kl_epoch \
= train_one_epoch(epoch, args, train_loader_input, model, optimizer)
with torch.no_grad():
val_loss_epoch, val_re_epoch, val_kl_epoch = evaluate_loss(args, model, val_loader_input,
dataset=train_loader_input.dataset)
time_end = time.time()
time_elapsed = time_end - time_start
content = {'epoch': epoch, 'state_dict': model.state_dict(),
'optimizer': optimizer.state_dict(), 'best_loss': best_loss, 'e': e}
if epoch % 10 == 0:
save_model(checkpoint_path_save, checkpoint_path_load, content)
if val_loss_epoch < best_loss:
e = 0
best_loss = val_loss_epoch
print('->model saved<-')
save_model(checkpoint_path_save, best_model_path_load, content)
else:
e += 1
if epoch < args.warmup:
e = 0
if e > args.early_stopping_epochs:
break
if math.isnan(val_loss_epoch):
print("***** val loss is Nan *******")
break
for param_group in optimizer.param_groups:
learning_rate = param_group['lr']
break
time_history.append(time_elapsed)
epoch_report = 'Epoch: {}/{}, Time elapsed: {:.2f}s\n' \
'learning rate: {:.5f}\n' \
'* Train loss: {:.2f} (RE: {:.2f}, KL: {:.2f})\n' \
'o Val. loss: {:.2f} (RE: {:.2f}, KL: {:.2f})\n' \
'--> Early stopping: {}/{} (BEST: {:.2f})\n'.format(epoch, args.epochs, time_elapsed,
learning_rate,
train_loss_epoch, train_re_epoch,
train_kl_epoch, val_loss_epoch,
val_re_epoch, val_kl_epoch, e,
args.early_stopping_epochs, best_loss)
if args.prior == 'exemplar_prior':
print("Prior Variance", model.prior_log_variance.item())
if args.continuous is True:
print("Decoder Variance", model.decoder_logstd.item())
print(epoch_report)
with open(dir + 'whole_log.txt', 'a') as f:
print(epoch_report, file=f)
train_loss_history.append(train_loss_epoch), train_re_history.append(
train_re_epoch), train_kl_history.append(train_kl_epoch)
val_loss_history.append(val_loss_epoch), val_re_history.append(val_re_epoch), val_kl_history.append(
val_kl_epoch)
save_loss_files(dir + args.model_name, train_loss_history,
train_re_history, train_kl_history, val_loss_history, val_re_history, val_kl_history)
with torch.no_grad():
final_evaluation(train_loader_input, test_loader_input, val_loader_input,
best_model_path_load, model, optimizer, args, dir)
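
# save_model()/load_model() live in utils.utils and their bodies are not shown here.
# The two-path pattern above (write checkpoint_temp.pth, resume from checkpoint.pth)
# is commonly implemented as "write to a temporary file, then atomically replace";
# a sketch of that convention, not necessarily the actual utils implementation:
def _save_checkpoint_sketch(path_save, path_load, content):
    torch.save(content, path_save)
    os.replace(path_save, path_load)  # atomic on POSIX, so readers never see a partial file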
def run(args, kwargs):
print('create model')
# importing model
VAE = importing_model(args)
print('load data')
train_loader, val_loader, test_loader, args = load_dataset(args, use_fixed_validation=True, **kwargs)
if args.slurm_job_id != '':
args.model_signature = str(args.seed)
# base_dir = 'checkpoints/final_report/'
elif args.model_signature == '':
args.model_signature = str(datetime.datetime.now())[0:19]
if args.parent_dir == '':
args.parent_dir = args.prior + '_on_' + args.dataset_name+'_model_name='+args.model_name
model_name = args.dataset_name + '_' + args.model_name + '_' + args.prior \
+ '_(components_' + str(args.number_components) + ', lr=' + str(args.lr) + ')'
snapshots_path = os.path.join(args.base_dir, args.parent_dir) + '/'
dir = snapshots_path + args.model_signature + '_' + model_name + '_' + args.parent_dir + '/'
if args.just_evaluate:
config = torch.load(dir + args.model_name + '.config')
config.translation = False
config.hidden_size = 300
model = VAE(config)
else:
model = VAE(args)
if not os.path.exists(dir):
os.makedirs(dir)
model.to(args.device)
optimizer = AdamNormGrad(model.parameters(), lr=args.lr)
print(args)
config_file = dir+'vae_config.txt'
with open(config_file, 'a') as f:
print(args, file=f)
run_density_estimation(args, train_loader, val_loader, test_loader, model, optimizer, dir, model_name = args.model_name)
if __name__ == "__main__":
args = parser.parse_args()
args.device = 'cuda' if torch.cuda.is_available() else 'cpu'
kwargs = {'num_workers': 2, 'pin_memory': True} if args.device=='cuda' else {}
run(args, kwargs)
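
# Compact restatement of the early-stopping rule used in the training loop above
# (illustrative only; the loop itself is the authoritative version): reset the
# patience counter on every improvement and during warm-up, stop once it exceeds
# early_stopping_epochs.
def _should_stop(val_loss, best_loss, e, epoch, warmup, patience):
    if val_loss < best_loss:
        best_loss, e = val_loss, 0
    else:
        e += 1
    if epoch < warmup:
        e = 0
    return e > patience, best_loss, e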
| [
"utils.utils.load_model",
"torch.cuda.is_available",
"utils.utils.importing_model",
"utils.evaluation.evaluate_loss",
"os.path.exists",
"argparse.ArgumentParser",
"utils.utils.save_model",
"utils.training.train_one_epoch",
"argparse.ArgumentTypeError",
"torch.save",
"time.time",
"utils.evaluation.final_evaluation",
"torch.manual_seed",
"os.makedirs",
"torch.load",
"os.path.join",
"utils.load_data.data_loader_instances.load_dataset",
"random.seed",
"datetime.datetime.now",
"torch.no_grad",
"torch.cuda.manual_seed",
"math.isnan"
] | [((724, 776), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""VAE+VampPrior"""'}), "(description='VAE+VampPrior')\n", (747, 776), False, 'import argparse\n'), ((5347, 5383), 'os.path.exists', 'os.path.exists', (['checkpoint_path_load'], {}), '(checkpoint_path_load)\n', (5361, 5383), False, 'import os\n'), ((6180, 6234), 'torch.save', 'torch.save', (['train_loss_history', "(folder + '.train_loss')"], {}), "(train_loss_history, folder + '.train_loss')\n", (6190, 6234), False, 'import torch\n'), ((6239, 6289), 'torch.save', 'torch.save', (['train_re_history', "(folder + '.train_re')"], {}), "(train_re_history, folder + '.train_re')\n", (6249, 6289), False, 'import torch\n'), ((6294, 6344), 'torch.save', 'torch.save', (['train_kl_history', "(folder + '.train_kl')"], {}), "(train_kl_history, folder + '.train_kl')\n", (6304, 6344), False, 'import torch\n'), ((6349, 6399), 'torch.save', 'torch.save', (['val_loss_history', "(folder + '.val_loss')"], {}), "(val_loss_history, folder + '.val_loss')\n", (6359, 6399), False, 'import torch\n'), ((6404, 6450), 'torch.save', 'torch.save', (['val_re_history', "(folder + '.val_re')"], {}), "(val_re_history, folder + '.val_re')\n", (6414, 6450), False, 'import torch\n'), ((6455, 6501), 'torch.save', 'torch.save', (['val_kl_history', "(folder + '.val_kl')"], {}), "(val_kl_history, folder + '.val_kl')\n", (6465, 6501), False, 'import torch\n'), ((6640, 6691), 'torch.save', 'torch.save', (['args', "(dir + args.model_name + '.config')"], {}), "(args, dir + args.model_name + '.config')\n", (6650, 6691), False, 'import torch\n'), ((6877, 6917), 'os.path.join', 'os.path.join', (['dir', '"""checkpoint_temp.pth"""'], {}), "(dir, 'checkpoint_temp.pth')\n", (6889, 6917), False, 'import os\n'), ((6945, 6980), 'os.path.join', 'os.path.join', (['dir', '"""checkpoint.pth"""'], {}), "(dir, 'checkpoint.pth')\n", (6957, 6980), False, 'import os\n'), ((7008, 7048), 'os.path.join', 'os.path.join', (['dir', '"""checkpoint_best.pth"""'], {}), "(dir, 'checkpoint_best.pth')\n", (7020, 7048), False, 'import os\n'), ((10883, 10904), 'utils.utils.importing_model', 'importing_model', (['args'], {}), '(args)\n', (10898, 10904), False, 'from utils.utils import importing_model\n'), ((10978, 11033), 'utils.load_data.data_loader_instances.load_dataset', 'load_dataset', (['args'], {'use_fixed_validation': '(True)'}), '(args, use_fixed_validation=True, **kwargs)\n', (10990, 11033), False, 'from utils.load_data.data_loader_instances import load_dataset\n'), ((5594, 5644), 'utils.utils.load_model', 'load_model', (['checkpoint_path_load', 'model', 'optimizer'], {}), '(checkpoint_path_load, model, optimizer)\n', (5604, 5644), False, 'from utils.utils import save_model, load_model\n'), ((5777, 5805), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (5794, 5805), False, 'import torch\n'), ((5892, 5914), 'random.seed', 'random.seed', (['args.seed'], {}), '(args.seed)\n', (5903, 5914), False, 'import random\n'), ((10625, 10640), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (10638, 10640), False, 'import torch\n'), ((10650, 10778), 'utils.evaluation.final_evaluation', 'final_evaluation', (['train_loader_input', 'test_loader_input', 'val_loader_input', 'best_model_path_load', 'model', 'optimizer', 'args', 'dir'], {}), '(train_loader_input, test_loader_input, val_loader_input,\n best_model_path_load, model, optimizer, args, dir)\n', (10666, 10778), False, 'from utils.evaluation import evaluate_loss, final_evaluation\n'), ((11590, 
11634), 'os.path.join', 'os.path.join', (['args.base_dir', 'args.parent_dir'], {}), '(args.base_dir, args.parent_dir)\n', (11602, 11634), False, 'import os\n'), ((11783, 11828), 'torch.load', 'torch.load', (["(dir + args.model_name + '.config')"], {}), "(dir + args.model_name + '.config')\n", (11793, 11828), False, 'import torch\n'), ((11972, 11991), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (11986, 11991), False, 'import os\n'), ((12001, 12017), 'os.makedirs', 'os.makedirs', (['dir'], {}), '(dir)\n', (12012, 12017), False, 'import os\n'), ((12439, 12464), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (12462, 12464), False, 'import torch\n'), ((659, 712), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""Boolean value expected."""'], {}), "('Boolean value expected.')\n", (685, 712), False, 'import argparse\n'), ((5850, 5883), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['args.seed'], {}), '(args.seed)\n', (5872, 5883), False, 'import torch\n'), ((7347, 7358), 'time.time', 'time.time', ([], {}), '()\n', (7356, 7358), False, 'import time\n'), ((7440, 7506), 'utils.training.train_one_epoch', 'train_one_epoch', (['epoch', 'args', 'train_loader_input', 'model', 'optimizer'], {}), '(epoch, args, train_loader_input, model, optimizer)\n', (7455, 7506), False, 'from utils.training import train_one_epoch\n'), ((7781, 7792), 'time.time', 'time.time', ([], {}), '()\n', (7790, 7792), False, 'import time\n'), ((8548, 8574), 'math.isnan', 'math.isnan', (['val_loss_epoch'], {}), '(val_loss_epoch)\n', (8558, 8574), False, 'import math\n'), ((7524, 7539), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (7537, 7539), False, 'import torch\n'), ((7602, 7687), 'utils.evaluation.evaluate_loss', 'evaluate_loss', (['args', 'model', 'val_loader_input'], {'dataset': 'train_loader_input.dataset'}), '(args, model, val_loader_input, dataset=train_loader_input.dataset\n )\n', (7615, 7687), False, 'from utils.evaluation import evaluate_loss, final_evaluation\n'), ((8055, 8118), 'utils.utils.save_model', 'save_model', (['checkpoint_path_save', 'checkpoint_path_load', 'content'], {}), '(checkpoint_path_save, checkpoint_path_load, content)\n', (8065, 8118), False, 'from utils.utils import save_model, load_model\n'), ((8284, 8347), 'utils.utils.save_model', 'save_model', (['checkpoint_path_save', 'best_model_path_load', 'content'], {}), '(checkpoint_path_save, best_model_path_load, content)\n', (8294, 8347), False, 'from utils.utils import save_model, load_model\n'), ((11233, 11256), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (11254, 11256), False, 'import datetime\n')] |
import matplotlib.pyplot as plt
import autograd.numpy as np
from autograd import grad
# Geometry and circuit constants
a = 0.025
B = 0.25
C = 0.12
b = B/2
c = C/2
h = 0.2
m0 = 4*np.pi/10000000  # vacuum permeability mu_0 = 4*pi*1e-7
N = 100   # number of turns
I1 = 20   # coil current
f = 30    # rotation frequency (Hz)
def l(th):
f1 = 1/(np.sqrt(h**2 + b**2 + c**2 + 2*c*h*np.sin(th)))
f2 = 1/(np.sqrt(h**2 + b**2 + c**2 - 2*c*h*np.sin(th)))
f3 = 1/(h**2 + c**2 + 2*c*h*np.sin(th))
f4 = 1/(h**2 + c**2 - 2*c*h*np.sin(th))
f5 = (2*b*np.cos(th)/(b**2 + h**2*(np.cos(th))**2))
return (m0*a**2/4) * (f5*((h*np.sin(th) + c)*f1 - (h*np.sin(th) - c)*f2) +
(2*b*c*np.cos(th)*f1*f3) + (2*b*c*np.cos(th)*f2*f4))
L = np.vectorize(l)
DerL = np.vectorize(grad(l))
def th(t):
return 2*np.pi*f*t
def aux(t):
return N*I1*l(th(t))
e = grad(aux)
E = np.vectorize(e)
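
# Consistency check (follows from the chain rule, not from the original script):
# e(t) = d/dt[N*I1*l(2*pi*f*t)] = N*I1*2*pi*f*l'(2*pi*f*t), so E can be cross-checked
# against DerL defined above:
def emf_via_chain_rule(t):
    return N * I1 * 2*np.pi*f * DerL(th(t))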
theta = np.linspace(0, 4*np.pi, 1000)
# plot L
fig, ax = plt.subplots()
ax.set(xlabel='Θ(rad)', ylabel='L(Θ)(H)', title='Mutual inductance L(Θ)')
ax.grid()
ax.plot(theta, L(theta))
plt.show()
# plot DerL
fig, ax = plt.subplots()
ax.set(xlabel='Θ(rad)', ylabel="dL(Θ)/dΘ (H/rad)",
       title='Rate of change of mutual inductance dL(Θ)/dΘ')
ax.grid()
ax.plot(theta, DerL(theta))
plt.show()
# plot E(f)^2
t = np.linspace(0, 5/f, 1000)
timestep = t[1] - t[0]  # sample spacing of t, used by np.fft.fftfreq below
E_f = np.fft.fft(E(t))
E_f = np.absolute(E_f)
freq = np.fft.fftfreq(t.size, d=timestep)
fig, ax = plt.subplots()
ax.set(xlabel='f(Hz)', ylabel='E(f)^2 ((V*s)^2)',
       title='Energy spectral density E(f)^2')
ax.grid()
ax.plot(freq, np.power(E_f, 2))
plt.show()
# Fourier series coefficients estimated numerically from 10000 uniformly spaced samples over one period
monte_time = np.linspace(0, 1/f, 10000)
E_t = E(monte_time)
def an(n):
a = E_t * np.cos(2*np.pi*n*f*monte_time)
return (2*a.sum())/a.size
def bn(n):
b = E_t * np.sin(2*np.pi*n*f*monte_time)
return (2*b.sum())/b.size
def approx(x, N):
fourier = np.array([an(i) * np.cos(2*np.pi*i*f*x) + bn(i)
* np.sin(2*np.pi*i*f*x) for i in range(1, N+1)])
return fourier.sum() + (an(0) / 2)
Nh = 10  # upper limit of the Fourier series sum
def fourier(x):
return approx(x, Nh)
fourier = np.vectorize(fourier)
# plot e(t) and fourier series approximation
t = np.linspace(0, 5/f, 10000)
fig, ax = plt.subplots()
ax.set(xlabel='t(s)', ylabel='e(t)(Volt)', title='Ηλεκτρεργετική Δύναμη e(t)')
ax.grid()
ax.plot(t, E(t), t, fourier(t))
plt.show()
pinakas = [[]]
pinakas[0].append('n')
pinakas[0].append('a_n')
pinakas[0].append('b_n')
print('a0 = ' + str(an(0)))
for i in range(1, 100 + 1):
pinakas.append([])
row = pinakas[i]
row.append(str(i))
row.append(an(i))
row.append(bn(i))
pinakas = np.array(pinakas)
print(pinakas)
| [
"autograd.numpy.vectorize",
"autograd.numpy.cos",
"autograd.numpy.sin",
"autograd.grad",
"autograd.numpy.absolute",
"autograd.numpy.array",
"autograd.numpy.linspace",
"autograd.numpy.power",
"autograd.numpy.fft.fftfreq",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((651, 666), 'autograd.numpy.vectorize', 'np.vectorize', (['l'], {}), '(l)\n', (663, 666), True, 'import autograd.numpy as np\n'), ((789, 798), 'autograd.grad', 'grad', (['aux'], {}), '(aux)\n', (793, 798), False, 'from autograd import grad\n'), ((804, 819), 'autograd.numpy.vectorize', 'np.vectorize', (['e'], {}), '(e)\n', (816, 819), True, 'import autograd.numpy as np\n'), ((831, 862), 'autograd.numpy.linspace', 'np.linspace', (['(0)', '(4 * np.pi)', '(1000)'], {}), '(0, 4 * np.pi, 1000)\n', (842, 862), True, 'import autograd.numpy as np\n'), ((884, 898), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (896, 898), True, 'import matplotlib.pyplot as plt\n'), ((1009, 1019), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1017, 1019), True, 'import matplotlib.pyplot as plt\n'), ((1046, 1060), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1058, 1060), True, 'import matplotlib.pyplot as plt\n'), ((1213, 1223), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1221, 1223), True, 'import matplotlib.pyplot as plt\n'), ((1246, 1273), 'autograd.numpy.linspace', 'np.linspace', (['(0)', '(5 / f)', '(1000)'], {}), '(0, 5 / f, 1000)\n', (1257, 1273), True, 'import autograd.numpy as np\n'), ((1336, 1352), 'autograd.numpy.absolute', 'np.absolute', (['E_f'], {}), '(E_f)\n', (1347, 1352), True, 'import autograd.numpy as np\n'), ((1361, 1395), 'autograd.numpy.fft.fftfreq', 'np.fft.fftfreq', (['t.size'], {'d': 'timestep'}), '(t.size, d=timestep)\n', (1375, 1395), True, 'import autograd.numpy as np\n'), ((1407, 1421), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1419, 1421), True, 'import matplotlib.pyplot as plt\n'), ((1574, 1584), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1582, 1584), True, 'import matplotlib.pyplot as plt\n'), ((1682, 1710), 'autograd.numpy.linspace', 'np.linspace', (['(0)', '(1 / f)', '(10000)'], {}), '(0, 1 / f, 10000)\n', (1693, 1710), True, 'import autograd.numpy as np\n'), ((2240, 2261), 'autograd.numpy.vectorize', 'np.vectorize', (['fourier'], {}), '(fourier)\n', (2252, 2261), True, 'import autograd.numpy as np\n'), ((2315, 2343), 'autograd.numpy.linspace', 'np.linspace', (['(0)', '(5 / f)', '(10000)'], {}), '(0, 5 / f, 10000)\n', (2326, 2343), True, 'import autograd.numpy as np\n'), ((2353, 2367), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2365, 2367), True, 'import matplotlib.pyplot as plt\n'), ((2495, 2505), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2503, 2505), True, 'import matplotlib.pyplot as plt\n'), ((2785, 2802), 'autograd.numpy.array', 'np.array', (['pinakas'], {}), '(pinakas)\n', (2793, 2802), True, 'import autograd.numpy as np\n'), ((688, 695), 'autograd.grad', 'grad', (['l'], {}), '(l)\n', (692, 695), False, 'from autograd import grad\n'), ((1555, 1571), 'autograd.numpy.power', 'np.power', (['E_f', '(2)'], {}), '(E_f, 2)\n', (1563, 1571), True, 'import autograd.numpy as np\n'), ((1761, 1799), 'autograd.numpy.cos', 'np.cos', (['(2 * np.pi * n * f * monte_time)'], {}), '(2 * np.pi * n * f * monte_time)\n', (1767, 1799), True, 'import autograd.numpy as np\n'), ((1854, 1892), 'autograd.numpy.sin', 'np.sin', (['(2 * np.pi * n * f * monte_time)'], {}), '(2 * np.pi * n * f * monte_time)\n', (1860, 1892), True, 'import autograd.numpy as np\n'), ((440, 450), 'autograd.numpy.cos', 'np.cos', (['th'], {}), '(th)\n', (446, 450), True, 'import autograd.numpy as np\n'), ((368, 378), 'autograd.numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (374, 378), True, 
'import autograd.numpy as np\n'), ((413, 423), 'autograd.numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (419, 423), True, 'import autograd.numpy as np\n'), ((261, 271), 'autograd.numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (267, 271), True, 'import autograd.numpy as np\n'), ((322, 332), 'autograd.numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (328, 332), True, 'import autograd.numpy as np\n'), ((465, 475), 'autograd.numpy.cos', 'np.cos', (['th'], {}), '(th)\n', (471, 475), True, 'import autograd.numpy as np\n'), ((1972, 2001), 'autograd.numpy.cos', 'np.cos', (['(2 * np.pi * i * f * x)'], {}), '(2 * np.pi * i * f * x)\n', (1978, 2001), True, 'import autograd.numpy as np\n'), ((2029, 2058), 'autograd.numpy.sin', 'np.sin', (['(2 * np.pi * i * f * x)'], {}), '(2 * np.pi * i * f * x)\n', (2035, 2058), True, 'import autograd.numpy as np\n'), ((623, 633), 'autograd.numpy.cos', 'np.cos', (['th'], {}), '(th)\n', (629, 633), True, 'import autograd.numpy as np\n'), ((596, 606), 'autograd.numpy.cos', 'np.cos', (['th'], {}), '(th)\n', (602, 606), True, 'import autograd.numpy as np\n'), ((516, 526), 'autograd.numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (522, 526), True, 'import autograd.numpy as np\n'), ((540, 550), 'autograd.numpy.sin', 'np.sin', (['th'], {}), '(th)\n', (546, 550), True, 'import autograd.numpy as np\n')] |
from sqlalchemy import Column, Integer, String
from modules.core import source as core
class User(core.Base):
"""
Sample module user
"""
__tablename__ = "sample_users"
id = Column(Integer, primary_key=True)
string = Column(String)
def __init__(self, id_, string_):
self.id = id_
self.string = string_
| [
"sqlalchemy.Column"
] | [((198, 231), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (204, 231), False, 'from sqlalchemy import Column, Integer, String\n'), ((245, 259), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (251, 259), False, 'from sqlalchemy import Column, Integer, String\n')] |
import datetime
import traceback
from typing import Any, List
import aiohttp
import discord
from discord import AsyncWebhookAdapter, Webhook
from discord.ext import commands, flags
from dotenv import load_dotenv
import config
from ... import errors
from ...classes.bot import Bot
IGNORED_ERRORS = [commands.CommandNotFound, errors.AllCommandsDisabled]
EXPECTED_ERRORS = [
errors.ConversionError,
errors.DoesNotExist,
errors.AlreadyExists,
errors.CommandDisabled,
commands.MissingRequiredArgument,
commands.ChannelNotFound,
commands.RoleNotFound,
commands.NotOwner,
commands.CommandOnCooldown,
discord.Forbidden,
discord.InvalidArgument,
flags.ArgumentParsingError,
]
UPTIME = config.UPTIME_WEBHOOK
ERROR = config.ERROR_WEBHOOK
GUILD = config.GUILD_WEBHOOK
load_dotenv()
async def uptime_log(content: str) -> None:
if not UPTIME:
return
async with aiohttp.ClientSession() as session:
webhook = Webhook.from_url(
UPTIME, adapter=AsyncWebhookAdapter(session)
)
await webhook.send(
content, username="Starboard Uptime"
)
async def error_log(content: str) -> None:
if not ERROR:
return
async with aiohttp.ClientSession() as session:
webhook = Webhook.from_url(
ERROR, adapter=AsyncWebhookAdapter(session)
)
await webhook.send(
content, username="Starboard Errors"
)
async def join_leave_log(embed: discord.Embed) -> None:
if not GUILD:
return
async with aiohttp.ClientSession() as session:
webhook = Webhook.from_url(
GUILD, adapter=AsyncWebhookAdapter(session)
)
await webhook.send(
embed=embed, username="Starboard Guild Log"
)
class BaseEvents(commands.Cog):
def __init__(self, bot: Bot) -> None:
self.bot = bot
self.type_map = {
"error": {"color": self.bot.error_color, "title": "Error"},
"info": {"color": self.bot.theme_color, "title": "Info"},
}
@commands.Cog.listener()
async def on_guild_join(self, guild: discord.Guild) -> None:
embed = discord.Embed(
title=f"Joined **{guild.name}**",
description=f"**{guild.member_count} members**",
color=self.bot.theme_color,
)
embed.timestamp = datetime.datetime.utcnow()
await join_leave_log(embed)
@commands.Cog.listener()
async def on_guild_remove(self, guild: discord.Guild) -> None:
embed = discord.Embed(
title=f"Left **{guild.name}**",
description=f"**{guild.member_count} members**",
color=self.bot.dark_theme_color,
)
embed.timestamp = datetime.datetime.utcnow()
await join_leave_log(embed)
@commands.Cog.listener()
async def on_log_error(
self,
title: str,
error: Exception,
args: List[Any] = [],
kwargs: dict = {},
) -> None:
p = commands.Paginator(prefix="```python")
p.add_line(title)
p.add_line(empty=True)
p.add_line(f"{type(error)}: {error}")
p.add_line(empty=True)
p.add_line(f"Args: {args}")
p.add_line(f"Kwargs: {kwargs}")
p.add_line(empty=True)
tb = traceback.format_tb(error.__traceback__)
for line in tb:
p.add_line(line=line)
for page in p.pages:
await error_log(page)
@commands.Cog.listener()
async def on_shard_ready(self, shard_id: int) -> None:
self.bot.log.info(
f"[Cluster#{self.bot.cluster_name}] Shard {shard_id} ready"
)
@commands.Cog.listener()
async def on_ready(self) -> None:
self.bot.log.info(f"[Cluster#{self.bot.cluster_name}] Ready")
await uptime_log(
f":green_circle: Cluster **{self.bot.cluster_name}** ready!"
)
try:
self.bot.pipe.send(1)
except BrokenPipeError:
pass
@commands.Cog.listener()
async def on_message(self, message: discord.Message) -> None:
if message.author.bot:
return
if message.content.replace("!", "") == self.bot.user.mention:
await message.channel.send("My prefix is `sb!`")
else:
await self.bot.process_commands(message)
@commands.Cog.listener()
async def on_command_error(
self, ctx: commands.Context, e: Exception
) -> None:
try:
e = e.original
except AttributeError:
pass
if type(e) in IGNORED_ERRORS:
return
elif type(e) in EXPECTED_ERRORS:
await ctx.send(e)
elif type(e) == discord.errors.Forbidden:
try:
await ctx.message.author.send(
"I can't send messages in "
f"{ctx.message.channel.mention}, "
"or I'm missing the `Embed Links` "
"permission there."
)
except discord.Forbidden:
pass
else:
embed = discord.Embed(
title="Something's Not Right",
description=(
"Something went wrong while "
"running this command. If the "
"problem persists, please report "
"this in the support server."
),
color=self.bot.error_color,
)
tb = "".join(traceback.format_tb(e.__traceback__))
            full_tb = f"{e}\n" f"```{tb}```"
if len(full_tb) > 1024:
to_remove = (len(full_tb) - 1024) + 10
full_tb = f"{e}\n```...{tb[to_remove:]}```"
embed.add_field(name=e.__class__.__name__, value=full_tb)
await ctx.send(embed=embed)
self.bot.dispatch(
"log_error", "Command Error", e, ctx.args, ctx.kwargs
)
@commands.Cog.listener()
async def on_guild_log(
self, message: str, log_type: str, guild: discord.Guild
) -> None:
sql_guild = await self.bot.db.get_guild(guild.id)
if sql_guild["log_channel"] is None:
return
log_channel = guild.get_channel(int(sql_guild["log_channel"]))
if not log_channel:
return
embed = discord.Embed(
title=self.type_map[log_type]["title"],
description=message,
color=self.type_map[log_type]["color"],
)
embed.timestamp = datetime.datetime.utcnow()
await log_channel.send(embed=embed)
def setup(bot: Bot) -> None:
bot.add_cog(BaseEvents(bot))
@bot.before_invoke
async def create_data(message: discord.Message) -> None:
await bot.db.create_guild(message.guild.id)
await bot.db.create_user(message.author.id, message.author.bot)
await bot.db.create_member(message.author.id, message.guild.id)
| [
"discord.ext.commands.Cog.listener",
"aiohttp.ClientSession",
"traceback.format_tb",
"datetime.datetime.utcnow",
"dotenv.load_dotenv",
"discord.ext.commands.Paginator",
"discord.AsyncWebhookAdapter",
"discord.Embed"
] | [((809, 822), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (820, 822), False, 'from dotenv import load_dotenv\n'), ((2084, 2107), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (2105, 2107), False, 'from discord.ext import commands, flags\n'), ((2456, 2479), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (2477, 2479), False, 'from discord.ext import commands, flags\n'), ((2833, 2856), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (2854, 2856), False, 'from discord.ext import commands, flags\n'), ((3493, 3516), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (3514, 3516), False, 'from discord.ext import commands, flags\n'), ((3691, 3714), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (3712, 3714), False, 'from discord.ext import commands, flags\n'), ((4034, 4057), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (4055, 4057), False, 'from discord.ext import commands, flags\n'), ((4378, 4401), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (4399, 4401), False, 'from discord.ext import commands, flags\n'), ((6006, 6029), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (6027, 6029), False, 'from discord.ext import commands, flags\n'), ((918, 941), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (939, 941), False, 'import aiohttp\n'), ((1237, 1260), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (1258, 1260), False, 'import aiohttp\n'), ((1568, 1591), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {}), '()\n', (1589, 1591), False, 'import aiohttp\n'), ((2189, 2318), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Joined **{guild.name}**"""', 'description': 'f"""**{guild.member_count} members**"""', 'color': 'self.bot.theme_color'}), "(title=f'Joined **{guild.name}**', description=\n f'**{guild.member_count} members**', color=self.bot.theme_color)\n", (2202, 2318), False, 'import discord\n'), ((2387, 2413), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2411, 2413), False, 'import datetime\n'), ((2563, 2695), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Left **{guild.name}**"""', 'description': 'f"""**{guild.member_count} members**"""', 'color': 'self.bot.dark_theme_color'}), "(title=f'Left **{guild.name}**', description=\n f'**{guild.member_count} members**', color=self.bot.dark_theme_color)\n", (2576, 2695), False, 'import discord\n'), ((2764, 2790), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (2788, 2790), False, 'import datetime\n'), ((3029, 3067), 'discord.ext.commands.Paginator', 'commands.Paginator', ([], {'prefix': '"""```python"""'}), "(prefix='```python')\n", (3047, 3067), False, 'from discord.ext import commands, flags\n'), ((3324, 3364), 'traceback.format_tb', 'traceback.format_tb', (['error.__traceback__'], {}), '(error.__traceback__)\n', (3343, 3364), False, 'import traceback\n'), ((6394, 6512), 'discord.Embed', 'discord.Embed', ([], {'title': "self.type_map[log_type]['title']", 'description': 'message', 'color': "self.type_map[log_type]['color']"}), "(title=self.type_map[log_type]['title'], description=message,\n color=self.type_map[log_type]['color'])\n", (6407, 6512), False, 'import discord\n'), ((6582, 6608), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', 
(6606, 6608), False, 'import datetime\n'), ((1018, 1046), 'discord.AsyncWebhookAdapter', 'AsyncWebhookAdapter', (['session'], {}), '(session)\n', (1037, 1046), False, 'from discord import AsyncWebhookAdapter, Webhook\n'), ((1336, 1364), 'discord.AsyncWebhookAdapter', 'AsyncWebhookAdapter', (['session'], {}), '(session)\n', (1355, 1364), False, 'from discord import AsyncWebhookAdapter, Webhook\n'), ((1667, 1695), 'discord.AsyncWebhookAdapter', 'AsyncWebhookAdapter', (['session'], {}), '(session)\n', (1686, 1695), False, 'from discord import AsyncWebhookAdapter, Webhook\n'), ((5139, 5352), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Something\'s Not Right"""', 'description': '"""Something went wrong while running this command. If the problem persists, please report this in the support server."""', 'color': 'self.bot.error_color'}), '(title="Something\'s Not Right", description=\n \'Something went wrong while running this command. If the problem persists, please report this in the support server.\'\n , color=self.bot.error_color)\n', (5152, 5352), False, 'import discord\n'), ((5540, 5576), 'traceback.format_tb', 'traceback.format_tb', (['e.__traceback__'], {}), '(e.__traceback__)\n', (5559, 5576), False, 'import traceback\n')] |
# Generated by Django 3.0.4 on 2020-03-24 23:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('opsserver', '0004_auto_20200324_2313'),
]
operations = [
migrations.AddField(
model_name='requestdata',
name='country',
field=models.CharField(blank=True, max_length=256, null=True),
),
]
| [
"django.db.models.CharField"
] | [((342, 397), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(256)', 'null': '(True)'}), '(blank=True, max_length=256, null=True)\n', (358, 397), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
import pytest
from amrex import base as amrex
@pytest.fixture(autouse=True, scope='session')
def amrex_init():
amrex.AMReX.initialize(args=["amrex.verbose=-1"])
yield
amrex.AMReX.finalize()
@pytest.fixture(scope='module')
def boxarr():
"""BoxArray for MultiFab creation"""
bx = amrex.Box.new((0, 0, 0), (63, 63, 63))
ba = amrex.BoxArray.new(bx)
ba.max_size(32)
return ba
@pytest.fixture(scope='module')
def distmap(boxarr):
"""DistributionMapping for MultiFab creation"""
dm = amrex.DistributionMapping.new(boxarr)
return dm
| [
"amrex.base.BoxArray.new",
"amrex.base.AMReX.finalize",
"amrex.base.DistributionMapping.new",
"amrex.base.Box.new",
"pytest.fixture",
"amrex.base.AMReX.initialize"
] | [((73, 118), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)', 'scope': '"""session"""'}), "(autouse=True, scope='session')\n", (87, 118), False, 'import pytest\n'), ((230, 260), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (244, 260), False, 'import pytest\n'), ((432, 462), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (446, 462), False, 'import pytest\n'), ((141, 190), 'amrex.base.AMReX.initialize', 'amrex.AMReX.initialize', ([], {'args': "['amrex.verbose=-1']"}), "(args=['amrex.verbose=-1'])\n", (163, 190), True, 'from amrex import base as amrex\n'), ((205, 227), 'amrex.base.AMReX.finalize', 'amrex.AMReX.finalize', ([], {}), '()\n', (225, 227), True, 'from amrex import base as amrex\n'), ((325, 363), 'amrex.base.Box.new', 'amrex.Box.new', (['(0, 0, 0)', '(63, 63, 63)'], {}), '((0, 0, 0), (63, 63, 63))\n', (338, 363), True, 'from amrex import base as amrex\n'), ((373, 395), 'amrex.base.BoxArray.new', 'amrex.BoxArray.new', (['bx'], {}), '(bx)\n', (391, 395), True, 'from amrex import base as amrex\n'), ((545, 582), 'amrex.base.DistributionMapping.new', 'amrex.DistributionMapping.new', (['boxarr'], {}), '(boxarr)\n', (574, 582), True, 'from amrex import base as amrex\n')] |
import torch
import numpy as np
import torch.nn.functional as F
'''
'''
dic2 = np.load('ClipGradByGlobalNorm.npz')
x = dic2['x']
w = dic2['w']
b = dic2['b']
loss_2 = dic2['loss']
w_2 = dic2['w_2']
b_2 = dic2['b_2']
x = torch.from_numpy(x)
linear = torch.nn.Linear(in_features=10, out_features=2)
w = w.transpose(1, 0)
linear.weight.data = torch.from_numpy(w)
linear.bias.data = torch.from_numpy(b)
# build the optimizer
scaler = torch.cuda.amp.GradScaler(enabled=False)  # do not use mixed-precision training
param_groups = []
base_lr = 0.1
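# two parameter groups: the weight is gradient-clipped (need_clip=True, clip_norm=1.0), the bias is not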
param_group_conv = {'params': [linear.weight]}
param_group_conv['lr'] = base_lr
param_group_conv['need_clip'] = True
param_group_conv['clip_norm'] = 1.0
# param_group_conv['weight_decay'] = base_wd
param_groups.append(param_group_conv)
if linear.bias is not None:
if linear.bias.requires_grad:
param_group_conv_bias = {'params': [linear.bias]}
param_group_conv_bias['lr'] = base_lr
param_group_conv_bias['need_clip'] = False
# param_group_conv_bias['weight_decay'] = 0.0
param_groups.append(param_group_conv_bias)
optimizer = torch.optim.SGD(
param_groups, lr=base_lr
)
# compute the loss
out = linear(x)
loss = 1000.0 * torch.mean(out)  # multiply by 1000.0 to amplify the loss so the gradient norm is large
optimizer.zero_grad()
scaler.scale(loss).backward()
# gradient clipping
for param_group in optimizer.param_groups:
if param_group['need_clip']:
torch.nn.utils.clip_grad_norm_(param_group['params'], max_norm=param_group['clip_norm'], norm_type=2)
scaler.step(optimizer)
scaler.update()
www2 = linear.weight.data.cpu().detach().numpy()
www2 = www2.transpose(1, 0)
bbb2 = linear.bias.data.cpu().detach().numpy()
ddd = np.sum((w_2 - www2)**2)
print('ddd=%.6f' % ddd)
ddd = np.sum((b_2 - bbb2)**2)
print('ddd=%.6f' % ddd)
print()
| [
"torch.optim.SGD",
"torch.cuda.amp.GradScaler",
"torch.mean",
"torch.nn.utils.clip_grad_norm_",
"torch.from_numpy",
"numpy.sum",
"torch.nn.Linear",
"numpy.load"
] | [((84, 119), 'numpy.load', 'np.load', (['"""ClipGradByGlobalNorm.npz"""'], {}), "('ClipGradByGlobalNorm.npz')\n", (91, 119), True, 'import numpy as np\n'), ((229, 248), 'torch.from_numpy', 'torch.from_numpy', (['x'], {}), '(x)\n', (245, 248), False, 'import torch\n'), ((260, 307), 'torch.nn.Linear', 'torch.nn.Linear', ([], {'in_features': '(10)', 'out_features': '(2)'}), '(in_features=10, out_features=2)\n', (275, 307), False, 'import torch\n'), ((351, 370), 'torch.from_numpy', 'torch.from_numpy', (['w'], {}), '(w)\n', (367, 370), False, 'import torch\n'), ((390, 409), 'torch.from_numpy', 'torch.from_numpy', (['b'], {}), '(b)\n', (406, 409), False, 'import torch\n'), ((428, 468), 'torch.cuda.amp.GradScaler', 'torch.cuda.amp.GradScaler', ([], {'enabled': '(False)'}), '(enabled=False)\n', (453, 468), False, 'import torch\n'), ((1085, 1126), 'torch.optim.SGD', 'torch.optim.SGD', (['param_groups'], {'lr': 'base_lr'}), '(param_groups, lr=base_lr)\n', (1100, 1126), False, 'import torch\n'), ((1635, 1660), 'numpy.sum', 'np.sum', (['((w_2 - www2) ** 2)'], {}), '((w_2 - www2) ** 2)\n', (1641, 1660), True, 'import numpy as np\n'), ((1689, 1714), 'numpy.sum', 'np.sum', (['((b_2 - bbb2) ** 2)'], {}), '((b_2 - bbb2) ** 2)\n', (1695, 1714), True, 'import numpy as np\n'), ((1174, 1189), 'torch.mean', 'torch.mean', (['out'], {}), '(out)\n', (1184, 1189), False, 'import torch\n'), ((1361, 1467), 'torch.nn.utils.clip_grad_norm_', 'torch.nn.utils.clip_grad_norm_', (["param_group['params']"], {'max_norm': "param_group['clip_norm']", 'norm_type': '(2)'}), "(param_group['params'], max_norm=param_group[\n 'clip_norm'], norm_type=2)\n", (1391, 1467), False, 'import torch\n')] |
# Functions that would be used to create the topography of the neuraal net
# The neurons and weights are taken as matrices
# Neurons are 1D arrays or lists of the dimension 1 X col
import numpy as np
import pandas as pa
def collectData (sheet_name):
""" Returns an array (numpy) of the input data from the excel sheet """
Input = pa.read_excel(sheet_name) # I is the input data matrix, data has to be extracted one by one from the columns
Input = np.array(Input)
return Input
def getError(Input, row, Output):
""" Returns the error value of the network """
col = int(np.shape(Input)[1]) - 1
error = 0.5* ( (Input[row][col] - Output)**2 ) # MSE
return error
def inputNeurons (Input, row):
""" Returns an input matrix based on the data matrix with data set present in column 'column' """
n_row = int(np.shape(Input)[0])
n_col = int(np.shape(Input)[1])
I = [0]*(n_col - 1)
for c in range(n_col-1):
I[c] = Input[row][c]
return I
def transposeMat (M):
""" Returns the transpose of matrix M, used for neurons in the next layer """
# Not used in the current program
M = np.array(M)
row = int(np.shape(M)[0])
col = int(np.shape(M)[1])
M_trans = np.zeros((col, row))
for r in range(row):
for c in range(col):
M_trans[c][r] = M[r][c].copy()
M_trans = M_trans.tolist()
return M_trans
def getNextLayer (n_out_mat, w_mat):
""" Gets the next layer from output matrix of neurons and weights """
# The layer is in the form of 1 X col array/matrix
N = np.array(n_out_mat)
W = np.array(w_mat)
if (W.ndim == 1):
col_wt = int(np.shape(W)[0])
else:
col_wt = int(np.shape(W)[1])
col_n = int(np.shape(N)[0])
M_mult = np.zeros((1, col_wt)) # Only designed for neurons i.e. matrices of size 1 X col
for c_w in range(col_wt):
for c_n in range(col_n):
if (W.ndim == 1):
M_mult[0][c_w] = M_mult[0][c_w] + ( N[c_n] * W[c_w] )
else:
M_mult[0][c_w] = M_mult[0][c_w] + ( N[c_n] * W[c_n][c_w] ) # r X c Type matrix multiplication
M_mult = M_mult.tolist()
return M_mult[0] | [
"numpy.array",
"numpy.zeros",
"numpy.shape",
"pandas.read_excel"
] | [((342, 367), 'pandas.read_excel', 'pa.read_excel', (['sheet_name'], {}), '(sheet_name)\n', (355, 367), True, 'import pandas as pa\n'), ((463, 478), 'numpy.array', 'np.array', (['Input'], {}), '(Input)\n', (471, 478), True, 'import numpy as np\n'), ((1156, 1167), 'numpy.array', 'np.array', (['M'], {}), '(M)\n', (1164, 1167), True, 'import numpy as np\n'), ((1243, 1263), 'numpy.zeros', 'np.zeros', (['(col, row)'], {}), '((col, row))\n', (1251, 1263), True, 'import numpy as np\n'), ((1591, 1610), 'numpy.array', 'np.array', (['n_out_mat'], {}), '(n_out_mat)\n', (1599, 1610), True, 'import numpy as np\n'), ((1619, 1634), 'numpy.array', 'np.array', (['w_mat'], {}), '(w_mat)\n', (1627, 1634), True, 'import numpy as np\n'), ((1788, 1809), 'numpy.zeros', 'np.zeros', (['(1, col_wt)'], {}), '((1, col_wt))\n', (1796, 1809), True, 'import numpy as np\n'), ((849, 864), 'numpy.shape', 'np.shape', (['Input'], {}), '(Input)\n', (857, 864), True, 'import numpy as np\n'), ((885, 900), 'numpy.shape', 'np.shape', (['Input'], {}), '(Input)\n', (893, 900), True, 'import numpy as np\n'), ((1182, 1193), 'numpy.shape', 'np.shape', (['M'], {}), '(M)\n', (1190, 1193), True, 'import numpy as np\n'), ((1212, 1223), 'numpy.shape', 'np.shape', (['M'], {}), '(M)\n', (1220, 1223), True, 'import numpy as np\n'), ((1758, 1769), 'numpy.shape', 'np.shape', (['N'], {}), '(N)\n', (1766, 1769), True, 'import numpy as np\n'), ((599, 614), 'numpy.shape', 'np.shape', (['Input'], {}), '(Input)\n', (607, 614), True, 'import numpy as np\n'), ((1679, 1690), 'numpy.shape', 'np.shape', (['W'], {}), '(W)\n', (1687, 1690), True, 'import numpy as np\n'), ((1726, 1737), 'numpy.shape', 'np.shape', (['W'], {}), '(W)\n', (1734, 1737), True, 'import numpy as np\n')] |
from collections import deque
queue = deque(["Eric", "John", "Michael"])
print(queue)
queue.append("Terry")
print(queue)
queue.popleft()
print(queue) | [
"collections.deque"
] | [((40, 74), 'collections.deque', 'deque', (["['Eric', 'John', 'Michael']"], {}), "(['Eric', 'John', 'Michael'])\n", (45, 74), False, 'from collections import deque\n')] |
import gym
import cv2
import numpy as np
from abc import abstractmethod
from collections import deque
from copy import copy
import imutils
import gym_super_mario_bros
from nes_py.wrappers import JoypadSpace
from gym_super_mario_bros.actions import SIMPLE_MOVEMENT, COMPLEX_MOVEMENT
from torch.multiprocessing import Pipe, Process
from model import *
from config import *
from PIL import Image
import cv2
import os
train_method = default_config['TrainMethod']
max_step_per_episode = int(default_config['MaxStepPerEpisode'])
import glob
class Environment(Process):
@abstractmethod
def run(self):
pass
@abstractmethod
def reset(self):
pass
@abstractmethod
def pre_proc(self, x):
pass
@abstractmethod
def get_init_state(self, x):
pass
def unwrap(env):
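    # recursively strip gym wrapper layers to reach the underlying base environment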
if hasattr(env, "unwrapped"):
return env.unwrapped
elif hasattr(env, "env"):
return unwrap(env.env)
elif hasattr(env, "leg_env"):
return unwrap(env.leg_env)
else:
return env
class MaxAndSkipEnv(gym.Wrapper):
def __init__(self, env, is_render, skip=4):
"""Return only every `skip`-th frame"""
gym.Wrapper.__init__(self, env)
# most recent raw observations (for max pooling across time steps)
self._obs_buffer = np.zeros((2,) + env.observation_space.shape, dtype=np.uint8)
self._skip = skip
self.is_render = is_render
def step(self, action):
"""Repeat action, sum reward, and max over last observations."""
total_reward = 0.0
done = None
for i in range(self._skip):
obs, reward, done, info = self.env.step(action)
if self.is_render:
self.env.render()
if i == self._skip - 2:
self._obs_buffer[0] = obs
if i == self._skip - 1:
self._obs_buffer[1] = obs
total_reward += reward
if done:
break
# Note that the observation on the done=True frame
# doesn't matter
max_frame = self._obs_buffer.max(axis=0)
return max_frame, total_reward, done, info
def reset(self, **kwargs):
return self.env.reset(**kwargs)
class MontezumaInfoWrapper(gym.Wrapper):
def __init__(self, env, room_address):
super(MontezumaInfoWrapper, self).__init__(env)
self.room_address = room_address
self.visited_rooms = set()
def get_current_room(self):
ram = unwrap(self.env).ale.getRAM()
assert len(ram) == 128
return int(ram[self.room_address])
def step(self, action):
obs, rew, done, info = self.env.step(action)
self.visited_rooms.add(self.get_current_room())
if 'episode' not in info:
info['episode'] = {}
info['episode'].update(visited_rooms=copy(self.visited_rooms))
if done:
self.visited_rooms.clear()
return obs, rew, done, info
def reset(self):
return self.env.reset()
class AtariEnvironment(Environment):
def __init__(
self,
env_id,
is_render,
env_idx,
child_conn,
history_size=4,
h=84,
w=84,
life_done=True,
sticky_action=True,
p=0.25):
super(AtariEnvironment, self).__init__()
self.daemon = True
self.env = MaxAndSkipEnv(gym.make(env_id), is_render)
if 'Montezuma' in env_id:
self.env = MontezumaInfoWrapper(self.env, room_address=3 if 'Montezuma' in env_id else 1)
self.env_id = env_id
self.is_render = is_render
self.env_idx = env_idx
self.steps = 0
self.episode = 0
self.rall = 0
self.recent_rlist = deque(maxlen=100)
self.child_conn = child_conn
self.sticky_action = sticky_action
self.last_action = 0
self.p = p
self.history_size = history_size
self.history = np.zeros([history_size, h, w])
self.h = h
self.w = w
self.reset()
def run(self):
super(AtariEnvironment, self).run()
while True:
action = self.child_conn.recv()
if 'Breakout' in self.env_id:
action += 1
# sticky action
if self.sticky_action:
if np.random.rand() <= self.p:
action = self.last_action
self.last_action = action
s, reward, done, info = self.env.step(action)
if max_step_per_episode < self.steps:
done = True
log_reward = reward
force_done = done
self.history[:3, :, :] = self.history[1:, :, :]
self.history[3, :, :] = self.pre_proc(s)
self.rall += reward
self.steps += 1
if done:
self.recent_rlist.append(self.rall)
print("[Episode {}({})] Step: {} Reward: {} Recent Reward: {} Visited Room: [{}]".format(
self.episode, self.env_idx, self.steps, self.rall, np.mean(self.recent_rlist),
info.get('episode', {}).get('visited_rooms', {})))
self.history = self.reset()
self.child_conn.send(
[self.history[:, :, :], reward, force_done, done, log_reward])
def reset(self):
self.last_action = 0
self.steps = 0
self.episode += 1
self.rall = 0
s = self.env.reset()
self.get_init_state(
self.pre_proc(s))
return self.history[:, :, :]
def pre_proc(self, X):
X = np.array(Image.fromarray(X).convert('L')).astype('float32')
x = cv2.resize(X, (self.h, self.w))
return x
def get_init_state(self, s):
for i in range(self.history_size):
self.history[i, :, :] = self.pre_proc(s)
class MarioEnvironment(Process):
def __init__(
self,
env_id,
is_render,
env_idx,
child_conn,
history_size=4,
life_done=False,
h=84,
w=84, movement=COMPLEX_MOVEMENT, sticky_action=True,
p=0.25):
super(MarioEnvironment, self).__init__()
self.daemon = True
self.env = JoypadSpace(
gym_super_mario_bros.make(env_id), COMPLEX_MOVEMENT)
self.is_render = is_render
self.env_idx = env_idx
self.steps = 0
self.episode = 0
self.rall = 0
self.recent_rlist = deque(maxlen=100)
self.child_conn = child_conn
self.life_done = life_done
self.sticky_action = sticky_action
self.last_action = 0
self.p = p
self.history_size = history_size
self.history = np.zeros([history_size, h, w])
self.h = h
self.w = w
self.reset()
def run(self):
super(MarioEnvironment, self).run()
self.env = gym_super_mario_bros.make('SuperMarioBros-1-1-v0')
self.env = JoypadSpace(self.env, COMPLEX_MOVEMENT)
if not os.path.exists("data"):
os.makedirs("data")
done = True
min_area = 20
max_area = 100
subtractor = cv2.createBackgroundSubtractorKNN(history=5, detectShadows=True)
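        # per frame: background-subtract, keep small moving contours, and save 40x40 crops of them under data/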
while True:
if done :
self.reset()
action = self.child_conn.recv()
if self.is_render:
self.env.render()
# sticky action
if self.sticky_action:
if np.random.rand() <= self.p:
action = self.last_action
self.last_action = action
# 4 frame skip
reward = 0.0
done = None
# list_img = os.listdir('data')
# total_img = len(list_img)
# img_last = total_img+1
# #print(int(list_img[-1][:-4]),img_last)
# img_last = str(img_last)+".jpg"
# path = "data/"+img_last
# print(path)
for i in range(4):
obs, r, done, info = self.env.step(action)
temp_frame = cv2.cvtColor(obs,cv2.COLOR_BGR2RGB)
temp_obs = np.array(cv2.cvtColor(obs,cv2.COLOR_BGR2RGB))
temp_frame = cv2.cvtColor(temp_frame, cv2.COLOR_BGR2GRAY)
temp_frame = cv2.GaussianBlur(temp_frame, (5, 5), 0)
subtr_frame = subtractor.apply(temp_frame)
#edges = cv2.Canny(temp_frame, 50, 100)
mask_sub = cv2.bitwise_and(temp_frame, subtr_frame)
_, thr = cv2.threshold(mask_sub, 100, 255, 0,cv2.THRESH_BINARY)
cnts = cv2.findContours(thr, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
cnts = imutils.grab_contours(cnts)
for c in cnts:
if cv2.contourArea(c) < min_area or cv2.contourArea(c)>max_area :
continue
(x, y, w, h) = cv2.boundingRect(c)
#temp_obs = cv2.rectangle(temp_obs, (x-50, y-50), (x + w+50, y +50+ h), (0, 255, 0), 3)
temp_obs = temp_obs[y-5:y+h+5,x-5:x+w+5]
list_img = os.listdir('data')
total_img = len(list_img)
img_last = total_img + 1
img_last = str(img_last)+".jpg"
path = "data/"+img_last
try:
cv2.imwrite(path,cv2.resize(temp_obs, (40, 40)))
except:
pass
# cv2.imshow("frame",temp_frame)
# cv2.imshow("edge",edges)
#cv2.imwrite(path,obs)
if self.is_render:
self.env.render()
reward += r
if done:
break
# when Mario loses life, changes the state to the terminal
# state.
if self.life_done:
if self.lives > info['life'] and info['life'] > 0:
force_done = True
self.lives = info['life']
else:
force_done = done
self.lives = info['life']
else:
force_done = done
# reward range -15 ~ 15
log_reward = reward / 15
self.rall += log_reward
r = int(info.get('flag_get', False))
self.history[:3, :, :] = self.history[1:, :, :]
self.history[3, :, :] = self.pre_proc(obs)
self.steps += 1
if done:
self.recent_rlist.append(self.rall)
print(
"[Episode {}({})] Step: {} Reward: {} Recent Reward: {} Stage: {} current x:{} max x:{}".format(
self.episode,
self.env_idx,
self.steps,
self.rall,
np.mean(
self.recent_rlist),
info['stage'],
info['x_pos'],
self.max_pos))
self.history = self.reset()
self.child_conn.send([self.history[:, :, :], r, force_done, done, log_reward])
def reset(self):
self.last_action = 0
self.steps = 0
self.episode += 1
self.rall = 0
self.lives = 3
self.stage = 1
self.max_pos = 0
self.get_init_state(self.env.reset())
return self.history[:, :, :]
def pre_proc(self, X):
# grayscaling
x = cv2.cvtColor(X, cv2.COLOR_RGB2GRAY)
# resize
x = cv2.resize(x, (self.h, self.w))
return x
def get_init_state(self, s):
for i in range(self.history_size):
self.history[i, :, :] = self.pre_proc(s)
| [
"numpy.random.rand",
"nes_py.wrappers.JoypadSpace",
"copy.copy",
"gym.make",
"os.path.exists",
"gym_super_mario_bros.make",
"os.listdir",
"collections.deque",
"numpy.mean",
"cv2.threshold",
"cv2.contourArea",
"imutils.grab_contours",
"cv2.cvtColor",
"cv2.resize",
"cv2.GaussianBlur",
"PIL.Image.fromarray",
"os.makedirs",
"cv2.bitwise_and",
"numpy.zeros",
"gym.Wrapper.__init__",
"cv2.findContours",
"cv2.createBackgroundSubtractorKNN",
"cv2.boundingRect"
] | [((1184, 1215), 'gym.Wrapper.__init__', 'gym.Wrapper.__init__', (['self', 'env'], {}), '(self, env)\n', (1204, 1215), False, 'import gym\n'), ((1318, 1378), 'numpy.zeros', 'np.zeros', (['((2,) + env.observation_space.shape)'], {'dtype': 'np.uint8'}), '((2,) + env.observation_space.shape, dtype=np.uint8)\n', (1326, 1378), True, 'import numpy as np\n'), ((3809, 3826), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (3814, 3826), False, 'from collections import deque\n'), ((4021, 4051), 'numpy.zeros', 'np.zeros', (['[history_size, h, w]'], {}), '([history_size, h, w])\n', (4029, 4051), True, 'import numpy as np\n'), ((5757, 5788), 'cv2.resize', 'cv2.resize', (['X', '(self.h, self.w)'], {}), '(X, (self.h, self.w))\n', (5767, 5788), False, 'import cv2\n'), ((6594, 6611), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (6599, 6611), False, 'from collections import deque\n'), ((6841, 6871), 'numpy.zeros', 'np.zeros', (['[history_size, h, w]'], {}), '([history_size, h, w])\n', (6849, 6871), True, 'import numpy as np\n'), ((7014, 7064), 'gym_super_mario_bros.make', 'gym_super_mario_bros.make', (['"""SuperMarioBros-1-1-v0"""'], {}), "('SuperMarioBros-1-1-v0')\n", (7039, 7064), False, 'import gym_super_mario_bros\n'), ((7084, 7123), 'nes_py.wrappers.JoypadSpace', 'JoypadSpace', (['self.env', 'COMPLEX_MOVEMENT'], {}), '(self.env, COMPLEX_MOVEMENT)\n', (7095, 7123), False, 'from nes_py.wrappers import JoypadSpace\n'), ((7281, 7345), 'cv2.createBackgroundSubtractorKNN', 'cv2.createBackgroundSubtractorKNN', ([], {'history': '(5)', 'detectShadows': '(True)'}), '(history=5, detectShadows=True)\n', (7314, 7345), False, 'import cv2\n'), ((11683, 11718), 'cv2.cvtColor', 'cv2.cvtColor', (['X', 'cv2.COLOR_RGB2GRAY'], {}), '(X, cv2.COLOR_RGB2GRAY)\n', (11695, 11718), False, 'import cv2\n'), ((11748, 11779), 'cv2.resize', 'cv2.resize', (['x', '(self.h, self.w)'], {}), '(x, (self.h, self.w))\n', (11758, 11779), False, 'import cv2\n'), ((3451, 3467), 'gym.make', 'gym.make', (['env_id'], {}), '(env_id)\n', (3459, 3467), False, 'import gym\n'), ((6376, 6409), 'gym_super_mario_bros.make', 'gym_super_mario_bros.make', (['env_id'], {}), '(env_id)\n', (6401, 6409), False, 'import gym_super_mario_bros\n'), ((7139, 7161), 'os.path.exists', 'os.path.exists', (['"""data"""'], {}), "('data')\n", (7153, 7161), False, 'import os\n'), ((7175, 7194), 'os.makedirs', 'os.makedirs', (['"""data"""'], {}), "('data')\n", (7186, 7194), False, 'import os\n'), ((2861, 2885), 'copy.copy', 'copy', (['self.visited_rooms'], {}), '(self.visited_rooms)\n', (2865, 2885), False, 'from copy import copy\n'), ((8206, 8242), 'cv2.cvtColor', 'cv2.cvtColor', (['obs', 'cv2.COLOR_BGR2RGB'], {}), '(obs, cv2.COLOR_BGR2RGB)\n', (8218, 8242), False, 'import cv2\n'), ((8344, 8388), 'cv2.cvtColor', 'cv2.cvtColor', (['temp_frame', 'cv2.COLOR_BGR2GRAY'], {}), '(temp_frame, cv2.COLOR_BGR2GRAY)\n', (8356, 8388), False, 'import cv2\n'), ((8418, 8457), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['temp_frame', '(5, 5)', '(0)'], {}), '(temp_frame, (5, 5), 0)\n', (8434, 8457), False, 'import cv2\n'), ((8600, 8640), 'cv2.bitwise_and', 'cv2.bitwise_and', (['temp_frame', 'subtr_frame'], {}), '(temp_frame, subtr_frame)\n', (8615, 8640), False, 'import cv2\n'), ((8666, 8721), 'cv2.threshold', 'cv2.threshold', (['mask_sub', '(100)', '(255)', '(0)', 'cv2.THRESH_BINARY'], {}), '(mask_sub, 100, 255, 0, cv2.THRESH_BINARY)\n', (8679, 8721), False, 'import cv2\n'), ((8744, 8805), 'cv2.findContours', 'cv2.findContours', 
(['thr', 'cv2.RETR_TREE', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(thr, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n', (8760, 8805), False, 'import cv2\n'), ((8829, 8856), 'imutils.grab_contours', 'imutils.grab_contours', (['cnts'], {}), '(cnts)\n', (8850, 8856), False, 'import imutils\n'), ((4394, 4410), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (4408, 4410), True, 'import numpy as np\n'), ((7609, 7625), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (7623, 7625), True, 'import numpy as np\n'), ((8278, 8314), 'cv2.cvtColor', 'cv2.cvtColor', (['obs', 'cv2.COLOR_BGR2RGB'], {}), '(obs, cv2.COLOR_BGR2RGB)\n', (8290, 8314), False, 'import cv2\n'), ((9042, 9061), 'cv2.boundingRect', 'cv2.boundingRect', (['c'], {}), '(c)\n', (9058, 9061), False, 'import cv2\n'), ((9262, 9280), 'os.listdir', 'os.listdir', (['"""data"""'], {}), "('data')\n", (9272, 9280), False, 'import os\n'), ((5140, 5166), 'numpy.mean', 'np.mean', (['self.recent_rlist'], {}), '(self.recent_rlist)\n', (5147, 5166), True, 'import numpy as np\n'), ((11034, 11060), 'numpy.mean', 'np.mean', (['self.recent_rlist'], {}), '(self.recent_rlist)\n', (11041, 11060), True, 'import numpy as np\n'), ((5694, 5712), 'PIL.Image.fromarray', 'Image.fromarray', (['X'], {}), '(X)\n', (5709, 5712), False, 'from PIL import Image\n'), ((8911, 8929), 'cv2.contourArea', 'cv2.contourArea', (['c'], {}), '(c)\n', (8926, 8929), False, 'import cv2\n'), ((8944, 8962), 'cv2.contourArea', 'cv2.contourArea', (['c'], {}), '(c)\n', (8959, 8962), False, 'import cv2\n'), ((9534, 9564), 'cv2.resize', 'cv2.resize', (['temp_obs', '(40, 40)'], {}), '(temp_obs, (40, 40))\n', (9544, 9564), False, 'import cv2\n')] |
import os
import time
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate
from keras.layers import Input
from keras.models import Model
from keras.utils import plot_model, to_categorical
from keras.callbacks import EarlyStopping
from numpy.core.multiarray import ndarray
from scipy.signal import savgol_filter
from helpers.io import inputter_train, inputter_test, outputter
from helpers.preprocessing import transform_proba
stopper = EarlyStopping(monitor='val_loss', min_delta=0, patience=2, verbose=1)
def build(_base_shape):
inputer = Input(shape=_base_shape, name='input')
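    # split the concatenated (384, 1) input into three 128-sample channels (eeg1, eeg2, emg); each gets its own conv tower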
split = Lambda(lambda x: tf.split(x, num_or_size_splits=3, axis=1))(inputer)
conv1 = Conv1D(filters=16, kernel_size=11, activation='relu', padding='valid', name='conv1')(split[0])
maxpool1 = MaxPooling1D()(conv1)
conv2 = Conv1D(filters=32, kernel_size=5, activation='relu', padding='valid', name='conv2')(maxpool1)
maxpool2 = MaxPooling1D()(conv2)
conv3 = Conv1D(filters=64, kernel_size=5, activation='relu', padding='valid', name='conv3')(maxpool2)
maxpool3 = MaxPooling1D()(conv3)
conv4 = Conv1D(filters=128, kernel_size=5, activation='relu', padding='valid', name='conv4')(maxpool3)
maxpool4_1 = MaxPooling1D()(conv4)
conv1 = Conv1D(filters=16, kernel_size=11, activation='relu', padding='valid', name='conv1_2')(split[1])
maxpool1 = MaxPooling1D()(conv1)
conv2 = Conv1D(filters=32, kernel_size=5, activation='relu', padding='valid', name='conv2_2')(maxpool1)
maxpool2 = MaxPooling1D()(conv2)
conv3 = Conv1D(filters=64, kernel_size=5, activation='relu', padding='valid', name='conv3_2')(maxpool2)
maxpool3 = MaxPooling1D()(conv3)
conv4 = Conv1D(filters=128, kernel_size=5, activation='relu', padding='valid', name='conv4_2')(maxpool3)
maxpool4_2 = MaxPooling1D()(conv4)
    conv1 = Conv1D(filters=16, kernel_size=11, activation='relu', padding='valid', name='conv1_3')(split[2])
maxpool1 = MaxPooling1D()(conv1)
conv2 = Conv1D(filters=32, kernel_size=5, activation='relu', padding='valid', name='conv2_3')(maxpool1)
maxpool2 = MaxPooling1D()(conv2)
conv3 = Conv1D(filters=64, kernel_size=5, activation='relu', padding='valid', name='conv3_3')(maxpool2)
maxpool3 = MaxPooling1D()(conv3)
conv4 = Conv1D(filters=128, kernel_size=5, activation='relu', padding='valid', name='conv4_3')(maxpool3)
maxpool4_3 = MaxPooling1D()(conv4)
merger = Concatenate(axis=1)([maxpool4_1, maxpool4_2, maxpool4_3])
flatten = Flatten()(merger)
dense1 = Dense(1024, activation='relu', name='dense1')(flatten)
dense2 = Dense(512, activation='relu', name='dense2')(dense1)
outputer = Dense(3, activation='softmax')(dense2)
_model = Model(inputs=inputer, outputs=outputer) # type: Model
return _model
eeg1, eeg2, emg, lab = inputter_train()
print('Each data input shape: ', eeg1.shape)
data = np.concatenate((np.reshape(eeg1, (-1, 128)), np.reshape(eeg2, (-1, 128)), np.reshape(emg, (-1, 128))), axis=1)
data = data[..., np.newaxis]
print("Data format: ", data.shape)
del eeg1
del eeg2
del emg
print(lab.shape)
labels = np.reshape(lab, (-1, 1))
labels = np.concatenate((labels, labels, labels, labels), axis=1)
print(labels.shape)
labels = np.reshape(labels, (-1, 1))
labels = np.subtract(labels, 1)
labels = to_categorical(labels, num_classes=None) # type: ndarray
base_shape = (data.shape[1], data.shape[2])
print('Input shape: ', base_shape)
print('Label shape: ', labels.shape)
print('Input done.')
model = build(base_shape)
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['categorical_accuracy'])
print(model.summary())
plot_model(model, to_file=os.getcwd() + '/data/' + str(time.strftime("%Y%m%d-%H%M%S")) + '_model.png', show_shapes=True,
show_layer_names=True, rankdir='TB')
print("Unique labels: ", np.unique(lab))
model.fit(data, labels, batch_size=128, epochs=50, verbose=1, validation_split=0.1,
callbacks=[stopper])
model.save_weights("/model/conv2d_model.h5")
eeg1_t, eeg2_t, emg_t = inputter_test()
data_t = np.concatenate((np.reshape(eeg1_t, (-1, 128)),
np.reshape(eeg2_t, (-1, 128)),
np.reshape(emg_t, (-1, 128))), axis=1)
data_t = data_t[..., np.newaxis]
del eeg1_t
del eeg2_t
del emg_t
print("Data format: ", data_t.shape)
y_pred_t = model.predict(data_t)
y_pred_t = transform_proba(y_pred=y_pred_t, exponential=False)
smoothened = np.reshape(y_pred_t, (2, -1))
smoothened = np.round(smoothened)
print(smoothened.shape)
smoothened = savgol_filter(smoothened, polyorder=1, axis=1, window_length=5, mode='nearest')
plt.plot(smoothened.T[:5000, 1])
smoothened = np.round(smoothened)
print(smoothened.shape)
# plt.plot(y_pred_t, alpha=0.15)
plt.plot(smoothened.T[:5000, 1])
plt.show()
smoothened = np.reshape(smoothened, (-1, 1))
outputter(smoothened)
| [
"helpers.io.inputter_train",
"tensorflow.split",
"scipy.signal.savgol_filter",
"keras.utils.to_categorical",
"keras.layers.Dense",
"keras.layers.MaxPooling1D",
"numpy.reshape",
"matplotlib.pyplot.plot",
"numpy.subtract",
"keras.models.Model",
"numpy.concatenate",
"keras.callbacks.EarlyStopping",
"numpy.round",
"keras.layers.Flatten",
"helpers.io.inputter_test",
"keras.layers.Concatenate",
"matplotlib.pyplot.show",
"numpy.unique",
"helpers.preprocessing.transform_proba",
"time.strftime",
"helpers.io.outputter",
"os.getcwd",
"keras.layers.Input",
"keras.layers.Conv1D"
] | [((543, 612), 'keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_loss"""', 'min_delta': '(0)', 'patience': '(2)', 'verbose': '(1)'}), "(monitor='val_loss', min_delta=0, patience=2, verbose=1)\n", (556, 612), False, 'from keras.callbacks import EarlyStopping\n'), ((2925, 2941), 'helpers.io.inputter_train', 'inputter_train', ([], {}), '()\n', (2939, 2941), False, 'from helpers.io import inputter_train, inputter_test, outputter\n'), ((3224, 3248), 'numpy.reshape', 'np.reshape', (['lab', '(-1, 1)'], {}), '(lab, (-1, 1))\n', (3234, 3248), True, 'import numpy as np\n'), ((3258, 3314), 'numpy.concatenate', 'np.concatenate', (['(labels, labels, labels, labels)'], {'axis': '(1)'}), '((labels, labels, labels, labels), axis=1)\n', (3272, 3314), True, 'import numpy as np\n'), ((3344, 3371), 'numpy.reshape', 'np.reshape', (['labels', '(-1, 1)'], {}), '(labels, (-1, 1))\n', (3354, 3371), True, 'import numpy as np\n'), ((3381, 3403), 'numpy.subtract', 'np.subtract', (['labels', '(1)'], {}), '(labels, 1)\n', (3392, 3403), True, 'import numpy as np\n'), ((3413, 3453), 'keras.utils.to_categorical', 'to_categorical', (['labels'], {'num_classes': 'None'}), '(labels, num_classes=None)\n', (3427, 3453), False, 'from keras.utils import plot_model, to_categorical\n'), ((4184, 4199), 'helpers.io.inputter_test', 'inputter_test', ([], {}), '()\n', (4197, 4199), False, 'from helpers.io import inputter_train, inputter_test, outputter\n'), ((4526, 4577), 'helpers.preprocessing.transform_proba', 'transform_proba', ([], {'y_pred': 'y_pred_t', 'exponential': '(False)'}), '(y_pred=y_pred_t, exponential=False)\n', (4541, 4577), False, 'from helpers.preprocessing import transform_proba\n'), ((4592, 4621), 'numpy.reshape', 'np.reshape', (['y_pred_t', '(2, -1)'], {}), '(y_pred_t, (2, -1))\n', (4602, 4621), True, 'import numpy as np\n'), ((4635, 4655), 'numpy.round', 'np.round', (['smoothened'], {}), '(smoothened)\n', (4643, 4655), True, 'import numpy as np\n'), ((4693, 4772), 'scipy.signal.savgol_filter', 'savgol_filter', (['smoothened'], {'polyorder': '(1)', 'axis': '(1)', 'window_length': '(5)', 'mode': '"""nearest"""'}), "(smoothened, polyorder=1, axis=1, window_length=5, mode='nearest')\n", (4706, 4772), False, 'from scipy.signal import savgol_filter\n'), ((4773, 4805), 'matplotlib.pyplot.plot', 'plt.plot', (['smoothened.T[:5000, 1]'], {}), '(smoothened.T[:5000, 1])\n', (4781, 4805), True, 'import matplotlib.pyplot as plt\n'), ((4819, 4839), 'numpy.round', 'np.round', (['smoothened'], {}), '(smoothened)\n', (4827, 4839), True, 'import numpy as np\n'), ((4897, 4929), 'matplotlib.pyplot.plot', 'plt.plot', (['smoothened.T[:5000, 1]'], {}), '(smoothened.T[:5000, 1])\n', (4905, 4929), True, 'import matplotlib.pyplot as plt\n'), ((4930, 4940), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4938, 4940), True, 'import matplotlib.pyplot as plt\n'), ((4954, 4985), 'numpy.reshape', 'np.reshape', (['smoothened', '(-1, 1)'], {}), '(smoothened, (-1, 1))\n', (4964, 4985), True, 'import numpy as np\n'), ((4987, 5008), 'helpers.io.outputter', 'outputter', (['smoothened'], {}), '(smoothened)\n', (4996, 5008), False, 'from helpers.io import inputter_train, inputter_test, outputter\n'), ((653, 691), 'keras.layers.Input', 'Input', ([], {'shape': '_base_shape', 'name': '"""input"""'}), "(shape=_base_shape, name='input')\n", (658, 691), False, 'from keras.layers import Input\n'), ((2827, 2866), 'keras.models.Model', 'Model', ([], {'inputs': 'inputer', 'outputs': 'outputer'}), '(inputs=inputer, 
outputs=outputer)\n', (2832, 2866), False, 'from keras.models import Model\n'), ((3982, 3996), 'numpy.unique', 'np.unique', (['lab'], {}), '(lab)\n', (3991, 3996), True, 'import numpy as np\n'), ((786, 875), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(16)', 'kernel_size': '(11)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv1"""'}), "(filters=16, kernel_size=11, activation='relu', padding='valid', name\n ='conv1')\n", (792, 875), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((896, 910), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (908, 910), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((930, 1018), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(32)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv2"""'}), "(filters=32, kernel_size=5, activation='relu', padding='valid', name=\n 'conv2')\n", (936, 1018), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1039, 1053), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (1051, 1053), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1073, 1161), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(64)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv3"""'}), "(filters=64, kernel_size=5, activation='relu', padding='valid', name=\n 'conv3')\n", (1079, 1161), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1182, 1196), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (1194, 1196), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1216, 1305), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(128)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv4"""'}), "(filters=128, kernel_size=5, activation='relu', padding='valid', name\n ='conv4')\n", (1222, 1305), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1328, 1342), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (1340, 1342), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1363, 1454), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(16)', 'kernel_size': '(11)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv1_2"""'}), "(filters=16, kernel_size=11, activation='relu', padding='valid', name\n ='conv1_2')\n", (1369, 1454), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1475, 1489), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (1487, 1489), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1509, 1599), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(32)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv2_2"""'}), "(filters=32, kernel_size=5, activation='relu', padding='valid', name=\n 'conv2_2')\n", (1515, 1599), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1620, 1634), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (1632, 1634), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1654, 
1744), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(64)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv3_2"""'}), "(filters=64, kernel_size=5, activation='relu', padding='valid', name=\n 'conv3_2')\n", (1660, 1744), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1765, 1779), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (1777, 1779), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1799, 1890), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(128)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv4_2"""'}), "(filters=128, kernel_size=5, activation='relu', padding='valid', name\n ='conv4_2')\n", (1805, 1890), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1913, 1927), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (1925, 1927), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((1948, 2039), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(16)', 'kernel_size': '(11)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv1_3"""'}), "(filters=16, kernel_size=11, activation='relu', padding='valid', name\n ='conv1_3')\n", (1954, 2039), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2060, 2074), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (2072, 2074), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2094, 2184), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(32)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv2_3"""'}), "(filters=32, kernel_size=5, activation='relu', padding='valid', name=\n 'conv2_3')\n", (2100, 2184), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2205, 2219), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (2217, 2219), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2239, 2329), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(64)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv3_3"""'}), "(filters=64, kernel_size=5, activation='relu', padding='valid', name=\n 'conv3_3')\n", (2245, 2329), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2350, 2364), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (2362, 2364), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2384, 2475), 'keras.layers.Conv1D', 'Conv1D', ([], {'filters': '(128)', 'kernel_size': '(5)', 'activation': '"""relu"""', 'padding': '"""valid"""', 'name': '"""conv4_3"""'}), "(filters=128, kernel_size=5, activation='relu', padding='valid', name\n ='conv4_3')\n", (2390, 2475), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2498, 2512), 'keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {}), '()\n', (2510, 2512), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2534, 2553), 'keras.layers.Concatenate', 'Concatenate', ([], {'axis': '(1)'}), '(axis=1)\n', (2545, 2553), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, 
Lambda, Concatenate\n'), ((2607, 2616), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (2614, 2616), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2638, 2683), 'keras.layers.Dense', 'Dense', (['(1024)'], {'activation': '"""relu"""', 'name': '"""dense1"""'}), "(1024, activation='relu', name='dense1')\n", (2643, 2683), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2706, 2750), 'keras.layers.Dense', 'Dense', (['(512)'], {'activation': '"""relu"""', 'name': '"""dense2"""'}), "(512, activation='relu', name='dense2')\n", (2711, 2750), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((2774, 2804), 'keras.layers.Dense', 'Dense', (['(3)'], {'activation': '"""softmax"""'}), "(3, activation='softmax')\n", (2779, 2804), False, 'from keras.layers import Conv1D, MaxPooling1D, Dense, Flatten, Lambda, Concatenate\n'), ((3011, 3038), 'numpy.reshape', 'np.reshape', (['eeg1', '(-1, 128)'], {}), '(eeg1, (-1, 128))\n', (3021, 3038), True, 'import numpy as np\n'), ((3040, 3067), 'numpy.reshape', 'np.reshape', (['eeg2', '(-1, 128)'], {}), '(eeg2, (-1, 128))\n', (3050, 3067), True, 'import numpy as np\n'), ((3069, 3095), 'numpy.reshape', 'np.reshape', (['emg', '(-1, 128)'], {}), '(emg, (-1, 128))\n', (3079, 3095), True, 'import numpy as np\n'), ((4226, 4255), 'numpy.reshape', 'np.reshape', (['eeg1_t', '(-1, 128)'], {}), '(eeg1_t, (-1, 128))\n', (4236, 4255), True, 'import numpy as np\n'), ((4282, 4311), 'numpy.reshape', 'np.reshape', (['eeg2_t', '(-1, 128)'], {}), '(eeg2_t, (-1, 128))\n', (4292, 4311), True, 'import numpy as np\n'), ((4338, 4366), 'numpy.reshape', 'np.reshape', (['emg_t', '(-1, 128)'], {}), '(emg_t, (-1, 128))\n', (4348, 4366), True, 'import numpy as np\n'), ((721, 762), 'tensorflow.split', 'tf.split', (['x'], {'num_or_size_splits': '(3)', 'axis': '(1)'}), '(x, num_or_size_splits=3, axis=1)\n', (729, 762), True, 'import tensorflow as tf\n'), ((3813, 3824), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3822, 3824), False, 'import os\n'), ((3842, 3872), 'time.strftime', 'time.strftime', (['"""%Y%m%d-%H%M%S"""'], {}), "('%Y%m%d-%H%M%S')\n", (3855, 3872), False, 'import time\n')] |
from setuptools import setup
with open('README.md') as r:
desc=r.read()
setup(
name="fifa_simulator",
version="1.0.2",
description='A Python Package for Simulation of fifa-like tournament fifa_simulator',
long_description=desc,
long_description_content_type='text/markdown',
url='https://github.com/Abhishek741119/fifa_simulator',
license="MIT",
author="<NAME>",
author_email="<EMAIL>",
classifiers=[
'Programming Language :: Python :: 3',
],
python_requires='>=3.0',
packages=['fixtures'],
include_package_data=True,
install_requires=["pandas"],
)
| [
"setuptools.setup"
] | [((79, 580), 'setuptools.setup', 'setup', ([], {'name': '"""fifa_simulator"""', 'version': '"""1.0.2"""', 'description': '"""A Python Package for Simulation of fifa-like tournament fifa_simulator"""', 'long_description': 'desc', 'long_description_content_type': '"""text/markdown"""', 'url': '"""https://github.com/Abhishek741119/fifa_simulator"""', 'license': '"""MIT"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'classifiers': "['Programming Language :: Python :: 3']", 'python_requires': '""">=3.0"""', 'packages': "['fixtures']", 'include_package_data': '(True)', 'install_requires': "['pandas']"}), "(name='fifa_simulator', version='1.0.2', description=\n 'A Python Package for Simulation of fifa-like tournament fifa_simulator',\n long_description=desc, long_description_content_type='text/markdown',\n url='https://github.com/Abhishek741119/fifa_simulator', license='MIT',\n author='<NAME>', author_email='<EMAIL>', classifiers=[\n 'Programming Language :: Python :: 3'], python_requires='>=3.0',\n packages=['fixtures'], include_package_data=True, install_requires=[\n 'pandas'])\n", (84, 580), False, 'from setuptools import setup\n')] |
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
class selectMetodos (unittest.TestCase):
def setUp(self):
global driver
driver = webdriver.Firefox()
driver.get("http://www.goodstartbooks.com/pruebas/")
def test1(self):
opcion = driver.find_element_by_xpath("//*[@id='noImportante']/td[2]")
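        # find_element_by_xpath raises NoSuchElementException when nothing matches,
        # so this None check documents intent rather than guarding a missing element.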
if opcion is not None:
print("Texto", opcion.text)
def test2(self):
opcion2 = driver.find_element_by_id("importante")
if opcion2 is not None:
valorAtributo=opcion2.get_attribute("class")
print("Clase", valorAtributo)
def tearDown(self):
driver.quit()
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"selenium.webdriver.Firefox"
] | [((711, 726), 'unittest.main', 'unittest.main', ([], {}), '()\n', (724, 726), False, 'import unittest\n'), ((233, 252), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (250, 252), False, 'from selenium import webdriver\n')] |
# from server.controllers import transaction_controller
# from server.controllers import contact_controller
# from server.controllers import profile_controller
# from server.controllers import currently_controller
# from server.controllers import balance_controller
# from server.controllers import books_controller
# from server.controllers import user_controller
import os
from dotenv import load_dotenv
from flask import Flask
from flask_mysqldb import MySQL
load_dotenv()
db = MySQL()
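# MySQL extension instance; it is bound to the application later via db.init_app(app) in create_app().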
def create_app():
app = Flask(__name__, static_folder='../client/build', static_url_path='/')
app.config['SECRET_KEY'] = os.urandom(12)
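    # note: os.urandom(12) generates a fresh secret key on every start, so any
    # existing signed sessions are invalidated whenever the process restarts.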
# database connection credentials
app.config['MYSQL_USER'] = 'b2393fd53cce34'
app.config['MYSQL_PASSWORD'] = '<PASSWORD>'
app.config['MYSQL_HOST'] = 'us-cdbr-east-02.cleardb.com'
app.config['MYSQL_DB'] = 'heroku_c28054944c75e38'
app.config['MYSQL_CURSORCLASS'] = 'DictCursor'
app.config['SESSION_COOKIE_SAMESITE'] = "Strict"
app.config.update(
MAIL_SERVER=os.getenv("MAIL_SERVER"),
MAIL_PORT=os.getenv("MAIL_PORT"),
MAIL_USE_SSL=True,
MAIL_USERNAME=os.getenv("MAIL_USERNAME"),
MAIL_PASSWORD=os.getenv("MAIL_PASSWORD"),
MAIL_DEFAULT_SENDER=os.getenv("MAIL_DEFAULT_SENDER")
)
db.init_app(app)
# app.register_blueprint(user_controller.bp)
#
# app.register_blueprint(books_controller.bp)
#
# app.register_blueprint(balance_controller.bp)
#
# app.register_blueprint(currently_controller.bp)
#
# app.register_blueprint(profile_controller.bp)
#
# app.register_blueprint(contact_controller.bp)
#
# app.register_blueprint(transaction_controller.bp)
return app
| [
"flask_mysqldb.MySQL",
"os.getenv",
"flask.Flask",
"os.urandom",
"dotenv.load_dotenv"
] | [((464, 477), 'dotenv.load_dotenv', 'load_dotenv', ([], {}), '()\n', (475, 477), False, 'from dotenv import load_dotenv\n'), ((485, 492), 'flask_mysqldb.MySQL', 'MySQL', ([], {}), '()\n', (490, 492), False, 'from flask_mysqldb import MySQL\n'), ((523, 592), 'flask.Flask', 'Flask', (['__name__'], {'static_folder': '"""../client/build"""', 'static_url_path': '"""/"""'}), "(__name__, static_folder='../client/build', static_url_path='/')\n", (528, 592), False, 'from flask import Flask\n'), ((624, 638), 'os.urandom', 'os.urandom', (['(12)'], {}), '(12)\n', (634, 638), False, 'import os\n'), ((1036, 1060), 'os.getenv', 'os.getenv', (['"""MAIL_SERVER"""'], {}), "('MAIL_SERVER')\n", (1045, 1060), False, 'import os\n'), ((1080, 1102), 'os.getenv', 'os.getenv', (['"""MAIL_PORT"""'], {}), "('MAIL_PORT')\n", (1089, 1102), False, 'import os\n'), ((1153, 1179), 'os.getenv', 'os.getenv', (['"""MAIL_USERNAME"""'], {}), "('MAIL_USERNAME')\n", (1162, 1179), False, 'import os\n'), ((1203, 1229), 'os.getenv', 'os.getenv', (['"""MAIL_PASSWORD"""'], {}), "('MAIL_PASSWORD')\n", (1212, 1229), False, 'import os\n'), ((1259, 1291), 'os.getenv', 'os.getenv', (['"""MAIL_DEFAULT_SENDER"""'], {}), "('MAIL_DEFAULT_SENDER')\n", (1268, 1291), False, 'import os\n')] |
# version 1.0
import csv
import math
# function to compute the burn rate: (final mass - mass without propellant) / burn time.
def calc_taxa(mf, msp, tq):
    return (mf - msp) / tq
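# example: final mass 10.0, mass without propellant 8.0, burn time 4.0 -> (10.0 - 8.0) / 4.0 = 0.5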
# create one list per data column of the table (only the columns needed for the calculation are used).
tempo = []
accx = []
accy = []
accz = []
vet_aceleracao = [] # vector to store the computed acceleration values.
vet_massa = [] # vector to store the mass values.
print('BASE DE DADOS:\n', end="\n")
# open the csv file and append each value to its respective list created above.
with open('data_baseex.csv', 'r') as file:
reader = csv.reader(file)
next(reader)
for lin in reader:
time,x,y,z = lin
        tempo.append(float(time)) # store the time values from the data set in the tempo[] list.
        accx.append(float(x)) # store the x-axis acceleration values in the accx[] list.
        accy.append(float(y)) # store the y-axis acceleration values in the accy[] list.
        accz.append(float(z)) # store the z-axis acceleration values in the accz[] list.
print(lin)
file.close()
print('\n', end="\n")
# acceleration calculation.
print('-------------------------- CALCULANDO ACELERAÇÃO ------------------------')
for i in range(0, len(accx)):
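    # magnitude of the acceleration vector: |a| = sqrt(accx^2 + accy^2 + accz^2)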
aceleracao = math.pow(accx[i], 2) + math.pow(accy[i], 2) + math.pow(accz[i], 2)
aceleracao = math.sqrt(aceleracao)
    vet_aceleracao.append(aceleracao) # append the acceleration values to the vector
print('Aceleração({}) = {:.4f}'.format(i,aceleracao), end="\n")
print('-------------------------------------------------------------------------\n', end="\n")
# force calculation.
print('-------------------------- CALCULANDO FORÇA -----------------------------')
for i in range(0, len(vet_aceleracao)):
print('Valor da Massa: ', end="")
massa = float(input())
    vet_massa.append(massa) # append the mass values to the vector.
forca = massa * vet_aceleracao[i]
print('Força({}) = {:.4f}\n'.format(i,forca), end="\n")
print('-------------------------------------------------------------------------\n', end='\n')
# read the rate inputs and perform the calculation.
print('-------------------------- VALORES DA TAXA ------------------------------')
print('Valor da massa Final: ', end="")
massa_final = float(input())
print('Valor da massa sem propelente: ', end="")
massa_semprop = float(input())
print('Valor do tempo de queima: ', end="")
temp_queima = float(input())
result_taxa = calc_taxa(massa_final,massa_semprop,temp_queima)
print('Resultado da taxa: {:.4f}'.format(result_taxa), end="\n")
print('-------------------------------------------------------------------------\n', end="")
| [
"math.pow",
"math.sqrt",
"csv.reader"
] | [((604, 620), 'csv.reader', 'csv.reader', (['file'], {}), '(file)\n', (614, 620), False, 'import csv\n'), ((1408, 1429), 'math.sqrt', 'math.sqrt', (['aceleracao'], {}), '(aceleracao)\n', (1417, 1429), False, 'import math\n'), ((1370, 1390), 'math.pow', 'math.pow', (['accz[i]', '(2)'], {}), '(accz[i], 2)\n', (1378, 1390), False, 'import math\n'), ((1324, 1344), 'math.pow', 'math.pow', (['accx[i]', '(2)'], {}), '(accx[i], 2)\n', (1332, 1344), False, 'import math\n'), ((1347, 1367), 'math.pow', 'math.pow', (['accy[i]', '(2)'], {}), '(accy[i], 2)\n', (1355, 1367), False, 'import math\n')] |
import os
import json
import sys
sys.path.append("../")
from lambda_base import LambdaBase
from helper import AwsHelper
class GetCredentialsLambda(LambdaBase):
def __init__(self): # implementation-specific args and/or kwargs
pass
def handle(self, event, context):
print("event: {}".format(event))
sts = AwsHelper().getClient('sts')
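        # Assume the Transcribe access role to obtain short-lived credentials
        # (AccessKeyId / SecretAccessKey / SessionToken) that are returned to the client.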
transcribeCredentials = sts.assume_role(
RoleArn=os.environ['TRANSCRIBE_ACCESS_ROLEARN'],
RoleSessionName="access_transcribe_role"
)['Credentials']
print(transcribeCredentials)
result = {}
result['accessKeyId'] = transcribeCredentials['AccessKeyId']
result['secretAccessKey'] = transcribeCredentials['SecretAccessKey']
result['sessionToken'] = transcribeCredentials['SessionToken']
result['region'] = os.environ['AWS_REGION']
return {
"isBase64Encoded": False,
"statusCode": 200,
'body': json.dumps(result),
"headers": {
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': '*',
'Access-Control-Allow-Methods': 'GET,POST,OPTIONS'
}
}
lambda_handler = GetCredentialsLambda.get_handler() # input values for args and/or kwargs
| [
"helper.AwsHelper",
"json.dumps",
"sys.path.append"
] | [((34, 56), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (49, 56), False, 'import sys\n'), ((988, 1006), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (998, 1006), False, 'import json\n'), ((339, 350), 'helper.AwsHelper', 'AwsHelper', ([], {}), '()\n', (348, 350), False, 'from helper import AwsHelper\n')] |
from django.shortcuts import redirect
def index(request):
return redirect('/blog/') | [
"django.shortcuts.redirect"
] | [((67, 85), 'django.shortcuts.redirect', 'redirect', (['"""/blog/"""'], {}), "('/blog/')\n", (75, 85), False, 'from django.shortcuts import redirect\n')] |
# -*- coding: utf-8 -*-
"""Generate numpy data from .edf files."""
# Author: <NAME> <<EMAIL>>
import os
from tqdm import tqdm
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from urllib.request import urlretrieve
import zipfile
import mne
import numpy as np
import pandas as pd
def generate_df(src_dir):
def is_responder(row):
if row['Healthy']:
return 'h'
else:
if row['Subject Number'] < 17:
return 'r'
else:
return 'nr'
data_paths = [os.path.abspath(os.path.join(src_dir, data_dir)) for data_dir in os.listdir(src_dir)]
subject_number = [int(os.path.basename(path).split()[1][1:]) for path in data_paths]
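    # subject metadata is inferred from the file names, e.g. 'MDD S5 EC.edf' -> MDD, subject 5, eyes closed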
healthy = [True if 'H ' in path else False for path in data_paths]
mdd = [True if 'MDD ' in path else False for path in data_paths]
eo = [True if ' EO' in path else False for path in data_paths]
ec = [True if ' EC' in path else False for path in data_paths]
erp = [True if ' TASK' in path else False for path in data_paths]
mapping_dict = {'Path': data_paths, 'Subject Number': subject_number,
'Healthy': healthy, 'MDD': mdd, 'EO': eo, 'EC': ec, 'ERP': erp}
data = pd.DataFrame(mapping_dict)
data['Responder'] = data.apply(is_responder, axis=1)
eyes_closed = data[data['EC'] == True].copy()
eyes_closed.drop(['EC', 'EO', 'ERP'], axis=1, inplace=True)
eyes_opened = data[data['EO'] == True].copy()
eyes_opened.drop(['EC', 'EO', 'ERP'], axis=1, inplace=True)
df = pd.concat([eyes_opened, eyes_closed])
return df
def generate_data(src_dir, dst_dir, n_channels=19):
"""Loads each trial, multiplies by 1e6 and saves the normalized array as numpy array.
"""
ignore = ['H S14 EO.edf', 'H S18 EO.edf', 'H S19 EO.edf', 'H S21 EO.edf', 'H S22 EO.edf', 'H S23 EO.edf',
'H S26 EO.edf', 'H S4 EO.edf', 'H S5 EO.edf', 'H S8 EO.edf', 'H S9 EO.edf', 'MDD S23 EO.edf',
'MDD S5 EO.edf', 'H S13 EC.edf', 'H S14 EC.edf', 'H S15 EC.edf', 'H S19 EC.edf', 'H S21 EC.edf',
'H S23 EC.edf', 'H S26 EC.edf', 'H S4 EC.edf', 'H S5 EC.edf', 'H S8 EC.edf', 'MDD S27 EC.edf']
df = generate_df(src_dir)
s = df.iloc[0]
raw = mne.io.read_raw_edf(s['Path'], preload=True, verbose=False)
raw.pick_types(eeg=True)
channels = raw.ch_names[:n_channels]
if not os.path.exists(dst_dir):
os.mkdir(dst_dir)
print('Loading samples ...')
with tqdm(total=len(df)) as pbar:
for i, subject in enumerate(df.values):
path = subject[0]
file_name = os.path.basename(path)
if file_name in ignore:
print('ignored ', file_name)
pbar.update(1)
continue
raw = mne.io.read_raw_edf(path, preload=True, verbose=False)
raw.pick_types(eeg=True)
raw.pick_channels(channels)
arr = raw.get_data() * 1e6
label = subject[-1]
path_to_save = os.path.join(dst_dir, 's{}_{}.npy'.format(i + 1, label))
np.save(path_to_save, arr)
pbar.update(1)
return
def download_data(download_dir):
zip_file_name = 'eeg_data.zip'
zip_file_path = os.path.join(download_dir, zip_file_name)
data_dir = os.path.join(download_dir, 'eeg_data')
if os.path.exists(data_dir):
print('Data exists.')
return
elif os.path.exists(zip_file_path):
print('Zip file exists.')
else:
url = 'https://ndownloader.figshare.com/articles/4244171/versions/2'
print('Downloading file {} ...'.format(zip_file_name))
urlretrieve(url, zip_file_path)
print('File downloaded to ', zip_file_path)
with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
zip_ref.extractall(data_dir)
print('Zip file extracted to ', data_dir)
os.remove(os.path.join(data_dir, '6921143_H S15 EO.edf'))
os.rename(os.path.join(data_dir, '6921959_H S15 EO.edf'),
os.path.join(data_dir, 'H S15 EO.edf'))
if __name__ == '__main__':
parser = ArgumentParser(description='', formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('--data_dir', required=True, type=str, help='Complete path to the folder containing edf data.')
parser.add_argument('--dst_folder', required=False, type=str, default='data')
args = parser.parse_args()
src_dir = args.data_dir
dst_dir = os.path.abspath(os.path.join(os.path.dirname(src_dir), args.dst_folder))
generate_data(src_dir=src_dir, dst_dir=dst_dir)
| [
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"zipfile.ZipFile",
"urllib.request.urlretrieve",
"os.path.join",
"os.path.dirname",
"mne.io.read_raw_edf",
"os.mkdir",
"os.path.basename",
"pandas.DataFrame",
"pandas.concat",
"numpy.save"
] | [((1244, 1270), 'pandas.DataFrame', 'pd.DataFrame', (['mapping_dict'], {}), '(mapping_dict)\n', (1256, 1270), True, 'import pandas as pd\n'), ((1568, 1605), 'pandas.concat', 'pd.concat', (['[eyes_opened, eyes_closed]'], {}), '([eyes_opened, eyes_closed])\n', (1577, 1605), True, 'import pandas as pd\n'), ((2272, 2331), 'mne.io.read_raw_edf', 'mne.io.read_raw_edf', (["s['Path']"], {'preload': '(True)', 'verbose': '(False)'}), "(s['Path'], preload=True, verbose=False)\n", (2291, 2331), False, 'import mne\n'), ((3273, 3314), 'os.path.join', 'os.path.join', (['download_dir', 'zip_file_name'], {}), '(download_dir, zip_file_name)\n', (3285, 3314), False, 'import os\n'), ((3330, 3368), 'os.path.join', 'os.path.join', (['download_dir', '"""eeg_data"""'], {}), "(download_dir, 'eeg_data')\n", (3342, 3368), False, 'import os\n'), ((3377, 3401), 'os.path.exists', 'os.path.exists', (['data_dir'], {}), '(data_dir)\n', (3391, 3401), False, 'import os\n'), ((4125, 4202), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '""""""', 'formatter_class': 'ArgumentDefaultsHelpFormatter'}), "(description='', formatter_class=ArgumentDefaultsHelpFormatter)\n", (4139, 4202), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter\n'), ((2414, 2437), 'os.path.exists', 'os.path.exists', (['dst_dir'], {}), '(dst_dir)\n', (2428, 2437), False, 'import os\n'), ((2447, 2464), 'os.mkdir', 'os.mkdir', (['dst_dir'], {}), '(dst_dir)\n', (2455, 2464), False, 'import os\n'), ((3457, 3486), 'os.path.exists', 'os.path.exists', (['zip_file_path'], {}), '(zip_file_path)\n', (3471, 3486), False, 'import os\n'), ((3774, 3809), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file_path', '"""r"""'], {}), "(zip_file_path, 'r')\n", (3789, 3809), False, 'import zipfile\n'), ((3919, 3965), 'os.path.join', 'os.path.join', (['data_dir', '"""6921143_H S15 EO.edf"""'], {}), "(data_dir, '6921143_H S15 EO.edf')\n", (3931, 3965), False, 'import os\n'), ((3981, 4027), 'os.path.join', 'os.path.join', (['data_dir', '"""6921959_H S15 EO.edf"""'], {}), "(data_dir, '6921959_H S15 EO.edf')\n", (3993, 4027), False, 'import os\n'), ((4043, 4081), 'os.path.join', 'os.path.join', (['data_dir', '"""H S15 EO.edf"""'], {}), "(data_dir, 'H S15 EO.edf')\n", (4055, 4081), False, 'import os\n'), ((569, 600), 'os.path.join', 'os.path.join', (['src_dir', 'data_dir'], {}), '(src_dir, data_dir)\n', (581, 600), False, 'import os\n'), ((618, 637), 'os.listdir', 'os.listdir', (['src_dir'], {}), '(src_dir)\n', (628, 637), False, 'import os\n'), ((2639, 2661), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2655, 2661), False, 'import os\n'), ((2817, 2871), 'mne.io.read_raw_edf', 'mne.io.read_raw_edf', (['path'], {'preload': '(True)', 'verbose': '(False)'}), '(path, preload=True, verbose=False)\n', (2836, 2871), False, 'import mne\n'), ((3118, 3144), 'numpy.save', 'np.save', (['path_to_save', 'arr'], {}), '(path_to_save, arr)\n', (3125, 3144), True, 'import numpy as np\n'), ((3680, 3711), 'urllib.request.urlretrieve', 'urlretrieve', (['url', 'zip_file_path'], {}), '(url, zip_file_path)\n', (3691, 3711), False, 'from urllib.request import urlretrieve\n'), ((4509, 4533), 'os.path.dirname', 'os.path.dirname', (['src_dir'], {}), '(src_dir)\n', (4524, 4533), False, 'import os\n'), ((666, 688), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (682, 688), False, 'import os\n')] |
#!/usr/bin/env python2
#
# Combine a set of a source files into a single C file.
#
# Overview of the process:
#
# * Parse user supplied C files. Add automatic #undefs at the end
# of each C file to avoid defines bleeding from one file to another.
#
# * Combine the C files in specified order. If sources have ordering
# dependencies (depends on application), order may matter.
#
# * Process #include statements in the combined source, categorizing
# them either as "internal" (found in specified include path) or
# "external". Internal includes, unless explicitly excluded, are
#   inlined into the result while external includes are left as is.
# Duplicate internal #include statements are replaced with a comment.
#
# At every step, source and header lines are represented with explicit
# line objects which keep track of original filename and line. The
# output contains #line directives, if requested, to ensure error
# throwing and other diagnostic info will work in a useful manner when
# deployed. It's also possible to generate a combined source with no
# #line directives.
#
# Making the process deterministic is important, so that if users have
# diffs that they apply to the combined source, such diffs would apply
# for as long as possible.
#
# Limitations and notes:
#
# * While there are automatic #undef's for #define's introduced in each
# C file, it's not possible to "undefine" structs, unions, etc. If
# there are structs/unions/typedefs with conflicting names, these
# have to be resolved in the source files first.
#
# * Because duplicate #include statements are suppressed, the script currently
#   assumes #include statements are not conditional.
#
# * A system header might be #include'd in multiple source files with
# different feature defines (like _BSD_SOURCE). Because the #include
# file will only appear once in the resulting source, the first
# occurrence wins. The result may not work correctly if the feature
# defines must actually be different between two or more source files.
#
import logging
import sys
logging.basicConfig(level=logging.INFO, stream=sys.stdout, format='%(name)-21s %(levelname)-7s %(message)s')
logger = logging.getLogger('combine_src.py')
logger.setLevel(logging.INFO)
import os
import re
import json
import optparse
import logging
# Include path for finding include files which are amalgamated.
include_paths = []
# Include files specifically excluded from being inlined.
include_excluded = []
class File:
filename_full = None
filename = None
lines = None
def __init__(self, filename, lines):
self.filename = os.path.basename(filename)
self.filename_full = filename
self.lines = lines
class Line:
filename_full = None
filename = None
lineno = None
data = None
def __init__(self, filename, lineno, data):
self.filename = os.path.basename(filename)
self.filename_full = filename
self.lineno = lineno
self.data = data
def readFile(filename):
lines = []
with open(filename, 'rb') as f:
lineno = 0
for line in f:
lineno += 1
if len(line) > 0 and line[-1] == '\n':
line = line[:-1]
lines.append(Line(filename, lineno, line))
return File(filename, lines)
def lookupInclude(incfn):
re_sep = re.compile(r'/|\\')
inccomp = re.split(re_sep, incfn) # split include path, support / and \
for path in include_paths:
fn = apply(os.path.join, [ path ] + inccomp)
if os.path.exists(fn):
return fn # Return full path to first match
return None
def addAutomaticUndefs(f):
defined = {}
re_def = re.compile(r'#define\s+(\w+).*$')
re_undef = re.compile(r'#undef\s+(\w+).*$')
for line in f.lines:
m = re_def.match(line.data)
if m is not None:
#logger.debug('DEFINED: %s' % repr(m.group(1)))
defined[m.group(1)] = True
m = re_undef.match(line.data)
if m is not None:
# Could just ignore #undef's here: we'd then emit
# reliable #undef's (though maybe duplicates) at
# the end.
#logger.debug('UNDEFINED: %s' % repr(m.group(1)))
if defined.has_key(m.group(1)):
del defined[m.group(1)]
    # Undefine anything that seems to be left defined. This is not a 100%
# process because some #undef's might be conditional which we don't
# track at the moment. Note that it's safe to #undef something that's
# not defined.
keys = sorted(defined.keys()) # deterministic order
if len(keys) > 0:
#logger.debug('STILL DEFINED: %r' % repr(defined.keys()))
f.lines.append(Line(f.filename, len(f.lines) + 1, ''))
f.lines.append(Line(f.filename, len(f.lines) + 1, '/* automatic undefs */'))
for k in keys:
logger.debug('automatic #undef for ' + k)
f.lines.append(Line(f.filename, len(f.lines) + 1, '#undef %s' % k))
def createCombined(files, prologue_filename, line_directives):
res = []
line_map = [] # indicate combined source lines where uncombined file/line would change
metadata = {
'line_map': line_map
}
emit_state = [ None, None ] # curr_filename, curr_lineno
def emit(line):
if isinstance(line, (str, unicode)):
res.append(line)
emit_state[1] += 1
else:
if line.filename != emit_state[0] or line.lineno != emit_state[1]:
if line_directives:
res.append('#line %d "%s"' % (line.lineno, line.filename))
line_map.append({ 'original_file': line.filename,
'original_line': line.lineno,
'combined_line': len(res) + 1 })
res.append(line.data)
emit_state[0] = line.filename
emit_state[1] = line.lineno + 1
included = {} # headers already included
if prologue_filename is not None:
with open(prologue_filename, 'rb') as f:
for line in f.read().split('\n'):
res.append(line)
re_inc = re.compile(r'^#include\s+(<|\")(.*?)(>|\").*$')
# Process a file, appending it to the result; the input may be a
# source or an include file. #include directives are handled
# recursively.
def processFile(f):
logger.debug('Process file: ' + f.filename)
for line in f.lines:
if not line.data.startswith('#include'):
emit(line)
continue
m = re_inc.match(line.data)
if m is None:
raise Exception('Couldn\'t match #include line: %s' % repr(line.data))
incpath = m.group(2)
if incpath in include_excluded:
# Specific include files excluded from the
# inlining / duplicate suppression process.
emit(line) # keep as is
continue
if included.has_key(incpath):
# We suppress duplicate includes, both internal and
# external, based on the assumption that includes are
# not behind #if defined() checks. This is the case for
# Duktape (except for the include files excluded).
emit('/* #include %s -> already included */' % incpath)
continue
included[incpath] = True
# An include file is considered "internal" and is amalgamated
# if it is found in the include path provided by the user.
incfile = lookupInclude(incpath)
if incfile is not None:
logger.debug('Include considered internal: %s -> %s' % (repr(line.data), repr(incfile)))
emit('/* #include %s */' % incpath)
processFile(readFile(incfile))
else:
logger.debug('Include considered external: %s' % repr(line.data))
emit(line) # keep as is
for f in files:
processFile(f)
return '\n'.join(res) + '\n', metadata
def main():
global include_paths, include_excluded
parser = optparse.OptionParser()
parser.add_option('--include-path', dest='include_paths', action='append', default=[], help='Include directory for "internal" includes, can be specified multiple times')
parser.add_option('--include-exclude', dest='include_excluded', action='append', default=[], help='Include file excluded from being considered internal (even if found in include dirs)')
parser.add_option('--prologue', dest='prologue', help='Prologue to prepend to start of file')
parser.add_option('--output-source', dest='output_source', help='Output source filename')
parser.add_option('--output-metadata', dest='output_metadata', help='Output metadata filename')
parser.add_option('--line-directives', dest='line_directives', action='store_true', default=False, help='Use #line directives in combined source')
parser.add_option('--quiet', dest='quiet', action='store_true', default=False, help='Suppress info messages (show warnings)')
parser.add_option('--verbose', dest='verbose', action='store_true', default=False, help='Show verbose debug messages')
(opts, args) = parser.parse_args()
assert(opts.include_paths is not None)
include_paths = opts.include_paths # global for easy access
include_excluded = opts.include_excluded
assert(opts.output_source)
assert(opts.output_metadata)
# Log level.
if opts.quiet:
logger.setLevel(logging.WARNING)
elif opts.verbose:
logger.setLevel(logging.DEBUG)
# Read input files, add automatic #undefs
sources = args
files = []
for fn in sources:
res = readFile(fn)
logger.debug('Add automatic undefs for: ' + fn)
addAutomaticUndefs(res)
files.append(res)
combined_source, metadata = \
createCombined(files, opts.prologue, opts.line_directives)
with open(opts.output_source, 'wb') as f:
f.write(combined_source)
with open(opts.output_metadata, 'wb') as f:
f.write(json.dumps(metadata, indent=4))
logger.info('Combined %d source files, %d bytes written to %s' % (len(files), len(combined_source), opts.output_source))
if __name__ == '__main__':
main()
| [
"logging.basicConfig",
"re.split",
"logging.getLogger",
"os.path.exists",
"re.compile",
"json.dumps",
"optparse.OptionParser",
"os.path.basename"
] | [((2135, 2248), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'stream': 'sys.stdout', 'format': '"""%(name)-21s %(levelname)-7s %(message)s"""'}), "(level=logging.INFO, stream=sys.stdout, format=\n '%(name)-21s %(levelname)-7s %(message)s')\n", (2154, 2248), False, 'import logging\n'), ((2253, 2288), 'logging.getLogger', 'logging.getLogger', (['"""combine_src.py"""'], {}), "('combine_src.py')\n", (2270, 2288), False, 'import logging\n'), ((3421, 3441), 're.compile', 're.compile', (['"""/|\\\\\\\\"""'], {}), "('/|\\\\\\\\')\n", (3431, 3441), False, 'import re\n'), ((3456, 3479), 're.split', 're.split', (['re_sep', 'incfn'], {}), '(re_sep, incfn)\n', (3464, 3479), False, 'import re\n'), ((3768, 3802), 're.compile', 're.compile', (['"""#define\\\\s+(\\\\w+).*$"""'], {}), "('#define\\\\s+(\\\\w+).*$')\n", (3778, 3802), False, 'import re\n'), ((3817, 3850), 're.compile', 're.compile', (['"""#undef\\\\s+(\\\\w+).*$"""'], {}), "('#undef\\\\s+(\\\\w+).*$')\n", (3827, 3850), False, 'import re\n'), ((6247, 6296), 're.compile', 're.compile', (['"""^#include\\\\s+(<|\\\\")(.*?)(>|\\\\").*$"""'], {}), '(\'^#include\\\\s+(<|\\\\")(.*?)(>|\\\\").*$\')\n', (6257, 6296), False, 'import re\n'), ((8263, 8286), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (8284, 8286), False, 'import optparse\n'), ((2689, 2715), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (2705, 2715), False, 'import os\n'), ((2946, 2972), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (2962, 2972), False, 'import os\n'), ((3615, 3633), 'os.path.exists', 'os.path.exists', (['fn'], {}), '(fn)\n', (3629, 3633), False, 'import os\n'), ((10234, 10264), 'json.dumps', 'json.dumps', (['metadata'], {'indent': '(4)'}), '(metadata, indent=4)\n', (10244, 10264), False, 'import json\n')] |
import unittest
from mock import MagicMock
from tests.KeypadGPIOStub import KeypadGPIOStub
from keypad.MatrixKeypad import MatrixKeypad
class TestStringMethods(unittest.TestCase):
def setUp(self):
key_map = [
[1, 2, 3, "A"],
[4, 5, 6, "B"],
[7, 8, 9, "C"],
["*", 0, "#", "D"]
]
rows = [29, 31, 33, 35]
columns = [37, 36, 38, 40]
self.gpio = KeypadGPIOStub()
self.key_matrix = MatrixKeypad(key_map, rows, columns, self.gpio)
def test_init(self):
key_map = [
[1, 2, 3, "A"],
[4, 5, 6, "B"],
[7, 8, 9, "C"],
["*", 0, "#", "D"]
]
rows = []
columns = []
self.gpio.setmode = MagicMock()
self.key_matrix = MatrixKeypad(key_map, rows, columns, self.gpio)
self.gpio.setmode.assert_called_with("")
def test_get_key_gpio_setup(self):
self.gpio.setup = MagicMock()
self.key_matrix.get_key()
expected_setup = [
"call([37, 36, 38, 40], 'out')",
"call([29, 31, 33, 35], '', pull_up_down='')",
"call(29, '', pull_up_down='')",
"call(31, '', pull_up_down='')",
"call(33, '', pull_up_down='')",
"call(35, '', pull_up_down='')",
"call([37, 36, 38, 40], '', pull_up_down='')",
];
for item in self.gpio.setup.call_args_list:
self.assertIn(str(item), expected_setup, "Setup of " + str(item) + " not found.")
self.assertEquals(4, self.gpio.setup.call_count)
def test_get_key_gpio_output(self):
self.gpio.output = MagicMock()
self.key_matrix.get_key()
expected_output = [
"call(37, 0)",
"call(36, 0)",
"call(38, 0)",
"call(40, 0)",
];
for item in self.gpio.output.call_args_list:
self.assertIn(str(item), expected_output)
self.assertEquals(4, self.gpio.output.call_count)
@staticmethod
def my_side_effect(*args, **kwargs):
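        # Stub for GPIO input(): pin 29 returns 0, pin 37 returns 1 and every other
        # pin returns 2; test_get_key_one_one expects this to resolve to key_map[0][0] == 1.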
if args[0] == 29:
return 0
elif args[0] == 37:
return 1
else:
return 2
def test_get_key_reads_input(self):
self.gpio.input = MagicMock(side_effect=self.my_side_effect)
self.key_matrix.get_key()
self.assertEquals(8, self.gpio.input.call_count)
def test_get_key_one_one(self):
self.gpio.input = MagicMock(side_effect=self.my_side_effect)
pressed_key = self.key_matrix.get_key()
self.assertEquals(1, pressed_key)
def test_get_key_round_trip(self):
self.gpio.input = MagicMock(side_effect=self.my_side_effect)
self.gpio.setup = MagicMock()
self.gpio.output = MagicMock()
self.key_matrix.get_key()
self.assertEquals(8, self.gpio.input.call_count)
self.assertEquals(5, self.gpio.output.call_count)
self.assertEquals(6, self.gpio.setup.call_count)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"tests.KeypadGPIOStub.KeypadGPIOStub",
"keypad.MatrixKeypad.MatrixKeypad",
"mock.MagicMock"
] | [((3043, 3058), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3056, 3058), False, 'import unittest\n'), ((435, 451), 'tests.KeypadGPIOStub.KeypadGPIOStub', 'KeypadGPIOStub', ([], {}), '()\n', (449, 451), False, 'from tests.KeypadGPIOStub import KeypadGPIOStub\n'), ((478, 525), 'keypad.MatrixKeypad.MatrixKeypad', 'MatrixKeypad', (['key_map', 'rows', 'columns', 'self.gpio'], {}), '(key_map, rows, columns, self.gpio)\n', (490, 525), False, 'from keypad.MatrixKeypad import MatrixKeypad\n'), ((764, 775), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (773, 775), False, 'from mock import MagicMock\n'), ((802, 849), 'keypad.MatrixKeypad.MatrixKeypad', 'MatrixKeypad', (['key_map', 'rows', 'columns', 'self.gpio'], {}), '(key_map, rows, columns, self.gpio)\n', (814, 849), False, 'from keypad.MatrixKeypad import MatrixKeypad\n'), ((965, 976), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (974, 976), False, 'from mock import MagicMock\n'), ((1666, 1677), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1675, 1677), False, 'from mock import MagicMock\n'), ((2285, 2327), 'mock.MagicMock', 'MagicMock', ([], {'side_effect': 'self.my_side_effect'}), '(side_effect=self.my_side_effect)\n', (2294, 2327), False, 'from mock import MagicMock\n'), ((2483, 2525), 'mock.MagicMock', 'MagicMock', ([], {'side_effect': 'self.my_side_effect'}), '(side_effect=self.my_side_effect)\n', (2492, 2525), False, 'from mock import MagicMock\n'), ((2683, 2725), 'mock.MagicMock', 'MagicMock', ([], {'side_effect': 'self.my_side_effect'}), '(side_effect=self.my_side_effect)\n', (2692, 2725), False, 'from mock import MagicMock\n'), ((2752, 2763), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2761, 2763), False, 'from mock import MagicMock\n'), ((2791, 2802), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2800, 2802), False, 'from mock import MagicMock\n')] |
from hakka import HakkaRedisClient
from os import environ
if __name__ == '__main__':
client = HakkaRedisClient(
host=environ.get('REDIS_HOST', 'localhost'),
port=environ.get('REDIS_PORT', 6379)
)
message = {
"name": "hakka",
"reading": "books"
}
for i in range(10):
message.update({
"msg": "Hello World x {}!".format(i),
})
client.set_value('hello:msg', message)
| [
"os.environ.get"
] | [((130, 168), 'os.environ.get', 'environ.get', (['"""REDIS_HOST"""', '"""localhost"""'], {}), "('REDIS_HOST', 'localhost')\n", (141, 168), False, 'from os import environ\n'), ((183, 214), 'os.environ.get', 'environ.get', (['"""REDIS_PORT"""', '(6379)'], {}), "('REDIS_PORT', 6379)\n", (194, 214), False, 'from os import environ\n')] |
# -*- coding: utf-8 -*-
"""
test_pandas
~~~~~~~~~~~
Test optional pandas functionality.
"""
import pytest
from pubchempy import *
import logging
log = logging.getLogger(__name__)
# Import pandas as pd, skipping tests in this module if pandas is not installed
pd = pytest.importorskip('pandas')
def test_compounds_dataframe():
""""""
df = get_compounds('C20H41Br', 'formula', as_dataframe=True)
assert df.ndim == 2
assert df.index.names == ['cid']
assert len(df.index) > 5
columns = df.columns.values.tolist()
assert 'atom_stereo_count' in columns
assert 'atoms' in columns
assert 'canonical_smiles' in columns
assert 'exact_mass' in columns
def test_substances_dataframe():
df = get_substances([1, 2, 3, 4], as_dataframe=True)
assert df.ndim == 2
assert df.index.names == ['sid']
assert len(df.index) == 4
assert df.columns.values.tolist() == ['source_id', 'source_name', 'standardized_cid', 'synonyms']
def test_properties_dataframe():
df = get_properties(['isomeric_smiles', 'xlogp', 'inchikey'], '1,2,3,4', 'cid', as_dataframe=True)
assert df.ndim == 2
assert df.index.names == ['CID']
assert len(df.index) == 4
assert df.columns.values.tolist() == ['InChIKey', 'IsomericSMILES', 'XLogP']
def test_compound_series():
s = Compound.from_cid(241).to_series()
assert isinstance(s, pd.Series)
def test_substance_series():
s = Substance.from_sid(1234).to_series()
assert isinstance(s, pd.Series)
def test_compound_to_frame():
s = compounds_to_frame(Compound.from_cid(241))
assert isinstance(s, pd.DataFrame)
def test_substance_to_frame():
s = substances_to_frame(Substance.from_sid(1234))
assert isinstance(s, pd.DataFrame)
| [
"logging.getLogger",
"pytest.importorskip"
] | [((157, 184), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (174, 184), False, 'import logging\n'), ((272, 301), 'pytest.importorskip', 'pytest.importorskip', (['"""pandas"""'], {}), "('pandas')\n", (291, 301), False, 'import pytest\n')] |
from functools import reduce
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext
from pyston.exception import RESTException
from pyston.utils import rfs, LOOKUP_SEP
from pyston.utils.helpers import get_field_or_none, get_method_or_none
from pyston.serializer import get_resource_or_none
from .exceptions import OrderIdentifierError
from .parsers import DefaultOrderParser, OrderParserError
from .utils import DIRECTION
from .sorters import DefaultSorter
def get_allowed_order_fields_rfs_from_model(model):
return rfs(model._rest_meta.extra_order_fields).join(rfs(model._rest_meta.order_fields))
class ModelOrderManager:
"""
Order manager is used inside model resource for order response queryset according to input values.
This is abstract class that provides methods to obtain concrete order strings from resource and model methods
and fields.
"""
def _get_real_field_name(self, resource, field_name):
return resource.renamed_fields.get(field_name, field_name) if resource else field_name
def _get_sorter_from_method(self, method, identifiers_prefix, identifiers, direction, model, resource, request,
order_fields_rfs):
"""
:param method: method from which we can get order string.
        :param identifiers_prefix: because order strings are resolved recursively, this holds the list of
               identifiers already traversed through model relations while searching for the order string.
:param identifiers: list of identifiers that conclusively identifies order string.
:param direction: direction of ordering ASC or DESC.
:param model: django model class.
:param resource: resource object.
:param request: django HTTP request.
:param order_fields_rfs: RFS of fields that is allowed to order.
:return: db order method string that is obtained from method.
"""
if hasattr(method, 'order_by'):
# If method has order_by attribute order string is being searched according to this value.
order_identifiers = method.order_by.split(LOOKUP_SEP)
# Because method must be inside allowed order fields RFS, we must add value order_by of the method
# to the next RFS.
next_order_fields_rfs = rfs(order_identifiers)
return self._get_sorter_recursive(
identifiers_prefix, order_identifiers, direction, model, resource, request, next_order_fields_rfs
)
if not identifiers_prefix and hasattr(method, 'sorter'):
return method.sorter(identifiers_prefix + identifiers, direction)
raise OrderIdentifierError
def _get_sorter_from_resource(self, identifiers_prefix, identifiers, direction, model, resource, request,
order_fields_rfs):
"""
        :param identifiers_prefix: because order strings are resolved recursively, this holds the list of
               identifiers already traversed through model relations while searching for the order string.
:param identifiers: list of identifiers that conclusively identifies order string.
:param direction: direction of ordering ASC or DESC.
:param model: django model class.
:param resource: resource object.
:param request: django HTTP request.
:param order_fields_rfs: RFS of fields that is allowed to order.
:return: db order method string that is obtained from resource object.
"""
full_identifiers_string = self._get_real_field_name(resource, LOOKUP_SEP.join(identifiers))
resource_method = resource.get_method_returning_field_value(full_identifiers_string) if resource else None
if full_identifiers_string in order_fields_rfs and resource_method:
return self._get_sorter_from_method(resource_method, identifiers_prefix, identifiers, direction, model,
resource, request, order_fields_rfs)
def _get_sorter_from_model(self, identifiers_prefix, identifiers, direction, model, resource, request,
order_fields_rfs):
"""
        :param identifiers_prefix: because order strings are resolved recursively, this holds the list of
               identifiers already traversed through model relations while searching for the order string.
:param identifiers: list of identifiers that conclusively identifies order string.
:param direction: direction of ordering ASC or DESC.
:param model: django model class.
:param resource: resource object.
:param request: django HTTP request.
:param order_fields_rfs: RFS of fields that is allowed to order.
:return: db order method string that is obtained from model fields or methods.
"""
current_identifier = self._get_real_field_name(resource, identifiers[0])
identifiers_suffix = identifiers[1:]
if current_identifier not in order_fields_rfs:
raise OrderIdentifierError
model_field = get_field_or_none(model, current_identifier)
model_method = get_method_or_none(model, current_identifier)
if model_field and not identifiers_suffix and (not model_field.is_relation or model_field.related_model):
return DefaultSorter(identifiers_prefix + identifiers, direction)
elif model_field and model_field.is_relation and model_field.related_model:
next_model = model_field.related_model
next_resource = get_resource_or_none(request, next_model, getattr(resource, 'resource_typemapper', None))
return self._get_sorter_recursive(
identifiers_prefix + [identifiers[0]], identifiers[1:], direction,
next_model, next_resource, request, order_fields_rfs[current_identifier].subfieldset
)
elif model_method and not identifiers_suffix:
return self._get_sorter_from_method(
model_method, identifiers_prefix, identifiers, direction, model, resource, request, order_fields_rfs
)
def _get_sorter_recursive(self, identifiers_prefix, identifiers, direction, model, resource, request,
extra_order_fields_rfs=None):
"""
        :param identifiers_prefix: because order strings are resolved recursively, this holds the list of
               identifiers already traversed through model relations while searching for the order string.
:param identifiers: list of identifiers that conclusively identifies order string.
:param direction: direction of ordering ASC or DESC.
:param model: django model class.
:param resource: resource object.
:param request: django HTTP request.
:param extra_order_fields_rfs: RFS of fields that is allowed to order.
        :return: order string found by searching recursively with the order_string_from_model or
                 order_string_from_resource getters.
"""
extra_order_fields_rfs = rfs() if extra_order_fields_rfs is None else extra_order_fields_rfs
order_fields_rfs = (
extra_order_fields_rfs.join(
resource.get_order_fields_rfs() if resource else get_allowed_order_fields_rfs_from_model(model)
)
)
order_string = (
self._get_sorter_from_resource(
identifiers_prefix, identifiers, direction, model, resource, request, order_fields_rfs) or
self._get_sorter_from_model(
identifiers_prefix, identifiers, direction, model, resource, request, order_fields_rfs)
)
if not order_string:
raise OrderIdentifierError
return order_string
def get_sorter(self, identifiers, direction, resource, request):
"""
        :param identifiers: list of identifiers that conclusively identifies an order string.
:param direction: direction of ordering ASC or DESC.
:param resource: resource object.
:param request: django HTTP request.
:return: method returns filter string according to input identifiers, resource and request.
"""
return self._get_sorter_recursive([], identifiers, direction, resource.model, resource, request)
class ParserModelOrderManager(ModelOrderManager):
"""
    Manager that uses a parser to turn the input order data into a list of order strings.
"""
parser = None
def _get_sorters(self, parsed_order_terms, resource, request):
"""
Converts order terms to sorter classes
"""
sorters = []
for ordering_term in parsed_order_terms:
try:
sorters.append(self.get_sorter(ordering_term.identifiers, ordering_term.direction, resource, request))
except OrderIdentifierError:
raise RESTException(
mark_safe(ugettext('Invalid identifier of ordering "{}"').format(ordering_term.source))
)
return sorters
def _convert_order_terms(self, sorters):
"""
Converts sorters to the django query order strings.
"""
return [sorter.get_order_term() for sorter in sorters]
def _update_queryset(self, qs, sorters):
"""
Update queryset for extra sorter class (it is used for annotations before ordering)
"""
return reduce(
lambda qs, sorter: sorter.update_queryset(qs) if hasattr(sorter, 'update_queryset') else qs, sorters, qs
)
def sort(self, resource, qs, request):
try:
parsed_order_terms = self.parser.parse(request)
sorters = self._get_sorters(parsed_order_terms or (), resource, request)
qs = self._update_queryset(qs, sorters)
return qs.order_by(*self._convert_order_terms(sorters)) if sorters else qs
except OrderParserError as ex:
raise RESTException(ex)
class DefaultModelOrderManager(ParserModelOrderManager):
"""
Default order manager.
"""
parser = DefaultOrderParser()
| [
"pyston.utils.rfs",
"pyston.utils.LOOKUP_SEP.join",
"pyston.exception.RESTException",
"pyston.utils.helpers.get_field_or_none",
"pyston.utils.helpers.get_method_or_none",
"django.utils.translation.ugettext"
] | [((608, 642), 'pyston.utils.rfs', 'rfs', (['model._rest_meta.order_fields'], {}), '(model._rest_meta.order_fields)\n', (611, 642), False, 'from pyston.utils import rfs, LOOKUP_SEP\n'), ((5097, 5141), 'pyston.utils.helpers.get_field_or_none', 'get_field_or_none', (['model', 'current_identifier'], {}), '(model, current_identifier)\n', (5114, 5141), False, 'from pyston.utils.helpers import get_field_or_none, get_method_or_none\n'), ((5165, 5210), 'pyston.utils.helpers.get_method_or_none', 'get_method_or_none', (['model', 'current_identifier'], {}), '(model, current_identifier)\n', (5183, 5210), False, 'from pyston.utils.helpers import get_field_or_none, get_method_or_none\n'), ((562, 602), 'pyston.utils.rfs', 'rfs', (['model._rest_meta.extra_order_fields'], {}), '(model._rest_meta.extra_order_fields)\n', (565, 602), False, 'from pyston.utils import rfs, LOOKUP_SEP\n'), ((2342, 2364), 'pyston.utils.rfs', 'rfs', (['order_identifiers'], {}), '(order_identifiers)\n', (2345, 2364), False, 'from pyston.utils import rfs, LOOKUP_SEP\n'), ((3608, 3636), 'pyston.utils.LOOKUP_SEP.join', 'LOOKUP_SEP.join', (['identifiers'], {}), '(identifiers)\n', (3623, 3636), False, 'from pyston.utils import rfs, LOOKUP_SEP\n'), ((7058, 7063), 'pyston.utils.rfs', 'rfs', ([], {}), '()\n', (7061, 7063), False, 'from pyston.utils import rfs, LOOKUP_SEP\n'), ((9947, 9964), 'pyston.exception.RESTException', 'RESTException', (['ex'], {}), '(ex)\n', (9960, 9964), False, 'from pyston.exception import RESTException\n'), ((8925, 8972), 'django.utils.translation.ugettext', 'ugettext', (['"""Invalid identifier of ordering "{}\\""""'], {}), '(\'Invalid identifier of ordering "{}"\')\n', (8933, 8972), False, 'from django.utils.translation import ugettext\n')] |
from uuid import uuid4
class DynamoDbFixtures:
@staticmethod
def get_cognito_dynamo_json():
return {
"address": {"S": "killians-amazing-address"},
"birthdate": {"S": "killians-amazing-birthdate"},
"email": {"S": "killians-amazing-email"},
"family_name": {"S": "killians-amazing-family_name"},
"gender": {"S": "killians-amazing-gender"},
"given_name": {"S": "killians-amazing-given_name"},
"locale": {"S": "killians-amazing-locale"},
"role": {"S": "killians-amazing-role"},
"preferred_username": {"S": "killians-amazing-preferred_username"},
}
@staticmethod
def get_member_no_role_dynamo_json(member_id=None):
if member_id is None:
member_id = str(uuid4())
return {
"id": {"S": member_id},
"details": {"M": DynamoDbFixtures.get_cognito_dynamo_json()},
"manager": {"NULL": True},
"officer": {"NULL": True},
"academy_player": {"NULL": True},
"player": {"NULL": True},
}
@staticmethod
def get_player_dynamo_json(player_id=None):
if player_id is None:
player_id = str(uuid4())
return {
"id": {"S": player_id},
"details": {"M": DynamoDbFixtures.get_cognito_dynamo_json()},
"manager": {"NULL": True},
"officer": {"NULL": True},
"academy_player": {"NULL": True},
"player": {
"M": {
"achievements": {"L": []},
"all_time_appearances": {"N": "0"},
"all_time_assists": {"N": "0"},
"all_time_goals": {"N": "0"},
"positions": {"L": []},
"season_appearances": {"N": "0"},
"season_assists": {"N": "0"},
"season_goals": {"N": "0"},
}
},
}
@staticmethod
def get_manager_dynamo_json(manager_id=None):
if manager_id is None:
manager_id = str(uuid4())
return {
"id": {"S": manager_id},
"details": {"M": DynamoDbFixtures.get_cognito_dynamo_json()},
"manager": {
"M": {
"favorite_formation": {"S": "killians-amazing-favorite_formation"},
"win_record": {"N": "0"},
"loss_record": {"N": "0"},
"achievements": {"L": []},
}
},
"officer": {"NULL": True},
"academy_player": {"NULL": True},
"player": {"NULL": True},
}
@staticmethod
def get_fixture_dynamodb_json():
return {
"home_team": {"S": "killians-amazing-home-team"},
"away_team": {"S": "killians-amazing-away-team"},
"competition": {"S": "killians-amazing-competition"},
"location": {"S": "killians-amazing-location"},
"kick_off_time": {"S": "killians-amazing-kick-off-time"},
"meeting_time": {"S": "killians-amazing-meeting-time"},
}
@staticmethod
def get_team_dynamodb_json(team_id=None, players=None, managers=None, fixtures=None):
if team_id is None:
team_id = str(uuid4())
if managers is None:
managers = []
if players is None:
players = []
if fixtures is None:
fixtures = [{"M": DynamoDbFixtures.get_fixture_dynamodb_json()}]
return {
"id": {"S": team_id},
"name": {"S": "killians-amazing-team-name"},
"managers": {"L": [{"S": manager} for manager in managers]},
"players": {"L": [{"S": player} for player in players]},
"training_times": {"L": [{"S": "killians-amazing-training_times"}]},
"fixtures": {"L": fixtures},
}
@staticmethod
def get_news_story_dynamodb_json(story_id, key):
return {
"id": {"S": story_id},
"category": {"L": [{"S": "killians-cool-category"}]},
"title": {"S": "killians-terrible-title"},
"description": {"S": "killians-deceptive-description"},
"thumbnail_key": {"S": key},
}
| [
"uuid.uuid4"
] | [((810, 817), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (815, 817), False, 'from uuid import uuid4\n'), ((1243, 1250), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1248, 1250), False, 'from uuid import uuid4\n'), ((2123, 2130), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (2128, 2130), False, 'from uuid import uuid4\n'), ((3336, 3343), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (3341, 3343), False, 'from uuid import uuid4\n')] |
import os
import errno
import torch
import random
from sklearn.externals import joblib
import numpy as np
import pandas as pd
from sklearn.utils import shuffle
from sklearn.preprocessing import OneHotEncoder
from src.configuration.settings_module_loader import SettingsModuleLoader
from src.configuration.settings_template import Settings
from src.io.read_data_input import Dataset
from src.run.model_runner import ModelRunner
from src.tasks.bert_multilabel_train import *
# If there's a GPU available...
ngpu = 0
if torch.cuda.is_available():
# Tell PyTorch to use the GPU.
device = torch.device("cuda")
ngpu = torch.cuda.device_count()
print('There are %d GPU(s) available.' % ngpu)
print('We will use the GPU:', torch.cuda.get_device_name(0))
# If not...
else:
print('No GPU available, using the CPU instead.')
device = torch.device("cpu")
random.seed(42)
np.random.seed(42)
torch.manual_seed(42)
if ngpu > 0:
torch.cuda.manual_seed_all(42)
def run_predictions(base_dir):
tokenizer = get_bert_tokenizer()
sentence_column = Settings.BERT_FEATURES.sentence_column_name
data_loader = None
for model_run_instance in ModelRunner.get_all_model_run_instances():
label_instance = model_run_instance.label
model_dir = os.path.join(Settings.IO.BERT_MODEL_DIR, label_instance)
results_per_fold_dir = os.path.join(base_dir, 'results_per_fold', label_instance)
results_dir = os.path.join(base_dir, 'results', label_instance)
print("\nPredicting for label: {}".format(model_run_instance.label))
X, y, le = Dataset().get(model_run_instance.label, model_run_instance.feature_source, custom_column_filters=Settings.COLUMNS.BERT_PREDICT_FILTERS)
X, y = shuffle(X, y, random_state=Settings.RANDOM_STATE)
print("\nNum Test Examples: {}".format(len(X)))
instance_config = Settings.COLUMNS.BERT_LABELS[label_instance]
BATCH_SIZE = Settings.CROSS_VALIDATION.HYPER_PARAMS.BERT['batch_size'] if \
'batch_size' in Settings.CROSS_VALIDATION.HYPER_PARAMS.BERT else 32
MAX_LEN = Settings.CROSS_VALIDATION.HYPER_PARAMS.BERT['max_seq_len'] if 'max_seq_len' in Settings.CROSS_VALIDATION.HYPER_PARAMS.BERT else 128
num_folds = Settings.CROSS_VALIDATION.NUM_TRAIN_TEST_FOLDS
num_labels = instance_config['num_labels']
if instance_config['is_multilabel']:
label_list = instance_config['label_list']
else:
label_list = ["0", "1"]
num_labels = 2
overall_predictions = None
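        # accumulate raw per-fold prediction scores; they are summed across folds and
        # divided by num_folds below to produce the fold-averaged (ensemble) prediction.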
for fold_num in range(1, num_folds+1):
# paths for loading model for fold
model_path = os.path.join(model_dir, str(fold_num), 'model.bin')
fold_result_path = os.path.join(results_per_fold_dir, str(fold_num))
# Get data for fold
print('Collecting data for fold {}'.format(fold_num))
X_test, y_test = X, y
print('Num Test: ', len(y_test))
# Loading BERT model
print('Loading model for fold: {}'.format(fold_num))
# Load model to make sure it works
model = load_bert_model(model_path, num_labels=num_labels)
# Convert labels to floats
print('Parsing data for predictions...')
# Evaluate model
predictions, data_loader = predict_from_model(model, X_test, tokenizer, label_list=label_list, batch_size=BATCH_SIZE, max_seq_len=MAX_LEN, data_loader=data_loader)
# Write results to file
write_predictions_to_disk(fold_result_path, predictions,label_list)
#Add to Overall Predictions
if overall_predictions is None:
overall_predictions = predictions
else:
overall_predictions[predictions.columns[2:]] += predictions[predictions.columns[2:]]
print('Testing Complete.')
overall_predictions[predictions.columns[2:]] /= num_folds
write_predictions_to_disk(results_dir, overall_predictions, label_list)
def main():
SettingsModuleLoader.init_settings()
base_dir = Settings.IO.RESULTS_OUTPUT_FOLDER
run_predictions(base_dir)
if __name__ == '__main__':
main()
| [
"torch.cuda.manual_seed_all",
"torch.manual_seed",
"torch.cuda.get_device_name",
"src.run.model_runner.ModelRunner.get_all_model_run_instances",
"sklearn.utils.shuffle",
"os.path.join",
"torch.cuda.device_count",
"random.seed",
"src.io.read_data_input.Dataset",
"torch.cuda.is_available",
"numpy.random.seed",
"src.configuration.settings_module_loader.SettingsModuleLoader.init_settings",
"torch.device"
] | [((523, 548), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (546, 548), False, 'import torch\n'), ((880, 895), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (891, 895), False, 'import random\n'), ((896, 914), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (910, 914), True, 'import numpy as np\n'), ((915, 936), 'torch.manual_seed', 'torch.manual_seed', (['(42)'], {}), '(42)\n', (932, 936), False, 'import torch\n'), ((599, 619), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (611, 619), False, 'import torch\n'), ((631, 656), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (654, 656), False, 'import torch\n'), ((860, 879), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (872, 879), False, 'import torch\n'), ((954, 984), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['(42)'], {}), '(42)\n', (980, 984), False, 'import torch\n'), ((1174, 1215), 'src.run.model_runner.ModelRunner.get_all_model_run_instances', 'ModelRunner.get_all_model_run_instances', ([], {}), '()\n', (1213, 1215), False, 'from src.run.model_runner import ModelRunner\n'), ((4098, 4134), 'src.configuration.settings_module_loader.SettingsModuleLoader.init_settings', 'SettingsModuleLoader.init_settings', ([], {}), '()\n', (4132, 4134), False, 'from src.configuration.settings_module_loader import SettingsModuleLoader\n'), ((743, 772), 'torch.cuda.get_device_name', 'torch.cuda.get_device_name', (['(0)'], {}), '(0)\n', (769, 772), False, 'import torch\n'), ((1287, 1343), 'os.path.join', 'os.path.join', (['Settings.IO.BERT_MODEL_DIR', 'label_instance'], {}), '(Settings.IO.BERT_MODEL_DIR, label_instance)\n', (1299, 1343), False, 'import os\n'), ((1375, 1433), 'os.path.join', 'os.path.join', (['base_dir', '"""results_per_fold"""', 'label_instance'], {}), "(base_dir, 'results_per_fold', label_instance)\n", (1387, 1433), False, 'import os\n'), ((1456, 1505), 'os.path.join', 'os.path.join', (['base_dir', '"""results"""', 'label_instance'], {}), "(base_dir, 'results', label_instance)\n", (1468, 1505), False, 'import os\n'), ((1753, 1802), 'sklearn.utils.shuffle', 'shuffle', (['X', 'y'], {'random_state': 'Settings.RANDOM_STATE'}), '(X, y, random_state=Settings.RANDOM_STATE)\n', (1760, 1802), False, 'from sklearn.utils import shuffle\n'), ((1602, 1611), 'src.io.read_data_input.Dataset', 'Dataset', ([], {}), '()\n', (1609, 1611), False, 'from src.io.read_data_input import Dataset\n')] |
import json
import logging
import os
import re
import sys
from http.server import BaseHTTPRequestHandler as httpHandler
from xmlrpc.client import ServerProxy
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
from argon2 import PasswordHasher, extract_parameters
from argon2.exceptions import InvalidHash, VerifyMismatchError, VerificationError
from defusedxml.xmlrpc import monkey_patch
from dotmap import DotMap
# Monkey patch xmlrpc to protect it from attacks https://github.com/tiran/defusedxml
monkey_patch()
logging.basicConfig(stream=sys.stdout, format="%(asctime)s - %(levelname)s - %(message)s", level=logging.DEBUG)
log = logging.getLogger()
def log_message(self, format, *args):
"""Overrides the logging used by the xmlrpc server with our custom one"""
log.info("%s - - [%s] %s" % (self.address_string(), self.log_date_time_string(), format % args))
httpHandler.log_message = log_message
class LoopiaProxyFunctions:
# Regex to validate domains
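    # e.g. it matches "example.com" and "api.example.co.uk", and rejects labels that start or end with a hyphen.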
_domain_re = re.compile("^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\\.)+[A-Za-z]{2,}$")
def __init__(self):
self._ph = PasswordHasher()
# Read and hash any un-hashed passwords
with open('config/settings.json', encoding='utf-8', mode='r+t') as f:
self._users = DotMap(json.load(f))
updated = False
for name, user in self._users.items():
try:
extract_parameters(user.password)
except InvalidHash:
user.password = self._ph.hash(user.password)
updated = True
# Update the file if we have hashed the password
if updated:
f.seek(0)
json.dump(self._users.toDict(), f, indent=2)
f.truncate()
self._loopia = ServerProxy("https://api.loopia.se/RPCSERV")
self._api_user = os.environ['LOOPIA_USER']
self._api_pass = os.environ['LOOPIA_PASS']
def _updateUser(self, username, password):
log.debug(f"Updating user {username=}")
self._users[username].password = self._ph.hash(password)
with open('config/settings.json', encoding='utf-8', mode='w') as f:
json.dump(self._users.toDict(), f, indent=2)
# Authenticates the username against the local file
def _auth(self, username, password):
if username in self._users:
try:
user = self._users[username]
self._ph.verify(user.password, password)
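                # Transparently re-hash the password when the stored hash's argon2 parameters are outdated.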
if self._ph.check_needs_rehash(user.password):
self._updateUser(username, password)
return True
except (VerificationError, VerifyMismatchError, InvalidHash):
pass
return False
def _checkAndRun(self, username, password, domain, subdomain, func):
# Filter out bad input
if domain == "" or (subdomain is not None and subdomain == "") or not self._domain_re.match(domain):
return ["BAD_INDATA"]
if not self._auth(username, password):
return ["AUTH_ERROR"]
user = self._users[username]
if domain not in user.domains:
return ["UNKNOWN_ERROR"]
return func()
def getDomains(self, username, password):
"""Returns a list of domains that the account has access to"""
log.info(f"getting domains: {username}")
if not self._auth(username, password):
return ["AUTH_ERROR"]
user = self._users[username]
domains = self._loopia.getDomains(self._api_user, self._api_pass)
result = []
for domain in domains:
if domain['domain'] in user.domains:
result.append(domain)
return result
def getSubdomains(self, username, password, domain):
"""Returns a list of subdomains on the provided domain"""
log.info(f"getting subdomains: {username} -> {domain}")
return self._checkAndRun(username, password, domain, None,
lambda: self._loopia.getSubdomains(self._api_user, self._api_pass, domain))
def getZoneRecords(self, username, password, domain, subdomain):
"""Returns a list of zone records for the provided subdomain on the provided domain"""
log.info(f"getting zone records: {username} -> {subdomain}.{domain}")
return self._checkAndRun(username, password, domain, subdomain,
lambda: self._loopia.getZoneRecords(self._api_user, self._api_pass, domain, subdomain))
def addSubdomain(self, username, password, domain, subdomain):
"""Adds a subdomain to the provided domain"""
log.info(f"adding subdomain: {username} -> {subdomain}.{domain}")
return self._checkAndRun(username, password, domain, subdomain,
lambda: self._loopia.addSubdomain(self._api_user, self._api_pass, domain, subdomain))
def removeSubdomain(self, username, password, domain, subdomain):
"""Removes a subdomain on the provided domain"""
log.info(f"removing subdomain: {username} -> {subdomain}.{domain}")
return self._checkAndRun(username, password, domain, subdomain,
lambda: self._loopia.removeSubdomain(self._api_user, self._api_pass, domain, subdomain))
def addZoneRecord(self, username, password, domain, subdomain, record):
"""Adds a zone records to the provided subdomain for the provided domain"""
log.info(f"adding zone record to subdomain: {username} -> {subdomain}.{domain}")
return self._checkAndRun(username, password, domain, subdomain,
lambda: self._loopia.addZoneRecord(self._api_user, self._api_pass, domain, subdomain, record))
def removeZoneRecord(self, username, password, domain, subdomain, record_id):
"""Removes a zone record from the provided subdomain for the provided domain"""
log.info(f"removing zone record on subdomain: {username} -> {subdomain}.{domain}. ID: {record_id}")
return self._checkAndRun(username, password, domain, subdomain,
lambda: self._loopia.removeZoneRecord(self._api_user, self._api_pass, domain, subdomain, record_id))
def updateZoneRecord(self, username, password, domain, subdomain, record):
"""Updates a zone record on the provided subdomain for the provided domain"""
if 'record_id' not in record:
return 'BAD_INDATA'
log.info(f"updating zone record on subdomain: {username} -> {subdomain}.{domain}. ID: {record['record_id']}")
return self._checkAndRun(username, password, domain, subdomain,
lambda: self._loopia.updateZoneRecord(self._api_user, self._api_pass, domain, subdomain, record))
    def __close(self):
        # ServerProxy("close") returns the transport's close method, so it must be invoked to actually close the upstream connection.
        self._loopia("close")()
def __call__(self, attr):
if attr == "close":
return self.__close
class RequestHandler(SimpleXMLRPCRequestHandler):
rpc_paths = ('/RPCSERV',)
def do_GET(self):
"""Return 404 on all GET requests"""
self.report_404()
# Host available on POST http://localhost:8000/RPCSERV
port = int(os.environ.get('PORT', '8000'))
host = os.environ.get('HOST', 'localhost')
log.info("Starting server on: " + host + ':' + str(port))
with SimpleXMLRPCServer((host, port), RequestHandler) as server:
server.register_introspection_functions()
proxy = LoopiaProxyFunctions()
server.register_instance(proxy)
try:
server.serve_forever()
except KeyboardInterrupt as err:
log.error(f"Stopping from keyboard interrupt: {err=}")
proxy("close")
sys.exit(0)
| [
"logging.basicConfig",
"logging.getLogger",
"defusedxml.xmlrpc.monkey_patch",
"xmlrpc.server.SimpleXMLRPCServer",
"argon2.PasswordHasher",
"re.compile",
"os.environ.get",
"xmlrpc.client.ServerProxy",
"sys.exit",
"json.load",
"argon2.extract_parameters"
] | [((523, 537), 'defusedxml.xmlrpc.monkey_patch', 'monkey_patch', ([], {}), '()\n', (535, 537), False, 'from defusedxml.xmlrpc import monkey_patch\n'), ((539, 655), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'format': '"""%(asctime)s - %(levelname)s - %(message)s"""', 'level': 'logging.DEBUG'}), "(stream=sys.stdout, format=\n '%(asctime)s - %(levelname)s - %(message)s', level=logging.DEBUG)\n", (558, 655), False, 'import logging\n'), ((657, 676), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (674, 676), False, 'import logging\n'), ((7283, 7318), 'os.environ.get', 'os.environ.get', (['"""HOST"""', '"""localhost"""'], {}), "('HOST', 'localhost')\n", (7297, 7318), False, 'import os\n'), ((1015, 1078), 're.compile', 're.compile', (['"""^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\\\\.)+[A-Za-z]{2,}$"""'], {}), "('^((?!-)[A-Za-z0-9-]{1,63}(?<!-)\\\\.)+[A-Za-z]{2,}$')\n", (1025, 1078), False, 'import re\n'), ((7244, 7274), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '"""8000"""'], {}), "('PORT', '8000')\n", (7258, 7274), False, 'import os\n'), ((7382, 7430), 'xmlrpc.server.SimpleXMLRPCServer', 'SimpleXMLRPCServer', (['(host, port)', 'RequestHandler'], {}), '((host, port), RequestHandler)\n', (7400, 7430), False, 'from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler\n'), ((1123, 1139), 'argon2.PasswordHasher', 'PasswordHasher', ([], {}), '()\n', (1137, 1139), False, 'from argon2 import PasswordHasher, extract_parameters\n'), ((1831, 1875), 'xmlrpc.client.ServerProxy', 'ServerProxy', (['"""https://api.loopia.se/RPCSERV"""'], {}), "('https://api.loopia.se/RPCSERV')\n", (1842, 1875), False, 'from xmlrpc.client import ServerProxy\n'), ((7730, 7741), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (7738, 7741), False, 'import sys\n'), ((1300, 1312), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1309, 1312), False, 'import json\n'), ((1435, 1468), 'argon2.extract_parameters', 'extract_parameters', (['user.password'], {}), '(user.password)\n', (1453, 1468), False, 'from argon2 import PasswordHasher, extract_parameters\n')] |
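A minimal client-side sketch for the XML-RPC proxy above, assuming the server is running locally with the default HOST/PORT and that the placeholder credentials exist in config/settings.json:

from xmlrpc.client import ServerProxy

client = ServerProxy("http://localhost:8000/RPCSERV")
print(client.system.listMethods())  # introspection functions are registered by the server
print(client.getDomains("someuser", "example-password"))  # returns ["AUTH_ERROR"] unless the user is configured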