code (stringlengths 22–1.05M) | apis (listlengths 1–3.31k) | extract_api (stringlengths 75–3.25M)
---|---|---
# Copyright (c) 2010-2014 openpyxl
import pytest
from openpyxl.styles.borders import Border, Side
from openpyxl.styles.fills import GradientFill
from openpyxl.styles.colors import Color
from openpyxl.writer.styles import StyleWriter
from openpyxl.tests.helper import get_xml, compare_xml
class DummyWorkbook:
style_properties = []
def test_write_gradient_fill():
fill = GradientFill(degree=90, stop=[Color(theme=0), Color(theme=4)])
writer = StyleWriter(DummyWorkbook())
writer._write_gradient_fill(writer._root, fill)
xml = get_xml(writer._root)
expected = """<?xml version="1.0" ?>
<styleSheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<gradientFill degree="90" type="linear">
<stop position="0">
<color theme="0"/>
</stop>
<stop position="1">
<color theme="4"/>
</stop>
</gradientFill>
</styleSheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_write_borders():
borders = Border()
writer = StyleWriter(DummyWorkbook())
writer._write_border(writer._root, borders)
xml = get_xml(writer._root)
expected = """<?xml version="1.0"?>
<styleSheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<border>
<left/>
<right/>
<top/>
<bottom/>
<diagonal/>
</border>
</styleSheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
| [
"openpyxl.styles.colors.Color",
"openpyxl.tests.helper.get_xml",
"openpyxl.tests.helper.compare_xml",
"openpyxl.styles.borders.Border"
]
| [((552, 573), 'openpyxl.tests.helper.get_xml', 'get_xml', (['writer._root'], {}), '(writer._root)\n', (559, 573), False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((910, 936), 'openpyxl.tests.helper.compare_xml', 'compare_xml', (['xml', 'expected'], {}), '(xml, expected)\n', (921, 936), False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((1009, 1017), 'openpyxl.styles.borders.Border', 'Border', ([], {}), '()\n', (1015, 1017), False, 'from openpyxl.styles.borders import Border, Side\n'), ((1118, 1139), 'openpyxl.tests.helper.get_xml', 'get_xml', (['writer._root'], {}), '(writer._root)\n', (1125, 1139), False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((1381, 1407), 'openpyxl.tests.helper.compare_xml', 'compare_xml', (['xml', 'expected'], {}), '(xml, expected)\n', (1392, 1407), False, 'from openpyxl.tests.helper import get_xml, compare_xml\n'), ((415, 429), 'openpyxl.styles.colors.Color', 'Color', ([], {'theme': '(0)'}), '(theme=0)\n', (420, 429), False, 'from openpyxl.styles.colors import Color\n'), ((431, 445), 'openpyxl.styles.colors.Color', 'Color', ([], {'theme': '(4)'}), '(theme=4)\n', (436, 445), False, 'from openpyxl.styles.colors import Color\n')] |
from unittest import TestCase
from io import StringIO
import json
class TestDump(TestCase):
def test_dump(self):
sio = StringIO()
json.dump({}, sio)
self.assertEquals(sio.getvalue(), '{}')
def test_dumps(self):
self.assertEquals(json.dumps({}), '{}')
def test_encode_truefalse(self):
self.assertEquals(json.dumps(
{True: False, False: True}, sort_keys=True),
'{"false": true, "true": false}')
self.assertEquals(json.dumps(
{2: 3.0, 4.0: 5, False: 1, 6: True}, sort_keys=True),
'{"false": 1, "2": 3.0, "4.0": 5, "6": true}')
| [
"io.StringIO",
"json.dumps",
"json.dump"
]
| [((133, 143), 'io.StringIO', 'StringIO', ([], {}), '()\n', (141, 143), False, 'from io import StringIO\n'), ((152, 170), 'json.dump', 'json.dump', (['{}', 'sio'], {}), '({}, sio)\n', (161, 170), False, 'import json\n'), ((272, 286), 'json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (282, 286), False, 'import json\n'), ((358, 416), 'json.dumps', 'json.dumps', (['{(True): False, (False): True}'], {'sort_keys': '(True)'}), '({(True): False, (False): True}, sort_keys=True)\n', (368, 416), False, 'import json\n'), ((509, 580), 'json.dumps', 'json.dumps', (['{(2): 3.0, (4.0): 5, (False): 1, (6): True}'], {'sort_keys': '(True)'}), '({(2): 3.0, (4.0): 5, (False): 1, (6): True}, sort_keys=True)\n', (519, 580), False, 'import json\n')] |
# -*- coding: utf-8 -*-
#
# Tencent is pleased to support the open source community by making QT4C available.
# Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
# QT4C is licensed under the BSD 3-Clause License, except for the third-party components listed below.
# A copy of the BSD 3-Clause License is included in this file.
#
'''Unit tests
'''
import unittest
import os
import sys
test_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(test_dir))
def main():
runner = unittest.TextTestRunner(verbosity=10 + sys.argv.count('-v'))
suite = unittest.TestLoader().discover(test_dir, pattern='test_*.py')
raise SystemExit(not runner.run(suite).wasSuccessful())
if __name__ == '__main__':
main()
| [
"os.path.abspath",
"os.path.dirname",
"sys.argv.count",
"unittest.TestLoader"
]
| [((439, 464), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (454, 464), False, 'import os\n'), ((485, 510), 'os.path.dirname', 'os.path.dirname', (['test_dir'], {}), '(test_dir)\n', (500, 510), False, 'import os\n'), ((611, 632), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (630, 632), False, 'import unittest\n'), ((577, 597), 'sys.argv.count', 'sys.argv.count', (['"""-v"""'], {}), "('-v')\n", (591, 597), False, 'import sys\n')] |
from cffi import FFI
ffibuilder = FFI()
ffibuilder.cdef("""
int test(int t);
""")
ffibuilder.set_source("_pi_cffi",
"""
#include "brute.h"
""",
sources=['brute.c'])
if __name__ == "__main__":
ffibuilder.compile(verbose = True)
| [
"cffi.FFI"
]
| [((35, 40), 'cffi.FFI', 'FFI', ([], {}), '()\n', (38, 40), False, 'from cffi import FFI\n')] |
"""Board Module"""
import copy
from typing import Tuple, List
from src.coordinate import Coordinate
from src.snake import Snake
class Board:
"""Track the cooardinates for all snakes and food in the game."""
def __init__(self, data):
self._data = data
self._snakes = None
self._foods = None
@property
def snakes(self) -> List[Snake]:
"""Retreive the list of snakes from the board data."""
if self._snakes is None:
snakes = [Snake(snake_data) for snake_data in self._data['snakes']]
self._snakes = snakes
return self._snakes
@property
def foods(self) -> List[Coordinate]:
"""Retreive the list of food from the board data."""
if self._foods is None:
self._foods = [Coordinate(food_data) for food_data in self._data['food']]
return self._foods
@property
def width(self) -> int:
"""Get width of the board -- note: it's a square."""
return self._data['width']
def is_coordinate_in_bounds(self, coordinate) -> bool:
"""Check whether or not the Coordinate is within the bounds of the Board."""
is_wall = (coordinate.x == -1 or coordinate.x == self.width
or coordinate.y == -1 or coordinate.y == self.width)
return not is_wall
def get_other_snakes(self, exclude_id) -> List[Snake]:
"""Get the List of Snakes whose IDs don't match the given ID."""
return [snake for snake in self.snakes if snake.id != exclude_id]
def advance_snake_along_path(self, snake_id: str, path: List[Coordinate]):
"""Return a new board with our snake advanced along given path."""
new_board = copy.deepcopy(self)
return new_board.__help_advance_snake_along_path(snake_id, path)
def __help_advance_snake_along_path(self, snake_id: str, path: List[Coordinate]):
"""Do the actual advancement of the snake along the path."""
me = next((snake for snake in self.snakes if snake.id == snake_id), None)
if not me:
raise ValueError("No snake for given id!")
me.coordinates += path
me.coordinates = me.coordinates[len(path):]
me.coordinates.reverse()
me.coordinates.append(me.coordinates[-1])
print("new coords:")
for coord in me.coordinates:
print(coord)
return self
| [
"src.snake.Snake",
"src.coordinate.Coordinate",
"copy.deepcopy"
]
| [((1708, 1727), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (1721, 1727), False, 'import copy\n'), ((493, 510), 'src.snake.Snake', 'Snake', (['snake_data'], {}), '(snake_data)\n', (498, 510), False, 'from src.snake import Snake\n'), ((789, 810), 'src.coordinate.Coordinate', 'Coordinate', (['food_data'], {}), '(food_data)\n', (799, 810), False, 'from src.coordinate import Coordinate\n')] |
import os
import warnings
from django.conf import settings
CAPTCHA_FONT_PATH = getattr(settings, 'CAPTCHA_FONT_PATH', os.path.normpath(os.path.join(os.path.dirname(__file__), '..', 'fonts/Vera.ttf')))
CAPTCHA_FONT_SIZE = getattr(settings, 'CAPTCHA_FONT_SIZE', 22)
CAPTCHA_LETTER_ROTATION = getattr(settings, 'CAPTCHA_LETTER_ROTATION', (-35, 35))
CAPTCHA_BACKGROUND_COLOR = getattr(settings, 'CAPTCHA_BACKGROUND_COLOR', '#ffffff')
CAPTCHA_FOREGROUND_COLOR = getattr(settings, 'CAPTCHA_FOREGROUND_COLOR', '#001100')
CAPTCHA_CHALLENGE_FUNCT = getattr(settings, 'CAPTCHA_CHALLENGE_FUNCT', 'captcha.helpers.random_char_challenge')
CAPTCHA_NOISE_FUNCTIONS = getattr(settings, 'CAPTCHA_NOISE_FUNCTIONS', ('captcha.helpers.noise_arcs', 'captcha.helpers.noise_dots',))
CAPTCHA_FILTER_FUNCTIONS = getattr(settings, 'CAPTCHA_FILTER_FUNCTIONS', ('captcha.helpers.post_smooth',))
CAPTCHA_WORDS_DICTIONARY = getattr(settings, 'CAPTCHA_WORDS_DICTIONARY', '/usr/share/dict/words')
CAPTCHA_PUNCTUATION = getattr(settings, 'CAPTCHA_PUNCTUATION', '''_"',.;:-''')
CAPTCHA_FLITE_PATH = getattr(settings, 'CAPTCHA_FLITE_PATH', None)
CAPTCHA_SOX_PATH = getattr(settings, 'CAPTCHA_SOX_PATH', None)
CAPTCHA_TIMEOUT = getattr(settings, 'CAPTCHA_TIMEOUT', 5) # Minutes
CAPTCHA_LENGTH = int(getattr(settings, 'CAPTCHA_LENGTH', 4)) # Chars
# CAPTCHA_IMAGE_BEFORE_FIELD = getattr(settings, 'CAPTCHA_IMAGE_BEFORE_FIELD', True)
CAPTCHA_DICTIONARY_MIN_LENGTH = getattr(settings, 'CAPTCHA_DICTIONARY_MIN_LENGTH', 0)
CAPTCHA_DICTIONARY_MAX_LENGTH = getattr(settings, 'CAPTCHA_DICTIONARY_MAX_LENGTH', 99)
CAPTCHA_IMAGE_SIZE = getattr(settings, 'CAPTCHA_IMAGE_SIZE', None)
CAPTCHA_IMAGE_TEMPLATE = getattr(settings, 'CAPTCHA_IMAGE_TEMPLATE', 'captcha/image.html')
CAPTCHA_HIDDEN_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_HIDDEN_FIELD_TEMPLATE', 'captcha/hidden_field.html')
CAPTCHA_TEXT_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_TEXT_FIELD_TEMPLATE', 'captcha/text_field.html')
if getattr(settings, 'CAPTCHA_FIELD_TEMPLATE', None):
msg = ("CAPTCHA_FIELD_TEMPLATE setting is deprecated in favor of widget's template_name.")
warnings.warn(msg, DeprecationWarning)
CAPTCHA_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_FIELD_TEMPLATE', None)
if getattr(settings, 'CAPTCHA_OUTPUT_FORMAT', None):
msg = ("CAPTCHA_OUTPUT_FORMAT setting is deprecated in favor of widget's template_name.")
warnings.warn(msg, DeprecationWarning)
CAPTCHA_OUTPUT_FORMAT = getattr(settings, 'CAPTCHA_OUTPUT_FORMAT', None)
CAPTCHA_MATH_CHALLENGE_OPERATOR = getattr(settings, 'CAPTCHA_MATH_CHALLENGE_OPERATOR', '*')
CAPTCHA_GET_FROM_POOL = getattr(settings, 'CAPTCHA_GET_FROM_POOL', False)
CAPTCHA_GET_FROM_POOL_TIMEOUT = getattr(settings, 'CAPTCHA_GET_FROM_POOL_TIMEOUT', 5)
CAPTCHA_TEST_MODE = getattr(settings, 'CAPTCHA_TEST_MODE', False)
# Failsafe
if CAPTCHA_DICTIONARY_MIN_LENGTH > CAPTCHA_DICTIONARY_MAX_LENGTH:
CAPTCHA_DICTIONARY_MIN_LENGTH, CAPTCHA_DICTIONARY_MAX_LENGTH = CAPTCHA_DICTIONARY_MAX_LENGTH, CAPTCHA_DICTIONARY_MIN_LENGTH
def _callable_from_string(string_or_callable):
if callable(string_or_callable):
return string_or_callable
else:
return getattr(__import__('.'.join(string_or_callable.split('.')[:-1]), {}, {}, ['']), string_or_callable.split('.')[-1])
def get_challenge(generator=None):
return _callable_from_string(generator or CAPTCHA_CHALLENGE_FUNCT)
def noise_functions():
if CAPTCHA_NOISE_FUNCTIONS:
return map(_callable_from_string, CAPTCHA_NOISE_FUNCTIONS)
return []
def filter_functions():
if CAPTCHA_FILTER_FUNCTIONS:
return map(_callable_from_string, CAPTCHA_FILTER_FUNCTIONS)
return []
| [
"warnings.warn",
"os.path.dirname"
]
| [((2103, 2141), 'warnings.warn', 'warnings.warn', (['msg', 'DeprecationWarning'], {}), '(msg, DeprecationWarning)\n', (2116, 2141), False, 'import warnings\n'), ((2368, 2406), 'warnings.warn', 'warnings.warn', (['msg', 'DeprecationWarning'], {}), '(msg, DeprecationWarning)\n', (2381, 2406), False, 'import warnings\n'), ((150, 175), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (165, 175), False, 'import os\n')] |
# Generated by Django 2.2.21 on 2021-06-23 12:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('resources', '0125_add_timmi_payload_model'),
]
operations = [
migrations.AddField(
model_name='unit',
name='disallow_overlapping_reservations_per_user',
field=models.BooleanField(default=False, verbose_name='Disallow overlapping reservations in this unit per user.'),
),
]
| [
"django.db.models.BooleanField"
]
| [((409, 521), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Disallow overlapping reservations in this unit per user."""'}), "(default=False, verbose_name=\n 'Disallow overlapping reservations in this unit per user.')\n", (428, 521), False, 'from django.db import migrations, models\n')] |
import logging
from lora_multihop import serial_connection, variables
def config_module(configuration=variables.MODULE_CONFIG):
if serial_connection.execute_command(configuration, [variables.STATUS_OK]):
serial_connection.execute_command('AT+SEND=1', [variables.STATUS_OK])
serial_connection.execute_command('a', ['AT,SENDING', 'AT,SENDED'])
logging.debug('module config successfully set')
return True
logging.warning("could not set module config")
return False
def set_address(address):
cmd = f'AT+ADDR={address}'
if serial_connection.execute_command(serial_connection.str_to_bytes(cmd), [variables.STATUS_OK]):
logging.debug(f'module address successfully set to: {address}')
return True
logging.warning("could not set module address")
return False
def get_current_address():
serial_connection.execute_command(serial_connection.str_to_bytes(variables.GET_ADDR))
addr = serial_connection.response_q.get(variables.COMMAND_VERIFICATION_TIMEOUT)
addr = serial_connection.bytes_to_str(addr)
addr_as_list = addr.split(variables.LORA_MODULE_DELIMITER)
if addr_as_list[0].strip() != 'AT' or addr_as_list[2].strip() != 'OK':
raise ValueError('could not get address of module')
return addr_as_list[1]
| [
"logging.debug",
"lora_multihop.serial_connection.execute_command",
"logging.warning",
"lora_multihop.serial_connection.response_q.get",
"lora_multihop.serial_connection.str_to_bytes",
"lora_multihop.serial_connection.bytes_to_str"
]
| [((138, 209), 'lora_multihop.serial_connection.execute_command', 'serial_connection.execute_command', (['configuration', '[variables.STATUS_OK]'], {}), '(configuration, [variables.STATUS_OK])\n', (171, 209), False, 'from lora_multihop import serial_connection, variables\n'), ((445, 491), 'logging.warning', 'logging.warning', (['"""could not set module config"""'], {}), "('could not set module config')\n", (460, 491), False, 'import logging\n'), ((766, 813), 'logging.warning', 'logging.warning', (['"""could not set module address"""'], {}), "('could not set module address')\n", (781, 813), False, 'import logging\n'), ((961, 1033), 'lora_multihop.serial_connection.response_q.get', 'serial_connection.response_q.get', (['variables.COMMAND_VERIFICATION_TIMEOUT'], {}), '(variables.COMMAND_VERIFICATION_TIMEOUT)\n', (993, 1033), False, 'from lora_multihop import serial_connection, variables\n'), ((1045, 1081), 'lora_multihop.serial_connection.bytes_to_str', 'serial_connection.bytes_to_str', (['addr'], {}), '(addr)\n', (1075, 1081), False, 'from lora_multihop import serial_connection, variables\n'), ((219, 288), 'lora_multihop.serial_connection.execute_command', 'serial_connection.execute_command', (['"""AT+SEND=1"""', '[variables.STATUS_OK]'], {}), "('AT+SEND=1', [variables.STATUS_OK])\n", (252, 288), False, 'from lora_multihop import serial_connection, variables\n'), ((297, 364), 'lora_multihop.serial_connection.execute_command', 'serial_connection.execute_command', (['"""a"""', "['AT,SENDING', 'AT,SENDED']"], {}), "('a', ['AT,SENDING', 'AT,SENDED'])\n", (330, 364), False, 'from lora_multihop import serial_connection, variables\n'), ((373, 420), 'logging.debug', 'logging.debug', (['"""module config successfully set"""'], {}), "('module config successfully set')\n", (386, 420), False, 'import logging\n'), ((609, 644), 'lora_multihop.serial_connection.str_to_bytes', 'serial_connection.str_to_bytes', (['cmd'], {}), '(cmd)\n', (639, 644), False, 'from lora_multihop import serial_connection, variables\n'), ((678, 741), 'logging.debug', 'logging.debug', (['f"""module address successfully set to: {address}"""'], {}), "(f'module address successfully set to: {address}')\n", (691, 741), False, 'import logging\n'), ((898, 948), 'lora_multihop.serial_connection.str_to_bytes', 'serial_connection.str_to_bytes', (['variables.GET_ADDR'], {}), '(variables.GET_ADDR)\n', (928, 948), False, 'from lora_multihop import serial_connection, variables\n')] |
import abc
from typing import Dict, Callable
import tensorflow as tf
from flink_ml_framework.context import Context
from flink_ml_framework.java_file import *
from ..runner import tf_helper, io_helper
from ..runner.output_writer import DirectOutputWriter
try:
from flink_ml_tensorflow.tensorflow_context import TFContext
except:
from flink_ml_tensorflow2.tensorflow_context import TFContext
# noinspection PyUnresolvedReferences
from tensorflow_io.core.python.ops import core_ops
__all__ = ['TF1_TYPE', 'TF2_TYPE']
TF1_TYPE = 'tf1'
TF2_TYPE = 'tf2'
class BaseEntry(abc.ABC):
def __init__(self, func_name, engine_type):
self.func_name = func_name
self.engine_type = engine_type
@staticmethod
def get_func_by_name(func_name):
"""
Get function by the func name
:param func_name: func name
:return: function
"""
if '.' not in func_name:
if func_name in globals():
return globals()[func_name]
else:
raise RuntimeError('cannot find function[{}]'.format(func_name))
else:
module_name, func_name = func_name.rsplit('.', 1)
import importlib
# load the module, will raise ImportError if module cannot be loaded
m = importlib.import_module(module_name)
# get the class, will raise AttributeError if class cannot be found
c = getattr(m, func_name)
return c
@abc.abstractmethod
def construct_args(self, **kwargs):
pass
def is_batch(self):
return True
def post_process(self, **kwargs):
pass
def entry_func(self, context: Context):
tf_context = TFContext(context)
properties = tf_context.properties
print('properties', properties, flush=True)
# intra_op_parallelism is set by akdl, because there is a bug in TensorFlow 1.x
# See: https://stackoverflow.com/questions/34426268/restricting-number-of-cores-used
intra_op_parallelism = int(properties['ALINK:intra_op_parallelism'])
if self.engine_type == TF1_TYPE:
tf_helper.set_intra_op_parallelism(intra_op_parallelism_threads=intra_op_parallelism)
elif self.engine_type == TF2_TYPE:
tf.config.threading.set_intra_op_parallelism_threads(intra_op_parallelism)
num_workers = int(properties['ALINK:num_workers'])
work_dir = properties['ALINK:work_dir']
cluster, task_type, task_index = tf_context.export_estimator_cluster()
if self.is_batch():
java_queue_file = JavaFile(context.from_java(), context.to_java())
dataset_file = os.path.join(work_dir, 'dataset.tfrecords')
dataset, dataset_length = io_helper.convert_java_queue_file_to_repeatable_dataset(java_queue_file,
dataset_file)
print("number of records: " + str(dataset_length), flush=True)
dataset_fn: Callable[[], tf.data.TFRecordDataset] = lambda: tf.data.TFRecordDataset(dataset_file)
else:
dataset_fn: Callable[[], tf.data.TFRecordDataset] = lambda: tf_context.flink_stream_dataset()
dataset = None
dataset_file = None
dataset_length = None
saved_model_dir = os.path.join(work_dir, 'savedmodel')
user_params: Dict = json.loads(properties['ALINK:user_defined_params'])
for i in range(1, 1024):
key = "ALINK:bc_" + str(i)
if key in properties:
user_params[key] = context.properties[key]
key = "ALINK:model_dir"
if key in properties:
user_params[key] = properties[key]
output_writer = DirectOutputWriter(tf_context.from_java(), tf_context.to_java())
locals_copy = locals().copy()
locals_copy.pop("self")
print("locals_copy = ", locals_copy, flush=True)
args = self.construct_args(**locals_copy)
func = self.get_func_by_name(self.func_name)
func(args)
print("task_type = {}, task_index = {}: done tf_user_main".format(task_type, task_index), flush=True)
local_vars = locals().copy()
local_vars.pop('self')
self.post_process(**local_vars)
print("task_type = {}, task_index = {}: exit".format(task_type, task_index), flush=True)
output_writer.close()
| [
"flink_ml_tensorflow2.tensorflow_context.TFContext",
"tensorflow.data.TFRecordDataset",
"tensorflow.config.threading.set_intra_op_parallelism_threads",
"importlib.import_module"
]
| [((1728, 1746), 'flink_ml_tensorflow2.tensorflow_context.TFContext', 'TFContext', (['context'], {}), '(context)\n', (1737, 1746), False, 'from flink_ml_tensorflow2.tensorflow_context import TFContext\n'), ((1311, 1347), 'importlib.import_module', 'importlib.import_module', (['module_name'], {}), '(module_name)\n', (1334, 1347), False, 'import importlib\n'), ((2295, 2369), 'tensorflow.config.threading.set_intra_op_parallelism_threads', 'tf.config.threading.set_intra_op_parallelism_threads', (['intra_op_parallelism'], {}), '(intra_op_parallelism)\n', (2347, 2369), True, 'import tensorflow as tf\n'), ((3102, 3139), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['dataset_file'], {}), '(dataset_file)\n', (3125, 3139), True, 'import tensorflow as tf\n')] |
import pytest
ENCODING = 'utf-8'
@pytest.fixture(scope='function', autouse=True)
def setup_case(request):
def destroy_case():
from corm import annihilate_keyspace_tables, SESSIONS
annihilate_keyspace_tables('mykeyspace')
for keyspace_name, session in SESSIONS.copy().items():
if keyspace_name in ['global']:
continue
session.shutdown()
del SESSIONS[keyspace_name]
request.addfinalizer(destroy_case)
def test_initial_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
class TestModel(CORMBase):
__keyspace__ = 'mykeyspace'
something: str
other: str
register_table(TestModel)
sync_schema()
one = TestModel('one', 'two')
two = TestModel('one', 'two')
three = TestModel('one', 'three')
insert([one, two, three])
def test_keyspace_api():
import hashlib
import uuid
from corm import register_table, insert, sync_schema, \
keyspace_exists, keyspace_destroy, keyspace_create
from corm.datatypes import CassandraKeyspaceStrategy
from corm.models import CORMBase
# Keyspaces seem to have to start with Alpha-Letters
keyspace_name = hashlib.md5(str(uuid.uuid4()).encode(ENCODING)).hexdigest()
keyspace_name = f'abc_{keyspace_name}'
assert keyspace_exists(keyspace_name) is False
keyspace_create(keyspace_name, CassandraKeyspaceStrategy.Simple)
assert keyspace_exists(keyspace_name) is True
keyspace_destroy(keyspace_name)
assert keyspace_exists(keyspace_name) is False
class TestModelKeyspace(CORMBase):
__keyspace__ = keyspace_name
item: str
register_table(TestModelKeyspace)
assert keyspace_exists(keyspace_name) is False
sync_schema()
assert keyspace_exists(keyspace_name) is True
one = TestModelKeyspace('one')
insert([one])
keyspace_destroy(keyspace_name)
assert keyspace_exists(keyspace_name) is False
def test_float_api():
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
class TestModelFloat(CORMBase):
__keyspace__ = 'mykeyspace'
input_one: float
register_table(TestModelFloat)
sync_schema()
data = 324.593998934
one = TestModelFloat(data)
insert([one])
for idx, entry in enumerate(select(TestModelFloat)):
assert entry.input_one == data
def test_boolean_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
from datetime import datetime
class TestModelBoolean(CORMBase):
__keyspace__ = 'mykeyspace'
item: str
created: datetime
value: bool
register_table(TestModelBoolean)
sync_schema()
one = TestModelBoolean('one', datetime.utcnow(), True)
two = TestModelBoolean('two', datetime.utcnow(), False)
insert([one, two])
def test_datetime_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
from datetime import datetime
class TestModelDatetime(CORMBase):
__keyspace__ = 'mykeyspace'
item: str
created: datetime
register_table(TestModelDatetime)
sync_schema()
one = TestModelDatetime('one', datetime.utcnow())
two = TestModelDatetime('two', datetime.utcnow())
insert([one, two])
def test_set_api():
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
from corm.annotations import Set
class TestModelSet(CORMBase):
__keyspace__ = 'mykeyspace'
something: str
other: Set
register_table(TestModelSet)
sync_schema()
one = TestModelSet('one', {'first'})
two = TestModelSet('two', {'last', 'second-to-last'})
three = TestModelSet('three', {'last', 'second-to-last', 'last'})
four = TestModelSet('four', ['one', 'two', 'three', 'four'])
insert([one, two, three, four])
def test_select_api():
import random
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
from corm.annotations import Set
from datetime import datetime
MAX_INT = 1000
class TestModelSelect(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
register_table(TestModelSelect)
sync_schema()
insert_later = []
values = []
for idx in range(0, 100):
values.append({
'random_number': random.randint(0, MAX_INT),
'created': datetime.utcnow()
})
entry = TestModelSelect(values[-1]['random_number'], values[-1]['created'])
insert_later.append(entry)
if len(insert_later) > 20:
insert(insert_later)
insert_later = []
insert(insert_later)
for idx, entry in enumerate(select(TestModelSelect, fetch_size=100)):
assert isinstance(entry, TestModelSelect)
# Order is not consistent
# assert entry.random_number == values[idx]['random_number']
# assert entry.created == values[idx]['created']
assert idx > 0
def test_select_where_api():
import random
from corm import register_table, insert, sync_schema, select, where
from corm.models import CORMBase
from datetime import datetime
MAX_INT = 99999
class TestModelSelectSource(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
one: str
two: str
class TestModelSelectPivot(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
one: str
two: str
source: TestModelSelectSource
# TODO: Build UserType integration
# register_table(TestModelSelectSource)
# register_table(TestModelSelectPivot)
def test_alter_table_api():
from corm import register_table, insert, sync_schema, select, obtain_session
from corm.models import CORMBase
from datetime import datetime
# Create Table or Delete Column on existing Table
class TestModelAlter(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
register_table(TestModelAlter)
sync_schema()
COL_CQL = f'''
SELECT
column_name, type
FROM
system_schema.columns
WHERE
table_name = '{TestModelAlter._corm_details.table_name}'
AND
keyspace_name = '{TestModelAlter._corm_details.keyspace}'
'''
rows = [(row.column_name, row.type) for row in obtain_session('mykeyspace').execute(COL_CQL)]
assert len(rows) == 3
# Add Column on existing Table
class TestModelAlter(CORMBase):
__keyspace__ = 'mykeyspace'
random_number: int
created: datetime
new_column: str
register_table(TestModelAlter)
sync_schema()
rows = [(row.column_name, row.type) for row in obtain_session('mykeyspace').execute(COL_CQL)]
assert len(rows) == 4
def test_not_ordered_by_pk_field():
import random
from corm import register_table, insert, sync_schema, select, obtain_session
from corm.models import CORMBase
from datetime import datetime
class TestNotOrderedByPkField(CORMBase):
__keyspace__ = 'mykeyspace'
__primary_keys__ = ['one', 'two', 'three']
random_number: int
created: datetime
one: str
two: str
three: str
register_table(TestNotOrderedByPkField)
sync_schema()
first_entry = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'beta')
gamma = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'gamma')
delta = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'delta')
second_entry = TestNotOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'alpha')
insert([first_entry, gamma, delta, second_entry])
for idx, entry in enumerate(select(TestNotOrderedByPkField)):
if idx == 0:
assert entry.three != 'alpha'
def test_ordered_by_pk_field():
import random
from corm import register_table, insert, sync_schema, select, obtain_session
from corm.models import CORMBase
from corm.datatypes import TableOrdering
from datetime import datetime
class TestOrderedByPkField(CORMBase):
__keyspace__ = 'mykeyspace'
__primary_keys__ = ['one', 'two', 'three']
__ordered_by_primary_keys__ = TableOrdering.DESC
random_number: int
created: datetime
one: str
two: str
three: str
register_table(TestOrderedByPkField)
sync_schema()
first_entry = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'beta')
second_entry = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'alpha')
gamma = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'gamma')
delta = TestOrderedByPkField(random.randint(0, 99999), datetime.utcnow(), 'one', 'one', 'delta')
insert([first_entry, second_entry, delta, gamma])
for idx, entry in enumerate(select(TestOrderedByPkField)):
if idx == 0:
assert entry.three == 'alpha'
elif idx == 1:
assert entry.three == 'beta'
elif idx == 2:
assert entry.three == 'delta'
elif idx == 3:
assert entry.three == 'gamma'
def test_corm_auth():
import os
os.environ['CLUSTER_PORT'] = '9043'
os.environ['CLUSTER_USERNAME'] = 'cassandra'
os.environ['CLUSTER_PASSWORD'] = '<PASSWORD>'
from corm import register_table, insert, sync_schema
from corm.models import CORMBase
class TestCORMAuth(CORMBase):
one: str
__keyspace__ = 'test_corm_auth'
register_table(TestCORMAuth)
sync_schema()
def test_corm_enum():
import enum
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
class OptionList(enum.Enum):
One = 'one'
Two = 'two'
class TestCormEnum(CORMBase):
__keyspace__ = 'test_corm_enum'
option: OptionList
register_table(TestCormEnum)
sync_schema()
first = TestCormEnum(OptionList.One)
second = TestCormEnum(OptionList.Two)
insert([first, second])
for idx, entry in enumerate(select(TestCormEnum)):
assert entry.option in OptionList.__members__.values()
def test_corm_where():
import enum
from corm import register_table, insert, sync_schema, select, where, cp, Operator
from corm.models import CORMBase
class OptionList(enum.Enum):
One = 'one'
Two = 'two'
class TestCORMWhere(CORMBase):
__keyspace__ = 'test_corm_where'
option: OptionList
score: int
register_table(TestCORMWhere)
sync_schema()
one = TestCORMWhere(OptionList.One, 1)
two = TestCORMWhere(OptionList.One, 2)
three = TestCORMWhere(OptionList.Two, 3)
four = TestCORMWhere(OptionList.Two, 4)
insert([one, two, three, four])
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'score', 4)])):
assert idx == 0
assert entry.score == 4
assert entry.option == OptionList.Two
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'score', 1)])):
assert idx == 0
assert entry.score == 1
assert entry.option == OptionList.One
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'option', OptionList.One)])):
assert idx in [0, 1]
assert entry.score in [1, 2]
assert entry.option == OptionList.One
for idx, entry in enumerate(where(TestCORMWhere, [cp(Operator.Equal, 'option', OptionList.Two)])):
assert idx in [0, 1]
assert entry.score in [3, 4]
assert entry.option == OptionList.Two
def test_corm_uuid():
import uuid
from corm import register_table, insert, sync_schema, select
from corm.models import CORMBase
class TestCORMUUID(CORMBase):
__keyspace__ = 'mykeyspace'
identity_test: uuid.UUID
register_table(TestCORMUUID)
sync_schema()
one = TestCORMUUID(uuid.uuid4())
insert([one])
for entry in select(TestCORMUUID):
assert isinstance(entry.identity_test, uuid.UUID)
| [
"corm.register_table",
"corm.select",
"corm.keyspace_exists",
"datetime.datetime.utcnow",
"corm.annihilate_keyspace_tables",
"corm.sync_schema",
"corm.insert",
"corm.keyspace_destroy",
"uuid.uuid4",
"corm.cp",
"corm.obtain_session",
"pytest.fixture",
"corm.SESSIONS.copy",
"random.randint",
"corm.keyspace_create"
]
| [((36, 82), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""', 'autouse': '(True)'}), "(scope='function', autouse=True)\n", (50, 82), False, 'import pytest\n'), ((735, 760), 'corm.register_table', 'register_table', (['TestModel'], {}), '(TestModel)\n', (749, 760), False, 'from corm import register_table, insert, sync_schema, select\n'), ((765, 778), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (776, 778), False, 'from corm import register_table, insert, sync_schema, select\n'), ((889, 914), 'corm.insert', 'insert', (['[one, two, three]'], {}), '([one, two, three])\n', (895, 914), False, 'from corm import register_table, insert, sync_schema, select\n'), ((1430, 1494), 'corm.keyspace_create', 'keyspace_create', (['keyspace_name', 'CassandraKeyspaceStrategy.Simple'], {}), '(keyspace_name, CassandraKeyspaceStrategy.Simple)\n', (1445, 1494), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((1549, 1580), 'corm.keyspace_destroy', 'keyspace_destroy', (['keyspace_name'], {}), '(keyspace_name)\n', (1565, 1580), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((1733, 1766), 'corm.register_table', 'register_table', (['TestModelKeyspace'], {}), '(TestModelKeyspace)\n', (1747, 1766), False, 'from corm import register_table, insert, sync_schema, select\n'), ((1822, 1835), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (1833, 1835), False, 'from corm import register_table, insert, sync_schema, select\n'), ((1925, 1938), 'corm.insert', 'insert', (['[one]'], {}), '([one])\n', (1931, 1938), False, 'from corm import register_table, insert, sync_schema, select\n'), ((1943, 1974), 'corm.keyspace_destroy', 'keyspace_destroy', (['keyspace_name'], {}), '(keyspace_name)\n', (1959, 1974), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((2255, 2285), 'corm.register_table', 'register_table', (['TestModelFloat'], {}), '(TestModelFloat)\n', (2269, 2285), False, 'from corm import register_table, insert, sync_schema, select\n'), ((2290, 2303), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (2301, 2303), False, 'from corm import register_table, insert, sync_schema, select\n'), ((2364, 2377), 'corm.insert', 'insert', (['[one]'], {}), '([one])\n', (2370, 2377), False, 'from corm import register_table, insert, sync_schema, select\n'), ((2773, 2805), 'corm.register_table', 'register_table', (['TestModelBoolean'], {}), '(TestModelBoolean)\n', (2787, 2805), False, 'from corm import register_table, insert, sync_schema, select\n'), ((2810, 2823), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (2821, 2823), False, 'from corm import register_table, insert, sync_schema, select\n'), ((2947, 2965), 'corm.insert', 'insert', (['[one, two]'], {}), '([one, two])\n', (2953, 2965), False, 'from corm import register_table, insert, sync_schema, select\n'), ((3247, 3280), 'corm.register_table', 'register_table', (['TestModelDatetime'], {}), '(TestModelDatetime)\n', (3261, 3280), False, 'from corm import register_table, insert, sync_schema, select\n'), ((3285, 3298), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (3296, 3298), False, 'from corm import register_table, insert, sync_schema, select\n'), ((3411, 3429), 'corm.insert', 'insert', (['[one, two]'], {}), '([one, two])\n', (3417, 3429), False, 'from corm import register_table, insert, sync_schema, select\n'), ((3701, 3729), 'corm.register_table', 'register_table', (['TestModelSet'], {}), '(TestModelSet)\n', (3715, 3729), False, 'from corm import register_table, insert, sync_schema, select\n'), ((3734, 3747), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (3745, 3747), False, 'from corm import register_table, insert, sync_schema, select\n'), ((3986, 4017), 'corm.insert', 'insert', (['[one, two, three, four]'], {}), '([one, two, three, four])\n', (3992, 4017), False, 'from corm import register_table, insert, sync_schema, select\n'), ((4385, 4416), 'corm.register_table', 'register_table', (['TestModelSelect'], {}), '(TestModelSelect)\n', (4399, 4416), False, 'from corm import register_table, insert, sync_schema, select\n'), ((4421, 4434), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (4432, 4434), False, 'from corm import register_table, insert, sync_schema, select\n'), ((4858, 4878), 'corm.insert', 'insert', (['insert_later'], {}), '(insert_later)\n', (4864, 4878), False, 'from corm import register_table, insert, sync_schema, select\n'), ((6262, 6292), 'corm.register_table', 'register_table', (['TestModelAlter'], {}), '(TestModelAlter)\n', (6276, 6292), False, 'from corm import register_table, insert, sync_schema, select\n'), ((6297, 6310), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (6308, 6310), False, 'from corm import register_table, insert, sync_schema, select\n'), ((6843, 6873), 'corm.register_table', 'register_table', (['TestModelAlter'], {}), '(TestModelAlter)\n', (6857, 6873), False, 'from corm import register_table, insert, sync_schema, select\n'), ((6878, 6891), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (6889, 6891), False, 'from corm import register_table, insert, sync_schema, select\n'), ((7470, 7509), 'corm.register_table', 'register_table', (['TestNotOrderedByPkField'], {}), '(TestNotOrderedByPkField)\n', (7484, 7509), False, 'from corm import register_table, insert, sync_schema, select\n'), ((7514, 7527), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (7525, 7527), False, 'from corm import register_table, insert, sync_schema, select\n'), ((7961, 8010), 'corm.insert', 'insert', (['[first_entry, gamma, delta, second_entry]'], {}), '([first_entry, gamma, delta, second_entry])\n', (7967, 8010), False, 'from corm import register_table, insert, sync_schema, select\n'), ((8688, 8724), 'corm.register_table', 'register_table', (['TestOrderedByPkField'], {}), '(TestOrderedByPkField)\n', (8702, 8724), False, 'from corm import register_table, insert, sync_schema, select\n'), ((8729, 8742), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (8740, 8742), False, 'from corm import register_table, insert, sync_schema, select\n'), ((9164, 9213), 'corm.insert', 'insert', (['[first_entry, second_entry, delta, gamma]'], {}), '([first_entry, second_entry, delta, gamma])\n', (9170, 9213), False, 'from corm import register_table, insert, sync_schema, select\n'), ((9905, 9933), 'corm.register_table', 'register_table', (['TestCORMAuth'], {}), '(TestCORMAuth)\n', (9919, 9933), False, 'from corm import register_table, insert, sync_schema, select\n'), ((9938, 9951), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (9949, 9951), False, 'from corm import register_table, insert, sync_schema, select\n'), ((10276, 10304), 'corm.register_table', 'register_table', (['TestCormEnum'], {}), '(TestCormEnum)\n', (10290, 10304), False, 'from corm import register_table, insert, sync_schema, select\n'), ((10309, 10322), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (10320, 10322), False, 'from corm import register_table, insert, sync_schema, select\n'), ((10411, 10434), 'corm.insert', 'insert', (['[first, second]'], {}), '([first, second])\n', (10417, 10434), False, 'from corm import register_table, insert, sync_schema, select\n'), ((10922, 10951), 'corm.register_table', 'register_table', (['TestCORMWhere'], {}), '(TestCORMWhere)\n', (10936, 10951), False, 'from corm import register_table, insert, sync_schema, select\n'), ((10956, 10969), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (10967, 10969), False, 'from corm import register_table, insert, sync_schema, select\n'), ((11149, 11180), 'corm.insert', 'insert', (['[one, two, three, four]'], {}), '([one, two, three, four])\n', (11155, 11180), False, 'from corm import register_table, insert, sync_schema, select\n'), ((12252, 12280), 'corm.register_table', 'register_table', (['TestCORMUUID'], {}), '(TestCORMUUID)\n', (12266, 12280), False, 'from corm import register_table, insert, sync_schema, select\n'), ((12285, 12298), 'corm.sync_schema', 'sync_schema', ([], {}), '()\n', (12296, 12298), False, 'from corm import register_table, insert, sync_schema, select\n'), ((12340, 12353), 'corm.insert', 'insert', (['[one]'], {}), '([one])\n', (12346, 12353), False, 'from corm import register_table, insert, sync_schema, select\n'), ((12371, 12391), 'corm.select', 'select', (['TestCORMUUID'], {}), '(TestCORMUUID)\n', (12377, 12391), False, 'from corm import register_table, insert, sync_schema, select\n'), ((202, 242), 'corm.annihilate_keyspace_tables', 'annihilate_keyspace_tables', (['"""mykeyspace"""'], {}), "('mykeyspace')\n", (228, 242), False, 'from corm import annihilate_keyspace_tables, SESSIONS\n'), ((1386, 1416), 'corm.keyspace_exists', 'keyspace_exists', (['keyspace_name'], {}), '(keyspace_name)\n', (1401, 1416), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((1506, 1536), 'corm.keyspace_exists', 'keyspace_exists', (['keyspace_name'], {}), '(keyspace_name)\n', (1521, 1536), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((1592, 1622), 'corm.keyspace_exists', 'keyspace_exists', (['keyspace_name'], {}), '(keyspace_name)\n', (1607, 1622), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((1778, 1808), 'corm.keyspace_exists', 'keyspace_exists', (['keyspace_name'], {}), '(keyspace_name)\n', (1793, 1808), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((1847, 1877), 'corm.keyspace_exists', 'keyspace_exists', (['keyspace_name'], {}), '(keyspace_name)\n', (1862, 1877), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((1986, 2016), 'corm.keyspace_exists', 'keyspace_exists', (['keyspace_name'], {}), '(keyspace_name)\n', (2001, 2016), False, 'from corm import register_table, insert, sync_schema, keyspace_exists, keyspace_destroy, keyspace_create\n'), ((2410, 2432), 'corm.select', 'select', (['TestModelFloat'], {}), '(TestModelFloat)\n', (2416, 2432), False, 'from corm import register_table, insert, sync_schema, select\n'), ((2858, 2875), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2873, 2875), False, 'from datetime import datetime\n'), ((2917, 2934), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2932, 2934), False, 'from datetime import datetime\n'), ((3334, 3351), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3349, 3351), False, 'from datetime import datetime\n'), ((3388, 3405), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3403, 3405), False, 'from datetime import datetime\n'), ((4911, 4950), 'corm.select', 'select', (['TestModelSelect'], {'fetch_size': '(100)'}), '(TestModelSelect, fetch_size=100)\n', (4917, 4950), False, 'from corm import register_table, insert, sync_schema, select\n'), ((7571, 7595), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (7585, 7595), False, 'import random\n'), ((7597, 7614), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (7612, 7614), False, 'from datetime import datetime\n'), ((7674, 7698), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (7688, 7698), False, 'import random\n'), ((7700, 7717), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (7715, 7717), False, 'from datetime import datetime\n'), ((7778, 7802), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (7792, 7802), False, 'import random\n'), ((7804, 7821), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (7819, 7821), False, 'from datetime import datetime\n'), ((7889, 7913), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (7903, 7913), False, 'import random\n'), ((7915, 7932), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (7930, 7932), False, 'from datetime import datetime\n'), ((8043, 8074), 'corm.select', 'select', (['TestNotOrderedByPkField'], {}), '(TestNotOrderedByPkField)\n', (8049, 8074), False, 'from corm import register_table, insert, sync_schema, select\n'), ((8783, 8807), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (8797, 8807), False, 'import random\n'), ((8809, 8826), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (8824, 8826), False, 'from datetime import datetime\n'), ((8890, 8914), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (8904, 8914), False, 'import random\n'), ((8916, 8933), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (8931, 8933), False, 'from datetime import datetime\n'), ((8991, 9015), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (9005, 9015), False, 'import random\n'), ((9017, 9034), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9032, 9034), False, 'from datetime import datetime\n'), ((9092, 9116), 'random.randint', 'random.randint', (['(0)', '(99999)'], {}), '(0, 99999)\n', (9106, 9116), False, 'import random\n'), ((9118, 9135), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9133, 9135), False, 'from datetime import datetime\n'), ((9246, 9274), 'corm.select', 'select', (['TestOrderedByPkField'], {}), '(TestOrderedByPkField)\n', (9252, 9274), False, 'from corm import register_table, insert, sync_schema, select\n'), ((10468, 10488), 'corm.select', 'select', (['TestCormEnum'], {}), '(TestCormEnum)\n', (10474, 10488), False, 'from corm import register_table, insert, sync_schema, select\n'), ((12322, 12334), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (12332, 12334), False, 'import uuid\n'), ((4802, 4822), 'corm.insert', 'insert', (['insert_later'], {}), '(insert_later)\n', (4808, 4822), False, 'from corm import register_table, insert, sync_schema, select\n'), ((281, 296), 'corm.SESSIONS.copy', 'SESSIONS.copy', ([], {}), '()\n', (294, 296), False, 'from corm import annihilate_keyspace_tables, SESSIONS\n'), ((4556, 4582), 'random.randint', 'random.randint', (['(0)', 'MAX_INT'], {}), '(0, MAX_INT)\n', (4570, 4582), False, 'import random\n'), ((4607, 4624), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4622, 4624), False, 'from datetime import datetime\n'), ((11236, 11266), 'corm.cp', 'cp', (['Operator.Equal', '"""score"""', '(4)'], {}), "(Operator.Equal, 'score', 4)\n", (11238, 11266), False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((11428, 11458), 'corm.cp', 'cp', (['Operator.Equal', '"""score"""', '(1)'], {}), "(Operator.Equal, 'score', 1)\n", (11430, 11458), False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((11621, 11665), 'corm.cp', 'cp', (['Operator.Equal', '"""option"""', 'OptionList.One'], {}), "(Operator.Equal, 'option', OptionList.One)\n", (11623, 11665), False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((11837, 11881), 'corm.cp', 'cp', (['Operator.Equal', '"""option"""', 'OptionList.Two'], {}), "(Operator.Equal, 'option', OptionList.Two)\n", (11839, 11881), False, 'from corm import register_table, insert, sync_schema, select, where, cp, Operator\n'), ((6579, 6607), 'corm.obtain_session', 'obtain_session', (['"""mykeyspace"""'], {}), "('mykeyspace')\n", (6593, 6607), False, 'from corm import register_table, insert, sync_schema, select, obtain_session\n'), ((6944, 6972), 'corm.obtain_session', 'obtain_session', (['"""mykeyspace"""'], {}), "('mykeyspace')\n", (6958, 6972), False, 'from corm import register_table, insert, sync_schema, select, obtain_session\n'), ((1288, 1300), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1298, 1300), False, 'import uuid\n')] |
import json
import os
from utilities.SaveLoadJson import SaveLoadJson as SLJ
from utilities.LineCount import LineCount as LC
import subprocess
from geolite2 import geolite2
class getData:
#Get Data Functions ------------------------------------------------------
@staticmethod
def getDATA():
result = {"requests":{},
"time":'',
"cpuload":'',
"uptime":'',
"temp":'',
"ip":''}
result["requests"]=getData.getRequests()
time = getData.getTime().split('\t')
result["time"] = time[0]
result["cpuload"]=time[1]
result["uptime"]=getData.getUptime()
result["temp"]=getData.getTemp()
result["ip"]=getData.getIP()
return json.dumps(result)
@staticmethod
def getRequests():
data = SLJ.load('dataStore.txt')
return {"totalRequests":str(data["totalRequests"]),
"totalQueries":str(data["totalQueries"]),
"totalAdjusts":str(data["totalAdjusts"])}
@staticmethod
def getTime():
proc = subprocess.Popen(['uptime'],stdout=subprocess.PIPE, shell=False)
(out, err) = proc.communicate()
return (str(out)[1:9] + '\t' +
str(float(str(out).split(',')[4])*100)+'%')
@staticmethod
def getUptime():
proc = subprocess.Popen(['uptime', '-p'],stdout=subprocess.PIPE, shell=False)
(out, err) = proc.communicate()
return str(out)
@staticmethod
def getTemp():
proc = subprocess.Popen(['vcgencmd', 'measure_temp'],stdout=subprocess.PIPE, shell=False)
(out,err) = proc.communicate()
return str(out)[5:-1]
@staticmethod
def getIP():
proc = subprocess.Popen(['hostname', '-I'],stdout=subprocess.PIPE, shell=False)
(out, err) = proc.communicate()
return str(out)
#Get Access Functions ---------------------------------------------------
@staticmethod
def getAccess():
result={"Countries":dict(),
"CountrySrs":dict(),
"devices":dict(),
"mostRecentSearch":'',
"mostRecentAcc":'',
"mostRecentIP":'',
"recentSearches":[],
"Users":0}
lastNum = 200
total=0
mostRecentIP = ''
mostRecentAcc = ''
mostRecentSearch = ''
Cname='Unknown'
Sname='Unknown'
Ctyname='Unknown'
ips=dict()
logFile = 'utilities/access.log'
newFile='utilities/new.log'
#f = open(newFile, 'w')
with open(logFile, 'r') as lf:
for temp in lf:
line = temp.split(';')
if len(line) > 1:
if line[2] == '200':
if 'GET /find' in line[3]:
#f.write(temp)
mostRecentIP=line[0]
mostRecentAcc=line[1]
reader = geolite2.reader()
loc = reader.get(line[0])
Cname = loc['country']['names']['en']
if 'subdivisions' in loc:
Sname = loc['subdivisions'][0]['names']['en']
else:
Sname='Unknown'
if 'city' in loc:
Ctyname = loc['city']['names']['en']
else:
Ctyname='Unknown'
if Cname not in result["Countries"]:
result["Countries"][Cname]=dict()
result["CountrySrs"][Cname]=0
if Sname not in result["Countries"][Cname]:
result["Countries"][Cname][Sname]=dict()
if Ctyname not in result["Countries"][Cname][Sname]:
result["Countries"][Cname][Sname][Ctyname] = []
result["CountrySrs"][Cname]+=1
total+=1
search = (line[3].split(' ')[1][6:]).replace('%20',' ')
mostRecentSearch=search
if search not in result["Countries"][Cname][Sname][Ctyname]:
result["Countries"][Cname][Sname][Ctyname].append(search)
if len(result["Countries"][Cname][Sname][Ctyname]) >= lastNum:
result["Countries"][Cname][Sname][Ctyname].pop(0)
if search not in result["recentSearches"]:
result["recentSearches"].insert(0,search)
if len(result["recentSearches"]) >= lastNum:
result["recentSearches"].pop(-1)
ips[line[0]]=1
device=(line[4].split('('))
if len(device)>1:
device=device[1]
else:
device="Unknown"
if device not in result["devices"]:
result["devices"][device]=0
result["devices"][device]+=1
#f.close()
#Most recent stuff
result["mostRecentIP"]=mostRecentIP
result["mostRecentAcc"]=mostRecentAcc
result["mostRecentSearch"]=mostRecentSearch
result["mostRecentLoc"]=str(Ctyname+', '+Sname+', '+Cname)
#Unique Users
for key, value in ips.items():
result["Users"]+=1
#Device percents
for key, value in result["devices"].items():
percnt = (float(value)/float(total))*100
result["devices"][key]=format(percnt, '.2f')
#Country percents
for key, value in result["CountrySrs"].items():
percnt = (float(value)/float(total))*100
result["CountrySrs"][key]=format(percnt,'.2f')
#os.system("sudo mv -f "+newFile+" "+logFile)
return json.dumps(result)
| [
"utilities.SaveLoadJson.SaveLoadJson.load",
"subprocess.Popen",
"json.dumps",
"geolite2.geolite2.reader"
]
| [((789, 807), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (799, 807), False, 'import json\n'), ((865, 890), 'utilities.SaveLoadJson.SaveLoadJson.load', 'SLJ.load', (['"""dataStore.txt"""'], {}), "('dataStore.txt')\n", (873, 890), True, 'from utilities.SaveLoadJson import SaveLoadJson as SLJ\n'), ((1124, 1189), 'subprocess.Popen', 'subprocess.Popen', (["['uptime']"], {'stdout': 'subprocess.PIPE', 'shell': '(False)'}), "(['uptime'], stdout=subprocess.PIPE, shell=False)\n", (1140, 1189), False, 'import subprocess\n'), ((1383, 1454), 'subprocess.Popen', 'subprocess.Popen', (["['uptime', '-p']"], {'stdout': 'subprocess.PIPE', 'shell': '(False)'}), "(['uptime', '-p'], stdout=subprocess.PIPE, shell=False)\n", (1399, 1454), False, 'import subprocess\n'), ((1571, 1658), 'subprocess.Popen', 'subprocess.Popen', (["['vcgencmd', 'measure_temp']"], {'stdout': 'subprocess.PIPE', 'shell': '(False)'}), "(['vcgencmd', 'measure_temp'], stdout=subprocess.PIPE,\n shell=False)\n", (1587, 1658), False, 'import subprocess\n'), ((1774, 1847), 'subprocess.Popen', 'subprocess.Popen', (["['hostname', '-I']"], {'stdout': 'subprocess.PIPE', 'shell': '(False)'}), "(['hostname', '-I'], stdout=subprocess.PIPE, shell=False)\n", (1790, 1847), False, 'import subprocess\n'), ((6318, 6336), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (6328, 6336), False, 'import json\n'), ((3078, 3095), 'geolite2.geolite2.reader', 'geolite2.reader', ([], {}), '()\n', (3093, 3095), False, 'from geolite2 import geolite2\n')] |
# All credit to https://stackoverflow.com/questions/46571448/tkinter-and-a-html-file - thanks DELICA - https://stackoverflow.com/users/7027346/delica
from cefpython3 import cefpython as cef
import ctypes
try:
import tkinter as tk
from tkinter import messagebox
except ImportError:
import Tkinter as tk
import sys
import platform
import logging as _logging
# Fix for PyCharm hints warnings
WindowUtils = cef.WindowUtils()
# Platforms
WINDOWS = (platform.system() == "Windows")
LINUX = (platform.system() == "Linux")
MAC = (platform.system() == "Darwin")
# Globals
logger = _logging.getLogger("tkinter_.py")
url = "localhost:8050/"
class MainFrame(tk.Frame):
def __init__(self, root):
self.closing = False
self.browser = None
# Root
root.geometry("900x640")
tk.Grid.rowconfigure(root, 0, weight=1)
tk.Grid.columnconfigure(root, 0, weight=1)
# MainFrame
tk.Frame.__init__(self, root)
self.master.title('SimBA Dashboard')
self.master.protocol("WM_DELETE_WINDOW", self.on_close)
self.bind("<Configure>", self.on_configure)
self.bind("<FocusIn>", self.on_focus_in)
self.bind("<FocusOut>", self.on_focus_out)
self.focus_set()
# Pack MainFrame
self.pack(fill=tk.BOTH, expand=tk.YES)
def embed_browser(self):
window_info = cef.WindowInfo()
rect = [0, 0, self.winfo_width(), self.winfo_height()]
window_info.SetAsChild(self.get_window_handle(), rect)
self.browser = cef.CreateBrowserSync(window_info,
url=url) #todo
assert self.browser
self.browser.SetClientHandler(LoadHandler(self))
self.browser.SetClientHandler(FocusHandler(self))
self.message_loop_work()
def get_window_handle(self):
if self.winfo_id() > 0:
return self.winfo_id()
else:
raise Exception("Couldn't obtain window handle")
def message_loop_work(self):
cef.MessageLoopWork()
self.after(10, self.message_loop_work)
def on_configure(self, event):
width = event.width
height = event.height
if self.browser:
if WINDOWS:
ctypes.windll.user32.SetWindowPos(
self.browser.GetWindowHandle(), 0,
0, 0, width, height, 0x0002)
elif LINUX:
self.browser.SetBounds(0, 0, width, height)
self.browser.NotifyMoveOrResizeStarted()
if not self.browser:
self.embed_browser()
def on_focus_in(self, _):
logger.debug("BrowserFrame.on_focus_in")
if self.browser:
self.browser.SetFocus(True)
self.focus_set()
def on_focus_out(self, _):
logger.debug("BrowserFrame.on_focus_out")
if self.browser:
self.browser.SetFocus(False)
def on_close(self):
if self.browser:
self.browser.CloseBrowser(True)
self.clear_browser_references()
self.destroy()
self.master.destroy()
def get_browser(self):
if self.browser:
return self.browser
return None
def clear_browser_references(self):
self.browser = None
class LoadHandler(object):
def __init__(self, browser_frame):
self.browser_frame = browser_frame
class FocusHandler(object):
def __init__(self, browser):
self.browser = browser
def OnTakeFocus(self, next_component, **_):
logger.debug("FocusHandler.OnTakeFocus, next={next}"
.format(next=next_component))
def OnSetFocus(self, source, **_):
logger.debug("FocusHandler.OnSetFocus, source={source}"
.format(source=source))
return False
def OnGotFocus(self, **_):
"""Fix CEF focus issues (#255). Call browser frame's focus_set
to get rid of type cursor in url entry widget."""
logger.debug("FocusHandler.OnGotFocus")
self.browser.focus_set()
# if __name__ == '__main__':
logger.setLevel(_logging.INFO)
stream_handler = _logging.StreamHandler()
formatter = _logging.Formatter("[%(filename)s] %(message)s")
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
logger.info("CEF Python {ver}".format(ver=cef.__version__))
logger.info("Python {ver} {arch}".format(
ver=platform.python_version(), arch=platform.architecture()[0]))
logger.info("Tk {ver}".format(ver=tk.Tcl().eval('info patchlevel')))
assert cef.__version__ >= "55.3", "CEF Python v55.3+ required to run this"
sys.excepthook = cef.ExceptHook # To shutdown all CEF processes on error
root = tk.Tk()
app = MainFrame(root)
def on_closing():
if messagebox.askokcancel("Quit", "Do you want to quit?"):
root.destroy()
root.protocol("WM_DELETE_WINDOW", on_closing)
# Tk must be initialized before CEF otherwise fatal error (Issue #306)
cef.Initialize()
root.mainloop()
# app.mainloop()
cef.Shutdown()
| [
"logging.getLogger",
"Tkinter.Grid.rowconfigure",
"logging.StreamHandler",
"tkinter.messagebox.askokcancel",
"Tkinter.Frame.__init__",
"logging.Formatter",
"Tkinter.Tk",
"Tkinter.Tcl",
"cefpython3.cefpython.MessageLoopWork",
"cefpython3.cefpython.Initialize",
"platform.system",
"platform.architecture",
"Tkinter.Grid.columnconfigure",
"cefpython3.cefpython.CreateBrowserSync",
"cefpython3.cefpython.WindowUtils",
"cefpython3.cefpython.WindowInfo",
"platform.python_version",
"cefpython3.cefpython.Shutdown"
]
| [((418, 435), 'cefpython3.cefpython.WindowUtils', 'cef.WindowUtils', ([], {}), '()\n', (433, 435), True, 'from cefpython3 import cefpython as cef\n'), ((589, 622), 'logging.getLogger', '_logging.getLogger', (['"""tkinter_.py"""'], {}), "('tkinter_.py')\n", (607, 622), True, 'import logging as _logging\n'), ((4152, 4176), 'logging.StreamHandler', '_logging.StreamHandler', ([], {}), '()\n', (4174, 4176), True, 'import logging as _logging\n'), ((4189, 4237), 'logging.Formatter', '_logging.Formatter', (['"""[%(filename)s] %(message)s"""'], {}), "('[%(filename)s] %(message)s')\n", (4207, 4237), True, 'import logging as _logging\n'), ((4711, 4718), 'Tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (4716, 4718), True, 'import Tkinter as tk\n'), ((4964, 4980), 'cefpython3.cefpython.Initialize', 'cef.Initialize', ([], {}), '()\n', (4978, 4980), True, 'from cefpython3 import cefpython as cef\n'), ((5014, 5028), 'cefpython3.cefpython.Shutdown', 'cef.Shutdown', ([], {}), '()\n', (5026, 5028), True, 'from cefpython3 import cefpython as cef\n'), ((460, 477), 'platform.system', 'platform.system', ([], {}), '()\n', (475, 477), False, 'import platform\n'), ((501, 518), 'platform.system', 'platform.system', ([], {}), '()\n', (516, 518), False, 'import platform\n'), ((538, 555), 'platform.system', 'platform.system', ([], {}), '()\n', (553, 555), False, 'import platform\n'), ((4766, 4820), 'tkinter.messagebox.askokcancel', 'messagebox.askokcancel', (['"""Quit"""', '"""Do you want to quit?"""'], {}), "('Quit', 'Do you want to quit?')\n", (4788, 4820), False, 'from tkinter import messagebox\n'), ((820, 859), 'Tkinter.Grid.rowconfigure', 'tk.Grid.rowconfigure', (['root', '(0)'], {'weight': '(1)'}), '(root, 0, weight=1)\n', (840, 859), True, 'import Tkinter as tk\n'), ((868, 910), 'Tkinter.Grid.columnconfigure', 'tk.Grid.columnconfigure', (['root', '(0)'], {'weight': '(1)'}), '(root, 0, weight=1)\n', (891, 910), True, 'import Tkinter as tk\n'), ((940, 969), 'Tkinter.Frame.__init__', 'tk.Frame.__init__', (['self', 'root'], {}), '(self, root)\n', (957, 969), True, 'import Tkinter as tk\n'), ((1381, 1397), 'cefpython3.cefpython.WindowInfo', 'cef.WindowInfo', ([], {}), '()\n', (1395, 1397), True, 'from cefpython3 import cefpython as cef\n'), ((1547, 1590), 'cefpython3.cefpython.CreateBrowserSync', 'cef.CreateBrowserSync', (['window_info'], {'url': 'url'}), '(window_info, url=url)\n', (1568, 1590), True, 'from cefpython3 import cefpython as cef\n'), ((2036, 2057), 'cefpython3.cefpython.MessageLoopWork', 'cef.MessageLoopWork', ([], {}), '()\n', (2055, 2057), True, 'from cefpython3 import cefpython as cef\n'), ((4425, 4450), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (4448, 4450), False, 'import platform\n'), ((4457, 4480), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (4478, 4480), False, 'import platform\n'), ((4520, 4528), 'Tkinter.Tcl', 'tk.Tcl', ([], {}), '()\n', (4526, 4528), True, 'import Tkinter as tk\n')] |
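The script above wires cefpython3 into Tkinter; the part that is easy to get wrong is the lifecycle order (create Tk first, then cef.Initialize, pump MessageLoopWork periodically, and cef.Shutdown only after the Tk mainloop ends). A minimal sketch of just that lifecycle, with the browser embedding left out, assuming the same cefpython3 and tkinter APIs:

# Minimal lifecycle sketch for the cefpython3 + Tkinter pattern used above.
# Only the initialization order and the periodic message-loop pump are shown.
import sys
import tkinter as tk
from cefpython3 import cefpython as cef

def pump(root):
    cef.MessageLoopWork()           # let CEF process pending work
    root.after(10, pump, root)      # re-schedule roughly every 10 ms

sys.excepthook = cef.ExceptHook      # shut down all CEF processes on error
root = tk.Tk()                       # Tk must exist before cef.Initialize()
cef.Initialize()
pump(root)
root.mainloop()
cef.Shutdown()                        # always after the Tk loop has exited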
#!/usr/bin/env python3
# encoding: utf-8
import sys
import urllib.parse
import selenium.webdriver
def exit():
driver.quit()
sys.exit(0)
driver = selenium.webdriver.Firefox()
# for some reason, detectportal.firefox.com and connectivitycheck.gstatic.com are not blocked
# therefore, they cannot be used to detect connectivity
# we instead visit another site that is known not to ever have TLS
driver.get('http://neverssl.com')
if 'neverssl.com' in urllib.parse.urlparse(driver.current_url).netloc:
exit()
driver.find_element_by_css_selector('label[for="promo_button"]').click()
driver.find_element_by_css_selector('input[alt="Next"]').click()
driver.find_element_by_css_selector('#PromotionCode').send_keys('lobby18')
driver.find_element_by_css_selector('input[alt="Connect"]').click()
exit()
| [
"sys.exit"
]
| [((129, 140), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (137, 140), False, 'import sys\n')] |
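The check above works because neverssl.com serves plain HTTP and a captive portal will redirect that request elsewhere. A small helper capturing the same check as a reusable function; the probe URL and expected host are the ones used above:

# Captive-portal probe: if the plain-HTTP site still answers under its own
# hostname, no portal is intercepting the traffic.
import urllib.parse

def behind_captive_portal(driver, probe_url='http://neverssl.com',
                          expected_host='neverssl.com'):
    driver.get(probe_url)
    host = urllib.parse.urlparse(driver.current_url).netloc
    return expected_host not in host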
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from . import helpers
# Create your views here.
@csrf_exempt
def convert_video(request, version):
# Get video
video = request.FILES['video']
# Transcribe video and extract audio
response = helpers.transcribe_file(video)
context = response
# return render(request, 'api/v1/result_successful.html', context)
return JsonResponse(context, safe=False) | [
"django.http.JsonResponse"
]
| [((532, 565), 'django.http.JsonResponse', 'JsonResponse', (['context'], {'safe': '(False)'}), '(context, safe=False)\n', (544, 565), False, 'from django.http import JsonResponse\n')] |
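The view above takes a version path argument, so the project needs a matching URLconf entry; a hypothetical urls.py wiring (the route string and module layout are assumptions, not taken from the project):

# Hypothetical urls.py for the convert_video view above.
from django.urls import path

from . import views

urlpatterns = [
    path('api/<str:version>/convert/', views.convert_video, name='convert_video'),
]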
from itertools import count
import numpy as np
class Particle(object):
"""Object containing all the properties for a single particle"""
_ids = count(0)
def __init__(self, main_data=None, x=np.zeros(2)):
self.id = next(self._ids)
self.main_data = main_data
self.x = np.array(x)
self.v = np.zeros(2)
self.a = np.zeros(2)
self.D = 0
self.rho = main_data.rho0
self.P = 0
self.m = main_data.dx ** 2 * main_data.rho0 # initial mass depends on the initial particle spacing
self.boundary = False # Particle by default is not on the boundary
# For predictor corrector
self.prev_x = np.array(x)
self.prev_v = np.zeros(2)
self.prev_rho = main_data.rho0
def calc_index(self):
"""Calculates the 2D integer index for the particle's location in the search grid"""
# Calculates the bucket coordinates
self.list_num = np.array((self.x - self.main_data.min_x) /
(2.0 * self.main_data.h), int)
def B(self):
return (self.main_data.rho0 * self.main_data.c0 ** 2) / self.main_data.gamma
def update_P(self):
"""
Equation of state
System is assumed slightly compressible
"""
rho0 = self.main_data.rho0
gamma = self.main_data.gamma
self.P = self.B() * ((self.rho / rho0)**gamma - 1)
def set_main_data(self, main_data):
self.main_data = main_data
def set_x(self, x):
self.x = x
self.calc_index()
def set_v(self, v):
self.v = v
def set_a(self, a):
self.a = a
def set_D(self, D):
self.D = D
def set_rho(self, rho):
self.rho = rho
self.update_P()
    def set_m(self, m):
        self.m = m
def list_attributes(self):
x_s = "position: " + str(self.x) + ", "
v_s = "velocity: " + str(self.v) + ", "
a_s = "acceleration: " + str(self.a) + ", "
D_s = "derivative of density: " + str(self.D) + ", "
rho_s = "density: " + str(self.rho) + ", "
m_s = "mass: " + str(self.m) + ", "
P_s = "pressure: " + str(self.P) + ", "
boundary_s = "is boundary: " + str(self.boundary)
return [x_s + v_s + a_s + D_s + rho_s + m_s + P_s + boundary_s]
| [
"numpy.array",
"numpy.zeros",
"itertools.count"
]
| [((154, 162), 'itertools.count', 'count', (['(0)'], {}), '(0)\n', (159, 162), False, 'from itertools import count\n'), ((205, 216), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (213, 216), True, 'import numpy as np\n'), ((305, 316), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (313, 316), True, 'import numpy as np\n'), ((334, 345), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (342, 345), True, 'import numpy as np\n'), ((363, 374), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (371, 374), True, 'import numpy as np\n'), ((687, 698), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (695, 698), True, 'import numpy as np\n'), ((721, 732), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (729, 732), True, 'import numpy as np\n'), ((960, 1033), 'numpy.array', 'np.array', (['((self.x - self.main_data.min_x) / (2.0 * self.main_data.h))', 'int'], {}), '((self.x - self.main_data.min_x) / (2.0 * self.main_data.h), int)\n', (968, 1033), True, 'import numpy as np\n')] |
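The Particle class above reads its constants (rho0, dx, h, min_x, gamma, c0) from a main_data object that is not shown; a usage sketch with a hypothetical stand-in for that object, where the numeric values are illustrative assumptions:

# Usage sketch for the Particle class defined above. SimMainData is a
# hypothetical stub carrying the simulation constants the class expects.
from dataclasses import dataclass, field

import numpy as np

@dataclass
class SimMainData:
    rho0: float = 1000.0              # rest density
    dx: float = 0.02                  # initial particle spacing
    h: float = 0.026                  # smoothing length
    gamma: float = 7.0                # Tait equation exponent
    c0: float = 20.0                  # artificial speed of sound
    min_x: np.ndarray = field(default_factory=lambda: np.zeros(2))

main = SimMainData()
p = Particle(main_data=main, x=np.array([0.1, 0.2]))
p.calc_index()                       # bucket coordinates in the search grid
p.set_rho(1010.0)                     # also updates pressure via the equation of state
print(p.list_attributes())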
from unittest import TestCase
from pyRdfa import pyRdfa
class NonXhtmlTest(TestCase):
"""
    RDFa that is not in well-formed XHTML is passed through html5lib.
These tests make sure that this RDFa can be processed both from
a file, and from a URL.
"""
target1 = '<og:isbn>9780596516499</og:isbn>'
target2 = '<gr:typeOfGood rdf:resource="urn:x-domain:oreilly.com:product:9780596803391.EBOOK"/>'
def test_url(self):
g = pyRdfa().rdf_from_source('http://oreilly.com/catalog/9780596516499/')
self.assert_(self.target1.encode('utf-8') in g)
def test_file(self):
g = pyRdfa().rdf_from_source('test/rdfa/oreilly.html')
self.assert_(self.target2.encode('utf-8') in g)
| [
"pyRdfa.pyRdfa"
]
| [((459, 467), 'pyRdfa.pyRdfa', 'pyRdfa', ([], {}), '()\n', (465, 467), False, 'from pyRdfa import pyRdfa\n'), ((623, 631), 'pyRdfa.pyRdfa', 'pyRdfa', ([], {}), '()\n', (629, 631), False, 'from pyRdfa import pyRdfa\n')] |
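The tests above exercise pyRdfa().rdf_from_source with both a URL and a local file; a minimal stand-alone sketch of the same call, which returns the serialized graph that the tests search for byte substrings:

# Minimal wrapper around the call exercised by the tests above.
from pyRdfa import pyRdfa

def extract_rdfa(source):
    """Return the RDF serialization extracted from a URL or a file path."""
    return pyRdfa().rdf_from_source(source)

# e.g. extract_rdfa('test/rdfa/oreilly.html')
# e.g. extract_rdfa('http://oreilly.com/catalog/9780596516499/')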
from des109 import moeda
preco = float(input('Enter the desired price: €'))
print(f'''Half of the price is {(moeda.metade(preco))}
Double the price is {(moeda.dobra(preco))}
Increasing the price by 10% gives {(moeda.aumentar(preco, 10))}
Decreasing the price by 13% gives {(moeda.aumentar(preco, 13))}''')
| [
"des109.moeda.metade",
"des109.moeda.dobra",
"des109.moeda.aumentar"
]
| [((113, 132), 'des109.moeda.metade', 'moeda.metade', (['preco'], {}), '(preco)\n', (125, 132), False, 'from des109 import moeda\n'), ((163, 181), 'des109.moeda.dobra', 'moeda.dobra', (['preco'], {}), '(preco)\n', (174, 181), False, 'from des109 import moeda\n'), ((214, 239), 'des109.moeda.aumentar', 'moeda.aumentar', (['preco', '(10)'], {}), '(preco, 10)\n', (228, 239), False, 'from des109 import moeda\n'), ((279, 304), 'des109.moeda.aumentar', 'moeda.aumentar', (['preco', '(13)'], {}), '(preco, 13)\n', (293, 304), False, 'from des109 import moeda\n')] |
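The des109.moeda module itself is not shown; a hypothetical implementation consistent with the three calls used above (function names taken from the calls, behaviour assumed):

# Hypothetical des109/moeda.py; the real module may differ, e.g. by also
# formatting the values as currency strings.
def metade(preco):
    return preco / 2

def dobra(preco):
    return preco * 2

def aumentar(preco, taxa):
    return preco + preco * taxa / 100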
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
__all__ = ['PartnerRegistrationArgs', 'PartnerRegistration']
@pulumi.input_type
class PartnerRegistrationArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
authorized_azure_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
customer_service_uri: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
logo_uri: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
partner_customer_service_extension: Optional[pulumi.Input[str]] = None,
partner_customer_service_number: Optional[pulumi.Input[str]] = None,
partner_name: Optional[pulumi.Input[str]] = None,
partner_registration_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_description: Optional[pulumi.Input[str]] = None,
partner_resource_type_display_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_name: Optional[pulumi.Input[str]] = None,
setup_uri: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
visibility_state: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]] = None):
"""
The set of arguments for constructing a PartnerRegistration resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription.
:param pulumi.Input[Sequence[pulumi.Input[str]]] authorized_azure_subscription_ids: List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
:param pulumi.Input[str] customer_service_uri: The extension of the customer service URI of the publisher.
:param pulumi.Input[str] location: Location of the resource.
:param pulumi.Input[str] logo_uri: URI of the logo.
:param pulumi.Input[str] long_description: Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
:param pulumi.Input[str] partner_customer_service_extension: The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
:param pulumi.Input[str] partner_customer_service_number: The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
:param pulumi.Input[str] partner_name: Official name of the partner name. For example: "Contoso".
:param pulumi.Input[str] partner_registration_name: Name of the partner registration.
:param pulumi.Input[str] partner_resource_type_description: Short description of the partner resource type. The length of this description should not exceed 256 characters.
:param pulumi.Input[str] partner_resource_type_display_name: Display name of the partner resource type.
:param pulumi.Input[str] partner_resource_type_name: Name of the partner resource type.
:param pulumi.Input[str] setup_uri: URI of the partner website that can be used by Azure customers to setup Event Grid
integration on an event source.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags of the resource.
:param pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']] visibility_state: Visibility state of the partner registration.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
if authorized_azure_subscription_ids is not None:
pulumi.set(__self__, "authorized_azure_subscription_ids", authorized_azure_subscription_ids)
if customer_service_uri is not None:
pulumi.set(__self__, "customer_service_uri", customer_service_uri)
if location is not None:
pulumi.set(__self__, "location", location)
if logo_uri is not None:
pulumi.set(__self__, "logo_uri", logo_uri)
if long_description is not None:
pulumi.set(__self__, "long_description", long_description)
if partner_customer_service_extension is not None:
pulumi.set(__self__, "partner_customer_service_extension", partner_customer_service_extension)
if partner_customer_service_number is not None:
pulumi.set(__self__, "partner_customer_service_number", partner_customer_service_number)
if partner_name is not None:
pulumi.set(__self__, "partner_name", partner_name)
if partner_registration_name is not None:
pulumi.set(__self__, "partner_registration_name", partner_registration_name)
if partner_resource_type_description is not None:
pulumi.set(__self__, "partner_resource_type_description", partner_resource_type_description)
if partner_resource_type_display_name is not None:
pulumi.set(__self__, "partner_resource_type_display_name", partner_resource_type_display_name)
if partner_resource_type_name is not None:
pulumi.set(__self__, "partner_resource_type_name", partner_resource_type_name)
if setup_uri is not None:
pulumi.set(__self__, "setup_uri", setup_uri)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if visibility_state is not None:
pulumi.set(__self__, "visibility_state", visibility_state)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group within the user's subscription.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="authorizedAzureSubscriptionIds")
def authorized_azure_subscription_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
"""
return pulumi.get(self, "authorized_azure_subscription_ids")
@authorized_azure_subscription_ids.setter
def authorized_azure_subscription_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "authorized_azure_subscription_ids", value)
@property
@pulumi.getter(name="customerServiceUri")
def customer_service_uri(self) -> Optional[pulumi.Input[str]]:
"""
The extension of the customer service URI of the publisher.
"""
return pulumi.get(self, "customer_service_uri")
@customer_service_uri.setter
def customer_service_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "customer_service_uri", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Location of the resource.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="logoUri")
def logo_uri(self) -> Optional[pulumi.Input[str]]:
"""
URI of the logo.
"""
return pulumi.get(self, "logo_uri")
@logo_uri.setter
def logo_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "logo_uri", value)
@property
@pulumi.getter(name="longDescription")
def long_description(self) -> Optional[pulumi.Input[str]]:
"""
Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
"""
return pulumi.get(self, "long_description")
@long_description.setter
def long_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "long_description", value)
@property
@pulumi.getter(name="partnerCustomerServiceExtension")
def partner_customer_service_extension(self) -> Optional[pulumi.Input[str]]:
"""
The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
"""
return pulumi.get(self, "partner_customer_service_extension")
@partner_customer_service_extension.setter
def partner_customer_service_extension(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_customer_service_extension", value)
@property
@pulumi.getter(name="partnerCustomerServiceNumber")
def partner_customer_service_number(self) -> Optional[pulumi.Input[str]]:
"""
The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
"""
return pulumi.get(self, "partner_customer_service_number")
@partner_customer_service_number.setter
def partner_customer_service_number(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_customer_service_number", value)
@property
@pulumi.getter(name="partnerName")
def partner_name(self) -> Optional[pulumi.Input[str]]:
"""
Official name of the partner name. For example: "Contoso".
"""
return pulumi.get(self, "partner_name")
@partner_name.setter
def partner_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_name", value)
@property
@pulumi.getter(name="partnerRegistrationName")
def partner_registration_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the partner registration.
"""
return pulumi.get(self, "partner_registration_name")
@partner_registration_name.setter
def partner_registration_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_registration_name", value)
@property
@pulumi.getter(name="partnerResourceTypeDescription")
def partner_resource_type_description(self) -> Optional[pulumi.Input[str]]:
"""
Short description of the partner resource type. The length of this description should not exceed 256 characters.
"""
return pulumi.get(self, "partner_resource_type_description")
@partner_resource_type_description.setter
def partner_resource_type_description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_resource_type_description", value)
@property
@pulumi.getter(name="partnerResourceTypeDisplayName")
def partner_resource_type_display_name(self) -> Optional[pulumi.Input[str]]:
"""
Display name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_display_name")
@partner_resource_type_display_name.setter
def partner_resource_type_display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_resource_type_display_name", value)
@property
@pulumi.getter(name="partnerResourceTypeName")
def partner_resource_type_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_name")
@partner_resource_type_name.setter
def partner_resource_type_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "partner_resource_type_name", value)
@property
@pulumi.getter(name="setupUri")
def setup_uri(self) -> Optional[pulumi.Input[str]]:
"""
URI of the partner website that can be used by Azure customers to setup Event Grid
integration on an event source.
"""
return pulumi.get(self, "setup_uri")
@setup_uri.setter
def setup_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "setup_uri", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Tags of the resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="visibilityState")
def visibility_state(self) -> Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]]:
"""
Visibility state of the partner registration.
"""
return pulumi.get(self, "visibility_state")
@visibility_state.setter
def visibility_state(self, value: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]]):
pulumi.set(self, "visibility_state", value)
class PartnerRegistration(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
authorized_azure_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
customer_service_uri: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
logo_uri: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
partner_customer_service_extension: Optional[pulumi.Input[str]] = None,
partner_customer_service_number: Optional[pulumi.Input[str]] = None,
partner_name: Optional[pulumi.Input[str]] = None,
partner_registration_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_description: Optional[pulumi.Input[str]] = None,
partner_resource_type_display_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
setup_uri: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
visibility_state: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]] = None,
__props__=None):
"""
Information about a partner registration.
API Version: 2020-04-01-preview.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] authorized_azure_subscription_ids: List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
:param pulumi.Input[str] customer_service_uri: The extension of the customer service URI of the publisher.
:param pulumi.Input[str] location: Location of the resource.
:param pulumi.Input[str] logo_uri: URI of the logo.
:param pulumi.Input[str] long_description: Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
:param pulumi.Input[str] partner_customer_service_extension: The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
:param pulumi.Input[str] partner_customer_service_number: The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
:param pulumi.Input[str] partner_name: Official name of the partner name. For example: "Contoso".
:param pulumi.Input[str] partner_registration_name: Name of the partner registration.
:param pulumi.Input[str] partner_resource_type_description: Short description of the partner resource type. The length of this description should not exceed 256 characters.
:param pulumi.Input[str] partner_resource_type_display_name: Display name of the partner resource type.
:param pulumi.Input[str] partner_resource_type_name: Name of the partner resource type.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription.
:param pulumi.Input[str] setup_uri: URI of the partner website that can be used by Azure customers to setup Event Grid
integration on an event source.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Tags of the resource.
:param pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']] visibility_state: Visibility state of the partner registration.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PartnerRegistrationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Information about a partner registration.
API Version: 2020-04-01-preview.
:param str resource_name: The name of the resource.
:param PartnerRegistrationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PartnerRegistrationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
authorized_azure_subscription_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
customer_service_uri: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
logo_uri: Optional[pulumi.Input[str]] = None,
long_description: Optional[pulumi.Input[str]] = None,
partner_customer_service_extension: Optional[pulumi.Input[str]] = None,
partner_customer_service_number: Optional[pulumi.Input[str]] = None,
partner_name: Optional[pulumi.Input[str]] = None,
partner_registration_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_description: Optional[pulumi.Input[str]] = None,
partner_resource_type_display_name: Optional[pulumi.Input[str]] = None,
partner_resource_type_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
setup_uri: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
visibility_state: Optional[pulumi.Input[Union[str, 'PartnerRegistrationVisibilityState']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PartnerRegistrationArgs.__new__(PartnerRegistrationArgs)
__props__.__dict__["authorized_azure_subscription_ids"] = authorized_azure_subscription_ids
__props__.__dict__["customer_service_uri"] = customer_service_uri
__props__.__dict__["location"] = location
__props__.__dict__["logo_uri"] = logo_uri
__props__.__dict__["long_description"] = long_description
__props__.__dict__["partner_customer_service_extension"] = partner_customer_service_extension
__props__.__dict__["partner_customer_service_number"] = partner_customer_service_number
__props__.__dict__["partner_name"] = partner_name
__props__.__dict__["partner_registration_name"] = partner_registration_name
__props__.__dict__["partner_resource_type_description"] = partner_resource_type_description
__props__.__dict__["partner_resource_type_display_name"] = partner_resource_type_display_name
__props__.__dict__["partner_resource_type_name"] = partner_resource_type_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["setup_uri"] = setup_uri
__props__.__dict__["tags"] = tags
__props__.__dict__["visibility_state"] = visibility_state
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:eventgrid:PartnerRegistration"), pulumi.Alias(type_="azure-native:eventgrid/v20200401preview:PartnerRegistration"), pulumi.Alias(type_="azure-nextgen:eventgrid/v20200401preview:PartnerRegistration"), pulumi.Alias(type_="azure-native:eventgrid/v20201015preview:PartnerRegistration"), pulumi.Alias(type_="azure-nextgen:eventgrid/v20201015preview:PartnerRegistration")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(PartnerRegistration, __self__).__init__(
'azure-native:eventgrid:PartnerRegistration',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'PartnerRegistration':
"""
Get an existing PartnerRegistration resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = PartnerRegistrationArgs.__new__(PartnerRegistrationArgs)
__props__.__dict__["authorized_azure_subscription_ids"] = None
__props__.__dict__["customer_service_uri"] = None
__props__.__dict__["location"] = None
__props__.__dict__["logo_uri"] = None
__props__.__dict__["long_description"] = None
__props__.__dict__["name"] = None
__props__.__dict__["partner_customer_service_extension"] = None
__props__.__dict__["partner_customer_service_number"] = None
__props__.__dict__["partner_name"] = None
__props__.__dict__["partner_resource_type_description"] = None
__props__.__dict__["partner_resource_type_display_name"] = None
__props__.__dict__["partner_resource_type_name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["setup_uri"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["visibility_state"] = None
return PartnerRegistration(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="authorizedAzureSubscriptionIds")
def authorized_azure_subscription_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of Azure subscription Ids that are authorized to create a partner namespace
associated with this partner registration. This is an optional property. Creating
partner namespaces is always permitted under the same Azure subscription as the one used
for creating the partner registration.
"""
return pulumi.get(self, "authorized_azure_subscription_ids")
@property
@pulumi.getter(name="customerServiceUri")
def customer_service_uri(self) -> pulumi.Output[Optional[str]]:
"""
The extension of the customer service URI of the publisher.
"""
return pulumi.get(self, "customer_service_uri")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Location of the resource.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="logoUri")
def logo_uri(self) -> pulumi.Output[Optional[str]]:
"""
URI of the logo.
"""
return pulumi.get(self, "logo_uri")
@property
@pulumi.getter(name="longDescription")
def long_description(self) -> pulumi.Output[Optional[str]]:
"""
Long description for the custom scenarios and integration to be displayed in the portal if needed.
Length of this description should not exceed 2048 characters.
"""
return pulumi.get(self, "long_description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="partnerCustomerServiceExtension")
def partner_customer_service_extension(self) -> pulumi.Output[Optional[str]]:
"""
The extension of the customer service number of the publisher. Only digits are allowed and number of digits should not exceed 10.
"""
return pulumi.get(self, "partner_customer_service_extension")
@property
@pulumi.getter(name="partnerCustomerServiceNumber")
def partner_customer_service_number(self) -> pulumi.Output[Optional[str]]:
"""
The customer service number of the publisher. The expected phone format should start with a '+' sign
followed by the country code. The remaining digits are then followed. Only digits and spaces are allowed and its
length cannot exceed 16 digits including country code. Examples of valid phone numbers are: +1 515 123 4567 and
+966 7 5115 2471. Examples of invalid phone numbers are: +1 (515) 123-4567, 1 515 123 4567 and +966 121 5115 24 7 551 1234 43
"""
return pulumi.get(self, "partner_customer_service_number")
@property
@pulumi.getter(name="partnerName")
def partner_name(self) -> pulumi.Output[Optional[str]]:
"""
Official name of the partner name. For example: "Contoso".
"""
return pulumi.get(self, "partner_name")
@property
@pulumi.getter(name="partnerResourceTypeDescription")
def partner_resource_type_description(self) -> pulumi.Output[Optional[str]]:
"""
Short description of the partner resource type. The length of this description should not exceed 256 characters.
"""
return pulumi.get(self, "partner_resource_type_description")
@property
@pulumi.getter(name="partnerResourceTypeDisplayName")
def partner_resource_type_display_name(self) -> pulumi.Output[Optional[str]]:
"""
Display name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_display_name")
@property
@pulumi.getter(name="partnerResourceTypeName")
def partner_resource_type_name(self) -> pulumi.Output[Optional[str]]:
"""
Name of the partner resource type.
"""
return pulumi.get(self, "partner_resource_type_name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
Provisioning state of the partner registration.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="setupUri")
def setup_uri(self) -> pulumi.Output[Optional[str]]:
"""
URI of the partner website that can be used by Azure customers to setup Event Grid
integration on an event source.
"""
return pulumi.get(self, "setup_uri")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
The system metadata relating to Partner Registration resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Tags of the resource.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Type of the resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="visibilityState")
def visibility_state(self) -> pulumi.Output[Optional[str]]:
"""
Visibility state of the partner registration.
"""
return pulumi.get(self, "visibility_state")
| [
"pulumi.get",
"pulumi.Alias",
"pulumi.getter",
"pulumi.set",
"pulumi.ResourceOptions",
"pulumi.ResourceOptions.merge"
]
| [((6655, 6694), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupName"""'}), "(name='resourceGroupName')\n", (6668, 6694), False, 'import pulumi\n'), ((7070, 7122), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""authorizedAzureSubscriptionIds"""'}), "(name='authorizedAzureSubscriptionIds')\n", (7083, 7122), False, 'import pulumi\n'), ((7891, 7931), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""customerServiceUri"""'}), "(name='customerServiceUri')\n", (7904, 7931), False, 'import pulumi\n'), ((8646, 8675), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""logoUri"""'}), "(name='logoUri')\n", (8659, 8675), False, 'import pulumi\n'), ((8970, 9007), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""longDescription"""'}), "(name='longDescription')\n", (8983, 9007), False, 'import pulumi\n'), ((9495, 9548), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerCustomerServiceExtension"""'}), "(name='partnerCustomerServiceExtension')\n", (9508, 9548), False, 'import pulumi\n'), ((10086, 10136), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerCustomerServiceNumber"""'}), "(name='partnerCustomerServiceNumber')\n", (10099, 10136), False, 'import pulumi\n'), ((11009, 11042), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerName"""'}), "(name='partnerName')\n", (11022, 11042), False, 'import pulumi\n'), ((11399, 11444), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerRegistrationName"""'}), "(name='partnerRegistrationName')\n", (11412, 11444), False, 'import pulumi\n'), ((11841, 11893), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerResourceTypeDescription"""'}), "(name='partnerResourceTypeDescription')\n", (11854, 11893), False, 'import pulumi\n'), ((12409, 12461), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerResourceTypeDisplayName"""'}), "(name='partnerResourceTypeDisplayName')\n", (12422, 12461), False, 'import pulumi\n'), ((12912, 12957), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerResourceTypeName"""'}), "(name='partnerResourceTypeName')\n", (12925, 12957), False, 'import pulumi\n'), ((13360, 13390), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""setupUri"""'}), "(name='setupUri')\n", (13373, 13390), False, 'import pulumi\n'), ((14146, 14183), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""visibilityState"""'}), "(name='visibilityState')\n", (14159, 14183), False, 'import pulumi\n'), ((26360, 26412), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""authorizedAzureSubscriptionIds"""'}), "(name='authorizedAzureSubscriptionIds')\n", (26373, 26412), False, 'import pulumi\n'), ((26943, 26983), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""customerServiceUri"""'}), "(name='customerServiceUri')\n", (26956, 26983), False, 'import pulumi\n'), ((27402, 27431), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""logoUri"""'}), "(name='logoUri')\n", (27415, 27431), False, 'import pulumi\n'), ((27601, 27638), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""longDescription"""'}), "(name='longDescription')\n", (27614, 27638), False, 'import pulumi\n'), ((28147, 28200), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerCustomerServiceExtension"""'}), "(name='partnerCustomerServiceExtension')\n", (28160, 28200), False, 'import pulumi\n'), ((28535, 28585), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerCustomerServiceNumber"""'}), "(name='partnerCustomerServiceNumber')\n", (28548, 28585), False, 'import pulumi\n'), ((29264, 29297), 'pulumi.getter', 
'pulumi.getter', ([], {'name': '"""partnerName"""'}), "(name='partnerName')\n", (29277, 29297), False, 'import pulumi\n'), ((29517, 29569), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerResourceTypeDescription"""'}), "(name='partnerResourceTypeDescription')\n", (29530, 29569), False, 'import pulumi\n'), ((29885, 29937), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerResourceTypeDisplayName"""'}), "(name='partnerResourceTypeDisplayName')\n", (29898, 29937), False, 'import pulumi\n'), ((30185, 30230), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""partnerResourceTypeName"""'}), "(name='partnerResourceTypeName')\n", (30198, 30230), False, 'import pulumi\n'), ((30454, 30493), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningState"""'}), "(name='provisioningState')\n", (30467, 30493), False, 'import pulumi\n'), ((30704, 30734), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""setupUri"""'}), "(name='setupUri')\n", (30717, 30734), False, 'import pulumi\n'), ((31013, 31045), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemData"""'}), "(name='systemData')\n", (31026, 31045), False, 'import pulumi\n'), ((31646, 31683), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""visibilityState"""'}), "(name='visibilityState')\n", (31659, 31683), False, 'import pulumi\n'), ((4683, 4747), 'pulumi.set', 'pulumi.set', (['__self__', '"""resource_group_name"""', 'resource_group_name'], {}), "(__self__, 'resource_group_name', resource_group_name)\n", (4693, 4747), False, 'import pulumi\n'), ((6861, 6900), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_name"""'], {}), "(self, 'resource_group_name')\n", (6871, 6900), False, 'import pulumi\n'), ((7003, 7049), 'pulumi.set', 'pulumi.set', (['self', '"""resource_group_name"""', 'value'], {}), "(self, 'resource_group_name', value)\n", (7013, 7049), False, 'import pulumi\n'), ((7592, 7645), 'pulumi.get', 'pulumi.get', (['self', '"""authorized_azure_subscription_ids"""'], {}), "(self, 'authorized_azure_subscription_ids')\n", (7602, 7645), False, 'import pulumi\n'), ((7810, 7870), 'pulumi.set', 'pulumi.set', (['self', '"""authorized_azure_subscription_ids"""', 'value'], {}), "(self, 'authorized_azure_subscription_ids', value)\n", (7820, 7870), False, 'import pulumi\n'), ((8106, 8146), 'pulumi.get', 'pulumi.get', (['self', '"""customer_service_uri"""'], {}), "(self, 'customer_service_uri')\n", (8116, 8146), False, 'import pulumi\n'), ((8261, 8308), 'pulumi.set', 'pulumi.set', (['self', '"""customer_service_uri"""', 'value'], {}), "(self, 'customer_service_uri', value)\n", (8271, 8308), False, 'import pulumi\n'), ((8471, 8499), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (8481, 8499), False, 'import pulumi\n'), ((8590, 8625), 'pulumi.set', 'pulumi.set', (['self', '"""location"""', 'value'], {}), "(self, 'location', value)\n", (8600, 8625), False, 'import pulumi\n'), ((8795, 8823), 'pulumi.get', 'pulumi.get', (['self', '"""logo_uri"""'], {}), "(self, 'logo_uri')\n", (8805, 8823), False, 'import pulumi\n'), ((8914, 8949), 'pulumi.set', 'pulumi.set', (['self', '"""logo_uri"""', 'value'], {}), "(self, 'logo_uri', value)\n", (8924, 8949), False, 'import pulumi\n'), ((9288, 9324), 'pulumi.get', 'pulumi.get', (['self', '"""long_description"""'], {}), "(self, 'long_description')\n", (9298, 9324), False, 'import pulumi\n'), ((9431, 9474), 'pulumi.set', 'pulumi.set', (['self', '"""long_description"""', 'value'], {}), "(self, 'long_description', value)\n", (9441, 9474), False, 
'import pulumi\n'), ((9807, 9861), 'pulumi.get', 'pulumi.get', (['self', '"""partner_customer_service_extension"""'], {}), "(self, 'partner_customer_service_extension')\n", (9817, 9861), False, 'import pulumi\n'), ((10004, 10065), 'pulumi.set', 'pulumi.set', (['self', '"""partner_customer_service_extension"""', 'value'], {}), "(self, 'partner_customer_service_extension', value)\n", (10014, 10065), False, 'import pulumi\n'), ((10742, 10793), 'pulumi.get', 'pulumi.get', (['self', '"""partner_customer_service_number"""'], {}), "(self, 'partner_customer_service_number')\n", (10752, 10793), False, 'import pulumi\n'), ((10930, 10988), 'pulumi.set', 'pulumi.set', (['self', '"""partner_customer_service_number"""', 'value'], {}), "(self, 'partner_customer_service_number', value)\n", (10940, 10988), False, 'import pulumi\n'), ((11208, 11240), 'pulumi.get', 'pulumi.get', (['self', '"""partner_name"""'], {}), "(self, 'partner_name')\n", (11218, 11240), False, 'import pulumi\n'), ((11339, 11378), 'pulumi.set', 'pulumi.set', (['self', '"""partner_name"""', 'value'], {}), "(self, 'partner_name', value)\n", (11349, 11378), False, 'import pulumi\n'), ((11598, 11643), 'pulumi.get', 'pulumi.get', (['self', '"""partner_registration_name"""'], {}), "(self, 'partner_registration_name')\n", (11608, 11643), False, 'import pulumi\n'), ((11768, 11820), 'pulumi.set', 'pulumi.set', (['self', '"""partner_registration_name"""', 'value'], {}), "(self, 'partner_registration_name', value)\n", (11778, 11820), False, 'import pulumi\n'), ((12134, 12187), 'pulumi.get', 'pulumi.get', (['self', '"""partner_resource_type_description"""'], {}), "(self, 'partner_resource_type_description')\n", (12144, 12187), False, 'import pulumi\n'), ((12328, 12388), 'pulumi.set', 'pulumi.set', (['self', '"""partner_resource_type_description"""', 'value'], {}), "(self, 'partner_resource_type_description', value)\n", (12338, 12388), False, 'import pulumi\n'), ((12633, 12687), 'pulumi.get', 'pulumi.get', (['self', '"""partner_resource_type_display_name"""'], {}), "(self, 'partner_resource_type_display_name')\n", (12643, 12687), False, 'import pulumi\n'), ((12830, 12891), 'pulumi.set', 'pulumi.set', (['self', '"""partner_resource_type_display_name"""', 'value'], {}), "(self, 'partner_resource_type_display_name', value)\n", (12840, 12891), False, 'import pulumi\n'), ((13113, 13159), 'pulumi.get', 'pulumi.get', (['self', '"""partner_resource_type_name"""'], {}), "(self, 'partner_resource_type_name')\n", (13123, 13159), False, 'import pulumi\n'), ((13286, 13339), 'pulumi.set', 'pulumi.set', (['self', '"""partner_resource_type_name"""', 'value'], {}), "(self, 'partner_resource_type_name', value)\n", (13296, 13339), False, 'import pulumi\n'), ((13618, 13647), 'pulumi.get', 'pulumi.get', (['self', '"""setup_uri"""'], {}), "(self, 'setup_uri')\n", (13628, 13647), False, 'import pulumi\n'), ((13740, 13776), 'pulumi.set', 'pulumi.set', (['self', '"""setup_uri"""', 'value'], {}), "(self, 'setup_uri', value)\n", (13750, 13776), False, 'import pulumi\n'), ((13959, 13983), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (13969, 13983), False, 'import pulumi\n'), ((14094, 14125), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (14104, 14125), False, 'import pulumi\n'), ((14385, 14421), 'pulumi.get', 'pulumi.get', (['self', '"""visibility_state"""'], {}), "(self, 'visibility_state')\n", (14395, 14421), False, 'import pulumi\n'), ((14573, 14616), 'pulumi.set', 'pulumi.set', (['self', 
'"""visibility_state"""', 'value'], {}), "(self, 'visibility_state', value)\n", (14583, 14616), False, 'import pulumi\n'), ((24283, 24329), 'pulumi.ResourceOptions.merge', 'pulumi.ResourceOptions.merge', (['opts', 'alias_opts'], {}), '(opts, alias_opts)\n', (24311, 24329), False, 'import pulumi\n'), ((26869, 26922), 'pulumi.get', 'pulumi.get', (['self', '"""authorized_azure_subscription_ids"""'], {}), "(self, 'authorized_azure_subscription_ids')\n", (26879, 26922), False, 'import pulumi\n'), ((27159, 27199), 'pulumi.get', 'pulumi.get', (['self', '"""customer_service_uri"""'], {}), "(self, 'customer_service_uri')\n", (27169, 27199), False, 'import pulumi\n'), ((27353, 27381), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (27363, 27381), False, 'import pulumi\n'), ((27552, 27580), 'pulumi.get', 'pulumi.get', (['self', '"""logo_uri"""'], {}), "(self, 'logo_uri')\n", (27562, 27580), False, 'import pulumi\n'), ((27920, 27956), 'pulumi.get', 'pulumi.get', (['self', '"""long_description"""'], {}), "(self, 'long_description')\n", (27930, 27956), False, 'import pulumi\n'), ((28102, 28126), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (28112, 28126), False, 'import pulumi\n'), ((28460, 28514), 'pulumi.get', 'pulumi.get', (['self', '"""partner_customer_service_extension"""'], {}), "(self, 'partner_customer_service_extension')\n", (28470, 28514), False, 'import pulumi\n'), ((29192, 29243), 'pulumi.get', 'pulumi.get', (['self', '"""partner_customer_service_number"""'], {}), "(self, 'partner_customer_service_number')\n", (29202, 29243), False, 'import pulumi\n'), ((29464, 29496), 'pulumi.get', 'pulumi.get', (['self', '"""partner_name"""'], {}), "(self, 'partner_name')\n", (29474, 29496), False, 'import pulumi\n'), ((29811, 29864), 'pulumi.get', 'pulumi.get', (['self', '"""partner_resource_type_description"""'], {}), "(self, 'partner_resource_type_description')\n", (29821, 29864), False, 'import pulumi\n'), ((30110, 30164), 'pulumi.get', 'pulumi.get', (['self', '"""partner_resource_type_display_name"""'], {}), "(self, 'partner_resource_type_display_name')\n", (30120, 30164), False, 'import pulumi\n'), ((30387, 30433), 'pulumi.get', 'pulumi.get', (['self', '"""partner_resource_type_name"""'], {}), "(self, 'partner_resource_type_name')\n", (30397, 30433), False, 'import pulumi\n'), ((30645, 30683), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_state"""'], {}), "(self, 'provisioning_state')\n", (30655, 30683), False, 'import pulumi\n'), ((30963, 30992), 'pulumi.get', 'pulumi.get', (['self', '"""setup_uri"""'], {}), "(self, 'setup_uri')\n", (30973, 30992), False, 'import pulumi\n'), ((31230, 31261), 'pulumi.get', 'pulumi.get', (['self', '"""system_data"""'], {}), "(self, 'system_data')\n", (31240, 31261), False, 'import pulumi\n'), ((31431, 31455), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (31441, 31455), False, 'import pulumi\n'), ((31601, 31625), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (31611, 31625), False, 'import pulumi\n'), ((31841, 31877), 'pulumi.get', 'pulumi.get', (['self', '"""visibility_state"""'], {}), "(self, 'visibility_state')\n", (31851, 31877), False, 'import pulumi\n'), ((4818, 4914), 'pulumi.set', 'pulumi.set', (['__self__', '"""authorized_azure_subscription_ids"""', 'authorized_azure_subscription_ids'], {}), "(__self__, 'authorized_azure_subscription_ids',\n authorized_azure_subscription_ids)\n", (4828, 4914), False, 'import 
pulumi\n'), ((4968, 5034), 'pulumi.set', 'pulumi.set', (['__self__', '"""customer_service_uri"""', 'customer_service_uri'], {}), "(__self__, 'customer_service_uri', customer_service_uri)\n", (4978, 5034), False, 'import pulumi\n'), ((5080, 5122), 'pulumi.set', 'pulumi.set', (['__self__', '"""location"""', 'location'], {}), "(__self__, 'location', location)\n", (5090, 5122), False, 'import pulumi\n'), ((5168, 5210), 'pulumi.set', 'pulumi.set', (['__self__', '"""logo_uri"""', 'logo_uri'], {}), "(__self__, 'logo_uri', logo_uri)\n", (5178, 5210), False, 'import pulumi\n'), ((5264, 5322), 'pulumi.set', 'pulumi.set', (['__self__', '"""long_description"""', 'long_description'], {}), "(__self__, 'long_description', long_description)\n", (5274, 5322), False, 'import pulumi\n'), ((5394, 5492), 'pulumi.set', 'pulumi.set', (['__self__', '"""partner_customer_service_extension"""', 'partner_customer_service_extension'], {}), "(__self__, 'partner_customer_service_extension',\n partner_customer_service_extension)\n", (5404, 5492), False, 'import pulumi\n'), ((5557, 5649), 'pulumi.set', 'pulumi.set', (['__self__', '"""partner_customer_service_number"""', 'partner_customer_service_number'], {}), "(__self__, 'partner_customer_service_number',\n partner_customer_service_number)\n", (5567, 5649), False, 'import pulumi\n'), ((5695, 5745), 'pulumi.set', 'pulumi.set', (['__self__', '"""partner_name"""', 'partner_name'], {}), "(__self__, 'partner_name', partner_name)\n", (5705, 5745), False, 'import pulumi\n'), ((5808, 5884), 'pulumi.set', 'pulumi.set', (['__self__', '"""partner_registration_name"""', 'partner_registration_name'], {}), "(__self__, 'partner_registration_name', partner_registration_name)\n", (5818, 5884), False, 'import pulumi\n'), ((5955, 6051), 'pulumi.set', 'pulumi.set', (['__self__', '"""partner_resource_type_description"""', 'partner_resource_type_description'], {}), "(__self__, 'partner_resource_type_description',\n partner_resource_type_description)\n", (5965, 6051), False, 'import pulumi\n'), ((6119, 6217), 'pulumi.set', 'pulumi.set', (['__self__', '"""partner_resource_type_display_name"""', 'partner_resource_type_display_name'], {}), "(__self__, 'partner_resource_type_display_name',\n partner_resource_type_display_name)\n", (6129, 6217), False, 'import pulumi\n'), ((6277, 6355), 'pulumi.set', 'pulumi.set', (['__self__', '"""partner_resource_type_name"""', 'partner_resource_type_name'], {}), "(__self__, 'partner_resource_type_name', partner_resource_type_name)\n", (6287, 6355), False, 'import pulumi\n'), ((6402, 6446), 'pulumi.set', 'pulumi.set', (['__self__', '"""setup_uri"""', 'setup_uri'], {}), "(__self__, 'setup_uri', setup_uri)\n", (6412, 6446), False, 'import pulumi\n'), ((6488, 6522), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (6498, 6522), False, 'import pulumi\n'), ((6576, 6634), 'pulumi.set', 'pulumi.set', (['__self__', '"""visibility_state"""', 'visibility_state'], {}), "(__self__, 'visibility_state', visibility_state)\n", (6586, 6634), False, 'import pulumi\n'), ((21660, 21684), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (21682, 21684), False, 'import pulumi\n'), ((25143, 25172), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (25165, 25172), False, 'import pulumi\n'), ((23866, 23931), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-nextgen:eventgrid:PartnerRegistration"""'}), "(type_='azure-nextgen:eventgrid:PartnerRegistration')\n", (23878, 23931), 
False, 'import pulumi\n'), ((23933, 24019), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-native:eventgrid/v20200401preview:PartnerRegistration"""'}), "(type_=\n 'azure-native:eventgrid/v20200401preview:PartnerRegistration')\n", (23945, 24019), False, 'import pulumi\n'), ((24016, 24103), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-nextgen:eventgrid/v20200401preview:PartnerRegistration"""'}), "(type_=\n 'azure-nextgen:eventgrid/v20200401preview:PartnerRegistration')\n", (24028, 24103), False, 'import pulumi\n'), ((24100, 24186), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-native:eventgrid/v20201015preview:PartnerRegistration"""'}), "(type_=\n 'azure-native:eventgrid/v20201015preview:PartnerRegistration')\n", (24112, 24186), False, 'import pulumi\n'), ((24183, 24270), 'pulumi.Alias', 'pulumi.Alias', ([], {'type_': '"""azure-nextgen:eventgrid/v20201015preview:PartnerRegistration"""'}), "(type_=\n 'azure-nextgen:eventgrid/v20201015preview:PartnerRegistration')\n", (24195, 24270), False, 'import pulumi\n')] |
import os
import glob
import cv2
import numpy as np
import torch
from torchvision.transforms import transforms
from natsort import natsorted
from models import resmasking_dropout1
from utils.datasets.fer2013dataset import EMOTION_DICT
from barez import show
transform = transforms.Compose(
[
transforms.ToPILImage(),
transforms.ToTensor(),
]
)
def activations_mask(tensor):
tensor = torch.squeeze(tensor, 0)
tensor = torch.mean(tensor, 0)
tensor = tensor.detach().cpu().numpy()
tensor = np.maximum(tensor, 0)
tensor = cv2.resize(tensor, (224, 224))
tensor = tensor - np.min(tensor)
tensor = tensor / np.max(tensor)
heatmap = cv2.applyColorMap(np.uint8(255 * tensor), cv2.COLORMAP_JET)
return heatmap
model = resmasking_dropout1(3, 7)
# state = torch.load('./saved/checkpoints/resmasking_dropout1_rot30_2019Nov17_14.33')
state = torch.load("./saved/checkpoints/Z_resmasking_dropout1_rot30_2019Nov30_13.32")
model.load_state_dict(state["net"])
model.cuda()
model.eval()
for image_path in natsorted(
glob.glob("/home/z/research/bkemo/images/**/*.png", recursive=True)
):
image_name = os.path.basename(image_path)
print(image_name)
# image_path = '/home/z/research/bkemo/images/disgust/0.0_dc10a3_1976_0.png'
image = cv2.imread(image_path)
image = cv2.resize(image, (224, 224))
tensor = transform(image)
tensor = torch.unsqueeze(tensor, 0)
tensor = tensor.cuda()
# output = model(tensor)
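    # run the backbone layer by layer instead of calling model(tensor) directly, so the
    # layer3 feature map can be turned into a heatmap before the masking branch rescales it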
x = model.conv1(tensor) # 112
x = model.bn1(x)
x = model.relu(x)
x = model.maxpool(x) # 56
x = model.layer1(x) # 56
m = model.mask1(x)
x = x * (1 + m)
x = model.layer2(x) # 28
m = model.mask2(x)
x = x * (1 + m)
x = model.layer3(x) # 14
heat_1 = activations_mask(x)
m = model.mask3(x)
x = x * (1 + m)
# heat_2 = activations_mask(m)
x = model.layer4(x) # 7
m = model.mask4(x)
x = x * (1 + m)
x = model.avgpool(x)
x = torch.flatten(x, 1)
output = model.fc(x)
# print(np.sum(heat_1 - heat_2))
# show(np.concatenate((image, heat_1, heat_2), axis=1))
cv2.imwrite(
"./masking_provements/{}".format(image_name),
np.concatenate((image, heat_1), axis=1),
)
# np.concatenate((image, heat_1, heat_2), axis=1))
# output = output.cpu().numpy()
# print(EMOTION_DICT[torch.argmax(output, 1).item()])
| [
"torchvision.transforms.transforms.ToPILImage",
"models.resmasking_dropout1",
"numpy.uint8",
"cv2.resize",
"torch.mean",
"torch.unsqueeze",
"torch.load",
"numpy.min",
"torch.flatten",
"numpy.max",
"torchvision.transforms.transforms.ToTensor",
"os.path.basename",
"numpy.concatenate",
"torch.squeeze",
"numpy.maximum",
"cv2.imread",
"glob.glob"
]
| [((774, 799), 'models.resmasking_dropout1', 'resmasking_dropout1', (['(3)', '(7)'], {}), '(3, 7)\n', (793, 799), False, 'from models import resmasking_dropout1\n'), ((894, 971), 'torch.load', 'torch.load', (['"""./saved/checkpoints/Z_resmasking_dropout1_rot30_2019Nov30_13.32"""'], {}), "('./saved/checkpoints/Z_resmasking_dropout1_rot30_2019Nov30_13.32')\n", (904, 971), False, 'import torch\n'), ((414, 438), 'torch.squeeze', 'torch.squeeze', (['tensor', '(0)'], {}), '(tensor, 0)\n', (427, 438), False, 'import torch\n'), ((452, 473), 'torch.mean', 'torch.mean', (['tensor', '(0)'], {}), '(tensor, 0)\n', (462, 473), False, 'import torch\n'), ((530, 551), 'numpy.maximum', 'np.maximum', (['tensor', '(0)'], {}), '(tensor, 0)\n', (540, 551), True, 'import numpy as np\n'), ((565, 595), 'cv2.resize', 'cv2.resize', (['tensor', '(224, 224)'], {}), '(tensor, (224, 224))\n', (575, 595), False, 'import cv2\n'), ((1068, 1135), 'glob.glob', 'glob.glob', (['"""/home/z/research/bkemo/images/**/*.png"""'], {'recursive': '(True)'}), "('/home/z/research/bkemo/images/**/*.png', recursive=True)\n", (1077, 1135), False, 'import glob\n'), ((1156, 1184), 'os.path.basename', 'os.path.basename', (['image_path'], {}), '(image_path)\n', (1172, 1184), False, 'import os\n'), ((1300, 1322), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (1310, 1322), False, 'import cv2\n'), ((1335, 1364), 'cv2.resize', 'cv2.resize', (['image', '(224, 224)'], {}), '(image, (224, 224))\n', (1345, 1364), False, 'import cv2\n'), ((1408, 1434), 'torch.unsqueeze', 'torch.unsqueeze', (['tensor', '(0)'], {}), '(tensor, 0)\n', (1423, 1434), False, 'import torch\n'), ((1999, 2018), 'torch.flatten', 'torch.flatten', (['x', '(1)'], {}), '(x, 1)\n', (2012, 2018), False, 'import torch\n'), ((305, 328), 'torchvision.transforms.transforms.ToPILImage', 'transforms.ToPILImage', ([], {}), '()\n', (326, 328), False, 'from torchvision.transforms import transforms\n'), ((338, 359), 'torchvision.transforms.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (357, 359), False, 'from torchvision.transforms import transforms\n'), ((618, 632), 'numpy.min', 'np.min', (['tensor'], {}), '(tensor)\n', (624, 632), True, 'import numpy as np\n'), ((655, 669), 'numpy.max', 'np.max', (['tensor'], {}), '(tensor)\n', (661, 669), True, 'import numpy as np\n'), ((703, 725), 'numpy.uint8', 'np.uint8', (['(255 * tensor)'], {}), '(255 * tensor)\n', (711, 725), True, 'import numpy as np\n'), ((2223, 2262), 'numpy.concatenate', 'np.concatenate', (['(image, heat_1)'], {'axis': '(1)'}), '((image, heat_1), axis=1)\n', (2237, 2262), True, 'import numpy as np\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetNamespaceResult',
'AwaitableGetNamespaceResult',
'get_namespace',
]
warnings.warn("""The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:notificationhubs:getNamespace'.""", DeprecationWarning)
@pulumi.output_type
class GetNamespaceResult:
"""
Description of a Namespace resource.
"""
def __init__(__self__, created_at=None, critical=None, data_center=None, enabled=None, id=None, location=None, metric_id=None, name=None, namespace_type=None, provisioning_state=None, region=None, scale_unit=None, service_bus_endpoint=None, sku=None, status=None, subscription_id=None, tags=None, type=None, updated_at=None):
if created_at and not isinstance(created_at, str):
raise TypeError("Expected argument 'created_at' to be a str")
pulumi.set(__self__, "created_at", created_at)
if critical and not isinstance(critical, bool):
raise TypeError("Expected argument 'critical' to be a bool")
pulumi.set(__self__, "critical", critical)
if data_center and not isinstance(data_center, str):
raise TypeError("Expected argument 'data_center' to be a str")
pulumi.set(__self__, "data_center", data_center)
if enabled and not isinstance(enabled, bool):
raise TypeError("Expected argument 'enabled' to be a bool")
pulumi.set(__self__, "enabled", enabled)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if metric_id and not isinstance(metric_id, str):
raise TypeError("Expected argument 'metric_id' to be a str")
pulumi.set(__self__, "metric_id", metric_id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if namespace_type and not isinstance(namespace_type, str):
raise TypeError("Expected argument 'namespace_type' to be a str")
pulumi.set(__self__, "namespace_type", namespace_type)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if region and not isinstance(region, str):
raise TypeError("Expected argument 'region' to be a str")
pulumi.set(__self__, "region", region)
if scale_unit and not isinstance(scale_unit, str):
raise TypeError("Expected argument 'scale_unit' to be a str")
pulumi.set(__self__, "scale_unit", scale_unit)
if service_bus_endpoint and not isinstance(service_bus_endpoint, str):
raise TypeError("Expected argument 'service_bus_endpoint' to be a str")
pulumi.set(__self__, "service_bus_endpoint", service_bus_endpoint)
if sku and not isinstance(sku, dict):
raise TypeError("Expected argument 'sku' to be a dict")
pulumi.set(__self__, "sku", sku)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if subscription_id and not isinstance(subscription_id, str):
raise TypeError("Expected argument 'subscription_id' to be a str")
pulumi.set(__self__, "subscription_id", subscription_id)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if updated_at and not isinstance(updated_at, str):
raise TypeError("Expected argument 'updated_at' to be a str")
pulumi.set(__self__, "updated_at", updated_at)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> Optional[str]:
"""
The time the namespace was created.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter
def critical(self) -> Optional[bool]:
"""
Whether or not the namespace is set as Critical.
"""
return pulumi.get(self, "critical")
@property
@pulumi.getter(name="dataCenter")
def data_center(self) -> Optional[str]:
"""
Data center for the namespace
"""
return pulumi.get(self, "data_center")
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
Whether or not the namespace is currently enabled.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="metricId")
def metric_id(self) -> str:
"""
Identifier for Azure Insights metrics
"""
return pulumi.get(self, "metric_id")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="namespaceType")
def namespace_type(self) -> Optional[str]:
"""
The namespace type.
"""
return pulumi.get(self, "namespace_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Provisioning state of the Namespace.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def region(self) -> Optional[str]:
"""
Specifies the targeted region in which the namespace should be created. It can be any of the following values: Australia East, Australia Southeast, Central US, East US, East US 2, West US, North Central US, South Central US, East Asia, Southeast Asia, Brazil South, Japan East, Japan West, North Europe, West Europe
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="scaleUnit")
def scale_unit(self) -> Optional[str]:
"""
ScaleUnit where the namespace gets created
"""
return pulumi.get(self, "scale_unit")
@property
@pulumi.getter(name="serviceBusEndpoint")
def service_bus_endpoint(self) -> Optional[str]:
"""
Endpoint you can use to perform NotificationHub operations.
"""
return pulumi.get(self, "service_bus_endpoint")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
"""
The sku of the created namespace
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Status of the namespace. It can be any of these values:1 = Created/Active2 = Creating3 = Suspended4 = Deleting
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="subscriptionId")
def subscription_id(self) -> Optional[str]:
"""
The Id of the Azure subscription associated with the namespace.
"""
return pulumi.get(self, "subscription_id")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="updatedAt")
def updated_at(self) -> Optional[str]:
"""
The time the namespace was updated.
"""
return pulumi.get(self, "updated_at")
class AwaitableGetNamespaceResult(GetNamespaceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetNamespaceResult(
created_at=self.created_at,
critical=self.critical,
data_center=self.data_center,
enabled=self.enabled,
id=self.id,
location=self.location,
metric_id=self.metric_id,
name=self.name,
namespace_type=self.namespace_type,
provisioning_state=self.provisioning_state,
region=self.region,
scale_unit=self.scale_unit,
service_bus_endpoint=self.service_bus_endpoint,
sku=self.sku,
status=self.status,
subscription_id=self.subscription_id,
tags=self.tags,
type=self.type,
updated_at=self.updated_at)
def get_namespace(namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNamespaceResult:
"""
Description of a Namespace resource.
Latest API Version: 2017-04-01.
:param str namespace_name: The namespace name.
:param str resource_group_name: The name of the resource group.
"""
pulumi.log.warn("""get_namespace is deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-native:notificationhubs:getNamespace'.""")
__args__ = dict()
__args__['namespaceName'] = namespace_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:notificationhubs/latest:getNamespace', __args__, opts=opts, typ=GetNamespaceResult).value
return AwaitableGetNamespaceResult(
created_at=__ret__.created_at,
critical=__ret__.critical,
data_center=__ret__.data_center,
enabled=__ret__.enabled,
id=__ret__.id,
location=__ret__.location,
metric_id=__ret__.metric_id,
name=__ret__.name,
namespace_type=__ret__.namespace_type,
provisioning_state=__ret__.provisioning_state,
region=__ret__.region,
scale_unit=__ret__.scale_unit,
service_bus_endpoint=__ret__.service_bus_endpoint,
sku=__ret__.sku,
status=__ret__.status,
subscription_id=__ret__.subscription_id,
tags=__ret__.tags,
type=__ret__.type,
updated_at=__ret__.updated_at)
| [
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"warnings.warn",
"pulumi.log.warn",
"pulumi.runtime.invoke",
"pulumi.InvokeOptions"
]
| [((436, 620), 'warnings.warn', 'warnings.warn', (['"""The \'latest\' version is deprecated. Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."""', 'DeprecationWarning'], {}), '(\n "The \'latest\' version is deprecated. Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."\n , DeprecationWarning)\n', (449, 620), False, 'import warnings\n'), ((4544, 4575), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""createdAt"""'}), "(name='createdAt')\n", (4557, 4575), False, 'import pulumi\n'), ((4954, 4986), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dataCenter"""'}), "(name='dataCenter')\n", (4967, 4986), False, 'import pulumi\n'), ((5671, 5701), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""metricId"""'}), "(name='metricId')\n", (5684, 5701), False, 'import pulumi\n'), ((6016, 6051), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""namespaceType"""'}), "(name='namespaceType')\n", (6029, 6051), False, 'import pulumi\n'), ((6221, 6260), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningState"""'}), "(name='provisioningState')\n", (6234, 6260), False, 'import pulumi\n'), ((6918, 6949), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""scaleUnit"""'}), "(name='scaleUnit')\n", (6931, 6949), False, 'import pulumi\n'), ((7134, 7174), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceBusEndpoint"""'}), "(name='serviceBusEndpoint')\n", (7147, 7174), False, 'import pulumi\n'), ((7846, 7882), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""subscriptionId"""'}), "(name='subscriptionId')\n", (7859, 7882), False, 'import pulumi\n'), ((8416, 8447), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""updatedAt"""'}), "(name='updatedAt')\n", (8429, 8447), False, 'import pulumi\n'), ((9955, 10150), 'pulumi.log.warn', 'pulumi.log.warn', (['"""get_namespace is deprecated: The \'latest\' version is deprecated. Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."""'], {}), '(\n "get_namespace is deprecated: The \'latest\' version is deprecated. 
Please migrate to the function in the top-level module: \'azure-native:notificationhubs:getNamespace\'."\n )\n', (9970, 10150), False, 'import pulumi\n'), ((1190, 1236), 'pulumi.set', 'pulumi.set', (['__self__', '"""created_at"""', 'created_at'], {}), "(__self__, 'created_at', created_at)\n", (1200, 1236), False, 'import pulumi\n'), ((1374, 1416), 'pulumi.set', 'pulumi.set', (['__self__', '"""critical"""', 'critical'], {}), "(__self__, 'critical', critical)\n", (1384, 1416), False, 'import pulumi\n'), ((1561, 1609), 'pulumi.set', 'pulumi.set', (['__self__', '"""data_center"""', 'data_center'], {}), "(__self__, 'data_center', data_center)\n", (1571, 1609), False, 'import pulumi\n'), ((1744, 1784), 'pulumi.set', 'pulumi.set', (['__self__', '"""enabled"""', 'enabled'], {}), "(__self__, 'enabled', enabled)\n", (1754, 1784), False, 'import pulumi\n'), ((1902, 1932), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (1912, 1932), False, 'import pulumi\n'), ((2068, 2110), 'pulumi.set', 'pulumi.set', (['__self__', '"""location"""', 'location'], {}), "(__self__, 'location', location)\n", (2078, 2110), False, 'import pulumi\n'), ((2249, 2293), 'pulumi.set', 'pulumi.set', (['__self__', '"""metric_id"""', 'metric_id'], {}), "(__self__, 'metric_id', metric_id)\n", (2259, 2293), False, 'import pulumi\n'), ((2417, 2451), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (2427, 2451), False, 'import pulumi\n'), ((2605, 2659), 'pulumi.set', 'pulumi.set', (['__self__', '"""namespace_type"""', 'namespace_type'], {}), "(__self__, 'namespace_type', namespace_type)\n", (2615, 2659), False, 'import pulumi\n'), ((2825, 2887), 'pulumi.set', 'pulumi.set', (['__self__', '"""provisioning_state"""', 'provisioning_state'], {}), "(__self__, 'provisioning_state', provisioning_state)\n", (2835, 2887), False, 'import pulumi\n'), ((3017, 3055), 'pulumi.set', 'pulumi.set', (['__self__', '"""region"""', 'region'], {}), "(__self__, 'region', region)\n", (3027, 3055), False, 'import pulumi\n'), ((3197, 3243), 'pulumi.set', 'pulumi.set', (['__self__', '"""scale_unit"""', 'scale_unit'], {}), "(__self__, 'scale_unit', scale_unit)\n", (3207, 3243), False, 'import pulumi\n'), ((3415, 3481), 'pulumi.set', 'pulumi.set', (['__self__', '"""service_bus_endpoint"""', 'service_bus_endpoint'], {}), "(__self__, 'service_bus_endpoint', service_bus_endpoint)\n", (3425, 3481), False, 'import pulumi\n'), ((3604, 3636), 'pulumi.set', 'pulumi.set', (['__self__', '"""sku"""', 'sku'], {}), "(__self__, 'sku', sku)\n", (3614, 3636), False, 'import pulumi\n'), ((3766, 3804), 'pulumi.set', 'pulumi.set', (['__self__', '"""status"""', 'status'], {}), "(__self__, 'status', status)\n", (3776, 3804), False, 'import pulumi\n'), ((3961, 4017), 'pulumi.set', 'pulumi.set', (['__self__', '"""subscription_id"""', 'subscription_id'], {}), "(__self__, 'subscription_id', subscription_id)\n", (3971, 4017), False, 'import pulumi\n'), ((4143, 4177), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (4153, 4177), False, 'import pulumi\n'), ((4301, 4335), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (4311, 4335), False, 'import pulumi\n'), ((4477, 4523), 'pulumi.set', 'pulumi.set', (['__self__', '"""updated_at"""', 'updated_at'], {}), "(__self__, 'updated_at', updated_at)\n", (4487, 4523), False, 'import pulumi\n'), ((4702, 4732), 'pulumi.get', 'pulumi.get', (['self', 
'"""created_at"""'], {}), "(self, 'created_at')\n", (4712, 4732), False, 'import pulumi\n'), ((4905, 4933), 'pulumi.get', 'pulumi.get', (['self', '"""critical"""'], {}), "(self, 'critical')\n", (4915, 4933), False, 'import pulumi\n'), ((5108, 5139), 'pulumi.get', 'pulumi.get', (['self', '"""data_center"""'], {}), "(self, 'data_center')\n", (5118, 5139), False, 'import pulumi\n'), ((5313, 5340), 'pulumi.get', 'pulumi.get', (['self', '"""enabled"""'], {}), "(self, 'enabled')\n", (5323, 5340), False, 'import pulumi\n'), ((5459, 5481), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (5469, 5481), False, 'import pulumi\n'), ((5622, 5650), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (5632, 5650), False, 'import pulumi\n'), ((5819, 5848), 'pulumi.get', 'pulumi.get', (['self', '"""metric_id"""'], {}), "(self, 'metric_id')\n", (5829, 5848), False, 'import pulumi\n'), ((5971, 5995), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (5981, 5995), False, 'import pulumi\n'), ((6166, 6200), 'pulumi.get', 'pulumi.get', (['self', '"""namespace_type"""'], {}), "(self, 'namespace_type')\n", (6176, 6200), False, 'import pulumi\n'), ((6396, 6434), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_state"""'], {}), "(self, 'provisioning_state')\n", (6406, 6434), False, 'import pulumi\n'), ((6871, 6897), 'pulumi.get', 'pulumi.get', (['self', '"""region"""'], {}), "(self, 'region')\n", (6881, 6897), False, 'import pulumi\n'), ((7083, 7113), 'pulumi.get', 'pulumi.get', (['self', '"""scale_unit"""'], {}), "(self, 'scale_unit')\n", (7093, 7113), False, 'import pulumi\n'), ((7335, 7375), 'pulumi.get', 'pulumi.get', (['self', '"""service_bus_endpoint"""'], {}), "(self, 'service_bus_endpoint')\n", (7345, 7375), False, 'import pulumi\n'), ((7544, 7567), 'pulumi.get', 'pulumi.get', (['self', '"""sku"""'], {}), "(self, 'sku')\n", (7554, 7567), False, 'import pulumi\n'), ((7799, 7825), 'pulumi.get', 'pulumi.get', (['self', '"""status"""'], {}), "(self, 'status')\n", (7809, 7825), False, 'import pulumi\n'), ((8042, 8077), 'pulumi.get', 'pulumi.get', (['self', '"""subscription_id"""'], {}), "(self, 'subscription_id')\n", (8052, 8077), False, 'import pulumi\n'), ((8224, 8248), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (8234, 8248), False, 'import pulumi\n'), ((8371, 8395), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (8381, 8395), False, 'import pulumi\n'), ((8574, 8604), 'pulumi.get', 'pulumi.get', (['self', '"""updated_at"""'], {}), "(self, 'updated_at')\n", (8584, 8604), False, 'import pulumi\n'), ((10306, 10328), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (10326, 10328), False, 'import pulumi\n'), ((10420, 10543), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure-native:notificationhubs/latest:getNamespace"""', '__args__'], {'opts': 'opts', 'typ': 'GetNamespaceResult'}), "('azure-native:notificationhubs/latest:getNamespace',\n __args__, opts=opts, typ=GetNamespaceResult)\n", (10441, 10543), False, 'import pulumi\n')] |
import json
from pygments import highlight
from pygments.lexers import JsonLexer
from pygments.formatters import TerminalFormatter
def print_json_obj(json_object):
json_str = json.dumps(json_object, indent=4, sort_keys=True)
print(highlight(json_str, JsonLexer(), TerminalFormatter()))
def print_json_str(json_str):
print(highlight(json_str, JsonLexer(), TerminalFormatter()))
| [
"pygments.lexers.JsonLexer",
"json.dumps",
"pygments.formatters.TerminalFormatter"
]
| [((181, 230), 'json.dumps', 'json.dumps', (['json_object'], {'indent': '(4)', 'sort_keys': '(True)'}), '(json_object, indent=4, sort_keys=True)\n', (191, 230), False, 'import json\n'), ((261, 272), 'pygments.lexers.JsonLexer', 'JsonLexer', ([], {}), '()\n', (270, 272), False, 'from pygments.lexers import JsonLexer\n'), ((274, 293), 'pygments.formatters.TerminalFormatter', 'TerminalFormatter', ([], {}), '()\n', (291, 293), False, 'from pygments.formatters import TerminalFormatter\n'), ((358, 369), 'pygments.lexers.JsonLexer', 'JsonLexer', ([], {}), '()\n', (367, 369), False, 'from pygments.lexers import JsonLexer\n'), ((371, 390), 'pygments.formatters.TerminalFormatter', 'TerminalFormatter', ([], {}), '()\n', (388, 390), False, 'from pygments.formatters import TerminalFormatter\n')] |
#!/usr/bin/env python3
import os
from opendbc.can.parser import CANParser
from cereal import car
from selfdrive.car.interfaces import RadarInterfaceBase
RADAR_MSGS_C = list(range(0x2c2, 0x2d4+2, 2)) # c_ messages 706,...,724
RADAR_MSGS_D = list(range(0x2a2, 0x2b4+2, 2)) # d_ messages
LAST_MSG = max(RADAR_MSGS_C + RADAR_MSGS_D)
NUMBER_MSGS = len(RADAR_MSGS_C) + len(RADAR_MSGS_D)
def _create_radar_can_parser():
dbc_f = 'chrysler_pacifica_2017_hybrid_private_fusion.dbc'
msg_n = len(RADAR_MSGS_C)
# list of [(signal name, message name or number, initial values), (...)]
# [('RADAR_STATE', 1024, 0),
# ('LONG_DIST', 1072, 255),
# ('LONG_DIST', 1073, 255),
# ('LONG_DIST', 1074, 255),
# ('LONG_DIST', 1075, 255),
# The factor and offset are applied by the dbc parsing library, so the
# default values should be after the factor/offset are applied.
signals = list(zip(['LONG_DIST'] * msg_n +
['LAT_DIST'] * msg_n +
['REL_SPEED'] * msg_n,
RADAR_MSGS_C * 2 + # LONG_DIST, LAT_DIST
RADAR_MSGS_D, # REL_SPEED
[0] * msg_n + # LONG_DIST
[-1000] * msg_n + # LAT_DIST
[-146.278] * msg_n)) # REL_SPEED set to 0, factor/offset to this
# TODO what are the checks actually used for?
# honda only checks the last message,
# toyota checks all the messages. Which do we want?
checks = list(zip(RADAR_MSGS_C +
RADAR_MSGS_D,
[20]*msg_n + # 20Hz (0.05s)
[20]*msg_n)) # 20Hz (0.05s)
return CANParser(os.path.splitext(dbc_f)[0], signals, checks, 1)
def _address_to_track(address):
if address in RADAR_MSGS_C:
return (address - RADAR_MSGS_C[0]) // 2
if address in RADAR_MSGS_D:
return (address - RADAR_MSGS_D[0]) // 2
raise ValueError("radar received unexpected address %d" % address)
class RadarInterface(RadarInterfaceBase):
def __init__(self, CP):
self.pts = {}
self.delay = 0 # Delay of radar #TUNE
self.rcp = _create_radar_can_parser()
self.updated_messages = set()
self.trigger_msg = LAST_MSG
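    # update() buffers incoming CAN messages and only returns a RadarData message once the
    # highest-address radar frame (LAST_MSG) has been seen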
def update(self, can_strings):
vls = self.rcp.update_strings(can_strings)
self.updated_messages.update(vls)
if self.trigger_msg not in self.updated_messages:
return None
ret = car.RadarData.new_message()
errors = []
if not self.rcp.can_valid:
errors.append("canError")
ret.errors = errors
for ii in self.updated_messages: # ii should be the message ID as a number
cpt = self.rcp.vl[ii]
trackId = _address_to_track(ii)
if trackId not in self.pts:
self.pts[trackId] = car.RadarData.RadarPoint.new_message()
self.pts[trackId].trackId = trackId
self.pts[trackId].aRel = float('nan')
self.pts[trackId].yvRel = float('nan')
self.pts[trackId].measured = True
if 'LONG_DIST' in cpt: # c_* message
self.pts[trackId].dRel = cpt['LONG_DIST'] # from front of car
# our lat_dist is positive to the right in car's frame.
# TODO what does yRel want?
self.pts[trackId].yRel = cpt['LAT_DIST'] # in car frame's y axis, left is positive
else: # d_* message
self.pts[trackId].vRel = cpt['REL_SPEED']
# We want a list, not a dictionary. Filter out LONG_DIST==0 because that means it's not valid.
ret.points = [x for x in self.pts.values() if x.dRel != 0]
self.updated_messages.clear()
return ret
| [
"cereal.car.RadarData.new_message",
"os.path.splitext",
"cereal.car.RadarData.RadarPoint.new_message"
]
| [((2329, 2356), 'cereal.car.RadarData.new_message', 'car.RadarData.new_message', ([], {}), '()\n', (2354, 2356), False, 'from cereal import car\n'), ((1589, 1612), 'os.path.splitext', 'os.path.splitext', (['dbc_f'], {}), '(dbc_f)\n', (1605, 1612), False, 'import os\n'), ((2670, 2708), 'cereal.car.RadarData.RadarPoint.new_message', 'car.RadarData.RadarPoint.new_message', ([], {}), '()\n', (2706, 2708), False, 'from cereal import car\n')] |
import os
import numpy as np
import tensorflow as tf
from image_quality.utils import utils
class TrainDataGenerator(tf.keras.utils.Sequence):
'''inherits from Keras Sequence base object, allows to use multiprocessing in .fit_generator'''
def __init__(self, samples, img_dir, batch_size, n_classes, basenet_preprocess,
img_load_dims=(256, 256), img_crop_dims=(224, 224), shuffle=True):
self.samples = samples
self.img_dir = img_dir
self.batch_size = batch_size
self.n_classes = n_classes
self.basenet_preprocess = basenet_preprocess # Keras basenet specific preprocessing function
self.img_load_dims = img_load_dims # dimensions that images get resized into when loaded
self.img_crop_dims = img_crop_dims # dimensions that images get randomly cropped to
self.shuffle = shuffle
self.on_epoch_end() # call ensures that samples are shuffled in first epoch if shuffle is set to True
def __len__(self):
return int(np.ceil(len(self.samples) / self.batch_size)) # number of batches per epoch
def __getitem__(self, index):
batch_indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] # get batch indexes
batch_samples = [self.samples[i] for i in batch_indexes] # get batch samples
X, y = self.__data_generator(batch_samples)
return X, y
def on_epoch_end(self):
self.indexes = np.arange(len(self.samples))
if self.shuffle is True:
np.random.shuffle(self.indexes)
def __data_generator(self, batch_samples):
# initialize images and labels tensors for faster processing
X = np.empty((len(batch_samples), *self.img_crop_dims, 3))
y = np.empty((len(batch_samples), self.n_classes))
for i, sample in enumerate(batch_samples):
# load and randomly augment image
img_file = os.path.join(self.img_dir, '{}'.format(sample['image_id']))
img = utils.load_image(img_file, self.img_load_dims)
if img is not None:
img = utils.random_crop(img, self.img_crop_dims)
img = utils.random_horizontal_flip(img)
X[i, ] = img
# normalize labels
y[i, ] = utils.normalize_labels(sample['label'])
# apply basenet specific preprocessing
# input is 4D numpy array of RGB values within [0, 255]
X = self.basenet_preprocess(X)
return X, y
class TestDataGenerator(tf.keras.utils.Sequence):
'''inherits from Keras Sequence base object, allows to use multiprocessing in .fit_generator'''
def __init__(self, samples, img_dir, batch_size, n_classes, basenet_preprocess,
img_load_dims=(224, 224)):
self.samples = samples
self.img_dir = img_dir
self.batch_size = batch_size
self.n_classes = n_classes
self.basenet_preprocess = basenet_preprocess # Keras basenet specific preprocessing function
self.img_load_dims = img_load_dims # dimensions that images get resized into when loaded
self.on_epoch_end() # call ensures that samples are shuffled in first epoch if shuffle is set to True
def __len__(self):
return int(np.ceil(len(self.samples) / self.batch_size)) # number of batches per epoch
def __getitem__(self, index):
batch_indexes = self.indexes[index*self.batch_size:(index+1)*self.batch_size] # get batch indexes
batch_samples = [self.samples[i] for i in batch_indexes] # get batch samples
X, y = self.__data_generator(batch_samples)
return X, y
def on_epoch_end(self):
self.indexes = np.arange(len(self.samples))
def __data_generator(self, batch_samples):
# initialize images and labels tensors for faster processing
X = np.empty((len(batch_samples), *self.img_load_dims, 3))
y = np.empty((len(batch_samples), self.n_classes))
for i, sample in enumerate(batch_samples):
# load and randomly augment image
img_file = os.path.join(self.img_dir, '{}'.format(sample['image_id']))
img = utils.load_image(img_file, self.img_load_dims)
if img is not None:
X[i, ] = img
# normalize labels
if sample.get('label') is not None:
y[i, ] = utils.normalize_labels(sample['label'])
# apply basenet specific preprocessing
# input is 4D numpy array of RGB values within [0, 255]
X = self.basenet_preprocess(X)
return X, y
| [
"image_quality.utils.utils.random_crop",
"image_quality.utils.utils.load_image",
"image_quality.utils.utils.random_horizontal_flip",
"image_quality.utils.utils.normalize_labels",
"numpy.random.shuffle"
]
| [((1440, 1471), 'numpy.random.shuffle', 'np.random.shuffle', (['self.indexes'], {}), '(self.indexes)\n', (1457, 1471), True, 'import numpy as np\n'), ((1878, 1924), 'image_quality.utils.utils.load_image', 'utils.load_image', (['img_file', 'self.img_load_dims'], {}), '(img_file, self.img_load_dims)\n', (1894, 1924), False, 'from image_quality.utils import utils\n'), ((2118, 2157), 'image_quality.utils.utils.normalize_labels', 'utils.normalize_labels', (["sample['label']"], {}), "(sample['label'])\n", (2140, 2157), False, 'from image_quality.utils import utils\n'), ((3876, 3922), 'image_quality.utils.utils.load_image', 'utils.load_image', (['img_file', 'self.img_load_dims'], {}), '(img_file, self.img_load_dims)\n', (3892, 3922), False, 'from image_quality.utils import utils\n'), ((1965, 2007), 'image_quality.utils.utils.random_crop', 'utils.random_crop', (['img', 'self.img_crop_dims'], {}), '(img, self.img_crop_dims)\n', (1982, 2007), False, 'from image_quality.utils import utils\n'), ((2022, 2055), 'image_quality.utils.utils.random_horizontal_flip', 'utils.random_horizontal_flip', (['img'], {}), '(img)\n', (2050, 2055), False, 'from image_quality.utils import utils\n'), ((4055, 4094), 'image_quality.utils.utils.normalize_labels', 'utils.normalize_labels', (["sample['label']"], {}), "(sample['label'])\n", (4077, 4094), False, 'from image_quality.utils import utils\n')] |
# -*- coding: utf-8 -*-
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp
import numpy as np
from qat.comm.quops.ttypes import QuantumChannel, RepresentationType
from qat.comm.datamodel.ttypes import Matrix, ComplexNumber
def array_to_matrix(array):
"""
    Transform a two dimensional numpy array to a myqlm Matrix.
    Args:
        array: (ndarray) a two dimensional numpy array
    Returns:
        (Matrix): a myqlm Matrix
    """
    assert len(array.shape) == 2, "The array must be two dimensional"
data = []
for arr in array:
for elem in arr:
data.append(ComplexNumber(np.real(elem), np.imag(elem)))
matri = Matrix(array.shape[0], array.shape[1], data)
return matri
def qiskit_to_qchannel(representation):
"""
Create a myqlm representation of quantum channel from a qiskit representation
of a quantum channel.
Args:
representation: (Kraus|Choi|Chi|SuperOp|PTM) qiskit representation of a quantum channel.
Returns:
(QuantumChannel): myqlm representation of a quantum channel.
"""
qchannel = None
qiskit_data = representation.data
# Find what representation it is.
# Then create the corresponding matrix (kraus_ops|basis|matrix)from the data
# of the representation.
# Finally, create the QuantumChannel with the RepresentationType, the arity
# (got from the qiskit representation) and the matrix.
if isinstance(representation, Kraus):
kraus_ops = []
for arr in qiskit_data:
kraus_ops.append(array_to_matrix(arr))
qchannel = QuantumChannel(
representation=RepresentationType.KRAUS,
arity=representation.num_qubits,
kraus_ops=kraus_ops)
elif isinstance(representation, Chi):
basis = []
basis.append(array_to_matrix(qiskit_data))
qchannel = QuantumChannel(
representation=RepresentationType.CHI,
arity=representation.num_qubits,
basis=basis)
elif isinstance(representation, SuperOp):
basis = []
basis.append(array_to_matrix(qiskit_data))
qchannel = QuantumChannel(
representation=RepresentationType.SUPEROP,
arity=representation.num_qubits,
basis=basis)
elif isinstance(representation, PTM):
matri = array_to_matrix(qiskit_data)
qchannel = QuantumChannel(
representation=RepresentationType.PTM,
arity=representation.num_qubits,
matrix=matri)
elif isinstance(representation, Choi):
matri = array_to_matrix(qiskit_data)
qchannel = QuantumChannel(
representation=RepresentationType.CHOI,
arity=representation.num_qubits,
matrix=matri)
return qchannel
def qchannel_to_qiskit(representation):
"""
Create a qiskit representation of quantum channel from a myqlm representation
of a quantum channel.
Args:
representation: (QuantumChannel) myqlm representation of a quantum channel.
Returns:
(Kraus|Choi|Chi|SuperOp|PTM): qiskit representation of a quantum channel.
"""
rep = representation.representation
# Find what representation it is.
# Then create the corresponding matrix and shape it like qiskit is expecting it.
# Finally, create the qiskit representation from that matrix.
if rep in (RepresentationType.PTM, RepresentationType.CHOI):
matri = representation.matrix
data_re = []
data_im = []
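        # the myqlm Matrix stores its entries as a flat, row-major list of ComplexNumber;
        # rebuild a 2-D complex ndarray from the separate real and imaginary parts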
for i in range(matri.nRows):
for j in range(matri.nCols):
data_re.append(matri.data[i * matri.nRows + j].re + 0.j)
data_im.append(matri.data[i * matri.nRows + j].im)
data = np.array(data_re)
data.imag = np.array(data_im)
data = data.reshape((matri.nRows, matri.nCols))
return PTM(data) if (rep == RepresentationType.PTM) else Choi(data)
if rep in (RepresentationType.CHI, RepresentationType.SUPEROP):
final_data = []
for matri in representation.basis:
data_re = []
data_im = []
for i in range(matri.nRows):
for j in range(matri.nCols):
data_re.append(matri.data[i * matri.nRows + j].re + 0.j)
data_im.append(matri.data[i * matri.nRows + j].im)
data = np.array(data_re)
data.imag = np.array(data_im)
data = data.reshape((matri.nRows, matri.nCols))
final_data.append(data)
if rep == RepresentationType.CHI:
return Chi(final_data) if len(final_data) > 1 else Chi(final_data[0])
return SuperOp(final_data) if len(final_data) > 1 else SuperOp(final_data[0])
if rep == RepresentationType.KRAUS:
final_data = []
for matri in representation.kraus_ops:
data_re = []
data_im = []
for i in range(matri.nRows):
for j in range(matri.nCols):
data_re.append(matri.data[i * matri.nRows + j].re + 0.j)
data_im.append(matri.data[i * matri.nRows + j].im)
data = np.array(data_re)
data.imag = np.array(data_im)
data = data.reshape((matri.nRows, matri.nCols))
final_data.append(data)
return Kraus(final_data)
return None
| [
"qiskit.quantum_info.operators.channel.Chi",
"qiskit.quantum_info.operators.channel.Kraus",
"qiskit.quantum_info.operators.channel.Choi",
"qiskit.quantum_info.operators.channel.PTM",
"qat.comm.quops.ttypes.QuantumChannel",
"numpy.array",
"qiskit.quantum_info.operators.channel.SuperOp",
"numpy.real",
"qat.comm.datamodel.ttypes.Matrix",
"numpy.imag"
]
| [((1512, 1556), 'qat.comm.datamodel.ttypes.Matrix', 'Matrix', (['array.shape[0]', 'array.shape[1]', 'data'], {}), '(array.shape[0], array.shape[1], data)\n', (1518, 1556), False, 'from qat.comm.datamodel.ttypes import Matrix, ComplexNumber\n'), ((2444, 2558), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', ([], {'representation': 'RepresentationType.KRAUS', 'arity': 'representation.num_qubits', 'kraus_ops': 'kraus_ops'}), '(representation=RepresentationType.KRAUS, arity=\n representation.num_qubits, kraus_ops=kraus_ops)\n', (2458, 2558), False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n'), ((4610, 4627), 'numpy.array', 'np.array', (['data_re'], {}), '(data_re)\n', (4618, 4627), True, 'import numpy as np\n'), ((4648, 4665), 'numpy.array', 'np.array', (['data_im'], {}), '(data_im)\n', (4656, 4665), True, 'import numpy as np\n'), ((6187, 6204), 'qiskit.quantum_info.operators.channel.Kraus', 'Kraus', (['final_data'], {}), '(final_data)\n', (6192, 6204), False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((2722, 2826), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', ([], {'representation': 'RepresentationType.CHI', 'arity': 'representation.num_qubits', 'basis': 'basis'}), '(representation=RepresentationType.CHI, arity=representation.\n num_qubits, basis=basis)\n', (2736, 2826), False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n'), ((4737, 4746), 'qiskit.quantum_info.operators.channel.PTM', 'PTM', (['data'], {}), '(data)\n', (4740, 4746), False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((4787, 4797), 'qiskit.quantum_info.operators.channel.Choi', 'Choi', (['data'], {}), '(data)\n', (4791, 4797), False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((5236, 5253), 'numpy.array', 'np.array', (['data_re'], {}), '(data_re)\n', (5244, 5253), True, 'import numpy as np\n'), ((5278, 5295), 'numpy.array', 'np.array', (['data_im'], {}), '(data_im)\n', (5286, 5295), True, 'import numpy as np\n'), ((5531, 5550), 'qiskit.quantum_info.operators.channel.SuperOp', 'SuperOp', (['final_data'], {}), '(final_data)\n', (5538, 5550), False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((5579, 5601), 'qiskit.quantum_info.operators.channel.SuperOp', 'SuperOp', (['final_data[0]'], {}), '(final_data[0])\n', (5586, 5601), False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((6016, 6033), 'numpy.array', 'np.array', (['data_re'], {}), '(data_re)\n', (6024, 6033), True, 'import numpy as np\n'), ((6058, 6075), 'numpy.array', 'np.array', (['data_im'], {}), '(data_im)\n', (6066, 6075), True, 'import numpy as np\n'), ((2994, 3102), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', ([], {'representation': 'RepresentationType.SUPEROP', 'arity': 'representation.num_qubits', 'basis': 'basis'}), '(representation=RepresentationType.SUPEROP, arity=\n representation.num_qubits, basis=basis)\n', (3008, 3102), False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n'), ((5453, 5468), 'qiskit.quantum_info.operators.channel.Chi', 'Chi', (['final_data'], {}), '(final_data)\n', (5456, 5468), False, 'from qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((5497, 5515), 'qiskit.quantum_info.operators.channel.Chi', 'Chi', (['final_data[0]'], {}), '(final_data[0])\n', (5500, 5515), False, 'from 
qiskit.quantum_info.operators.channel import Choi, PTM, Kraus, Chi, SuperOp\n'), ((1469, 1482), 'numpy.real', 'np.real', (['elem'], {}), '(elem)\n', (1476, 1482), True, 'import numpy as np\n'), ((1484, 1497), 'numpy.imag', 'np.imag', (['elem'], {}), '(elem)\n', (1491, 1497), True, 'import numpy as np\n'), ((3241, 3346), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', ([], {'representation': 'RepresentationType.PTM', 'arity': 'representation.num_qubits', 'matrix': 'matri'}), '(representation=RepresentationType.PTM, arity=representation.\n num_qubits, matrix=matri)\n', (3255, 3346), False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n'), ((3486, 3592), 'qat.comm.quops.ttypes.QuantumChannel', 'QuantumChannel', ([], {'representation': 'RepresentationType.CHOI', 'arity': 'representation.num_qubits', 'matrix': 'matri'}), '(representation=RepresentationType.CHOI, arity=representation\n .num_qubits, matrix=matri)\n', (3500, 3592), False, 'from qat.comm.quops.ttypes import QuantumChannel, RepresentationType\n')] |
""" util.auth2: Authentication tools
This module is based off of util.auth, except with the action
paradigm removed.
"""
from flask import session
from app.models import Account
from app.util import course as course_util
# Session keys
SESSION_EMAIL = 'email'
def create_account(email: str, password: str, first_name: str,
last_name: str, fsuid: str, course_list: list = []):
"""
Creates an account for a single user.
:email: Required, the email address of the user.
:password: Required, user's chosen password.
:first_name: Required, user's first name.
:last_name: Required, user's last name.
:fsuid: Optional, user's FSUID.
:course_list: Optional, courses being taken by user
:return: Account object.
"""
account = Account(
email=email,
first_name=first_name,
last_name=last_name,
fsuid=fsuid,
is_admin=False
)
# Set user's extra credit courses
course_util.set_courses(account, course_list)
account.set_password(password)
account.save()
return account
def get_account(email: str=None):
"""
    Retrieves the account for the given email (defaults to session['email']).
:email: Optional email string, if not provided will use session['email']
:return: Account if email is present in session, None otherwise.
"""
try:
email = email or session['email']
return Account.objects.get_or_404(email=email)
except:
return None
| [
"app.models.Account.objects.get_or_404",
"app.util.course.set_courses",
"app.models.Account"
]
| [((795, 893), 'app.models.Account', 'Account', ([], {'email': 'email', 'first_name': 'first_name', 'last_name': 'last_name', 'fsuid': 'fsuid', 'is_admin': '(False)'}), '(email=email, first_name=first_name, last_name=last_name, fsuid=\n fsuid, is_admin=False)\n', (802, 893), False, 'from app.models import Account\n'), ((978, 1023), 'app.util.course.set_courses', 'course_util.set_courses', (['account', 'course_list'], {}), '(account, course_list)\n', (1001, 1023), True, 'from app.util import course as course_util\n'), ((1464, 1503), 'app.models.Account.objects.get_or_404', 'Account.objects.get_or_404', ([], {'email': 'email'}), '(email=email)\n', (1490, 1503), False, 'from app.models import Account\n')] |
from PyQt5.QtWidgets import *
from matplotlib.backends.backend_qt5agg import FigureCanvas
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
class PstaticWidget(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.fig_pstatic = Figure()
self.fig_pstatic.set_facecolor('#ffffff')
self.canvas_pstatic = FigureCanvas(self.fig_pstatic)
vertical_layout = QVBoxLayout()
vertical_layout.addWidget(self.canvas_pstatic)
self.canvas_pstatic.axes_pstatic = self.canvas_pstatic.figure.add_subplot(111)
self.setLayout(vertical_layout)
self.canvas_pstatic.axes_pstatic.set_xticks([])
self.canvas_pstatic.axes_pstatic.set_yticks([])
self.canvas_pstatic.axes_pstatic.axis('off')
self.fig_pstatic.subplots_adjust(left=0.12, bottom=0.15, right=0.985, top=0.95)
self.toolbar = NavigationToolbar(self.canvas_pstatic, self)
self.toolbar.setFixedHeight(25)
vertical_layout.addWidget(self.toolbar) | [
"matplotlib.figure.Figure",
"matplotlib.backends.backend_qt5agg.NavigationToolbar2QT",
"matplotlib.backends.backend_qt5agg.FigureCanvas"
]
| [((360, 368), 'matplotlib.figure.Figure', 'Figure', ([], {}), '()\n', (366, 368), False, 'from matplotlib.figure import Figure\n'), ((451, 481), 'matplotlib.backends.backend_qt5agg.FigureCanvas', 'FigureCanvas', (['self.fig_pstatic'], {}), '(self.fig_pstatic)\n', (463, 481), False, 'from matplotlib.backends.backend_qt5agg import FigureCanvas\n'), ((991, 1035), 'matplotlib.backends.backend_qt5agg.NavigationToolbar2QT', 'NavigationToolbar', (['self.canvas_pstatic', 'self'], {}), '(self.canvas_pstatic, self)\n', (1008, 1035), True, 'from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar\n')] |
import os
class Config:
CSRF_ENABLED = True
SECRET_KEY = 'your-very-very-secret-key'
SQLALCHEMY_DATABASE_URI = 'postgresql:///flask_template_dev'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_ECHO = True
class Development(Config):
ENV = 'development'
DEBUG = True
TESTING = False
class Production(Config):
ENV = 'production'
DEBUG = False
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL', 'postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da')
| [
"os.getenv"
]
| [((419, 610), 'os.getenv', 'os.getenv', (['"""DATABASE_URL"""', '"""postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da"""'], {}), "('DATABASE_URL',\n 'postgres://firhokdcdnfygz:93231d3f2ae1156cabfc40f7e4ba08587a77f68a5e2072fbcbbdb30150ba4bcb@ec2-107-22-253-158.compute-1.amazonaws.com:5432/df9c5vvl0s21da'\n )\n", (428, 610), False, 'import os\n')] |
import itertools
import numpy as np
import pandas as pd
def find_intersections(formula_lists,group_labels,exclusive = True):
"""
Docstring for function pyKrev.find_intersections
====================
This function compares n lists of molecular formula and outputs a dictionary containing the intersections between each list.
Use
----
find_intersections([list_1,..,list_n],['group_1',...,'group_n'])
Returns a dictionary in which each key corresponds to a combination of group labels
and the corresponding value is a set containing the intersections between the groups in that combination.
Parameters
----------
formula_lists: a list containing n lists of molecular formula. Each item in the sub list should be a formula string.
group_labels: a list containing n strings of corresponding group labels.
exclusive: True or False, depending on whether you want the intersections to contain only unique values.
"""
if len(formula_lists) != len(group_labels):
        raise ValueError('formula_lists and group_labels must be of equal length')
combinations = [seq for i in range(0,len(group_labels)+1) for seq in itertools.combinations(group_labels,i) if len(seq) > 0]
combinations = sorted(combinations,key = lambda c : len(c),reverse = True) # sort combinations by length
if exclusive == True:
assigned_formula = set() #create a set that will hold all the formula already assigned to a group
amb = pd.DataFrame(data = formula_lists).T
amb.columns = group_labels
intersections = dict()
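    # combinations are visited from largest to smallest so that, when exclusive=True, each
    # formula is credited to the largest group intersection it belongs to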
for combo in combinations:
queries = []
for c in combo:
formula = list(filter(None,amb[c])) #Remove None entries introduced by dataframe
queries.append(set(formula))
if len(queries) == 1: #if there is only one query find the unique elements in it
q_set = frozenset(queries[0]) #qset is a frozen set, so it will not be mutated by changes to queries[0]
for f_list in formula_lists: #cycle all formula in formula_lists
set_f = frozenset(f_list) #convert f_list to sets, must be frozen so type matches q_set
if set_f == q_set: # ignore the set that corresponds to the query
pass
else:
queries[0] = queries[0] - set_f #delete any repeated elements in fset
intersections[combo] = queries[0]
elif len(queries) > 1:
if exclusive == True:
q_intersect = intersect(queries)
intersections[combo] = q_intersect - assigned_formula #remove any elements from q_intersect that have already been assigned
assigned_formula.update(q_intersect) #update the assigned_set with q_intersect
else:
intersections[combo] = intersect(queries)
return intersections
def intersect(samples,counter=0):
""" This command uses recursion to find the intersections between a variable number of sets given in samples.
Where samples = [set_1,set_2,...,set_n] """
if len(samples) == 1:
return samples[0]
a = samples[counter]
b = samples[counter+1::]
if len(b) == 1: #check to see whether the recursion has reached the final element
return a & b[0]
else:
counter += 1
return a & intersect(samples,counter) | [
"pandas.DataFrame",
"itertools.combinations"
]
| [((1483, 1515), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'formula_lists'}), '(data=formula_lists)\n', (1495, 1515), True, 'import pandas as pd\n'), ((1176, 1215), 'itertools.combinations', 'itertools.combinations', (['group_labels', 'i'], {}), '(group_labels, i)\n', (1198, 1215), False, 'import itertools\n')] |
import os
import glob
import shutil
from tinytag import TinyTag
""" root = 'C:/'
copy_to = '/copy to/folder'
tag = TinyTag.get('C:/Users/jchap/OneDrive/Pictures/(VERYRAREBOYZ) (feat. $ki Mask The Slump God and Drugz).mp3')
print(tag.artist)
print('song duration: '+str(tag.duration))
"""
f = []
f=glob.glob('C:/Users/jchap/OneDrive/*.mp3')
print(f)
musicDirectory=[]
musicFiles =[]
# tag = TinyTag.get(f[0])
# print(tag.artist)
# for root, dirs, files in os.walk("C:/Users/jchap/OneDrive/"):
for root, dirs, files in os.walk("C:/"):
for file in files:
if file.endswith(".mp3"):
musicFiles.append(file)
musicDirectory.append(os.path.join(root, file))
#print(os.path.join(root, file))
print('files'+str(musicFiles))
tag = TinyTag.get(musicDirectory[0])
print('Artist',tag.artist)
print('Album Artist',tag.albumartist)
print('Title',tag.title)
print('Biterate',tag.bitrate)
print('music directory'+str(musicDirectory))
print(len(musicDirectory))
currentDirectory =os.path.dirname(__file__)
with open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', "r") as f:
content_list = [word.strip() for word in f]
""" my_file = open(currentDirectory+'/The_Krabby_Patty Formula_.m3u', "r")
content_list = my_file. readlines() """
# print('playlist contents')
# print(content_list)
musicDirectory
musicWithoutDuplicates = []
duplicatesList = []
count =0
# check for tags equal to none
#musicDirectory =[x for x in musicDirectory j = TinyTag.get(x) if x != 'wdg']
#remove tracks without albumn artist or title
for track in reversed(range(len(musicDirectory))):
try:
trackTag = TinyTag.get(musicDirectory[track])
if str(trackTag.albumartist)== 'None' or str(trackTag.title)=='None':
print('albumArtist = none',musicDirectory[track])
print('removing track and adding to log file')
musicDirectory.remove(musicDirectory[track])
except IndexError:
break
#check for duplicates
for j in range(len(musicDirectory)):
musicDtag = TinyTag.get(musicDirectory[j])
duplicateL=[]
duplicateLBiterate=[]
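    # collect the paths and bitrates of every other copy of track j so that the
    # lower-bitrate copies can be marked as duplicates below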
for duplicate in range(len(musicDirectory)):
duplicateTag = TinyTag.get(musicDirectory[duplicate])
musicWithoutDuplicates.append(musicDirectory[j])
if duplicateTag.albumartist == musicDtag.albumartist or duplicateTag.albumartist in musicDtag.albumartist:
if duplicateTag.title == musicDtag.title or duplicateTag.title in musicDtag.title :
#check if last iteration
if duplicate>=len(musicDirectory)-1:
print("found a duplicate!",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)
if len(duplicateLBiterate)==1:## did something here may need to change the conditional statement or add another
print('biterate')
#[x for x in duplicateL if TinyTag.get(musicDirectory[x]).bitrate > musicDirectory[x]]
print("Current duplicate Bite rate", duplicateLBiterate)
for x in range(len(duplicateL)):
if TinyTag.get(duplicateL[x]).bitrate == max(duplicateLBiterate):
#REMOVE ONE WITH THE BEST BITERATE
duplicateL.remove(duplicateL[x])
print('duplicate list',duplicateL)
#Add
duplicatesList = duplicatesList + duplicateL
else:
print("found a duplicate!",musicDirectory[duplicate],duplicateTag.albumartist,duplicateTag.title)
duplicateL.append(musicDirectory[duplicate])
duplicateLBiterate.append(duplicateTag.bitrate)
print('dup ',duplicatesList)
#remove duplicates from list
for u in range(len(duplicatesList)):
for i in range(len(musicDirectory)):
if duplicatesList[u]==musicDirectory[i]:
musicDirectory.remove(musicDirectory[i])
print('music ',musicDirectory)
#create playlist
newPlaylist = open("Test.m3u", "w")
#add file path to the respective track in the new playlist
for content in enumerate(content_list):
# split strings into artist and title
trackNumber=content[0]
trackArray =str(content[1]).split('-')
albumArtist= trackArray[0].strip()
title=trackArray[1].strip()
print('title:',title)
print('albumArtist:',albumArtist)
for trackDirectory in range(len(musicDirectory)):
trackTag = TinyTag.get(musicDirectory[trackDirectory])
if trackTag.albumartist == albumArtist or trackTag.albumartist in albumArtist:
if trackTag.title == title or trackTag.title in title:
                # write the full path of the matched track alongside its playlist entry
                newPlaylist.write(musicDirectory[trackDirectory] + " " + content[1] + "\n")
                break
            else:
                print()
        else:
            print()
newPlaylist.close()
"os.path.join",
"os.path.dirname",
"tinytag.TinyTag.get",
"glob.glob",
"os.walk"
]
| [((313, 355), 'glob.glob', 'glob.glob', (['"""C:/Users/jchap/OneDrive/*.mp3"""'], {}), "('C:/Users/jchap/OneDrive/*.mp3')\n", (322, 355), False, 'import glob\n'), ((544, 558), 'os.walk', 'os.walk', (['"""C:/"""'], {}), "('C:/')\n", (551, 558), False, 'import os\n'), ((807, 837), 'tinytag.TinyTag.get', 'TinyTag.get', (['musicDirectory[0]'], {}), '(musicDirectory[0])\n', (818, 837), False, 'from tinytag import TinyTag\n'), ((1059, 1084), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1074, 1084), False, 'import os\n'), ((2139, 2169), 'tinytag.TinyTag.get', 'TinyTag.get', (['musicDirectory[j]'], {}), '(musicDirectory[j])\n', (2150, 2169), False, 'from tinytag import TinyTag\n'), ((1714, 1748), 'tinytag.TinyTag.get', 'TinyTag.get', (['musicDirectory[track]'], {}), '(musicDirectory[track])\n', (1725, 1748), False, 'from tinytag import TinyTag\n'), ((2300, 2338), 'tinytag.TinyTag.get', 'TinyTag.get', (['musicDirectory[duplicate]'], {}), '(musicDirectory[duplicate])\n', (2311, 2338), False, 'from tinytag import TinyTag\n'), ((4808, 4851), 'tinytag.TinyTag.get', 'TinyTag.get', (['musicDirectory[trackDirectory]'], {}), '(musicDirectory[trackDirectory])\n', (4819, 4851), False, 'from tinytag import TinyTag\n'), ((693, 717), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (705, 717), False, 'import os\n'), ((3360, 3386), 'tinytag.TinyTag.get', 'TinyTag.get', (['duplicateL[x]'], {}), '(duplicateL[x])\n', (3371, 3386), False, 'from tinytag import TinyTag\n')] |
#import modules
import os
import csv
#input
csvpath = os.path.join('Resources', 'budget_data.csv')
#output
outfile = os.path.join('Analysis', 'pybankstatements.txt')
#declare variables
months = []
total_m = 1
net_total = 0
total_change = 0
monthly_changes = []
greatest_inc = ['', 0]
greatest_dec = ['', 0]
#open & read csv
with open(csvpath) as csvfile:
csvreader = csv.reader(csvfile, delimiter=',')
header = next(csvreader)
first_row = next(csvreader)
previous_row = int(first_row[1])
net_total = int(first_row[1])
#loop
for row in csvreader:
net_total += int(row[1])
total_m = total_m+1
current_value = int(row[1])
change_value = int(current_value-previous_row)
monthly_changes.append(change_value)
months.append(row[0])
previous_row = int(row[1])
total_change = total_change + change_value
if change_value > greatest_inc[1]:
greatest_inc[0] = str(row[0])
greatest_inc[1] = change_value
if change_value < greatest_dec[1]:
greatest_dec[0] = str(row[0])
greatest_dec[1] = change_value
avg_change = total_change/len(months)
output = (
f"\n Financial Analysis \n"
f"------------------------------\n"
f"Total Months: {total_m}\n"
f"Total: ${net_total}\n"
f"Average Change: ${avg_change:.2f}\n"
f"Greatest Increase in Profits: {greatest_inc[0]} (${greatest_inc[1]})\n"
f"Greatest Decrease in Profits: {greatest_dec[0]} (${greatest_dec[1]})\n")
with open(outfile, "w") as txt_file:
txt_file.write(output)
outfile | [
"os.path.join",
"csv.reader"
]
| [((54, 98), 'os.path.join', 'os.path.join', (['"""Resources"""', '"""budget_data.csv"""'], {}), "('Resources', 'budget_data.csv')\n", (66, 98), False, 'import os\n'), ((117, 165), 'os.path.join', 'os.path.join', (['"""Analysis"""', '"""pybankstatements.txt"""'], {}), "('Analysis', 'pybankstatements.txt')\n", (129, 165), False, 'import os\n'), ((379, 413), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (389, 413), False, 'import csv\n')] |
from rest_framework import serializers
from cms.api.serializers import UniCMSContentTypeClass, UniCMSCreateUpdateSerializer
from cms.medias.serializers import MediaSerializer
from . models import Carousel, CarouselItem, CarouselItemLink, CarouselItemLinkLocalization, CarouselItemLocalization
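# The PrimaryKeyRelatedField subclasses below restrict each field's queryset to the
# carousel, carousel item or item link identified by the URL kwargs of the current request.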
class CarouselForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
return Carousel.objects.filter(pk=carousel_id)
return None # pragma: no cover
class CarouselItemForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
item_id = self.context['request'].parser_context['kwargs']['carousel_item_id']
return CarouselItem.objects.filter(pk=item_id,
carousel__pk=carousel_id)
return None # pragma: no cover
class CarouselItemLinkForeignKey(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
request = self.context.get('request', None)
if request:
carousel_id = self.context['request'].parser_context['kwargs']['carousel_id']
item_id = self.context['request'].parser_context['kwargs']['carousel_item_id']
link_id = self.context['request'].parser_context['kwargs']['carousel_item_link_id']
return CarouselItemLink.objects.filter(pk=link_id,
carousel_item__pk=item_id,
carousel_item__carousel__pk=carousel_id)
return None # pragma: no cover
class CarouselSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
class Meta:
model = Carousel
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel = CarouselForeignKey()
def to_representation(self, instance):
data = super().to_representation(instance)
image = MediaSerializer(instance.image)
data['image'] = image.data
return data
class Meta:
model = CarouselItem
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemLocalizationSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item = CarouselItemForeignKey()
class Meta:
model = CarouselItemLocalization
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselItemLinkSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item = CarouselItemForeignKey()
class Meta:
model = CarouselItemLink
fields = '__all__'
class CarouselItemLinkLocalizationSerializer(UniCMSCreateUpdateSerializer,
UniCMSContentTypeClass):
carousel_item_link = CarouselItemLinkForeignKey()
class Meta:
model = CarouselItemLinkLocalization
fields = '__all__'
read_only_fields = ('created_by', 'modified_by')
class CarouselSelectOptionsSerializer(serializers.ModelSerializer):
def to_representation(self, instance):
data = super().to_representation(instance)
data['value'] = instance.pk
data['text'] = instance.name
return data
class Meta:
model = Carousel
fields = ()
| [
"cms.medias.serializers.MediaSerializer"
]
| [((2384, 2415), 'cms.medias.serializers.MediaSerializer', 'MediaSerializer', (['instance.image'], {}), '(instance.image)\n', (2399, 2415), False, 'from cms.medias.serializers import MediaSerializer\n')] |
#!/usr/bin/env python3
"""
Copyright (c) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from openvino.runtime import Core, get_version
import cv2 as cv
import numpy as np
import logging as log
from time import perf_counter
import sys
from argparse import ArgumentParser, SUPPRESS
from pathlib import Path
sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python'))
sys.path.append(str(Path(__file__).resolve().parents[2] / 'common/python/openvino/model_zoo'))
import monitors
from images_capture import open_images_capture
from model_api.performance_metrics import PerformanceMetrics
log.basicConfig(format='[ %(levelname)s ] %(message)s', level=log.DEBUG, stream=sys.stdout)
def build_arg():
parser = ArgumentParser(add_help=False)
in_args = parser.add_argument_group('Options')
in_args.add_argument('-h', '--help', action='help', default=SUPPRESS, help='Help with the script.')
in_args.add_argument("-m", "--model", help="Required. Path to .xml file with pre-trained model.",
required=True, type=Path)
in_args.add_argument("-d", "--device",
help="Optional. Specify target device for infer: CPU, GPU, HDDL or MYRIAD. "
"Default: CPU",
default="CPU", type=str)
in_args.add_argument('-i', "--input", required=True,
help='Required. An input to process. The input must be a single image, '
'a folder of images, video file or camera id.')
in_args.add_argument('--loop', default=False, action='store_true',
help='Optional. Enable reading the input in a loop.')
in_args.add_argument('-o', '--output', required=False,
help='Optional. Name of the output file(s) to save.')
in_args.add_argument('-limit', '--output_limit', required=False, default=1000, type=int,
help='Optional. Number of frames to store in output. '
'If 0 is set, all frames are stored.')
in_args.add_argument("--no_show", help="Optional. Don't show output.",
action='store_true', default=False)
in_args.add_argument("-u", "--utilization_monitors", default="", type=str,
help="Optional. List of monitors to show initially.")
return parser
def main(args):
cap = open_images_capture(args.input, args.loop)
log.info('OpenVINO Inference Engine')
log.info('\tbuild: {}'.format(get_version()))
core = Core()
log.info('Reading model {}'.format(args.model))
model = core.read_model(args.model, args.model.with_suffix(".bin"))
input_tensor_name = 'data_l'
input_shape = model.input(input_tensor_name).shape
assert input_shape[1] == 1, "Expected model input shape with 1 channel"
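    # pre-allocate zero tensors for every model input; only the L-channel input is refreshed per frame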
inputs = {}
for input in model.inputs:
inputs[input.get_any_name()] = np.zeros(input.shape)
assert len(model.outputs) == 1, "Expected number of outputs is equal 1"
compiled_model = core.compile_model(model, device_name=args.device)
infer_request = compiled_model.create_infer_request()
log.info('The model {} is loaded to {}'.format(args.model, args.device))
_, _, h_in, w_in = input_shape
frames_processed = 0
imshow_size = (640, 480)
graph_size = (imshow_size[0] // 2, imshow_size[1] // 4)
presenter = monitors.Presenter(args.utilization_monitors, imshow_size[1] * 2 - graph_size[1], graph_size)
metrics = PerformanceMetrics()
video_writer = cv.VideoWriter()
if args.output and not video_writer.open(args.output, cv.VideoWriter_fourcc(*'MJPG'),
cap.fps(), (imshow_size[0] * 2, imshow_size[1] * 2)):
raise RuntimeError("Can't open video writer")
start_time = perf_counter()
original_frame = cap.read()
if original_frame is None:
raise RuntimeError("Can't read an image from the input")
while original_frame is not None:
(h_orig, w_orig) = original_frame.shape[:2]
if original_frame.shape[2] > 1:
frame = cv.cvtColor(cv.cvtColor(original_frame, cv.COLOR_BGR2GRAY), cv.COLOR_GRAY2RGB)
else:
frame = cv.cvtColor(original_frame, cv.COLOR_GRAY2RGB)
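        # convert the frame to LAB colour space; the resized L channel is the network input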
img_rgb = frame.astype(np.float32) / 255
img_lab = cv.cvtColor(img_rgb, cv.COLOR_RGB2Lab)
img_l_rs = cv.resize(img_lab.copy(), (w_in, h_in))[:, :, 0]
inputs[input_tensor_name] = np.expand_dims(img_l_rs, axis=[0, 1])
res = next(iter(infer_request.infer(inputs).values()))
update_res = np.squeeze(res)
out = update_res.transpose((1, 2, 0))
out = cv.resize(out, (w_orig, h_orig))
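        # recombine the original-resolution L channel with the predicted ab channels, then convert back to BGR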
img_lab_out = np.concatenate((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2)
img_bgr_out = np.clip(cv.cvtColor(img_lab_out, cv.COLOR_Lab2BGR), 0, 1)
original_image = cv.resize(original_frame, imshow_size)
grayscale_image = cv.resize(frame, imshow_size)
colorize_image = (cv.resize(img_bgr_out, imshow_size) * 255).astype(np.uint8)
lab_image = cv.resize(img_lab_out, imshow_size).astype(np.uint8)
original_image = cv.putText(original_image, 'Original', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
grayscale_image = cv.putText(grayscale_image, 'Grayscale', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
colorize_image = cv.putText(colorize_image, 'Colorize', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
lab_image = cv.putText(lab_image, 'LAB interpretation', (25, 50),
cv.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)
ir_image = [cv.hconcat([original_image, grayscale_image]),
cv.hconcat([lab_image, colorize_image])]
final_image = cv.vconcat(ir_image)
metrics.update(start_time, final_image)
frames_processed += 1
if video_writer.isOpened() and (args.output_limit <= 0 or frames_processed <= args.output_limit):
video_writer.write(final_image)
presenter.drawGraphs(final_image)
if not args.no_show:
cv.imshow('Colorization Demo', final_image)
key = cv.waitKey(1)
if key in {ord("q"), ord("Q"), 27}:
break
presenter.handleKey(key)
start_time = perf_counter()
original_frame = cap.read()
metrics.log_total()
for rep in presenter.reportMeans():
log.info(rep)
if __name__ == "__main__":
args = build_arg().parse_args()
sys.exit(main(args) or 0)
| [
"cv2.vconcat",
"model_api.performance_metrics.PerformanceMetrics",
"images_capture.open_images_capture",
"cv2.imshow",
"logging.info",
"argparse.ArgumentParser",
"pathlib.Path",
"time.perf_counter",
"cv2.VideoWriter",
"numpy.concatenate",
"cv2.VideoWriter_fourcc",
"cv2.waitKey",
"numpy.squeeze",
"cv2.putText",
"cv2.cvtColor",
"cv2.resize",
"cv2.hconcat",
"logging.basicConfig",
"openvino.runtime.Core",
"numpy.zeros",
"numpy.expand_dims",
"monitors.Presenter",
"openvino.runtime.get_version"
]
| [((1125, 1220), 'logging.basicConfig', 'log.basicConfig', ([], {'format': '"""[ %(levelname)s ] %(message)s"""', 'level': 'log.DEBUG', 'stream': 'sys.stdout'}), "(format='[ %(levelname)s ] %(message)s', level=log.DEBUG,\n stream=sys.stdout)\n", (1140, 1220), True, 'import logging as log\n'), ((1249, 1279), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (1263, 1279), False, 'from argparse import ArgumentParser, SUPPRESS\n'), ((2931, 2973), 'images_capture.open_images_capture', 'open_images_capture', (['args.input', 'args.loop'], {}), '(args.input, args.loop)\n', (2950, 2973), False, 'from images_capture import open_images_capture\n'), ((2979, 3016), 'logging.info', 'log.info', (['"""OpenVINO Inference Engine"""'], {}), "('OpenVINO Inference Engine')\n", (2987, 3016), True, 'import logging as log\n'), ((3078, 3084), 'openvino.runtime.Core', 'Core', ([], {}), '()\n', (3082, 3084), False, 'from openvino.runtime import Core, get_version\n'), ((3936, 4033), 'monitors.Presenter', 'monitors.Presenter', (['args.utilization_monitors', '(imshow_size[1] * 2 - graph_size[1])', 'graph_size'], {}), '(args.utilization_monitors, imshow_size[1] * 2 -\n graph_size[1], graph_size)\n', (3954, 4033), False, 'import monitors\n'), ((4044, 4064), 'model_api.performance_metrics.PerformanceMetrics', 'PerformanceMetrics', ([], {}), '()\n', (4062, 4064), False, 'from model_api.performance_metrics import PerformanceMetrics\n'), ((4085, 4101), 'cv2.VideoWriter', 'cv.VideoWriter', ([], {}), '()\n', (4099, 4101), True, 'import cv2 as cv\n'), ((4363, 4377), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (4375, 4377), False, 'from time import perf_counter\n'), ((3462, 3483), 'numpy.zeros', 'np.zeros', (['input.shape'], {}), '(input.shape)\n', (3470, 3483), True, 'import numpy as np\n'), ((4886, 4924), 'cv2.cvtColor', 'cv.cvtColor', (['img_rgb', 'cv.COLOR_RGB2Lab'], {}), '(img_rgb, cv.COLOR_RGB2Lab)\n', (4897, 4924), True, 'import cv2 as cv\n'), ((5030, 5067), 'numpy.expand_dims', 'np.expand_dims', (['img_l_rs'], {'axis': '[0, 1]'}), '(img_l_rs, axis=[0, 1])\n', (5044, 5067), True, 'import numpy as np\n'), ((5154, 5169), 'numpy.squeeze', 'np.squeeze', (['res'], {}), '(res)\n', (5164, 5169), True, 'import numpy as np\n'), ((5231, 5263), 'cv2.resize', 'cv.resize', (['out', '(w_orig, h_orig)'], {}), '(out, (w_orig, h_orig))\n', (5240, 5263), True, 'import cv2 as cv\n'), ((5286, 5351), 'numpy.concatenate', 'np.concatenate', (['(img_lab[:, :, 0][:, :, np.newaxis], out)'], {'axis': '(2)'}), '((img_lab[:, :, 0][:, :, np.newaxis], out), axis=2)\n', (5300, 5351), True, 'import numpy as np\n'), ((5458, 5496), 'cv2.resize', 'cv.resize', (['original_frame', 'imshow_size'], {}), '(original_frame, imshow_size)\n', (5467, 5496), True, 'import cv2 as cv\n'), ((5523, 5552), 'cv2.resize', 'cv.resize', (['frame', 'imshow_size'], {}), '(frame, imshow_size)\n', (5532, 5552), True, 'import cv2 as cv\n'), ((5738, 5846), 'cv2.putText', 'cv.putText', (['original_image', '"""Original"""', '(25, 50)', 'cv.FONT_HERSHEY_SIMPLEX', '(1)', '(0, 0, 255)', '(2)', 'cv.LINE_AA'], {}), "(original_image, 'Original', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,\n (0, 0, 255), 2, cv.LINE_AA)\n", (5748, 5846), True, 'import cv2 as cv\n'), ((5905, 6015), 'cv2.putText', 'cv.putText', (['grayscale_image', '"""Grayscale"""', '(25, 50)', 'cv.FONT_HERSHEY_SIMPLEX', '(1)', '(0, 0, 255)', '(2)', 'cv.LINE_AA'], {}), "(grayscale_image, 'Grayscale', (25, 50), cv.FONT_HERSHEY_SIMPLEX,\n 1, (0, 0, 255), 2, cv.LINE_AA)\n", 
(5915, 6015), True, 'import cv2 as cv\n'), ((6074, 6182), 'cv2.putText', 'cv.putText', (['colorize_image', '"""Colorize"""', '(25, 50)', 'cv.FONT_HERSHEY_SIMPLEX', '(1)', '(0, 0, 255)', '(2)', 'cv.LINE_AA'], {}), "(colorize_image, 'Colorize', (25, 50), cv.FONT_HERSHEY_SIMPLEX, 1,\n (0, 0, 255), 2, cv.LINE_AA)\n", (6084, 6182), True, 'import cv2 as cv\n'), ((6235, 6349), 'cv2.putText', 'cv.putText', (['lab_image', '"""LAB interpretation"""', '(25, 50)', 'cv.FONT_HERSHEY_SIMPLEX', '(1)', '(0, 0, 255)', '(2)', 'cv.LINE_AA'], {}), "(lab_image, 'LAB interpretation', (25, 50), cv.\n FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv.LINE_AA)\n", (6245, 6349), True, 'import cv2 as cv\n'), ((6527, 6547), 'cv2.vconcat', 'cv.vconcat', (['ir_image'], {}), '(ir_image)\n', (6537, 6547), True, 'import cv2 as cv\n'), ((7066, 7080), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (7078, 7080), False, 'from time import perf_counter\n'), ((7190, 7203), 'logging.info', 'log.info', (['rep'], {}), '(rep)\n', (7198, 7203), True, 'import logging as log\n'), ((3051, 3064), 'openvino.runtime.get_version', 'get_version', ([], {}), '()\n', (3062, 3064), False, 'from openvino.runtime import Core, get_version\n'), ((4771, 4817), 'cv2.cvtColor', 'cv.cvtColor', (['original_frame', 'cv.COLOR_GRAY2RGB'], {}), '(original_frame, cv.COLOR_GRAY2RGB)\n', (4782, 4817), True, 'import cv2 as cv\n'), ((5382, 5424), 'cv2.cvtColor', 'cv.cvtColor', (['img_lab_out', 'cv.COLOR_Lab2BGR'], {}), '(img_lab_out, cv.COLOR_Lab2BGR)\n', (5393, 5424), True, 'import cv2 as cv\n'), ((6397, 6442), 'cv2.hconcat', 'cv.hconcat', (['[original_image, grayscale_image]'], {}), '([original_image, grayscale_image])\n', (6407, 6442), True, 'import cv2 as cv\n'), ((6464, 6503), 'cv2.hconcat', 'cv.hconcat', (['[lab_image, colorize_image]'], {}), '([lab_image, colorize_image])\n', (6474, 6503), True, 'import cv2 as cv\n'), ((6862, 6905), 'cv2.imshow', 'cv.imshow', (['"""Colorization Demo"""', 'final_image'], {}), "('Colorization Demo', final_image)\n", (6871, 6905), True, 'import cv2 as cv\n'), ((6924, 6937), 'cv2.waitKey', 'cv.waitKey', (['(1)'], {}), '(1)\n', (6934, 6937), True, 'import cv2 as cv\n'), ((4160, 4190), 'cv2.VideoWriter_fourcc', 'cv.VideoWriter_fourcc', (["*'MJPG'"], {}), "(*'MJPG')\n", (4181, 4190), True, 'import cv2 as cv\n'), ((4670, 4716), 'cv2.cvtColor', 'cv.cvtColor', (['original_frame', 'cv.COLOR_BGR2GRAY'], {}), '(original_frame, cv.COLOR_BGR2GRAY)\n', (4681, 4716), True, 'import cv2 as cv\n'), ((5659, 5694), 'cv2.resize', 'cv.resize', (['img_lab_out', 'imshow_size'], {}), '(img_lab_out, imshow_size)\n', (5668, 5694), True, 'import cv2 as cv\n'), ((5579, 5614), 'cv2.resize', 'cv.resize', (['img_bgr_out', 'imshow_size'], {}), '(img_bgr_out, imshow_size)\n', (5588, 5614), True, 'import cv2 as cv\n'), ((848, 862), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (852, 862), False, 'from pathlib import Path\n'), ((924, 938), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (928, 938), False, 'from pathlib import Path\n')] |
# coding: utf-8
"""
[AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) © 2016‐2017 Starfinanz - Ein Unternehmen der Finanz Informatik # noqa: E501
OpenAPI spec version: 2.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.amount import Amount # noqa: F401,E501
class Transfer(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'iban': 'str',
'bic': 'str',
'name': 'str',
'amount': 'Amount',
'purpose': 'str',
'tan_media_id': 'str',
'tan_scheme': 'str'
}
attribute_map = {
'iban': 'iban',
'bic': 'bic',
'name': 'name',
'amount': 'amount',
'purpose': 'purpose',
'tan_media_id': 'tanMediaId',
'tan_scheme': 'tanScheme'
}
def __init__(self, iban=None, bic=None, name=None, amount=None, purpose=None, tan_media_id=None, tan_scheme=None): # noqa: E501
"""Transfer - a model defined in Swagger""" # noqa: E501
self._iban = None
self._bic = None
self._name = None
self._amount = None
self._purpose = None
self._tan_media_id = None
self._tan_scheme = None
self.discriminator = None
self.iban = iban
if bic is not None:
self.bic = bic
self.name = name
self.amount = amount
if purpose is not None:
self.purpose = purpose
self.tan_media_id = tan_media_id
self.tan_scheme = tan_scheme
@property
def iban(self):
"""Gets the iban of this Transfer. # noqa: E501
IBAN - International Bank Account Number (defined in ISO 13616-1) # noqa: E501
:return: The iban of this Transfer. # noqa: E501
:rtype: str
"""
return self._iban
@iban.setter
def iban(self, iban):
"""Sets the iban of this Transfer.
IBAN - International Bank Account Number (defined in ISO 13616-1) # noqa: E501
:param iban: The iban of this Transfer. # noqa: E501
:type: str
"""
if iban is None:
raise ValueError("Invalid value for `iban`, must not be `None`") # noqa: E501
self._iban = iban
@property
def bic(self):
"""Gets the bic of this Transfer. # noqa: E501
BIC - Business Identifier Code (defined in ISO-9362) # noqa: E501
:return: The bic of this Transfer. # noqa: E501
:rtype: str
"""
return self._bic
@bic.setter
def bic(self, bic):
"""Sets the bic of this Transfer.
BIC - Business Identifier Code (defined in ISO-9362) # noqa: E501
:param bic: The bic of this Transfer. # noqa: E501
:type: str
"""
self._bic = bic
@property
def name(self):
"""Gets the name of this Transfer. # noqa: E501
Name - Name of the creditor # noqa: E501
:return: The name of this Transfer. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this Transfer.
Name - Name of the creditor # noqa: E501
:param name: The name of this Transfer. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def amount(self):
"""Gets the amount of this Transfer. # noqa: E501
        Amount to be transferred  # noqa: E501
:return: The amount of this Transfer. # noqa: E501
:rtype: Amount
"""
return self._amount
@amount.setter
def amount(self, amount):
"""Sets the amount of this Transfer.
        Amount to be transferred  # noqa: E501
:param amount: The amount of this Transfer. # noqa: E501
:type: Amount
"""
if amount is None:
raise ValueError("Invalid value for `amount`, must not be `None`") # noqa: E501
self._amount = amount
@property
def purpose(self):
"""Gets the purpose of this Transfer. # noqa: E501
Purpose # noqa: E501
:return: The purpose of this Transfer. # noqa: E501
:rtype: str
"""
return self._purpose
@purpose.setter
def purpose(self, purpose):
"""Sets the purpose of this Transfer.
Purpose # noqa: E501
:param purpose: The purpose of this Transfer. # noqa: E501
:type: str
"""
self._purpose = purpose
@property
def tan_media_id(self):
"""Gets the tan_media_id of this Transfer. # noqa: E501
TANMediaId - The identifying ID of the TANMedia. # noqa: E501
:return: The tan_media_id of this Transfer. # noqa: E501
:rtype: str
"""
return self._tan_media_id
@tan_media_id.setter
def tan_media_id(self, tan_media_id):
"""Sets the tan_media_id of this Transfer.
TANMediaId - The identifying ID of the TANMedia. # noqa: E501
:param tan_media_id: The tan_media_id of this Transfer. # noqa: E501
:type: str
"""
if tan_media_id is None:
raise ValueError("Invalid value for `tan_media_id`, must not be `None`") # noqa: E501
self._tan_media_id = tan_media_id
@property
def tan_scheme(self):
"""Gets the tan_scheme of this Transfer. # noqa: E501
TANScheme - The scheme **id** that is used to verify this payment (e.g. \"901\") # noqa: E501
:return: The tan_scheme of this Transfer. # noqa: E501
:rtype: str
"""
return self._tan_scheme
@tan_scheme.setter
def tan_scheme(self, tan_scheme):
"""Sets the tan_scheme of this Transfer.
TANScheme - The scheme **id** that is used to verify this payment (e.g. \"901\") # noqa: E501
:param tan_scheme: The tan_scheme of this Transfer. # noqa: E501
:type: str
"""
if tan_scheme is None:
raise ValueError("Invalid value for `tan_scheme`, must not be `None`") # noqa: E501
self._tan_scheme = tan_scheme
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Transfer):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"six.iteritems"
]
| [((6926, 6959), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (6939, 6959), False, 'import six\n')] |
# Copyright 2015-2017 ARM Limited, Google and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from builtins import chr
import os
import json
import shutil
import sys
import unittest
import utils_tests
import trappy
from trappy.ftrace import GenericFTrace
from trappy.systrace import SysTrace
class TestCaching(utils_tests.SetupDirectory):
def __init__(self, *args, **kwargs):
super(TestCaching, self).__init__(
[("trace_sched.txt", "trace.txt"),
("trace_sched.txt", "trace.raw.txt"),
("trace_systrace.html", "trace.html")],
*args,
**kwargs)
def test_cache_created(self):
"""Test cache creation when enabled"""
GenericFTrace.disable_cache = False
traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))
for trace in traces:
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
self.assertTrue(cache_dir in os.listdir(trace_dir))
def test_cache_not_created(self):
"""Test that cache should not be created when disabled """
GenericFTrace.disable_cache = True
traces = (trappy.FTrace(), trappy.SysTrace(path='./trace.html'))
for trace in traces:
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
self.assertFalse(cache_dir in os.listdir(trace_dir))
def test_compare_cached_vs_uncached(self):
""" Test that the cached and uncached traces are same """
# Build the cache, but the actual trace will be parsed
# fresh since this is a first time parse
GenericFTrace.disable_cache = False
uncached_trace = trappy.FTrace()
uncached_dfr = uncached_trace.sched_wakeup.data_frame
# Now read from previously parsed cache by reusing the path
cached_trace = trappy.FTrace(uncached_trace.trace_path)
cached_dfr = cached_trace.sched_wakeup.data_frame
# By default, the str to float conversion done when reading from csv is
# different from the one used when reading from the trace.txt file.
#
# Here's an example:
# - trace.txt string timestamps:
# [76.402065, 80.402065, 80.001337]
# - parsed dataframe timestamps:
# [76.402065000000007, 80.402065000000007, 82.001337000000007]
#
# - csv string timestamps:
# [76.402065, 80.402065, 80.001337]
# - cached dataframe timestamps:
# [76.402064999999993, 80.402064999999993, 82.001337000000007]
#
# To fix this, the timestamps read from the cache are converted using
# the same conversion method as the trace.txt parser, which results in
# cache-read timestamps being identical to trace-read timestamps.
#
# This test ensures that this stays true.
cached_times = [r[0] for r in cached_dfr.iterrows()]
uncached_times = [r[0] for r in uncached_dfr.iterrows()]
self.assertTrue(cached_times == uncached_times)
# compare other columns as well
self.assertTrue([r[1].pid for r in cached_dfr.iterrows()] ==
[r[1].pid for r in uncached_dfr.iterrows()])
self.assertTrue([r[1].comm for r in cached_dfr.iterrows()] ==
[r[1].comm for r in uncached_dfr.iterrows()])
self.assertTrue([r[1].prio for r in cached_dfr.iterrows()] ==
[r[1].prio for r in uncached_dfr.iterrows()])
def test_invalid_cache_overwritten(self):
"""Test a cache with a bad checksum is overwritten"""
# This is a directory so we can't use the files_to_copy arg of
# SetUpDirectory, just do it ourselves.
cache_path = ".trace.txt.cache"
src = os.path.join(utils_tests.TESTS_DIRECTORY, "trace_sched.txt.cache")
shutil.copytree(src, cache_path)
metadata_path = os.path.join(cache_path, "metadata.json")
def read_metadata():
with open(metadata_path, "r") as f:
return json.load(f)
def write_md5(md5):
metadata = read_metadata()
metadata["md5sum"] = md5
with open(metadata_path, "w") as f:
json.dump(metadata, f)
# Change 1 character of the stored checksum
md5sum = read_metadata()["md5sum"]
md5sum_inc = md5sum[:-1] + chr(ord(md5sum[-1]) + 1)
write_md5(md5sum_inc)
# Parse a trace, this should delete and overwrite the invalidated cache
GenericFTrace.disable_cache = False
trace = trappy.FTrace()
# Check that the modified md5sum was overwritten
self.assertNotEqual(read_metadata()["md5sum"], md5sum_inc,
"The invalid ftrace cache wasn't overwritten")
def test_cache_dynamic_events(self):
"""Test that caching works if new event parsers have been registered"""
# Parse the trace to create a cache
GenericFTrace.disable_cache = False
trace1 = trappy.FTrace()
# Check we're actually testing what we think we are
if hasattr(trace1, 'dynamic_event'):
raise RuntimeError('Test bug: found unexpected event in trace')
# Now register a new event type, call the constructor again, and check
# that the newly added event (which is not present in the cache) is
# parsed.
parse_class = trappy.register_dynamic_ftrace("DynamicEvent", "dynamic_test_key")
trace2 = trappy.FTrace()
self.assertTrue(len(trace2.dynamic_event.data_frame) == 1)
trappy.unregister_dynamic_ftrace(parse_class)
def test_cache_normalize_time(self):
"""Test that caching doesn't break normalize_time"""
GenericFTrace.disable_cache = False
# Times in trace_sched.txt
start_time = 6550.018511
first_freq_event_time = 6550.056870
# Parse without normalizing time
trace1 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],
normalize_time=False)
self.assertEqual(trace1.cpu_frequency.data_frame.index[0],
first_freq_event_time)
# Parse with normalized time
trace2 = trappy.FTrace(events=['cpu_frequency', 'sched_wakeup'],
normalize_time=True)
self.assertEqual(trace2.cpu_frequency.data_frame.index[0],
first_freq_event_time - start_time)
def test_cache_window_broad(self):
"""Test that caching doesn't break the 'window' parameter"""
GenericFTrace.disable_cache = False
trace1 = trappy.FTrace(
events=['sched_wakeup'],
window=(0, 1))
# Check that we're testing what we think we're testing The trace
# contains 2 sched_wakeup events; this window should get rid of one of
# them.
if len(trace1.sched_wakeup.data_frame) != 1:
raise RuntimeError('Test bug: bad sched_wakeup event count')
# Parse again without the window
trace1 = trappy.FTrace(
events=['sched_wakeup'],
window=(0, None))
self.assertEqual(len(trace1.sched_wakeup.data_frame), 2)
def test_cache_window_narrow(self):
"""
Test that applying a window to a cached trace returns EXACTLY what is expected
"""
# As described in test_compare_cache_vs_uncached, reading from cache
# results in slightly different timestamps
#
# This test verifies that applying windows results in identical
# dataframes whether cache is used or not.
GenericFTrace.disable_cache = False
uncached_trace = trappy.FTrace()
trace = trappy.FTrace(uncached_trace.trace_path,
normalize_time=False,
abs_window=(6550.100000, 6552.000002))
self.assertAlmostEquals(trace.get_duration(), 1.900002)
self.assertEqual(len(trace.sched_wakeup.data_frame), 2)
self.assertEqual(len(trace.sched_wakeup_new.data_frame), 1)
def test_ftrace_metadata(self):
"""Test that caching keeps trace metadata"""
GenericFTrace.disable_cache = False
self.test_cache_created()
trace = trappy.FTrace()
version = int(trace._version)
cpus = int(trace._cpus)
self.assertEqual(version, 6)
self.assertEqual(cpus, 6)
def test_cache_delete_single(self):
GenericFTrace.disable_cache = False
trace = trappy.FTrace()
trace_path = os.path.abspath(trace.trace_path)
trace_dir = os.path.dirname(trace_path)
trace_file = os.path.basename(trace_path)
cache_dir = '.' + trace_file + '.cache'
number_of_trace_categories = 31
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)
os.remove(os.path.join(cache_dir, 'SchedWakeup.csv'))
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories - 1)
# Generate trace again, should regenerate only the missing item
trace = trappy.FTrace()
self.assertEqual(len(os.listdir(cache_dir)), number_of_trace_categories)
for c in trace.trace_classes:
if isinstance(c, trace.class_definitions['sched_wakeup']):
self.assertEqual(c.cached, False)
continue
self.assertEqual(c.cached, True)
| [
"os.listdir",
"trappy.register_dynamic_ftrace",
"os.path.join",
"shutil.copytree",
"os.path.dirname",
"json.load",
"os.path.basename",
"os.path.abspath",
"trappy.FTrace",
"trappy.unregister_dynamic_ftrace",
"json.dump",
"trappy.SysTrace"
]
| [((2584, 2599), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (2597, 2599), False, 'import trappy\n'), ((2754, 2794), 'trappy.FTrace', 'trappy.FTrace', (['uncached_trace.trace_path'], {}), '(uncached_trace.trace_path)\n', (2767, 2794), False, 'import trappy\n'), ((4683, 4749), 'os.path.join', 'os.path.join', (['utils_tests.TESTS_DIRECTORY', '"""trace_sched.txt.cache"""'], {}), "(utils_tests.TESTS_DIRECTORY, 'trace_sched.txt.cache')\n", (4695, 4749), False, 'import os\n'), ((4758, 4790), 'shutil.copytree', 'shutil.copytree', (['src', 'cache_path'], {}), '(src, cache_path)\n', (4773, 4790), False, 'import shutil\n'), ((4816, 4857), 'os.path.join', 'os.path.join', (['cache_path', '"""metadata.json"""'], {}), "(cache_path, 'metadata.json')\n", (4828, 4857), False, 'import os\n'), ((5492, 5507), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (5505, 5507), False, 'import trappy\n'), ((5936, 5951), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (5949, 5951), False, 'import trappy\n'), ((6331, 6397), 'trappy.register_dynamic_ftrace', 'trappy.register_dynamic_ftrace', (['"""DynamicEvent"""', '"""dynamic_test_key"""'], {}), "('DynamicEvent', 'dynamic_test_key')\n", (6361, 6397), False, 'import trappy\n'), ((6416, 6431), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (6429, 6431), False, 'import trappy\n'), ((6508, 6553), 'trappy.unregister_dynamic_ftrace', 'trappy.unregister_dynamic_ftrace', (['parse_class'], {}), '(parse_class)\n', (6540, 6553), False, 'import trappy\n'), ((6873, 6950), 'trappy.FTrace', 'trappy.FTrace', ([], {'events': "['cpu_frequency', 'sched_wakeup']", 'normalize_time': '(False)'}), "(events=['cpu_frequency', 'sched_wakeup'], normalize_time=False)\n", (6886, 6950), False, 'import trappy\n'), ((7153, 7229), 'trappy.FTrace', 'trappy.FTrace', ([], {'events': "['cpu_frequency', 'sched_wakeup']", 'normalize_time': '(True)'}), "(events=['cpu_frequency', 'sched_wakeup'], normalize_time=True)\n", (7166, 7229), False, 'import trappy\n'), ((7561, 7614), 'trappy.FTrace', 'trappy.FTrace', ([], {'events': "['sched_wakeup']", 'window': '(0, 1)'}), "(events=['sched_wakeup'], window=(0, 1))\n", (7574, 7614), False, 'import trappy\n'), ((7994, 8050), 'trappy.FTrace', 'trappy.FTrace', ([], {'events': "['sched_wakeup']", 'window': '(0, None)'}), "(events=['sched_wakeup'], window=(0, None))\n", (8007, 8050), False, 'import trappy\n'), ((8625, 8640), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (8638, 8640), False, 'import trappy\n'), ((8658, 8759), 'trappy.FTrace', 'trappy.FTrace', (['uncached_trace.trace_path'], {'normalize_time': '(False)', 'abs_window': '(6550.1, 6552.000002)'}), '(uncached_trace.trace_path, normalize_time=False, abs_window=(\n 6550.1, 6552.000002))\n', (8671, 8759), False, 'import trappy\n'), ((9204, 9219), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (9217, 9219), False, 'import trappy\n'), ((9464, 9479), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (9477, 9479), False, 'import trappy\n'), ((9502, 9535), 'os.path.abspath', 'os.path.abspath', (['trace.trace_path'], {}), '(trace.trace_path)\n', (9517, 9535), False, 'import os\n'), ((9556, 9583), 'os.path.dirname', 'os.path.dirname', (['trace_path'], {}), '(trace_path)\n', (9571, 9583), False, 'import os\n'), ((9605, 9633), 'os.path.basename', 'os.path.basename', (['trace_path'], {}), '(trace_path)\n', (9621, 9633), False, 'import os\n'), ((10040, 10055), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (10053, 10055), False, 'import trappy\n'), ((1387, 1402), 'trappy.FTrace', 
'trappy.FTrace', ([], {}), '()\n', (1400, 1402), False, 'import trappy\n'), ((1404, 1440), 'trappy.SysTrace', 'trappy.SysTrace', ([], {'path': '"""./trace.html"""'}), "(path='./trace.html')\n", (1419, 1440), False, 'import trappy\n'), ((1497, 1530), 'os.path.abspath', 'os.path.abspath', (['trace.trace_path'], {}), '(trace.trace_path)\n', (1512, 1530), False, 'import os\n'), ((1555, 1582), 'os.path.dirname', 'os.path.dirname', (['trace_path'], {}), '(trace_path)\n', (1570, 1582), False, 'import os\n'), ((1608, 1636), 'os.path.basename', 'os.path.basename', (['trace_path'], {}), '(trace_path)\n', (1624, 1636), False, 'import os\n'), ((1921, 1936), 'trappy.FTrace', 'trappy.FTrace', ([], {}), '()\n', (1934, 1936), False, 'import trappy\n'), ((1938, 1974), 'trappy.SysTrace', 'trappy.SysTrace', ([], {'path': '"""./trace.html"""'}), "(path='./trace.html')\n", (1953, 1974), False, 'import trappy\n'), ((2031, 2064), 'os.path.abspath', 'os.path.abspath', (['trace.trace_path'], {}), '(trace.trace_path)\n', (2046, 2064), False, 'import os\n'), ((2089, 2116), 'os.path.dirname', 'os.path.dirname', (['trace_path'], {}), '(trace_path)\n', (2104, 2116), False, 'import os\n'), ((2142, 2170), 'os.path.basename', 'os.path.basename', (['trace_path'], {}), '(trace_path)\n', (2158, 2170), False, 'import os\n'), ((9822, 9864), 'os.path.join', 'os.path.join', (['cache_dir', '"""SchedWakeup.csv"""'], {}), "(cache_dir, 'SchedWakeup.csv')\n", (9834, 9864), False, 'import os\n'), ((4959, 4971), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4968, 4971), False, 'import json\n'), ((5141, 5163), 'json.dump', 'json.dump', (['metadata', 'f'], {}), '(metadata, f)\n', (5150, 5163), False, 'import json\n'), ((9751, 9772), 'os.listdir', 'os.listdir', (['cache_dir'], {}), '(cache_dir)\n', (9761, 9772), False, 'import os\n'), ((9895, 9916), 'os.listdir', 'os.listdir', (['cache_dir'], {}), '(cache_dir)\n', (9905, 9916), False, 'import os\n'), ((10085, 10106), 'os.listdir', 'os.listdir', (['cache_dir'], {}), '(cache_dir)\n', (10095, 10106), False, 'import os\n'), ((1731, 1752), 'os.listdir', 'os.listdir', (['trace_dir'], {}), '(trace_dir)\n', (1741, 1752), False, 'import os\n'), ((2266, 2287), 'os.listdir', 'os.listdir', (['trace_dir'], {}), '(trace_dir)\n', (2276, 2287), False, 'import os\n')] |
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.shortcuts import render
from django.urls import reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.utils import timezone
from olaf.models import *
from olaf.forms import *
from olaf.utility import usertools
from olaf.chess.controller import proccess_move
def index ( request ):
args = {}
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
args [ 'message' ] = message
if ( request.user.is_authenticated ):
if ( request.method == 'POST' ):
if ( request.POST.get ( 'game_id' ) is not None ):
game_id = request.POST.get ( 'game_id' )
if ( game_id == '-1' ):
game_id = usertools.new_game ( request )
request.session [ 'game_id' ] = game_id
else:
request.session.pop ( 'game_id', default = None )
f = lambda a : str ( a.date () ) + " - " + str ( a.hour ) + ":" + str ( a.minute ) + ":" + str ( a.second )
args [ 'game_list' ] = list ([str ( game.id ), f ( game.creation_time )] for game in request.user.userdata.game_history.filter ( result = 0 ).order_by ( '-creation_time' ) )
if ( request.session.get ( 'game_id' ) is not None ):
args [ 'game_board' ] = usertools.get_translated_game_board ( request )
else:
args [ 'game_board' ] = None
return render ( request, 'olaf/index_logged_in.html', args )
else:
args [ 'login_form' ] = LoginForm ()
args [ 'register_form' ] = RegisterForm ()
args [ 'score' ] = list ( [user.master.username, user.wins, user.loses, user.ties] for user in UserData.objects.filter ( is_active = True ) )
return render ( request, 'olaf/index_not_logged_in.html', args )
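# Each entry maps an operation name to:
# (handler function, form class, failure template, extra failure-template args,
#  success URL name, session args to set on success)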
form_operation_dict = {
'login' : (
usertools.login_user,
LoginForm,
'olaf/login.html',
{},
'index',
{ 'message' : "You're logged in. :)"}
),
'register' : (
usertools.register_user,
RegisterForm,
'olaf/register.html',
{},
'index',
{ 'message' : "An activation email has been sent to you" }
),
'password_reset_request' : (
usertools.init_pass_reset_token,
ForgotPasswordUsernameOrEmailForm,
'olaf/password_reset_request.html',
{},
'index',
{ 'message' : "An email containing the password reset link will be sent to your email"}
),
'reset_password' : (
usertools.reset_password_action,
PasswordChangeForm,
'olaf/reset_password.html',
{},
'olaf:login',
{ 'message' : "Password successfully changed, you can login now" }
),
'resend_activation_email' : (
usertools.resend_activation_email,
ResendActivationUsernameOrEmailForm,
'olaf/resend_activation_email.html',
{},
'index',
{ 'message' : "Activation email successfully sent to your email" }
),
}
def form_operation ( request, oper, *args ):
func, FORM, fail_template, fail_args, success_url, success_args = form_operation_dict [ oper ]
if ( request.method == 'POST' ):
form = FORM ( request.POST )
if ( form.is_valid () ):
func ( request, form, *args )
for key in success_args:
request.session [ key ] = success_args [ key ]
return HttpResponseRedirect ( reverse ( success_url ) )
else:
form = FORM ()
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
fail_args [ 'message' ] = message
fail_args [ 'form' ] = form
return render ( request, fail_template, fail_args )
#view functions
def login_user ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'login' )
def register_user ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'register' )
def password_reset_request ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'password_reset_request' )
def reset_password_action ( request, token ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
tk = ExpirableTokenField.objects.filter ( token = token ).first ()
if ( tk is None ):
request.session [ 'message' ] = "Broken link"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
if ( timezone.now () <= tk.expiration_time ):
return form_operation ( request, 'reset_password', token )
else:
request.session [ 'message' ] = "Link expired, try getting a new one"
return HttpResponseRedirect ( reverse ( 'olaf:reset_password' ) )
def activate_account ( request, token ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
tk = ExpirableTokenField.objects.filter ( token = token ).first ()
if ( tk is None ):
request.session [ 'message' ] = "Broken link"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
if ( timezone.now () <= tk.expiration_time ):
if ( tk.user.is_active ):
request.session [ 'message' ] = "Account already active"
return HttpResponseRedirect ( reverse ( 'index' ) )
else:
userdata = tk.user
userdata.is_active = True
userdata.save ()
request.session [ 'message' ] = "Your account has been activated successfully"
return HttpResponseRedirect ( reverse ( 'olaf:login' ) )
else:
request.session [ 'message' ] = "Link expired, try getting a new one"
return HttpResponseRedirect ( reverse ( 'olaf:resend_activation_email' ) )
def resend_activation_email ( request ):
if ( request.user.is_authenticated ):
return HttpResponseRedirect ( reverse ( 'index' ) )
return form_operation ( request, 'resend_activation_email' )
def logout_user ( request ):
usertools.logout_user ( request )
request.session [ 'message' ] = "Goodbye :)"
return HttpResponseRedirect ( reverse ( 'index' ) )
def scoreboard ( request ):
if ( request.method == 'POST' ):
username = request.POST.get ( 'username' )
user = User.objects.filter ( username = username ).first ()
if ( user is None ):
request.session [ 'message' ] = "User not found"
return HttpResponseRedirect ( reverse ( 'olaf:scoreboard' ) )
else:
return HttpResponseRedirect ( reverse ( 'olaf:user_profile', args = (username, ) ) )
else:
args = {}
message = request.session.pop ( 'message', default = None )
if ( message is not None ):
args [ 'message' ] = message
lst = [ (user.master.username, user.wins, user.loses, user.ties) for user in UserData.objects.filter ( is_active = True ) ]
args [ 'lst' ] = lst
if ( request.user.is_authenticated ):
args [ 'logged_in' ] = True
return render ( request, 'olaf/scoreboard.html', args )
def move ( request ):
proccess_move ( request )
return HttpResponseRedirect ( reverse ( 'index' ) ) | [
"django.shortcuts.render",
"django.utils.timezone.now",
"django.contrib.auth.models.User.objects.filter",
"django.urls.reverse",
"olaf.chess.controller.proccess_move",
"olaf.utility.usertools.get_translated_game_board",
"olaf.utility.usertools.new_game",
"olaf.utility.usertools.logout_user"
]
| [((3350, 3391), 'django.shortcuts.render', 'render', (['request', 'fail_template', 'fail_args'], {}), '(request, fail_template, fail_args)\n', (3356, 3391), False, 'from django.shortcuts import render\n'), ((5689, 5719), 'olaf.utility.usertools.logout_user', 'usertools.logout_user', (['request'], {}), '(request)\n', (5710, 5719), False, 'from olaf.utility import usertools\n'), ((6679, 6701), 'olaf.chess.controller.proccess_move', 'proccess_move', (['request'], {}), '(request)\n', (6692, 6701), False, 'from olaf.chess.controller import proccess_move\n'), ((1382, 1432), 'django.shortcuts.render', 'render', (['request', '"""olaf/index_logged_in.html"""', 'args'], {}), "(request, 'olaf/index_logged_in.html', args)\n", (1388, 1432), False, 'from django.shortcuts import render\n'), ((1681, 1735), 'django.shortcuts.render', 'render', (['request', '"""olaf/index_not_logged_in.html"""', 'args'], {}), "(request, 'olaf/index_not_logged_in.html', args)\n", (1687, 1735), False, 'from django.shortcuts import render\n'), ((5801, 5817), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (5808, 5817), False, 'from django.urls import reverse\n'), ((6606, 6651), 'django.shortcuts.render', 'render', (['request', '"""olaf/scoreboard.html"""', 'args'], {}), "(request, 'olaf/scoreboard.html', args)\n", (6612, 6651), False, 'from django.shortcuts import render\n'), ((6737, 6753), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (6744, 6753), False, 'from django.urls import reverse\n'), ((1284, 1328), 'olaf.utility.usertools.get_translated_game_board', 'usertools.get_translated_game_board', (['request'], {}), '(request)\n', (1319, 1328), False, 'from olaf.utility import usertools\n'), ((3512, 3528), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (3519, 3528), False, 'from django.urls import reverse\n'), ((3682, 3698), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (3689, 3698), False, 'from django.urls import reverse\n'), ((3864, 3880), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (3871, 3880), False, 'from django.urls import reverse\n'), ((4066, 4082), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (4073, 4082), False, 'from django.urls import reverse\n'), ((4257, 4273), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (4264, 4273), False, 'from django.urls import reverse\n'), ((4293, 4307), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (4305, 4307), False, 'from django.utils import timezone\n'), ((4659, 4675), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (4666, 4675), False, 'from django.urls import reverse\n'), ((4850, 4866), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (4857, 4866), False, 'from django.urls import reverse\n'), ((4886, 4900), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (4898, 4900), False, 'from django.utils import timezone\n'), ((5573, 5589), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (5580, 5589), False, 'from django.urls import reverse\n'), ((3135, 3155), 'django.urls.reverse', 'reverse', (['success_url'], {}), '(success_url)\n', (3142, 3155), False, 'from django.urls import reverse\n'), ((4510, 4540), 'django.urls.reverse', 'reverse', (['"""olaf:reset_password"""'], {}), "('olaf:reset_password')\n", (4517, 4540), False, 'from django.urls import reverse\n'), ((5415, 5454), 'django.urls.reverse', 'reverse', 
(['"""olaf:resend_activation_email"""'], {}), "('olaf:resend_activation_email')\n", (5422, 5454), False, 'from django.urls import reverse\n'), ((5940, 5978), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'username': 'username'}), '(username=username)\n', (5959, 5978), False, 'from django.contrib.auth.models import User\n'), ((6102, 6128), 'django.urls.reverse', 'reverse', (['"""olaf:scoreboard"""'], {}), "('olaf:scoreboard')\n", (6109, 6128), False, 'from django.urls import reverse\n'), ((6175, 6221), 'django.urls.reverse', 'reverse', (['"""olaf:user_profile"""'], {'args': '(username,)'}), "('olaf:user_profile', args=(username,))\n", (6182, 6221), False, 'from django.urls import reverse\n'), ((774, 801), 'olaf.utility.usertools.new_game', 'usertools.new_game', (['request'], {}), '(request)\n', (792, 801), False, 'from olaf.utility import usertools\n'), ((5051, 5067), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (5058, 5067), False, 'from django.urls import reverse\n'), ((5274, 5295), 'django.urls.reverse', 'reverse', (['"""olaf:login"""'], {}), "('olaf:login')\n", (5281, 5295), False, 'from django.urls import reverse\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# [Import start]
from flask import Blueprint, jsonify
# [Import end]
app = Blueprint(
'hoge',
__name__,
url_prefix='/hoge'
)
@app.route('/test')
def hoge():
return "\nhogehoge"
| [
"flask.Blueprint"
]
| [((119, 166), 'flask.Blueprint', 'Blueprint', (['"""hoge"""', '__name__'], {'url_prefix': '"""/hoge"""'}), "('hoge', __name__, url_prefix='/hoge')\n", (128, 166), False, 'from flask import Blueprint, jsonify\n')] |
#!/usr/bin/env python3
import apt_pkg
import sys
from apt_pkg import CURSTATE_INSTALLED, version_compare
from operator import lt, le, eq, ge, gt
# Function mappings for relationship operators.
relation_operators = {"<<": lt, "<=": le, "=": eq, ">=": ge, ">>": gt}
# Set up APT cache.
apt_pkg.init()
cache = apt_pkg.Cache(None)
missing_packages = []
for i in sys.argv[1:]:
# Build the package relationship string for use by 'apt-get satisfy'.
relationship_operator = None
for j in ["<=", ">=", "<", ">", "="]:
if j in i:
relationship_operator = j
break
if relationship_operator is not None:
if relationship_operator in ["<", ">"]:
relationship_operator_formatted = j + j
else:
relationship_operator_formatted = j
package = i.split(relationship_operator)
pkgname = package[0]
pkgver = package[1]
package_string = f"{pkgname} ({relationship_operator_formatted} {pkgver})"
else:
pkgname = i
pkgver = None
package_string = pkgname
# Check if the package is in the cache.
try:
pkg = cache[pkgname]
except KeyError:
missing_packages += [package_string]
continue
# Get the list of installed and provided packages that are currently installed.
installed_pkg_versions = []
if pkg.current_state == CURSTATE_INSTALLED:
installed_pkg_versions += [pkg]
for i in pkg.provides_list:
parent_pkg = i[2].parent_pkg
if parent_pkg.current_state == CURSTATE_INSTALLED:
installed_pkg_versions += [parent_pkg]
# If an installed package was found and no relationship operators were used, the dependency has been satisfied.
if (len(installed_pkg_versions) != 0) and (relationship_operator is None):
continue
# Otherwise, check all matching installed packages and see if any of them fit the specified relationship operator.
matched_pkg = False
for i in installed_pkg_versions:
installed_version = i.current_ver.ver_str
version_result = version_compare(installed_version, pkgver)
if relation_operators[relationship_operator_formatted](version_result, 0):
matched_pkg = True
if not matched_pkg:
missing_packages += [package_string]
for i in missing_packages:
print(i)
exit(0)
| [
"apt_pkg.version_compare",
"apt_pkg.Cache",
"apt_pkg.init"
]
| [((287, 301), 'apt_pkg.init', 'apt_pkg.init', ([], {}), '()\n', (299, 301), False, 'import apt_pkg\n'), ((310, 329), 'apt_pkg.Cache', 'apt_pkg.Cache', (['None'], {}), '(None)\n', (323, 329), False, 'import apt_pkg\n'), ((2105, 2147), 'apt_pkg.version_compare', 'version_compare', (['installed_version', 'pkgver'], {}), '(installed_version, pkgver)\n', (2120, 2147), False, 'from apt_pkg import CURSTATE_INSTALLED, version_compare\n')] |
# Generated by Django 3.0.7 on 2020-08-24 06:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('datasets', '0008_auto_20200821_1427'),
]
operations = [
migrations.AddField(
model_name='rawdar',
name='AsB',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='AsB_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='AsB_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Ba',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Ba_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Ba_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Cs',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Cs_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Cs_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='DMA',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='DMA_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='DMA_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='MMA',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='MMA_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='MMA_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Sr',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='Sr_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='Sr_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='iAs',
field=models.FloatField(blank=True, null=True),
),
migrations.AddField(
model_name='rawdar',
name='iAs_BDL',
field=models.CharField(choices=[('1', 'below detection level'), ('0', 'above detection level'), ('nan', 'invalid')], default=0, max_length=3),
preserve_default=False,
),
migrations.AddField(
model_name='rawdar',
name='iAs_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ag',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ag_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Al',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Al_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='As',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='As_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Be',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Be_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cd',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cd_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Co',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Co_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cr',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cr_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cu',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Cu_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Fe',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Fe_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Hg',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Hg_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mo',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Mo_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ni',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Ni_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Pb',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Pb_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sb',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sb_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Se',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Se_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Sn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Tl',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Tl_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='U',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='U_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='V',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='V_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='W',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='W_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Zn',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='Zn_IDL',
field=models.FloatField(blank=True, null=True),
),
migrations.AlterField(
model_name='rawdar',
name='urine_specific_gravity',
field=models.FloatField(blank=True, null=True),
),
]
| [
"django.db.models.FloatField",
"django.db.models.CharField"
]
| [((332, 372), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (349, 372), False, 'from django.db import migrations, models\n'), ((493, 632), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('1', 'below detection level'), ('0', 'above detection level'), ('nan',\n 'invalid')]", 'default': '(0)', 'max_length': '(3)'}), "(choices=[('1', 'below detection level'), ('0',\n 'above detection level'), ('nan', 'invalid')], default=0, max_length=3)\n", (509, 632), False, 'from django.db import migrations, models\n'), ((785, 825), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (802, 825), False, 'from django.db import migrations, models\n'), ((941, 981), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (958, 981), False, 'from django.db import migrations, models\n'), ((1101, 1240), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('1', 'below detection level'), ('0', 'above detection level'), ('nan',\n 'invalid')]", 'default': '(0)', 'max_length': '(3)'}), "(choices=[('1', 'below detection level'), ('0',\n 'above detection level'), ('nan', 'invalid')], default=0, max_length=3)\n", (1117, 1240), False, 'from django.db import migrations, models\n'), ((1392, 1432), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1409, 1432), False, 'from django.db import migrations, models\n'), ((1548, 1588), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1565, 1588), False, 'from django.db import migrations, models\n'), ((1708, 1847), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('1', 'below detection level'), ('0', 'above detection level'), ('nan',\n 'invalid')]", 'default': '(0)', 'max_length': '(3)'}), "(choices=[('1', 'below detection level'), ('0',\n 'above detection level'), ('nan', 'invalid')], default=0, max_length=3)\n", (1724, 1847), False, 'from django.db import migrations, models\n'), ((1999, 2039), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2016, 2039), False, 'from django.db import migrations, models\n'), ((2156, 2196), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2173, 2196), False, 'from django.db import migrations, models\n'), ((2317, 2456), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('1', 'below detection level'), ('0', 'above detection level'), ('nan',\n 'invalid')]", 'default': '(0)', 'max_length': '(3)'}), "(choices=[('1', 'below detection level'), ('0',\n 'above detection level'), ('nan', 'invalid')], default=0, max_length=3)\n", (2333, 2456), False, 'from django.db import migrations, models\n'), ((2609, 2649), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2626, 2649), False, 'from django.db import migrations, models\n'), ((2766, 2806), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (2783, 2806), False, 'from django.db import migrations, models\n'), ((2927, 3066), 'django.db.models.CharField', 
'models.CharField', ([], {'choices': "[('1', 'below detection level'), ('0', 'above detection level'), ('nan',\n 'invalid')]", 'default': '(0)', 'max_length': '(3)'}), "(choices=[('1', 'below detection level'), ('0',\n 'above detection level'), ('nan', 'invalid')], default=0, max_length=3)\n", (2943, 3066), False, 'from django.db import migrations, models\n'), ((3219, 3259), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (3236, 3259), False, 'from django.db import migrations, models\n'), ((3375, 3415), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (3392, 3415), False, 'from django.db import migrations, models\n'), ((3535, 3674), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('1', 'below detection level'), ('0', 'above detection level'), ('nan',\n 'invalid')]", 'default': '(0)', 'max_length': '(3)'}), "(choices=[('1', 'below detection level'), ('0',\n 'above detection level'), ('nan', 'invalid')], default=0, max_length=3)\n", (3551, 3674), False, 'from django.db import migrations, models\n'), ((3826, 3866), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (3843, 3866), False, 'from django.db import migrations, models\n'), ((3983, 4023), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (4000, 4023), False, 'from django.db import migrations, models\n'), ((4144, 4283), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('1', 'below detection level'), ('0', 'above detection level'), ('nan',\n 'invalid')]", 'default': '(0)', 'max_length': '(3)'}), "(choices=[('1', 'below detection level'), ('0',\n 'above detection level'), ('nan', 'invalid')], default=0, max_length=3)\n", (4160, 4283), False, 'from django.db import migrations, models\n'), ((4436, 4476), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (4453, 4476), False, 'from django.db import migrations, models\n'), ((4594, 4634), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (4611, 4634), False, 'from django.db import migrations, models\n'), ((4756, 4796), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (4773, 4796), False, 'from django.db import migrations, models\n'), ((4914, 4954), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (4931, 4954), False, 'from django.db import migrations, models\n'), ((5076, 5116), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (5093, 5116), False, 'from django.db import migrations, models\n'), ((5234, 5274), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (5251, 5274), False, 'from django.db import migrations, models\n'), ((5396, 5436), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (5413, 5436), False, 'from django.db import migrations, models\n'), ((5554, 5594), 'django.db.models.FloatField', 'models.FloatField', ([], 
{'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (5571, 5594), False, 'from django.db import migrations, models\n'), ((5716, 5756), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (5733, 5756), False, 'from django.db import migrations, models\n'), ((5874, 5914), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (5891, 5914), False, 'from django.db import migrations, models\n'), ((6036, 6076), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6053, 6076), False, 'from django.db import migrations, models\n'), ((6194, 6234), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6211, 6234), False, 'from django.db import migrations, models\n'), ((6356, 6396), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6373, 6396), False, 'from django.db import migrations, models\n'), ((6514, 6554), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6531, 6554), False, 'from django.db import migrations, models\n'), ((6676, 6716), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6693, 6716), False, 'from django.db import migrations, models\n'), ((6834, 6874), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6851, 6874), False, 'from django.db import migrations, models\n'), ((6996, 7036), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7013, 7036), False, 'from django.db import migrations, models\n'), ((7154, 7194), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7171, 7194), False, 'from django.db import migrations, models\n'), ((7316, 7356), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7333, 7356), False, 'from django.db import migrations, models\n'), ((7474, 7514), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7491, 7514), False, 'from django.db import migrations, models\n'), ((7636, 7676), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7653, 7676), False, 'from django.db import migrations, models\n'), ((7794, 7834), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7811, 7834), False, 'from django.db import migrations, models\n'), ((7956, 7996), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (7973, 7996), False, 'from django.db import migrations, models\n'), ((8114, 8154), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (8131, 8154), False, 'from django.db import migrations, models\n'), ((8276, 8316), 'django.db.models.FloatField', 'models.FloatField', ([], 
{'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (8293, 8316), False, 'from django.db import migrations, models\n'), ((8434, 8474), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (8451, 8474), False, 'from django.db import migrations, models\n'), ((8596, 8636), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (8613, 8636), False, 'from django.db import migrations, models\n'), ((8754, 8794), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (8771, 8794), False, 'from django.db import migrations, models\n'), ((8916, 8956), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (8933, 8956), False, 'from django.db import migrations, models\n'), ((9074, 9114), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (9091, 9114), False, 'from django.db import migrations, models\n'), ((9236, 9276), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (9253, 9276), False, 'from django.db import migrations, models\n'), ((9394, 9434), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (9411, 9434), False, 'from django.db import migrations, models\n'), ((9556, 9596), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (9573, 9596), False, 'from django.db import migrations, models\n'), ((9714, 9754), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (9731, 9754), False, 'from django.db import migrations, models\n'), ((9876, 9916), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (9893, 9916), False, 'from django.db import migrations, models\n'), ((10034, 10074), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (10051, 10074), False, 'from django.db import migrations, models\n'), ((10196, 10236), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (10213, 10236), False, 'from django.db import migrations, models\n'), ((10353, 10393), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (10370, 10393), False, 'from django.db import migrations, models\n'), ((10514, 10554), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (10531, 10554), False, 'from django.db import migrations, models\n'), ((10671, 10711), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (10688, 10711), False, 'from django.db import migrations, models\n'), ((10832, 10872), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (10849, 10872), False, 'from django.db import migrations, models\n'), ((10989, 11029), 'django.db.models.FloatField', 
'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (11006, 11029), False, 'from django.db import migrations, models\n'), ((11150, 11190), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (11167, 11190), False, 'from django.db import migrations, models\n'), ((11308, 11348), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (11325, 11348), False, 'from django.db import migrations, models\n'), ((11470, 11510), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (11487, 11510), False, 'from django.db import migrations, models\n'), ((11648, 11688), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (11665, 11688), False, 'from django.db import migrations, models\n')] |
import os
import math
import time
import geohash
import geojson
from geojson import MultiLineString
from shapely import geometry
import shapefile
import numpy
import datetime as dt
import pandas as pd
import logging
logger = logging.getLogger(__name__)
source_shape_file_path = "C:/temp/2018/"
threshold = 60*60
cols = ['start', 'end','start_epoch_round','end_epoch_round','start_epoch_round_dt','end_epoch_round_dt']
times = []
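# Walk the source tree, read start/end timestamps from each .shp, and round them to the nearest hour.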
for root,dirs,files in os.walk(source_shape_file_path):
for file in files:
with open(os.path.join(root,file),"r") as auto:
if file.endswith(".shp"):
try:
filename = file.replace(".shp","")
                    shape = shapefile.Reader(source_shape_file_path + filename + "/" + file)
for r in shape.iterRecords():
start_time = dt.datetime.strptime(r[1], '%Y%j %H%M')
end_time = dt.datetime.strptime(r[2], '%Y%j %H%M')
epoch_s = dt.datetime.timestamp(dt.datetime.strptime(r[1], '%Y%j %H%M'))
epoch_e = dt.datetime.timestamp(dt.datetime.strptime(r[2], '%Y%j %H%M'))
# sometimes start is later than end time, we'll assume the earlier time is start
epoch_end_round = round(max(epoch_s,epoch_e) / threshold) * threshold
epoch_start_round = round(min(epoch_s,epoch_e) / threshold) * threshold
epoch_end_round_dt = dt.datetime.utcfromtimestamp(3600 * ((max(epoch_s,epoch_e) + 1800) // 3600))
epoch_start_round_dt = dt.datetime.utcfromtimestamp(3600 * ((min(epoch_s,epoch_e) + 1800) // 3600))
times.append([start_time,end_time,epoch_start_round,epoch_end_round,epoch_start_round_dt,epoch_end_round_dt])
break
except:
logger.error('failed to parse file:'+source_shape_file_path+filename+"/")
continue
df = pd.DataFrame(times, columns=cols)
df.to_csv('noaa_times.csv')
| [
"logging.getLogger",
"shapefile.Reader",
"datetime.datetime.strptime",
"os.path.join",
"pandas.DataFrame",
"os.walk"
]
| [((226, 253), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (243, 253), False, 'import logging\n'), ((454, 485), 'os.walk', 'os.walk', (['source_shape_file_path'], {}), '(source_shape_file_path)\n', (461, 485), False, 'import os\n'), ((2012, 2045), 'pandas.DataFrame', 'pd.DataFrame', (['times'], {'columns': 'cols'}), '(times, columns=cols)\n', (2024, 2045), True, 'import pandas as pd\n'), ((528, 552), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (540, 552), False, 'import os\n'), ((706, 770), 'shapefile.Reader', 'shapefile.Reader', (["(source_shape_file_path + filename + '/' + file)"], {}), "(source_shape_file_path + filename + '/' + file)\n", (722, 770), False, 'import shapefile\n'), ((852, 891), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['r[1]', '"""%Y%j %H%M"""'], {}), "(r[1], '%Y%j %H%M')\n", (872, 891), True, 'import datetime as dt\n'), ((927, 966), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['r[2]', '"""%Y%j %H%M"""'], {}), "(r[2], '%Y%j %H%M')\n", (947, 966), True, 'import datetime as dt\n'), ((1023, 1062), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['r[1]', '"""%Y%j %H%M"""'], {}), "(r[1], '%Y%j %H%M')\n", (1043, 1062), True, 'import datetime as dt\n'), ((1120, 1159), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['r[2]', '"""%Y%j %H%M"""'], {}), "(r[2], '%Y%j %H%M')\n", (1140, 1159), True, 'import datetime as dt\n')] |
import pygame_sdl2
pygame_sdl2.import_as_pygame()
import pygame
import os
import random
import math
from Ball import Ball
def save_state(balls):
"""
Saves the game state.
"""
stateString = ""
with open("state.txt", "w") as f:
for ball in balls:
stateString += "{} {} {} {} {}".format(ball.imageFile,
ball.speedx,
ball.speedy,
ball.rect.centerx,
ball.rect.centery)
stateString += '\n'
f.write(stateString)
def load_state():
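    """Restores the saved ball list from state.txt, or returns None if it cannot be read."""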
try:
objects = []
with open("state.txt", "r") as f:
            for line in f.read().splitlines():
                imageFile, sx, sy, x, y = line.split()
                objects.append(Ball(imageFile, [int(sx), int(sy)], [int(x), int(y)]))
return objects
except:
return None
def delete_state():
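    """Removes the saved state file if it exists."""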
if os.path.exists("state.txt"):
os.unlink("state.txt")
def main():
pygame.init()
clock = pygame.time.Clock()
infoObject = pygame.display.Info()
#print infoObject.current_w
width = infoObject.current_w
height = infoObject.current_h
size = width, height
bgColor = r,g,b = 0, 0, 0
screen = pygame.display.set_mode(size)
pygame.display.set_mode()
balls = load_state()
delete_state()
if balls == None:
balls = []
ballTimer = 0
ballTimerMax = .75 * 60
done = False
sleeping = False
font = pygame.font.Font("DejaVuSans.ttf", 124)
text = font.render("Start", True, (255, 255, 255, 255))
textRect = text.get_rect(center = (width/2, height/2))
while not done:
for event in pygame.event.get():
text = font.render(str(event.type), True, (255, 255, 255, 255))
if event.type == pygame.QUIT:
done = True
elif event.type == pygame.KEYDOWN and event.key == pygame.K_AC_BACK:
done = True
elif event.type == pygame.APP_WILLENTERBACKGROUND:
# The app is about to go to sleep. It should save state, cancel
# any timers, and stop drawing the screen until an APP_DIDENTERFOREGROUND
# event shows up.
save_state(balls)
sleeping = True
elif event.type == pygame.APP_DIDENTERFOREGROUND:
# The app woke back up. Delete the saved state (we don't need it),
# restore any times, and start drawing the screen again.
delete_state()
sleeping = False
# For now, we have to re-open the window when entering the
# foreground.
screen = pygame.display.set_mode((1280, 720))
if not sleeping:
ballTimer += 1
if ballTimer >= ballTimerMax:
ballTimer = 0
ballSpeed = [random.randint(-5, 5),
random.randint(-5, 5)]
ballPos = [random.randint(100, width-100),
random.randint(100, height-100)]
balls += [Ball("ball.png",ballSpeed,ballPos)]
save_state(balls)
for ball in balls:
ball.move()
ball.collideScreen(size)
for first in balls:
for second in balls:
if first != second:
first.collideBall(second)
bgColor = r,g,b
screen.fill(bgColor)
for ball in balls:
screen.blit(ball.image, ball.rect)
screen.blit(text, textRect)
pygame.display.flip()
clock.tick(60)
if done:
break
if __name__ == "__main__":
main()
| [
"os.path.exists",
"pygame.init",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.display.Info",
"pygame_sdl2.import_as_pygame",
"os.unlink",
"pygame.time.Clock",
"pygame.font.Font",
"random.randint",
"Ball.Ball"
]
| [((19, 49), 'pygame_sdl2.import_as_pygame', 'pygame_sdl2.import_as_pygame', ([], {}), '()\n', (47, 49), False, 'import pygame_sdl2\n'), ((999, 1026), 'os.path.exists', 'os.path.exists', (['"""state.txt"""'], {}), "('state.txt')\n", (1013, 1026), False, 'import os\n'), ((1076, 1089), 'pygame.init', 'pygame.init', ([], {}), '()\n', (1087, 1089), False, 'import pygame\n'), ((1107, 1126), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (1124, 1126), False, 'import pygame\n'), ((1144, 1165), 'pygame.display.Info', 'pygame.display.Info', ([], {}), '()\n', (1163, 1165), False, 'import pygame\n'), ((1351, 1380), 'pygame.display.set_mode', 'pygame.display.set_mode', (['size'], {}), '(size)\n', (1374, 1380), False, 'import pygame\n'), ((1385, 1410), 'pygame.display.set_mode', 'pygame.display.set_mode', ([], {}), '()\n', (1408, 1410), False, 'import pygame\n'), ((1612, 1651), 'pygame.font.Font', 'pygame.font.Font', (['"""DejaVuSans.ttf"""', '(124)'], {}), "('DejaVuSans.ttf', 124)\n", (1628, 1651), False, 'import pygame\n'), ((1036, 1058), 'os.unlink', 'os.unlink', (['"""state.txt"""'], {}), "('state.txt')\n", (1045, 1058), False, 'import os\n'), ((1817, 1835), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (1833, 1835), False, 'import pygame\n'), ((3821, 3842), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (3840, 3842), False, 'import pygame\n'), ((3036, 3057), 'random.randint', 'random.randint', (['(-5)', '(5)'], {}), '(-5, 5)\n', (3050, 3057), False, 'import random\n'), ((3088, 3109), 'random.randint', 'random.randint', (['(-5)', '(5)'], {}), '(-5, 5)\n', (3102, 3109), False, 'import random\n'), ((3138, 3170), 'random.randint', 'random.randint', (['(100)', '(width - 100)'], {}), '(100, width - 100)\n', (3152, 3170), False, 'import random\n'), ((3199, 3232), 'random.randint', 'random.randint', (['(100)', '(height - 100)'], {}), '(100, height - 100)\n', (3213, 3232), False, 'import random\n'), ((3258, 3294), 'Ball.Ball', 'Ball', (['"""ball.png"""', 'ballSpeed', 'ballPos'], {}), "('ball.png', ballSpeed, ballPos)\n", (3262, 3294), False, 'from Ball import Ball\n'), ((2838, 2874), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(1280, 720)'], {}), '((1280, 720))\n', (2861, 2874), False, 'import pygame\n')] |
from __future__ import division
import math, copy
import argparse
from brownian import Brownian
import scipy
import LLRcalc
class sprt:
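    """Sequential probability ratio test of H0: elo=elo0 versus H1: elo=elo1 with error probabilities alpha and beta."""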
def __init__(self, alpha=0.05, beta=0.05, elo0=0, elo1=5, elo_model="logistic"):
assert elo_model in ("logistic", "normalized")
self.elo_model = elo_model
self.a = math.log(beta / (1 - alpha))
self.b = math.log((1 - beta) / alpha)
self.elo0 = elo0
self.elo1 = elo1
self.clamped = False
self.LLR_drift_variance = LLRcalc.LLR_drift_variance_alt2
def elo_to_score(self, elo):
"""
"elo" is expressed in our current elo_model.
"""
if self.elo_model == "normalized":
nt = elo / LLRcalc.nelo_divided_by_nt
return nt * self.sigma_pg + 0.5
else:
return LLRcalc.L_(elo)
def lelo_to_elo(self, lelo):
"""
For external use. "elo" is expressed in our current elo_model.
"lelo" is logistic.
"""
if self.elo_model == "logistic":
return lelo
score = LLRcalc.L_(lelo)
nt = (score - 0.5) / self.sigma_pg
return nt * LLRcalc.nelo_divided_by_nt
def set_state(self, results):
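        """Sets the test state from observed results: builds the results PDF and computes the (clamped) LLR."""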
N, self.pdf = LLRcalc.results_to_pdf(results)
if self.elo_model == "normalized":
mu, var = LLRcalc.stats(self.pdf) # code duplication with LLRcalc
if len(results) == 5:
self.sigma_pg = (2 * var) ** 0.5
elif len(results) == 3:
self.sigma_pg = var ** 0.5
else:
assert False
self.s0, self.s1 = [self.elo_to_score(elo) for elo in (self.elo0, self.elo1)]
mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, None)
# llr estimate
self.llr = N * mu_LLR
self.T = N
# now normalize llr (if llr is not legal then the implications
# of this are unclear)
slope = self.llr / N
if self.llr > 1.03 * self.b or self.llr < 1.03 * self.a:
self.clamped = True
if self.llr < self.a:
self.T = self.a / slope
self.llr = self.a
elif self.llr > self.b:
self.T = self.b / slope
self.llr = self.b
def outcome_prob(self, elo):
"""
        The probability that a test with the given elo has a worse outcome
        (a faster fail, a slower pass, or a pass changed into a fail).
"""
s = LLRcalc.L_(elo)
mu_LLR, var_LLR = self.LLR_drift_variance(self.pdf, self.s0, self.s1, s)
sigma_LLR = math.sqrt(var_LLR)
return Brownian(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR).outcome_cdf(
T=self.T, y=self.llr
)
def lower_cb(self, p):
"""
Maximal elo value such that the observed outcome of the test has probability
less than p.
"""
avg_elo = (self.elo0 + self.elo1) / 2
delta = self.elo1 - self.elo0
N = 30
# Various error conditions must be handled better here!
while True:
elo0 = max(avg_elo - N * delta, -1000)
elo1 = min(avg_elo + N * delta, 1000)
try:
sol, res = scipy.optimize.brentq(
lambda elo: self.outcome_prob(elo) - (1 - p),
elo0,
elo1,
full_output=True,
disp=False,
)
except ValueError:
if elo0 > -1000 or elo1 < 1000:
N *= 2
continue
else:
if self.outcome_prob(elo0) - (1 - p) > 0:
return elo1
else:
return elo0
assert res.converged
break
return sol
def analytics(self, p=0.05):
ret = {}
ret["clamped"] = self.clamped
ret["a"] = self.a
ret["b"] = self.b
ret["elo"] = self.lower_cb(0.5)
ret["ci"] = [self.lower_cb(p / 2), self.lower_cb(1 - p / 2)]
ret["LOS"] = self.outcome_prob(0)
ret["LLR"] = self.llr
return ret
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--alpha", help="probability of a false positve", type=float, default=0.05
)
parser.add_argument(
"--beta", help="probability of a false negative", type=float, default=0.05
)
parser.add_argument(
"--elo0", help="H0 (expressed in LogisticElo)", type=float, default=0.0
)
parser.add_argument(
"--elo1", help="H1 (expressed in LogisticElo)", type=float, default=5.0
)
parser.add_argument("--level", help="confidence level", type=float, default=0.95)
parser.add_argument(
"--elo-model",
help="logistic or normalized",
choices=['logistic', 'normalized'],
default='logistic',
)
parser.add_argument(
"--results",
help="trinomial of pentanomial frequencies, low to high",
nargs="*",
type=int,
required=True,
)
args = parser.parse_args()
results = args.results
if len(results) != 3 and len(results) != 5:
parser.error("argument --results: expected 3 or 5 arguments")
alpha = args.alpha
beta = args.beta
elo0 = args.elo0
elo1 = args.elo1
elo_model = args.elo_model
p = 1 - args.level
s = sprt(alpha=alpha, beta=beta, elo0=elo0, elo1=elo1, elo_model=elo_model)
s.set_state(results)
a = s.analytics(p)
print("Design parameters")
print("=================")
print("False positives : %4.2f%%" % (100 * alpha,))
print("False negatives : %4.2f%%" % (100 * beta,))
print("[Elo0,Elo1] : [%.2f,%.2f]" % (elo0, elo1))
print("Confidence level : %4.2f%%" % (100 * (1 - p),))
print("Elo model : %s" % elo_model)
print("Estimates")
print("=========")
print("Elo : %.2f" % a["elo"])
print(
"Confidence interval : [%.2f,%.2f] (%4.2f%%)"
% (a["ci"][0], a["ci"][1], 100 * (1 - p))
)
print("LOS : %4.2f%%" % (100 * a["LOS"],))
print("Context")
print("=======")
print(
"LLR [u,l] : %.2f %s [%.2f,%.2f]"
% (a["LLR"], "(clamped)" if a["clamped"] else "", a["a"], a["b"])
)
| [
"LLRcalc.results_to_pdf",
"argparse.ArgumentParser",
"math.sqrt",
"math.log",
"LLRcalc.stats",
"LLRcalc.L_",
"brownian.Brownian"
]
| [((4157, 4182), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4180, 4182), False, 'import argparse\n'), ((330, 358), 'math.log', 'math.log', (['(beta / (1 - alpha))'], {}), '(beta / (1 - alpha))\n', (338, 358), False, 'import math, copy\n'), ((376, 404), 'math.log', 'math.log', (['((1 - beta) / alpha)'], {}), '((1 - beta) / alpha)\n', (384, 404), False, 'import math, copy\n'), ((1054, 1070), 'LLRcalc.L_', 'LLRcalc.L_', (['lelo'], {}), '(lelo)\n', (1064, 1070), False, 'import LLRcalc\n'), ((1218, 1249), 'LLRcalc.results_to_pdf', 'LLRcalc.results_to_pdf', (['results'], {}), '(results)\n', (1240, 1249), False, 'import LLRcalc\n'), ((2432, 2447), 'LLRcalc.L_', 'LLRcalc.L_', (['elo'], {}), '(elo)\n', (2442, 2447), False, 'import LLRcalc\n'), ((2549, 2567), 'math.sqrt', 'math.sqrt', (['var_LLR'], {}), '(var_LLR)\n', (2558, 2567), False, 'import math, copy\n'), ((815, 830), 'LLRcalc.L_', 'LLRcalc.L_', (['elo'], {}), '(elo)\n', (825, 830), False, 'import LLRcalc\n'), ((1315, 1338), 'LLRcalc.stats', 'LLRcalc.stats', (['self.pdf'], {}), '(self.pdf)\n', (1328, 1338), False, 'import LLRcalc\n'), ((2583, 2639), 'brownian.Brownian', 'Brownian', ([], {'a': 'self.a', 'b': 'self.b', 'mu': 'mu_LLR', 'sigma': 'sigma_LLR'}), '(a=self.a, b=self.b, mu=mu_LLR, sigma=sigma_LLR)\n', (2591, 2639), False, 'from brownian import Brownian\n')] |
"""Simple Hail query example."""
import click
import hail as hl
from bokeh.io.export import get_screenshot_as_png
from analysis_runner import output_path
GNOMAD_HGDP_1KG_MT = (
'gs://gcp-public-data--gnomad/release/3.1/mt/genomes/'
'gnomad.genomes.v3.1.hgdp_1kg_subset_dense.mt'
)
@click.command()
@click.option('--rerun', help='Whether to overwrite cached files', default=False)
def query(rerun):
"""Query script entry point."""
hl.init(default_reference='GRCh38')
sample_qc_path = output_path('sample_qc.mt')
if rerun or not hl.hadoop_exists(sample_qc_path):
mt = hl.read_matrix_table(GNOMAD_HGDP_1KG_MT)
mt = mt.head(100, n_cols=100)
mt_qc = hl.sample_qc(mt)
mt_qc.write(sample_qc_path)
mt_qc = hl.read_matrix_table(sample_qc_path)
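    # Plot the per-sample call-rate distribution and save it as a PNG under the 'web' output path.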
plot_filename = output_path('call_rate_plot.png', 'web')
if rerun or not hl.hadoop_exists(plot_filename):
call_rate_plot = hl.plot.histogram(
mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate'
)
with hl.hadoop_open(plot_filename, 'wb') as f:
get_screenshot_as_png(call_rate_plot).save(f, format='PNG')
if __name__ == '__main__':
query() # pylint: disable=no-value-for-parameter
| [
"hail.hadoop_open",
"click.option",
"hail.sample_qc",
"hail.hadoop_exists",
"hail.read_matrix_table",
"hail.init",
"bokeh.io.export.get_screenshot_as_png",
"click.command",
"analysis_runner.output_path",
"hail.plot.histogram"
]
| [((295, 310), 'click.command', 'click.command', ([], {}), '()\n', (308, 310), False, 'import click\n'), ((312, 397), 'click.option', 'click.option', (['"""--rerun"""'], {'help': '"""Whether to overwrite cached files"""', 'default': '(False)'}), "('--rerun', help='Whether to overwrite cached files', default=False\n )\n", (324, 397), False, 'import click\n'), ((452, 487), 'hail.init', 'hl.init', ([], {'default_reference': '"""GRCh38"""'}), "(default_reference='GRCh38')\n", (459, 487), True, 'import hail as hl\n'), ((510, 537), 'analysis_runner.output_path', 'output_path', (['"""sample_qc.mt"""'], {}), "('sample_qc.mt')\n", (521, 537), False, 'from analysis_runner import output_path\n'), ((765, 801), 'hail.read_matrix_table', 'hl.read_matrix_table', (['sample_qc_path'], {}), '(sample_qc_path)\n', (785, 801), True, 'import hail as hl\n'), ((823, 863), 'analysis_runner.output_path', 'output_path', (['"""call_rate_plot.png"""', '"""web"""'], {}), "('call_rate_plot.png', 'web')\n", (834, 863), False, 'from analysis_runner import output_path\n'), ((605, 645), 'hail.read_matrix_table', 'hl.read_matrix_table', (['GNOMAD_HGDP_1KG_MT'], {}), '(GNOMAD_HGDP_1KG_MT)\n', (625, 645), True, 'import hail as hl\n'), ((700, 716), 'hail.sample_qc', 'hl.sample_qc', (['mt'], {}), '(mt)\n', (712, 716), True, 'import hail as hl\n'), ((942, 1020), 'hail.plot.histogram', 'hl.plot.histogram', (['mt_qc.sample_qc.call_rate'], {'range': '(0, 1)', 'legend': '"""Call rate"""'}), "(mt_qc.sample_qc.call_rate, range=(0, 1), legend='Call rate')\n", (959, 1020), True, 'import hail as hl\n'), ((558, 590), 'hail.hadoop_exists', 'hl.hadoop_exists', (['sample_qc_path'], {}), '(sample_qc_path)\n', (574, 590), True, 'import hail as hl\n'), ((884, 915), 'hail.hadoop_exists', 'hl.hadoop_exists', (['plot_filename'], {}), '(plot_filename)\n', (900, 915), True, 'import hail as hl\n'), ((1056, 1091), 'hail.hadoop_open', 'hl.hadoop_open', (['plot_filename', '"""wb"""'], {}), "(plot_filename, 'wb')\n", (1070, 1091), True, 'import hail as hl\n'), ((1110, 1147), 'bokeh.io.export.get_screenshot_as_png', 'get_screenshot_as_png', (['call_rate_plot'], {}), '(call_rate_plot)\n', (1131, 1147), False, 'from bokeh.io.export import get_screenshot_as_png\n')] |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Loads, converts, and runs sample models."""
import abc
import collections
import functools
import tempfile
import time
from typing import Callable, Iterable, List, Mapping, Optional, Sequence, Union
from absl import logging
import numpy as np
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import tensor_shape_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.client import session
from tensorflow.python.compiler.tensorrt import trt_convert as trt
from tensorflow.python.framework import convert_to_constants
from tensorflow.python.framework import dtypes as tf_dtypes
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops as framework_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.saved_model import load as saved_model_load
from tensorflow.python.saved_model import loader as saved_model_loader
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
# pylint: disable=bad-whitespace
### Helper Functions
def _get_concrete_tensor_shape(
tensor_shape: tensor_shape_pb2.TensorShapeProto,
batch_size: Optional[int] = None) -> Sequence[int]:
"""Gets a concrete tensor shape without dynamic dimensions."""
if tensor_shape.unknown_rank:
raise ValueError("Cannot generates random tensors for unknown rank!")
shape = [dim.size for dim in tensor_shape.dim]
if not shape:
raise ValueError("The tensor cannot have a rank of 0!")
if shape[0] < 0:
if batch_size is None or batch_size <= 0:
raise ValueError("Must provide a valid batch size "
"as the tensor has a dynamic batch size!")
shape[0] = batch_size
if any(filter(lambda x: x < 0, shape)):
raise ValueError("Cannot have dynamic dimensions except for batch size!")
return shape
def _generate_random_tensor_v1(tensor_info: meta_graph_pb2.TensorInfo,
batch_size: Optional[int] = None) -> np.ndarray:
"""Generates a random tensor based on the data type and tensor shape."""
dtype = tf_dtypes.as_dtype(tensor_info.dtype)
shape = _get_concrete_tensor_shape(tensor_info.tensor_shape, batch_size)
with session.Session():
return random_ops.random_uniform(
shape=shape, dtype=dtype, name=tensor_info.name.split(":")[0]).eval()
def _generate_random_tensor_v2(
tensor: framework_ops.Tensor,
batch_size: Optional[int] = None) -> framework_ops.Tensor:
"""Generates a random tensor based on the data type and tensor shape."""
shape = _get_concrete_tensor_shape(tensor.shape.as_proto(), batch_size)
return random_ops.random_uniform(
shape=shape, dtype=tensor.dtype, name=tensor.name)
# Models are repeatedly loaded for different TensorRT conversion settings.
# Using cache can reduce I/O.
@functools.lru_cache()
def load_meta_graph(
saved_model_dir: str, saved_model_tags: str,
saved_model_signature_key: str) -> meta_graph_pb2.MetaGraphDef:
"""Loads a `tf.MetaGraphDef` in TF1."""
with session.Session() as sess:
meta_graph = saved_model_loader.load(
sess=sess,
export_dir=saved_model_dir,
tags=saved_model_tags,
)
output_node_names = [
tensor.name.split(":")[0] for tensor in
meta_graph.signature_def[saved_model_signature_key].outputs.values()
]
graph_def = (
convert_to_constants.convert_variables_to_constants_from_session_graph(
sess, meta_graph.graph_def, output_node_names))
meta_graph.graph_def.CopyFrom(graph_def)
return meta_graph
@functools.lru_cache()
def load_graph_func(saved_model_dir: str, saved_model_tags: str,
saved_model_signature_key: str):
"""Loads a graph function in TF2."""
imported = saved_model_load.load(
export_dir=saved_model_dir, tags=saved_model_tags)
graph_func = imported.signatures[saved_model_signature_key]
return convert_to_constants.convert_variables_to_constants_v2(graph_func)
### Test Classes
class TestResult(
collections.namedtuple("TestResult",
["outputs", "latency", "trt_convert_params"])):
def __new__(cls,
outputs: Mapping[str, np.ndarray],
latency: List[float],
trt_convert_params: trt.TrtConversionParams = None):
return super(TestResult, cls).__new__(cls, outputs, latency,
trt_convert_params)
class ModelConfig(
collections.namedtuple("ModelConfig", [
"saved_model_dir", "saved_model_tags", "saved_model_signature_key",
"default_batch_size"
])):
"""Configurations for test models."""
def __new__(cls,
saved_model_dir: str,
saved_model_tags: Sequence[str] = (tag_constants.SERVING,),
saved_model_signature_key: str = (
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY),
default_batch_size: int = 1):
return super(ModelConfig,
cls).__new__(cls, saved_model_dir, saved_model_tags,
saved_model_signature_key, default_batch_size)
class TestResultCollection(
collections.namedtuple("TestResultCollection", ["results", "config"])):
def __new__(cls, config: ModelConfig,
results: Sequence[TestResult] = tuple()):
return super(TestResultCollection, cls).__new__(cls, config, results)
class _ModelHandlerBase(metaclass=abc.ABCMeta):
"""Base class for running a model."""
def __init__(self, model_config: ModelConfig):
self._model_config = model_config
def __str__(self) -> str:
return str(self._model_config)
def __repr__(self) -> str:
return "{}({})".format(self.__class__.__name__, str(self))
@property
def model_config(self) -> ModelConfig:
return self._model_config
@property
  def input_tensor_names(self) -> Sequence[str]:
"""Names of input tensors."""
@property
def output_tensor_names(self) -> Sequence[str]:
"""Names of output tensors."""
@abc.abstractmethod
def generate_random_inputs(
self,
batch_size: Optional[int] = None
) -> Mapping[str, Union[np.ndarray, framework_ops.Tensor]]:
"""Generates mapping from names to input tensors."""
@abc.abstractmethod
def run(self,
inputs=None,
warmup_iterations: int = 10,
benchmark_iterations: int = 100,
allow_to_use_gpu: bool = False) -> TestResult:
"""Runs the model with provided or randomly generated input tensors.
Args:
inputs: Mapping from names to input ndarrays in TF1, or a sequence of
        tensors in TF2. If `None`, randomly generated inputs will be used
instead.
warmup_iterations: Number of inferences to warm up the runtime.
benchmark_iterations: Number of inferences to measure the latency.
allow_to_use_gpu: Whether it is allowed to use GPU or not.
Returns:
`TestResult` summarizing timing and numerics information.
"""
class ModelHandlerV1(_ModelHandlerBase):
"""Runs a model in TF1."""
@property
def meta_graph(self) -> meta_graph_pb2.MetaGraphDef:
return load_meta_graph(
saved_model_dir=self.model_config.saved_model_dir,
saved_model_tags=self.model_config.saved_model_tags,
saved_model_signature_key=self.model_config.saved_model_signature_key)
@property
def input_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:
return self.meta_graph.signature_def[
self.model_config.saved_model_signature_key].inputs
@property
def output_tensor_info(self) -> Mapping[str, meta_graph_pb2.TensorInfo]:
return self.meta_graph.signature_def[
self.model_config.saved_model_signature_key].outputs
@property
  def input_tensor_names(self) -> Sequence[str]:
return [info.name for info in self.input_tensor_info.values()]
@property
def output_tensor_names(self) -> Sequence[str]:
return [info.name for info in self.output_tensor_info.values()]
def generate_random_inputs(self,
batch_size: Optional[int] = None
) -> Mapping[str, np.ndarray]:
batch_size = batch_size or self.model_config.default_batch_size
return {
tensor_info.name: _generate_random_tensor_v1(tensor_info, batch_size)
for tensor_info in self.input_tensor_info.values()
}
def run(self,
inputs: Optional[Mapping[str, np.ndarray]] = None,
warmup_iterations=10,
benchmark_iterations=100,
allow_to_use_gpu=False) -> TestResult:
inputs = inputs or self.generate_random_inputs()
config_proto = None
if not allow_to_use_gpu:
config_proto = config_pb2.ConfigProto(device_count={"CPU": 1, "GPU": 0})
with session.Session(config=config_proto) as sess:
importer.import_graph_def(self.meta_graph.graph_def)
try:
for _ in range(warmup_iterations):
sess.run(fetches=self.output_tensor_names, feed_dict=inputs)
latency = []
for _ in range(benchmark_iterations):
before = time.time()
outputs = sess.run(fetches=self.output_tensor_names, feed_dict=inputs)
latency.append(time.time() - before)
except Exception as exc:
raise RuntimeError("Failed to run model inference! "
"Model information: {}".format(str(self))) from exc
outputs = dict(zip(self.output_tensor_names, outputs))
return TestResult(latency=latency, outputs=outputs if inputs else None)
class ModelHandlerV2(_ModelHandlerBase):
"""Runs a model in TF2."""
@property
def graph_func(self):
graph_func = load_graph_func(
saved_model_dir=self.model_config.saved_model_dir,
saved_model_tags=self.model_config.saved_model_tags,
saved_model_signature_key=self.model_config.saved_model_signature_key)
return convert_to_constants.convert_variables_to_constants_v2(graph_func)
@property
def input_tensor_names(self):
return [tensor.name for tensor in self.graph_func.inputs]
@property
def output_tensor_names(self):
return [tensor.name for tensor in self.graph_func.outputs]
def generate_random_inputs(self,
batch_size: Optional[int] = None
) -> Sequence[framework_ops.Tensor]:
batch_size = batch_size or self.model_config.default_batch_size
return [
_generate_random_tensor_v2(tensor, batch_size)
for tensor in self.graph_func.inputs
]
def run(self,
inputs: Optional[Sequence[framework_ops.Tensor]] = None,
warmup_iterations=10,
benchmark_iterations=100,
allow_to_use_gpu=False) -> TestResult:
inputs = inputs or self.generate_random_inputs()
try:
device = "/device:gpu:0" if allow_to_use_gpu else "/device:cpu:0"
with framework_ops.device(device):
for _ in range(warmup_iterations):
self.graph_func(*inputs)
latency = []
for _ in range(benchmark_iterations):
before = time.time()
outputs = self.graph_func(*inputs)
latency.append(time.time() - before)
except Exception as exc:
raise RuntimeError("Failed to run model inference! "
"Model information: {}".format(str(self))) from exc
outputs = dict(zip(self.output_tensor_names, outputs))
return TestResult(latency=latency, outputs=outputs if inputs else None)
class _TrtModelHandlerBase(_ModelHandlerBase):
"""Base class for converting and running a model."""
def __init__(
self,
model_config: ModelConfig,
trt_convert_params: trt.TrtConversionParams,
):
super(_TrtModelHandlerBase, self).__init__(model_config)
self._trt_convert_params = trt_convert_params
self._converter = self._create_converter(trt_convert_params)
logging.info("Converting to TensorRT!")
self._check_conversion(self._converter.convert())
self._conversion_is_saved = False
@abc.abstractmethod
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
"""Creates a converter for the corresponding TF version."""
@abc.abstractmethod
def _check_conversion(self, conversion_output):
"""Checks if conversion output has any TensorRT engines."""
def _check_contains_trt_engine(self, graph_def: graph_pb2.GraphDef):
if "TRTEngineOp" not in [node.op for node in graph_def.node]:
raise RuntimeError("Failed to convert to TensorRT! "
"Model Information: {}".format(str(self)))
def __str__(self) -> str:
base = super(_TrtModelHandlerBase, self).__str__()
return "{}, TrtConversionParams: {}".format(base,
str(self._trt_convert_params))
@property
def trt_convert_params(self) -> trt.TrtConversionParams:
return self._trt_convert_params
def save(self,
output_saved_model_dir: Optional[str] = None,
overwrite=True) -> None:
"""Saves a TensorRT converted model."""
if self._conversion_is_saved and not overwrite:
return
output_saved_model_dir = output_saved_model_dir or tempfile.mkdtemp()
logging.info("Saving TensorRT model to %s!", output_saved_model_dir)
self._converter.save(output_saved_model_dir)
self._model_config = self.model_config._replace(
saved_model_dir=output_saved_model_dir)
self._conversion_is_saved = True
class TrtModelHandlerV1(_TrtModelHandlerBase, ModelHandlerV1):
"""Converts a TF1 model with TensorRT and runs the converted model."""
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
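    # Output nodes are denylisted so they are excluded from TensorRT conversion.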
conversion_nodes_denylist = self.output_tensor_names
return trt.TrtGraphConverter(
input_saved_model_dir=self.model_config.saved_model_dir,
input_saved_model_tags=self.model_config.saved_model_tags,
input_saved_model_signature_key=(
self.model_config.saved_model_signature_key),
nodes_denylist=conversion_nodes_denylist,
max_batch_size=trt_convert_params.max_batch_size,
max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes,
precision_mode=trt_convert_params.precision_mode,
minimum_segment_size=trt_convert_params.minimum_segment_size,
is_dynamic_op=trt_convert_params.is_dynamic_op,
maximum_cached_engines=trt_convert_params.maximum_cached_engines,
use_calibration=trt_convert_params.use_calibration,
)
_check_conversion = _TrtModelHandlerBase._check_contains_trt_engine
def run(self,
inputs: Optional[Mapping[str, np.ndarray]] = None,
warmup_iterations=10,
benchmark_iterations=100) -> TestResult:
self.save(overwrite=False)
logging.info("Running with TensorRT!")
test_result = ModelHandlerV1.run(
self,
inputs,
warmup_iterations,
benchmark_iterations,
allow_to_use_gpu=True)
return test_result._replace(trt_convert_params=self._trt_convert_params)
class TrtModelHandlerV2(_TrtModelHandlerBase, ModelHandlerV2):
"""Converts a TF2 model with TensorRT and runs the converted model."""
def _create_converter(self, trt_convert_params: trt.TrtConversionParams):
return trt.TrtGraphConverterV2(
input_saved_model_dir=self.model_config.saved_model_dir,
input_saved_model_tags=self.model_config.saved_model_tags,
input_saved_model_signature_key=(
self.model_config.saved_model_signature_key),
conversion_params=trt_convert_params)
def _check_conversion(self, graph_func):
graph_def = graph_func.graph.as_graph_def()
self._check_contains_trt_engine(graph_def)
def run(self,
inputs: Optional[Sequence[framework_ops.Tensor]] = None,
warmup_iterations=10,
benchmark_iterations=100) -> TestResult:
self.save(overwrite=False)
logging.info("Running with TensorRT!")
test_result = ModelHandlerV2.run(
self,
inputs,
warmup_iterations,
benchmark_iterations,
allow_to_use_gpu=True)
return test_result._replace(trt_convert_params=self._trt_convert_params)
class _ModelHandlerManagerBase(metaclass=abc.ABCMeta):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking."""
def __init__(
self, model_config: ModelConfig,
default_trt_convert_params: trt.TrtConversionParams,
trt_convert_params_updater: Callable[[trt.TrtConversionParams],
Iterable[trt.TrtConversionParams]]):
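    """Builds the original model handler plus one TensorRT handler per conversion parameter setting."""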
self._ori_model = self.model_handler_cls(model_config)
self._trt_models = []
for trt_convert_params in trt_convert_params_updater(
default_trt_convert_params):
trt_model = self.trt_model_handler_cls(
model_config, trt_convert_params=trt_convert_params)
self._trt_models.append(trt_model)
self._result_collection = TestResultCollection(
results=[], config=model_config)
def __str__(self) -> str:
return "Input Model: {}".format(str(self._ori_model))
def __repr__(self) -> str:
return "{}({})".format(self.__class__.__name__, str(self))
@property
@classmethod
@abc.abstractmethod
def model_handler_cls(cls):
"""The modle handler class. ModelHandleV1/ModelHandlerV2."""
@property
@classmethod
@abc.abstractmethod
def trt_model_handler_cls(cls):
"""The TensorRTmodle handler class. TrtModelHandleV1/TrtModelHandlerV2."""
@property
def model_config(self):
return self._ori_model.model_config
def generate_random_inputs(self, batch_size: Optional[int] = None):
return self._ori_model.generate_random_inputs(batch_size)
def run(self,
inputs=None,
warmup_iterations: int = 10,
benchmark_iterations: int = 100) -> TestResultCollection:
"""Runs model inference with provided or randomly generated input tensors.
Args:
inputs: Mapping from names to input ndarrays in TF1. Or a sequence of
        tensors in TF2. If `None`, randomly generated input tensors will be used
instead.
warmup_iterations: Number of inferences to warm up the runtime.
benchmark_iterations: Number of inferences to measure the latency.
Returns:
`TestResultCollection` summarizing timing and numerics information for
different TensorRT conversion settings.
"""
inputs = inputs or self.generate_random_inputs()
results = [
model.run(inputs, warmup_iterations, benchmark_iterations)
for model in [self._ori_model] + self._trt_models
]
return self._result_collection._replace(results=results)
class ModelHandlerManagerV1(_ModelHandlerManagerBase):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF1."""
model_handler_cls = ModelHandlerV1
trt_model_handler_cls = TrtModelHandlerV1
class ModelHandlerManagerV2(_ModelHandlerManagerBase):
"""Manages a series of ModelHandlers for aggregrated testing/benchmarking in TF2."""
model_handler_cls = ModelHandlerV2
trt_model_handler_cls = TrtModelHandlerV2
| [
"tensorflow.python.framework.convert_to_constants.convert_variables_to_constants_from_session_graph",
"collections.namedtuple",
"tensorflow.python.ops.random_ops.random_uniform",
"tensorflow.python.saved_model.load.load",
"tensorflow.python.client.session.Session",
"tensorflow.core.protobuf.config_pb2.ConfigProto",
"tensorflow.python.saved_model.loader.load",
"absl.logging.info",
"tensorflow.python.framework.importer.import_graph_def",
"tensorflow.python.compiler.tensorrt.trt_convert.TrtGraphConverter",
"tensorflow.python.compiler.tensorrt.trt_convert.TrtGraphConverterV2",
"tempfile.mkdtemp",
"tensorflow.python.framework.convert_to_constants.convert_variables_to_constants_v2",
"tensorflow.python.framework.ops.device",
"functools.lru_cache",
"tensorflow.python.framework.dtypes.as_dtype",
"time.time"
]
| [((3602, 3623), 'functools.lru_cache', 'functools.lru_cache', ([], {}), '()\n', (3621, 3623), False, 'import functools\n'), ((4355, 4376), 'functools.lru_cache', 'functools.lru_cache', ([], {}), '()\n', (4374, 4376), False, 'import functools\n'), ((4806, 4892), 'collections.namedtuple', 'collections.namedtuple', (['"""TestResult"""', "['outputs', 'latency', 'trt_convert_params']"], {}), "('TestResult', ['outputs', 'latency',\n 'trt_convert_params'])\n", (4828, 4892), False, 'import collections\n'), ((5242, 5375), 'collections.namedtuple', 'collections.namedtuple', (['"""ModelConfig"""', "['saved_model_dir', 'saved_model_tags', 'saved_model_signature_key',\n 'default_batch_size']"], {}), "('ModelConfig', ['saved_model_dir',\n 'saved_model_tags', 'saved_model_signature_key', 'default_batch_size'])\n", (5264, 5375), False, 'import collections\n'), ((5944, 6013), 'collections.namedtuple', 'collections.namedtuple', (['"""TestResultCollection"""', "['results', 'config']"], {}), "('TestResultCollection', ['results', 'config'])\n", (5966, 6013), False, 'import collections\n'), ((2866, 2903), 'tensorflow.python.framework.dtypes.as_dtype', 'tf_dtypes.as_dtype', (['tensor_info.dtype'], {}), '(tensor_info.dtype)\n', (2884, 2903), True, 'from tensorflow.python.framework import dtypes as tf_dtypes\n'), ((3410, 3486), 'tensorflow.python.ops.random_ops.random_uniform', 'random_ops.random_uniform', ([], {'shape': 'shape', 'dtype': 'tensor.dtype', 'name': 'tensor.name'}), '(shape=shape, dtype=tensor.dtype, name=tensor.name)\n', (3435, 3486), False, 'from tensorflow.python.ops import random_ops\n'), ((4547, 4619), 'tensorflow.python.saved_model.load.load', 'saved_model_load.load', ([], {'export_dir': 'saved_model_dir', 'tags': 'saved_model_tags'}), '(export_dir=saved_model_dir, tags=saved_model_tags)\n', (4568, 4619), True, 'from tensorflow.python.saved_model import load as saved_model_load\n'), ((4698, 4764), 'tensorflow.python.framework.convert_to_constants.convert_variables_to_constants_v2', 'convert_to_constants.convert_variables_to_constants_v2', (['graph_func'], {}), '(graph_func)\n', (4752, 4764), False, 'from tensorflow.python.framework import convert_to_constants\n'), ((2986, 3003), 'tensorflow.python.client.session.Session', 'session.Session', ([], {}), '()\n', (3001, 3003), False, 'from tensorflow.python.client import session\n'), ((3811, 3828), 'tensorflow.python.client.session.Session', 'session.Session', ([], {}), '()\n', (3826, 3828), False, 'from tensorflow.python.client import session\n'), ((3855, 3945), 'tensorflow.python.saved_model.loader.load', 'saved_model_loader.load', ([], {'sess': 'sess', 'export_dir': 'saved_model_dir', 'tags': 'saved_model_tags'}), '(sess=sess, export_dir=saved_model_dir, tags=\n saved_model_tags)\n', (3878, 3945), True, 'from tensorflow.python.saved_model import loader as saved_model_loader\n'), ((4155, 4276), 'tensorflow.python.framework.convert_to_constants.convert_variables_to_constants_from_session_graph', 'convert_to_constants.convert_variables_to_constants_from_session_graph', (['sess', 'meta_graph.graph_def', 'output_node_names'], {}), '(sess,\n meta_graph.graph_def, output_node_names)\n', (4225, 4276), False, 'from tensorflow.python.framework import convert_to_constants\n'), ((10665, 10731), 'tensorflow.python.framework.convert_to_constants.convert_variables_to_constants_v2', 'convert_to_constants.convert_variables_to_constants_v2', (['graph_func'], {}), '(graph_func)\n', (10719, 10731), False, 'from tensorflow.python.framework import 
convert_to_constants\n'), ((12645, 12684), 'absl.logging.info', 'logging.info', (['"""Converting to TensorRT!"""'], {}), "('Converting to TensorRT!')\n", (12657, 12684), False, 'from absl import logging\n'), ((13965, 14033), 'absl.logging.info', 'logging.info', (['"""Saving TensorRT model to %s!"""', 'output_saved_model_dir'], {}), "('Saving TensorRT model to %s!', output_saved_model_dir)\n", (13977, 14033), False, 'from absl import logging\n'), ((14504, 15205), 'tensorflow.python.compiler.tensorrt.trt_convert.TrtGraphConverter', 'trt.TrtGraphConverter', ([], {'input_saved_model_dir': 'self.model_config.saved_model_dir', 'input_saved_model_tags': 'self.model_config.saved_model_tags', 'input_saved_model_signature_key': 'self.model_config.saved_model_signature_key', 'nodes_denylist': 'conversion_nodes_denylist', 'max_batch_size': 'trt_convert_params.max_batch_size', 'max_workspace_size_bytes': 'trt_convert_params.max_workspace_size_bytes', 'precision_mode': 'trt_convert_params.precision_mode', 'minimum_segment_size': 'trt_convert_params.minimum_segment_size', 'is_dynamic_op': 'trt_convert_params.is_dynamic_op', 'maximum_cached_engines': 'trt_convert_params.maximum_cached_engines', 'use_calibration': 'trt_convert_params.use_calibration'}), '(input_saved_model_dir=self.model_config.\n saved_model_dir, input_saved_model_tags=self.model_config.\n saved_model_tags, input_saved_model_signature_key=self.model_config.\n saved_model_signature_key, nodes_denylist=conversion_nodes_denylist,\n max_batch_size=trt_convert_params.max_batch_size,\n max_workspace_size_bytes=trt_convert_params.max_workspace_size_bytes,\n precision_mode=trt_convert_params.precision_mode, minimum_segment_size=\n trt_convert_params.minimum_segment_size, is_dynamic_op=\n trt_convert_params.is_dynamic_op, maximum_cached_engines=\n trt_convert_params.maximum_cached_engines, use_calibration=\n trt_convert_params.use_calibration)\n', (14525, 15205), True, 'from tensorflow.python.compiler.tensorrt import trt_convert as trt\n'), ((15536, 15574), 'absl.logging.info', 'logging.info', (['"""Running with TensorRT!"""'], {}), "('Running with TensorRT!')\n", (15548, 15574), False, 'from absl import logging\n'), ((16034, 16303), 'tensorflow.python.compiler.tensorrt.trt_convert.TrtGraphConverterV2', 'trt.TrtGraphConverterV2', ([], {'input_saved_model_dir': 'self.model_config.saved_model_dir', 'input_saved_model_tags': 'self.model_config.saved_model_tags', 'input_saved_model_signature_key': 'self.model_config.saved_model_signature_key', 'conversion_params': 'trt_convert_params'}), '(input_saved_model_dir=self.model_config.\n saved_model_dir, input_saved_model_tags=self.model_config.\n saved_model_tags, input_saved_model_signature_key=self.model_config.\n saved_model_signature_key, conversion_params=trt_convert_params)\n', (16057, 16303), True, 'from tensorflow.python.compiler.tensorrt import trt_convert as trt\n'), ((16678, 16716), 'absl.logging.info', 'logging.info', (['"""Running with TensorRT!"""'], {}), "('Running with TensorRT!')\n", (16690, 16716), False, 'from absl import logging\n'), ((9481, 9538), 'tensorflow.core.protobuf.config_pb2.ConfigProto', 'config_pb2.ConfigProto', ([], {'device_count': "{'CPU': 1, 'GPU': 0}"}), "(device_count={'CPU': 1, 'GPU': 0})\n", (9503, 9538), False, 'from tensorflow.core.protobuf import config_pb2\n'), ((9548, 9584), 'tensorflow.python.client.session.Session', 'session.Session', ([], {'config': 'config_proto'}), '(config=config_proto)\n', (9563, 9584), False, 'from tensorflow.python.client import 
session\n'), ((9600, 9652), 'tensorflow.python.framework.importer.import_graph_def', 'importer.import_graph_def', (['self.meta_graph.graph_def'], {}), '(self.meta_graph.graph_def)\n', (9625, 9652), False, 'from tensorflow.python.framework import importer\n'), ((13942, 13960), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (13958, 13960), False, 'import tempfile\n'), ((11644, 11672), 'tensorflow.python.framework.ops.device', 'framework_ops.device', (['device'], {}), '(device)\n', (11664, 11672), True, 'from tensorflow.python.framework import ops as framework_ops\n'), ((9864, 9875), 'time.time', 'time.time', ([], {}), '()\n', (9873, 9875), False, 'import time\n'), ((11838, 11849), 'time.time', 'time.time', ([], {}), '()\n', (11847, 11849), False, 'import time\n'), ((9982, 9993), 'time.time', 'time.time', ([], {}), '()\n', (9991, 9993), False, 'import time\n'), ((11920, 11931), 'time.time', 'time.time', ([], {}), '()\n', (11929, 11931), False, 'import time\n')] |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import List, Pattern
from recognizers_text.utilities import RegExpUtility
from recognizers_number.number import BaseNumberParser
from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor
from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration
from ...resources.base_date_time import BaseDateTime
from ...resources.italian_date_time import ItalianDateTime
from ..extractors import DateTimeExtractor
from ..base_duration import BaseDurationExtractor
from ..base_date import BaseDateExtractor
from ..base_dateperiod import DatePeriodExtractorConfiguration, MatchedIndex
from .duration_extractor_config import ItalianDurationExtractorConfiguration
from .date_extractor_config import ItalianDateExtractorConfiguration
from recognizers_text.extractor import Extractor
from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor
class ItalianDatePeriodExtractorConfiguration(DatePeriodExtractorConfiguration):
    @property
    def previous_prefix_regex(self) -> Pattern:
        return self._previous_prefix_regex

    @property
    def check_both_before_after(self) -> bool:
        return self._check_both_before_after

    @property
    def simple_cases_regexes(self) -> List[Pattern]:
        return self._simple_cases_regexes

    @property
    def illegal_year_regex(self) -> Pattern:
        return self._illegal_year_regex

    @property
    def year_regex(self) -> Pattern:
        return self._year_regex

    @property
    def till_regex(self) -> Pattern:
        return self._till_regex

    @property
    def followed_unit(self) -> Pattern:
        return self._followed_unit

    @property
    def number_combined_with_unit(self) -> Pattern:
        return self._number_combined_with_unit

    @property
    def past_regex(self) -> Pattern:
        return self._past_regex

    @property
    def decade_with_century_regex(self) -> Pattern:
        return self._decade_with_century_regex

    @property
    def future_regex(self) -> Pattern:
        return self._future_regex

    @property
    def week_of_regex(self) -> Pattern:
        return self._week_of_regex

    @property
    def month_of_regex(self) -> Pattern:
        return self._month_of_regex

    @property
    def date_unit_regex(self) -> Pattern:
        return self._date_unit_regex

    @property
    def in_connector_regex(self) -> Pattern:
        return self._in_connector_regex

    @property
    def range_unit_regex(self) -> Pattern:
        return self._range_unit_regex

    @property
    def date_point_extractor(self) -> DateTimeExtractor:
        return self._date_point_extractor

    @property
    def integer_extractor(self) -> BaseNumberExtractor:
        return self._integer_extractor

    @property
    def number_parser(self) -> BaseNumberParser:
        return self._number_parser

    @property
    def duration_extractor(self) -> DateTimeExtractor:
        return self._duration_extractor

    @property
    def now_regex(self) -> Pattern:
        return self._now_regex

    @property
    def future_suffix_regex(self) -> Pattern:
        return self._future_suffix_regex

    @property
    def ago_regex(self) -> Pattern:
        return self._ago_regex

    @property
    def later_regex(self) -> Pattern:
        return self._later_regex

    @property
    def less_than_regex(self) -> Pattern:
        return self._less_than_regex

    @property
    def more_than_regex(self) -> Pattern:
        return self._more_than_regex

    @property
    def duration_date_restrictions(self) -> [str]:
        return self._duration_date_restrictions

    @property
    def year_period_regex(self) -> Pattern:
        return self._year_period_regex

    @property
    def month_num_regex(self) -> Pattern:
        return self._month_num_regex

    @property
    def century_suffix_regex(self) -> Pattern:
        return self._century_suffix_regex

    @property
    def ordinal_extractor(self) -> BaseNumberExtractor:
        return self._ordinal_extractor

    @property
    def cardinal_extractor(self) -> Extractor:
        return self._cardinal_extractor

    @property
    def time_unit_regex(self) -> Pattern:
        return self._time_unit_regex

    @property
    def within_next_prefix_regex(self) -> Pattern:
        return self._within_next_prefix_regex

    @property
    def range_connector_regex(self) -> Pattern:
        return self._range_connector_regex

    @property
    def day_regex(self) -> Pattern:
        return self._day_regex

    @property
    def week_day_regex(self) -> Pattern:
        return self._week_day_regex

    @property
    def relative_month_regex(self) -> Pattern:
        return self._relative_month_regex

    @property
    def month_suffix_regex(self) -> Pattern:
        return self._month_suffix_regex

    @property
    def past_prefix_regex(self) -> Pattern:
        return self._past_prefix_regex

    @property
    def next_prefix_regex(self) -> Pattern:
        return self._next_prefix_regex

    @property
    def this_prefix_regex(self) -> Pattern:
        return self._this_prefix_regex

    @property
    def which_week_regex(self) -> Pattern:
        return self._which_week_regex

    @property
    def rest_of_date_regex(self) -> Pattern:
        return self._rest_of_date_regex

    @property
    def complex_date_period_regex(self) -> Pattern:
        return self._complex_date_period_regex

    @property
    def week_day_of_month_regex(self) -> Pattern:
        return self._week_day_of_month_regex

    @property
    def all_half_year_regex(self) -> Pattern:
        return self._all_half_year_regex

    def __init__(self):
        self._all_half_year_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.AllHalfYearRegex)
        self._week_day_of_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex)
        self._complex_date_period_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ComplexDatePeriodRegex)
        self._rest_of_date_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RestOfDateRegex)
        self._which_week_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WhichWeekRegex)
        self._this_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.ThisPrefixRegex)
        self._next_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.NextSuffixRegex)
        self._past_prefix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.PastSuffixRegex)
        self._month_suffix_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthSuffixRegex)
        self._relative_month_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeMonthRegex)
        self._week_day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayRegex)
        self._day_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.DayRegex)
        self._range_connector_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.RangeConnectorRegex)
        self._time_unit_regex = RegExpUtility.get_safe_reg_exp(ItalianDateTime.TimeUnitRegex)
        self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.PastSuffixRegex)
        self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter
        self._simple_cases_regexes = [
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.SimpleCasesRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.BetweenRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.OneWordPeriodRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthWithYear),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.MonthNumWithYear),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPeriodRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekOfYearRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.WeekDayOfMonthRegex),
            RegExpUtility.get_safe_reg_exp(
                ItalianDateTime.MonthFrontBetweenRegex),
            RegExpUtility.get_safe_reg_exp(
                ItalianDateTime.MonthFrontSimpleCasesRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.QuarterRegex),
            RegExpUtility.get_safe_reg_exp(
                ItalianDateTime.QuarterRegexYearFront),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.SeasonRegex),
            RegExpUtility.get_safe_reg_exp(
                ItalianDateTime.LaterEarlyPeriodRegex),
            RegExpUtility.get_safe_reg_exp(
                ItalianDateTime.WeekWithWeekDayRangeRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.YearPlusNumberRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.DecadeWithCenturyRegex),
            RegExpUtility.get_safe_reg_exp(ItalianDateTime.RelativeDecadeRegex)
        ]
        self._check_both_before_after = ItalianDateTime.CheckBothBeforeAfter
        self._illegal_year_regex = RegExpUtility.get_safe_reg_exp(
            BaseDateTime.IllegalYearRegex)
        self._year_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.YearRegex)
        self._till_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.TillRegex)
        self._followed_unit = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.FollowedDateUnit)
        self._number_combined_with_unit = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.NumberCombinedWithDateUnit)
        self._past_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.PastSuffixRegex)
        self._future_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.NextSuffixRegex)
        self._week_of_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.WeekOfRegex)
        self._month_of_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.MonthOfRegex)
        self._date_unit_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.DateUnitRegex)
        self._within_next_prefix_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.WithinNextPrefixRegex)
        self._in_connector_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.InConnectorRegex)
        self._range_unit_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.RangeUnitRegex)
        self.from_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.FromRegex)
        self.connector_and_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.ConnectorAndRegex)
        self.before_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.BeforeRegex2)
        self._date_point_extractor = BaseDateExtractor(
            ItalianDateExtractorConfiguration())
        self._integer_extractor = ItalianIntegerExtractor()
        self._number_parser = BaseNumberParser(
            ItalianNumberParserConfiguration())
        self._duration_extractor = BaseDurationExtractor(
            ItalianDurationExtractorConfiguration())
        self._now_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.NowRegex)
        self._future_suffix_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.FutureSuffixRegex
        )
        self._ago_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.AgoRegex
        )
        self._later_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.LaterRegex
        )
        self._less_than_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.LessThanRegex
        )
        self._more_than_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.MoreThanRegex
        )
        self._duration_date_restrictions = ItalianDateTime.DurationDateRestrictions
        self._year_period_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.YearPeriodRegex
        )
        self._month_num_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.MonthNumRegex
        )
        self._century_suffix_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.CenturySuffixRegex
        )
        self._ordinal_extractor = ItalianOrdinalExtractor()
        self._cardinal_extractor = ItalianCardinalExtractor()
        self._previous_prefix_regex = RegExpUtility.get_safe_reg_exp(
            ItalianDateTime.PreviousPrefixRegex
        )
        self._cardinal_extractor = ItalianCardinalExtractor()
        # TODO When the implementation for these properties is added, change the None values to their respective Regexps
        self._time_unit_regex = None

    def get_from_token_index(self, source: str) -> MatchedIndex:
        match = self.from_regex.search(source)
        if match:
            return MatchedIndex(True, match.start())
        return MatchedIndex(False, -1)

    def get_between_token_index(self, source: str) -> MatchedIndex:
        match = self.before_regex.search(source)
        if match:
            return MatchedIndex(True, match.start())
        return MatchedIndex(False, -1)

    def has_connector_token(self, source: str) -> bool:
        return not self.connector_and_regex.search(source) is None
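
# A minimal usage sketch (assumed, not shown in this module): this configuration
# is normally handed to the package's date-period extractor; the import location
# and the sample text below are illustrative only.
#
#   from ..base_dateperiod import BaseDatePeriodExtractor  # assumed location
#   extractor = BaseDatePeriodExtractor(ItalianDatePeriodExtractorConfiguration())
#   tokens = extractor.extract("dal 5 al 10 gennaio")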
| [
"recognizers_number.ItalianCardinalExtractor",
"recognizers_number.number.italian.extractors.ItalianIntegerExtractor",
"recognizers_text.utilities.RegExpUtility.get_safe_reg_exp",
"recognizers_number.ItalianOrdinalExtractor",
"recognizers_number.number.italian.parsers.ItalianNumberParserConfiguration"
]
| [((5851, 5915), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.AllHalfYearRegex'], {}), '(ItalianDateTime.AllHalfYearRegex)\n', (5881, 5915), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((5956, 6023), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.WeekDayOfMonthRegex'], {}), '(ItalianDateTime.WeekDayOfMonthRegex)\n', (5986, 6023), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6066, 6136), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.ComplexDatePeriodRegex'], {}), '(ItalianDateTime.ComplexDatePeriodRegex)\n', (6096, 6136), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6172, 6235), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.RestOfDateRegex'], {}), '(ItalianDateTime.RestOfDateRegex)\n', (6202, 6235), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6269, 6331), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.WhichWeekRegex'], {}), '(ItalianDateTime.WhichWeekRegex)\n', (6299, 6331), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6366, 6429), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.ThisPrefixRegex'], {}), '(ItalianDateTime.ThisPrefixRegex)\n', (6396, 6429), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6464, 6527), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.NextSuffixRegex'], {}), '(ItalianDateTime.NextSuffixRegex)\n', (6494, 6527), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6562, 6625), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.PastSuffixRegex'], {}), '(ItalianDateTime.PastSuffixRegex)\n', (6592, 6625), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6661, 6725), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MonthSuffixRegex'], {}), '(ItalianDateTime.MonthSuffixRegex)\n', (6691, 6725), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6763, 6829), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.RelativeMonthRegex'], {}), '(ItalianDateTime.RelativeMonthRegex)\n', (6793, 6829), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6861, 6921), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.WeekDayRegex'], {}), '(ItalianDateTime.WeekDayRegex)\n', (6891, 6921), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((6948, 7004), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.DayRegex'], {}), '(ItalianDateTime.DayRegex)\n', (6978, 7004), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7043, 7110), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.RangeConnectorRegex'], {}), '(ItalianDateTime.RangeConnectorRegex)\n', (7073, 7110), False, 'from recognizers_text.utilities import 
RegExpUtility\n'), ((7143, 7204), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.TimeUnitRegex'], {}), '(ItalianDateTime.TimeUnitRegex)\n', (7173, 7204), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7243, 7306), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.PastSuffixRegex'], {}), '(ItalianDateTime.PastSuffixRegex)\n', (7273, 7306), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9151, 9212), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['BaseDateTime.IllegalYearRegex'], {}), '(BaseDateTime.IllegalYearRegex)\n', (9181, 9212), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9253, 9310), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.YearRegex'], {}), '(ItalianDateTime.YearRegex)\n', (9283, 9310), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9351, 9408), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.TillRegex'], {}), '(ItalianDateTime.TillRegex)\n', (9381, 9408), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9452, 9516), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.FollowedDateUnit'], {}), '(ItalianDateTime.FollowedDateUnit)\n', (9482, 9516), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9572, 9646), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.NumberCombinedWithDateUnit'], {}), '(ItalianDateTime.NumberCombinedWithDateUnit)\n', (9602, 9646), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9687, 9750), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.PastSuffixRegex'], {}), '(ItalianDateTime.PastSuffixRegex)\n', (9717, 9750), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9793, 9856), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.NextSuffixRegex'], {}), '(ItalianDateTime.NextSuffixRegex)\n', (9823, 9856), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((9900, 9959), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.WeekOfRegex'], {}), '(ItalianDateTime.WeekOfRegex)\n', (9930, 9959), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10004, 10064), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MonthOfRegex'], {}), '(ItalianDateTime.MonthOfRegex)\n', (10034, 10064), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10110, 10171), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.DateUnitRegex'], {}), '(ItalianDateTime.DateUnitRegex)\n', (10140, 10171), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10226, 10295), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.WithinNextPrefixRegex'], {}), '(ItalianDateTime.WithinNextPrefixRegex)\n', (10256, 10295), False, 'from recognizers_text.utilities import RegExpUtility\n'), 
((10344, 10408), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.InConnectorRegex'], {}), '(ItalianDateTime.InConnectorRegex)\n', (10374, 10408), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10455, 10517), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.RangeUnitRegex'], {}), '(ItalianDateTime.RangeUnitRegex)\n', (10485, 10517), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10558, 10615), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.FromRegex'], {}), '(ItalianDateTime.FromRegex)\n', (10588, 10615), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10664, 10729), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.ConnectorAndRegex'], {}), '(ItalianDateTime.ConnectorAndRegex)\n', (10694, 10729), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10771, 10831), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.BeforeRegex2'], {}), '(ItalianDateTime.BeforeRegex2)\n', (10801, 10831), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((10985, 11010), 'recognizers_number.number.italian.extractors.ItalianIntegerExtractor', 'ItalianIntegerExtractor', ([], {}), '()\n', (11008, 11010), False, 'from recognizers_number.number.italian.extractors import ItalianIntegerExtractor, ItalianCardinalExtractor\n'), ((11244, 11300), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.NowRegex'], {}), '(ItalianDateTime.NowRegex)\n', (11274, 11300), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((11350, 11415), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.FutureSuffixRegex'], {}), '(ItalianDateTime.FutureSuffixRegex)\n', (11380, 11415), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((11464, 11520), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.AgoRegex'], {}), '(ItalianDateTime.AgoRegex)\n', (11494, 11520), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((11571, 11629), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.LaterRegex'], {}), '(ItalianDateTime.LaterRegex)\n', (11601, 11629), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((11684, 11745), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.LessThanRegex'], {}), '(ItalianDateTime.LessThanRegex)\n', (11714, 11745), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((11800, 11861), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MoreThanRegex'], {}), '(ItalianDateTime.MoreThanRegex)\n', (11830, 11861), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((12002, 12065), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.YearPeriodRegex'], {}), '(ItalianDateTime.YearPeriodRegex)\n', (12032, 12065), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((12120, 12181), 
'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MonthNumRegex'], {}), '(ItalianDateTime.MonthNumRegex)\n', (12150, 12181), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((12241, 12307), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.CenturySuffixRegex'], {}), '(ItalianDateTime.CenturySuffixRegex)\n', (12271, 12307), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((12364, 12389), 'recognizers_number.ItalianOrdinalExtractor', 'ItalianOrdinalExtractor', ([], {}), '()\n', (12387, 12389), False, 'from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor\n'), ((12425, 12451), 'recognizers_number.ItalianCardinalExtractor', 'ItalianCardinalExtractor', ([], {}), '()\n', (12449, 12451), False, 'from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor\n'), ((12490, 12557), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.PreviousPrefixRegex'], {}), '(ItalianDateTime.PreviousPrefixRegex)\n', (12520, 12557), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((12615, 12641), 'recognizers_number.ItalianCardinalExtractor', 'ItalianCardinalExtractor', ([], {}), '()\n', (12639, 12641), False, 'from recognizers_number import ItalianOrdinalExtractor, BaseNumberExtractor, ItalianCardinalExtractor\n'), ((7448, 7512), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.SimpleCasesRegex'], {}), '(ItalianDateTime.SimpleCasesRegex)\n', (7478, 7512), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7526, 7586), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.BetweenRegex'], {}), '(ItalianDateTime.BetweenRegex)\n', (7556, 7586), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7600, 7666), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.OneWordPeriodRegex'], {}), '(ItalianDateTime.OneWordPeriodRegex)\n', (7630, 7666), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7680, 7741), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MonthWithYear'], {}), '(ItalianDateTime.MonthWithYear)\n', (7710, 7741), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7755, 7819), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MonthNumWithYear'], {}), '(ItalianDateTime.MonthNumWithYear)\n', (7785, 7819), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7833, 7890), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.YearRegex'], {}), '(ItalianDateTime.YearRegex)\n', (7863, 7890), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7904, 7967), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.YearPeriodRegex'], {}), '(ItalianDateTime.YearPeriodRegex)\n', (7934, 7967), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((7981, 8044), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', 
(['ItalianDateTime.WeekOfYearRegex'], {}), '(ItalianDateTime.WeekOfYearRegex)\n', (8011, 8044), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8058, 8125), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.WeekDayOfMonthRegex'], {}), '(ItalianDateTime.WeekDayOfMonthRegex)\n', (8088, 8125), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8139, 8209), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MonthFrontBetweenRegex'], {}), '(ItalianDateTime.MonthFrontBetweenRegex)\n', (8169, 8209), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8240, 8314), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.MonthFrontSimpleCasesRegex'], {}), '(ItalianDateTime.MonthFrontSimpleCasesRegex)\n', (8270, 8314), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8345, 8405), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.QuarterRegex'], {}), '(ItalianDateTime.QuarterRegex)\n', (8375, 8405), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8419, 8488), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.QuarterRegexYearFront'], {}), '(ItalianDateTime.QuarterRegexYearFront)\n', (8449, 8488), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8519, 8578), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.SeasonRegex'], {}), '(ItalianDateTime.SeasonRegex)\n', (8549, 8578), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8592, 8661), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.LaterEarlyPeriodRegex'], {}), '(ItalianDateTime.LaterEarlyPeriodRegex)\n', (8622, 8661), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8692, 8765), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.WeekWithWeekDayRangeRegex'], {}), '(ItalianDateTime.WeekWithWeekDayRangeRegex)\n', (8722, 8765), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8796, 8863), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.YearPlusNumberRegex'], {}), '(ItalianDateTime.YearPlusNumberRegex)\n', (8826, 8863), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8877, 8947), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.DecadeWithCenturyRegex'], {}), '(ItalianDateTime.DecadeWithCenturyRegex)\n', (8907, 8947), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((8961, 9028), 'recognizers_text.utilities.RegExpUtility.get_safe_reg_exp', 'RegExpUtility.get_safe_reg_exp', (['ItalianDateTime.RelativeDecadeRegex'], {}), '(ItalianDateTime.RelativeDecadeRegex)\n', (8991, 9028), False, 'from recognizers_text.utilities import RegExpUtility\n'), ((11071, 11105), 'recognizers_number.number.italian.parsers.ItalianNumberParserConfiguration', 'ItalianNumberParserConfiguration', ([], {}), '()\n', (11103, 11105), False, 'from recognizers_number.number.italian.parsers import ItalianNumberParserConfiguration\n')] |
def test_setupcall():
    """
    Test the call of the setup function
    """
    import jupyter_libertem_proxy as jx

    print("\nRunning test_setupcall...")
    print(jx.setup_libertem())
| [
"jupyter_libertem_proxy.setup_libertem"
]
| [((170, 189), 'jupyter_libertem_proxy.setup_libertem', 'jx.setup_libertem', ([], {}), '()\n', (187, 189), True, 'import jupyter_libertem_proxy as jx\n')] |
import math
import imageio
import cv2 as cv
import numpy as np
import transformer
def fix_rotation(img):
img_copy = img.copy()
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
rows, cols = img.shape
img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)
img = cv.medianBlur(img, 3)
contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
roi = max(contours, key=cv.contourArea)
x, y, w, h = cv.boundingRect(roi)
corners = [[x, y], [x + w, y], [x, y + h], [x + w, y + h]]
src = np.float32(corners)
# src = np.reshape(src, (len(src), 1, 2))
# perimeter = cv.arcLength(src, True)
# corners = cv.approxPolyDP(src, perimeter // 10, True)
# corners = np.vstack(corners)
dst = np.float32([[0, 0], [cols, 0], [0, rows], [cols, rows]])
matrix = cv.getPerspectiveTransform(src, dst)
rotated_img = cv.warpPerspective(img_copy, matrix, (cols, rows))
cv.imshow('', rotated_img)
D1 = 105
D2 = 175
D3 = 275
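# D1, D2 and D3 are presumably the expected radial distances, in millimetres,
# of the three hole circles from the plate centre; they are compared against
# hypot(X, Y) further down to estimate the measurement error.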
if __name__ == "__main__":
cap = cv.VideoCapture('samples/delta.mp4')
if not cap.isOpened():
raise IOError("Video was not opened!")
mse = 0
count = 0
reader = imageio.get_reader('samples/delta.mp4')
fps = reader.get_meta_data()['fps']
writer = imageio.get_writer('samples/result.mp4', fps=fps)
while True:
res, frame = cap.read()
if not res:
break
mean_error = 0
holes_count = 0
img = frame.copy()
cv.imshow('dfa', img)
frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
frame_copy = frame.copy()
# frame = cv.adaptiveThreshold(frame, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
# kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
# frame = cv.morphologyEx(frame, cv.MORPH_OPEN, kernel)
# frame = cv.medianBlur(frame, 3)
# contours, hierarchy = cv.findContours(frame, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
# roi = max(contours, key=cv.contourArea)
# x, y, w, h = cv.boundingRect(roi)
x, y, w, h = 115, 0, 445, 360
img = img[y: y+h, x: x+w]
img = transformer.rotate_along_axis(img, theta=40)
frame_copy = frame_copy[y: y+h, x: x+w]
frame_copy = transformer.rotate_along_axis(frame_copy, theta=40)
# cv.imshow('', frame_copy)
# cv.rectangle(frame_copy, (x, y), (x + w, y + h), (0, 255, 0), 2)
# cv.drawContours(frame_copy, roi, -1, (0, 0, 255), 2)
# res, mask = cv.threshold(frame_copy, 0, 255, cv.THRESH_BINARY)
# frame_copy = cv.bitwise_and(frame_copy, frame_copy, mask=mask)
# corners = cv.goodFeaturesToTrack(frame_copy, 1000, 0.0001, 1)
# corners = list(sorted(corners, key=lambda x: x[0][1]))
# print(corners[-1], corners[-2])
# print()
# corners = np.array([[38, 293], [407, 293]])
# for item in corners:
# # x, y = map(int, item.ravel())
# x, y = item
# cv.circle(img, (x, y), 5, (0, 0, 255), -1)
src = np.float32([[0, 0], [w, 0], [38, 293], [407, 293]])
dst = np.float32([[0, 0], [w, 0], [30, h], [w - 30, h]])
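        # Presumably: warp the manually picked plate corners (src) onto a
        # fronto-parallel rectangle (dst) so hole positions can be measured in
        # the plane of the plate.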
matrix = cv.getPerspectiveTransform(src, dst)
img = cv.warpPerspective(img, matrix, (w, h))
cv.imshow('', img)
img_copy = img.copy()
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
img = cv.adaptiveThreshold(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.THRESH_BINARY_INV, 15, 9)
kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (3, 3))
img = cv.morphologyEx(img, cv.MORPH_OPEN, kernel)
img = cv.medianBlur(img, 3)
origin = (w // 2 + 4, h // 2 + 2)
o1, o2 = origin
r = w // 2 + 1
ORIGIN = (0, 0)
R = 300 # mm
contours, hierarchy = cv.findContours(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)
contours = list(filter(lambda x: 50 < cv.contourArea(x) < 175, contours))
factor = 0.1
smooth_contours = []
for i in range(len(contours)):
epsilon = factor * cv.arcLength(contours[i], True)
approx = cv.approxPolyDP(contours[i], epsilon, True)
x, y, width, height = cv.boundingRect(approx)
area = width*height
if len(approx) == 4 and 75 < area < 200:
smooth_contours.append(contours[i])
center, radius = cv.minEnclosingCircle(approx)
radius = int(radius)
center = tuple(map(int, center))
x, y = center
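                # Convert the pixel offset from the image centre into millimetres,
                # presumably using the plate radius in pixels (r) and its physical
                # radius R = 300 mm.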
X = ((x - o1) * R) / r
Y = ((y - o2) * R) / r
X, Y = round(X, 2), round(Y, 2)
cv.circle(img_copy, center, radius, (0, 255, 0), 2)
cv.putText(img_copy, str((X, Y)), center, cv.FONT_HERSHEY_SIMPLEX, 0.3, (255, 0, 255, 255), 1, cv.LINE_AA)
e1, e2, e3 = map(lambda d: abs(math.hypot(X, Y) - d), [D1, D2, D3])
error = min(e1, e2, e3)
if error < 10:
mean_error += error ** 2
holes_count += 1
cv.circle(img_copy, origin, 4, (0, 0, 255), -1)
# cv.line(img_copy, origin, (origin[0], origin[1]), (255, 0, 255), 2)
mean_error /= holes_count
mse += mean_error
count += 1
cv.imshow("Final", img_copy)
writer.append_data(img_copy)
# cv.imshow("Chg", img)
if cv.waitKey(30) == 27:
break
print("E:", mse / count, "N:", count)
writer.close()
cap.release()
    cv.destroyAllWindows()
| [
"cv2.imshow",
"cv2.warpPerspective",
"cv2.destroyAllWindows",
"cv2.approxPolyDP",
"math.hypot",
"imageio.get_writer",
"imageio.get_reader",
"cv2.arcLength",
"cv2.medianBlur",
"cv2.contourArea",
"cv2.waitKey",
"cv2.getPerspectiveTransform",
"cv2.minEnclosingCircle",
"cv2.morphologyEx",
"cv2.circle",
"cv2.cvtColor",
"transformer.rotate_along_axis",
"cv2.adaptiveThreshold",
"cv2.VideoCapture",
"cv2.findContours",
"cv2.getStructuringElement",
"numpy.float32",
"cv2.boundingRect"
]
| [((142, 177), 'cv2.cvtColor', 'cv.cvtColor', (['img', 'cv.COLOR_BGR2GRAY'], {}), '(img, cv.COLOR_BGR2GRAY)\n', (153, 177), True, 'import cv2 as cv\n'), ((215, 306), 'cv2.adaptiveThreshold', 'cv.adaptiveThreshold', (['img', '(255)', 'cv.ADAPTIVE_THRESH_MEAN_C', 'cv.THRESH_BINARY_INV', '(15)', '(9)'], {}), '(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.\n THRESH_BINARY_INV, 15, 9)\n', (235, 306), True, 'import cv2 as cv\n'), ((316, 366), 'cv2.getStructuringElement', 'cv.getStructuringElement', (['cv.MORPH_ELLIPSE', '(3, 3)'], {}), '(cv.MORPH_ELLIPSE, (3, 3))\n', (340, 366), True, 'import cv2 as cv\n'), ((377, 420), 'cv2.morphologyEx', 'cv.morphologyEx', (['img', 'cv.MORPH_OPEN', 'kernel'], {}), '(img, cv.MORPH_OPEN, kernel)\n', (392, 420), True, 'import cv2 as cv\n'), ((432, 453), 'cv2.medianBlur', 'cv.medianBlur', (['img', '(3)'], {}), '(img, 3)\n', (445, 453), True, 'import cv2 as cv\n'), ((481, 537), 'cv2.findContours', 'cv.findContours', (['img', 'cv.RETR_LIST', 'cv.CHAIN_APPROX_NONE'], {}), '(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)\n', (496, 537), True, 'import cv2 as cv\n'), ((604, 624), 'cv2.boundingRect', 'cv.boundingRect', (['roi'], {}), '(roi)\n', (619, 624), True, 'import cv2 as cv\n'), ((698, 717), 'numpy.float32', 'np.float32', (['corners'], {}), '(corners)\n', (708, 717), True, 'import numpy as np\n'), ((912, 968), 'numpy.float32', 'np.float32', (['[[0, 0], [cols, 0], [0, rows], [cols, rows]]'], {}), '([[0, 0], [cols, 0], [0, rows], [cols, rows]])\n', (922, 968), True, 'import numpy as np\n'), ((983, 1019), 'cv2.getPerspectiveTransform', 'cv.getPerspectiveTransform', (['src', 'dst'], {}), '(src, dst)\n', (1009, 1019), True, 'import cv2 as cv\n'), ((1038, 1088), 'cv2.warpPerspective', 'cv.warpPerspective', (['img_copy', 'matrix', '(cols, rows)'], {}), '(img_copy, matrix, (cols, rows))\n', (1056, 1088), True, 'import cv2 as cv\n'), ((1093, 1119), 'cv2.imshow', 'cv.imshow', (['""""""', 'rotated_img'], {}), "('', rotated_img)\n", (1102, 1119), True, 'import cv2 as cv\n'), ((1187, 1223), 'cv2.VideoCapture', 'cv.VideoCapture', (['"""samples/delta.mp4"""'], {}), "('samples/delta.mp4')\n", (1202, 1223), True, 'import cv2 as cv\n'), ((1339, 1378), 'imageio.get_reader', 'imageio.get_reader', (['"""samples/delta.mp4"""'], {}), "('samples/delta.mp4')\n", (1357, 1378), False, 'import imageio\n'), ((1432, 1481), 'imageio.get_writer', 'imageio.get_writer', (['"""samples/result.mp4"""'], {'fps': 'fps'}), "('samples/result.mp4', fps=fps)\n", (1450, 1481), False, 'import imageio\n'), ((5765, 5787), 'cv2.destroyAllWindows', 'cv.destroyAllWindows', ([], {}), '()\n', (5785, 5787), True, 'import cv2 as cv\n'), ((1653, 1674), 'cv2.imshow', 'cv.imshow', (['"""dfa"""', 'img'], {}), "('dfa', img)\n", (1662, 1674), True, 'import cv2 as cv\n'), ((1691, 1728), 'cv2.cvtColor', 'cv.cvtColor', (['frame', 'cv.COLOR_BGR2GRAY'], {}), '(frame, cv.COLOR_BGR2GRAY)\n', (1702, 1728), True, 'import cv2 as cv\n'), ((2320, 2364), 'transformer.rotate_along_axis', 'transformer.rotate_along_axis', (['img'], {'theta': '(40)'}), '(img, theta=40)\n', (2349, 2364), False, 'import transformer\n'), ((2434, 2485), 'transformer.rotate_along_axis', 'transformer.rotate_along_axis', (['frame_copy'], {'theta': '(40)'}), '(frame_copy, theta=40)\n', (2463, 2485), False, 'import transformer\n'), ((3234, 3285), 'numpy.float32', 'np.float32', (['[[0, 0], [w, 0], [38, 293], [407, 293]]'], {}), '([[0, 0], [w, 0], [38, 293], [407, 293]])\n', (3244, 3285), True, 'import numpy as np\n'), ((3300, 3350), 'numpy.float32', 'np.float32', (['[[0, 0], 
[w, 0], [30, h], [w - 30, h]]'], {}), '([[0, 0], [w, 0], [30, h], [w - 30, h]])\n', (3310, 3350), True, 'import numpy as np\n'), ((3368, 3404), 'cv2.getPerspectiveTransform', 'cv.getPerspectiveTransform', (['src', 'dst'], {}), '(src, dst)\n', (3394, 3404), True, 'import cv2 as cv\n'), ((3419, 3458), 'cv2.warpPerspective', 'cv.warpPerspective', (['img', 'matrix', '(w, h)'], {}), '(img, matrix, (w, h))\n', (3437, 3458), True, 'import cv2 as cv\n'), ((3467, 3485), 'cv2.imshow', 'cv.imshow', (['""""""', 'img'], {}), "('', img)\n", (3476, 3485), True, 'import cv2 as cv\n'), ((3531, 3566), 'cv2.cvtColor', 'cv.cvtColor', (['img', 'cv.COLOR_BGR2GRAY'], {}), '(img, cv.COLOR_BGR2GRAY)\n', (3542, 3566), True, 'import cv2 as cv\n'), ((3581, 3672), 'cv2.adaptiveThreshold', 'cv.adaptiveThreshold', (['img', '(255)', 'cv.ADAPTIVE_THRESH_MEAN_C', 'cv.THRESH_BINARY_INV', '(15)', '(9)'], {}), '(img, 255, cv.ADAPTIVE_THRESH_MEAN_C, cv.\n THRESH_BINARY_INV, 15, 9)\n', (3601, 3672), True, 'import cv2 as cv\n'), ((3686, 3736), 'cv2.getStructuringElement', 'cv.getStructuringElement', (['cv.MORPH_ELLIPSE', '(3, 3)'], {}), '(cv.MORPH_ELLIPSE, (3, 3))\n', (3710, 3736), True, 'import cv2 as cv\n'), ((3751, 3794), 'cv2.morphologyEx', 'cv.morphologyEx', (['img', 'cv.MORPH_OPEN', 'kernel'], {}), '(img, cv.MORPH_OPEN, kernel)\n', (3766, 3794), True, 'import cv2 as cv\n'), ((3809, 3830), 'cv2.medianBlur', 'cv.medianBlur', (['img', '(3)'], {}), '(img, 3)\n', (3822, 3830), True, 'import cv2 as cv\n'), ((3998, 4054), 'cv2.findContours', 'cv.findContours', (['img', 'cv.RETR_LIST', 'cv.CHAIN_APPROX_NONE'], {}), '(img, cv.RETR_LIST, cv.CHAIN_APPROX_NONE)\n', (4013, 4054), True, 'import cv2 as cv\n'), ((5318, 5365), 'cv2.circle', 'cv.circle', (['img_copy', 'origin', '(4)', '(0, 0, 255)', '(-1)'], {}), '(img_copy, origin, 4, (0, 0, 255), -1)\n', (5327, 5365), True, 'import cv2 as cv\n'), ((5532, 5560), 'cv2.imshow', 'cv.imshow', (['"""Final"""', 'img_copy'], {}), "('Final', img_copy)\n", (5541, 5560), True, 'import cv2 as cv\n'), ((4319, 4362), 'cv2.approxPolyDP', 'cv.approxPolyDP', (['contours[i]', 'epsilon', '(True)'], {}), '(contours[i], epsilon, True)\n', (4334, 4362), True, 'import cv2 as cv\n'), ((4410, 4433), 'cv2.boundingRect', 'cv.boundingRect', (['approx'], {}), '(approx)\n', (4425, 4433), True, 'import cv2 as cv\n'), ((5641, 5655), 'cv2.waitKey', 'cv.waitKey', (['(30)'], {}), '(30)\n', (5651, 5655), True, 'import cv2 as cv\n'), ((4266, 4297), 'cv2.arcLength', 'cv.arcLength', (['contours[i]', '(True)'], {}), '(contours[i], True)\n', (4278, 4297), True, 'import cv2 as cv\n'), ((4605, 4634), 'cv2.minEnclosingCircle', 'cv.minEnclosingCircle', (['approx'], {}), '(approx)\n', (4626, 4634), True, 'import cv2 as cv\n'), ((4896, 4947), 'cv2.circle', 'cv.circle', (['img_copy', 'center', 'radius', '(0, 255, 0)', '(2)'], {}), '(img_copy, center, radius, (0, 255, 0), 2)\n', (4905, 4947), True, 'import cv2 as cv\n'), ((4101, 4118), 'cv2.contourArea', 'cv.contourArea', (['x'], {}), '(x)\n', (4115, 4118), True, 'import cv2 as cv\n'), ((5119, 5135), 'math.hypot', 'math.hypot', (['X', 'Y'], {}), '(X, Y)\n', (5129, 5135), False, 'import math\n')] |
# Modify Program 8.20 so that the user has three chances to guess the number
# The program ends when the user guesses right or misses three times
# Program 8.20 from the book, page 184
# Program 8.20 - Guessing the number
#
# import random
#
# n = random.randint(1, 10)
# x = int(input('Escolha um número entre 1 e 10: '))
# if x == n:
# print('Você acertou!')
# else:
# print('Você errou.')
import random
numberRandom = random.randint(1, 10)
counter = 0
while True:
chosenNumber = int(input('\nEscolha um número entre 1 e 10: '))
counter += 1
if chosenNumber == numberRandom:
print(f'Parabéns! Você acertou na {counter}ª de 3 tentativas!')
break
else:
print(f'Você errou!')
if counter < 3:
print(f'Resta(m) {3 - counter} tentativa(s).')
else:
print('Suas tentativas acabaram! Mais sorte na próxima vez.')
print(f'O número sorteado foi {numberRandom}.')
break
| [
"random.randint"
]
| [((435, 456), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (449, 456), False, 'import random\n')] |
import setuptools
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setuptools.setup(
name="atm76",
version="0.1.0",
author="<NAME>",
author_email="<EMAIL>",
description="Differentiable 1976 Atmosphere",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/shb84/ATM76.git",
packages=setuptools.find_packages(),
package_data={},
install_requires=["numpy>=1.16", "genn"],
include_package_data=True,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.7',
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
]
| [((115, 137), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (127, 137), False, 'from os import path\n'), ((150, 178), 'os.path.join', 'path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (159, 178), False, 'from os import path\n'), ((542, 568), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (566, 568), False, 'import setuptools\n')] |
import logging
import asyncio
from agent.check_plugins import AbstractCheckPlugin
# Since I don't know how to use the asyncio library for this, I use the Python
# requests library instead
import requests
import sys
import time
from datetime import datetime
logger = logging.getLogger(__name__)
class Download(AbstractCheckPlugin):
@asyncio.coroutine
def __call__(self, client, dnode):
logger.info('Test download speed : running...')
start = time.clock()
r = requests.get('http://{}'.format(dnode), stream=True)
total_length = int(r.headers.get('content-length'))
if total_length is None:
logger.error("Empty file!")
else:
array_speed = []
start_chunk = time.clock()
            for chunk in r.iter_content(1024):  # 1 kB = 1024 bytes, 1 MB = 1048576 bytes
end_chunk = time.clock()
delta = end_chunk - start_chunk
start_chunk = end_chunk
if delta <= 0:
break
else:
array_speed.append(1//delta) # kB / s
end = time.clock()
yield from self._queue.put(self.get_result(dnode, start, end, total_length, array_speed))
@asyncio.coroutine
def get_result(self, url, start, end, total_length, array_speed):
"""Download and processing data.
Args:
url (str): url file download.
start (float): It's time which started download.
end (float): It's time which finished download.
total_length (int): size of file download (Byte)
array_speed (list): list download speeds for each 1024 Byte (kB/s)
Returns:
list with item 0 : json format for influxdb
"""
download_speed = total_length // (time.clock() - start)
accelerationS = self.acceleration(array_speed)
mean_deviationS = self.mean_deviation(array_speed, download_speed)
logger.info("Test download speed done!")
        # TODO Drop the time field, to check whether the db has written the data yet
return [self.output([self._snode, url, datetime.now(), download_speed, mean_deviationS, accelerationS])]
def acceleration(self, array_speed):
"""Caculate acceleration.
By get the highest speed in the first cycle.
Args:
array_speed (list): list download times for each 1024 Byte
Returns:
acceleration (kB/s) : the deviation between highest speed and first byte speed
"""
if len(array_speed) == 0:
return 0
speed_before = array_speed[0]
for speed in array_speed:
if speed < speed_before:
break
else:
speed_before = speed
return speed_before - array_speed[0]
def mean_deviation(self, array_speed, download_speed):
"""The mean deviation each downloads with download_speed.
Args:
array_speed (list): list download speeds for each kB.
download_speed (kB/s): mean download speed.
Returns:
mean_deviation (kB/s)
"""
if len(array_speed) == 0:
return 0
sum = 0
for speed in array_speed:
sum += abs(speed - download_speed)
return sum//len(array_speed)
def output(self, my_array):
"""Reformat my_array for inserting into influxdb.
Args:
my_array (list): [self._snode, url, str(datetime.now()), download_speed, mean_deviationS, accelerationS]
Returns:
json format for influxdb
"""
return {
"measurement": "download_speed",
"tags": {
"snode": "{}".format(my_array[0]),
"dnode": "{}".format(my_array[1])
},
# "time": "{}".format(my_array[2]),
"fields": {
"speed": my_array[3],
"mean_deviation": my_array[4],
"acceleration": my_array[5]
}
}
| [
"logging.getLogger",
"datetime.datetime.now",
"time.clock"
]
| [((245, 272), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (262, 272), False, 'import logging\n'), ((448, 460), 'time.clock', 'time.clock', ([], {}), '()\n', (458, 460), False, 'import time\n'), ((728, 740), 'time.clock', 'time.clock', ([], {}), '()\n', (738, 740), False, 'import time\n'), ((1097, 1109), 'time.clock', 'time.clock', ([], {}), '()\n', (1107, 1109), False, 'import time\n'), ((839, 851), 'time.clock', 'time.clock', ([], {}), '()\n', (849, 851), False, 'import time\n'), ((1795, 1807), 'time.clock', 'time.clock', ([], {}), '()\n', (1805, 1807), False, 'import time\n'), ((2112, 2126), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2124, 2126), False, 'from datetime import datetime\n')] |
import weakref
import os
import requests
import ssl
from ssl import SSLContext
import logging
from ssl_context_builder.builder.builder import SslContextBuilder
from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter
class RequestsSecureSession:
def __init__(self, ssl_context: SSLContext):
"""
        This class creates a wrapper around the requests.Session object
It does the following:
        1. Disable consumption of environment variables by the session
        2. Load certificates provided with the ssl_context
        3. Expect ssl_context to control the TLS communication
@param ssl_context: SSLContext
"""
self.cert_file_path = self._create_cert_file(ssl_context) # see note inside the function why not using tempfile
self._ssl_context = ssl_context
self.session = requests.Session()
self.session.trust_env = False
self.session.verify = self.cert_file_path
self.session.mount('https://', SslAdapter(ssl_context))
self._finalizer = weakref.finalize(
self, self._cleanup, self.cert_file_path, self.session,
warn_message="Implicitly cleaning up {!r}".format(self))
def __enter__(self):
return self
def __exit__(self, exc, value, tb):
self.cleanup()
    def cleanup(self):  # Non-throwing function
"""
Delete the cert file and close the session
@return:
"""
if self._finalizer.detach():
try:
os.remove(self.cert_file_path)
except:
logging.warning(f"Couldn't delete certs file {self.cert_file_path}")
try:
self.session.close()
except:
logging.warning("Couldn't close session")
@staticmethod
def _cleanup(name, session, warn_message):
try:
os.remove(name)
except:
logging.warning(f"Couldn't delete certs file {name}")
try:
session.close()
except:
logging.warning("Couldn't close session")
logging.warning(warn_message)
@classmethod
def _create_cert_file(cls, ssl_context: SSLContext):
"""
        This creates a CA bundle file extracted from the ssl_context.
        The reason we create a real file and delete it later is that the file is opened again
        later in the requests flow, so it has to be closed before it is used;
        a tempfile would be destroyed as soon as it is closed.
@param ssl_context: ssl_context
@return: path to the created ca_bundle file
"""
path = "certs.pem"
if os.path.exists(path):
path = cls._generate_cert_file_path("certs")
with open(path, mode="a+") as certs_file:
certs = ""
for der in ssl_context.get_ca_certs(True):
certs += f"{ssl.DER_cert_to_PEM_cert(der)}\n"
certs_file.write(certs)
return path
@classmethod
def _generate_cert_file_path(cls, file_name: str, num=1):
file_name_candidate = f"{file_name}({num}).pem"
if os.path.exists(file_name_candidate):
return cls._generate_cert_file_path(file_name, num + 1)
return file_name_candidate
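# Hedged usage sketch (added for illustration, not part of the original module).
# The SslContextBuilder imported above is not exercised here; the standard-library
# ssl.create_default_context() stands in for whatever context it would produce.
if __name__ == "__main__":
    demo_context = ssl.create_default_context()
    with RequestsSecureSession(demo_context) as secure:
        resp = secure.session.get("https://example.com")  # hypothetical endpoint
        print(resp.status_code)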
| [
"os.path.exists",
"requests.Session",
"logging.warning",
"ssl.DER_cert_to_PEM_cert",
"ssl_context_builder.http_impl.requests_wrapper.ssl_adapter.SslAdapter",
"os.remove"
]
| [((838, 856), 'requests.Session', 'requests.Session', ([], {}), '()\n', (854, 856), False, 'import requests\n'), ((2087, 2116), 'logging.warning', 'logging.warning', (['warn_message'], {}), '(warn_message)\n', (2102, 2116), False, 'import logging\n'), ((2660, 2680), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2674, 2680), False, 'import os\n'), ((3132, 3167), 'os.path.exists', 'os.path.exists', (['file_name_candidate'], {}), '(file_name_candidate)\n', (3146, 3167), False, 'import os\n'), ((985, 1008), 'ssl_context_builder.http_impl.requests_wrapper.ssl_adapter.SslAdapter', 'SslAdapter', (['ssl_context'], {}), '(ssl_context)\n', (995, 1008), False, 'from ssl_context_builder.http_impl.requests_wrapper.ssl_adapter import SslAdapter\n'), ((1870, 1885), 'os.remove', 'os.remove', (['name'], {}), '(name)\n', (1879, 1885), False, 'import os\n'), ((1510, 1540), 'os.remove', 'os.remove', (['self.cert_file_path'], {}), '(self.cert_file_path)\n', (1519, 1540), False, 'import os\n'), ((1914, 1967), 'logging.warning', 'logging.warning', (['f"""Couldn\'t delete certs file {name}"""'], {}), '(f"Couldn\'t delete certs file {name}")\n', (1929, 1967), False, 'import logging\n'), ((2037, 2078), 'logging.warning', 'logging.warning', (['"""Couldn\'t close session"""'], {}), '("Couldn\'t close session")\n', (2052, 2078), False, 'import logging\n'), ((1577, 1645), 'logging.warning', 'logging.warning', (['f"""Couldn\'t delete certs file {self.cert_file_path}"""'], {}), '(f"Couldn\'t delete certs file {self.cert_file_path}")\n', (1592, 1645), False, 'import logging\n'), ((1737, 1778), 'logging.warning', 'logging.warning', (['"""Couldn\'t close session"""'], {}), '("Couldn\'t close session")\n', (1752, 1778), False, 'import logging\n'), ((2895, 2924), 'ssl.DER_cert_to_PEM_cert', 'ssl.DER_cert_to_PEM_cert', (['der'], {}), '(der)\n', (2919, 2924), False, 'import ssl\n')] |
# Ghetto Fixtures
from codebox import app
from codebox.apps.auth.models import User
from codebox.apps.snippets.models import Snippet
from codebox.apps.organizations.models import Organization, OrganizationMember
from flask import g
client = app.test_client()
_ctx = app.test_request_context()
_ctx.push()
app.preprocess_request()
g.redis.flushdb()
User.objects.create(pk=1, name='zeeg')
Organization.objects.create(pk='disqus', name='DISQUS')
OrganizationMember.objects.create(org='disqus', user=1)
# Create sample snippets
# plaintext
Snippet.objects.create(org='disqus', user=1, lang='text', text = "Hello World!")
# python
Snippet.objects.create(org='disqus', user=1, lang='python', text = "print 'Disqus was here'")
# html
Snippet.objects.create(org='disqus', user=1, lang='html', text = '<h1>Look its HTML!</h1>')
# javascript
Snippet.objects.create(org='disqus', user=1, lang='javascript', text = "document.write('Di-squs')")
| [
"codebox.app.test_client",
"codebox.apps.snippets.models.Snippet.objects.create",
"codebox.apps.organizations.models.Organization.objects.create",
"codebox.app.preprocess_request",
"codebox.apps.organizations.models.OrganizationMember.objects.create",
"flask.g.redis.flushdb",
"codebox.app.test_request_context",
"codebox.apps.auth.models.User.objects.create"
]
| [((243, 260), 'codebox.app.test_client', 'app.test_client', ([], {}), '()\n', (258, 260), False, 'from codebox import app\n'), ((268, 294), 'codebox.app.test_request_context', 'app.test_request_context', ([], {}), '()\n', (292, 294), False, 'from codebox import app\n'), ((307, 331), 'codebox.app.preprocess_request', 'app.preprocess_request', ([], {}), '()\n', (329, 331), False, 'from codebox import app\n'), ((332, 349), 'flask.g.redis.flushdb', 'g.redis.flushdb', ([], {}), '()\n', (347, 349), False, 'from flask import g\n'), ((351, 389), 'codebox.apps.auth.models.User.objects.create', 'User.objects.create', ([], {'pk': '(1)', 'name': '"""zeeg"""'}), "(pk=1, name='zeeg')\n", (370, 389), False, 'from codebox.apps.auth.models import User\n'), ((391, 446), 'codebox.apps.organizations.models.Organization.objects.create', 'Organization.objects.create', ([], {'pk': '"""disqus"""', 'name': '"""DISQUS"""'}), "(pk='disqus', name='DISQUS')\n", (418, 446), False, 'from codebox.apps.organizations.models import Organization, OrganizationMember\n'), ((448, 503), 'codebox.apps.organizations.models.OrganizationMember.objects.create', 'OrganizationMember.objects.create', ([], {'org': '"""disqus"""', 'user': '(1)'}), "(org='disqus', user=1)\n", (481, 503), False, 'from codebox.apps.organizations.models import Organization, OrganizationMember\n'), ((543, 621), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', ([], {'org': '"""disqus"""', 'user': '(1)', 'lang': '"""text"""', 'text': '"""Hello World!"""'}), "(org='disqus', user=1, lang='text', text='Hello World!')\n", (565, 621), False, 'from codebox.apps.snippets.models import Snippet\n'), ((633, 729), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', ([], {'org': '"""disqus"""', 'user': '(1)', 'lang': '"""python"""', 'text': '"""print \'Disqus was here\'"""'}), '(org=\'disqus\', user=1, lang=\'python\', text=\n "print \'Disqus was here\'")\n', (655, 729), False, 'from codebox.apps.snippets.models import Snippet\n'), ((734, 828), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', ([], {'org': '"""disqus"""', 'user': '(1)', 'lang': '"""html"""', 'text': '"""<h1>Look its HTML!</h1>"""'}), "(org='disqus', user=1, lang='html', text=\n '<h1>Look its HTML!</h1>')\n", (756, 828), False, 'from codebox.apps.snippets.models import Snippet\n'), ((839, 941), 'codebox.apps.snippets.models.Snippet.objects.create', 'Snippet.objects.create', ([], {'org': '"""disqus"""', 'user': '(1)', 'lang': '"""javascript"""', 'text': '"""document.write(\'Di-squs\')"""'}), '(org=\'disqus\', user=1, lang=\'javascript\', text=\n "document.write(\'Di-squs\')")\n', (861, 941), False, 'from codebox.apps.snippets.models import Snippet\n')] |
import function_exercise_01 as st
st.sandwich_toppings('meatballs', 'salad')
| [
"function_exercise_01.sandwich_toppings"
]
| [((35, 77), 'function_exercise_01.sandwich_toppings', 'st.sandwich_toppings', (['"""meatballs"""', '"""salad"""'], {}), "('meatballs', 'salad')\n", (55, 77), True, 'import function_exercise_01 as st\n')] |
import json
import time
from functools import lru_cache
from multiprocessing import Pool, Process
from threading import Thread, Timer
from typing import Any, Dict, List
from datetime import datetime
import hashlib
import inspect
import requests
import waitress
from bottle import BaseTemplate, Bottle, request, response, static_file, template, error
import utils.constants as consts
from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block
from authority import Authority
from utils.logger import logger, iplogger
from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db
from utils.utils import compress, decompress, dhash
from wallet import Wallet
app = Bottle()
BaseTemplate.defaults["get_url"] = app.get_url
LINE_PROFILING = False
BLOCKCHAIN = BlockChain()
PEER_LIST: List[Dict[str, Any]] = []
MY_WALLET = Wallet()
miner = Authority()
def mining_thread_task():
while True:
if not miner.is_mining() and not consts.NO_MINING:
miner.start_mining(BLOCKCHAIN.mempool, BLOCKCHAIN.active_chain, MY_WALLET)
time.sleep(consts.MINING_INTERVAL_THRESHOLD // 2)
def send_to_all_peers(url, data):
def request_task(peers, url, data):
for peer in peers:
try:
requests.post(get_peer_url(peer) + url, data=data, timeout=(5, 1))
except Exception as e:
logger.debug("Server: Requests: Error while sending data in process" + str(peer))
Process(target=request_task, args=(PEER_LIST, url, data), daemon=True).start()
def start_mining_thread():
time.sleep(5)
Thread(target=mining_thread_task, name="Miner", daemon=True).start()
def fetch_peer_list() -> List[Dict[str, Any]]:
try:
r = requests.post(consts.SEED_SERVER_URL, data={"port": consts.MINER_SERVER_PORT})
peer_list = json.loads(r.text)
return peer_list
except Exception as e:
logger.error("Could not connect to DNS Seed")
return []
def get_peer_url(peer: Dict[str, Any]) -> str:
return "http://" + str(peer["ip"]) + ":" + str(peer["port"])
def greet_peer(peer: Dict[str, Any]) -> bool:
try:
url = get_peer_url(peer)
data = {"port": consts.MINER_SERVER_PORT, "version": consts.MINER_VERSION, "blockheight": BLOCKCHAIN.active_chain.length}
# Send a POST request to the peer
r = requests.post(url + "/greetpeer", data=data)
data = json.loads(r.text)
# Update the peer data in the peer list with the new data received from the peer.
if data.get("blockheight", None):
peer.update(data)
else:
logger.debug("Main: Peer data does not have Block Height")
return False
return True
except Exception as e:
logger.debug("Main: Could not greet peer" + str(e))
return False
def receive_block_from_peer(peer: Dict[str, Any], header_hash) -> Block:
r = requests.post(get_peer_url(peer) + "/getblock", data={"headerhash": header_hash})
return Block.from_json(decompress(r.text)).object()
def check_block_with_peer(peer, hhash):
r = requests.post(get_peer_url(peer) + "/checkblock", data={"headerhash": hhash})
result = json.loads(r.text)
if result:
return True
return False
def get_block_header_hash(height):
return dhash(BLOCKCHAIN.active_chain.header_list[height])
def sync(max_peer):
fork_height = BLOCKCHAIN.active_chain.length
r = requests.post(get_peer_url(max_peer) + "/getblockhashes", data={"myheight": fork_height})
hash_list = json.loads(decompress(r.text.encode()))
for hhash in hash_list:
block = receive_block_from_peer(max_peer, hhash)
if not BLOCKCHAIN.add_block(block):
logger.error("Sync: Block received is invalid, Cannot Sync")
break
return
# Periodically sync with all the peers
def sync_with_peers():
try:
PEER_LIST = fetch_peer_list()
new_peer_list = []
for peer in PEER_LIST:
if greet_peer(peer):
new_peer_list.append(peer)
PEER_LIST = new_peer_list
if PEER_LIST:
max_peer = max(PEER_LIST, key=lambda k: k["blockheight"])
logger.debug(f"Sync: Syncing with {get_peer_url(max_peer)}, he seems to have height {max_peer['blockheight']}")
sync(max_peer)
except Exception as e:
logger.error("Sync: Error: " + str(e))
Timer(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers).start()
def check_balance(pub_key: str) -> int:
current_balance = 0
for x, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():
tx_out = utxo_list[0]
if tx_out.address == pub_key:
current_balance += int(tx_out.amount)
return int(current_balance)
def send_bounty(receiver_public_keys: List[str], amounts: List[int]):
current_balance = check_balance(MY_WALLET.public_key)
for key in receiver_public_keys:
if len(key) < consts.PUBLIC_KEY_LENGTH:
logger.debug("Invalid Public Key Length")
return False
total_amount = sum(amounts)
if current_balance < total_amount:
logger.debug("Insuficient balance")
elif MY_WALLET.public_key in receiver_public_keys:
logger.debug("Cannot send to myself")
else:
transaction = create_transaction(receiver_public_keys, amounts, MY_WALLET.public_key, message="Authority: Faucet Money")
transaction.sign(MY_WALLET)
logger.info("Wallet: Attempting to Send Transaction")
try:
r = requests.post(
"http://0.0.0.0:" + str(consts.MINER_SERVER_PORT) + "/newtransaction",
data=compress(transaction.to_json()),
timeout=(5, 1),
)
if r.status_code == 400:
logger.info("Wallet: Could not Send Transaction. Invalid Transaction")
else:
logger.info("Wallet: Transaction Sent, Wait for it to be Mined")
return True
except Exception as e:
logger.error("Wallet: Could not Send Transaction. Try Again." + str(e))
return False
def create_transaction(receiver_public_keys: List[str], amounts: List[int], sender_public_key, message="") -> Transaction:
vout = {}
vin = {}
current_amount = 0
total_amount = sum(amounts)
i = 0
for so, utxo_list in BLOCKCHAIN.active_chain.utxo.utxo.items():
tx_out = utxo_list[0]
if current_amount >= total_amount:
break
if tx_out.address == sender_public_key:
current_amount += tx_out.amount
vin[i] = TxIn(payout=SingleOutput.from_json(so), pub_key=sender_public_key, sig="")
i += 1
for i, address in enumerate(receiver_public_keys):
vout[i] = TxOut(amount=amounts[i], address=address)
change = (current_amount - total_amount)
if change > 0:
vout[i + 1] = TxOut(amount=change, address=sender_public_key)
tx = Transaction(version=consts.MINER_VERSION, locktime=0, timestamp=int(time.time()), vin=vin, vout=vout, message=message)
return tx
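# Hedged usage sketch (illustrative comment, not part of the original flow): once the
# chain has matured and MY_WALLET holds funds, a transfer to a hypothetical receiver
# key could be assembled and signed like this:
#   tx = create_transaction([receiver_public_key], [10], MY_WALLET.public_key, message="demo")
#   tx.sign(MY_WALLET)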
def get_ip(request):
return request.environ.get("HTTP_X_FORWARDED_FOR") or request.environ.get("REMOTE_ADDR")
def log_ip(request, fname):
client_ip = get_ip(request)
iplogger.info(f"{client_ip} : Called function {fname}")
@app.post("/checkBalance")
def checkingbalance():
log_ip(request, inspect.stack()[0][3])
data = request.json
public_key = data["public_key"]
logger.debug(public_key)
current_balance = check_balance(public_key)
return str(current_balance)
@app.post("/makeTransaction")
def make_transaction():
log_ip(request, inspect.stack()[0][3])
data = request.json
bounty = int(data["bounty"])
receiver_public_key = data["receiver_public_key"]
sender_public_key = data["sender_public_key"]
message = "No Message"
if "message" in data:
message = data["message"]
if len(receiver_public_key) < consts.PUBLIC_KEY_LENGTH:
logger.debug("Invalid Receiver Public Key")
response.status = 400
return "Invalid Receiver Public Key"
current_balance = check_balance(sender_public_key)
if current_balance < bounty:
logger.debug("Insufficient Balance to make Transaction")
response.status = 400
return "Insufficient Balance to make Transaction, need more " + str(bounty - current_balance)
elif sender_public_key == receiver_public_key:
logger.debug("Someone trying to send money to himself")
response.status = 400
return "Cannot send money to youself"
else:
transaction = create_transaction([receiver_public_key], [bounty], sender_public_key, message=message)
data = {}
data["send_this"] = transaction.to_json()
transaction.vin = {}
data["sign_this"] = transaction.to_json()
return json.dumps(data)
@app.post("/sendTransaction")
def send_transaction():
log_ip(request, inspect.stack()[0][3])
data = request.json
transaction = Transaction.from_json(data["transaction"]).object()
sig = data["signature"]
transaction.add_sign(sig)
logger.debug(transaction)
logger.info("Wallet: Attempting to Send Transaction")
try:
r = requests.post(
"http://0.0.0.0:" + str(consts.MINER_SERVER_PORT) + "/newtransaction",
data=compress(transaction.to_json()),
timeout=(5, 1),
)
if r.status_code == 400:
response.status = 400
logger.error("Wallet: Could not Send Transaction. Invalid transaction")
return "Try Again"
except Exception as e:
response.status = 400
logger.error("Wallet: Could not Send Transaction. Try Again." + str(e))
return "Try Again"
else:
logger.info("Wallet: Transaction Sent, Wait for it to be Mined")
return "Done"
@app.post("/transactionHistory")
def transaction_history():
log_ip(request, inspect.stack()[0][3])
data = request.json
public_key = data["public_key"]
tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(public_key)
return json.dumps(tx_hist)
@app.post("/greetpeer")
def greet_peer_f():
log_ip(request, inspect.stack()[0][3])
try:
peer = {}
peer["port"] = request.forms.get("port")
peer["ip"] = request.remote_addr
peer["time"] = time.time()
peer["version"] = request.forms.get("version")
peer["blockheight"] = request.forms.get("blockheight")
ADD_ENTRY = True
for entry in PEER_LIST:
ip = entry["ip"]
port = entry["port"]
if ip == peer["ip"] and port == peer["port"]:
ADD_ENTRY = False
if ADD_ENTRY:
PEER_LIST.append(peer)
logger.debug("Server: Greet, A new peer joined, Adding to List")
except Exception as e:
logger.debug("Server: Greet Error: " + str(e))
pass
data = {"version": consts.MINER_VERSION, "blockheight": BLOCKCHAIN.active_chain.length}
response.content_type = "application/json"
return json.dumps(data)
@lru_cache(maxsize=128)
def cached_get_block(headerhash: str) -> str:
if headerhash:
db_block = get_block_from_db(headerhash)
if db_block:
return compress(db_block)
else:
logger.error("ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK")
return "Invalid Hash"
@app.post("/getblock")
def getblock():
log_ip(request, inspect.stack()[0][3])
hhash = request.forms.get("headerhash")
return cached_get_block(hhash)
@app.post("/checkblock")
def checkblock():
log_ip(request, inspect.stack()[0][3])
headerhash = request.forms.get("headerhash")
if get_block_from_db(headerhash):
return json.dumps(True)
return json.dumps(False)
@app.post("/getblockhashes")
def send_block_hashes():
log_ip(request, inspect.stack()[0][3])
peer_height = int(request.forms.get("myheight"))
hash_list = []
for i in range(peer_height, BLOCKCHAIN.active_chain.length):
hash_list.append(dhash(BLOCKCHAIN.active_chain.header_list[i]))
return compress(json.dumps(hash_list)).decode()
@lru_cache(maxsize=16)
def process_new_block(request_data: bytes) -> str:
global BLOCKCHAIN
block_json = decompress(request_data)
if block_json:
try:
block = Block.from_json(block_json).object()
# Check if block already exists
if get_block_from_db(dhash(block.header)):
logger.info("Server: Received block exists, doing nothing")
return "Block already Received Before"
if BLOCKCHAIN.add_block(block):
logger.info("Server: Received a New Valid Block, Adding to Chain")
logger.debug("Server: Sending new block to peers")
# Broadcast block to other peers
send_to_all_peers("/newblock", request_data)
# TODO Make new chain/ orphan set for Block that is not added
except Exception as e:
logger.error("Server: New Block: invalid block received " + str(e))
return "Invalid Block Received"
# Kill Miner
t = Timer(1, miner.stop_mining)
t.start()
return "Block Received"
logger.error("Server: Invalid Block Received")
return "Invalid Block"
@app.post("/newblock")
def received_new_block():
log_ip(request, inspect.stack()[0][3])
return process_new_block(request.body.read())
@lru_cache(maxsize=16)
def process_new_transaction(request_data: bytes) -> str:
global BLOCKCHAIN
transaction_json = decompress(request_data)
if transaction_json:
try:
tx = Transaction.from_json(transaction_json).object()
# Add transaction to Mempool
if tx not in BLOCKCHAIN.mempool:
if BLOCKCHAIN.active_chain.is_transaction_valid(tx):
logger.debug("Valid Transaction received, Adding to Mempool")
BLOCKCHAIN.mempool.add(tx)
# Broadcast block to other peers
send_to_all_peers("/newtransaction", request_data)
else:
logger.debug("The transation is not valid, not added to Mempool")
return False, "Not Valid Transaction"
else:
return True, "Transaction Already received"
except Exception as e:
logger.error("Server: New Transaction: Invalid tx received: " + str(e))
return False, "Not Valid Transaction"
return True, "Done"
# Transactions for all active chains
@app.post("/newtransaction")
def received_new_transaction():
log_ip(request, inspect.stack()[0][3])
result, message = process_new_transaction(request.body.read())
if result:
response.status = 200
else:
response.status = 400
return message
question = '''What is greater than God,
more evil than the devil,
the poor have it,
the rich need it,
and if you eat it, you'll die?'''
actual_answer = "nothing"
@app.get("/")
def home():
log_ip(request, inspect.stack()[0][3])
message = ""
message_type = "info"
return template("index.html", message=message, message_type=message_type, question=question)
with open('uuids.json', 'r') as file:
uuid_json = file.read()
valid_ids = set(json.loads(uuid_json))
@app.post("/")
def puzzle():
log_ip(request, inspect.stack()[0][3])
message = ""
message_type = "info"
uuid = request.forms.get("uuid")
pubkey = request.forms.get("pubkey")
amounts = [300]
if uuid in valid_ids:
logger.debug("Valid Answer, Rewarding " + pubkey)
message = "Well Done!"
if check_balance(MY_WALLET.public_key) >= sum(amounts):
result = send_bounty([pubkey], amounts)
if result:
message = "Your reward is being sent, please wait for it to be mined!"
valid_ids.remove(uuid)
else:
message = "Some Error Occured, Contact Admin."
message_type = "warning"
else:
message = "Invalid Unique ID!"
message_type = "danger"
return template("index.html", message=message, message_type=message_type, question=question)
@app.get('/about')
def about():
return template("about.html")
# @app.get("/wallet")
# def wallet():
# log_ip(request, inspect.stack()[0][3])
# return template("wallet.html", message="", message_type="", pubkey=MY_WALLET.public_key)
# @app.post("/wallet")
# def wallet_post():
# log_ip(request, inspect.stack()[0][3])
# number = int(request.forms.get("number"))
# message = ""
# message_type = "info"
# try:
# receivers = []
# amounts = []
# total_amount = 0
# for i in range(0, number):
# receiver = str(request.forms.get("port" + str(i)))
# bounty = int(request.forms.get("amount" + str(i)))
# publickey = ""
# if len(receiver) < 10:
# wallet = get_wallet_from_db(receiver)
# if wallet is not None:
# publickey = wallet[1]
# else:
# message = "Error with the Receiver Port ID, try again."
# message_type = "danger"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
# else:
# publickey = receiver
# total_amount += bounty
# receivers.append(publickey)
# amounts.append(bounty)
# if check_balance(MY_WALLET.public_key) >= total_amount:
# result = send_bounty(receivers, amounts)
# if result:
# message = "Your transaction is sent, please wait for it to be mined!"
# else:
# message = "Some Error Occured, Contact Admin."
# message_type = "warning"
# else:
# message = "You have Insufficient Balance!"
# message_type = "warning"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
# except Exception as e:
# logger.error(e)
# message = "Some Error Occured. Please try again later."
# message_type = "danger"
# return template("wallet.html", message=message, message_type=message_type, pubkey=MY_WALLET.public_key)
@app.get("/checkmybalance")
def checkblance():
log_ip(request, inspect.stack()[0][3])
return str(check_balance(MY_WALLET.public_key))
@app.route("/static/<filename:path>", name="static")
def serve_static(filename):
log_ip(request, inspect.stack()[0][3])
return static_file(filename, root="static")
@app.get("/favicon.ico")
def get_favicon():
log_ip(request, inspect.stack()[0][3])
return static_file("favicon.ico", root="static")
@app.get("/info")
def sendinfo():
log_ip(request, inspect.stack()[0][3])
s = (
"No. of Blocks: "
+ str(BLOCKCHAIN.active_chain.length)
+ "<br>"
+ dhash(BLOCKCHAIN.active_chain.header_list[-1])
+ "<br>"
+ "Balance "
+ str(check_balance(MY_WALLET.public_key))
+ "<br>Public Key: <br>"
+ str(get_wallet_from_db(consts.MINER_SERVER_PORT)[1])
)
return s
def render_block_header(hdr):
html = "<table>"
html += "<tr><th>" + "Height" + "</th>"
html += "<td>" + str(hdr.height) + "</td></tr>"
html += "<tr><th>" + "Block Hash" + "</th>"
html += "<td>" + dhash(hdr) + "</td></tr>"
html += "<tr><th>" + "Prev Block Hash" + "</th>"
html += "<td>" + str(hdr.prev_block_hash) + "</td></tr>"
html += "<tr><th>" + "Merkle Root" + "</th>"
html += "<td>" + str(hdr.merkle_root) + "</td></tr>"
html += "<tr><th>" + "Timestamp" + "</th>"
html += (
"<td>"
+ str(datetime.fromtimestamp(hdr.timestamp).strftime("%d-%m-%Y %H:%M:%S"))
+ " ("
+ str(hdr.timestamp)
+ ")</td></tr>"
)
# get block
block = Block.from_json(get_block_from_db(dhash(hdr))).object()
html += "<tr><th>" + "Transactions" + "</th>"
html += "<td>" + str(len(block.transactions)) + "</td></tr>"
# for i, transaction in enumerate(block.transactions):
# s = "coinbase: " + str(transaction.is_coinbase) + ", fees: " + str(transaction.fees)
# html += "<tr><th>Transaction " + str(i) + "</th><td>" + str(s) + "</td></tr>"
html += "</table>"
return str(html)
@app.get("/chains")
def visualize_chain():
log_ip(request, inspect.stack()[0][3])
data = []
start = BLOCKCHAIN.active_chain.length - 10 if BLOCKCHAIN.active_chain.length > 10 else 0
headers = []
hdr_list = BLOCKCHAIN.active_chain.header_list
if len(hdr_list) > 200:
hdr_list = BLOCKCHAIN.active_chain.header_list[:100] + BLOCKCHAIN.active_chain.header_list[-100:]
for hdr in hdr_list:
d = {}
d["hash"] = dhash(hdr)[-5:]
d["time"] = hdr.timestamp
d["data"] = render_block_header(hdr)
headers.append(d)
data.append(headers)
return template("chains.html", data=data, start=start)
@app.get("/explorer")
def explorer():
log_ip(request, inspect.stack()[0][3])
prev = int(request.query.prev or 0)
if prev < 0:
prev = 0
hdr_list = list(reversed(BLOCKCHAIN.active_chain.header_list))
indexes = [i for i in range(prev * 8, (prev + 1) * 8) if i < len(hdr_list)]
blocks = [Block.from_json(get_block_from_db(dhash(hdr_list[i]))).object() for i in indexes]
transactions = list(BLOCKCHAIN.mempool)
return template("explorer.html", blocks=blocks, transactions=transactions, prev=prev)
@app.route("/block/<blockhash>", name="transaction")
def block(blockhash):
log_ip(request, inspect.stack()[0][3])
try:
block = Block.from_json(get_block_from_db(blockhash)).object()
except Exception as e:
logger.debug("BLOCK/blockhash: " + str(e))
return template("error.html")
return template("block.html", block=block)
@app.route("/transaction/<blockhash>/<txhash>", name="transaction")
def transaction(blockhash, txhash):
log_ip(request, inspect.stack()[0][3])
try:
block = Block.from_json(get_block_from_db(blockhash)).object()
tx = None
for t in block.transactions:
if t.hash() == txhash:
tx = t
except Exception as e:
logger.debug("Transaction/bhash/tx: " + str(e))
return template("error.html")
return template("transaction.html", tx=tx, block=block)
@app.route("/address/<pubkey:re:.+>", name="account")
def account(pubkey):
log_ip(request, inspect.stack()[0][3])
balance = check_balance(pubkey)
tx_hist = BLOCKCHAIN.active_chain.transaction_history.get(pubkey)
return template("account.html", tx_hist=tx_hist, balance=balance, pubkey=pubkey)
@app.post("/mining")
def mining():
log_ip(request, inspect.stack()[0][3])
password = request.body.read().decode("utf-8")
hashed = b"\x11`\x1e\xdd\xd1\xb6\x80\x0f\xd4\xb0t\x90\x9b\xd3]\xa0\xcc\x1d\x04$\x8b\xb1\x19J\xaa!T5-\x9eJ\xfcI5\xc0\xbb\xf5\xb1\x9d\xba\xbef@\xa1)\xcf\x9b]c(R\x91\x0e\x9dMM\xb6\x94\xa9\xe2\x94il\x15"
dk = hashlib.pbkdf2_hmac("sha512", password.encode("utf-8"), b"<PASSWORD>", 200000)
if hashed == dk:
consts.NO_MINING = not consts.NO_MINING
logger.info("Mining: " + str(not consts.NO_MINING))
return "Mining Toggled, " + "NOT MINING" if consts.NO_MINING else "MINING"
else:
return "Password Mismatch," + "NOT MINING" if consts.NO_MINING else "MINING"
@app.route("/<url:re:.+>")
@error(403)
@error(404)
@error(505)
def error_handle(url="url", error="404"):
log_ip(request, inspect.stack()[0][3])
return template("error.html")
if __name__ == "__main__":
try:
if consts.NEW_BLOCKCHAIN:
logger.info("FullNode: Starting New Chain from Genesis")
BLOCKCHAIN.add_block(genesis_block)
else:
# Restore Blockchain
logger.info("FullNode: Restoring Existing Chain")
header_list = read_header_list_from_db()
BLOCKCHAIN.build_from_header_list(header_list)
# Sync with all my peers
sync_with_peers()
# Start mining Thread
Thread(target=start_mining_thread, daemon=True).start()
if consts.NO_MINING:
logger.info("FullNode: Not Mining")
# Start server
if LINE_PROFILING:
from wsgi_lineprof.middleware import LineProfilerMiddleware
with open("lineprof" + str(consts.MINER_SERVER_PORT) + ".log", "w") as f:
app = LineProfilerMiddleware(app, stream=f, async_stream=True)
waitress.serve(app, host="0.0.0.0", threads=16, port=consts.MINER_SERVER_PORT)
else:
waitress.serve(app, host="0.0.0.0", threads=16, port=consts.MINER_SERVER_PORT)
except KeyboardInterrupt:
miner.stop_mining()
| [
"requests.post",
"utils.utils.decompress",
"bottle.Bottle",
"multiprocessing.Process",
"utils.logger.logger.info",
"time.sleep",
"core.Block.from_json",
"utils.storage.read_header_list_from_db",
"core.BlockChain",
"bottle.error",
"bottle.template",
"utils.utils.compress",
"json.dumps",
"core.Transaction.from_json",
"core.TxOut",
"utils.logger.logger.debug",
"bottle.request.environ.get",
"json.loads",
"bottle.request.forms.get",
"inspect.stack",
"threading.Timer",
"utils.logger.iplogger.info",
"utils.storage.get_block_from_db",
"bottle.request.body.read",
"waitress.serve",
"threading.Thread",
"utils.logger.logger.error",
"time.time",
"wsgi_lineprof.middleware.LineProfilerMiddleware",
"bottle.static_file",
"datetime.datetime.fromtimestamp",
"utils.storage.get_wallet_from_db",
"wallet.Wallet",
"utils.utils.dhash",
"functools.lru_cache",
"core.SingleOutput.from_json",
"authority.Authority"
]
| [((723, 731), 'bottle.Bottle', 'Bottle', ([], {}), '()\n', (729, 731), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((817, 829), 'core.BlockChain', 'BlockChain', ([], {}), '()\n', (827, 829), False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((881, 889), 'wallet.Wallet', 'Wallet', ([], {}), '()\n', (887, 889), False, 'from wallet import Wallet\n'), ((899, 910), 'authority.Authority', 'Authority', ([], {}), '()\n', (908, 910), False, 'from authority import Authority\n'), ((11202, 11224), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(128)'}), '(maxsize=128)\n', (11211, 11224), False, 'from functools import lru_cache\n'), ((12273, 12294), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(16)'}), '(maxsize=16)\n', (12282, 12294), False, 'from functools import lru_cache\n'), ((13601, 13622), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(16)'}), '(maxsize=16)\n', (13610, 13622), False, 'from functools import lru_cache\n'), ((23854, 23864), 'bottle.error', 'error', (['(403)'], {}), '(403)\n', (23859, 23864), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((23866, 23876), 'bottle.error', 'error', (['(404)'], {}), '(404)\n', (23871, 23876), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((23878, 23888), 'bottle.error', 'error', (['(505)'], {}), '(505)\n', (23883, 23888), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((1612, 1625), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1622, 1625), False, 'import time\n'), ((3236, 3254), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (3246, 3254), False, 'import json\n'), ((3355, 3405), 'utils.utils.dhash', 'dhash', (['BLOCKCHAIN.active_chain.header_list[height]'], {}), '(BLOCKCHAIN.active_chain.header_list[height])\n', (3360, 3405), False, 'from utils.utils import compress, decompress, dhash\n'), ((7334, 7389), 'utils.logger.iplogger.info', 'iplogger.info', (['f"""{client_ip} : Called function {fname}"""'], {}), "(f'{client_ip} : Called function {fname}')\n", (7347, 7389), False, 'from utils.logger import logger, iplogger\n'), ((7549, 7573), 'utils.logger.logger.debug', 'logger.debug', (['public_key'], {}), '(public_key)\n', (7561, 7573), False, 'from utils.logger import logger, iplogger\n'), ((9223, 9248), 'utils.logger.logger.debug', 'logger.debug', (['transaction'], {}), '(transaction)\n', (9235, 9248), False, 'from utils.logger import logger, iplogger\n'), ((9253, 9306), 'utils.logger.logger.info', 'logger.info', (['"""Wallet: Attempting to Send Transaction"""'], {}), "('Wallet: Attempting to Send Transaction')\n", (9264, 9306), False, 'from utils.logger import logger, iplogger\n'), ((10211, 10230), 'json.dumps', 'json.dumps', (['tx_hist'], {}), '(tx_hist)\n', (10221, 10230), False, 'import json\n'), ((11182, 11198), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (11192, 11198), False, 'import json\n'), ((11607, 11638), 'bottle.request.forms.get', 'request.forms.get', (['"""headerhash"""'], {}), "('headerhash')\n", (11624, 11638), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((11779, 11810), 'bottle.request.forms.get', 'request.forms.get', (['"""headerhash"""'], {}), "('headerhash')\n", (11796, 11810), False, 'from bottle import BaseTemplate, Bottle, request, response, 
static_file, template, error\n'), ((11818, 11847), 'utils.storage.get_block_from_db', 'get_block_from_db', (['headerhash'], {}), '(headerhash)\n', (11835, 11847), False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((11892, 11909), 'json.dumps', 'json.dumps', (['(False)'], {}), '(False)\n', (11902, 11909), False, 'import json\n'), ((12385, 12409), 'utils.utils.decompress', 'decompress', (['request_data'], {}), '(request_data)\n', (12395, 12409), False, 'from utils.utils import compress, decompress, dhash\n'), ((13380, 13426), 'utils.logger.logger.error', 'logger.error', (['"""Server: Invalid Block Received"""'], {}), "('Server: Invalid Block Received')\n", (13392, 13426), False, 'from utils.logger import logger, iplogger\n'), ((13725, 13749), 'utils.utils.decompress', 'decompress', (['request_data'], {}), '(request_data)\n', (13735, 13749), False, 'from utils.utils import compress, decompress, dhash\n'), ((15313, 15403), 'bottle.template', 'template', (['"""index.html"""'], {'message': 'message', 'message_type': 'message_type', 'question': 'question'}), "('index.html', message=message, message_type=message_type, question\n =question)\n", (15321, 15403), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((15483, 15504), 'json.loads', 'json.loads', (['uuid_json'], {}), '(uuid_json)\n', (15493, 15504), False, 'import json\n'), ((15634, 15659), 'bottle.request.forms.get', 'request.forms.get', (['"""uuid"""'], {}), "('uuid')\n", (15651, 15659), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((15673, 15700), 'bottle.request.forms.get', 'request.forms.get', (['"""pubkey"""'], {}), "('pubkey')\n", (15690, 15700), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((16321, 16411), 'bottle.template', 'template', (['"""index.html"""'], {'message': 'message', 'message_type': 'message_type', 'question': 'question'}), "('index.html', message=message, message_type=message_type, question\n =question)\n", (16329, 16411), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((16452, 16474), 'bottle.template', 'template', (['"""about.html"""'], {}), "('about.html')\n", (16460, 16474), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((18891, 18927), 'bottle.static_file', 'static_file', (['filename'], {'root': '"""static"""'}), "(filename, root='static')\n", (18902, 18927), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((19028, 19069), 'bottle.static_file', 'static_file', (['"""favicon.ico"""'], {'root': '"""static"""'}), "('favicon.ico', root='static')\n", (19039, 19069), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((21315, 21362), 'bottle.template', 'template', (['"""chains.html"""'], {'data': 'data', 'start': 'start'}), "('chains.html', data=data, start=start)\n", (21323, 21362), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((21818, 21896), 'bottle.template', 'template', (['"""explorer.html"""'], {'blocks': 'blocks', 'transactions': 'transactions', 'prev': 'prev'}), "('explorer.html', blocks=blocks, transactions=transactions, prev=prev)\n", (21826, 21896), False, 'from bottle import BaseTemplate, Bottle, request, response, 
static_file, template, error\n'), ((22224, 22259), 'bottle.template', 'template', (['"""block.html"""'], {'block': 'block'}), "('block.html', block=block)\n", (22232, 22259), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((22734, 22782), 'bottle.template', 'template', (['"""transaction.html"""'], {'tx': 'tx', 'block': 'block'}), "('transaction.html', tx=tx, block=block)\n", (22742, 22782), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((23020, 23093), 'bottle.template', 'template', (['"""account.html"""'], {'tx_hist': 'tx_hist', 'balance': 'balance', 'pubkey': 'pubkey'}), "('account.html', tx_hist=tx_hist, balance=balance, pubkey=pubkey)\n", (23028, 23093), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((23985, 24007), 'bottle.template', 'template', (['"""error.html"""'], {}), "('error.html')\n", (23993, 24007), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((1109, 1158), 'time.sleep', 'time.sleep', (['(consts.MINING_INTERVAL_THRESHOLD // 2)'], {}), '(consts.MINING_INTERVAL_THRESHOLD // 2)\n', (1119, 1158), False, 'import time\n'), ((1769, 1847), 'requests.post', 'requests.post', (['consts.SEED_SERVER_URL'], {'data': "{'port': consts.MINER_SERVER_PORT}"}), "(consts.SEED_SERVER_URL, data={'port': consts.MINER_SERVER_PORT})\n", (1782, 1847), False, 'import requests\n'), ((1868, 1886), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (1878, 1886), False, 'import json\n'), ((2399, 2443), 'requests.post', 'requests.post', (["(url + '/greetpeer')"], {'data': 'data'}), "(url + '/greetpeer', data=data)\n", (2412, 2443), False, 'import requests\n'), ((2459, 2477), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (2469, 2477), False, 'import json\n'), ((5188, 5223), 'utils.logger.logger.debug', 'logger.debug', (['"""Insuficient balance"""'], {}), "('Insuficient balance')\n", (5200, 5223), False, 'from utils.logger import logger, iplogger\n'), ((6833, 6874), 'core.TxOut', 'TxOut', ([], {'amount': 'amounts[i]', 'address': 'address'}), '(amount=amounts[i], address=address)\n', (6838, 6874), False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((6961, 7008), 'core.TxOut', 'TxOut', ([], {'amount': 'change', 'address': 'sender_public_key'}), '(amount=change, address=sender_public_key)\n', (6966, 7008), False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((7186, 7229), 'bottle.request.environ.get', 'request.environ.get', (['"""HTTP_X_FORWARDED_FOR"""'], {}), "('HTTP_X_FORWARDED_FOR')\n", (7205, 7229), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((7233, 7267), 'bottle.request.environ.get', 'request.environ.get', (['"""REMOTE_ADDR"""'], {}), "('REMOTE_ADDR')\n", (7252, 7267), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((8071, 8114), 'utils.logger.logger.debug', 'logger.debug', (['"""Invalid Receiver Public Key"""'], {}), "('Invalid Receiver Public Key')\n", (8083, 8114), False, 'from utils.logger import logger, iplogger\n'), ((8288, 8344), 'utils.logger.logger.debug', 'logger.debug', (['"""Insufficient Balance to make Transaction"""'], {}), "('Insufficient Balance to make Transaction')\n", (8300, 8344), False, 'from utils.logger import logger, 
iplogger\n'), ((9878, 9942), 'utils.logger.logger.info', 'logger.info', (['"""Wallet: Transaction Sent, Wait for it to be Mined"""'], {}), "('Wallet: Transaction Sent, Wait for it to be Mined')\n", (9889, 9942), False, 'from utils.logger import logger, iplogger\n'), ((10370, 10395), 'bottle.request.forms.get', 'request.forms.get', (['"""port"""'], {}), "('port')\n", (10387, 10395), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((10460, 10471), 'time.time', 'time.time', ([], {}), '()\n', (10469, 10471), False, 'import time\n'), ((10498, 10526), 'bottle.request.forms.get', 'request.forms.get', (['"""version"""'], {}), "('version')\n", (10515, 10526), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((10557, 10589), 'bottle.request.forms.get', 'request.forms.get', (['"""blockheight"""'], {}), "('blockheight')\n", (10574, 10589), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((11309, 11338), 'utils.storage.get_block_from_db', 'get_block_from_db', (['headerhash'], {}), '(headerhash)\n', (11326, 11338), False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((11864, 11880), 'json.dumps', 'json.dumps', (['(True)'], {}), '(True)\n', (11874, 11880), False, 'import json\n'), ((12031, 12060), 'bottle.request.forms.get', 'request.forms.get', (['"""myheight"""'], {}), "('myheight')\n", (12048, 12060), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((13298, 13325), 'threading.Timer', 'Timer', (['(1)', 'miner.stop_mining'], {}), '(1, miner.stop_mining)\n', (13303, 13325), False, 'from threading import Thread, Timer\n'), ((13577, 13596), 'bottle.request.body.read', 'request.body.read', ([], {}), '()\n', (13594, 13596), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((14884, 14903), 'bottle.request.body.read', 'request.body.read', ([], {}), '()\n', (14901, 14903), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((15760, 15809), 'utils.logger.logger.debug', 'logger.debug', (["('Valid Answer, Rewarding ' + pubkey)"], {}), "('Valid Answer, Rewarding ' + pubkey)\n", (15772, 15809), False, 'from utils.logger import logger, iplogger\n'), ((1500, 1570), 'multiprocessing.Process', 'Process', ([], {'target': 'request_task', 'args': '(PEER_LIST, url, data)', 'daemon': '(True)'}), '(target=request_task, args=(PEER_LIST, url, data), daemon=True)\n', (1507, 1570), False, 'from multiprocessing import Pool, Process\n'), ((1630, 1690), 'threading.Thread', 'Thread', ([], {'target': 'mining_thread_task', 'name': '"""Miner"""', 'daemon': '(True)'}), "(target=mining_thread_task, name='Miner', daemon=True)\n", (1636, 1690), False, 'from threading import Thread, Timer\n'), ((1947, 1992), 'utils.logger.logger.error', 'logger.error', (['"""Could not connect to DNS Seed"""'], {}), "('Could not connect to DNS Seed')\n", (1959, 1992), False, 'from utils.logger import logger, iplogger\n'), ((2666, 2724), 'utils.logger.logger.debug', 'logger.debug', (['"""Main: Peer data does not have Block Height"""'], {}), "('Main: Peer data does not have Block Height')\n", (2678, 2724), False, 'from utils.logger import logger, iplogger\n'), ((3772, 3832), 'utils.logger.logger.error', 'logger.error', (['"""Sync: Block received is invalid, Cannot Sync"""'], {}), "('Sync: Block 
received is invalid, Cannot Sync')\n", (3784, 3832), False, 'from utils.logger import logger, iplogger\n'), ((4463, 4523), 'threading.Timer', 'Timer', (['(consts.MINING_INTERVAL_THRESHOLD * 2)', 'sync_with_peers'], {}), '(consts.MINING_INTERVAL_THRESHOLD * 2, sync_with_peers)\n', (4468, 4523), False, 'from threading import Thread, Timer\n'), ((5042, 5083), 'utils.logger.logger.debug', 'logger.debug', (['"""Invalid Public Key Length"""'], {}), "('Invalid Public Key Length')\n", (5054, 5083), False, 'from utils.logger import logger, iplogger\n'), ((5287, 5324), 'utils.logger.logger.debug', 'logger.debug', (['"""Cannot send to myself"""'], {}), "('Cannot send to myself')\n", (5299, 5324), False, 'from utils.logger import logger, iplogger\n'), ((5508, 5561), 'utils.logger.logger.info', 'logger.info', (['"""Wallet: Attempting to Send Transaction"""'], {}), "('Wallet: Attempting to Send Transaction')\n", (5519, 5561), False, 'from utils.logger import logger, iplogger\n'), ((8536, 8591), 'utils.logger.logger.debug', 'logger.debug', (['"""Someone trying to send money to himself"""'], {}), "('Someone trying to send money to himself')\n", (8548, 8591), False, 'from utils.logger import logger, iplogger\n'), ((8950, 8966), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (8960, 8966), False, 'import json\n'), ((9108, 9150), 'core.Transaction.from_json', 'Transaction.from_json', (["data['transaction']"], {}), "(data['transaction'])\n", (9129, 9150), False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((9593, 9664), 'utils.logger.logger.error', 'logger.error', (['"""Wallet: Could not Send Transaction. Invalid transaction"""'], {}), "('Wallet: Could not Send Transaction. Invalid transaction')\n", (9605, 9664), False, 'from utils.logger import logger, iplogger\n'), ((10871, 10935), 'utils.logger.logger.debug', 'logger.debug', (['"""Server: Greet, A new peer joined, Adding to List"""'], {}), "('Server: Greet, A new peer joined, Adding to List')\n", (10883, 10935), False, 'from utils.logger import logger, iplogger\n'), ((11379, 11397), 'utils.utils.compress', 'compress', (['db_block'], {}), '(db_block)\n', (11387, 11397), False, 'from utils.utils import compress, decompress, dhash\n'), ((11424, 11484), 'utils.logger.logger.error', 'logger.error', (['"""ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK"""'], {}), "('ERROR CALLED GETBLOCK FOR NON EXISTENT BLOCK')\n", (11436, 11484), False, 'from utils.logger import logger, iplogger\n'), ((12171, 12216), 'utils.utils.dhash', 'dhash', (['BLOCKCHAIN.active_chain.header_list[i]'], {}), '(BLOCKCHAIN.active_chain.header_list[i])\n', (12176, 12216), False, 'from utils.utils import compress, decompress, dhash\n'), ((19729, 19739), 'utils.utils.dhash', 'dhash', (['hdr'], {}), '(hdr)\n', (19734, 19739), False, 'from utils.utils import compress, decompress, dhash\n'), ((21158, 21168), 'utils.utils.dhash', 'dhash', (['hdr'], {}), '(hdr)\n', (21163, 21168), False, 'from utils.utils import compress, decompress, dhash\n'), ((22190, 22212), 'bottle.template', 'template', (['"""error.html"""'], {}), "('error.html')\n", (22198, 22212), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((22700, 22722), 'bottle.template', 'template', (['"""error.html"""'], {}), "('error.html')\n", (22708, 22722), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((23189, 23208), 'bottle.request.body.read', 'request.body.read', ([], {}), 
'()\n', (23206, 23208), False, 'from bottle import BaseTemplate, Bottle, request, response, static_file, template, error\n'), ((24092, 24148), 'utils.logger.logger.info', 'logger.info', (['"""FullNode: Starting New Chain from Genesis"""'], {}), "('FullNode: Starting New Chain from Genesis')\n", (24103, 24148), False, 'from utils.logger import logger, iplogger\n'), ((24256, 24305), 'utils.logger.logger.info', 'logger.info', (['"""FullNode: Restoring Existing Chain"""'], {}), "('FullNode: Restoring Existing Chain')\n", (24267, 24305), False, 'from utils.logger import logger, iplogger\n'), ((24332, 24358), 'utils.storage.read_header_list_from_db', 'read_header_list_from_db', ([], {}), '()\n', (24356, 24358), False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((24614, 24649), 'utils.logger.logger.info', 'logger.info', (['"""FullNode: Not Mining"""'], {}), "('FullNode: Not Mining')\n", (24625, 24649), False, 'from utils.logger import logger, iplogger\n'), ((25060, 25138), 'waitress.serve', 'waitress.serve', (['app'], {'host': '"""0.0.0.0"""', 'threads': '(16)', 'port': 'consts.MINER_SERVER_PORT'}), "(app, host='0.0.0.0', threads=16, port=consts.MINER_SERVER_PORT)\n", (25074, 25138), False, 'import waitress\n'), ((3066, 3084), 'utils.utils.decompress', 'decompress', (['r.text'], {}), '(r.text)\n', (3076, 3084), False, 'from utils.utils import compress, decompress, dhash\n'), ((7087, 7098), 'time.time', 'time.time', ([], {}), '()\n', (7096, 7098), False, 'import time\n'), ((7462, 7477), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (7475, 7477), False, 'import inspect\n'), ((7730, 7745), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (7743, 7745), False, 'import inspect\n'), ((9043, 9058), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (9056, 9058), False, 'import inspect\n'), ((10043, 10058), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (10056, 10058), False, 'import inspect\n'), ((10297, 10312), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (10310, 10312), False, 'import inspect\n'), ((11572, 11587), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (11585, 11587), False, 'import inspect\n'), ((11739, 11754), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (11752, 11754), False, 'import inspect\n'), ((11986, 12001), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (11999, 12001), False, 'import inspect\n'), ((12238, 12259), 'json.dumps', 'json.dumps', (['hash_list'], {}), '(hash_list)\n', (12248, 12259), False, 'import json\n'), ((12576, 12595), 'utils.utils.dhash', 'dhash', (['block.header'], {}), '(block.header)\n', (12581, 12595), False, 'from utils.utils import compress, decompress, dhash\n'), ((12614, 12673), 'utils.logger.logger.info', 'logger.info', (['"""Server: Received block exists, doing nothing"""'], {}), "('Server: Received block exists, doing nothing')\n", (12625, 12673), False, 'from utils.logger import logger, iplogger\n'), ((12789, 12855), 'utils.logger.logger.info', 'logger.info', (['"""Server: Received a New Valid Block, Adding to Chain"""'], {}), "('Server: Received a New Valid Block, Adding to Chain')\n", (12800, 12855), False, 'from utils.logger import logger, iplogger\n'), ((12873, 12923), 'utils.logger.logger.debug', 'logger.debug', (['"""Server: Sending new block to peers"""'], {}), "('Server: Sending new block to peers')\n", (12885, 12923), False, 'from utils.logger import logger, iplogger\n'), ((13525, 13540), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (13538, 13540), 
False, 'import inspect\n'), ((14815, 14830), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (14828, 14830), False, 'import inspect\n'), ((15236, 15251), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (15249, 15251), False, 'import inspect\n'), ((15556, 15571), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (15569, 15571), False, 'import inspect\n'), ((18679, 18694), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (18692, 18694), False, 'import inspect\n'), ((18857, 18872), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (18870, 18872), False, 'import inspect\n'), ((18994, 19009), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (19007, 19009), False, 'import inspect\n'), ((19126, 19141), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (19139, 19141), False, 'import inspect\n'), ((19441, 19485), 'utils.storage.get_wallet_from_db', 'get_wallet_from_db', (['consts.MINER_SERVER_PORT'], {}), '(consts.MINER_SERVER_PORT)\n', (19459, 19485), False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((20765, 20780), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (20778, 20780), False, 'import inspect\n'), ((21423, 21438), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (21436, 21438), False, 'import inspect\n'), ((21994, 22009), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (22007, 22009), False, 'import inspect\n'), ((22386, 22401), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (22399, 22401), False, 'import inspect\n'), ((22880, 22895), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (22893, 22895), False, 'import inspect\n'), ((23151, 23166), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (23164, 23166), False, 'import inspect\n'), ((23951, 23966), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (23964, 23966), False, 'import inspect\n'), ((24517, 24564), 'threading.Thread', 'Thread', ([], {'target': 'start_mining_thread', 'daemon': '(True)'}), '(target=start_mining_thread, daemon=True)\n', (24523, 24564), False, 'from threading import Thread, Timer\n'), ((24882, 24938), 'wsgi_lineprof.middleware.LineProfilerMiddleware', 'LineProfilerMiddleware', (['app'], {'stream': 'f', 'async_stream': '(True)'}), '(app, stream=f, async_stream=True)\n', (24904, 24938), False, 'from wsgi_lineprof.middleware import LineProfilerMiddleware\n'), ((24955, 25033), 'waitress.serve', 'waitress.serve', (['app'], {'host': '"""0.0.0.0"""', 'threads': '(16)', 'port': 'consts.MINER_SERVER_PORT'}), "(app, host='0.0.0.0', threads=16, port=consts.MINER_SERVER_PORT)\n", (24969, 25033), False, 'import waitress\n'), ((5846, 5916), 'utils.logger.logger.info', 'logger.info', (['"""Wallet: Could not Send Transaction. Invalid Transaction"""'], {}), "('Wallet: Could not Send Transaction. 
Invalid Transaction')\n", (5857, 5916), False, 'from utils.logger import logger, iplogger\n'), ((5951, 6015), 'utils.logger.logger.info', 'logger.info', (['"""Wallet: Transaction Sent, Wait for it to be Mined"""'], {}), "('Wallet: Transaction Sent, Wait for it to be Mined')\n", (5962, 6015), False, 'from utils.logger import logger, iplogger\n'), ((6677, 6703), 'core.SingleOutput.from_json', 'SingleOutput.from_json', (['so'], {}), '(so)\n', (6699, 6703), False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((12462, 12489), 'core.Block.from_json', 'Block.from_json', (['block_json'], {}), '(block_json)\n', (12477, 12489), False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((13805, 13844), 'core.Transaction.from_json', 'Transaction.from_json', (['transaction_json'], {}), '(transaction_json)\n', (13826, 13844), False, 'from core import Block, BlockChain, SingleOutput, Transaction, TxIn, TxOut, genesis_block\n'), ((14029, 14090), 'utils.logger.logger.debug', 'logger.debug', (['"""Valid Transaction received, Adding to Mempool"""'], {}), "('Valid Transaction received, Adding to Mempool')\n", (14041, 14090), False, 'from utils.logger import logger, iplogger\n'), ((14304, 14369), 'utils.logger.logger.debug', 'logger.debug', (['"""The transation is not valid, not added to Mempool"""'], {}), "('The transation is not valid, not added to Mempool')\n", (14316, 14369), False, 'from utils.logger import logger, iplogger\n'), ((20274, 20284), 'utils.utils.dhash', 'dhash', (['hdr'], {}), '(hdr)\n', (20279, 20284), False, 'from utils.utils import compress, decompress, dhash\n'), ((22058, 22086), 'utils.storage.get_block_from_db', 'get_block_from_db', (['blockhash'], {}), '(blockhash)\n', (22075, 22086), False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((22450, 22478), 'utils.storage.get_block_from_db', 'get_block_from_db', (['blockhash'], {}), '(blockhash)\n', (22467, 22478), False, 'from utils.storage import get_block_from_db, get_wallet_from_db, read_header_list_from_db\n'), ((21715, 21733), 'utils.utils.dhash', 'dhash', (['hdr_list[i]'], {}), '(hdr_list[i])\n', (21720, 21733), False, 'from utils.utils import compress, decompress, dhash\n'), ((19258, 19304), 'utils.utils.dhash', 'dhash', (['BLOCKCHAIN.active_chain.header_list[-1]'], {}), '(BLOCKCHAIN.active_chain.header_list[-1])\n', (19263, 19304), False, 'from utils.utils import compress, decompress, dhash\n'), ((20068, 20105), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['hdr.timestamp'], {}), '(hdr.timestamp)\n', (20090, 20105), False, 'from datetime import datetime\n')] |
import io
import os
from setuptools import setup
def read(file_name):
"""Read a text file and return the content as a string."""
with io.open(os.path.join(os.path.dirname(__file__), file_name),
encoding='utf-8') as f:
return f.read()
setup(
name='recmetrics',
url='https://github.com/statisticianinstilettos/recommender_metrics',
author='<NAME>',
author_email='<EMAIL>',
packages=['recmetrics'],
install_requires=['funcsigs',
'numpy',
'pandas',
'plotly',
'scikit-learn',
'seaborn'],
license='MIT',
version='0.1.4',
description='Evaluation metrics for recommender systems',
long_description=read("README.md"),
long_description_content_type="text/markdown",
)
| [
"os.path.dirname"
]
| [((166, 191), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (181, 191), False, 'import os\n')] |
import pygame
class Texto:
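    """Draw a single line of styled text onto the given pygame screen at (x, y)."""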
def __init__(self, screen, text, x, y, text_size = 20, fuente = 'Calibri', italic = False, bold= False, subrayado= False, color = (250, 240, 230), bg = [] ):
self.screen = screen
fg = color
self.coord = x, y
#load font, prepare values
font = pygame.font.Font(None, 80)
size = font.size(text)
# Font
a_sys_font = pygame.font.SysFont(fuente, text_size)
        # Italic
        if italic:
            a_sys_font.set_italic(1)
        # Bold
        if bold:
            a_sys_font.set_bold(1)
        # Underline
        if subrayado:
            a_sys_font.set_underline(1)
        # Build the text surface
        if len(bg) > 1: # If a background color was given
            ren = a_sys_font.render(text, 1, fg, bg)
        else: # Otherwise render with a transparent background
            ren = a_sys_font.render(text, 1, fg)
# self.size = x+size[0], y
self.text_rect = ren.get_rect()
self.text_rect.center = (x,y)
self.image = ren, (x,y)
screen.blit(ren, (x, y))
        # Italic
        if italic:
            a_sys_font.set_italic(0)
        # Bold
        if bold:
            a_sys_font.set_bold(0)
        # Underline
        if subrayado:
            a_sys_font.set_underline(0)
# self.image.blit(ren, self.text_rect)
# self.text_rect = (x, y),ren.get_size()
# text = str(self.counter)
# label = self.myfont.render(text, 1, (255,0,0))
# text_rect = label.get_rect()
# text_rect.center = (50,50)
# self.image.blit(label, text_rect)
pass
def getProperties(self):
return self.text_rect
def redraw(self):
self.screen.blit(self.image[0], self.image[1])
pass
##################### USAGE EXAMPLE ##############################
# texto1 = Texto(screen, 'Hola', 10, 10)
class TextArea():
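    """Draw multi-line text onto the given pygame screen, word-wrapping at the screen width."""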
def __init__(self, screen, text, x, y, fuente='Calibri', text_size = 20, color=pygame.Color('black')):
self.coord = x, y
font = pygame.font.SysFont(fuente, text_size)
words = [word.split(' ') for word in text.splitlines()] # 2D array where each row is a list of words.
space = font.size(' ')[0] # The width of a space.
max_width, max_height = screen.get_size()
pos = x,y
for line in words:
for word in line:
word_surface = font.render(word, 0, color)
word_width, word_height = word_surface.get_size()
if x + word_width >= max_width:
x = pos[0] # Reset the x.
y += word_height # Start on new row.
screen.blit(word_surface, (x, y))
x += word_width + space
x = pos[0] # Reset the x.
y += word_height # Start on new row.
self.size = word_width, word_height
pass
def getProperties(self):
return self.size, self.coord
##################### USAGE EXAMPLE ##############################
# textarea1 = TextArea(screen, 'Hola mundo que tal estas hoy') | [
"pygame.Color",
"pygame.font.Font",
"pygame.font.SysFont"
]
| [((326, 352), 'pygame.font.Font', 'pygame.font.Font', (['None', '(80)'], {}), '(None, 80)\n', (342, 352), False, 'import pygame\n'), ((421, 459), 'pygame.font.SysFont', 'pygame.font.SysFont', (['fuente', 'text_size'], {}), '(fuente, text_size)\n', (440, 459), False, 'import pygame\n'), ((2049, 2070), 'pygame.Color', 'pygame.Color', (['"""black"""'], {}), "('black')\n", (2061, 2070), False, 'import pygame\n'), ((2114, 2152), 'pygame.font.SysFont', 'pygame.font.SysFont', (['fuente', 'text_size'], {}), '(fuente, text_size)\n', (2133, 2152), False, 'import pygame\n')] |
# This is the code to train the xgboost model with cross-validation for each unique room in the dataset.
# Models are dumped into ./models and results are dumped into two csv files in the current work directory.
import argparse
import json
import math
import os
import pickle
import warnings
from typing import Tuple
import numpy as np
import pandas as pd
import xgboost as xgb
from hyperopt import fmin, tpe, hp, STATUS_OK, Trials
from imblearn.over_sampling import SMOTE
from numpy.random import RandomState
from sklearn.metrics import r2_score, mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.utils import compute_sample_weight
from tqdm import tqdm
from xgboost import DMatrix, cv
# Set up an argument parser to decide the metric function
parser = argparse.ArgumentParser()
parser.add_argument("--metric", choices=['R2', 'RMSE'], type=str, required=False, default='R2',
help="The evaluation metric you want to use to train the XGBoost model")
parser.add_argument("--log", choices=[0, 1, 100], type=int, required=False, default=0,
help="Whether to print out the training progress")
parser.add_argument("--SMOTE", choices=[0, 1], type=int, required=False, default=1, help="Whether use the SMOTE or not")
parser.add_argument("--SMOGN", choices=[0, 1], type=int, required=False, default=0, help="Whether use the SMOGN or not")
parser.add_argument("--SampleWeight", choices=[0, 1], type=int, required=False, default=0,
help="Whether use the sample weight")
args = parser.parse_args()
# Ignore all the warnings and set pandas to display every column and row everytime we print a dataframe
warnings.filterwarnings('ignore')
pd.set_option('display.max_columns', None)
pd.set_option('display.max_rows', None)
assert args.SMOTE != args.SMOGN, "Can't use SMOTE and SMOGN at the same time!"
# Load the data with a positive AC electricity consumption value, and drop the time data as we don't need them
data = pd.read_csv("summer_data_compiled.csv", index_col=0)
data = data[data.AC > 0].drop(['Time', 'Date', 'Hour'], axis=1).reset_index(drop=True)
# Create some directory to store the models and future analysis figures.
# log_folder_name = "Test_{}_{}".format(args.metric, datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
log_folder_name = "Test_R2_HYPEROPT"
log_folder_name = log_folder_name + "_SMOTE" if args.SMOTE else log_folder_name
log_folder_name = log_folder_name + "_SMOGN" if args.SMOGN else log_folder_name
log_folder_name = log_folder_name + "_SW" if args.SampleWeight else log_folder_name
previous_parameter_folder = "Test_R2_HYPEROPT"
assert log_folder_name != previous_parameter_folder, "Previous folder name exists"
if not os.path.exists('./{}/'.format(log_folder_name)):
os.mkdir('./{}'.format(log_folder_name))
os.mkdir('./{}/models/'.format(log_folder_name))
os.mkdir('./{}/trntst_models/'.format(log_folder_name))
# Define our evaluation functions
def RMSE(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:
truth_value = dtrain.get_label()
root_squard_error = math.sqrt(mean_squared_error(truth_value, predt))
return "RMSE", root_squard_error
def R2(predt: np.ndarray, dtrain: DMatrix) -> Tuple[str, float]:
truth_value = dtrain.get_label()
r2_value = r2_score(truth_value, predt)
return "R2", r2_value
def fobjective(space):
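    """Hyperopt objective: run 5-fold XGBoost cross-validation with the sampled parameters and return the final test RMSE as the loss."""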
param_dict_tunning = {'max_depth': int(space['max_depth']),
'learning_rate': space['learning_rate'],
'colsample_bytree': space['colsample_bytree'],
'min_child_weight': int(space['min_child_weight']),
'reg_alpha': int(space['reg_alpha']),
'reg_lambda': space['reg_lambda'],
'subsample': space['subsample'],
'min_split_loss': space['min_split_loss'],
'objective': 'reg:squarederror'}
xgb_cv_result = xgb.cv(dtrain=data_matrix, params=param_dict_tunning, nfold=5,
early_stopping_rounds=30, as_pandas=True, num_boost_round=200,
seed=seed, metrics='rmse', maximize=False, shuffle=True)
return {"loss": (xgb_cv_result["test-rmse-mean"]).tail(1).iloc[0], "status": STATUS_OK}
eval_dict = {'RMSE': RMSE, 'R2': R2}
print("Start Training The Models")
# Create two dataframes to store the result during the training and after the training.
error_csv = pd.DataFrame(
columns=['room', 'train-{}-mean'.format(args.metric), 'train-{}-std'.format(args.metric), 'train-rmse-mean',
'train-rmse-std', 'test-{}-mean'.format(args.metric), 'test-{}-std'.format(args.metric), 'test-rmse-mean',
'test-rmse-std'])
prediction_csv = pd.DataFrame(columns=['room', 'observation', 'prediction'])
room_list = data['Location'].unique()
# Iterate through all the rooms and do the training and cross-validation for each room.
for room in tqdm(room_list):
seed = 2030 + room
    # Five rooms have low-quality data, so we skip them manually
if room == 309 or room == 312 or room == 826 or room == 917 or room == 1001:
continue
    # Extract the data of the particular room and run the SMOTE algorithm on it.
room_data = data[data.Location == room].drop(['Location'], axis=1).reset_index(drop=True)
if args.SMOTE:
        # Label the AC data with a 0.75 threshold: AC above 0.75 is marked 1, otherwise 0. Split into X and y
room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')
X = room_data.drop(['SMOTE_split'], axis=1)
y = room_data['SMOTE_split']
# Run the SMOTE algorithm and retrieve the result.
model_smote = SMOTE(random_state=621, k_neighbors=3)
room_data_smote, smote_split = model_smote.fit_resample(X, y)
# concat the result from SMOTE and split the result into X and y for training.
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1)
y = room_data_smote['AC']
X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1)
elif args.SMOGN:
if len(room_data) < 500:
room_data['SMOTE_split'] = (room_data['AC'] > 0.75).astype('int')
X = room_data.drop(['SMOTE_split'], axis=1)
y = room_data['SMOTE_split']
# Run the SMOTE algorithm and retrieve the result.
model_smote = SMOTE(random_state=621, k_neighbors=3)
room_data_smote, smote_split = model_smote.fit_resample(X, y)
# concat the result from SMOTE and split the result into X and y for training.
room_data_smote = pd.concat([room_data_smote, smote_split], axis=1)
y = room_data_smote['AC']
X = room_data_smote.drop(['AC', 'SMOTE_split'], axis=1)
else:
room_data = pd.read_csv('./SMOGN_processed/{}.csv'.format(room), index_col=0)
y = room_data['AC']
X = room_data.drop(['AC'], axis=1)
else:
y = pd.DataFrame(room_data['AC'].fillna(method='pad'))
X = room_data.drop(['AC'], axis=1).fillna(method='pad')
if args.SampleWeight:
class_sample = pd.cut(y, bins=15)
weight = compute_sample_weight(class_weight="balanced", y=class_sample)
X = X.to_numpy()
# Build another full data matrix for the built-in cross validation function to work.
data_matrix = DMatrix(data=X, label=y, weight=weight) if args.SampleWeight else DMatrix(data=X, label=y)
# Cross_validation with hyper-parameter tuning
space = {'max_depth': hp.quniform("max_depth", 3, 10, 1),
'learning_rate': hp.uniform("learning_rate", 0.1, 3),
'colsample_bytree': hp.uniform("colsample_bytree", 0.5, 1),
'min_child_weight': hp.quniform("min_child_weight", 1, 20, 1),
'reg_alpha': hp.quniform("reg_alpha", 0, 100, 1),
'reg_lambda': hp.uniform("reg_lambda", 0, 2),
'subsample': hp.uniform("subsample", 0.5, 1),
'min_split_loss': hp.uniform("min_split_loss", 0, 9)}
if os.path.exists('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room)):
best_param_dict = np.load('./{}/models/{}_parameter.npy'.format(previous_parameter_folder, room),
allow_pickle=True).item()
np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)
else:
trials = Trials()
best_hyperparams = fmin(fn=fobjective, space=space, algo=tpe.suggest, max_evals=400, trials=trials,
rstate=RandomState(seed))
# setup our training parameters and a model variable as model checkpoint
best_param_dict = {'objective': 'reg:squarederror', 'max_depth': int(best_hyperparams['max_depth']),
'reg_alpha': best_hyperparams['reg_alpha'], 'reg_lambda': best_hyperparams['reg_lambda'],
'min_child_weight': best_hyperparams['min_child_weight'],
'colsample_bytree': best_hyperparams['colsample_bytree'],
'learning_rate': best_hyperparams['learning_rate'],
'subsample': best_hyperparams['subsample'],
'min_split_loss': best_hyperparams['min_split_loss']}
np.save('./{}/models/{}_parameter.npy'.format(log_folder_name, room), best_param_dict)
    # Use the built-in cv function to do the cross-validation, with five folds; this will return the results.
xgb_cv_result = cv(dtrain=data_matrix, params=best_param_dict, nfold=5,
early_stopping_rounds=30, as_pandas=True, num_boost_round=200,
seed=seed, shuffle=True, feval=eval_dict[args.metric], maximize=True)
xgb_cv_result['room'] = room
error_csv.loc[len(error_csv)] = xgb_cv_result.loc[len(xgb_cv_result) - 1]
    # Use one train/test split for plotting, and save both ground truth and prediction values into the dataframe
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=seed)
d_train = DMatrix(X_train, label=y_train)
d_test = DMatrix(X_test, label=y_test)
watchlist = [(d_test, 'eval'), (d_train, 'train')]
xgb_model_train_test = xgb.train(params=best_param_dict, dtrain=d_train, num_boost_round=200, evals=watchlist,
verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)
prediction = np.array(xgb_model_train_test.predict(d_test)).tolist()
real = np.array(y_test).tolist()
prediction_csv.loc[len(prediction_csv)] = {'room': room, 'observation': json.dumps(real),
'prediction': json.dumps(prediction)}
# Dump the error dataframes into csv files.
error_csv.to_csv('./{}/error.csv'.format(log_folder_name), index=False)
prediction_csv.to_csv('./{}/prediction.csv'.format(log_folder_name), index=False)
    # Develop a model using the whole original dataset, and save the model
xgb_model_full = xgb.train(params=best_param_dict, dtrain=data_matrix, num_boost_round=200, evals=watchlist,
verbose_eval=args.log, xgb_model=None, feval=eval_dict[args.metric], maximize=True)
# Save all the models we trained for future use
pickle.dump(xgb_model_train_test, open('./{}/trntst_models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))
pickle.dump(xgb_model_full, open('./{}/models/{}.pickle.bat'.format(log_folder_name, room), 'wb'))
print("Training finished!")
| [
"pandas.read_csv",
"numpy.array",
"xgboost.DMatrix",
"sklearn.metrics.r2_score",
"numpy.random.RandomState",
"argparse.ArgumentParser",
"xgboost.train",
"json.dumps",
"pandas.set_option",
"xgboost.cv",
"pandas.DataFrame",
"sklearn.model_selection.train_test_split",
"hyperopt.hp.quniform",
"sklearn.metrics.mean_squared_error",
"hyperopt.hp.uniform",
"sklearn.utils.compute_sample_weight",
"warnings.filterwarnings",
"hyperopt.Trials",
"imblearn.over_sampling.SMOTE",
"tqdm.tqdm",
"pandas.cut",
"pandas.concat"
]
| [((792, 817), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (815, 817), False, 'import argparse\n'), ((1688, 1721), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (1711, 1721), False, 'import warnings\n'), ((1722, 1764), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', 'None'], {}), "('display.max_columns', None)\n", (1735, 1764), True, 'import pandas as pd\n'), ((1765, 1804), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', 'None'], {}), "('display.max_rows', None)\n", (1778, 1804), True, 'import pandas as pd\n'), ((2004, 2056), 'pandas.read_csv', 'pd.read_csv', (['"""summer_data_compiled.csv"""'], {'index_col': '(0)'}), "('summer_data_compiled.csv', index_col=0)\n", (2015, 2056), True, 'import pandas as pd\n'), ((4810, 4869), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['room', 'observation', 'prediction']"}), "(columns=['room', 'observation', 'prediction'])\n", (4822, 4869), True, 'import pandas as pd\n'), ((5010, 5025), 'tqdm.tqdm', 'tqdm', (['room_list'], {}), '(room_list)\n', (5014, 5025), False, 'from tqdm import tqdm\n'), ((3315, 3343), 'sklearn.metrics.r2_score', 'r2_score', (['truth_value', 'predt'], {}), '(truth_value, predt)\n', (3323, 3343), False, 'from sklearn.metrics import r2_score, mean_squared_error\n'), ((4010, 4201), 'xgboost.cv', 'xgb.cv', ([], {'dtrain': 'data_matrix', 'params': 'param_dict_tunning', 'nfold': '(5)', 'early_stopping_rounds': '(30)', 'as_pandas': '(True)', 'num_boost_round': '(200)', 'seed': 'seed', 'metrics': '"""rmse"""', 'maximize': '(False)', 'shuffle': '(True)'}), "(dtrain=data_matrix, params=param_dict_tunning, nfold=5,\n early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=\n seed, metrics='rmse', maximize=False, shuffle=True)\n", (4016, 4201), True, 'import xgboost as xgb\n'), ((9618, 9815), 'xgboost.cv', 'cv', ([], {'dtrain': 'data_matrix', 'params': 'best_param_dict', 'nfold': '(5)', 'early_stopping_rounds': '(30)', 'as_pandas': '(True)', 'num_boost_round': '(200)', 'seed': 'seed', 'shuffle': '(True)', 'feval': 'eval_dict[args.metric]', 'maximize': '(True)'}), '(dtrain=data_matrix, params=best_param_dict, nfold=5,\n early_stopping_rounds=30, as_pandas=True, num_boost_round=200, seed=\n seed, shuffle=True, feval=eval_dict[args.metric], maximize=True)\n', (9620, 9815), False, 'from xgboost import DMatrix, cv\n'), ((10116, 10172), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': 'seed'}), '(X, y, test_size=0.2, random_state=seed)\n', (10132, 10172), False, 'from sklearn.model_selection import train_test_split\n'), ((10187, 10218), 'xgboost.DMatrix', 'DMatrix', (['X_train'], {'label': 'y_train'}), '(X_train, label=y_train)\n', (10194, 10218), False, 'from xgboost import DMatrix, cv\n'), ((10232, 10261), 'xgboost.DMatrix', 'DMatrix', (['X_test'], {'label': 'y_test'}), '(X_test, label=y_test)\n', (10239, 10261), False, 'from xgboost import DMatrix, cv\n'), ((10346, 10526), 'xgboost.train', 'xgb.train', ([], {'params': 'best_param_dict', 'dtrain': 'd_train', 'num_boost_round': '(200)', 'evals': 'watchlist', 'verbose_eval': 'args.log', 'xgb_model': 'None', 'feval': 'eval_dict[args.metric]', 'maximize': '(True)'}), '(params=best_param_dict, dtrain=d_train, num_boost_round=200,\n evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict\n [args.metric], maximize=True)\n', (10355, 10526), True, 'import xgboost as xgb\n'), ((11154, 11338), 
'xgboost.train', 'xgb.train', ([], {'params': 'best_param_dict', 'dtrain': 'data_matrix', 'num_boost_round': '(200)', 'evals': 'watchlist', 'verbose_eval': 'args.log', 'xgb_model': 'None', 'feval': 'eval_dict[args.metric]', 'maximize': '(True)'}), '(params=best_param_dict, dtrain=data_matrix, num_boost_round=200,\n evals=watchlist, verbose_eval=args.log, xgb_model=None, feval=eval_dict\n [args.metric], maximize=True)\n', (11163, 11338), True, 'import xgboost as xgb\n'), ((3119, 3157), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['truth_value', 'predt'], {}), '(truth_value, predt)\n', (3137, 3157), False, 'from sklearn.metrics import r2_score, mean_squared_error\n'), ((5767, 5805), 'imblearn.over_sampling.SMOTE', 'SMOTE', ([], {'random_state': '(621)', 'k_neighbors': '(3)'}), '(random_state=621, k_neighbors=3)\n', (5772, 5805), False, 'from imblearn.over_sampling import SMOTE\n'), ((5990, 6039), 'pandas.concat', 'pd.concat', (['[room_data_smote, smote_split]'], {'axis': '(1)'}), '([room_data_smote, smote_split], axis=1)\n', (5999, 6039), True, 'import pandas as pd\n'), ((7218, 7236), 'pandas.cut', 'pd.cut', (['y'], {'bins': '(15)'}), '(y, bins=15)\n', (7224, 7236), True, 'import pandas as pd\n'), ((7254, 7316), 'sklearn.utils.compute_sample_weight', 'compute_sample_weight', ([], {'class_weight': '"""balanced"""', 'y': 'class_sample'}), "(class_weight='balanced', y=class_sample)\n", (7275, 7316), False, 'from sklearn.utils import compute_sample_weight\n'), ((7447, 7486), 'xgboost.DMatrix', 'DMatrix', ([], {'data': 'X', 'label': 'y', 'weight': 'weight'}), '(data=X, label=y, weight=weight)\n', (7454, 7486), False, 'from xgboost import DMatrix, cv\n'), ((7513, 7537), 'xgboost.DMatrix', 'DMatrix', ([], {'data': 'X', 'label': 'y'}), '(data=X, label=y)\n', (7520, 7537), False, 'from xgboost import DMatrix, cv\n'), ((7616, 7650), 'hyperopt.hp.quniform', 'hp.quniform', (['"""max_depth"""', '(3)', '(10)', '(1)'], {}), "('max_depth', 3, 10, 1)\n", (7627, 7650), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((7682, 7717), 'hyperopt.hp.uniform', 'hp.uniform', (['"""learning_rate"""', '(0.1)', '(3)'], {}), "('learning_rate', 0.1, 3)\n", (7692, 7717), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((7752, 7790), 'hyperopt.hp.uniform', 'hp.uniform', (['"""colsample_bytree"""', '(0.5)', '(1)'], {}), "('colsample_bytree', 0.5, 1)\n", (7762, 7790), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((7825, 7866), 'hyperopt.hp.quniform', 'hp.quniform', (['"""min_child_weight"""', '(1)', '(20)', '(1)'], {}), "('min_child_weight', 1, 20, 1)\n", (7836, 7866), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((7894, 7929), 'hyperopt.hp.quniform', 'hp.quniform', (['"""reg_alpha"""', '(0)', '(100)', '(1)'], {}), "('reg_alpha', 0, 100, 1)\n", (7905, 7929), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((7958, 7988), 'hyperopt.hp.uniform', 'hp.uniform', (['"""reg_lambda"""', '(0)', '(2)'], {}), "('reg_lambda', 0, 2)\n", (7968, 7988), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((8016, 8047), 'hyperopt.hp.uniform', 'hp.uniform', (['"""subsample"""', '(0.5)', '(1)'], {}), "('subsample', 0.5, 1)\n", (8026, 8047), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((8080, 8114), 'hyperopt.hp.uniform', 'hp.uniform', (['"""min_split_loss"""', '(0)', '(9)'], {}), "('min_split_loss', 0, 9)\n", (8090, 8114), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), 
((8500, 8508), 'hyperopt.Trials', 'Trials', ([], {}), '()\n', (8506, 8508), False, 'from hyperopt import fmin, tpe, hp, STATUS_OK, Trials\n'), ((10743, 10759), 'json.dumps', 'json.dumps', (['real'], {}), '(real)\n', (10753, 10759), False, 'import json\n'), ((10822, 10844), 'json.dumps', 'json.dumps', (['prediction'], {}), '(prediction)\n', (10832, 10844), False, 'import json\n'), ((10640, 10656), 'numpy.array', 'np.array', (['y_test'], {}), '(y_test)\n', (10648, 10656), True, 'import numpy as np\n'), ((6457, 6495), 'imblearn.over_sampling.SMOTE', 'SMOTE', ([], {'random_state': '(621)', 'k_neighbors': '(3)'}), '(random_state=621, k_neighbors=3)\n', (6462, 6495), False, 'from imblearn.over_sampling import SMOTE\n'), ((6692, 6741), 'pandas.concat', 'pd.concat', (['[room_data_smote, smote_split]'], {'axis': '(1)'}), '([room_data_smote, smote_split], axis=1)\n', (6701, 6741), True, 'import pandas as pd\n'), ((8656, 8673), 'numpy.random.RandomState', 'RandomState', (['seed'], {}), '(seed)\n', (8667, 8673), False, 'from numpy.random import RandomState\n')] |
import os
from setuptools import setup
# Read the version
g = {}
with open(os.path.join("editorconfig", "version.py"), "rt") as fp:
exec(fp.read(), g)
v = g['VERSION']
version = ".".join(str(x) for x in v[:3])
if v[3] != "final":
version += "-" + v[3]
setup(
name='EditorConfig',
version=version,
author='EditorConfig Team',
packages=['editorconfig'],
url='http://editorconfig.org/',
license='python',
description='EditorConfig File Locator and Interpreter for Python',
long_description=open('README.rst').read(),
entry_points = {
'console_scripts': [
'editorconfig = editorconfig.__main__:main',
]
},
classifiers=[
'License :: OSI Approved :: Python Software Foundation License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
| [
"os.path.join"
]
| [((76, 118), 'os.path.join', 'os.path.join', (['"""editorconfig"""', '"""version.py"""'], {}), "('editorconfig', 'version.py')\n", (88, 118), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""Tests for sktime annotators."""
import pandas as pd
import pytest
from sktime.registry import all_estimators
from sktime.utils._testing.estimator_checks import _make_args
ALL_ANNOTATORS = all_estimators(estimator_types="series-annotator", return_names=False)
@pytest.mark.parametrize("Estimator", ALL_ANNOTATORS)
def test_output_type(Estimator):
"""Test annotator output type."""
estimator = Estimator.create_test_instance()
args = _make_args(estimator, "fit")
estimator.fit(*args)
args = _make_args(estimator, "predict")
y_pred = estimator.predict(*args)
assert isinstance(y_pred, pd.Series)
| [
"pytest.mark.parametrize",
"sktime.utils._testing.estimator_checks._make_args",
"sktime.registry.all_estimators"
]
| [((218, 288), 'sktime.registry.all_estimators', 'all_estimators', ([], {'estimator_types': '"""series-annotator"""', 'return_names': '(False)'}), "(estimator_types='series-annotator', return_names=False)\n", (232, 288), False, 'from sktime.registry import all_estimators\n'), ((292, 344), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""Estimator"""', 'ALL_ANNOTATORS'], {}), "('Estimator', ALL_ANNOTATORS)\n", (315, 344), False, 'import pytest\n'), ((477, 505), 'sktime.utils._testing.estimator_checks._make_args', '_make_args', (['estimator', '"""fit"""'], {}), "(estimator, 'fit')\n", (487, 505), False, 'from sktime.utils._testing.estimator_checks import _make_args\n'), ((542, 574), 'sktime.utils._testing.estimator_checks._make_args', '_make_args', (['estimator', '"""predict"""'], {}), "(estimator, 'predict')\n", (552, 574), False, 'from sktime.utils._testing.estimator_checks import _make_args\n')] |
#!/usr/bin/env python
from __future__ import print_function
from kaldi.segmentation import NnetSAD, SegmentationProcessor
from kaldi.nnet3 import NnetSimpleComputationOptions
from kaldi.util.table import SequentialMatrixReader
# Construct SAD
model = NnetSAD.read_model("final.raw")
post = NnetSAD.read_average_posteriors("post_output.vec")
transform = NnetSAD.make_sad_transform(post)
graph = NnetSAD.make_sad_graph()
decodable_opts = NnetSimpleComputationOptions()
decodable_opts.extra_left_context = 79
decodable_opts.extra_right_context = 21
decodable_opts.extra_left_context_initial = 0
decodable_opts.extra_right_context_final = 0
decodable_opts.frames_per_chunk = 150
decodable_opts.acoustic_scale = 0.3
sad = NnetSAD(model, transform, graph, decodable_opts=decodable_opts)
seg = SegmentationProcessor(target_labels=[2])
# Define feature pipeline as a Kaldi rspecifier
feats_rspec = "ark:compute-mfcc-feats --config=mfcc.conf scp:wav.scp ark:- |"
# Segment
with SequentialMatrixReader(feats_rspec) as f, open ("segments", "w") as s:
for key, feats in f:
out = sad.segment(feats)
segments, stats = seg.process(out["alignment"])
seg.write(key, segments, s)
print("segments:", segments, flush=True)
print("stats:", stats, flush=True)
print("global stats:", seg.stats, flush=True)
| [
"kaldi.nnet3.NnetSimpleComputationOptions",
"kaldi.segmentation.NnetSAD.make_sad_transform",
"kaldi.util.table.SequentialMatrixReader",
"kaldi.segmentation.NnetSAD.read_average_posteriors",
"kaldi.segmentation.NnetSAD",
"kaldi.segmentation.SegmentationProcessor",
"kaldi.segmentation.NnetSAD.read_model",
"kaldi.segmentation.NnetSAD.make_sad_graph"
]
| [((254, 285), 'kaldi.segmentation.NnetSAD.read_model', 'NnetSAD.read_model', (['"""final.raw"""'], {}), "('final.raw')\n", (272, 285), False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((293, 343), 'kaldi.segmentation.NnetSAD.read_average_posteriors', 'NnetSAD.read_average_posteriors', (['"""post_output.vec"""'], {}), "('post_output.vec')\n", (324, 343), False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((356, 388), 'kaldi.segmentation.NnetSAD.make_sad_transform', 'NnetSAD.make_sad_transform', (['post'], {}), '(post)\n', (382, 388), False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((397, 421), 'kaldi.segmentation.NnetSAD.make_sad_graph', 'NnetSAD.make_sad_graph', ([], {}), '()\n', (419, 421), False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((439, 469), 'kaldi.nnet3.NnetSimpleComputationOptions', 'NnetSimpleComputationOptions', ([], {}), '()\n', (467, 469), False, 'from kaldi.nnet3 import NnetSimpleComputationOptions\n'), ((720, 783), 'kaldi.segmentation.NnetSAD', 'NnetSAD', (['model', 'transform', 'graph'], {'decodable_opts': 'decodable_opts'}), '(model, transform, graph, decodable_opts=decodable_opts)\n', (727, 783), False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((790, 830), 'kaldi.segmentation.SegmentationProcessor', 'SegmentationProcessor', ([], {'target_labels': '[2]'}), '(target_labels=[2])\n', (811, 830), False, 'from kaldi.segmentation import NnetSAD, SegmentationProcessor\n'), ((974, 1009), 'kaldi.util.table.SequentialMatrixReader', 'SequentialMatrixReader', (['feats_rspec'], {}), '(feats_rspec)\n', (996, 1009), False, 'from kaldi.util.table import SequentialMatrixReader\n')] |
"""
Comparison between the efficiency of the Boyer-Moore algorithm and the naive substring search algorithm.
The runtimes for both algorithms are plotted on the same axes.
"""
import matplotlib.pyplot as plt
import numpy as np
import string
import time
import random
from bm_alg import boyer_moore_match, naive_match
# number of test cases for each iteration
TEST_CASES = 100
# test cases generated based on this pattern (vary_n)
PATTERN = 'ICT1002 is a really great module!'
# test cases generated based on this text (vary_m)
TEXT = PATTERN * 50
def generate_test_cases(pattern, length, k):
"""
Generates <k> test cases with text of length <length> containing <pattern>
Args:
pattern (str): A pattern within the text.
        length (int): The length of the text to generate
k (int): The number of test cases
Returns:
A list of test cases, i.e. strings that contain <pattern>
"""
result = []
for _ in range(k):
text = pattern
while len(text) < length:
direction = random.choice((0, 1))
# 0 --> Left
if direction == 0:
text = random.choice(string.ascii_lowercase) + text
# 1 --> Right
else:
text = text + random.choice(string.ascii_lowercase)
result.append(text)
return result
def vary_n(max_n):
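    """Plot the median runtime of the naive and Boyer-Moore algorithms as the text length n varies (pattern fixed)."""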
x = [n for n in range(1, max_n + 1)]
y_bm = []
y_naive = []
for n in x:
print('n =', n)
bm_result = []
naive_result = []
if n >= len(PATTERN):
# generate test cases of length n, which contain PATTERN
test_cases = generate_test_cases(PATTERN, n, TEST_CASES)
else:
# generate test cases of length n, which do not (and can not possibly) contain PATTERN
test_cases = generate_test_cases('', n, TEST_CASES)
for test_case in test_cases:
start = time.time()
naive_match(test_case, PATTERN)
naive_result.append(time.time() - start)
start = time.time()
boyer_moore_match(test_case, PATTERN)
bm_result.append(time.time() - start)
# obtain median runtime (mean is affected by outliers)
y_naive.append(sorted(naive_result)[TEST_CASES // 2])
y_bm.append(sorted(bm_result)[TEST_CASES // 2])
plt.plot(x, y_naive, label="Naive Algorithm")
plt.plot(x, y_bm, label="Boyer-Moore Algorithm")
plt.xlabel("n")
plt.ylabel("Runtime")
plt.title("Substring Search Algorithm Efficiency")
plt.legend()
plt.show()
def vary_m(max_m):
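    """Plot the median runtime of the naive and Boyer-Moore algorithms as the pattern length m varies (text fixed)."""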
x = [m for m in range(1, max_m + 1)]
y_bm = []
y_naive = []
for m in x:
print('m =', m)
bm_result = []
naive_result = []
# generate test cases of length n
test_cases = generate_test_cases('', m, TEST_CASES)
for test_case in test_cases:
start = time.time()
naive_match(TEXT, test_case)
naive_result.append(time.time() - start)
start = time.time()
boyer_moore_match(TEXT, test_case)
bm_result.append(time.time() - start)
# obtain median runtime (mean is affected by outliers)
y_naive.append(sorted(naive_result)[TEST_CASES // 2])
y_bm.append(sorted(bm_result)[TEST_CASES // 2])
plt.plot(x, y_naive, label="Naive Algorithm")
plt.plot(x, y_bm, label="Boyer-Moore Algorithm")
plt.xlabel("m")
plt.ylabel("Runtime")
plt.title("Substring Search Algorithm Efficiency")
plt.legend()
plt.show()
def main():
done = False
print("m = Length of pattern\nn = Length of text\n")
print("1. Constant m, vary n")
print("2. Constant n, vary m")
print("3. Quit\n")
while not done:
choice = input("Your choice: ")
if choice == '1':
max_n = input("Upper limit of n: ")
while not (max_n.isnumeric() and int(max_n) > 1):
print("That is not a valid number.")
max_n = input("Upper limit of n: ")
vary_n(int(max_n))
elif choice == '2':
max_m = input("Upper limit of m: ")
while not (max_m.isnumeric() and int(max_m) > 1):
print("That is not a valid number.")
max_m = input("Upper limit of m: ")
vary_m(int(max_m))
elif choice == '3':
done = True
else:
print("That is not a valid option.")
if __name__ == '__main__':
main()
| [
"bm_alg.naive_match",
"random.choice",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.title",
"bm_alg.boyer_moore_match",
"time.time",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
]
| [((2371, 2416), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_naive'], {'label': '"""Naive Algorithm"""'}), "(x, y_naive, label='Naive Algorithm')\n", (2379, 2416), True, 'import matplotlib.pyplot as plt\n'), ((2421, 2469), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_bm'], {'label': '"""Boyer-Moore Algorithm"""'}), "(x, y_bm, label='Boyer-Moore Algorithm')\n", (2429, 2469), True, 'import matplotlib.pyplot as plt\n'), ((2474, 2489), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""n"""'], {}), "('n')\n", (2484, 2489), True, 'import matplotlib.pyplot as plt\n'), ((2494, 2515), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Runtime"""'], {}), "('Runtime')\n", (2504, 2515), True, 'import matplotlib.pyplot as plt\n'), ((2520, 2570), 'matplotlib.pyplot.title', 'plt.title', (['"""Substring Search Algorithm Efficiency"""'], {}), "('Substring Search Algorithm Efficiency')\n", (2529, 2570), True, 'import matplotlib.pyplot as plt\n'), ((2575, 2587), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2585, 2587), True, 'import matplotlib.pyplot as plt\n'), ((2592, 2602), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2600, 2602), True, 'import matplotlib.pyplot as plt\n'), ((3370, 3415), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_naive'], {'label': '"""Naive Algorithm"""'}), "(x, y_naive, label='Naive Algorithm')\n", (3378, 3415), True, 'import matplotlib.pyplot as plt\n'), ((3420, 3468), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y_bm'], {'label': '"""Boyer-Moore Algorithm"""'}), "(x, y_bm, label='Boyer-Moore Algorithm')\n", (3428, 3468), True, 'import matplotlib.pyplot as plt\n'), ((3473, 3488), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""m"""'], {}), "('m')\n", (3483, 3488), True, 'import matplotlib.pyplot as plt\n'), ((3493, 3514), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Runtime"""'], {}), "('Runtime')\n", (3503, 3514), True, 'import matplotlib.pyplot as plt\n'), ((3519, 3569), 'matplotlib.pyplot.title', 'plt.title', (['"""Substring Search Algorithm Efficiency"""'], {}), "('Substring Search Algorithm Efficiency')\n", (3528, 3569), True, 'import matplotlib.pyplot as plt\n'), ((3574, 3586), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (3584, 3586), True, 'import matplotlib.pyplot as plt\n'), ((3591, 3601), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3599, 3601), True, 'import matplotlib.pyplot as plt\n'), ((1047, 1068), 'random.choice', 'random.choice', (['(0, 1)'], {}), '((0, 1))\n', (1060, 1068), False, 'import random\n'), ((1942, 1953), 'time.time', 'time.time', ([], {}), '()\n', (1951, 1953), False, 'import time\n'), ((1966, 1997), 'bm_alg.naive_match', 'naive_match', (['test_case', 'PATTERN'], {}), '(test_case, PATTERN)\n', (1977, 1997), False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((2072, 2083), 'time.time', 'time.time', ([], {}), '()\n', (2081, 2083), False, 'import time\n'), ((2096, 2133), 'bm_alg.boyer_moore_match', 'boyer_moore_match', (['test_case', 'PATTERN'], {}), '(test_case, PATTERN)\n', (2113, 2133), False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((2947, 2958), 'time.time', 'time.time', ([], {}), '()\n', (2956, 2958), False, 'import time\n'), ((2971, 2999), 'bm_alg.naive_match', 'naive_match', (['TEXT', 'test_case'], {}), '(TEXT, test_case)\n', (2982, 2999), False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((3074, 3085), 'time.time', 'time.time', ([], {}), '()\n', (3083, 3085), False, 'import time\n'), ((3098, 3132), 'bm_alg.boyer_moore_match', 
'boyer_moore_match', (['TEXT', 'test_case'], {}), '(TEXT, test_case)\n', (3115, 3132), False, 'from bm_alg import boyer_moore_match, naive_match\n'), ((1149, 1186), 'random.choice', 'random.choice', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (1162, 1186), False, 'import random\n'), ((1269, 1306), 'random.choice', 'random.choice', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (1282, 1306), False, 'import random\n'), ((2030, 2041), 'time.time', 'time.time', ([], {}), '()\n', (2039, 2041), False, 'import time\n'), ((2163, 2174), 'time.time', 'time.time', ([], {}), '()\n', (2172, 2174), False, 'import time\n'), ((3032, 3043), 'time.time', 'time.time', ([], {}), '()\n', (3041, 3043), False, 'import time\n'), ((3162, 3173), 'time.time', 'time.time', ([], {}), '()\n', (3171, 3173), False, 'import time\n')] |
import json
from typing import Type, TYPE_CHECKING
from django.core.exceptions import ObjectDoesNotExist
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_page
from rest_framework import viewsets, filters
from rest_framework.exceptions import NotFound
from rest_framework.negotiation import BaseContentNegotiation
from rest_framework.permissions import IsAuthenticatedOrReadOnly, AllowAny
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.views import APIView
from indicators.models import Variable, DataViz
from indicators.utils import get_geog_model
from indicators.views import GeoJSONRenderer
from maps.models import DataLayer
from maps.serializers import DataLayerSerializer, DataLayerDetailsSerializer
from profiles.settings import VIEW_CACHE_TTL
if TYPE_CHECKING:
from geo.models import AdminRegion
from indicators.models.viz import MiniMap
class DataLayerViewSet(viewsets.ModelViewSet):
queryset = DataLayer.objects.all()
serializer_class = DataLayerSerializer
permission_classes = [IsAuthenticatedOrReadOnly, ]
filter_backends = [filters.SearchFilter, ]
def get_serializer_class(self):
if self.action == 'list':
return DataLayerSerializer
return DataLayerDetailsSerializer
media_type = 'application/geo+json'
format = 'geojson'
def render(self, data, media_type=None, renderer_context=None):
return json.dumps(data)
class GeoJSONContentNegotiation(BaseContentNegotiation):
"""
Custom content negotiation scheme for GeoJSON files.
`GeoJSONRenderer` is used for downloading geojson files
`JSONRenderer` is used for ajax calls.
"""
def select_parser(self, request, parsers):
return super(GeoJSONContentNegotiation, self).select_parser(request, parsers)
def select_renderer(self, request: Request, renderers, format_suffix=None):
renderer = renderers[0]
if request.query_params.get('download', False):
renderer = GeoJSONRenderer()
return renderer, renderer.media_type
class GeoJSONDataLayerView(APIView):
permission_classes = [AllowAny, ]
content_negotiation_class = GeoJSONContentNegotiation
@method_decorator(cache_page(VIEW_CACHE_TTL))
def get(self, request: Request, map_slug=None):
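        """Return the data layer identified by `map_slug` as GeoJSON, served as a file download when requested."""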
try:
data_layer: DataLayer = DataLayer.objects.get(slug=map_slug)
geojson = data_layer.as_geojson()
except KeyError as e:
# when the geog is wrong todo: make 400 malformed with info on available geo types
raise NotFound
except ObjectDoesNotExist as e:
raise NotFound
if request.query_params.get('download', False):
headers = {
'Content-Disposition': f'attachment; filename="{map_slug}.geojson"'
}
return Response(geojson, headers=headers, content_type='application/geo+json')
return Response(geojson)
| [
"json.dumps",
"rest_framework.response.Response",
"django.views.decorators.cache.cache_page",
"indicators.views.GeoJSONRenderer",
"maps.models.DataLayer.objects.get",
"maps.models.DataLayer.objects.all"
]
| [((1032, 1055), 'maps.models.DataLayer.objects.all', 'DataLayer.objects.all', ([], {}), '()\n', (1053, 1055), False, 'from maps.models import DataLayer\n'), ((1501, 1517), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1511, 1517), False, 'import json\n'), ((3018, 3035), 'rest_framework.response.Response', 'Response', (['geojson'], {}), '(geojson)\n', (3026, 3035), False, 'from rest_framework.response import Response\n'), ((2301, 2327), 'django.views.decorators.cache.cache_page', 'cache_page', (['VIEW_CACHE_TTL'], {}), '(VIEW_CACHE_TTL)\n', (2311, 2327), False, 'from django.views.decorators.cache import cache_page\n'), ((2080, 2097), 'indicators.views.GeoJSONRenderer', 'GeoJSONRenderer', ([], {}), '()\n', (2095, 2097), False, 'from indicators.views import GeoJSONRenderer\n'), ((2430, 2466), 'maps.models.DataLayer.objects.get', 'DataLayer.objects.get', ([], {'slug': 'map_slug'}), '(slug=map_slug)\n', (2451, 2466), False, 'from maps.models import DataLayer\n'), ((2930, 3001), 'rest_framework.response.Response', 'Response', (['geojson'], {'headers': 'headers', 'content_type': '"""application/geo+json"""'}), "(geojson, headers=headers, content_type='application/geo+json')\n", (2938, 3001), False, 'from rest_framework.response import Response\n')] |
# -*- coding: utf-8 -*-
"""
Provide download function by request
"""
from datetime import datetime
import logging
import time
import urllib.parse
import requests
from bs4 import BeautifulSoup
class Throttle(object):
"""Throttle downloading by sleeping between requests to same domain."""
def __init__(self, delay):
# amount of delay between downloads for each domain
self.delay = delay
# timestamp of when a domain was last accessed
self.domains = {}
def wait(self, url):
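        """Sleep long enough to respect `delay` since the last request to this URL's domain, then record the access time."""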
domain = urllib.parse.urlparse(url).netloc
last_accessed = self.domains.get(domain)
if self.delay > 0 and last_accessed is not None:
sleep_secs = self.delay - (datetime.now() - last_accessed).seconds
if sleep_secs > 0:
time.sleep(sleep_secs)
self.domains[domain] = datetime.now()
class Downloader(object):
"""Convenient download of web pages or caller to call api.
Args:
delay: Interval between downloads (seconds)
num_retries: Number of retries when downloading errors
timeout: Download timeout
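        user_agent: User-Agent header sent with every request
        proxies: Proxy configuration used by the session
        cache: Optional cache object storing results keyed by URL
        auth: Optional authentication for the session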
"""
def __init__(self, delay=5, user_agent='awsl', proxies=None, num_retries=1,
timeout=60, cache=None, auth=None):
self.session = requests.Session()
self.session.headers.update({'user-agent': user_agent})
self.session.proxies = proxies
self.session.auth = auth
self.throttle = Throttle(delay)
self.num_retries = num_retries
self.timeout = timeout
self.cache = cache
def get_from_cache(self, request):
"""Try to get the result of the request from the cache."""
result = None
if self.cache:
result = self.cache.get(request.url)
if result and self.num_retries > 0 and 500 <= result['code'] < 600:
result = None
return result
def prepare_request(self, url, params=None):
"""Build requests based on the provided url and parameters."""
request = requests.Request('GET', url, params=params)
return self.session.prepare_request(request)
def send_request(self, request, num_retries):
"""Send request and return response object."""
self.throttle.wait(request.url)
try:
logging.info('Downloading: %s' % request.url)
response = self.session.send(request, timeout=self.timeout)
response.raise_for_status()
except requests.exceptions.HTTPError as e:
logging.warn('Download error: %s' % e)
if num_retries > 0 and 500 <= response.status_code < 600:
return self.send_request(request, num_retries - 1)
except requests.exceptions.RequestException:
logging.error('Download faild: %s' % request.url)
response = None
return response
def text(self, url, params=None, encoding=None):
"""Download web content in text format or html."""
request = self.prepare_request(url, params)
result = self.get_from_cache(request)
if result is None:
response = self.send_request(request, self.num_retries)
if response:
if encoding:
response.encoding = encoding
result = {'text': response.text, 'code': response.status_code}
if self.cache:
self.cache[request.url] = result
return result['text']
def json(self, url, params=None):
"""Access the api and return the json object."""
request = self.prepare_request(url, params)
result = self.get_from_cache(request)
if result is None:
response = self.send_request(request, self.num_retries)
if response:
result = {'json': response.json(), 'code': response.status_code}
if self.cache:
self.cache[request.url] = result
return result['json']
| [
"logging.warn",
"requests.Session",
"time.sleep",
"requests.Request",
"datetime.datetime.now",
"logging.info",
"logging.error"
]
| [((891, 905), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (903, 905), False, 'from datetime import datetime\n'), ((1334, 1352), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1350, 1352), False, 'import requests\n'), ((2118, 2161), 'requests.Request', 'requests.Request', (['"""GET"""', 'url'], {'params': 'params'}), "('GET', url, params=params)\n", (2134, 2161), False, 'import requests\n'), ((2393, 2438), 'logging.info', 'logging.info', (["('Downloading: %s' % request.url)"], {}), "('Downloading: %s' % request.url)\n", (2405, 2438), False, 'import logging\n'), ((836, 858), 'time.sleep', 'time.sleep', (['sleep_secs'], {}), '(sleep_secs)\n', (846, 858), False, 'import time\n'), ((2618, 2656), 'logging.warn', 'logging.warn', (["('Download error: %s' % e)"], {}), "('Download error: %s' % e)\n", (2630, 2656), False, 'import logging\n'), ((2863, 2912), 'logging.error', 'logging.error', (["('Download faild: %s' % request.url)"], {}), "('Download faild: %s' % request.url)\n", (2876, 2912), False, 'import logging\n'), ((747, 761), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (759, 761), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (C) 2017 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
import requests
from requests.adapters import HTTPAdapter
from .exceptions import KeycloakConnectionError
class ConnectionManager(object):
"""
Represents a simple server connection.
:param base_url: (str) The server URL.
:param headers: (dict) The header parameters of the requests to the server.
:param timeout: (int) Timeout to use for requests to the server.
:param verify: (bool) Verify server SSL.
:param proxies: (dict) The proxies servers requests is sent by.
"""
def __init__(self, base_url, headers={}, timeout=60, verify=True, proxies=None):
self._base_url = base_url
self._headers = headers
self._timeout = timeout
self._verify = verify
self._s = requests.Session()
self._s.auth = lambda x: x # don't let requests add auth headers
# retry once to reset connection with Keycloak after tomcat's ConnectionTimeout
# see https://github.com/marcospereirampj/python-keycloak/issues/36
for protocol in ("https://", "http://"):
adapter = HTTPAdapter(max_retries=1)
# adds POST to retry whitelist
allowed_methods = set(adapter.max_retries.allowed_methods)
allowed_methods.add("POST")
adapter.max_retries.allowed_methods = frozenset(allowed_methods)
self._s.mount(protocol, adapter)
if proxies:
self._s.proxies.update(proxies)
def __del__(self):
self._s.close()
@property
def base_url(self):
"""Return base url in use for requests to the server."""
return self._base_url
@base_url.setter
def base_url(self, value):
""" """
self._base_url = value
@property
def timeout(self):
"""Return timeout in use for request to the server."""
return self._timeout
@timeout.setter
def timeout(self, value):
""" """
self._timeout = value
@property
def verify(self):
"""Return verify in use for request to the server."""
return self._verify
@verify.setter
def verify(self, value):
""" """
self._verify = value
@property
def headers(self):
"""Return header request to the server."""
return self._headers
@headers.setter
def headers(self, value):
""" """
self._headers = value
def param_headers(self, key):
"""
Return a specific header parameter.
:param key: (str) Header parameters key.
:returns: If the header parameters exist, return its value.
"""
return self.headers.get(key)
def clean_headers(self):
"""Clear header parameters."""
self.headers = {}
def exist_param_headers(self, key):
"""Check if the parameter exists in the header.
:param key: (str) Header parameters key.
:returns: If the header parameters exist, return True.
"""
return self.param_headers(key) is not None
def add_param_headers(self, key, value):
"""Add a single parameter inside the header.
:param key: (str) Header parameters key.
:param value: (str) Value to be added.
"""
self.headers[key] = value
def del_param_headers(self, key):
"""Remove a specific parameter.
:param key: (str) Key of the header parameters.
"""
self.headers.pop(key, None)
def raw_get(self, path, **kwargs):
"""Submit get request to the path.
:param path: (str) Path for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.get(
urljoin(self.base_url, path),
params=kwargs,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_post(self, path, data, **kwargs):
"""Submit post request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.post(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_put(self, path, data, **kwargs):
"""Submit put request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.put(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
def raw_delete(self, path, data={}, **kwargs):
"""Submit delete request to the path.
:param path: (str) Path for request.
:param data: (dict) Payload for request.
:returns: Response the request.
:raises: HttpError Can't connect to server.
"""
try:
return self._s.delete(
urljoin(self.base_url, path),
params=kwargs,
data=data,
headers=self.headers,
timeout=self.timeout,
verify=self.verify,
)
except Exception as e:
raise KeycloakConnectionError("Can't connect to server (%s)" % e)
| [
"urlparse.urljoin",
"requests.adapters.HTTPAdapter",
"requests.Session"
]
| [((1981, 1999), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1997, 1999), False, 'import requests\n'), ((2311, 2337), 'requests.adapters.HTTPAdapter', 'HTTPAdapter', ([], {'max_retries': '(1)'}), '(max_retries=1)\n', (2322, 2337), False, 'from requests.adapters import HTTPAdapter\n'), ((4972, 5000), 'urlparse.urljoin', 'urljoin', (['self.base_url', 'path'], {}), '(self.base_url, path)\n', (4979, 5000), False, 'from urlparse import urljoin\n'), ((5620, 5648), 'urlparse.urljoin', 'urljoin', (['self.base_url', 'path'], {}), '(self.base_url, path)\n', (5627, 5648), False, 'from urlparse import urljoin\n'), ((6292, 6320), 'urlparse.urljoin', 'urljoin', (['self.base_url', 'path'], {}), '(self.base_url, path)\n', (6299, 6320), False, 'from urlparse import urljoin\n'), ((6976, 7004), 'urlparse.urljoin', 'urljoin', (['self.base_url', 'path'], {}), '(self.base_url, path)\n', (6983, 7004), False, 'from urlparse import urljoin\n')] |
import yaml
from ruamel.yaml import YAML
from ruamel.yaml.error import YAMLError
try:
from yaml import CSafeLoader as SafeLoader
except ImportError:
from yaml import SafeLoader
from dvc.exceptions import StageFileCorruptedError
from dvc.utils.compat import open
def load_stage_file(path):
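    """Load the stage file at <path> and return its parsed contents."""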
with open(path, "r", encoding="utf-8") as fd:
return parse_stage(fd.read(), path)
def parse_stage(text, path):
try:
return yaml.load(text, Loader=SafeLoader) or {}
except yaml.error.YAMLError as exc:
raise StageFileCorruptedError(path, cause=exc)
def parse_stage_for_update(text, path):
"""Parses text into Python structure.
Unlike `parse_stage()` this returns ordered dicts, values have special
attributes to store comments and line breaks. This allows us to preserve
all of those upon dump.
This one is, however, several times slower than simple `parse_stage()`.
"""
try:
yaml = YAML()
return yaml.load(text) or {}
except YAMLError as exc:
raise StageFileCorruptedError(path, cause=exc)
def dump_stage_file(path, data):
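    """Serialize <data> to YAML and write it to <path>."""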
with open(path, "w", encoding="utf-8") as fd:
yaml = YAML()
yaml.default_flow_style = False
yaml.dump(data, fd)
| [
"dvc.utils.compat.open",
"dvc.exceptions.StageFileCorruptedError",
"yaml.dump",
"yaml.load",
"ruamel.yaml.YAML"
]
| [((310, 343), 'dvc.utils.compat.open', 'open', (['path', '"""r"""'], {'encoding': '"""utf-8"""'}), "(path, 'r', encoding='utf-8')\n", (314, 343), False, 'from dvc.utils.compat import open\n'), ((960, 966), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (964, 966), False, 'from ruamel.yaml import YAML\n'), ((1132, 1165), 'dvc.utils.compat.open', 'open', (['path', '"""w"""'], {'encoding': '"""utf-8"""'}), "(path, 'w', encoding='utf-8')\n", (1136, 1165), False, 'from dvc.utils.compat import open\n'), ((1188, 1194), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (1192, 1194), False, 'from ruamel.yaml import YAML\n'), ((1243, 1262), 'yaml.dump', 'yaml.dump', (['data', 'fd'], {}), '(data, fd)\n', (1252, 1262), False, 'import yaml\n'), ((450, 484), 'yaml.load', 'yaml.load', (['text'], {'Loader': 'SafeLoader'}), '(text, Loader=SafeLoader)\n', (459, 484), False, 'import yaml\n'), ((545, 585), 'dvc.exceptions.StageFileCorruptedError', 'StageFileCorruptedError', (['path'], {'cause': 'exc'}), '(path, cause=exc)\n', (568, 585), False, 'from dvc.exceptions import StageFileCorruptedError\n'), ((982, 997), 'yaml.load', 'yaml.load', (['text'], {}), '(text)\n', (991, 997), False, 'import yaml\n'), ((1047, 1087), 'dvc.exceptions.StageFileCorruptedError', 'StageFileCorruptedError', (['path'], {'cause': 'exc'}), '(path, cause=exc)\n', (1070, 1087), False, 'from dvc.exceptions import StageFileCorruptedError\n')] |
import numpy as np
from pyad.nn import NeuralNet
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
np.random.seed(0)
data = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(
data.data, data.target, train_size=0.8, random_state=0
)
nn = NeuralNet(loss_fn='cross_entropy')
nn.add_layer(X_train.shape[1], 100, activation='linear')
nn.add_layer(100, 100, activation='logistic')
nn.add_layer(100, 1 + np.max(y_train), activation='linear')
nn.train(
X_train, y_train, X_test, y_test,
batch_size=1, learning_rate=1e-3, epochs=20
)
print('Predictions:', nn.predict(X_test))
| [
"sklearn.model_selection.train_test_split",
"sklearn.datasets.load_breast_cancer",
"numpy.max",
"pyad.nn.NeuralNet",
"numpy.random.seed"
]
| [((151, 168), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (165, 168), True, 'import numpy as np\n'), ((176, 196), 'sklearn.datasets.load_breast_cancer', 'load_breast_cancer', ([], {}), '()\n', (194, 196), False, 'from sklearn.datasets import load_breast_cancer\n'), ((233, 305), 'sklearn.model_selection.train_test_split', 'train_test_split', (['data.data', 'data.target'], {'train_size': '(0.8)', 'random_state': '(0)'}), '(data.data, data.target, train_size=0.8, random_state=0)\n', (249, 305), False, 'from sklearn.model_selection import train_test_split\n'), ((318, 352), 'pyad.nn.NeuralNet', 'NeuralNet', ([], {'loss_fn': '"""cross_entropy"""'}), "(loss_fn='cross_entropy')\n", (327, 352), False, 'from pyad.nn import NeuralNet\n'), ((478, 493), 'numpy.max', 'np.max', (['y_train'], {}), '(y_train)\n', (484, 493), True, 'import numpy as np\n')] |
import ssl
import nltk
from textblob import TextBlob
from nltk.corpus import stopwords
# set SSL
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
pass
else:
ssl._create_default_https_context = _create_unverified_https_context
# download noun data (if required)
nltk.download('brown')
nltk.download('punkt')
nltk.download('stopwords')
def extract_nouns(sentence):
"""Extract the nouns from a sentence using the 'textblob' library."""
blob = TextBlob(sentence)
return blob.noun_phrases
def remove_stopwords(sentence):
"""Remove stopwords from a sentence and return the list of words."""
blob = TextBlob(sentence)
return [word for word in blob.words if word not in stopwords.words('english') and len(word)>2]
| [
"textblob.TextBlob",
"nltk.corpus.stopwords.words",
"nltk.download"
]
| [((322, 344), 'nltk.download', 'nltk.download', (['"""brown"""'], {}), "('brown')\n", (335, 344), False, 'import nltk\n'), ((345, 367), 'nltk.download', 'nltk.download', (['"""punkt"""'], {}), "('punkt')\n", (358, 367), False, 'import nltk\n'), ((368, 394), 'nltk.download', 'nltk.download', (['"""stopwords"""'], {}), "('stopwords')\n", (381, 394), False, 'import nltk\n'), ((510, 528), 'textblob.TextBlob', 'TextBlob', (['sentence'], {}), '(sentence)\n', (518, 528), False, 'from textblob import TextBlob\n'), ((675, 693), 'textblob.TextBlob', 'TextBlob', (['sentence'], {}), '(sentence)\n', (683, 693), False, 'from textblob import TextBlob\n'), ((749, 775), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (764, 775), False, 'from nltk.corpus import stopwords\n')] |
from django_celery_beat.models import PeriodicTask, IntervalSchedule
from django.core.management.base import BaseCommand
from django.db import IntegrityError
class Command(BaseCommand):
def handle(self, *args, **options):
try:
schedule_channel, created = IntervalSchedule.objects.get_or_create(
every=4,
period=IntervalSchedule.HOURS,
)
except IntegrityError as e:
pass
try:
schedule_video, created = IntervalSchedule.objects.get_or_create(
every=6,
period=IntervalSchedule.HOURS,
)
except IntegrityError as e:
pass
try:
PeriodicTask.objects.create(
interval=schedule_channel,
name='Scrape Channels',
task='toolbox.scraper.tasks.scrape_youtube_channels',
)
except IntegrityError as e:
pass
try:
PeriodicTask.objects.create(
interval=schedule_video,
name='Scrape Videos',
task='toolbox.scraper.tasks.scrape_youtube_videos',
)
except IntegrityError as e:
pass
| [
"django_celery_beat.models.PeriodicTask.objects.create",
"django_celery_beat.models.IntervalSchedule.objects.get_or_create"
]
| [((282, 360), 'django_celery_beat.models.IntervalSchedule.objects.get_or_create', 'IntervalSchedule.objects.get_or_create', ([], {'every': '(4)', 'period': 'IntervalSchedule.HOURS'}), '(every=4, period=IntervalSchedule.HOURS)\n', (320, 360), False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n'), ((573, 651), 'django_celery_beat.models.IntervalSchedule.objects.get_or_create', 'IntervalSchedule.objects.get_or_create', ([], {'every': '(6)', 'period': 'IntervalSchedule.HOURS'}), '(every=6, period=IntervalSchedule.HOURS)\n', (611, 651), False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n'), ((838, 975), 'django_celery_beat.models.PeriodicTask.objects.create', 'PeriodicTask.objects.create', ([], {'interval': 'schedule_channel', 'name': '"""Scrape Channels"""', 'task': '"""toolbox.scraper.tasks.scrape_youtube_channels"""'}), "(interval=schedule_channel, name=\n 'Scrape Channels', task='toolbox.scraper.tasks.scrape_youtube_channels')\n", (865, 975), False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n'), ((1143, 1273), 'django_celery_beat.models.PeriodicTask.objects.create', 'PeriodicTask.objects.create', ([], {'interval': 'schedule_video', 'name': '"""Scrape Videos"""', 'task': '"""toolbox.scraper.tasks.scrape_youtube_videos"""'}), "(interval=schedule_video, name='Scrape Videos',\n task='toolbox.scraper.tasks.scrape_youtube_videos')\n", (1170, 1273), False, 'from django_celery_beat.models import PeriodicTask, IntervalSchedule\n')] |
# required modules
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib import cm
from matplotlib.colors import Normalize
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.animation import FuncAnimation
# two-dimensional version
def plot_mse_loss_surface_2d(fig, ax, x, y, v=0.0, l2=0.0, w1_range=(-2, 2), w2_range=(2, -2)):
# create weight space
n_w = 100
w1 = np.linspace(w1_range[0], w1_range[1], num=n_w) # weight 1
w2 = np.linspace(w2_range[0], w2_range[1], num=n_w) # weight 2
ws_x, ws_y = np.meshgrid(w1, w2)
cost_ws = np.zeros((n_w, n_w)) # initialize cost matrix
# Fill the cost matrix for each combination of weights
for i in range(n_w):
for j in range(n_w):
y_pred = ws_x[i, j] * ws_y[i, j] * x
y_true = y
cost_ws[i, j] = 0.5 * (y_true - y_pred)**2 + \
0.5 * l2 * (ws_x[i, j]**2 + ws_y[i, j]**2) + 0.5 * v * (ws_x[i, j]*ws_y[i, j])**2
# compute gradients
dy, dx = np.gradient(cost_ws)
# plot vector space
skip = (slice(None, None, 5), slice(None, None, 5))
# fig, ax = plt.subplots(figsize=(8, 8))
#ax.contour(ws_x, ws_y, cost_ws, 200)
im = ax.imshow(cost_ws, extent=[ws_x.min(), ws_x.max(
), ws_y.min(), ws_y.max()], cmap=cm.coolwarm)
ax.quiver(ws_x[skip], ws_y[skip], -dx[skip], dy[skip], cost_ws[skip])
cbar = fig.colorbar(im, ax=ax)
# ax.set(aspect=1, title='Loss Surface')
cbar.ax.set_ylabel('$Loss$', fontsize=15)
ax.set_xlabel('$w_1$', fontsize=15)
ax.set_ylabel('$w_2$', fontsize=15)
# ax.grid()
# add saddle point
ax.scatter(0, 0, label='Saddle point', c='red', marker='*')
# ax.scatter(0,0, c='black', marker=r'$\rightarrow$', label='Negative gradient')
settings = (x, y, v, l2, w1_range, w2_range)
return ax, settings
# three-dimensional version
def plot_mse_loss_surface_3d(ax, x, y, v=0.0, l2=0.0, w1_range=(-2, 2), w2_range=(2, -2), angle=30):
# create weight space
n_w = 100
w1 = np.linspace(w1_range[0], w1_range[1], num=n_w) # weight 1
w2 = np.linspace(w2_range[0], w2_range[1], num=n_w) # weight 2
ws_x, ws_y = np.meshgrid(w1, w2)
cost_ws = np.zeros((n_w, n_w)) # initialize cost matrix
# Fill the cost matrix for each combination of weights
for i in range(n_w):
for j in range(n_w):
y_pred = ws_x[i, j] * ws_y[i, j] * x
y_true = y
cost_ws[i, j] = 0.5 * (y_true - y_pred)**2 + \
0.5 * l2 * (ws_x[i, j]**2 + ws_y[i, j]**2) + 0.5 * v * (ws_x[i, j]*ws_y[i, j])**2
X = ws_x
Y = ws_y
Z = cost_ws
#fig, ax = plt.subplots(figsize=(8, 8))
#ax = fig.add_subplot(1,1,1, projection='3d')
    # fourth dimension - colormap
# create colormap according to x-value (can use any 50x50 array)
color_dimension = Z # change to desired fourth dimension
minn, maxx = color_dimension.min(), color_dimension.max()
norm = Normalize(minn, maxx)
m = plt.cm.ScalarMappable(norm=norm, cmap='jet')
m.set_array([])
fcolors = m.to_rgba(color_dimension)
# plot
# fig = plt.figure(figsize=(8, 8))
# ax = fig.gca(projection='3d')
ax.set_zlim(0, 50)
ax.plot([0], [0], 'ro', c='red', marker='*', label='Saddle point')
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, facecolors=fcolors,
vmin=minn, vmax=maxx, shade=False, alpha=1)
ax.set_xlabel('$w_1$', fontsize=20)
ax.set_ylabel('$w_2$', fontsize=20)
ax.set_zlabel('$Loss$', fontsize=20)
settings = (x, y, v, l2, w1_range, w2_range)
ax.view_init(angle, 10)
return ax, settings
def plot_global_minimum_manifold_2d(ax, settings):
    # retrieve cached settings
x, y, v, l2, w1_range, w2_range = settings
n_w = 1000
man_w1 = np.linspace(w1_range[0], w1_range[1], num=n_w)
man_w2 = np.linspace(w2_range[0], w2_range[1], num=n_w)
man_ws_x, man_ws_y = np.meshgrid(man_w1, man_w2)
loss = 0.5 * y *(1 - man_ws_x * man_ws_y * x)**2 + \
0.5 * l2 * (man_ws_x**2 + man_ws_y**2) + 0.5 * v * (man_ws_x * man_ws_y)**2
min_loss = np.min(loss)
manifold_indices = loss < min_loss + 1e-5
manifold_x = man_ws_x[manifold_indices]
manifold_y = man_ws_y[manifold_indices]
# plot manifold of global minima
ax.scatter(manifold_y, manifold_x, s=0.1, c='cyan',
label='Manifold of global minima')
def plot_global_minimum_manifold_3d(ax, settings):
    # retrieve cached settings
x, y, v, l2, w1_range, w2_range = settings
n_w = 1000
man_w1 = np.linspace(w1_range[0], w1_range[1], num=n_w)
man_w2 = np.linspace(w2_range[0], w2_range[1], num=n_w)
man_ws_x, man_ws_y = np.meshgrid(man_w1, man_w2)
loss = 0.5 * y * (1 - man_ws_x * man_ws_y * x)**2 + \
0.5 * l2 * (man_ws_x**2 + man_ws_y**2) + 0.5 * v * (man_ws_x*man_ws_y)**2
min_loss = np.min(loss)
manifold_indices = loss < min_loss + 1e-5
manifold_x = man_ws_x[manifold_indices]
manifold_y = man_ws_y[manifold_indices]
pos = np.where(np.abs(np.diff(manifold_y)) >= 0.1)[0]+1
x = np.insert(manifold_x, pos, np.nan)
y = np.insert(manifold_y, pos, np.nan)
# plot manifold of global minima
#ax.scatter(manifold_y, manifold_x, 0, s=0.5, c='cyan',
# label='Manifold of global minima')
ax.plot(y, x, c='cyan',
label='Manifold of global minima')
def plot_optimiser_trajectory_2d(ax, weights, **kwargs):
w1_vals = weights['w1']
w2_vals = weights['w2']
ax.plot(w1_vals, w2_vals, **kwargs)
def plot_optimiser_trajectory_3d(ax, settings, weights, **kwargs):
x, y, v, l2, _, _ = settings
w1_vals = np.array(weights['w1'])
w2_vals = np.array(weights['w2'])
loss = 0.5 * y * (1 - w1_vals * w2_vals * x)**2 + \
0.5 * l2 * (w1_vals**2 + w2_vals**2) + 0.5 * v * (w1_vals*w2_vals)**2
ax.plot(w1_vals, w2_vals, loss, **kwargs)
def plot_optimiser_trajectory(x, y, weights, dim='2d', angle=45, manifold=False, **kwargs):
    if dim == '3d':
        fig = plt.figure(figsize=(8, 8))
        ax = fig.add_subplot(1, 1, 1, projection='3d')
        ax, settings = plot_mse_loss_surface_3d(ax, x, y, angle=angle)
        if manifold:
            plot_global_minimum_manifold_3d(ax, settings)
        plot_optimiser_trajectory_3d(ax, settings, weights, **kwargs)
    else:
        fig, ax = plt.subplots(figsize=(8, 8))
        ax, settings = plot_mse_loss_surface_2d(fig, ax, x, y)
        if manifold:
            plot_global_minimum_manifold_2d(ax, settings)
        plot_optimiser_trajectory_2d(ax, weights, **kwargs)
def plot_weight_norm(ax, weights, **kwargs):
w1_vals = np.array(weights['w1'])
w2_vals = np.array(weights['w2'])
epochs = np.arange(0, len(w1_vals), 1)
norms = np.sqrt(w1_vals**2 + w2_vals**2)
ax.set_xlabel('Epoch', fontsize=12)
ax.set_ylabel('Weight norm', fontsize=12)
ax.plot(epochs, norms, linewidth=2.0, **kwargs)
def animate_optimiser_trajectory_2d(i, ax, weights, **kwargs):
w1_vals = weights['w1']
w2_vals = weights['w2']
ax.plot(w1_vals[:i], w2_vals[:i], **kwargs)
return ax
def animate_optimiser_trajectory_3d(i, ax, settings, weights, **kwargs):
x, y, v, l2, _, _ = settings
w1_vals = np.array(weights['w1'])
w2_vals = np.array(weights['w2'])
loss = 0.5 * y * (1 - w1_vals * w2_vals * x)**2 + \
0.5 * l2 * (w1_vals**2 + w2_vals**2) + 0.5 * v * (w1_vals*w2_vals)**2
ax.plot(w1_vals[:i], w2_vals[:i], loss[:i], **kwargs)
return ax
def plot_optimiser_loss(x, y, v, l2, weights, **kwargs):
loss = []
epoch = np.arange(0, len(weights['w1']))
for w1, w2 in zip(weights['w1'], weights['w2']):
loss_val = 0.5 * y * (1 - w1 * w2 * x)**2 + 0.5 * l2 * (w1**2 + w2**2) + 0.5 * v * (w1 * w2)**2
loss.append(loss_val)
plt.plot(epoch, loss, **kwargs)
plt.xlabel('Epoch')
plt.ylabel('Loss')
def plot_interpolated_trajectory_2d(ax, w1_a, w2_a, w1_b, w2_b, start=0, end=1, **kwargs):
alpha = np.arange(start, end, 0.001)
w1_path = []
w2_path = []
for a in alpha:
ww1 = (1 - a) * w1_a + a * w1_b
ww2 = (1 - a) * w2_a + a * w2_b
w1_path.append(ww1)
w2_path.append(ww2)
ax.plot(w1_path, w2_path, **kwargs)
def plot_interpolated_trajectory_3d(ax, settings, w1_a, w2_a, w1_b, w2_b, start=0, end=1, **kwargs):
    x, y, v, l2, _, _ = settings
alpha = np.arange(start, end, 0.001)
w1_path = []
w2_path = []
loss = []
for a in alpha:
ww1 = (1 - a) * w1_a + a * w1_b
ww2 = (1 - a) * w2_a + a * w2_b
loss_val = 0.5 * (y - ww1 * ww2 * x)**2 + 0.5 * l2 * (ww1**2 + ww2**2)
loss.append(loss_val)
w1_path.append(ww1)
w2_path.append(ww2)
ax.plot(w1_path, w2_path, loss, **kwargs)
def plot_interpolated_loss(x, y, w1_a, w2_a, w1_b, w2_b, start=0, end=1, l2=0.0, **kwargs):
alpha = np.arange(start, end, 0.001)
interpolated_loss = []
for a in alpha:
ww1 = (1 - a) * w1_a + a * w1_b
ww2 = (1 - a) * w2_a + a * w2_b
loss_val = 0.5 * (y - ww1 * ww2 * x)**2 + 0.5 * l2 * (ww1**2 + ww2**2)
interpolated_loss.append(loss_val)
plt.plot(alpha, interpolated_loss, **kwargs)
plt.xlabel(r'$\alpha$')
plt.ylabel('Loss')
def plot_learning_dynamics(ax, weights, **kwargs):
epoch = np.arange(0, len(weights['w1']))
scores = []
for w1, w2 in zip(weights['w1'], weights['w2']):
scores.append(w1 * w2)
ax.plot(epoch, scores, **kwargs)
def animate_learning_dynamics(i, ax, weights, y, **kwargs):
n_epoch = len(weights['w1'])
epoch = np.arange(1, n_epoch)
scores = []
for w1, w2 in zip(weights['w1'], weights['w2']):
scores.append(w1 * w2)
ax.set_xlim((1, n_epoch))
ax.set_ylim((0, y))
ax.set_xlabel('Epoch', fontsize=15)
ax.set_ylabel('$w_2 \cdot w_1$', fontsize=15)
ax.plot(epoch[:i], scores[:i], **kwargs)
return ax
def animate_learning(weights, save=False, name='anim'):
gs = gridspec.GridSpec(2, 4)
gs.update(wspace=0.5)
fig = plt.figure(figsize=(12, 8))
ax1 = fig.add_subplot(gs[0, :2], )
ax2 = fig.add_subplot(gs[0, 2:], projection='3d')
ax3 = fig.add_subplot(gs[1, 1:3])
# ax1 = fig.add_subplot(2, 2, 1)
# ax2 = fig.add_subplot(2, 2, 2, projection = '3d')
# ax3 = fig.add_subplot(2, 2, 3)
# ax4 = fig.add_subplot(2, 2, 4)
    ax1, settings = plot_mse_loss_surface_2d(fig, ax1, 1, 1)
ax2, settings = plot_mse_loss_surface_3d(ax2, 1, 1, angle=60)
plot_global_minimum_manifold_2d(ax1, settings)
plot_global_minimum_manifold_3d(ax2, settings)
def update(i):
        animate_optimiser_trajectory_2d(
            i, ax1, weights, label='Gradient descent')
        animate_optimiser_trajectory_3d(
            i, ax2, settings, weights, label='Gradient descent')
animate_learning_dynamics(i, ax3, weights, 1)
# animate_weight_norm(i, ax4, scalarNet.history)
    # FuncAnimation will call the 'update' function for each frame
anim = FuncAnimation(fig, update, frames=100, interval=5, save_count=50)
# HTML(anim.to_html5_video())
if save:
anim.save(name + '.gif', dpi=80, writer='imagemagick')
plt.show()
| [
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"numpy.array",
"numpy.gradient",
"numpy.arange",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.diff",
"numpy.linspace",
"matplotlib.gridspec.GridSpec",
"numpy.min",
"numpy.meshgrid",
"matplotlib.pyplot.cm.ScalarMappable",
"matplotlib.colors.Normalize",
"matplotlib.pyplot.show",
"numpy.insert",
"matplotlib.animation.FuncAnimation",
"numpy.zeros",
"matplotlib.pyplot.figure"
]
| [((434, 480), 'numpy.linspace', 'np.linspace', (['w1_range[0]', 'w1_range[1]'], {'num': 'n_w'}), '(w1_range[0], w1_range[1], num=n_w)\n', (445, 480), True, 'import numpy as np\n'), ((502, 548), 'numpy.linspace', 'np.linspace', (['w2_range[0]', 'w2_range[1]'], {'num': 'n_w'}), '(w2_range[0], w2_range[1], num=n_w)\n', (513, 548), True, 'import numpy as np\n'), ((578, 597), 'numpy.meshgrid', 'np.meshgrid', (['w1', 'w2'], {}), '(w1, w2)\n', (589, 597), True, 'import numpy as np\n'), ((612, 632), 'numpy.zeros', 'np.zeros', (['(n_w, n_w)'], {}), '((n_w, n_w))\n', (620, 632), True, 'import numpy as np\n'), ((1040, 1060), 'numpy.gradient', 'np.gradient', (['cost_ws'], {}), '(cost_ws)\n', (1051, 1060), True, 'import numpy as np\n'), ((2062, 2108), 'numpy.linspace', 'np.linspace', (['w1_range[0]', 'w1_range[1]'], {'num': 'n_w'}), '(w1_range[0], w1_range[1], num=n_w)\n', (2073, 2108), True, 'import numpy as np\n'), ((2130, 2176), 'numpy.linspace', 'np.linspace', (['w2_range[0]', 'w2_range[1]'], {'num': 'n_w'}), '(w2_range[0], w2_range[1], num=n_w)\n', (2141, 2176), True, 'import numpy as np\n'), ((2206, 2225), 'numpy.meshgrid', 'np.meshgrid', (['w1', 'w2'], {}), '(w1, w2)\n', (2217, 2225), True, 'import numpy as np\n'), ((2240, 2260), 'numpy.zeros', 'np.zeros', (['(n_w, n_w)'], {}), '((n_w, n_w))\n', (2248, 2260), True, 'import numpy as np\n'), ((3007, 3028), 'matplotlib.colors.Normalize', 'Normalize', (['minn', 'maxx'], {}), '(minn, maxx)\n', (3016, 3028), False, 'from matplotlib.colors import Normalize\n'), ((3037, 3081), 'matplotlib.pyplot.cm.ScalarMappable', 'plt.cm.ScalarMappable', ([], {'norm': 'norm', 'cmap': '"""jet"""'}), "(norm=norm, cmap='jet')\n", (3058, 3081), True, 'import matplotlib.pyplot as plt\n'), ((3843, 3889), 'numpy.linspace', 'np.linspace', (['w1_range[0]', 'w1_range[1]'], {'num': 'n_w'}), '(w1_range[0], w1_range[1], num=n_w)\n', (3854, 3889), True, 'import numpy as np\n'), ((3903, 3949), 'numpy.linspace', 'np.linspace', (['w2_range[0]', 'w2_range[1]'], {'num': 'n_w'}), '(w2_range[0], w2_range[1], num=n_w)\n', (3914, 3949), True, 'import numpy as np\n'), ((3975, 4002), 'numpy.meshgrid', 'np.meshgrid', (['man_w1', 'man_w2'], {}), '(man_w1, man_w2)\n', (3986, 4002), True, 'import numpy as np\n'), ((4159, 4171), 'numpy.min', 'np.min', (['loss'], {}), '(loss)\n', (4165, 4171), True, 'import numpy as np\n'), ((4609, 4655), 'numpy.linspace', 'np.linspace', (['w1_range[0]', 'w1_range[1]'], {'num': 'n_w'}), '(w1_range[0], w1_range[1], num=n_w)\n', (4620, 4655), True, 'import numpy as np\n'), ((4669, 4715), 'numpy.linspace', 'np.linspace', (['w2_range[0]', 'w2_range[1]'], {'num': 'n_w'}), '(w2_range[0], w2_range[1], num=n_w)\n', (4680, 4715), True, 'import numpy as np\n'), ((4741, 4768), 'numpy.meshgrid', 'np.meshgrid', (['man_w1', 'man_w2'], {}), '(man_w1, man_w2)\n', (4752, 4768), True, 'import numpy as np\n'), ((4924, 4936), 'numpy.min', 'np.min', (['loss'], {}), '(loss)\n', (4930, 4936), True, 'import numpy as np\n'), ((5140, 5174), 'numpy.insert', 'np.insert', (['manifold_x', 'pos', 'np.nan'], {}), '(manifold_x, pos, np.nan)\n', (5149, 5174), True, 'import numpy as np\n'), ((5183, 5217), 'numpy.insert', 'np.insert', (['manifold_y', 'pos', 'np.nan'], {}), '(manifold_y, pos, np.nan)\n', (5192, 5217), True, 'import numpy as np\n'), ((5717, 5740), 'numpy.array', 'np.array', (["weights['w1']"], {}), "(weights['w1'])\n", (5725, 5740), True, 'import numpy as np\n'), ((5755, 5778), 'numpy.array', 'np.array', (["weights['w2']"], {}), "(weights['w2'])\n", (5763, 5778), True, 'import numpy 
as np\n'), ((6553, 6576), 'numpy.array', 'np.array', (["weights['w1']"], {}), "(weights['w1'])\n", (6561, 6576), True, 'import numpy as np\n'), ((6591, 6614), 'numpy.array', 'np.array', (["weights['w2']"], {}), "(weights['w2'])\n", (6599, 6614), True, 'import numpy as np\n'), ((6670, 6706), 'numpy.sqrt', 'np.sqrt', (['(w1_vals ** 2 + w2_vals ** 2)'], {}), '(w1_vals ** 2 + w2_vals ** 2)\n', (6677, 6706), True, 'import numpy as np\n'), ((7146, 7169), 'numpy.array', 'np.array', (["weights['w1']"], {}), "(weights['w1'])\n", (7154, 7169), True, 'import numpy as np\n'), ((7184, 7207), 'numpy.array', 'np.array', (["weights['w2']"], {}), "(weights['w2'])\n", (7192, 7207), True, 'import numpy as np\n'), ((7723, 7754), 'matplotlib.pyplot.plot', 'plt.plot', (['epoch', 'loss'], {}), '(epoch, loss, **kwargs)\n', (7731, 7754), True, 'import matplotlib.pyplot as plt\n'), ((7759, 7778), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {}), "('Epoch')\n", (7769, 7778), True, 'import matplotlib.pyplot as plt\n'), ((7783, 7801), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (7793, 7801), True, 'import matplotlib.pyplot as plt\n'), ((7907, 7935), 'numpy.arange', 'np.arange', (['start', 'end', '(0.001)'], {}), '(start, end, 0.001)\n', (7916, 7935), True, 'import numpy as np\n'), ((8307, 8335), 'numpy.arange', 'np.arange', (['start', 'end', '(0.001)'], {}), '(start, end, 0.001)\n', (8316, 8335), True, 'import numpy as np\n'), ((8793, 8821), 'numpy.arange', 'np.arange', (['start', 'end', '(0.001)'], {}), '(start, end, 0.001)\n', (8802, 8821), True, 'import numpy as np\n'), ((9075, 9119), 'matplotlib.pyplot.plot', 'plt.plot', (['alpha', 'interpolated_loss'], {}), '(alpha, interpolated_loss, **kwargs)\n', (9083, 9119), True, 'import matplotlib.pyplot as plt\n'), ((9124, 9147), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$\\\\alpha$"""'], {}), "('$\\\\alpha$')\n", (9134, 9147), True, 'import matplotlib.pyplot as plt\n'), ((9152, 9170), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Loss"""'], {}), "('Loss')\n", (9162, 9170), True, 'import matplotlib.pyplot as plt\n'), ((9513, 9534), 'numpy.arange', 'np.arange', (['(1)', 'n_epoch'], {}), '(1, n_epoch)\n', (9522, 9534), True, 'import numpy as np\n'), ((9905, 9928), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(2)', '(4)'], {}), '(2, 4)\n', (9922, 9928), True, 'import matplotlib.gridspec as gridspec\n'), ((9966, 9993), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (9976, 9993), True, 'import matplotlib.pyplot as plt\n'), ((10929, 10994), 'matplotlib.animation.FuncAnimation', 'FuncAnimation', (['fig', 'update'], {'frames': '(100)', 'interval': '(5)', 'save_count': '(50)'}), '(fig, update, frames=100, interval=5, save_count=50)\n', (10942, 10994), False, 'from matplotlib.animation import FuncAnimation\n'), ((11110, 11120), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11118, 11120), True, 'import matplotlib.pyplot as plt\n'), ((5098, 5117), 'numpy.diff', 'np.diff', (['manifold_y'], {}), '(manifold_y)\n', (5105, 5117), True, 'import numpy as np\n')] |
# Copyright 2019 Google LLC
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test layers from qconvolutional.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from numpy.testing import assert_allclose
import pytest
import tempfile
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
from tensorflow.keras.backend import clear_session
from qkeras import binary
from qkeras import ternary
from qkeras import QActivation
from qkeras import QDense
from qkeras import QConv1D
from qkeras import QConv2D
from qkeras import QSeparableConv2D
from qkeras import quantized_bits
from qkeras import quantized_relu
from qkeras.utils import model_save_quantized_weights
from qkeras.utils import quantized_model_from_json
from qkeras.utils import load_qmodel
from qkeras import print_qstats
from qkeras import extract_model_operations
# TODO(hzhuang):
# qoctave_conv test
# qbatchnorm test
def test_qnetwork():
x = x_in = Input((28, 28, 1), name='input')
x = QSeparableConv2D(
32, (2, 2),
strides=(2, 2),
depthwise_quantizer=binary(alpha=1.0),
pointwise_quantizer=quantized_bits(4, 0, 1, alpha=1.0),
depthwise_activation=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='conv2d_0_m')(
x)
x = QActivation('quantized_relu(6,2,1)', name='act0_m')(x)
x = QConv2D(
64, (3, 3),
strides=(2, 2),
kernel_quantizer=ternary(alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='conv2d_1_m',
activation=quantized_relu(6, 3, 1))(
x)
x = QConv2D(
64, (2, 2),
strides=(2, 2),
kernel_quantizer=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='conv2d_2_m')(
x)
x = QActivation('quantized_relu(6,4,1)', name='act2_m')(x)
x = Flatten(name='flatten')(x)
x = QDense(
10,
kernel_quantizer=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='dense')(
x)
x = Activation('softmax', name='softmax')(x)
model = Model(inputs=[x_in], outputs=[x])
# reload the model to ensure saving/loading works
json_string = model.to_json()
clear_session()
model = quantized_model_from_json(json_string)
# generate same output for weights
np.random.seed(42)
for layer in model.layers:
all_weights = []
for i, weights in enumerate(layer.get_weights()):
input_size = np.prod(layer.input.shape.as_list()[1:])
if input_size is None:
input_size = 576 * 10 # to avoid learning sizes
shape = weights.shape
assert input_size > 0, 'input size for {} {}'.format(layer.name, i)
# he normal initialization with a scale factor of 2.0
all_weights.append(
10.0 * np.random.normal(0.0, np.sqrt(2.0 / input_size), shape))
if all_weights:
layer.set_weights(all_weights)
# apply quantizer to weights
model_save_quantized_weights(model)
all_weights = []
for layer in model.layers:
for i, weights in enumerate(layer.get_weights()):
w = np.sum(weights)
all_weights.append(w)
all_weights = np.array(all_weights)
# test_qnetwork_weight_quantization
all_weights_signature = np.array(
[2., -6.75, -0.625, -2., -0.25, -56., 1.125, -1.625, -1.125])
assert all_weights.size == all_weights_signature.size
assert np.all(all_weights == all_weights_signature)
# test_qnetwork_forward:
expected_output = np.array(
[[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 0.e+00, 0.e+00, 6.e-08, 1.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
[ 0.e+00 ,0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00, 0.e+00, 0.e+00, 5.e-07, 1.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00 ,1.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 1.e+00, 0.e+00, 0.e+00, 0.e+00,
0.e+00 ,0.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 1.e+00,
0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00],
[0.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00,
1.e+00, 0.e+00, 0.e+00, 0.e+00, 0.e+00]]).astype(np.float16)
inputs = 2 * np.random.rand(10, 28, 28, 1)
actual_output = model.predict(inputs).astype(np.float16)
assert_allclose(actual_output, expected_output, rtol=1e-4)
def test_qconv1d():
np.random.seed(33)
x = Input((4, 4,))
y = QConv1D(
2, 1,
kernel_quantizer=quantized_bits(6, 2, 1, alpha=1.0),
bias_quantizer=quantized_bits(4, 0, 1),
name='qconv1d')(
x)
model = Model(inputs=x, outputs=y)
# Extract model operations
model_ops = extract_model_operations(model)
# Assertion about the number of operations for this Conv1D layer
assert model_ops['qconv1d']['number_of_operations'] == 32
# Print qstats to make sure it works with Conv1D layer
print_qstats(model)
# reload the model to ensure saving/loading works
# json_string = model.to_json()
# clear_session()
# model = quantized_model_from_json(json_string)
for layer in model.layers:
all_weights = []
for i, weights in enumerate(layer.get_weights()):
input_size = np.prod(layer.input.shape.as_list()[1:])
if input_size is None:
input_size = 10 * 10
shape = weights.shape
assert input_size > 0, 'input size for {} {}'.format(layer.name, i)
all_weights.append(
10.0 * np.random.normal(0.0, np.sqrt(2.0 / input_size), shape))
if all_weights:
layer.set_weights(all_weights)
# Save the model as an h5 file using Keras's model.save()
fd, fname = tempfile.mkstemp('.h5')
model.save(fname)
del model # Delete the existing model
# Return a compiled model identical to the previous one
model = load_qmodel(fname)
# Clean the created h5 file after loading the model
os.close(fd)
os.remove(fname)
# apply quantizer to weights
model_save_quantized_weights(model)
inputs = np.random.rand(2, 4, 4)
p = model.predict(inputs).astype(np.float16)
y = np.array([[[-2.441, 3.816], [-3.807, -1.426], [-2.684, -1.317],
[-1.659, 0.9834]],
[[-4.99, 1.139], [-2.559, -1.216], [-2.285, 1.905],
[-2.652, -0.467]]]).astype(np.float16)
assert np.all(p == y)
if __name__ == '__main__':
pytest.main([__file__])
| [
"qkeras.quantized_bits",
"numpy.sqrt",
"numpy.random.rand",
"qkeras.utils.quantized_model_from_json",
"qkeras.QActivation",
"numpy.array",
"tensorflow.keras.backend.clear_session",
"qkeras.extract_model_operations",
"os.remove",
"tensorflow.keras.layers.Input",
"qkeras.binary",
"numpy.testing.assert_allclose",
"pytest.main",
"numpy.random.seed",
"tensorflow.keras.models.Model",
"tensorflow.keras.layers.Activation",
"qkeras.utils.model_save_quantized_weights",
"os.close",
"qkeras.utils.load_qmodel",
"tensorflow.keras.layers.Flatten",
"tempfile.mkstemp",
"qkeras.print_qstats",
"qkeras.quantized_relu",
"numpy.sum",
"qkeras.ternary",
"numpy.all"
]
| [((1761, 1793), 'tensorflow.keras.layers.Input', 'Input', (['(28, 28, 1)'], {'name': '"""input"""'}), "((28, 28, 1), name='input')\n", (1766, 1793), False, 'from tensorflow.keras.layers import Input\n'), ((2913, 2946), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': '[x_in]', 'outputs': '[x]'}), '(inputs=[x_in], outputs=[x])\n', (2918, 2946), False, 'from tensorflow.keras.models import Model\n'), ((3034, 3049), 'tensorflow.keras.backend.clear_session', 'clear_session', ([], {}), '()\n', (3047, 3049), False, 'from tensorflow.keras.backend import clear_session\n'), ((3060, 3098), 'qkeras.utils.quantized_model_from_json', 'quantized_model_from_json', (['json_string'], {}), '(json_string)\n', (3085, 3098), False, 'from qkeras.utils import quantized_model_from_json\n'), ((3139, 3157), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (3153, 3157), True, 'import numpy as np\n'), ((3761, 3796), 'qkeras.utils.model_save_quantized_weights', 'model_save_quantized_weights', (['model'], {}), '(model)\n', (3789, 3796), False, 'from qkeras.utils import model_save_quantized_weights\n'), ((3972, 3993), 'numpy.array', 'np.array', (['all_weights'], {}), '(all_weights)\n', (3980, 3993), True, 'import numpy as np\n'), ((4059, 4132), 'numpy.array', 'np.array', (['[2.0, -6.75, -0.625, -2.0, -0.25, -56.0, 1.125, -1.625, -1.125]'], {}), '([2.0, -6.75, -0.625, -2.0, -0.25, -56.0, 1.125, -1.625, -1.125])\n', (4067, 4132), True, 'import numpy as np\n'), ((4203, 4247), 'numpy.all', 'np.all', (['(all_weights == all_weights_signature)'], {}), '(all_weights == all_weights_signature)\n', (4209, 4247), True, 'import numpy as np\n'), ((5385, 5445), 'numpy.testing.assert_allclose', 'assert_allclose', (['actual_output', 'expected_output'], {'rtol': '(0.0001)'}), '(actual_output, expected_output, rtol=0.0001)\n', (5400, 5445), False, 'from numpy.testing import assert_allclose\n'), ((5468, 5486), 'numpy.random.seed', 'np.random.seed', (['(33)'], {}), '(33)\n', (5482, 5486), True, 'import numpy as np\n'), ((5493, 5506), 'tensorflow.keras.layers.Input', 'Input', (['(4, 4)'], {}), '((4, 4))\n', (5498, 5506), False, 'from tensorflow.keras.layers import Input\n'), ((5686, 5712), 'tensorflow.keras.models.Model', 'Model', ([], {'inputs': 'x', 'outputs': 'y'}), '(inputs=x, outputs=y)\n', (5691, 5712), False, 'from tensorflow.keras.models import Model\n'), ((5757, 5788), 'qkeras.extract_model_operations', 'extract_model_operations', (['model'], {}), '(model)\n', (5781, 5788), False, 'from qkeras import extract_model_operations\n'), ((5977, 5996), 'qkeras.print_qstats', 'print_qstats', (['model'], {}), '(model)\n', (5989, 5996), False, 'from qkeras import print_qstats\n'), ((6711, 6734), 'tempfile.mkstemp', 'tempfile.mkstemp', (['""".h5"""'], {}), "('.h5')\n", (6727, 6734), False, 'import tempfile\n'), ((6865, 6883), 'qkeras.utils.load_qmodel', 'load_qmodel', (['fname'], {}), '(fname)\n', (6876, 6883), False, 'from qkeras.utils import load_qmodel\n'), ((6941, 6953), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (6949, 6953), False, 'import os\n'), ((6956, 6972), 'os.remove', 'os.remove', (['fname'], {}), '(fname)\n', (6965, 6972), False, 'import os\n'), ((7007, 7042), 'qkeras.utils.model_save_quantized_weights', 'model_save_quantized_weights', (['model'], {}), '(model)\n', (7035, 7042), False, 'from qkeras.utils import model_save_quantized_weights\n'), ((7055, 7078), 'numpy.random.rand', 'np.random.rand', (['(2)', '(4)', '(4)'], {}), '(2, 4, 4)\n', (7069, 7078), True, 'import numpy as np\n'), ((7365, 7379), 
'numpy.all', 'np.all', (['(p == y)'], {}), '(p == y)\n', (7371, 7379), True, 'import numpy as np\n'), ((7411, 7434), 'pytest.main', 'pytest.main', (['[__file__]'], {}), '([__file__])\n', (7422, 7434), False, 'import pytest\n'), ((2119, 2170), 'qkeras.QActivation', 'QActivation', (['"""quantized_relu(6,2,1)"""'], {'name': '"""act0_m"""'}), "('quantized_relu(6,2,1)', name='act0_m')\n", (2130, 2170), False, 'from qkeras import QActivation\n'), ((2604, 2655), 'qkeras.QActivation', 'QActivation', (['"""quantized_relu(6,4,1)"""'], {'name': '"""act2_m"""'}), "('quantized_relu(6,4,1)', name='act2_m')\n", (2615, 2655), False, 'from qkeras import QActivation\n'), ((2665, 2688), 'tensorflow.keras.layers.Flatten', 'Flatten', ([], {'name': '"""flatten"""'}), "(name='flatten')\n", (2672, 2688), False, 'from tensorflow.keras.layers import Flatten\n'), ((2861, 2898), 'tensorflow.keras.layers.Activation', 'Activation', (['"""softmax"""'], {'name': '"""softmax"""'}), "('softmax', name='softmax')\n", (2871, 2898), False, 'from tensorflow.keras.layers import Activation\n'), ((5294, 5323), 'numpy.random.rand', 'np.random.rand', (['(10)', '(28)', '(28)', '(1)'], {}), '(10, 28, 28, 1)\n', (5308, 5323), True, 'import numpy as np\n'), ((3911, 3926), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (3917, 3926), True, 'import numpy as np\n'), ((4296, 4863), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, \n 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,\n 6e-08, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0,\n 0.0, 0.0, 0.0, 5e-07, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, \n 0.0, 0.0], [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, \n 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0,\n 1.0, 0.0, 0.0, 0.0, 0.0]]'], {}), '([[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0,\n 0.0, 0.0, 6e-08, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, \n 0.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 0.0, \n 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 5e-07, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, \n 0.0, 1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,\n 0.0], [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.0, 0.0, \n 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]])\n', (4304, 4863), True, 'import numpy as np\n'), ((7132, 7296), 'numpy.array', 'np.array', (['[[[-2.441, 3.816], [-3.807, -1.426], [-2.684, -1.317], [-1.659, 0.9834]], [\n [-4.99, 1.139], [-2.559, -1.216], [-2.285, 1.905], [-2.652, -0.467]]]'], {}), '([[[-2.441, 3.816], [-3.807, -1.426], [-2.684, -1.317], [-1.659, \n 0.9834]], [[-4.99, 1.139], [-2.559, -1.216], [-2.285, 1.905], [-2.652, \n -0.467]]])\n', (7140, 7296), True, 'import numpy as np\n'), ((1884, 1901), 'qkeras.binary', 'binary', ([], {'alpha': '(1.0)'}), '(alpha=1.0)\n', (1890, 1901), False, 'from qkeras import binary\n'), ((1929, 1963), 'qkeras.quantized_bits', 'quantized_bits', (['(4)', '(0)', '(1)'], {'alpha': '(1.0)'}), '(4, 0, 1, alpha=1.0)\n', (1943, 1963), False, 'from qkeras import quantized_bits\n'), ((1992, 2026), 'qkeras.quantized_bits', 'quantized_bits', (['(6)', '(2)', '(1)'], {'alpha': '(1.0)'}), '(6, 2, 1, alpha=1.0)\n', (2006, 2026), False, 'from qkeras import quantized_bits\n'), ((2049, 2072), 'qkeras.quantized_bits', 'quantized_bits', (['(4)', '(0)', '(1)'], {}), '(4, 
0, 1)\n', (2063, 2072), False, 'from qkeras import quantized_bits\n'), ((2252, 2270), 'qkeras.ternary', 'ternary', ([], {'alpha': '(1.0)'}), '(alpha=1.0)\n', (2259, 2270), False, 'from qkeras import ternary\n'), ((2293, 2316), 'qkeras.quantized_bits', 'quantized_bits', (['(4)', '(0)', '(1)'], {}), '(4, 0, 1)\n', (2307, 2316), False, 'from qkeras import quantized_bits\n'), ((2360, 2383), 'qkeras.quantized_relu', 'quantized_relu', (['(6)', '(3)', '(1)'], {}), '(6, 3, 1)\n', (2374, 2383), False, 'from qkeras import quantized_relu\n'), ((2477, 2511), 'qkeras.quantized_bits', 'quantized_bits', (['(6)', '(2)', '(1)'], {'alpha': '(1.0)'}), '(6, 2, 1, alpha=1.0)\n', (2491, 2511), False, 'from qkeras import quantized_bits\n'), ((2534, 2557), 'qkeras.quantized_bits', 'quantized_bits', (['(4)', '(0)', '(1)'], {}), '(4, 0, 1)\n', (2548, 2557), False, 'from qkeras import quantized_bits\n'), ((2739, 2773), 'qkeras.quantized_bits', 'quantized_bits', (['(6)', '(2)', '(1)'], {'alpha': '(1.0)'}), '(6, 2, 1, alpha=1.0)\n', (2753, 2773), False, 'from qkeras import quantized_bits\n'), ((2796, 2819), 'qkeras.quantized_bits', 'quantized_bits', (['(4)', '(0)', '(1)'], {}), '(4, 0, 1)\n', (2810, 2819), False, 'from qkeras import quantized_bits\n'), ((5558, 5592), 'qkeras.quantized_bits', 'quantized_bits', (['(6)', '(2)', '(1)'], {'alpha': '(1.0)'}), '(6, 2, 1, alpha=1.0)\n', (5572, 5592), False, 'from qkeras import quantized_bits\n'), ((5615, 5638), 'qkeras.quantized_bits', 'quantized_bits', (['(4)', '(0)', '(1)'], {}), '(4, 0, 1)\n', (5629, 5638), False, 'from qkeras import quantized_bits\n'), ((3635, 3660), 'numpy.sqrt', 'np.sqrt', (['(2.0 / input_size)'], {}), '(2.0 / input_size)\n', (3642, 3660), True, 'import numpy as np\n'), ((6545, 6570), 'numpy.sqrt', 'np.sqrt', (['(2.0 / input_size)'], {}), '(2.0 / input_size)\n', (6552, 6570), True, 'import numpy as np\n')] |
from typing import Optional, List, TypeVar, Generic, Callable
import discord.ui
from .item import Item
from .select_option import SelectOption
from .custom import CustomSelect
def _default_check(_: discord.Interaction) -> bool:
return True
C = TypeVar("C", bound=discord.ui.Select)
class Select(Item, Generic[C]):
def __init__(
self,
placeholder: Optional[str] = None,
min_values: int = 1,
max_values: int = 1,
options: Optional[list] = None,
cls: C = CustomSelect,
custom_id: Optional[str] = None,
) -> None:
self._placeholder: Optional[str] = placeholder
self._min_values: int = min_values
self._max_values: int = max_values
self._options: list = [] if options is None else options
self._row: Optional[int] = None
self.cls: C = cls
self._custom_id: Optional[str] = custom_id
self.func: Optional[Callable] = None
self.check_func: Callable[[discord.Interaction], bool] = _default_check
def placeholder(self, placeholder: str) -> 'Select':
self._placeholder = placeholder
return self
def min_values(self, min_values: int) -> 'Select':
self._min_values = min_values
return self
def max_values(self, max_values: int) -> 'Select':
self._max_values = max_values
return self
def options(self, options: List[SelectOption]) -> 'Select':
self._options = options
return self
def row(self, row: int) -> 'Select':
self._row = row
return self
def on_select(self, func: Callable) -> 'Select':
self.func = func
return self
def custom_id(self, custom_id: str) -> 'Select':
self._custom_id = custom_id
return self
def check(self, func: Callable[[discord.Interaction], bool]) -> 'Select':
self.check_func = func
return self
def to_discord(self) -> C:
return self.cls(
placeholder=self._placeholder,
min_values=self._min_values,
max_values=self._max_values,
options=[o.to_discord_select_option() for o in self._options],
row=self._row,
custom_id=self._custom_id,
check_func=self.check_func,
callback=self.func
)
| [
"typing.TypeVar"
]
| [((254, 291), 'typing.TypeVar', 'TypeVar', (['"""C"""'], {'bound': 'discord.ui.Select'}), "('C', bound=discord.ui.Select)\n", (261, 291), False, 'from typing import Optional, List, TypeVar, Generic, Callable\n')] |
#!/usr/bin/env python3
import os
import sys
import time
sys.path.append(os.getcwd()+'/lib')
import random
from dataclasses import dataclass, field
from ObsInfo import ObsInfo
def generate_random_obs(num_obs: int, size_list: list, config_data):
"""
config_file_name = "config.json"
json_file = open(config_file_name)
config_data = json.load(json_file)
size_list = [length, width, height]
"""
ObsList = []
if (num_obs > 0.5):
for i in range(0, num_obs):
# random center
center = [random.uniform(config_data["LAB_SPACE_LIMIT"]["LIMIT_X"][0], config_data["LAB_SPACE_LIMIT"]["LIMIT_X"][1]), \
random.uniform(config_data["LAB_SPACE_LIMIT"]["LIMIT_Y"][0], config_data["LAB_SPACE_LIMIT"]["LIMIT_Y"][1]), \
random.uniform(config_data["LAB_SPACE_LIMIT"]["LIMIT_Z"][0], config_data["LAB_SPACE_LIMIT"]["LIMIT_Z"][1])]
ObsList.append( ObsInfo(center, size_list) )
    return ObsList
| [
"ObsInfo.ObsInfo",
"random.uniform",
"os.getcwd"
]
| [((72, 83), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (81, 83), False, 'import os\n'), ((548, 659), 'random.uniform', 'random.uniform', (["config_data['LAB_SPACE_LIMIT']['LIMIT_X'][0]", "config_data['LAB_SPACE_LIMIT']['LIMIT_X'][1]"], {}), "(config_data['LAB_SPACE_LIMIT']['LIMIT_X'][0], config_data[\n 'LAB_SPACE_LIMIT']['LIMIT_X'][1])\n", (562, 659), False, 'import random\n'), ((674, 785), 'random.uniform', 'random.uniform', (["config_data['LAB_SPACE_LIMIT']['LIMIT_Y'][0]", "config_data['LAB_SPACE_LIMIT']['LIMIT_Y'][1]"], {}), "(config_data['LAB_SPACE_LIMIT']['LIMIT_Y'][0], config_data[\n 'LAB_SPACE_LIMIT']['LIMIT_Y'][1])\n", (688, 785), False, 'import random\n'), ((800, 911), 'random.uniform', 'random.uniform', (["config_data['LAB_SPACE_LIMIT']['LIMIT_Z'][0]", "config_data['LAB_SPACE_LIMIT']['LIMIT_Z'][1]"], {}), "(config_data['LAB_SPACE_LIMIT']['LIMIT_Z'][0], config_data[\n 'LAB_SPACE_LIMIT']['LIMIT_Z'][1])\n", (814, 911), False, 'import random\n'), ((937, 963), 'ObsInfo.ObsInfo', 'ObsInfo', (['center', 'size_list'], {}), '(center, size_list)\n', (944, 963), False, 'from ObsInfo import ObsInfo\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# License: BSD-3 (https://tldrlegal.com/license/bsd-3-clause-license-(revised))
# Copyright (c) 2016-2021, <NAME>; Luczywo, Nadia
# All rights reserved.
# =============================================================================
# DOCS
# =============================================================================
"""Functionalities for remove negatives from criteria.
In addition to the main functionality, an MCDA agnostic function is offered
to push negatives values on an array along an arbitrary axis.
"""
# =============================================================================
# IMPORTS
# =============================================================================
import numpy as np
from ..core import SKCMatrixAndWeightTransformerABC
from ..utils import doc_inherit
# =============================================================================
# FUNCTIONS
# =============================================================================
def push_negatives(arr, axis):
r"""Increment the array until all the valuer are sean >= 0.
If an array has negative values this function increment the values
proportionally to made all the array positive along an axis.
.. math::
\overline{X}_{ij} =
\begin{cases}
X_{ij} + min_{X_{ij}} & \text{if } X_{ij} < 0\\
X_{ij} & \text{otherwise}
\end{cases}
Parameters
----------
arr: :py:class:`numpy.ndarray` like.
        An array with values
axis : :py:class:`int` optional
Axis along which to operate. By default, flattened input is used.
Returns
-------
:py:class:`numpy.ndarray`
array with all values >= 0.
Examples
--------
.. code-block:: pycon
>>> from skcriteria.preprocess import push_negatives
>>> mtx = [[1, 2], [3, 4]]
>>> mtx_lt0 = [[-1, 2], [3, 4]] # has a negative value
    >>> push_negatives(mtx) # an array without negatives isn't affected
array([[1, 2],
[3, 4]])
# all the array is incremented by 1 to eliminate the negative
>>> push_negatives(mtx_lt0)
array([[0, 3],
[4, 5]])
# by column only the first one (with the negative value) is affected
>>> push_negatives(mtx_lt0, axis=0)
array([[0, 2],
[4, 4]])
# by row only the first row (with the negative value) is affected
>>> push_negatives(mtx_lt0, axis=1)
array([[0, 3],
[3, 4]])
"""
arr = np.asarray(arr)
mins = np.min(arr, axis=axis, keepdims=True)
delta = (mins < 0) * mins
return arr - delta
class PushNegatives(SKCMatrixAndWeightTransformerABC):
r"""Increment the matrix/weights until all the valuer are sean >= 0.
If the matrix/weights has negative values this function increment the
values proportionally to made all the matrix/weights positive along an
axis.
.. math::
\overline{X}_{ij} =
\begin{cases}
X_{ij} + min_{X_{ij}} & \text{if } X_{ij} < 0\\
X_{ij} & \text{otherwise}
\end{cases}
"""
@doc_inherit(SKCMatrixAndWeightTransformerABC._transform_weights)
def _transform_weights(self, weights):
return push_negatives(weights, axis=None)
@doc_inherit(SKCMatrixAndWeightTransformerABC._transform_matrix)
def _transform_matrix(self, matrix):
return push_negatives(matrix, axis=0)
| [
"numpy.asarray",
"numpy.min"
]
| [((2605, 2620), 'numpy.asarray', 'np.asarray', (['arr'], {}), '(arr)\n', (2615, 2620), True, 'import numpy as np\n'), ((2632, 2669), 'numpy.min', 'np.min', (['arr'], {'axis': 'axis', 'keepdims': '(True)'}), '(arr, axis=axis, keepdims=True)\n', (2638, 2669), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
#
# This class was auto-generated from the API references found at
# https://support.direct.ingenico.com/documentation/api/reference/
#
from ingenico.direct.sdk.data_object import DataObject
from ingenico.direct.sdk.domain.address import Address
from ingenico.direct.sdk.domain.company_information import CompanyInformation
from ingenico.direct.sdk.domain.personal_information_token import PersonalInformationToken
class CustomerToken(DataObject):
__billing_address = None
__company_information = None
__personal_information = None
@property
def billing_address(self) -> Address:
"""
| Object containing billing address details
Type: :class:`ingenico.direct.sdk.domain.address.Address`
"""
return self.__billing_address
@billing_address.setter
def billing_address(self, value: Address):
self.__billing_address = value
@property
def company_information(self) -> CompanyInformation:
"""
| Object containing company information
Type: :class:`ingenico.direct.sdk.domain.company_information.CompanyInformation`
"""
return self.__company_information
@company_information.setter
def company_information(self, value: CompanyInformation):
self.__company_information = value
@property
def personal_information(self) -> PersonalInformationToken:
"""
Type: :class:`ingenico.direct.sdk.domain.personal_information_token.PersonalInformationToken`
"""
return self.__personal_information
@personal_information.setter
def personal_information(self, value: PersonalInformationToken):
self.__personal_information = value
def to_dictionary(self):
dictionary = super(CustomerToken, self).to_dictionary()
if self.billing_address is not None:
dictionary['billingAddress'] = self.billing_address.to_dictionary()
if self.company_information is not None:
dictionary['companyInformation'] = self.company_information.to_dictionary()
if self.personal_information is not None:
dictionary['personalInformation'] = self.personal_information.to_dictionary()
return dictionary
def from_dictionary(self, dictionary):
super(CustomerToken, self).from_dictionary(dictionary)
if 'billingAddress' in dictionary:
if not isinstance(dictionary['billingAddress'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['billingAddress']))
value = Address()
self.billing_address = value.from_dictionary(dictionary['billingAddress'])
if 'companyInformation' in dictionary:
if not isinstance(dictionary['companyInformation'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['companyInformation']))
value = CompanyInformation()
self.company_information = value.from_dictionary(dictionary['companyInformation'])
if 'personalInformation' in dictionary:
if not isinstance(dictionary['personalInformation'], dict):
raise TypeError('value \'{}\' is not a dictionary'.format(dictionary['personalInformation']))
value = PersonalInformationToken()
self.personal_information = value.from_dictionary(dictionary['personalInformation'])
return self
| [
"ingenico.direct.sdk.domain.address.Address",
"ingenico.direct.sdk.domain.company_information.CompanyInformation",
"ingenico.direct.sdk.domain.personal_information_token.PersonalInformationToken"
]
| [((2597, 2606), 'ingenico.direct.sdk.domain.address.Address', 'Address', ([], {}), '()\n', (2604, 2606), False, 'from ingenico.direct.sdk.domain.address import Address\n'), ((2941, 2961), 'ingenico.direct.sdk.domain.company_information.CompanyInformation', 'CompanyInformation', ([], {}), '()\n', (2959, 2961), False, 'from ingenico.direct.sdk.domain.company_information import CompanyInformation\n'), ((3307, 3333), 'ingenico.direct.sdk.domain.personal_information_token.PersonalInformationToken', 'PersonalInformationToken', ([], {}), '()\n', (3331, 3333), False, 'from ingenico.direct.sdk.domain.personal_information_token import PersonalInformationToken\n')] |
import time
import pykeyboard
# TODO: Replace following two lines with the code that activate the application.
print('Activate the application 3 seconds.')
time.sleep(3)
k = pykeyboard.PyKeyboard()
k.press_key(k.left_key)
time.sleep(1) # Hold down left key for 1 second.
k.release_key(k.left_key)
| [
"pykeyboard.PyKeyboard",
"time.sleep"
]
| [((159, 172), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (169, 172), False, 'import time\n'), ((178, 201), 'pykeyboard.PyKeyboard', 'pykeyboard.PyKeyboard', ([], {}), '()\n', (199, 201), False, 'import pykeyboard\n'), ((226, 239), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (236, 239), False, 'import time\n')] |
import os
import tarfile
from abc import ABC, abstractmethod
from glob import glob
import shutil
import random
import zstandard
"""
This registry is for automatically downloading and extracting datasets.
To register a class you need to inherit the DataDownloader class, provide name, filetype and url attributes, and
(optionally) provide download / extract / exists / tokenize functions to check if the data exists, and, if it doesn't, download,
extract and tokenize the data into the correct directory.
When done, add it to the DATA_DOWNLOADERS dict. The function process_data runs the pre-processing for the selected
dataset.
"""
DATA_DIR = os.environ.get('DATA_DIR', './data')
GPT2_VOCAB_FP = f"{DATA_DIR}/gpt2-vocab.json"
GPT2_VOCAB_URL = "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-vocab.json"
GPT2_MERGE_FP = f"{DATA_DIR}/gpt2-merges.txt"
GPT2_MERGE_URL = "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-merges.txt"
class DataDownloader(ABC):
"""Dataset registry class to automatically download / extract datasets"""
@property
def base_dir(self):
"""base data directory"""
return DATA_DIR
@property
@abstractmethod
def name(self):
"""name of dataset"""
pass
@property
@abstractmethod
def filetype(self):
"""filetype of dataset"""
pass
@property
@abstractmethod
def url(self):
"""URL from which to download dataset"""
pass
def _extract_tar(self):
self.path = os.path.join(self.base_dir, self.name)
os.makedirs(self.path, exist_ok=True)
tarfile_path = os.path.join(self.base_dir, os.path.basename(self.url))
with tarfile.open(tarfile_path, "r:gz") as dataset_tar:
print(f'Extracting files from {tarfile_path}...')
dataset_tar.extractall(self.path)
def _extract_zstd(self, remove_zstd=True):
self.path = os.path.join(self.base_dir, self.name)
os.makedirs(self.path, exist_ok=True)
zstd_file_path = os.path.join(self.base_dir, os.path.basename(self.url))
with open(zstd_file_path, 'rb') as compressed:
decomp = zstandard.ZstdDecompressor()
output_path = zstd_file_path.replace(".zst", "")
with open(output_path, 'wb') as destination:
decomp.copy_stream(compressed, destination)
if remove_zstd:
os.remove(zstd_file_path)
return output_path
def extract(self):
"""extracts dataset and moves to the correct data dir if necessary"""
self._extract_tar()
def exists(self):
"""Checks if the dataset is present"""
return os.path.isdir(f"{self.base_dir}/{self.name}")
def download(self):
"""downloads dataset"""
os.makedirs(self.base_dir, exist_ok=True)
os.system(f"wget {self.url} -O {os.path.join(self.base_dir, os.path.basename(self.url))}")
def tokenize(self):
parent_folder = os.path.join(self.base_dir, self.name)
jsonl_filepath = os.path.join(parent_folder, os.path.basename(self.url)).replace(".zst", "")
assert jsonl_filepath.endswith(".jsonl")
os.system(f"python tools/preprocess_data.py \
--input {jsonl_filepath} \
--output-prefix {parent_folder}/{self.name} \
--vocab {GPT2_VOCAB_FP} \
--dataset-impl mmap \
--tokenizer-type GPT2BPETokenizer \
--merge-file {GPT2_MERGE_FP} \
--append-eod")
def prepare(self):
if not self.exists():
self.download()
self.extract()
self.tokenize()
class Enron(DataDownloader):
name = "enron"
filetype = "jsonl.zst"
url = "http://eaidata.bmk.sh/data/enron_emails.jsonl.zst"
seed = 1
def exists(self):
self.path = os.path.join(self.base_dir, self.name)
return os.path.isfile(os.path.join(self.path, os.path.basename(self.url).replace(".zst", "")))
def extract(self, remove_zstd=True):
self._extract_zstd(remove_zstd=remove_zstd)
shutil.move(os.path.join(self.base_dir, os.path.basename(self.url).replace(".zst", "")), os.path.join(self.base_dir, self.name))
def maybe_download_gpt2_tokenizer_data():
if not os.path.isfile(GPT2_VOCAB_FP):
os.system(f'wget {GPT2_VOCAB_URL} -O {GPT2_VOCAB_FP}')
if not os.path.isfile(GPT2_MERGE_FP):
os.system(f'wget {GPT2_MERGE_URL} -O {GPT2_MERGE_FP}')
DATA_DOWNLOADERS = {
"enron": Enron
}
def prepare_dataset(dataset_name):
os.makedirs(DATA_DIR, exist_ok=True)
maybe_download_gpt2_tokenizer_data()
DownloaderClass = DATA_DOWNLOADERS.get(dataset_name, None)
if DownloaderClass is None:
raise NotImplementedError
else:
d = DownloaderClass()
d.prepare()
| [
"tarfile.open",
"os.makedirs",
"os.path.join",
"os.environ.get",
"os.path.isfile",
"os.path.isdir",
"os.path.basename",
"os.system",
"zstandard.ZstdDecompressor",
"os.remove"
]
| [((648, 684), 'os.environ.get', 'os.environ.get', (['"""DATA_DIR"""', '"""./data"""'], {}), "('DATA_DIR', './data')\n", (662, 684), False, 'import os\n'), ((4552, 4588), 'os.makedirs', 'os.makedirs', (['DATA_DIR'], {'exist_ok': '(True)'}), '(DATA_DIR, exist_ok=True)\n', (4563, 4588), False, 'import os\n'), ((1524, 1562), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.name'], {}), '(self.base_dir, self.name)\n', (1536, 1562), False, 'import os\n'), ((1571, 1608), 'os.makedirs', 'os.makedirs', (['self.path'], {'exist_ok': '(True)'}), '(self.path, exist_ok=True)\n', (1582, 1608), False, 'import os\n'), ((1928, 1966), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.name'], {}), '(self.base_dir, self.name)\n', (1940, 1966), False, 'import os\n'), ((1975, 2012), 'os.makedirs', 'os.makedirs', (['self.path'], {'exist_ok': '(True)'}), '(self.path, exist_ok=True)\n', (1986, 2012), False, 'import os\n'), ((2681, 2726), 'os.path.isdir', 'os.path.isdir', (['f"""{self.base_dir}/{self.name}"""'], {}), "(f'{self.base_dir}/{self.name}')\n", (2694, 2726), False, 'import os\n'), ((2792, 2833), 'os.makedirs', 'os.makedirs', (['self.base_dir'], {'exist_ok': '(True)'}), '(self.base_dir, exist_ok=True)\n', (2803, 2833), False, 'import os\n'), ((2982, 3020), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.name'], {}), '(self.base_dir, self.name)\n', (2994, 3020), False, 'import os\n'), ((3179, 3507), 'os.system', 'os.system', (['f"""python tools/preprocess_data.py --input {jsonl_filepath} --output-prefix {parent_folder}/{self.name} --vocab {GPT2_VOCAB_FP} --dataset-impl mmap --tokenizer-type GPT2BPETokenizer --merge-file {GPT2_MERGE_FP} --append-eod"""'], {}), "(\n f'python tools/preprocess_data.py --input {jsonl_filepath} --output-prefix {parent_folder}/{self.name} --vocab {GPT2_VOCAB_FP} --dataset-impl mmap --tokenizer-type GPT2BPETokenizer --merge-file {GPT2_MERGE_FP} --append-eod'\n )\n", (3188, 3507), False, 'import os\n'), ((3843, 3881), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.name'], {}), '(self.base_dir, self.name)\n', (3855, 3881), False, 'import os\n'), ((4270, 4299), 'os.path.isfile', 'os.path.isfile', (['GPT2_VOCAB_FP'], {}), '(GPT2_VOCAB_FP)\n', (4284, 4299), False, 'import os\n'), ((4309, 4363), 'os.system', 'os.system', (['f"""wget {GPT2_VOCAB_URL} -O {GPT2_VOCAB_FP}"""'], {}), "(f'wget {GPT2_VOCAB_URL} -O {GPT2_VOCAB_FP}')\n", (4318, 4363), False, 'import os\n'), ((4375, 4404), 'os.path.isfile', 'os.path.isfile', (['GPT2_MERGE_FP'], {}), '(GPT2_MERGE_FP)\n', (4389, 4404), False, 'import os\n'), ((4414, 4468), 'os.system', 'os.system', (['f"""wget {GPT2_MERGE_URL} -O {GPT2_MERGE_FP}"""'], {}), "(f'wget {GPT2_MERGE_URL} -O {GPT2_MERGE_FP}')\n", (4423, 4468), False, 'import os\n'), ((1660, 1686), 'os.path.basename', 'os.path.basename', (['self.url'], {}), '(self.url)\n', (1676, 1686), False, 'import os\n'), ((1701, 1735), 'tarfile.open', 'tarfile.open', (['tarfile_path', '"""r:gz"""'], {}), "(tarfile_path, 'r:gz')\n", (1713, 1735), False, 'import tarfile\n'), ((2066, 2092), 'os.path.basename', 'os.path.basename', (['self.url'], {}), '(self.url)\n', (2082, 2092), False, 'import os\n'), ((2170, 2198), 'zstandard.ZstdDecompressor', 'zstandard.ZstdDecompressor', ([], {}), '()\n', (2196, 2198), False, 'import zstandard\n'), ((2413, 2438), 'os.remove', 'os.remove', (['zstd_file_path'], {}), '(zstd_file_path)\n', (2422, 2438), False, 'import os\n'), ((4176, 4214), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.name'], {}), '(self.base_dir, 
self.name)\n', (4188, 4214), False, 'import os\n'), ((3074, 3100), 'os.path.basename', 'os.path.basename', (['self.url'], {}), '(self.url)\n', (3090, 3100), False, 'import os\n'), ((2902, 2928), 'os.path.basename', 'os.path.basename', (['self.url'], {}), '(self.url)\n', (2918, 2928), False, 'import os\n'), ((3936, 3962), 'os.path.basename', 'os.path.basename', (['self.url'], {}), '(self.url)\n', (3952, 3962), False, 'import os\n'), ((4127, 4153), 'os.path.basename', 'os.path.basename', (['self.url'], {}), '(self.url)\n', (4143, 4153), False, 'import os\n')] |
# -*- coding: utf-8 -*-
import os
from django.db import models
from django.db.models.signals import post_delete
from django.dispatch import receiver
from .base import Pessoa
from djangosige.apps.login.models import Usuario
from djangosige.configs.settings import MEDIA_ROOT
def logo_directory_path(instance, filename):
extension = os.path.splitext(filename)[1]
return 'imagens/empresas/logo_{0}_{1}{2}'.format(instance.nome_razao_social, instance.id, extension)
class Empresa(Pessoa):
logo_file = models.ImageField(
upload_to=logo_directory_path, default='imagens/logo.png', blank=True, null=True)
cnae = models.CharField(max_length=10, blank=True, null=True)
iest = models.CharField(max_length=32, null=True, blank=True)
class Meta:
verbose_name = "Empresa"
@property
def caminho_completo_logo(self):
if self.logo_file.name != 'imagens/logo.png':
return os.path.join(MEDIA_ROOT, self.logo_file.name)
else:
return ''
def save(self, *args, **kwargs):
        # Delete the existing logo, if one is already stored
try:
obj = Empresa.objects.get(id=self.id)
if obj.logo_file != self.logo_file and obj.logo_file != 'imagens/logo.png':
obj.logo_file.delete(save=False)
except:
pass
super(Empresa, self).save(*args, **kwargs)
def __unicode__(self):
return u'%s' % self.nome_razao_social
def __str__(self):
return u'%s' % self.nome_razao_social
# Delete the logo when the company is deleted
@receiver(post_delete, sender=Empresa)
def logo_post_delete_handler(sender, instance, **kwargs):
    # Do not delete the default image 'logo.png'
if instance.logo_file != 'imagens/logo.png':
instance.logo_file.delete(False)
class MinhaEmpresa(models.Model):
m_empresa = models.ForeignKey(
Empresa, on_delete=models.CASCADE, related_name='minha_empresa', blank=True, null=True)
m_usuario = models.ForeignKey(
Usuario, on_delete=models.CASCADE, related_name='empresa_usuario')
| [
"django.db.models.ForeignKey",
"os.path.splitext",
"os.path.join",
"django.db.models.ImageField",
"django.dispatch.receiver",
"django.db.models.CharField"
]
| [((1620, 1657), 'django.dispatch.receiver', 'receiver', (['post_delete'], {'sender': 'Empresa'}), '(post_delete, sender=Empresa)\n', (1628, 1657), False, 'from django.dispatch import receiver\n'), ((535, 638), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': 'logo_directory_path', 'default': '"""imagens/logo.png"""', 'blank': '(True)', 'null': '(True)'}), "(upload_to=logo_directory_path, default='imagens/logo.png',\n blank=True, null=True)\n", (552, 638), False, 'from django.db import models\n'), ((657, 711), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'blank': '(True)', 'null': '(True)'}), '(max_length=10, blank=True, null=True)\n', (673, 711), False, 'from django.db import models\n'), ((724, 778), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'null': '(True)', 'blank': '(True)'}), '(max_length=32, null=True, blank=True)\n', (740, 778), False, 'from django.db import models\n'), ((1912, 2022), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Empresa'], {'on_delete': 'models.CASCADE', 'related_name': '"""minha_empresa"""', 'blank': '(True)', 'null': '(True)'}), "(Empresa, on_delete=models.CASCADE, related_name=\n 'minha_empresa', blank=True, null=True)\n", (1929, 2022), False, 'from django.db import models\n'), ((2045, 2134), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Usuario'], {'on_delete': 'models.CASCADE', 'related_name': '"""empresa_usuario"""'}), "(Usuario, on_delete=models.CASCADE, related_name=\n 'empresa_usuario')\n", (2062, 2134), False, 'from django.db import models\n'), ((354, 380), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (370, 380), False, 'import os\n'), ((962, 1007), 'os.path.join', 'os.path.join', (['MEDIA_ROOT', 'self.logo_file.name'], {}), '(MEDIA_ROOT, self.logo_file.name)\n', (974, 1007), False, 'import os\n')] |
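# Editor's note: a minimal usage sketch (not part of the original project) of the
# upload-path helper defined above; the SimpleNamespace object is a hypothetical
# stand-in for an Empresa instance.
from types import SimpleNamespace
empresa_stub = SimpleNamespace(nome_razao_social="ACME Ltda", id=7)  # hypothetical row
print(logo_directory_path(empresa_stub, "logo.jpg"))
# -> 'imagens/empresas/logo_ACME Ltda_7.jpg'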
import os
from nltk.translate.bleu_score import corpus_bleu
from nltk.translate.bleu_score import SmoothingFunction
import json
from tqdm import tqdm, trange
from random import sample
import numpy as np
import pickle
import argparse
import bert_eval_acc
import svm_eval_acc
smooth = SmoothingFunction()
def eval_bleu(ref, pred):
"""
:param ref: list(list(list(any))), a list of reference sentences, each element of the list is a list of references
:param pred: list(list(any)), a list of predictions
:return: corpus bleu score
"""
return corpus_bleu(ref, pred, smoothing_function=smooth.method1)
def eval_bleu_detail(ref, pred):
"""
:param ref: list(list(list(any))), a list of reference sentences, each element of the list is a list of references
:param pred: list(list(any)), a list of predictions
    :return: corpus BLEU scores for 1-, 2-, 3- and 4-grams
"""
return corpus_bleu(ref, pred, weights=[1, 0, 0, 0], smoothing_function=smooth.method1),\
corpus_bleu(ref, pred, weights=[0, 1, 0, 0], smoothing_function=smooth.method1), \
corpus_bleu(ref, pred, weights=[0, 0, 1, 0], smoothing_function=smooth.method1), \
corpus_bleu(ref, pred, weights=[0, 0, 0, 1], smoothing_function=smooth.method1)
def count_ngram(hyps_resp, n):
"""
Count the number of unique n-grams
:param hyps_resp: list, a list of responses
:param n: int, n-gram
:return: the number of unique n-grams in hyps_resp
"""
if len(hyps_resp) == 0:
print("ERROR, eval_distinct get empty input")
return
if type(hyps_resp[0]) != list:
print("ERROR, eval_distinct takes in a list of <class 'list'>, get a list of {} instead".format(
type(hyps_resp[0])))
return
ngram = set()
for resp in hyps_resp:
if len(resp) < n:
continue
for i in range(len(resp) - n + 1):
ngram.add(' '.join(resp[i: i + n]))
return len(ngram)
def eval_distinct_detail(hyps_resp):
"""
compute distinct score for the hyps_resp
:param hyps_resp: list, a list of hyps responses
:return: average distinct score for 1, 2-gram
"""
if len(hyps_resp) == 0:
print("ERROR, eval_distinct get empty input")
return
if type(hyps_resp[0]) != list:
print("ERROR, eval_distinct takes in a list of <class 'list'>, get a list of {} instead".format(
type(hyps_resp[0])))
return
hyps_resp = [[str(x) for x in l] for l in hyps_resp]
hyps_resp = [(' '.join(i)).split() for i in hyps_resp]
num_tokens = sum([len(i) for i in hyps_resp])
dist1 = count_ngram(hyps_resp, 1) / float(num_tokens)
dist2 = count_ngram(hyps_resp, 2) / float(num_tokens)
return dist1, dist2
def eval_f1(ref, pred):
"""
:param ref: list(list(list(any))), a list of reference sentences, each element of the list is a list of references
:param pred: list(list(any)), a list of predictions
:return: f1 score
"""
assert len(ref) == len(pred) > 0
precisions = []
recalls = []
for i, s in enumerate(pred):
ref_set = set()
for rs in ref[i]:
for w in rs:
ref_set.add(w)
pred_set = set()
for w in s:
pred_set.add(w)
p = 0
for w in s:
if w in ref_set:
p += 1
if len(s) > 0:
p /= len(s)
r = 0
for rs in ref[i]:
for w in rs:
if w in pred_set:
r += 1
tot_l = sum([len(rs) for rs in ref[i]])
if tot_l > 0:
r /= tot_l
precisions.append(p)
recalls.append(r)
precision = sum(precisions) / len(precisions)
recall = sum(recalls) / len(recalls)
return 0.0 if precision == recall == 0 else 2 * precision * recall / (precision + recall)
def calc_metrics_value(task, fn, n_sample=None):
with open(fn) as f:
res = [json.loads(i) for i in f.readlines()]
s0_pred, s0_ref = [], []
s1_pred, s1_ref = [], []
for d in res:
if d['style'] == 0:
s0_ref.append([list(d['resp'])])
s0_pred.append(list(d['pred_style0'][0]))
else:
s1_ref.append([list(d['resp'])])
s1_pred.append(list(d['pred_style1'][0]))
if n_sample:
assert len(s0_ref) >= n_sample
assert len(s1_ref) >= n_sample
sampled_idxs = sample(range(len(s0_ref)), n_sample)
s0_ref = [x for i, x in enumerate(s0_ref) if i in sampled_idxs]
s0_pred = [x for i, x in enumerate(s0_pred) if i in sampled_idxs]
sampled_idxs = sample(range(len(s1_ref)), n_sample)
s1_ref = [x for i, x in enumerate(s1_ref) if i in sampled_idxs]
s1_pred = [x for i, x in enumerate(s1_pred) if i in sampled_idxs]
bleu_s0 = eval_bleu_detail(s0_ref, s0_pred)
bleu_s1 = eval_bleu_detail(s1_ref, s1_pred)
dist_s0 = eval_distinct_detail(s0_pred)
dist_s1 = eval_distinct_detail(s1_pred)
f1_s0 = eval_f1(s0_ref, s0_pred)
f1_s1 = eval_f1(s1_ref, s1_pred)
for k in range(1, 4):
print('%d-gram BLEU:' % k,
's0', bleu_s0[k - 1] * 100,
's1', bleu_s1[k - 1] * 100,
'mean', (bleu_s0[k - 1] + bleu_s1[k - 1]) / 2 * 100)
print('F1:',
's0', f1_s0 * 100, 's1', f1_s1 * 100,
'mean', (f1_s0 + f1_s1) / 2 * 100)
print('Dist:',
's0', dist_s0[1] * 100, 's1', dist_s1[1] * 100,
'mean', (dist_s0[1] + dist_s1[1]) / 2 * 100)
parser = argparse.ArgumentParser()
parser.add_argument('--eval_file_path', help='path of the eval file', required=True)
args = parser.parse_args()
file_path = args.eval_file_path
calc_metrics_value(None, file_path)
print("Evaluating acc results:")
bert_eval_acc.main(file_path)
svm_eval_acc.main(file_path)
| [
"nltk.translate.bleu_score.corpus_bleu",
"nltk.translate.bleu_score.SmoothingFunction",
"json.loads",
"argparse.ArgumentParser",
"svm_eval_acc.main",
"bert_eval_acc.main"
]
| [((285, 304), 'nltk.translate.bleu_score.SmoothingFunction', 'SmoothingFunction', ([], {}), '()\n', (302, 304), False, 'from nltk.translate.bleu_score import SmoothingFunction\n'), ((5559, 5584), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5582, 5584), False, 'import argparse\n'), ((5799, 5828), 'bert_eval_acc.main', 'bert_eval_acc.main', (['file_path'], {}), '(file_path)\n', (5817, 5828), False, 'import bert_eval_acc\n'), ((5829, 5857), 'svm_eval_acc.main', 'svm_eval_acc.main', (['file_path'], {}), '(file_path)\n', (5846, 5857), False, 'import svm_eval_acc\n'), ((566, 623), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['ref', 'pred'], {'smoothing_function': 'smooth.method1'}), '(ref, pred, smoothing_function=smooth.method1)\n', (577, 623), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((891, 970), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['ref', 'pred'], {'weights': '[1, 0, 0, 0]', 'smoothing_function': 'smooth.method1'}), '(ref, pred, weights=[1, 0, 0, 0], smoothing_function=smooth.method1)\n', (902, 970), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((984, 1063), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['ref', 'pred'], {'weights': '[0, 1, 0, 0]', 'smoothing_function': 'smooth.method1'}), '(ref, pred, weights=[0, 1, 0, 0], smoothing_function=smooth.method1)\n', (995, 1063), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((1078, 1157), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['ref', 'pred'], {'weights': '[0, 0, 1, 0]', 'smoothing_function': 'smooth.method1'}), '(ref, pred, weights=[0, 0, 1, 0], smoothing_function=smooth.method1)\n', (1089, 1157), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((1172, 1251), 'nltk.translate.bleu_score.corpus_bleu', 'corpus_bleu', (['ref', 'pred'], {'weights': '[0, 0, 0, 1]', 'smoothing_function': 'smooth.method1'}), '(ref, pred, weights=[0, 0, 0, 1], smoothing_function=smooth.method1)\n', (1183, 1251), False, 'from nltk.translate.bleu_score import corpus_bleu\n'), ((3967, 3980), 'json.loads', 'json.loads', (['i'], {}), '(i)\n', (3977, 3980), False, 'import json\n')] |
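# Editor's note: a small, self-contained sketch (added here, not in the original
# script) showing how the metric helpers above are called on toy token lists; the
# example data is illustrative only.
toy_refs = [[["the", "cat", "sat"]], [["hello", "there"]]]   # one list of references per prediction
toy_preds = [["the", "cat", "sat"], ["hi", "there"]]
print("BLEU:", eval_bleu(toy_refs, toy_preds))
print("F1:", eval_f1(toy_refs, toy_preds))
print("dist-1/dist-2:", eval_distinct_detail(toy_preds))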
"""Constants for the UniFi component."""
import logging
LOGGER = logging.getLogger(__package__)
DOMAIN = "unifi"
CONTROLLER_ID = "{host}-{site}"
CONF_CONTROLLER = "controller"
CONF_SITE_ID = "site"
UNIFI_WIRELESS_CLIENTS = "unifi_wireless_clients"
CONF_ALLOW_BANDWIDTH_SENSORS = "allow_bandwidth_sensors"
CONF_BLOCK_CLIENT = "block_client"
CONF_DETECTION_TIME = "detection_time"
CONF_POE_CLIENTS = "poe_clients"
CONF_TRACK_CLIENTS = "track_clients"
CONF_TRACK_DEVICES = "track_devices"
CONF_TRACK_WIRED_CLIENTS = "track_wired_clients"
CONF_SSID_FILTER = "ssid_filter"
DEFAULT_ALLOW_BANDWIDTH_SENSORS = False
DEFAULT_POE_CLIENTS = True
DEFAULT_TRACK_CLIENTS = True
DEFAULT_TRACK_DEVICES = True
DEFAULT_TRACK_WIRED_CLIENTS = True
DEFAULT_DETECTION_TIME = 300
ATTR_MANUFACTURER = "Ubiquiti Networks"
| [
"logging.getLogger"
]
| [((66, 96), 'logging.getLogger', 'logging.getLogger', (['__package__'], {}), '(__package__)\n', (83, 96), False, 'import logging\n')] |
import numpy as np
from treelas import post_order, TreeInstance
def test_demo_3x7_postord():
parent = np.array([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8,
9, 14, 17, 12, 15, 16, 19, 16, 17])
po = post_order(parent, include_root=True)
expect = np.array([12, 11, 19, 20, 21, 14, 15, 18, 17, 16, 13,
10, 7, 8, 9, 3, 6, 2, 5, 4, 1], dtype='i4') - 1
assert (po == expect).all()
def test_demo_3x7():
y = np.fromstring("0.62 0.73 0.71 1.5 1.17 0.43 1.08 0.62 " +
"1.73 0.95 1.46 1.6 1.16 0.38 0.9 0.32 " +
"-0.48 0.95 1.08 0.02 0.4", sep=" ")
parent = np.array([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8,
9, 14, 17, 12, 15, 16, 19, 16, 17])
lam = 1.0
prob = TreeInstance(y, parent, lam=lam)
assert prob.root == 0
assert prob.parent.dtype == np.int32
prob.solve()
assert abs(prob.x.mean() - prob.y.mean()) < 1e-15
assert len(np.unique(prob.x)) == 2
assert max(np.abs(prob.dual[2:]) - lam) < 1e-12
assert max(np.abs(prob.gamma)) < 1e-15
| [
"numpy.abs",
"numpy.unique",
"treelas.post_order",
"treelas.TreeInstance",
"numpy.array",
"numpy.fromstring"
]
| [((108, 193), 'numpy.array', 'np.array', (['[0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16, 17]'], {}), '([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16,\n 17])\n', (116, 193), True, 'import numpy as np\n'), ((222, 259), 'treelas.post_order', 'post_order', (['parent'], {'include_root': '(True)'}), '(parent, include_root=True)\n', (232, 259), False, 'from treelas import post_order, TreeInstance\n'), ((461, 606), 'numpy.fromstring', 'np.fromstring', (["('0.62 0.73 0.71 1.5 1.17 0.43 1.08 0.62 ' +\n '1.73 0.95 1.46 1.6 1.16 0.38 0.9 0.32 ' + '-0.48 0.95 1.08 0.02 0.4')"], {'sep': '""" """'}), "('0.62 0.73 0.71 1.5 1.17 0.43 1.08 0.62 ' +\n '1.73 0.95 1.46 1.6 1.16 0.38 0.9 0.32 ' + '-0.48 0.95 1.08 0.02 0.4',\n sep=' ')\n", (474, 606), True, 'import numpy as np\n'), ((656, 741), 'numpy.array', 'np.array', (['[0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16, 17]'], {}), '([0, 4, 5, 0, 3, 4, 7, 8, 5, 6, 7, 8, 9, 14, 17, 12, 15, 16, 19, 16,\n 17])\n', (664, 741), True, 'import numpy as np\n'), ((786, 818), 'treelas.TreeInstance', 'TreeInstance', (['y', 'parent'], {'lam': 'lam'}), '(y, parent, lam=lam)\n', (798, 818), False, 'from treelas import post_order, TreeInstance\n'), ((273, 374), 'numpy.array', 'np.array', (['[12, 11, 19, 20, 21, 14, 15, 18, 17, 16, 13, 10, 7, 8, 9, 3, 6, 2, 5, 4, 1]'], {'dtype': '"""i4"""'}), "([12, 11, 19, 20, 21, 14, 15, 18, 17, 16, 13, 10, 7, 8, 9, 3, 6, 2,\n 5, 4, 1], dtype='i4')\n", (281, 374), True, 'import numpy as np\n'), ((972, 989), 'numpy.unique', 'np.unique', (['prob.x'], {}), '(prob.x)\n', (981, 989), True, 'import numpy as np\n'), ((1063, 1081), 'numpy.abs', 'np.abs', (['prob.gamma'], {}), '(prob.gamma)\n', (1069, 1081), True, 'import numpy as np\n'), ((1011, 1032), 'numpy.abs', 'np.abs', (['prob.dual[2:]'], {}), '(prob.dual[2:])\n', (1017, 1032), True, 'import numpy as np\n')] |
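# Editor's note: an added sketch that mirrors the calls exercised by the tests
# above, on a tiny 3-node chain; the numeric values are illustrative only.
import numpy as np
from treelas import TreeInstance
y = np.array([1.0, 0.0, 1.0])
parent = np.array([0, 0, 1])   # node 0 is the root; node 1 hangs off 0; node 2 hangs off 1
prob = TreeInstance(y, parent, lam=0.5)
prob.solve()
print(prob.x)               # fused-lasso solution along the chain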
import argparse
import os
import torch
import yaml
DEFAULT_DEVICE = 'cuda:0'
def load_config():
parser = argparse.ArgumentParser(description='UNet3D training')
parser.add_argument('--config', type=str, help='Path to the YAML config file', required=True)
args = parser.parse_args()
config = _load_config_yaml(args.config)
# Get a device to train on
device = config.get('device', DEFAULT_DEVICE)
config['device'] = torch.device(device if torch.cuda.is_available() else "cpu")
return config
def _load_config_yaml(config_file):
return yaml.load(open(config_file, 'r'), Loader=yaml.FullLoader)
| [
"torch.cuda.is_available",
"argparse.ArgumentParser"
]
| [((113, 167), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""UNet3D training"""'}), "(description='UNet3D training')\n", (136, 167), False, 'import argparse\n'), ((473, 498), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (496, 498), False, 'import torch\n')] |
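# Editor's note: a hedged usage sketch (added); keys other than 'device' are
# placeholders, not taken from the original project.
import yaml
example_yaml = """
device: cuda:0
batch_size: 16
"""
config = yaml.load(example_yaml, Loader=yaml.FullLoader)
print(config["device"])  # -> 'cuda:0'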
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from load_cora import load_cora
from baseline_model import create_ffn
from utils import run_experiment
from utils import display_learning_curves
# Graph convolution layer
class GraphConvLayer(layers.Layer):
def __init__(
self,
hidden_units,
dropout_rate=0.2,
aggregation_type="mean",
combination_type="concat",
normalize=False,
*args,
**kwargs
):
super(GraphConvLayer, self).__init__(*args, **kwargs)
self._aggregation_type = aggregation_type
self._combination_type = combination_type
self._normalize = normalize
self._ffn_prepare = create_ffn(hidden_units, dropout_rate)
if self._combination_type == "gated":
self._update_fn = layers.GRU(
units=hidden_units,
activation="tanh",
recurrent_activation="sigmoid",
dropout=dropout_rate,
return_state=True,
recurrent_dropout=dropout_rate
)
else:
self._update_fn = create_ffn(hidden_units, dropout_rate)
def _prepare(self, node_representations, weights=None):
# node_representations shape is [num_edges, embedding_dim]
messages = self._ffn_prepare(node_representations)
if weights is not None:
messages = messages * tf.expand_dims(weights, -1)
return messages
def _aggregate(self, node_indices, neighbour_messages):
# node_indices shape is [num_edges]
# neighbour_messages shape: [num_edges, representation_dim]
num_nodes = tf.math.reduce_max(node_indices) + 1
if self._aggregation_type == "sum":
aggregated_message = tf.math.unsorted_segment_sum(
neighbour_messages,
node_indices,
num_segments=num_nodes
)
elif self._aggregation_type == "mean":
aggregated_message = tf.math.unsorted_segment_mean(
neighbour_messages,
node_indices,
num_segments=num_nodes
)
elif self._aggregation_type == "max":
aggregated_message = tf.math.unsorted_segment_max(
neighbour_messages,
node_indices,
num_segments=num_nodes
)
else:
raise ValueError(f"Invalid aggregation type: {self._aggregation_type}.")
return aggregated_message
def _update(self, node_representations, aggregated_messages):
# node_representations shape is [num_nodes, representation_dim]
# aggregated_messages shape is [num_nodes, representation_dim]
if self._combination_type == "gru":
# Create a sequence of two elements for the GRU layer
            h = tf.stack([node_representations, aggregated_messages], axis=1)
elif self._combination_type == "concat":
# Concatenate the node_representations and aggregated_messages
h = tf.concat([node_representations, aggregated_messages], axis=1)
elif self._combination_type == "add":
# Add node_representations and aggregated_messages
h = node_representations + aggregated_messages
else:
raise ValueError(f"Invalid combination type: {self._combinatino_type}.")
# Apply the processing function
node_embeddings = self._update_fn(h)
if self._combination_type == "gru":
node_embeddings = tf.unstack(node_embeddings, axis=1)[-1]
if self._normalize:
node_embeddings = tf.nn.l2_normalize(node_embeddings, axis=-1)
return node_embeddings
def call(self, inputs):
"""Process the inputs to produce the node_embeddings.
Args:
Inputs:
A tuple of three elements: node_representations, edges, edge_weights.
Returns:
node_embeddings of shape [num_nodes, representation_dim].
"""
node_representations, edges, edge_weights = inputs
# Get node_indices (source) and neighbour_indices (target) from edges
node_indices, neighbour_indices = edges[0], edges[1]
# neighbour_representations shape is [num_edges, representation_dim]
neighbour_representations = tf.gather(node_representations, neighbour_indices)
# Prepare the messages of the neighbours
neighbour_messages = self._prepare(neighbour_representations, edge_weights)
# Aggregate the neighbour messages
aggregated_messages = self._aggregate(node_indices, neighbour_messages)
# Update the node embedding with the neighbour messages
return self._update(node_representations, aggregated_messages)
class GNNNodeClassifier(tf.keras.Model):
def __init__(
self,
graph_info,
num_classes,
hidden_units,
aggregation_type="sum",
combination_type="concat",
dropout_rate=0.2,
normalize=True,
*args,
**kwargs
):
super(GNNNodeClassifier, self).__init__(*args, **kwargs)
# Unpack graph_info
node_features, edges, edge_weights = graph_info
self._node_features = node_features
self._edges = edges
self._edge_weights = edge_weights
# Set edge_weights to ones if not provided
if self._edge_weights is None:
self._edge_weights = tf.ones(shape=edges.shape[1])
# Scale edge_weights to sum to 1
self._edge_weights = self._edge_weights / tf.math.reduce_sum(self._edge_weights)
# Create a process layer
self._preprocess = create_ffn(hidden_units, dropout_rate, name="preprocess")
# Create the 1st GraphConv layer
self._conv1 = GraphConvLayer(
hidden_units,
dropout_rate,
aggregation_type,
combination_type,
normalize,
name="graph_conv1"
)
# Create the 2nd GraphConv layer
self._conv2 = GraphConvLayer(
hidden_units,
dropout_rate,
aggregation_type,
combination_type,
normalize,
name="graph_conv2"
)
# Create a postprocess layer
self._postprocess = create_ffn(hidden_units, dropout_rate, name="postprocess")
# Create a compute logits layer
self._compute_logits = layers.Dense(units=num_classes, name="logits")
def call(self, input_node_indices):
# Preprocess the node_features to produce node representations
x = self._preprocess(self._node_features)
# Apply the 1st graph conv layer
x1 = self._conv1((x, self._edges, self._edge_weights))
# Skip connection
x = x1 + x
# Apply the 2nd graph conv layer
x2 = self._conv2((x, self._edges, self._edge_weights))
# Skip connection
x = x2 + x
# Postprocess node embedding
x = self._postprocess(x)
# Fetch node embeddings for the input node_indices
node_embeddings = tf.gather(x, input_node_indices)
# Compute logits
return self._compute_logits(node_embeddings)
if __name__ == '__main__':
papers, train_data, test_data, paper_idx, class_idx, citations, feature_names = load_cora(verbose=1)
num_features = len(feature_names)
num_classes = len(class_idx)
hidden_units = [32, 32]
learning_rate = 0.01
dropout_rate = 0.5
epochs = 300
batch_size = 256
# Create an edges array (sparse adjacency matrix) of shape [2, num_edges]
edges = citations[["source", "target"]].to_numpy().T
#print(edges)
# Create an edge weights array of ones (default weights)
edge_weights = tf.ones(shape=edges.shape[1])
# Create a node features array of shape [num_nodes, num_features]
node_features = tf.cast(
papers.sort_values("paper_id")[feature_names].to_numpy(), dtype=tf.float32)
# Create graph info tuple with node_features, edges, and edge_weights
graph_info = (node_features, edges, edge_weights)
print("Edges shape: ", edges.shape)
print("Nodes shape: ", node_features.shape)
gnn_model = GNNNodeClassifier(
graph_info=graph_info,
num_classes=num_classes,
hidden_units=hidden_units,
dropout_rate=dropout_rate,
name="gnn_model"
)
print("GNN output shape: ", gnn_model([1, 10, 100]))
gnn_model.summary()
# Train the GNN model
X_train = train_data.paper_id.to_numpy()
y_train = train_data.subject
history = run_experiment(gnn_model, X_train, y_train, batch_size, epochs, learning_rate)
# Plot the learning curves
display_learning_curves(history, figure_name="gnn.png")
# Evaluate on test data
X_test = test_data.paper_id.to_numpy()
y_test = test_data.subject
_, test_accuracy = gnn_model.evaluate(x=X_test, y=y_test, verbose=1)
print(f"Test accuracy: {round(test_accuracy * 100, 2)}%")
| [
"tensorflow.expand_dims",
"load_cora.load_cora",
"tensorflow.unstack",
"tensorflow.ones",
"tensorflow.math.unsorted_segment_mean",
"tensorflow.nn.l2_normalize",
"utils.display_learning_curves",
"utils.run_experiment",
"tensorflow.concat",
"tensorflow.math.unsorted_segment_sum",
"tensorflow.gather",
"tensorflow.keras.layers.Dense",
"tensorflow.math.unsorted_segment_max",
"tensorflow.math.reduce_max",
"tensorflow.math.reduce_sum",
"baseline_model.create_ffn",
"tensorflow.stack",
"tensorflow.keras.layers.GRU"
]
| [((6709, 6729), 'load_cora.load_cora', 'load_cora', ([], {'verbose': '(1)'}), '(verbose=1)\n', (6718, 6729), False, 'from load_cora import load_cora\n'), ((7127, 7156), 'tensorflow.ones', 'tf.ones', ([], {'shape': 'edges.shape[1]'}), '(shape=edges.shape[1])\n', (7134, 7156), True, 'import tensorflow as tf\n'), ((7921, 7999), 'utils.run_experiment', 'run_experiment', (['gnn_model', 'X_train', 'y_train', 'batch_size', 'epochs', 'learning_rate'], {}), '(gnn_model, X_train, y_train, batch_size, epochs, learning_rate)\n', (7935, 7999), False, 'from utils import run_experiment\n'), ((8032, 8087), 'utils.display_learning_curves', 'display_learning_curves', (['history'], {'figure_name': '"""gnn.png"""'}), "(history, figure_name='gnn.png')\n", (8055, 8087), False, 'from utils import display_learning_curves\n'), ((698, 736), 'baseline_model.create_ffn', 'create_ffn', (['hidden_units', 'dropout_rate'], {}), '(hidden_units, dropout_rate)\n', (708, 736), False, 'from baseline_model import create_ffn\n'), ((3969, 4019), 'tensorflow.gather', 'tf.gather', (['node_representations', 'neighbour_indices'], {}), '(node_representations, neighbour_indices)\n', (3978, 4019), True, 'import tensorflow as tf\n'), ((5213, 5270), 'baseline_model.create_ffn', 'create_ffn', (['hidden_units', 'dropout_rate'], {'name': '"""preprocess"""'}), "(hidden_units, dropout_rate, name='preprocess')\n", (5223, 5270), False, 'from baseline_model import create_ffn\n'), ((5766, 5824), 'baseline_model.create_ffn', 'create_ffn', (['hidden_units', 'dropout_rate'], {'name': '"""postprocess"""'}), "(hidden_units, dropout_rate, name='postprocess')\n", (5776, 5824), False, 'from baseline_model import create_ffn\n'), ((5888, 5934), 'tensorflow.keras.layers.Dense', 'layers.Dense', ([], {'units': 'num_classes', 'name': '"""logits"""'}), "(units=num_classes, name='logits')\n", (5900, 5934), False, 'from tensorflow.keras import layers\n'), ((6492, 6524), 'tensorflow.gather', 'tf.gather', (['x', 'input_node_indices'], {}), '(x, input_node_indices)\n', (6501, 6524), True, 'import tensorflow as tf\n'), ((803, 967), 'tensorflow.keras.layers.GRU', 'layers.GRU', ([], {'units': 'hidden_units', 'activation': '"""tanh"""', 'recurrent_activation': '"""sigmoid"""', 'dropout': 'dropout_rate', 'return_state': '(True)', 'recurrent_dropout': 'dropout_rate'}), "(units=hidden_units, activation='tanh', recurrent_activation=\n 'sigmoid', dropout=dropout_rate, return_state=True, recurrent_dropout=\n dropout_rate)\n", (813, 967), False, 'from tensorflow.keras import layers\n'), ((1060, 1098), 'baseline_model.create_ffn', 'create_ffn', (['hidden_units', 'dropout_rate'], {}), '(hidden_units, dropout_rate)\n', (1070, 1098), False, 'from baseline_model import create_ffn\n'), ((1560, 1592), 'tensorflow.math.reduce_max', 'tf.math.reduce_max', (['node_indices'], {}), '(node_indices)\n', (1578, 1592), True, 'import tensorflow as tf\n'), ((1664, 1755), 'tensorflow.math.unsorted_segment_sum', 'tf.math.unsorted_segment_sum', (['neighbour_messages', 'node_indices'], {'num_segments': 'num_nodes'}), '(neighbour_messages, node_indices, num_segments\n =num_nodes)\n', (1692, 1755), True, 'import tensorflow as tf\n'), ((2609, 2671), 'tensorflow.stack', 'tf.stack', (['[node_respresentations, aggregated_messages]'], {'axis': '(1)'}), '([node_respresentations, aggregated_messages], axis=1)\n', (2617, 2671), True, 'import tensorflow as tf\n'), ((3331, 3375), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (['node_embeddings'], {'axis': '(-1)'}), '(node_embeddings, axis=-1)\n', (3349, 
3375), True, 'import tensorflow as tf\n'), ((5008, 5037), 'tensorflow.ones', 'tf.ones', ([], {'shape': 'edges.shape[1]'}), '(shape=edges.shape[1])\n', (5015, 5037), True, 'import tensorflow as tf\n'), ((5121, 5159), 'tensorflow.math.reduce_sum', 'tf.math.reduce_sum', (['self._edge_weights'], {}), '(self._edge_weights)\n', (5139, 5159), True, 'import tensorflow as tf\n'), ((1332, 1359), 'tensorflow.expand_dims', 'tf.expand_dims', (['weights', '(-1)'], {}), '(weights, -1)\n', (1346, 1359), True, 'import tensorflow as tf\n'), ((1859, 1950), 'tensorflow.math.unsorted_segment_mean', 'tf.math.unsorted_segment_mean', (['neighbour_messages', 'node_indices'], {'num_segments': 'num_nodes'}), '(neighbour_messages, node_indices,\n num_segments=num_nodes)\n', (1888, 1950), True, 'import tensorflow as tf\n'), ((2796, 2858), 'tensorflow.concat', 'tf.concat', (['[node_representations, aggregated_messages]'], {'axis': '(1)'}), '([node_representations, aggregated_messages], axis=1)\n', (2805, 2858), True, 'import tensorflow as tf\n'), ((3242, 3277), 'tensorflow.unstack', 'tf.unstack', (['node_embeddings'], {'axis': '(1)'}), '(node_embeddings, axis=1)\n', (3252, 3277), True, 'import tensorflow as tf\n'), ((2054, 2145), 'tensorflow.math.unsorted_segment_max', 'tf.math.unsorted_segment_max', (['neighbour_messages', 'node_indices'], {'num_segments': 'num_nodes'}), '(neighbour_messages, node_indices, num_segments\n =num_nodes)\n', (2082, 2145), True, 'import tensorflow as tf\n')] |
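# Editor's note: a toy illustration (added, not part of the original script) of the
# aggregation step inside GraphConvLayer: per-edge messages are summed into their
# target nodes with tf.math.unsorted_segment_sum.
import tensorflow as tf
node_indices = tf.constant([0, 0, 1, 2])                       # target node of each edge
neighbour_messages = tf.constant([[1.0], [2.0], [3.0], [4.0]])   # one message per edge
aggregated = tf.math.unsorted_segment_sum(neighbour_messages, node_indices, num_segments=3)
print(aggregated.numpy())  # [[3.] [3.] [4.]]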
from db import db
class RisklayerPrognosis(db.Model):
__tablename__ = 'risklayer_prognosis'
datenbestand = db.Column(db.TIMESTAMP, primary_key=True, nullable=False)
prognosis = db.Column(db.Float, nullable=False)
# class RisklayerPrognosisSchema(SQLAlchemyAutoSchema):
# class Meta:
# strict = True
# model = RisklayerPrognosis
#
# timestamp = fields.Timestamp(data_key="datenbestand")
# prognosis = fields.Number(data_key="prognosis")
| [
"db.db.Column"
]
| [((118, 175), 'db.db.Column', 'db.Column', (['db.TIMESTAMP'], {'primary_key': '(True)', 'nullable': '(False)'}), '(db.TIMESTAMP, primary_key=True, nullable=False)\n', (127, 175), False, 'from db import db\n'), ((192, 227), 'db.db.Column', 'db.Column', (['db.Float'], {'nullable': '(False)'}), '(db.Float, nullable=False)\n', (201, 227), False, 'from db import db\n')] |
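# Editor's note: a hedged sketch (added) of reading the most recent prognosis row;
# it relies on the standard Flask-SQLAlchemy Model.query API, not on code from the
# original module, and assumes an active application context.
latest = (RisklayerPrognosis.query
          .order_by(RisklayerPrognosis.datenbestand.desc())
          .first())
if latest is not None:
    print(latest.datenbestand, latest.prognosis)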
# SPDX-FileCopyrightText: 2014 MicroPython & CircuitPython contributors (https://github.com/adafruit/circuitpython/graphs/contributors)
#
# SPDX-License-Identifier: MIT
import argparse
import os
import sys
sys.path.append("../../tools/usb_descriptor")
from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util
import hid_report_descriptors
DEFAULT_INTERFACE_NAME = 'CircuitPython'
ALL_DEVICES='CDC,MSC,AUDIO,HID'
ALL_DEVICES_SET=frozenset(ALL_DEVICES.split(','))
DEFAULT_DEVICES='CDC,MSC,AUDIO,HID'
ALL_HID_DEVICES='KEYBOARD,MOUSE,CONSUMER,SYS_CONTROL,GAMEPAD,DIGITIZER,XAC_COMPATIBLE_GAMEPAD,RAW'
ALL_HID_DEVICES_SET=frozenset(ALL_HID_DEVICES.split(','))
# Digitizer works on Linux but conflicts with mouse, so omit it.
DEFAULT_HID_DEVICES='KEYBOARD,MOUSE,CONSUMER,GAMEPAD'
parser = argparse.ArgumentParser(description='Generate USB descriptors.')
parser.add_argument('--highspeed', default=False, action='store_true',
help='descriptor for highspeed device')
parser.add_argument('--manufacturer', type=str,
help='manufacturer of the device')
parser.add_argument('--product', type=str,
help='product name of the device')
parser.add_argument('--vid', type=lambda x: int(x, 16),
help='vendor id')
parser.add_argument('--pid', type=lambda x: int(x, 16),
help='product id')
parser.add_argument('--serial_number_length', type=int, default=32,
help='length needed for the serial number in digits')
parser.add_argument('--devices', type=lambda l: tuple(l.split(',')), default=DEFAULT_DEVICES,
help='devices to include in descriptor (AUDIO includes MIDI support)')
parser.add_argument('--hid_devices', type=lambda l: tuple(l.split(',')), default=DEFAULT_HID_DEVICES,
help='HID devices to include in HID report descriptor')
parser.add_argument('--interface_name', type=str,
help='The name/prefix to use in the interface descriptions',
default=DEFAULT_INTERFACE_NAME)
parser.add_argument('--no-renumber_endpoints', dest='renumber_endpoints', action='store_false',
help='use to not renumber endpoint')
parser.add_argument('--cdc_ep_num_notification', type=int, default=0,
help='endpoint number of CDC NOTIFICATION')
parser.add_argument('--cdc_ep_num_data_out', type=int, default=0,
help='endpoint number of CDC DATA OUT')
parser.add_argument('--cdc_ep_num_data_in', type=int, default=0,
help='endpoint number of CDC DATA IN')
parser.add_argument('--msc_ep_num_out', type=int, default=0,
help='endpoint number of MSC OUT')
parser.add_argument('--msc_ep_num_in', type=int, default=0,
help='endpoint number of MSC IN')
parser.add_argument('--hid_ep_num_out', type=int, default=0,
help='endpoint number of HID OUT')
parser.add_argument('--hid_ep_num_in', type=int, default=0,
help='endpoint number of HID IN')
parser.add_argument('--midi_ep_num_out', type=int, default=0,
help='endpoint number of MIDI OUT')
parser.add_argument('--midi_ep_num_in', type=int, default=0,
help='endpoint number of MIDI IN')
parser.add_argument('--output_c_file', type=argparse.FileType('w', encoding='UTF-8'), required=True)
parser.add_argument('--output_h_file', type=argparse.FileType('w', encoding='UTF-8'), required=True)
args = parser.parse_args()
unknown_devices = list(frozenset(args.devices) - ALL_DEVICES_SET)
if unknown_devices:
raise ValueError("Unknown device(s)", unknown_devices)
unknown_hid_devices = list(frozenset(args.hid_devices) - ALL_HID_DEVICES_SET)
if unknown_hid_devices:
raise ValueError("Unknown HID devices(s)", unknown_hid_devices)
if not args.renumber_endpoints:
if 'CDC' in args.devices:
if args.cdc_ep_num_notification == 0:
raise ValueError("CDC notification endpoint number must not be 0")
elif args.cdc_ep_num_data_out == 0:
raise ValueError("CDC data OUT endpoint number must not be 0")
elif args.cdc_ep_num_data_in == 0:
raise ValueError("CDC data IN endpoint number must not be 0")
if 'MSC' in args.devices:
if args.msc_ep_num_out == 0:
raise ValueError("MSC endpoint OUT number must not be 0")
elif args.msc_ep_num_in == 0:
raise ValueError("MSC endpoint IN number must not be 0")
if 'HID' in args.devices:
        if args.hid_ep_num_out == 0:
raise ValueError("HID endpoint OUT number must not be 0")
elif args.hid_ep_num_in == 0:
raise ValueError("HID endpoint IN number must not be 0")
if 'AUDIO' in args.devices:
        if args.midi_ep_num_out == 0:
raise ValueError("MIDI endpoint OUT number must not be 0")
elif args.midi_ep_num_in == 0:
raise ValueError("MIDI endpoint IN number must not be 0")
class StringIndex:
"""Assign a monotonically increasing index to each unique string. Start with 0."""
string_to_index = {}
index_to_variable = {}
strings = []
@classmethod
def index(cls, string, *, variable_name = None):
if string in cls.string_to_index:
idx = cls.string_to_index[string]
if not cls.index_to_variable[idx]:
cls.index_to_variable[idx] = variable_name
return idx
else:
idx = len(cls.strings)
cls.string_to_index[string] = idx
cls.strings.append(string)
cls.index_to_variable[idx] = variable_name
return idx
@classmethod
def strings_in_order(cls):
return cls.strings
# langid must be the 0th string descriptor
LANGID_INDEX = StringIndex.index("\u0409", variable_name="language_id")
assert LANGID_INDEX == 0
SERIAL_NUMBER_INDEX = StringIndex.index("S" * args.serial_number_length, variable_name="usb_serial_number")
device = standard.DeviceDescriptor(
description="top",
idVendor=args.vid,
idProduct=args.pid,
iManufacturer=StringIndex.index(args.manufacturer),
iProduct=StringIndex.index(args.product),
iSerialNumber=SERIAL_NUMBER_INDEX)
# Interface numbers are interface-set local and endpoints are interface local
# until util.join_interfaces renumbers them.
cdc_union = cdc.Union(
description="CDC comm",
bMasterInterface=0x00, # Adjust this after interfaces are renumbered.
bSlaveInterface_list=[0x01]) # Adjust this after interfaces are renumbered.
cdc_call_management = cdc.CallManagement(
description="CDC comm",
bmCapabilities=0x01,
bDataInterface=0x01) # Adjust this after interfaces are renumbered.
cdc_comm_interface = standard.InterfaceDescriptor(
description="CDC comm",
bInterfaceClass=cdc.CDC_CLASS_COMM, # Communications Device Class
bInterfaceSubClass=cdc.CDC_SUBCLASS_ACM, # Abstract control model
bInterfaceProtocol=cdc.CDC_PROTOCOL_NONE,
iInterface=StringIndex.index("{} CDC control".format(args.interface_name)),
subdescriptors=[
cdc.Header(
description="CDC comm",
bcdCDC=0x0110),
cdc_call_management,
cdc.AbstractControlManagement(
description="CDC comm",
bmCapabilities=0x02),
cdc_union,
standard.EndpointDescriptor(
description="CDC comm in",
bEndpointAddress=args.cdc_ep_num_notification | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT,
wMaxPacketSize=0x0040,
bInterval=0x10)
])
cdc_data_interface = standard.InterfaceDescriptor(
description="CDC data",
bInterfaceClass=cdc.CDC_CLASS_DATA,
iInterface=StringIndex.index("{} CDC data".format(args.interface_name)),
subdescriptors=[
standard.EndpointDescriptor(
description="CDC data out",
bEndpointAddress=args.cdc_ep_num_data_out | standard.EndpointDescriptor.DIRECTION_OUT,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
standard.EndpointDescriptor(
description="CDC data in",
bEndpointAddress=args.cdc_ep_num_data_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
])
cdc_interfaces = [cdc_comm_interface, cdc_data_interface]
msc_interfaces = [
standard.InterfaceDescriptor(
description="MSC",
bInterfaceClass=msc.MSC_CLASS,
bInterfaceSubClass=msc.MSC_SUBCLASS_TRANSPARENT,
bInterfaceProtocol=msc.MSC_PROTOCOL_BULK,
iInterface=StringIndex.index("{} Mass Storage".format(args.interface_name)),
subdescriptors=[
standard.EndpointDescriptor(
description="MSC in",
bEndpointAddress=args.msc_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
standard.EndpointDescriptor(
description="MSC out",
bEndpointAddress=(args.msc_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT),
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
]
)
]
# When there's only one hid_device, it shouldn't have a report id.
# Otherwise, report ids are assigned sequentially:
# args.hid_devices[0] has report_id 1
# args.hid_devices[1] has report_id 2
# etc.
report_ids = {}
if len(args.hid_devices) == 1:
name = args.hid_devices[0]
combined_hid_report_descriptor = hid.ReportDescriptor(
description=name,
report_descriptor=bytes(hid_report_descriptors.REPORT_DESCRIPTOR_FUNCTIONS[name](0)))
report_ids[name] = 0
else:
report_id = 1
concatenated_descriptors = bytearray()
for name in args.hid_devices:
concatenated_descriptors.extend(
bytes(hid_report_descriptors.REPORT_DESCRIPTOR_FUNCTIONS[name](report_id)))
report_ids[name] = report_id
report_id += 1
combined_hid_report_descriptor = hid.ReportDescriptor(
description="MULTIDEVICE",
report_descriptor=bytes(concatenated_descriptors))
# ASF4 expects keyboard and generic devices to have both in and out endpoints,
# and will fail (possibly silently) if both are not supplied.
hid_endpoint_in_descriptor = standard.EndpointDescriptor(
description="HID in",
bEndpointAddress=args.hid_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT,
bInterval=8)
hid_endpoint_out_descriptor = standard.EndpointDescriptor(
description="HID out",
bEndpointAddress=args.hid_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,
bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT,
bInterval=8)
hid_interfaces = [
standard.InterfaceDescriptor(
description="HID Multiple Devices",
bInterfaceClass=hid.HID_CLASS,
bInterfaceSubClass=hid.HID_SUBCLASS_NOBOOT,
bInterfaceProtocol=hid.HID_PROTOCOL_NONE,
iInterface=StringIndex.index("{} HID".format(args.interface_name)),
subdescriptors=[
hid.HIDDescriptor(
description="HID",
wDescriptorLength=len(bytes(combined_hid_report_descriptor))),
hid_endpoint_in_descriptor,
hid_endpoint_out_descriptor,
]
),
]
# Audio!
# In and out here are relative to CircuitPython
# USB OUT -> midi_in_jack_emb -> midi_out_jack_ext -> CircuitPython
midi_in_jack_emb = midi.InJackDescriptor(
description="MIDI PC -> {}".format(args.interface_name),
bJackType=midi.JACK_TYPE_EMBEDDED,
iJack=StringIndex.index("{} usb_midi.ports[0]".format(args.interface_name)))
midi_out_jack_ext = midi.OutJackDescriptor(
description="MIDI data out to user code.",
bJackType=midi.JACK_TYPE_EXTERNAL,
input_pins=[(midi_in_jack_emb, 1)],
iJack=0)
# USB IN <- midi_out_jack_emb <- midi_in_jack_ext <- CircuitPython
midi_in_jack_ext = midi.InJackDescriptor(
description="MIDI data in from user code.",
bJackType=midi.JACK_TYPE_EXTERNAL,
iJack=0)
midi_out_jack_emb = midi.OutJackDescriptor(
description="MIDI PC <- {}".format(args.interface_name),
bJackType=midi.JACK_TYPE_EMBEDDED,
input_pins=[(midi_in_jack_ext, 1)],
iJack=StringIndex.index("{} usb_midi.ports[1]".format(args.interface_name)))
audio_midi_interface = standard.InterfaceDescriptor(
description="Midi goodness",
bInterfaceClass=audio.AUDIO_CLASS_DEVICE,
bInterfaceSubClass=audio.AUDIO_SUBCLASS_MIDI_STREAMING,
bInterfaceProtocol=audio.AUDIO_PROTOCOL_V1,
iInterface=StringIndex.index("{} MIDI".format(args.interface_name)),
subdescriptors=[
midi.Header(
jacks_and_elements=[
midi_in_jack_emb,
midi_in_jack_ext,
midi_out_jack_emb,
midi_out_jack_ext
],
),
standard.EndpointDescriptor(
description="MIDI data out to {}".format(args.interface_name),
bEndpointAddress=args.midi_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
midi.DataEndpointDescriptor(baAssocJack=[midi_in_jack_emb]),
standard.EndpointDescriptor(
description="MIDI data in from {}".format(args.interface_name),
bEndpointAddress=args.midi_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval = 0x0,
wMaxPacketSize=512 if args.highspeed else 64),
midi.DataEndpointDescriptor(baAssocJack=[midi_out_jack_emb]),
])
cs_ac_interface = audio10.AudioControlInterface(
description="Empty audio control",
audio_streaming_interfaces = [],
midi_streaming_interfaces = [
audio_midi_interface
]
)
audio_control_interface = standard.InterfaceDescriptor(
description="All the audio",
bInterfaceClass=audio.AUDIO_CLASS_DEVICE,
bInterfaceSubClass=audio.AUDIO_SUBCLASS_CONTROL,
bInterfaceProtocol=audio.AUDIO_PROTOCOL_V1,
iInterface=StringIndex.index("{} Audio".format(args.interface_name)),
subdescriptors=[
cs_ac_interface,
])
# Audio streaming interfaces must occur before MIDI ones.
audio_interfaces = [audio_control_interface] + cs_ac_interface.audio_streaming_interfaces + cs_ac_interface.midi_streaming_interfaces
interfaces_to_join = []
if 'CDC' in args.devices:
interfaces_to_join.append(cdc_interfaces)
if 'MSC' in args.devices:
interfaces_to_join.append(msc_interfaces)
if 'HID' in args.devices:
interfaces_to_join.append(hid_interfaces)
if 'AUDIO' in args.devices:
interfaces_to_join.append(audio_interfaces)
# util.join_interfaces() will renumber the endpoints to make them unique across descriptors,
# and renumber the interfaces in order. But we still need to fix up certain
# interface cross-references.
interfaces = util.join_interfaces(interfaces_to_join, renumber_endpoints=args.renumber_endpoints)
# Now adjust the CDC interface cross-references.
cdc_union.bMasterInterface = cdc_comm_interface.bInterfaceNumber
cdc_union.bSlaveInterface_list = [cdc_data_interface.bInterfaceNumber]
cdc_call_management.bDataInterface = cdc_data_interface.bInterfaceNumber
cdc_iad = standard.InterfaceAssociationDescriptor(
description="CDC IAD",
bFirstInterface=cdc_comm_interface.bInterfaceNumber,
bInterfaceCount=len(cdc_interfaces),
bFunctionClass=cdc.CDC_CLASS_COMM, # Communications Device Class
bFunctionSubClass=cdc.CDC_SUBCLASS_ACM, # Abstract control model
bFunctionProtocol=cdc.CDC_PROTOCOL_NONE)
descriptor_list = []
if 'CDC' in args.devices:
# Put the CDC IAD just before the CDC interfaces.
# There appears to be a bug in the Windows composite USB driver that requests the
# HID report descriptor with the wrong interface number if the HID interface is not given
# first. However, it still fetches the descriptor anyway. We could reorder the interfaces but
# the Windows 7 Adafruit_usbser.inf file thinks CDC is at Interface 0, so we'll leave it
# there for backwards compatibility.
descriptor_list.append(cdc_iad)
descriptor_list.extend(cdc_interfaces)
if 'MSC' in args.devices:
descriptor_list.extend(msc_interfaces)
if 'HID' in args.devices:
descriptor_list.extend(hid_interfaces)
if 'AUDIO' in args.devices:
# Only add the control interface because other audio interfaces are managed by it to ensure the
# correct ordering.
descriptor_list.append(audio_control_interface)
# Finally, build the composite descriptor.
configuration = standard.ConfigurationDescriptor(
description="Composite configuration",
wTotalLength=(standard.ConfigurationDescriptor.bLength +
sum([len(bytes(x)) for x in descriptor_list])),
bNumInterfaces=len(interfaces))
descriptor_list.insert(0, configuration)
string_descriptors = [standard.StringDescriptor(string) for string in StringIndex.strings_in_order()]
serial_number_descriptor = string_descriptors[SERIAL_NUMBER_INDEX]
c_file = args.output_c_file
h_file = args.output_h_file
c_file.write("""\
#include <stdint.h>
#include "py/objtuple.h"
#include "shared-bindings/usb_hid/Device.h"
#include "{H_FILE_NAME}"
""".format(H_FILE_NAME=h_file.name))
c_file.write("""\
// {DESCRIPTION} : {CLASS}
""".format(DESCRIPTION=device.description,
CLASS=device.__class__))
c_file.write("""\
const uint8_t usb_desc_dev[] = {
""")
for b in bytes(device):
c_file.write("0x{:02x}, ".format(b))
c_file.write("""\
};
""")
c_file.write("""\
const uint8_t usb_desc_cfg[] = {
""")
# Write out all the regular descriptors as one long array (that's how ASF4 does it).
descriptor_length = 0
for descriptor in descriptor_list:
c_file.write("""\
// {DESCRIPTION} : {CLASS}
""".format(DESCRIPTION=descriptor.description,
CLASS=descriptor.__class__))
b = bytes(descriptor)
notes = descriptor.notes()
i = 0
# This prints each subdescriptor on a separate line.
n = 0
while i < len(b):
length = b[i]
for j in range(length):
c_file.write("0x{:02x}, ".format(b[i + j]))
c_file.write("// " + notes[n])
n += 1
c_file.write("\n")
i += length
descriptor_length += len(b)
c_file.write("""\
};
""")
pointers_to_strings = []
for idx, descriptor in enumerate(string_descriptors):
c_file.write("""\
// {DESCRIPTION} : {CLASS}
""".format(DESCRIPTION=descriptor.description,
CLASS=descriptor.__class__))
b = bytes(descriptor)
notes = descriptor.notes()
i = 0
# This prints each subdescriptor on a separate line.
variable_name = StringIndex.index_to_variable[idx]
if not variable_name:
variable_name = "string_descriptor{}".format(idx)
const = "const "
if variable_name == "usb_serial_number":
const = ""
c_file.write("""\
{const}uint16_t {NAME}[] = {{
""".format(const=const, NAME=variable_name))
pointers_to_strings.append("{name}".format(name=variable_name))
n = 0
while i < len(b):
length = b[i]
for j in range(length // 2):
c_file.write("0x{:04x}, ".format(b[i + 2*j + 1] << 8 | b[i + 2*j]))
n += 1
c_file.write("\n")
i += length
c_file.write("""\
};
""")
c_file.write("""\
// array of pointer to string descriptors
uint16_t const * const string_desc_arr [] =
{
""")
c_file.write(""",\
""".join(pointers_to_strings))
c_file.write("""
};
""")
c_file.write("\n")
hid_descriptor_length = len(bytes(combined_hid_report_descriptor))
# Now we values we need for the .h file.
h_file.write("""\
#ifndef MICROPY_INCLUDED_AUTOGEN_USB_DESCRIPTOR_H
#define MICROPY_INCLUDED_AUTOGEN_USB_DESCRIPTOR_H
#include <stdint.h>
extern const uint8_t usb_desc_dev[{device_length}];
extern const uint8_t usb_desc_cfg[{configuration_length}];
extern uint16_t usb_serial_number[{serial_number_length}];
extern uint16_t const * const string_desc_arr [{string_descriptor_length}];
extern const uint8_t hid_report_descriptor[{hid_report_descriptor_length}];
#define CFG_TUSB_RHPORT0_MODE ({rhport0_mode})
#define USB_HID_NUM_DEVICES {hid_num_devices}
// Vendor name included in Inquiry response, max 8 bytes
#define CFG_TUD_MSC_VENDOR "{msc_vendor}"
// Product name included in Inquiry response, max 16 bytes
#define CFG_TUD_MSC_PRODUCT "{msc_product}"
"""
.format(serial_number_length=len(bytes(serial_number_descriptor)) // 2,
device_length=len(bytes(device)),
configuration_length=descriptor_length,
max_configuration_length=max(hid_descriptor_length, descriptor_length),
string_descriptor_length=len(pointers_to_strings),
hid_report_descriptor_length=len(bytes(combined_hid_report_descriptor)),
rhport0_mode='OPT_MODE_DEVICE | OPT_MODE_HIGH_SPEED' if args.highspeed else 'OPT_MODE_DEVICE',
hid_num_devices=len(args.hid_devices),
msc_vendor=args.manufacturer[:8],
msc_product=args.product[:16]))
# Write out the report descriptor and info
c_file.write("""\
const uint8_t hid_report_descriptor[{HID_DESCRIPTOR_LENGTH}] = {{
""".format(HID_DESCRIPTOR_LENGTH=hid_descriptor_length))
for b in bytes(combined_hid_report_descriptor):
c_file.write("0x{:02x}, ".format(b))
c_file.write("""\
};
""")
# Write out USB HID report buffer definitions.
for name in args.hid_devices:
c_file.write("""\
static uint8_t {name}_report_buffer[{report_length}];
""".format(name=name.lower(), report_length=hid_report_descriptors.HID_DEVICE_DATA[name].report_length))
if hid_report_descriptors.HID_DEVICE_DATA[name].out_report_length > 0:
c_file.write("""\
static uint8_t {name}_out_report_buffer[{report_length}];
""".format(name=name.lower(), report_length=hid_report_descriptors.HID_DEVICE_DATA[name].out_report_length))
# Write out table of device objects.
c_file.write("""
usb_hid_device_obj_t usb_hid_devices[] = {
""")
for name in args.hid_devices:
device_data = hid_report_descriptors.HID_DEVICE_DATA[name]
out_report_buffer = '{}_out_report_buffer'.format(name.lower()) if device_data.out_report_length > 0 else 'NULL'
c_file.write("""\
{{
.base = {{ .type = &usb_hid_device_type }},
.report_buffer = {name}_report_buffer,
.report_id = {report_id},
.report_length = {report_length},
.usage_page = {usage_page:#04x},
.usage = {usage:#04x},
.out_report_buffer = {out_report_buffer},
.out_report_length = {out_report_length},
}},
""".format(name=name.lower(), report_id=report_ids[name],
report_length=device_data.report_length,
usage_page=device_data.usage_page,
usage=device_data.usage,
out_report_buffer=out_report_buffer,
out_report_length=device_data.out_report_length))
c_file.write("""\
};
""")
# Write out tuple of device objects.
c_file.write("""
mp_obj_tuple_t common_hal_usb_hid_devices = {{
.base = {{
.type = &mp_type_tuple,
}},
.len = {num_devices},
.items = {{
""".format(num_devices=len(args.hid_devices)))
for idx in range(len(args.hid_devices)):
c_file.write("""\
(mp_obj_t) &usb_hid_devices[{idx}],
""".format(idx=idx))
c_file.write("""\
},
};
""")
h_file.write("""\
#endif // MICROPY_INCLUDED_AUTOGEN_USB_DESCRIPTOR_H
""")
| [
"adafruit_usb_descriptor.cdc.CallManagement",
"adafruit_usb_descriptor.midi.InJackDescriptor",
"argparse.FileType",
"adafruit_usb_descriptor.midi.OutJackDescriptor",
"adafruit_usb_descriptor.midi.Header",
"argparse.ArgumentParser",
"adafruit_usb_descriptor.cdc.Header",
"adafruit_usb_descriptor.cdc.Union",
"adafruit_usb_descriptor.cdc.AbstractControlManagement",
"adafruit_usb_descriptor.standard.EndpointDescriptor",
"adafruit_usb_descriptor.audio10.AudioControlInterface",
"adafruit_usb_descriptor.standard.StringDescriptor",
"adafruit_usb_descriptor.midi.DataEndpointDescriptor",
"adafruit_usb_descriptor.util.join_interfaces",
"sys.path.append"
]
| [((209, 254), 'sys.path.append', 'sys.path.append', (['"""../../tools/usb_descriptor"""'], {}), "('../../tools/usb_descriptor')\n", (224, 254), False, 'import sys\n'), ((821, 885), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate USB descriptors."""'}), "(description='Generate USB descriptors.')\n", (844, 885), False, 'import argparse\n'), ((6439, 6518), 'adafruit_usb_descriptor.cdc.Union', 'cdc.Union', ([], {'description': '"""CDC comm"""', 'bMasterInterface': '(0)', 'bSlaveInterface_list': '[1]'}), "(description='CDC comm', bMasterInterface=0, bSlaveInterface_list=[1])\n", (6448, 6518), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((6661, 6739), 'adafruit_usb_descriptor.cdc.CallManagement', 'cdc.CallManagement', ([], {'description': '"""CDC comm"""', 'bmCapabilities': '(1)', 'bDataInterface': '(1)'}), "(description='CDC comm', bmCapabilities=1, bDataInterface=1)\n", (6679, 6739), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((10800, 11009), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""HID in"""', 'bEndpointAddress': '(args.hid_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_INTERRUPT', 'bInterval': '(8)'}), "(description='HID in', bEndpointAddress=args.\n hid_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN, bmAttributes=\n standard.EndpointDescriptor.TYPE_INTERRUPT, bInterval=8)\n", (10827, 11009), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((11048, 11259), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""HID out"""', 'bEndpointAddress': '(args.hid_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_INTERRUPT', 'bInterval': '(8)'}), "(description='HID out', bEndpointAddress=args.\n hid_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,\n bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT, bInterval=8)\n", (11075, 11259), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((12235, 12385), 'adafruit_usb_descriptor.midi.OutJackDescriptor', 'midi.OutJackDescriptor', ([], {'description': '"""MIDI data out to user code."""', 'bJackType': 'midi.JACK_TYPE_EXTERNAL', 'input_pins': '[(midi_in_jack_emb, 1)]', 'iJack': '(0)'}), "(description='MIDI data out to user code.', bJackType\n =midi.JACK_TYPE_EXTERNAL, input_pins=[(midi_in_jack_emb, 1)], iJack=0)\n", (12257, 12385), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((12549, 12663), 'adafruit_usb_descriptor.midi.InJackDescriptor', 'midi.InJackDescriptor', ([], {'description': '"""MIDI data in from user code."""', 'bJackType': 'midi.JACK_TYPE_EXTERNAL', 'iJack': '(0)'}), "(description='MIDI data in from user code.', bJackType\n =midi.JACK_TYPE_EXTERNAL, iJack=0)\n", (12570, 12663), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((14416, 14570), 'adafruit_usb_descriptor.audio10.AudioControlInterface', 'audio10.AudioControlInterface', ([], {'description': '"""Empty audio control"""', 'audio_streaming_interfaces': '[]', 'midi_streaming_interfaces': '[audio_midi_interface]'}), "(description='Empty audio control',\n 
audio_streaming_interfaces=[], midi_streaming_interfaces=[\n audio_midi_interface])\n", (14445, 14570), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((15741, 15830), 'adafruit_usb_descriptor.util.join_interfaces', 'util.join_interfaces', (['interfaces_to_join'], {'renumber_endpoints': 'args.renumber_endpoints'}), '(interfaces_to_join, renumber_endpoints=args.\n renumber_endpoints)\n', (15761, 15830), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((17753, 17786), 'adafruit_usb_descriptor.standard.StringDescriptor', 'standard.StringDescriptor', (['string'], {}), '(string)\n', (17778, 17786), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((3370, 3410), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {'encoding': '"""UTF-8"""'}), "('w', encoding='UTF-8')\n", (3387, 3410), False, 'import argparse\n'), ((3471, 3511), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {'encoding': '"""UTF-8"""'}), "('w', encoding='UTF-8')\n", (3488, 3511), False, 'import argparse\n'), ((7191, 7237), 'adafruit_usb_descriptor.cdc.Header', 'cdc.Header', ([], {'description': '"""CDC comm"""', 'bcdCDC': '(272)'}), "(description='CDC comm', bcdCDC=272)\n", (7201, 7237), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((7304, 7375), 'adafruit_usb_descriptor.cdc.AbstractControlManagement', 'cdc.AbstractControlManagement', ([], {'description': '"""CDC comm"""', 'bmCapabilities': '(2)'}), "(description='CDC comm', bmCapabilities=2)\n", (7333, 7375), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((7432, 7680), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""CDC comm in"""', 'bEndpointAddress': '(args.cdc_ep_num_notification | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_INTERRUPT', 'wMaxPacketSize': '(64)', 'bInterval': '(16)'}), "(description='CDC comm in', bEndpointAddress=\n args.cdc_ep_num_notification | standard.EndpointDescriptor.DIRECTION_IN,\n bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT, wMaxPacketSize\n =64, bInterval=16)\n", (7459, 7680), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((7967, 8233), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""CDC data out"""', 'bEndpointAddress': '(args.cdc_ep_num_data_out | standard.EndpointDescriptor.DIRECTION_OUT)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), "(description='CDC data out', bEndpointAddress=\n args.cdc_ep_num_data_out | standard.EndpointDescriptor.DIRECTION_OUT,\n bmAttributes=standard.EndpointDescriptor.TYPE_BULK, bInterval=0,\n wMaxPacketSize=512 if args.highspeed else 64)\n", (7994, 8233), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((8291, 8554), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""CDC data in"""', 'bEndpointAddress': '(args.cdc_ep_num_data_in | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), 
"(description='CDC data in', bEndpointAddress=\n args.cdc_ep_num_data_in | standard.EndpointDescriptor.DIRECTION_IN,\n bmAttributes=standard.EndpointDescriptor.TYPE_BULK, bInterval=0,\n wMaxPacketSize=512 if args.highspeed else 64)\n", (8318, 8554), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((13329, 13439), 'adafruit_usb_descriptor.midi.Header', 'midi.Header', ([], {'jacks_and_elements': '[midi_in_jack_emb, midi_in_jack_ext, midi_out_jack_emb, midi_out_jack_ext]'}), '(jacks_and_elements=[midi_in_jack_emb, midi_in_jack_ext,\n midi_out_jack_emb, midi_out_jack_ext])\n', (13340, 13439), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((13901, 13960), 'adafruit_usb_descriptor.midi.DataEndpointDescriptor', 'midi.DataEndpointDescriptor', ([], {'baAssocJack': '[midi_in_jack_emb]'}), '(baAssocJack=[midi_in_jack_emb])\n', (13928, 13960), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((14328, 14388), 'adafruit_usb_descriptor.midi.DataEndpointDescriptor', 'midi.DataEndpointDescriptor', ([], {'baAssocJack': '[midi_out_jack_emb]'}), '(baAssocJack=[midi_out_jack_emb])\n', (14355, 14388), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((9019, 9273), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""MSC in"""', 'bEndpointAddress': '(args.msc_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), "(description='MSC in', bEndpointAddress=args.\n msc_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN, bmAttributes=\n standard.EndpointDescriptor.TYPE_BULK, bInterval=0, wMaxPacketSize=512 if\n args.highspeed else 64)\n", (9046, 9273), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((9354, 9610), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""MSC out"""', 'bEndpointAddress': '(args.msc_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), "(description='MSC out', bEndpointAddress=args.\n msc_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,\n bmAttributes=standard.EndpointDescriptor.TYPE_BULK, bInterval=0,\n wMaxPacketSize=512 if args.highspeed else 64)\n", (9381, 9610), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n')] |
from tqdm import tqdm
import pandas as pd
import numpy as np, argparse, time, pickle, random, os, datetime
import torch
import torch.optim as optim
from model import MaskedNLLLoss, BC_LSTM
from dataloader import MELDDataLoader
from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report
def setup_seed(seed):
""" Manually Fix the random seed to get deterministic results.
"""
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
np.random.seed(seed)
random.seed(seed)
    torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
def train_or_eval_model(model, loss_function, dataloader, epoch, optimizer=None, mode='train'):
losses, preds, labels, masks, losses_sense = [], [], [], [], []
max_sequence_len = []
    assert mode != 'train' or optimizer is not None
if mode == 'train':
model.train()
else:
model.eval()
with tqdm(dataloader) as td:
for data in td:
if mode == 'train':
optimizer.zero_grad()
textf, acouf, mask, label = [d.cuda() for d in data[:-1]] if args.cuda else data[:-1]
log_prob, _ = model(textf, None, acouf, None, mask)
lp_ = log_prob.transpose(0,1).contiguous().view(-1, log_prob.size()[2]) # batch*seq_len, n_classes
labels_ = label.view(-1) # batch*seq_len
loss = loss_function(lp_, labels_, mask)
pred_ = torch.argmax(lp_,1) # batch*seq_len
preds.append(pred_.data.cpu().numpy())
labels.append(labels_.data.cpu().numpy())
masks.append(mask.view(-1).cpu().numpy())
losses.append(loss.item()*masks[-1].sum())
if mode == 'train':
total_loss = loss
total_loss.backward()
optimizer.step()
if preds!=[]:
preds = np.concatenate(preds)
labels = np.concatenate(labels)
masks = np.concatenate(masks)
else:
return float('nan'), float('nan'), float('nan'), [], [], [], float('nan'),[]
avg_loss = round(np.sum(losses)/np.sum(masks), 4)
avg_sense_loss = round(np.sum(losses_sense)/np.sum(masks), 4)
avg_accuracy = round(accuracy_score(labels,preds, sample_weight=masks)*100, 2)
avg_fscore = round(f1_score(labels,preds, sample_weight=masks, average='weighted')*100, 2)
if mode == 'test':
class_report = classification_report(labels, preds, sample_weight=masks, target_names=['neutral', 'surprise', 'fear', 'sadness', 'joy', 'disgust', 'anger'], digits=6)
print(class_report)
return avg_loss, avg_accuracy, labels, preds, masks, [avg_fscore]
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--num_workers', type=int, default=0,
help='num workers of loading data')
# dataloader settings
parser.add_argument('--batch-size', type=int, default=32, metavar='BS', help='batch size')
parser.add_argument('--data_path', type=str, default='../TextCnn/dataset/MELD_features_raw.pkl')
# model settings.
parser.add_argument('--attention_type', type=str, default='general2')
parser.add_argument('--utterance_dim', type=int, default=600,
help='embedding dims to use')
parser.add_argument('--emotion_state_dim', type=int, default=100)
parser.add_argument('--hidden_layer_dim', type=int, default=100)
parser.add_argument('--dropout', type=float, default=0.25)
parser.add_argument('--n_classes', type=int, default=7)
# late fusion module.
parser.add_argument('--lateFusionModule', type=str, default='concat')
parser.add_argument('--input_features', type=tuple, default=(100, 300))
parser.add_argument('--pre_fusion_hidden_dims', type=tuple, default=(24, 7))
parser.add_argument('--pre_fusion_dropout', type=float, default=0.4)
parser.add_argument('--post_fusion_dropout', type=float, default=0.3)
# train settings.
parser.add_argument('--lr', type=float, default=1e-4, metavar='LR', help='learning rate')
parser.add_argument('--l2', type=float, default=1e-5, metavar='L2', help='L2 regularization weight')
parser.add_argument('--epochs', type=int, default=100, metavar='E', help='number of epochs')
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
args.cuda = torch.cuda.is_available()
if args.cuda:
print('Running on GPU')
else:
print('Running on CPU')
for seed in [1, 11, 111, 1111, 11111]:
setup_seed(seed)
args.seed = seed
print(args)
model = BC_LSTM(args)
print('MELD BC_LSTM MODULE ...')
if args.cuda:
model.cuda()
loss_weights = torch.FloatTensor([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
loss_function = MaskedNLLLoss(loss_weights.cuda() if args.cuda else loss_weights)
optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.l2)
lf = open('logs/cnn_meld_logs.txt', 'a')
dataloader = MELDDataLoader(args)
valid_losses, valid_fscores = [], []
test_fscores, test_accuracys, test_losses = [], [], []
best_loss, best_label, best_pred, best_mask = None, None, None, None
for e in range(args.epochs):
start_time = time.time()
train_loss, train_acc, _, _, _, train_fscore = train_or_eval_model(model, loss_function, dataloader['train'], e, optimizer, mode='train')
valid_loss, valid_acc, _, _, _, valid_fscore = train_or_eval_model(model, loss_function, dataloader['valid'], e, mode='valid')
test_loss, test_acc, test_label, test_pred, test_mask, test_fscore = train_or_eval_model(model, loss_function, dataloader['test'], e, mode='test')
valid_losses.append(valid_loss)
valid_fscores.append(valid_fscore)
test_losses.append(test_loss)
test_accuracys.append(test_acc)
test_fscores.append(test_fscore)
x = 'epoch: {}, train_loss: {}, acc: {}, fscore: {}, valid_loss: {}, acc: {}, fscore: {}, test_loss: {}, acc: {}, fscore: {}, time: {} sec'.format(e+1, train_loss, train_acc, train_fscore, valid_loss, valid_acc, valid_fscore, test_loss, test_acc, test_fscore, round(time.time()-start_time, 2))
print (x)
lf.write(x + '\n')
        lf.close()
        valid_fscores = np.array(valid_fscores).transpose()
test_fscores = np.array(test_fscores).transpose() # [1, epoches]
test_accuracys = np.array(test_accuracys).transpose() # [epoches]
f1_score1 = test_fscores[0][np.argmin(valid_losses)]
acc_score1 = test_accuracys[np.argmin(valid_losses)]
f1_score2 = test_fscores[0][np.argmax(valid_fscores[0])]
acc_score2 = test_accuracys[np.argmax(valid_fscores[0])]
scores = [acc_score1, f1_score1, acc_score2, f1_score2]
scores = [str(item) for item in scores]
print ('Test Scores: Weighted F1')
print('@Best Valid Loss: Test Acc: {}, Test F1: {}'.format(acc_score1, f1_score1))
print('@Best Valid F1: Test Acc: {}, Test F1: {}'.format(acc_score2, f1_score2))
rf = open('results/cnn_meld_results.txt', 'a')
rf.write('\t'.join(scores) + '\t' + str(args) + '\n')
rf.close()
| [
"sklearn.metrics.classification_report",
"numpy.array",
"torch.cuda.is_available",
"argparse.ArgumentParser",
"model.BC_LSTM",
"dataloader.MELDDataLoader",
"numpy.random.seed",
"numpy.concatenate",
"numpy.argmin",
"torch.argmax",
"numpy.argmax",
"time.time",
"sklearn.metrics.accuracy_score",
"torch.cuda.manual_seed_all",
"torch.manual_seed",
"sklearn.metrics.f1_score",
"tqdm.tqdm",
"random.seed",
"numpy.sum",
"torch.cuda.manual_seed",
"torch.FloatTensor"
]
| [((427, 450), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (444, 450), False, 'import torch\n'), ((455, 483), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (477, 483), False, 'import torch\n'), ((488, 520), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['seed'], {}), '(seed)\n', (514, 520), False, 'import torch\n'), ((525, 545), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (539, 545), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((550, 567), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (561, 567), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2807, 2832), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2830, 2832), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((4486, 4511), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4509, 4511), False, 'import torch\n'), ((978, 994), 'tqdm.tqdm', 'tqdm', (['dataloader'], {}), '(dataloader)\n', (982, 994), False, 'from tqdm import tqdm\n'), ((1982, 2003), 'numpy.concatenate', 'np.concatenate', (['preds'], {}), '(preds)\n', (1996, 2003), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2021, 2043), 'numpy.concatenate', 'np.concatenate', (['labels'], {}), '(labels)\n', (2035, 2043), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2061, 2082), 'numpy.concatenate', 'np.concatenate', (['masks'], {}), '(masks)\n', (2075, 2082), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2524, 2684), 'sklearn.metrics.classification_report', 'classification_report', (['labels', 'preds'], {'sample_weight': 'masks', 'target_names': "['neutral', 'surprise', 'fear', 'sadness', 'joy', 'disgust', 'anger']", 'digits': '(6)'}), "(labels, preds, sample_weight=masks, target_names=[\n 'neutral', 'surprise', 'fear', 'sadness', 'joy', 'disgust', 'anger'],\n digits=6)\n", (2545, 2684), False, 'from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report\n'), ((4744, 4757), 'model.BC_LSTM', 'BC_LSTM', (['args'], {}), '(args)\n', (4751, 4757), False, 'from model import MaskedNLLLoss, BC_LSTM\n'), ((4879, 4933), 'torch.FloatTensor', 'torch.FloatTensor', (['[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])\n', (4896, 4933), False, 'import torch\n'), ((5207, 5227), 'dataloader.MELDDataLoader', 'MELDDataLoader', (['args'], {}), '(args)\n', (5221, 5227), False, 'from dataloader import MELDDataLoader\n'), ((1541, 1561), 'torch.argmax', 'torch.argmax', (['lp_', '(1)'], {}), '(lp_, 1)\n', (1553, 1561), False, 'import torch\n'), ((2200, 2214), 'numpy.sum', 'np.sum', (['losses'], {}), '(losses)\n', (2206, 2214), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2215, 2228), 'numpy.sum', 'np.sum', (['masks'], {}), '(masks)\n', (2221, 2228), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2260, 2280), 'numpy.sum', 'np.sum', (['losses_sense'], {}), '(losses_sense)\n', (2266, 2280), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2281, 2294), 'numpy.sum', 'np.sum', (['masks'], {}), '(masks)\n', (2287, 2294), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2325, 2375), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['labels', 'preds'], {'sample_weight': 'masks'}), 
'(labels, preds, sample_weight=masks)\n', (2339, 2375), False, 'from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report\n'), ((2406, 2470), 'sklearn.metrics.f1_score', 'f1_score', (['labels', 'preds'], {'sample_weight': 'masks', 'average': '"""weighted"""'}), "(labels, preds, sample_weight=masks, average='weighted')\n", (2414, 2470), False, 'from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report\n'), ((5478, 5489), 'time.time', 'time.time', ([], {}), '()\n', (5487, 5489), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6811, 6834), 'numpy.argmin', 'np.argmin', (['valid_losses'], {}), '(valid_losses)\n', (6820, 6834), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6872, 6895), 'numpy.argmin', 'np.argmin', (['valid_losses'], {}), '(valid_losses)\n', (6881, 6895), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6933, 6960), 'numpy.argmax', 'np.argmax', (['valid_fscores[0]'], {}), '(valid_fscores[0])\n', (6942, 6960), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6998, 7025), 'numpy.argmax', 'np.argmax', (['valid_fscores[0]'], {}), '(valid_fscores[0])\n', (7007, 7025), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6591, 6614), 'numpy.array', 'np.array', (['valid_fscores'], {}), '(valid_fscores)\n', (6599, 6614), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6650, 6672), 'numpy.array', 'np.array', (['test_fscores'], {}), '(test_fscores)\n', (6658, 6672), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6725, 6749), 'numpy.array', 'np.array', (['test_accuracys'], {}), '(test_accuracys)\n', (6733, 6749), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6472, 6483), 'time.time', 'time.time', ([], {}), '()\n', (6481, 6483), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n')] |
# noinspection PyUnresolvedReferences
import os
import re
# TODO I'm going to need to make a dictionary for my big list of stuff I care about and what's needed for
#  every file type (a hypothetical sketch follows the lists below).
RAF = ['EXIF:LensModel', 'MakerNotes:RawImageHeight', 'MakerNotes:RawImageWidth', 'EXIF:CreateDate', 'EXIF:ModifyDate',
'EXIF:SerialNumber', 'Composite:Aperture', 'EXIF:FocalLength', 'EXIF:Make', 'EXIF:Model', 'EXIF:LensMake']
MOV = ['EXIF:LensModel', 'MakerNotes:RawImageHeight', 'MakerNotes:RawImageWidth', 'EXIF:CreateDate', 'EXIF:ModifyDate',
'EXIF:SerialNumber', 'Composite:Aperture', 'EXIF:FocalLength', 'EXIF:Make', 'EXIF:Model', 'EXIF:LensMake',
'QuickTime:VideoFrameRate', 'QuickTime:Duration']
R3D = ['ClipName', 'EdgeTC', 'EndEdgeTC', 'TotalFrames', 'FrameHeight', 'FrameWidth', 'Aperture', 'ISO', 'Date',
'AudioSlate', 'VideoSlate', 'Camera', 'CameraModel', 'CameraPIN', 'MediaSerialNumber', 'LensSerialNumber',
'FPS', 'AspectRatio', 'Kelvin', 'LensName', 'LensBrand', 'FocalLength', 'Shutter(deg)', 'SensorID', 'SensorName',
'Take']
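# A hypothetical sketch of the lookup described in the TODO above (not part of the original module):
# map each extension to the metadata keys we care about, then pick the right list per file, e.g.
#   METADATA_KEYS = {'.RAF': RAF, '.MOV': MOV, '.R3D': R3D}
#   wanted_keys = METADATA_KEYS.get(os.path.splitext(path)[-1].upper(), [])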
def check_exiftool():
"""
checks if exiftool is installed.
:return:
"""
pass
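    # A minimal implementation sketch (an assumption, not the original code) could use shutil:
    #   import shutil
    #   return shutil.which('exiftool') is not None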
def check_redline():
"""
checks if redline is installed
:return:
"""
pass
def check_ffprobe():
"""
checks if ffprobe is installed
:return:
"""
pass
def get(filein, tool='exiftool', print_output=False):
"""
Due to issues with the exiftool module this is provided as a way to parse output directly
from exiftool through the system commands and cglexecute. For the moment it's only designed
to get the lumberdata for a single file.
:param filein:
:return: dictionary containing lumberdata from exiftool
"""
ext = os.path.splitext(filein)[-1]
d = {}
if tool == 'exiftool':
command = r'exiftool %s' % filein
output = cgl_execute(command=command, verbose=False, print_output=print_output)
for each in output['printout']:
            key, value = re.split(r"\s+:\s+", each, maxsplit=1)
d[key] = value
return d
elif tool == 'ffprobe':
command = r'%s %s' % ('ffprobe', filein)
output = cgl_execute(command=command)
for each in output['printout']:
try:
                values = re.split(r":\s+", each)
key = values[0]
values.pop(0)
if 'Stream' in key:
split_v = values[1].split(',')
d['Image Size'] = split_v[2].split()[0]
d['Source Image Width'], d['Source Image Height'] = d['Image Size'].split('x')
d['Video Frame Rate'] = split_v[4].split(' fps')[0].replace(' ', '')
if 'Duration' in key:
d['Track Duration'] = '%s s' % values[0].split(',')[0]
value = ' '.join(values)
d[key] = value
except ValueError:
print('skipping %s' % each)
return d
def get_red_data(filein):
"""
method for pulling lumberdata from r3d files. REDLINE is a command line interface from RED that is required
for this
https://www.red.com/downloads/options?itemInternalId=16144
:param filein:
:return:
"""
file_, ext_ = os.path.splitext(filein)
if ext_.upper() == '.R3D':
command = r'REDLINE --i %s --printMeta 1' % filein
d = {}
for line in os.popen(command).readlines():
line = line.strip('\n')
line = line.replace('\t', '')
line = line.replace(' ', '')
try:
key_, value = line.split(':', 1)
if key_ != 'None':
d[key_] = value
except ValueError:
pass
return d
| [
"re.split",
"os.popen",
"os.path.splitext"
]
| [((3281, 3305), 'os.path.splitext', 'os.path.splitext', (['filein'], {}), '(filein)\n', (3297, 3305), False, 'import os\n'), ((1764, 1788), 'os.path.splitext', 'os.path.splitext', (['filein'], {}), '(filein)\n', (1780, 1788), False, 'import os\n'), ((2026, 2053), 're.split', 're.split', (['"""\\\\s+:\\\\s+"""', 'each'], {}), "('\\\\s+:\\\\s+', each)\n", (2034, 2053), False, 'import re\n'), ((3431, 3448), 'os.popen', 'os.popen', (['command'], {}), '(command)\n', (3439, 3448), False, 'import os\n'), ((2301, 2324), 're.split', 're.split', (['""":\\\\s+"""', 'each'], {}), "(':\\\\s+', each)\n", (2309, 2324), False, 'import re\n')] |
import logging
from typing import List, Callable
import numpy as np
from pyquaternion import Quaternion
from pyrep import PyRep
from pyrep.errors import IKError
from pyrep.objects import Dummy, Object
from rlbench import utils
from rlbench.action_modes import ArmActionMode, ActionMode
from rlbench.backend.exceptions import BoundaryError, WaypointError
from rlbench.backend.observation import Observation
from rlbench.backend.robot import Robot
from rlbench.backend.scene import Scene
from rlbench.backend.task import Task
from rlbench.demo import Demo
from rlbench.observation_config import ObservationConfig
_TORQUE_MAX_VEL = 9999
_DT = 0.05
_MAX_RESET_ATTEMPTS = 40
_MAX_DEMO_ATTEMPTS = 10
class InvalidActionError(Exception):
pass
class TaskEnvironmentError(Exception):
pass
class TaskEnvironment(object):
def __init__(self, pyrep: PyRep, robot: Robot, scene: Scene, task: Task,
action_mode: ActionMode, dataset_root: str,
obs_config: ObservationConfig,
static_positions: bool = False,
attach_grasped_objects: bool = True):
self._pyrep = pyrep
self._robot = robot
self._scene = scene
self._task = task
self._variation_number = 0
self._action_mode = action_mode
self._dataset_root = dataset_root
self._obs_config = obs_config
self._static_positions = static_positions
self._attach_grasped_objects = attach_grasped_objects
self._reset_called = False
self._prev_ee_velocity = None
self._enable_path_observations = False
self._scene.load(self._task)
self._pyrep.start()
self._target_workspace_check = Dummy.create()
self._last_e = None
def get_name(self) -> str:
return self._task.get_name()
def sample_variation(self) -> int:
self._variation_number = np.random.randint(
0, self._task.variation_count())
return self._variation_number
def set_variation(self, v: int) -> None:
if v >= self.variation_count():
raise TaskEnvironmentError(
'Requested variation %d, but there are only %d variations.' % (
v, self.variation_count()))
self._variation_number = v
def variation_count(self) -> int:
return self._task.variation_count()
def reset(self) -> (List[str], Observation):
self._scene.reset()
try:
desc = self._scene.init_episode(
self._variation_number, max_attempts=_MAX_RESET_ATTEMPTS,
randomly_place=not self._static_positions)
except (BoundaryError, WaypointError) as e:
raise TaskEnvironmentError(
'Could not place the task %s in the scene. This should not '
'happen, please raise an issues on this task.'
% self._task.get_name()) from e
self._reset_called = True
# redundancy resolution
self._last_e = None
# Returns a list of descriptions and the first observation
return desc, self._scene.get_observation()
def get_observation(self) -> Observation:
return self._scene.get_observation()
def get_joint_upper_velocity_limits(self):
return self._robot.arm.get_joint_upper_velocity_limits()
def get_all_graspable_objects(self):
return self._task.get_graspable_objects()
def get_robot_visuals(self):
return self._robot.arm.get_visuals()
def get_all_graspable_object_positions(self, relative_to_cameras=False):
""" returns the positions of all graspable object relative to all enabled cameras """
objects = self._task.get_graspable_objects()
positions = []
for ob in objects:
            if relative_to_cameras:
positions.append(self._scene.get_object_position_relative_to_cameras(ob))
else:
positions.append({"left_shoulder_camera": ob.get_position(),
"right_shoulder_camera": ob.get_position(),
"front_camera": ob.get_position(),
"wrist_camera": ob.get_position()})
return positions
def get_all_graspable_object_poses(self, relative_to_cameras=False):
""" returns the pose of all graspable object relative to all enabled cameras """
objects = self._task.get_graspable_objects()
poses = []
for ob in objects:
if relative_to_cameras:
poses.append(self._scene.get_object_pose_relative_to_cameras(ob))
else:
poses.append({"left_shoulder_camera": ob.get_pose(),
"right_shoulder_camera": ob.get_pose(),
"front_camera": ob.get_pose(),
"wrist_camera": ob.get_pose()})
return poses
def _assert_action_space(self, action, expected_shape):
if np.shape(action) != expected_shape:
raise RuntimeError(
'Expected the action shape to be: %s, but was shape: %s' % (
str(expected_shape), str(np.shape(action))))
def _assert_unit_quaternion(self, quat):
if not np.isclose(np.linalg.norm(quat), 1.0):
raise RuntimeError('Action contained non unit quaternion!')
def _torque_action(self, action):
self._robot.arm.set_joint_target_velocities(
[(_TORQUE_MAX_VEL if t < 0 else -_TORQUE_MAX_VEL)
for t in action])
self._robot.arm.set_joint_forces(np.abs(action))
def _ee_action(self, action, relative_to=None):
self._assert_unit_quaternion(action[3:])
try:
joint_positions = self._robot.arm.solve_ik(
action[:3], quaternion=action[3:], relative_to=relative_to)
self._robot.arm.set_joint_target_positions(joint_positions)
except IKError as e:
raise InvalidActionError('Could not find a path.') from e
done = False
prev_values = None
# Move until reached target joint positions or until we stop moving
# (e.g. when we collide wth something)
while not done:
self._scene.step()
cur_positions = self._robot.arm.get_joint_positions()
reached = np.allclose(cur_positions, joint_positions, atol=0.01)
not_moving = False
if prev_values is not None:
not_moving = np.allclose(
cur_positions, prev_values, atol=0.001)
prev_values = cur_positions
done = reached or not_moving
def _path_action(self, action, relative_to=None):
self._assert_unit_quaternion(action[3:])
try:
# Check if the target is in the workspace; if not, then quick reject
# Only checks position, not rotation
pos_to_check = action[:3]
if relative_to is not None:
self._target_workspace_check.set_position(
pos_to_check, relative_to)
pos_to_check = self._target_workspace_check.get_position()
valid = self._scene.check_target_in_workspace(pos_to_check)
if not valid:
raise InvalidActionError('Target is outside of workspace.')
path = self._robot.arm.get_path(
action[:3], quaternion=action[3:], ignore_collisions=True,
relative_to=relative_to)
done = False
observations = []
while not done:
done = path.step()
self._scene.step()
if self._enable_path_observations:
observations.append(self._scene.get_observation())
success, terminate = self._task.success()
# If the task succeeds while traversing path, then break early
if success:
break
observations.append(self._scene.get_observation())
return observations
except IKError as e:
raise InvalidActionError('Could not find a path.') from e
def step(self, action, camcorder=None) -> (Observation, int, bool):
        # returns (observation, reward, terminate)
if not self._reset_called:
raise RuntimeError(
"Call 'reset' before calling 'step' on a task.")
# action should contain 1 extra value for gripper open close state
arm_action = np.array(action[:-1])
ee_action = action[-1]
        if not 0.0 <= ee_action <= 1.0:
raise ValueError('Gripper action expected to be within 0 and 1.')
# Discretize the gripper action
current_ee = (1.0 if self._robot.gripper.get_open_amount()[0] > 0.9 else 0.0)
if ee_action > 0.5:
ee_action = 1.0
elif ee_action < 0.5:
ee_action = 0.0
if current_ee != ee_action:
arm_action = np.array([0.0]*7)
if self._action_mode.arm == ArmActionMode.ABS_JOINT_VELOCITY:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
self._robot.arm.set_joint_target_velocities(arm_action)
self._scene.step()
# if needed save some images
if camcorder:
obs = self._scene.get_observation()
camcorder.save(obs, self.get_robot_visuals(), self.get_all_graspable_objects())
elif self._action_mode.arm == ArmActionMode.DELTA_JOINT_VELOCITY:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
cur = np.array(self._robot.arm.get_joint_velocities())
self._robot.arm.set_joint_target_velocities(cur + arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.ABS_JOINT_POSITION:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
self._robot.arm.set_joint_target_positions(arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.DELTA_JOINT_POSITION:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
cur = np.array(self._robot.arm.get_joint_positions())
self._robot.arm.set_joint_target_positions(cur + arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.ABS_JOINT_TORQUE:
self._assert_action_space(
arm_action, (len(self._robot.arm.joints),))
self._torque_action(arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.DELTA_JOINT_TORQUE:
cur = np.array(self._robot.arm.get_joint_forces())
new_action = cur + arm_action
self._torque_action(new_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.ABS_EE_POSE_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
self._ee_action(list(arm_action))
elif self._action_mode.arm == ArmActionMode.ABS_EE_POSE_PLAN_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
self._path_observations = []
self._path_observations = self._path_action(list(arm_action))
elif self._action_mode.arm == ArmActionMode.DELTA_EE_POSE_PLAN_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
a_x, a_y, a_z, a_qx, a_qy, a_qz, a_qw = arm_action
x, y, z, qx, qy, qz, qw = self._robot.arm.get_tip().get_pose()
new_rot = Quaternion(a_qw, a_qx, a_qy, a_qz) * Quaternion(qw, qx,
qy, qz)
qw, qx, qy, qz = list(new_rot)
new_pose = [a_x + x, a_y + y, a_z + z] + [qx, qy, qz, qw]
self._path_observations = []
self._path_observations = self._path_action(list(new_pose))
elif self._action_mode.arm == ArmActionMode.DELTA_EE_POSE_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
a_x, a_y, a_z, a_qx, a_qy, a_qz, a_qw = arm_action
x, y, z, qx, qy, qz, qw = self._robot.arm.get_tip().get_pose()
new_rot = Quaternion(a_qw, a_qx, a_qy, a_qz) * Quaternion(
qw, qx, qy, qz)
qw, qx, qy, qz = list(new_rot)
new_pose = [a_x + x, a_y + y, a_z + z] + [qx, qy, qz, qw]
self._ee_action(list(new_pose))
elif self._action_mode.arm == ArmActionMode.EE_POSE_EE_FRAME:
self._assert_action_space(arm_action, (7,))
self._ee_action(
list(arm_action), relative_to=self._robot.arm.get_tip())
elif self._action_mode.arm == ArmActionMode.EE_POSE_PLAN_EE_FRAME:
self._assert_action_space(arm_action, (7,))
self._path_observations = []
self._path_observations = self._path_action(
list(arm_action), relative_to=self._robot.arm.get_tip())
else:
raise RuntimeError('Unrecognised action mode.')
if current_ee != ee_action:
done = False
while not done:
done = self._robot.gripper.actuate(ee_action, velocity=0.2)
self._pyrep.step()
self._task.step()
# if needed save some images
if camcorder:
obs = self._scene.get_observation()
camcorder.save(obs, self.get_robot_visuals(), self.get_all_graspable_objects())
if ee_action == 0.0 and self._attach_grasped_objects:
# If gripper close action, the check for grasp.
for g_obj in self._task.get_graspable_objects():
self._robot.gripper.grasp(g_obj)
else:
# If gripper open action, the check for ungrasp.
self._robot.gripper.release()
success, terminate = self._task.success()
task_reward = self._task.reward()
reward = float(success) if task_reward is None else task_reward
return self._scene.get_observation(), reward, terminate
def resolve_redundancy_joint_velocities(self, actions, setup):
"""
Resolves redundant self-motion into the nullspace without changing the gripper tip position
:param actions:
Current actions without redundancy resolution.
:param setup:
Setup for redundancy resolution defining the mode, weighting etc.
:return: Array of joint velocities, which move the robot's tip according to the provided actions yet push
the joint position towards a reference position.
"""
# get the Jacobian
J = self._robot.arm.get_jacobian()
J = np.transpose(J)
J = np.flip(J)
J = J[-3:]
# compute the pseudo inverse
J_plus = np.linalg.pinv(J)
# weighting
if type(setup["W"]) is list:
W = np.array(setup["W"])
elif setup["W"] is None:
# use default weighting later
W = None
else:
raise TypeError("Unsupported type %s for weighting vector." % type(setup["W"]))
# compute the error
if setup["mode"] == "reference_position":
dL, L = self.get_loss_reference_position(setup["ref_position"], W)
elif setup["mode"] == "collision_avoidance":
dL, L = self.get_loss_collision_avoidance(W, setup)
# compute the joint velocities
q_dot_redundancy = setup["alpha"] * np.matmul((np.identity(len(self._robot.arm.joints)) - np.matmul(J_plus, J)), dL)
# the provided jacobian seems to be inaccurate resulting in slight movement of the ee. This is why
        # the velocities are set to 0 once the error stops changing much.
e = dL
if setup["cut-off_error"] is not None:
if self._last_e is not None:
e_dot = np.sum(np.abs(e - self._last_e))
if self._last_e is not None and e_dot < setup["cut-off_error"]:
q_dot_redundancy = np.array([0.0] * 7)
self._last_e = e
else:
self._last_e = e
return actions - q_dot_redundancy, L
def get_loss_reference_position(self, ref_pos, W):
"""
        Calculates the summed squared error between the current and the reference configuration as well as
        its partial derivatives with respect to all q's for redundancy resolution.
        -> L(q) = 1/2 sum_{i=1}^N w_i (q_i - \tilde{q}_i)^2
        :param ref_pos:
            Reference position.
        :param W:
            Weighting vector.
        :return:
            1: The partial derivatives of the summed squared error between the current and the
            reference configuration -> \nabla_q L(q)
            2: Summed squared error between the current and the reference configuration. -> L(q)
"""
if W is None:
# default weighting
W = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
e = (self._robot.arm.get_joint_positions() - ref_pos)
return e * W, 0.5*np.dot(e,e*W)
def get_loss_collision_avoidance(self, W, setup):
"""
        Calculates the loss as well as the respective partial derivatives for redundancy resolution with
        collision avoidance. This only works with tasks that include one obstacle!
        L(q) = \sum_{i=1}^N d(q)^{-1}
        :param W:
            Weighting vector.
        :return:
            1: The partial derivatives of the loss above. -> \nabla_q L(q)
            2: The loss shown above. -> L(q)
"""
# get the position of the object
p_obs = self._task.obstacle.get_position() + np.array([0, 0, 0.33]) - self._robot.arm.joints[0].get_position()
#p_obs = self._task.obstacle.get_position()
p_obs = np.append(p_obs, [1])
# get the transformation matrices, their derivatives, and the positions of the links
A_1, A_2, A_3, A_4, A_5, A_6, A_7 = self._robot.get_transformation_matrices()
dA_1, dA_2, dA_3, dA_4, dA_5, dA_6, dA_7 = self._robot.get_transformation_matrices_derivatives()
p_1, p_2, p_3, p_4, p_5, p_6, p_7 = self._robot.get_link_positions_in_ref_frames()
# we use reciprocal of the distance between each link and an obstacle as our Loss
# the chain rule delivers: d/dq L = (p_i^0 (q_1,..., q_i) - p_obs)^T * d/dq (p_i^0 (q_1,..., q_i) - p_obs)
# where p_i^0 = (\prod_{j=1}^i A_j^{j-1}(q_j)) * p_i
# as the left side of d/dq L is used often, let's calculate it in advance
d_1_T = np.transpose(A_1.dot(p_1) - p_obs)
d_2_T = np.transpose(A_1.dot(A_2).dot(p_2) - p_obs)
d_3_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(p_3) - p_obs)
d_4_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(p_4) - p_obs)
d_5_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(p_5) - p_obs)
d_6_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6) - p_obs)
d_7_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7) - p_obs)
# now we can calculate the derivatives in each dimension
dq_1 = -np.matmul(d_1_T, dA_1.dot(p_1)) + \
-np.matmul(d_2_T, dA_1.dot(A_2).dot(p_2)) + \
-np.matmul(d_3_T, dA_1.dot(A_2).dot(A_3).dot(p_3)) + \
-np.matmul(d_4_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(p_4)) + \
-np.matmul(d_5_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_2 = -np.matmul(d_2_T, A_1.dot(dA_2).dot(p_2)) + \
-np.matmul(d_3_T, A_1.dot(dA_2).dot(A_3).dot(p_3)) + \
-np.matmul(d_4_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(p_4)) + \
-np.matmul(d_5_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_3 = -np.matmul(d_3_T, A_1.dot(A_2).dot(dA_3).dot(p_3)) + \
-np.matmul(d_4_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(p_4)) + \
-np.matmul(d_5_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_4 = -np.matmul(d_4_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(p_4)) + \
-np.matmul(d_5_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_5 = -np.matmul(d_5_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(dA_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(dA_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(dA_5).dot(A_6).dot(A_7).dot(p_7))
dq_6 = -np.matmul(d_6_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(dA_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(dA_6).dot(A_7).dot(p_7))
dq_7 = -np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(dA_7).dot(p_7))
if W is None:
# default weighting vector -> based on the reciprocal of the distance. The greater the distance the smaller
# the weight. That is, it is concentrated on close objects.
W = np.array([1 / np.sum(np.square(d_1_T)),
1 / np.sum(np.square(d_2_T)) ,
1 / np.sum(np.square(d_3_T)) ,
1 / np.sum(np.square(d_4_T)) ,
1 / np.sum(np.square(d_5_T)) ,
1 / np.sum(np.square(d_6_T)) ,
1 / np.sum(np.square(d_7_T)) ]) * 0.1
# --- scaling to keep distance to joint limits ---
# get the minimum distance of each joint to its limit
joint_positions = np.array([j.get_joint_position() for j in self._robot.arm.joints])
lower_joint_limits = np.array(setup["lower_joint_pos_limit"])
upper_joint_limits = np.array(setup["upper_joint_pos_limit"])
min_j_distances = [np.minimum(u-j, j-l) for l,u,j in zip(lower_joint_limits, upper_joint_limits,
joint_positions)]
# start scaling down error when joint limit is 15° away.
# Scaling is done linearly from 0 to 1 for 0° <= d <= 15°
rad_thres = 15*(np.pi/180)
W *= np.array([ np.minimum((1/rad_thres)*d, 1.0) for d in min_j_distances])
# concatenate the derivaties to vector and apply weightig
dL = np.array([dq_1, dq_2, dq_3, dq_4, dq_5, dq_6, dq_7])*W
# calculate the loss
L = np.sqrt(np.dot(d_1_T, d_1_T))*W[0] \
+ np.sqrt(np.dot(d_2_T, d_2_T))*W[1] \
+ np.sqrt(np.dot(d_3_T, d_3_T))*W[2] \
+ np.sqrt(np.dot(d_4_T, d_4_T))*W[3] \
+ np.sqrt(np.dot(d_5_T, d_5_T))*W[4] \
+ np.sqrt(np.dot(d_6_T, d_6_T))*W[5] \
+ np.sqrt(np.dot(d_7_T, d_7_T))*W[6]
return dL, L
def enable_path_observations(self, value: bool) -> None:
if (self._action_mode.arm != ArmActionMode.DELTA_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.ABS_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.EE_POSE_PLAN_EE_FRAME):
raise RuntimeError('Only available in DELTA_EE_POSE_PLAN or '
'ABS_EE_POSE_PLAN action mode.')
self._enable_path_observations = value
def get_path_observations(self):
if (self._action_mode.arm != ArmActionMode.DELTA_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.ABS_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.EE_POSE_PLAN_EE_FRAME):
raise RuntimeError('Only available in DELTA_EE_POSE_PLAN or '
'ABS_EE_POSE_PLAN action mode.')
return self._path_observations
def get_demos(self, amount: int, live_demos: bool = False,
image_paths: bool = False,
callable_each_step: Callable[[Observation], None] = None,
max_attempts: int = _MAX_DEMO_ATTEMPTS,
) -> List[Demo]:
"""Negative means all demos"""
if not live_demos and (self._dataset_root is None
or len(self._dataset_root) == 0):
raise RuntimeError(
"Can't ask for a stored demo when no dataset root provided.")
if not live_demos:
if self._dataset_root is None or len(self._dataset_root) == 0:
raise RuntimeError(
"Can't ask for stored demo when no dataset root provided.")
demos = utils.get_stored_demos(
amount, image_paths, self._dataset_root, self._variation_number,
self._task.get_name(), self._obs_config)
else:
ctr_loop = self._robot.arm.joints[0].is_control_loop_enabled()
self._robot.arm.set_control_loop_enabled(True)
demos = self._get_live_demos(
amount, callable_each_step, max_attempts)
self._robot.arm.set_control_loop_enabled(ctr_loop)
return demos
def _get_live_demos(self, amount: int,
callable_each_step: Callable[
[Observation], None] = None,
max_attempts: int = _MAX_DEMO_ATTEMPTS) -> List[Demo]:
demos = []
for i in range(amount):
attempts = max_attempts
while attempts > 0:
random_seed = np.random.get_state()
self.reset()
logging.info('Collecting demo %d' % i)
try:
demo = self._scene.get_demo(
callable_each_step=callable_each_step)
demo.random_seed = random_seed
demos.append(demo)
break
except Exception as e:
attempts -= 1
logging.info('Bad demo. ' + str(e))
if attempts <= 0:
raise RuntimeError(
'Could not collect demos. Maybe a problem with the task?')
return demos
def reset_to_demo(self, demo: Demo) -> (List[str], Observation):
demo.restore_state()
return self.reset()
| [
"numpy.flip",
"numpy.abs",
"numpy.allclose",
"numpy.random.get_state",
"numpy.linalg.pinv",
"numpy.minimum",
"pyquaternion.Quaternion",
"numpy.square",
"pyrep.objects.Dummy.create",
"numpy.array",
"numpy.append",
"numpy.dot",
"numpy.matmul",
"numpy.linalg.norm",
"numpy.shape",
"numpy.transpose",
"logging.info"
]
| [((1721, 1735), 'pyrep.objects.Dummy.create', 'Dummy.create', ([], {}), '()\n', (1733, 1735), False, 'from pyrep.objects import Dummy, Object\n'), ((8536, 8557), 'numpy.array', 'np.array', (['action[:-1]'], {}), '(action[:-1])\n', (8544, 8557), True, 'import numpy as np\n'), ((14997, 15012), 'numpy.transpose', 'np.transpose', (['J'], {}), '(J)\n', (15009, 15012), True, 'import numpy as np\n'), ((15025, 15035), 'numpy.flip', 'np.flip', (['J'], {}), '(J)\n', (15032, 15035), True, 'import numpy as np\n'), ((15110, 15127), 'numpy.linalg.pinv', 'np.linalg.pinv', (['J'], {}), '(J)\n', (15124, 15127), True, 'import numpy as np\n'), ((18089, 18110), 'numpy.append', 'np.append', (['p_obs', '[1]'], {}), '(p_obs, [1])\n', (18098, 18110), True, 'import numpy as np\n'), ((22729, 22769), 'numpy.array', 'np.array', (["setup['lower_joint_pos_limit']"], {}), "(setup['lower_joint_pos_limit'])\n", (22737, 22769), True, 'import numpy as np\n'), ((22799, 22839), 'numpy.array', 'np.array', (["setup['upper_joint_pos_limit']"], {}), "(setup['upper_joint_pos_limit'])\n", (22807, 22839), True, 'import numpy as np\n'), ((5012, 5028), 'numpy.shape', 'np.shape', (['action'], {}), '(action)\n', (5020, 5028), True, 'import numpy as np\n'), ((5620, 5634), 'numpy.abs', 'np.abs', (['action'], {}), '(action)\n', (5626, 5634), True, 'import numpy as np\n'), ((6368, 6422), 'numpy.allclose', 'np.allclose', (['cur_positions', 'joint_positions'], {'atol': '(0.01)'}), '(cur_positions, joint_positions, atol=0.01)\n', (6379, 6422), True, 'import numpy as np\n'), ((9006, 9025), 'numpy.array', 'np.array', (['([0.0] * 7)'], {}), '([0.0] * 7)\n', (9014, 9025), True, 'import numpy as np\n'), ((15202, 15222), 'numpy.array', 'np.array', (["setup['W']"], {}), "(setup['W'])\n", (15210, 15222), True, 'import numpy as np\n'), ((17229, 17274), 'numpy.array', 'np.array', (['[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])\n', (17237, 17274), True, 'import numpy as np\n'), ((22867, 22891), 'numpy.minimum', 'np.minimum', (['(u - j)', '(j - l)'], {}), '(u - j, j - l)\n', (22877, 22891), True, 'import numpy as np\n'), ((23356, 23408), 'numpy.array', 'np.array', (['[dq_1, dq_2, dq_3, dq_4, dq_5, dq_6, dq_7]'], {}), '([dq_1, dq_2, dq_3, dq_4, dq_5, dq_6, dq_7])\n', (23364, 23408), True, 'import numpy as np\n'), ((5294, 5314), 'numpy.linalg.norm', 'np.linalg.norm', (['quat'], {}), '(quat)\n', (5308, 5314), True, 'import numpy as np\n'), ((6523, 6574), 'numpy.allclose', 'np.allclose', (['cur_positions', 'prev_values'], {'atol': '(0.001)'}), '(cur_positions, prev_values, atol=0.001)\n', (6534, 6574), True, 'import numpy as np\n'), ((16317, 16336), 'numpy.array', 'np.array', (['([0.0] * 7)'], {}), '([0.0] * 7)\n', (16325, 16336), True, 'import numpy as np\n'), ((17363, 17379), 'numpy.dot', 'np.dot', (['e', '(e * W)'], {}), '(e, e * W)\n', (17369, 17379), True, 'import numpy as np\n'), ((17954, 17976), 'numpy.array', 'np.array', (['[0, 0, 0.33]'], {}), '([0, 0, 0.33])\n', (17962, 17976), True, 'import numpy as np\n'), ((23216, 23250), 'numpy.minimum', 'np.minimum', (['(1 / rad_thres * d)', '(1.0)'], {}), '(1 / rad_thres * d, 1.0)\n', (23226, 23250), True, 'import numpy as np\n'), ((26437, 26458), 'numpy.random.get_state', 'np.random.get_state', ([], {}), '()\n', (26456, 26458), True, 'import numpy as np\n'), ((26504, 26542), 'logging.info', 'logging.info', (["('Collecting demo %d' % i)"], {}), "('Collecting demo %d' % i)\n", (26516, 26542), False, 'import logging\n'), ((15838, 15858), 'numpy.matmul', 'np.matmul', 
(['J_plus', 'J'], {}), '(J_plus, J)\n', (15847, 15858), True, 'import numpy as np\n'), ((16180, 16204), 'numpy.abs', 'np.abs', (['(e - self._last_e)'], {}), '(e - self._last_e)\n', (16186, 16204), True, 'import numpy as np\n'), ((23761, 23781), 'numpy.dot', 'np.dot', (['d_7_T', 'd_7_T'], {}), '(d_7_T, d_7_T)\n', (23767, 23781), True, 'import numpy as np\n'), ((23711, 23731), 'numpy.dot', 'np.dot', (['d_6_T', 'd_6_T'], {}), '(d_6_T, d_6_T)\n', (23717, 23731), True, 'import numpy as np\n'), ((5202, 5218), 'numpy.shape', 'np.shape', (['action'], {}), '(action)\n', (5210, 5218), True, 'import numpy as np\n'), ((23661, 23681), 'numpy.dot', 'np.dot', (['d_5_T', 'd_5_T'], {}), '(d_5_T, d_5_T)\n', (23667, 23681), True, 'import numpy as np\n'), ((22117, 22133), 'numpy.square', 'np.square', (['d_1_T'], {}), '(d_1_T)\n', (22126, 22133), True, 'import numpy as np\n'), ((22173, 22189), 'numpy.square', 'np.square', (['d_2_T'], {}), '(d_2_T)\n', (22182, 22189), True, 'import numpy as np\n'), ((22230, 22246), 'numpy.square', 'np.square', (['d_3_T'], {}), '(d_3_T)\n', (22239, 22246), True, 'import numpy as np\n'), ((22287, 22303), 'numpy.square', 'np.square', (['d_4_T'], {}), '(d_4_T)\n', (22296, 22303), True, 'import numpy as np\n'), ((22344, 22360), 'numpy.square', 'np.square', (['d_5_T'], {}), '(d_5_T)\n', (22353, 22360), True, 'import numpy as np\n'), ((22401, 22417), 'numpy.square', 'np.square', (['d_6_T'], {}), '(d_6_T)\n', (22410, 22417), True, 'import numpy as np\n'), ((22458, 22474), 'numpy.square', 'np.square', (['d_7_T'], {}), '(d_7_T)\n', (22467, 22474), True, 'import numpy as np\n'), ((23611, 23631), 'numpy.dot', 'np.dot', (['d_4_T', 'd_4_T'], {}), '(d_4_T, d_4_T)\n', (23617, 23631), True, 'import numpy as np\n'), ((23561, 23581), 'numpy.dot', 'np.dot', (['d_3_T', 'd_3_T'], {}), '(d_3_T, d_3_T)\n', (23567, 23581), True, 'import numpy as np\n'), ((23461, 23481), 'numpy.dot', 'np.dot', (['d_1_T', 'd_1_T'], {}), '(d_1_T, d_1_T)\n', (23467, 23481), True, 'import numpy as np\n'), ((23511, 23531), 'numpy.dot', 'np.dot', (['d_2_T', 'd_2_T'], {}), '(d_2_T, d_2_T)\n', (23517, 23531), True, 'import numpy as np\n'), ((11795, 11829), 'pyquaternion.Quaternion', 'Quaternion', (['a_qw', 'a_qx', 'a_qy', 'a_qz'], {}), '(a_qw, a_qx, a_qy, a_qz)\n', (11805, 11829), False, 'from pyquaternion import Quaternion\n'), ((11832, 11858), 'pyquaternion.Quaternion', 'Quaternion', (['qw', 'qx', 'qy', 'qz'], {}), '(qw, qx, qy, qz)\n', (11842, 11858), False, 'from pyquaternion import Quaternion\n'), ((12452, 12486), 'pyquaternion.Quaternion', 'Quaternion', (['a_qw', 'a_qx', 'a_qy', 'a_qz'], {}), '(a_qw, a_qx, a_qy, a_qz)\n', (12462, 12486), False, 'from pyquaternion import Quaternion\n'), ((12489, 12515), 'pyquaternion.Quaternion', 'Quaternion', (['qw', 'qx', 'qy', 'qz'], {}), '(qw, qx, qy, qz)\n', (12499, 12515), False, 'from pyquaternion import Quaternion\n')] |
from django import forms
from django.contrib.contenttypes.forms import generic_inlineformset_factory
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.test import TestCase
from django.test.utils import isolate_apps
from .models import (
Animal, ForProxyModelModel, Gecko, Mineral, ProxyRelatedModel, TaggedItem,
)
class CustomWidget(forms.TextInput):
pass
class TaggedItemForm(forms.ModelForm):
class Meta:
model = TaggedItem
fields = '__all__'
widgets = {'tag': CustomWidget}
class GenericInlineFormsetTests(TestCase):
def test_output(self):
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
formset = GenericFormSet()
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">
Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-0-tag" type="text"
name="generic_relations-taggeditem-content_type-object_id-0-tag" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">
<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id"
id="id_generic_relations-taggeditem-content_type-object_id-0-id"></p>"""
)
formset = GenericFormSet(instance=Animal())
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">
Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-0-tag"
type="text" name="generic_relations-taggeditem-content_type-object_id-0-tag" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE"><input type="hidden"
name="generic_relations-taggeditem-content_type-object_id-0-id"
id="id_generic_relations-taggeditem-content_type-object_id-0-id"></p>"""
)
platypus = Animal.objects.create(
common_name='Platypus', latin_name='Ornithorhynchus anatinus',
)
platypus.tags.create(tag='shiny')
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
formset = GenericFormSet(instance=platypus)
tagged_item_id = TaggedItem.objects.get(tag='shiny', object_id=platypus.id).id
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">Tag:</label>
<input id="id_generic_relations-taggeditem-content_type-object_id-0-tag" type="text"
name="generic_relations-taggeditem-content_type-object_id-0-tag" value="shiny" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">
<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id"
value="%s" id="id_generic_relations-taggeditem-content_type-object_id-0-id"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-1-tag">Tag:</label>
<input id="id_generic_relations-taggeditem-content_type-object_id-1-tag" type="text"
name="generic_relations-taggeditem-content_type-object_id-1-tag" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-1-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-1-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-1-DELETE">
<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-1-id"
id="id_generic_relations-taggeditem-content_type-object_id-1-id"></p>""" % tagged_item_id
)
lion = Animal.objects.create(common_name='Lion', latin_name='Panthera leo')
formset = GenericFormSet(instance=lion, prefix='x')
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_x-0-tag">Tag:</label>
<input id="id_x-0-tag" type="text" name="x-0-tag" maxlength="50"></p>
<p><label for="id_x-0-DELETE">Delete:</label> <input type="checkbox" name="x-0-DELETE" id="id_x-0-DELETE">
<input type="hidden" name="x-0-id" id="id_x-0-id"></p>"""
)
def test_options(self):
TaggedItemFormSet = generic_inlineformset_factory(
TaggedItem,
can_delete=False,
exclude=['tag'],
extra=3,
)
platypus = Animal.objects.create(common_name='Platypus', latin_name='Ornithorhynchus anatinus')
harmless = platypus.tags.create(tag='harmless')
mammal = platypus.tags.create(tag='mammal')
# Works without a queryset.
formset = TaggedItemFormSet(instance=platypus)
self.assertEqual(len(formset.forms), 5)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id" value="%s" '
'id="id_generic_relations-taggeditem-content_type-object_id-0-id">' % harmless.pk
)
self.assertEqual(formset.forms[0].instance, harmless)
self.assertEqual(formset.forms[1].instance, mammal)
self.assertIsNone(formset.forms[2].instance.pk)
# A queryset can be used to alter display ordering.
formset = TaggedItemFormSet(instance=platypus, queryset=TaggedItem.objects.order_by('-tag'))
self.assertEqual(len(formset.forms), 5)
self.assertEqual(formset.forms[0].instance, mammal)
self.assertEqual(formset.forms[1].instance, harmless)
self.assertIsNone(formset.forms[2].instance.pk)
# A queryset that omits items.
formset = TaggedItemFormSet(instance=platypus, queryset=TaggedItem.objects.filter(tag__startswith='harm'))
self.assertEqual(len(formset.forms), 4)
self.assertEqual(formset.forms[0].instance, harmless)
self.assertIsNone(formset.forms[1].instance.pk)
def test_get_queryset_ordering(self):
"""
BaseGenericInlineFormSet.get_queryset() adds default ordering, if
needed.
"""
inline_formset = generic_inlineformset_factory(TaggedItem, exclude=('tag',))
formset = inline_formset(instance=Gecko.objects.create())
self.assertIs(formset.get_queryset().ordered, True)
def test_initial(self):
quartz = Mineral.objects.create(name='Quartz', hardness=7)
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
ctype = ContentType.objects.get_for_model(quartz)
initial_data = [{
'tag': 'lizard',
'content_type': ctype.pk,
'object_id': quartz.pk,
}]
formset = GenericFormSet(initial=initial_data)
self.assertEqual(formset.forms[0].initial, initial_data[0])
def test_meta_widgets(self):
"""TaggedItemForm has a widget defined in Meta."""
Formset = generic_inlineformset_factory(TaggedItem, TaggedItemForm)
form = Formset().forms[0]
self.assertIsInstance(form['tag'].field.widget, CustomWidget)
@isolate_apps('generic_relations')
def test_incorrect_content_type(self):
class BadModel(models.Model):
content_type = models.PositiveIntegerField()
msg = "fk_name 'generic_relations.BadModel.content_type' is not a ForeignKey to ContentType"
with self.assertRaisesMessage(Exception, msg):
generic_inlineformset_factory(BadModel, TaggedItemForm)
def test_save_new_uses_form_save(self):
class SaveTestForm(forms.ModelForm):
def save(self, *args, **kwargs):
self.instance.saved_by = 'custom method'
return super().save(*args, **kwargs)
Formset = generic_inlineformset_factory(ForProxyModelModel, fields='__all__', form=SaveTestForm)
instance = ProxyRelatedModel.objects.create()
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
}
formset = Formset(data, instance=instance, prefix='form')
self.assertTrue(formset.is_valid())
new_obj = formset.save()[0]
self.assertEqual(new_obj.saved_by, 'custom method')
def test_save_new_for_proxy(self):
Formset = generic_inlineformset_factory(ForProxyModelModel, fields='__all__', for_concrete_model=False)
instance = ProxyRelatedModel.objects.create()
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
}
formset = Formset(data, instance=instance, prefix='form')
self.assertTrue(formset.is_valid())
new_obj, = formset.save()
self.assertEqual(new_obj.obj, instance)
def test_save_new_for_concrete(self):
Formset = generic_inlineformset_factory(ForProxyModelModel, fields='__all__', for_concrete_model=True)
instance = ProxyRelatedModel.objects.create()
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
}
formset = Formset(data, instance=instance, prefix='form')
self.assertTrue(formset.is_valid())
new_obj, = formset.save()
self.assertNotIsInstance(new_obj.obj, ProxyRelatedModel)
def test_initial_count(self):
GenericFormSet = generic_inlineformset_factory(TaggedItem)
data = {
'form-TOTAL_FORMS': '3',
'form-INITIAL_FORMS': '3',
'form-MAX_NUM_FORMS': '',
}
formset = GenericFormSet(data=data, prefix='form')
self.assertEqual(formset.initial_form_count(), 3)
formset = GenericFormSet(data=data, prefix='form', save_as_new=True)
self.assertEqual(formset.initial_form_count(), 0)
def test_save_as_new(self):
"""
The save_as_new parameter creates new items that are associated with
the object.
"""
lion = Animal.objects.create(common_name='Lion', latin_name='Panthera leo')
yellow = lion.tags.create(tag='yellow')
hairy = lion.tags.create(tag='hairy')
GenericFormSet = generic_inlineformset_factory(TaggedItem)
data = {
'form-TOTAL_FORMS': '3',
'form-INITIAL_FORMS': '2',
'form-MAX_NUM_FORMS': '',
'form-0-id': str(yellow.pk),
'form-0-tag': 'hunts',
'form-1-id': str(hairy.pk),
'form-1-tag': 'roars',
}
formset = GenericFormSet(data, instance=lion, prefix='form', save_as_new=True)
self.assertTrue(formset.is_valid())
tags = formset.save()
self.assertEqual([tag.tag for tag in tags], ['hunts', 'roars'])
hunts, roars = tags
self.assertSequenceEqual(lion.tags.order_by('tag'), [hairy, hunts, roars, yellow])
| [
"django.db.models.PositiveIntegerField",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"django.test.utils.isolate_apps",
"django.contrib.contenttypes.forms.generic_inlineformset_factory"
]
| [((7814, 7847), 'django.test.utils.isolate_apps', 'isolate_apps', (['"""generic_relations"""'], {}), "('generic_relations')\n", (7826, 7847), False, 'from django.test.utils import isolate_apps\n'), ((692, 742), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'extra': '(1)'}), '(TaggedItem, extra=1)\n', (721, 742), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((2634, 2684), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'extra': '(1)'}), '(TaggedItem, extra=1)\n', (2663, 2684), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((4952, 5041), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'can_delete': '(False)', 'exclude': "['tag']", 'extra': '(3)'}), "(TaggedItem, can_delete=False, exclude=['tag'],\n extra=3)\n", (4981, 5041), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((6834, 6893), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'exclude': "('tag',)"}), "(TaggedItem, exclude=('tag',))\n", (6863, 6893), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((7147, 7197), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'extra': '(1)'}), '(TaggedItem, extra=1)\n', (7176, 7197), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((7215, 7256), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['quartz'], {}), '(quartz)\n', (7248, 7256), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((7642, 7699), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem', 'TaggedItemForm'], {}), '(TaggedItem, TaggedItemForm)\n', (7671, 7699), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((8490, 8581), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['ForProxyModelModel'], {'fields': '"""__all__"""', 'form': 'SaveTestForm'}), "(ForProxyModelModel, fields='__all__', form=\n SaveTestForm)\n", (8519, 8581), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((9085, 9182), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['ForProxyModelModel'], {'fields': '"""__all__"""', 'for_concrete_model': '(False)'}), "(ForProxyModelModel, fields='__all__',\n for_concrete_model=False)\n", (9114, 9182), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((9676, 9772), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['ForProxyModelModel'], {'fields': '"""__all__"""', 'for_concrete_model': '(True)'}), "(ForProxyModelModel, fields='__all__',\n for_concrete_model=True)\n", (9705, 9772), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((10282, 10323), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {}), '(TaggedItem)\n', (10311, 10323), False, 'from 
django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((11093, 11134), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {}), '(TaggedItem)\n', (11122, 11134), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((7959, 7988), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (7986, 7988), False, 'from django.db import models\n'), ((8162, 8217), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['BadModel', 'TaggedItemForm'], {}), '(BadModel, TaggedItemForm)\n', (8191, 8217), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2017, <NAME> <<EMAIL>>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from . import SubCommand
__all__ = ['DemoCommand']
class DemoCommand(SubCommand):
'''Create demo directory.
'''
name = "demo"
def run(self, *args, **kwargs):
import os
from os import path
print('''
Jug will create a directory called 'jug-demo/' with a file called 'primes.py'
inside.
You can test jug by switching to that directory and running the commands:
jug status primes.py
followed by
jug execute primes.py
Upon termination of the process, results will be in a file called 'output.txt'.
PARALLEL USAGE
You can speed up the process by running several 'jug execute' in parallel:
jug execute primes.py &
jug execute primes.py &
jug execute primes.py &
jug execute primes.py &
TROUBLESHOOTING:
Should you run into issues, you can run the internal tests for jug with
jug test-jug
FURTHER READING
The online documentation contains further reading. You can read the next
tutorial here:
http://jug.readthedocs.io/en/latest/decrypt-example.html
''')
if path.exists('jug-demo'):
print("Jug-demo previously created")
return
os.mkdir('jug-demo')
output = open('jug-demo/primes.py', 'wt')
output.write(r'''
from time import sleep
from jug import TaskGenerator
@TaskGenerator
def is_prime(n):
sleep(1.)
for j in range(2, n - 1):
if (n % j) == 0:
return False
return True
@TaskGenerator
def count_primes(ps):
return sum(ps)
@TaskGenerator
def write_output(n):
output = open('output.txt', 'wt')
output.write("Found {0} primes <= 100.\n".format(n))
output.close()
primes100 = []
for n in range(2, 101):
primes100.append(is_prime(n))
n_primes = count_primes(primes100)
write_output(n_primes)
''')
output.close()
demo = DemoCommand()
| [
"os.path.exists",
"os.mkdir"
]
| [((2249, 2272), 'os.path.exists', 'path.exists', (['"""jug-demo"""'], {}), "('jug-demo')\n", (2260, 2272), False, 'from os import path\n'), ((2350, 2370), 'os.mkdir', 'os.mkdir', (['"""jug-demo"""'], {}), "('jug-demo')\n", (2358, 2370), False, 'import os\n')] |
#!/usr/bin/env python
# ROS Libraries
import actionlib
from actionlib_msgs.msg import GoalStatus
from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal
from kuri_wandering_robot.msg import Power
from wandering_behavior.msg import WanderAction, WanderGoal
import rospy
from sensor_msgs.msg import CompressedImage
from std_msgs.msg import Empty
from trajectory_msgs.msg import JointTrajectoryPoint
# Python Default Libraries
import base64
import csv
from enum import Enum
import os
import requests
import threading
import time
import traceback
# Custom Libraries
from sent_messages_database import SentMessagesDatabase
class KuriWanderingRobotState(Enum):
"""
During NORMAL, the base moves according to wandering_behavior.
During CHARGING, the robot's eyes are closed and it is charging. The robot
transitions from NORMAL to CHARGING if its battery is below a threshold and
it is on the charger. It transitions from CHARGING to NORMAL if it's battery
is above a threshold or it is off the charger.
"""
NORMAL = 1
CHARGING = 2
class KuriWanderingRobot(object):
"""
The central executive node. This node runs a control loop that manages the
robot's state: turning on and monitoring progress of the wandering module
in NORMAL, turning off wandering in CHARGING, and switching back to NORMAL
when the robot is sufficiently charged.
This node also runs anomaly detection to detect low battery; when it detects
low battery, it sends a low battery request to the Slackbot, which then
sends it to the helpers. This node can be extended with additional anomaly
detection and help requests, as needed. This node also subscribes to a dummy
`where_am_i_help` topic, which sends helpers the sample `where_am_i` help
    message. Note that it is only in place to illustrate the sample
    `where_am_i` help message; actually using it would require developing
a custom anomaly detection system to trigger the robot asking for that type
of help.
Finally, this node has a separate thread that continually queries the
Slackbot for responses to its help requests.
"""
def __init__(self):
"""
Initialize an instance of the KuriWanderingRobot class
"""
self.has_loaded = False
# Get the Slackbot URL
self.slackbot_url = rospy.get_param('~slackbot_url')
# Initialize the state.
self.state_lock = threading.Lock()
self.state_changed = True
self.state = KuriWanderingRobotState.NORMAL
# Initialize the wandering module
self.wandering_module_action = actionlib.SimpleActionClient('/wandering_behavior/navigate', WanderAction)
# Initialize the eye controller
self.eyelid_controller_action = actionlib.SimpleActionClient('/eyelids_controller/follow_joint_trajectory', FollowJointTrajectoryAction)
self.eye_closed_position = 0.41
self.eye_open_position = 0.0
# Initialize the camera
self.img_sub = rospy.Subscriber(
'/upward_looking_camera/compressed', CompressedImage, self.image_callback, queue_size=1)
self.latest_image = None
self.latest_image_lock = threading.Lock()
# Initialize low battery anomaly detector
self.battery_sub = rospy.Subscriber(
"/mobile_base/power", Power, self.power_callback, queue_size=1)
self.previous_battery_lock = threading.Lock()
self.previous_battery = None
self.previous_dock_present = None
self.battery_notification_thresholds = rospy.get_param('~battery_notification_thresholds', [40, 20, 10, 5, 4, 3, 2, 1])
# if the battery is less than this and Kuri is docked, charge
self.to_charge_threshold = rospy.get_param('~to_charge_threshold', 50)
        # if the battery is greater than this and Kuri is charging, switch back to NORMAL
self.charging_done_threshold = rospy.get_param('~charging_done_threshold', 90)
# Whether the low battery message should include Kuri's current camera image
self.low_battery_message_include_image = rospy.get_param('~low_battery_message_include_image', True)
# Initialize the dummy `where_am_i` anomaly detector
self.where_am_i_help_sub = rospy.Subscriber(
"/where_am_i_help", Empty, self.where_am_i_help_callback, queue_size=1)
# Initialize storing images and message IDs
self.sent_messages_database_filepath = rospy.get_param('~send_messages_database_filepath')
self.sent_messages_database = SentMessagesDatabase.load(
self.sent_messages_database_filepath)
self.database_save_interval = 1
self.database_updates_since_last_save = 0
# Initialize the head controller
self.head_state_sub = rospy.Subscriber(
"/head_controller/state", JointTrajectoryControllerState, self.head_state_callback, queue_size=1)
self.head_controller_action = actionlib.SimpleActionClient('/head_controller/follow_joint_trajectory', FollowJointTrajectoryAction)
self.head_tilt_speed = 0.2 # head tilt is in [-0.8, 0.3]
self.head_pan_speed = 0.2 # head pan is in [-0.75, 0.75]
# Initialize the Slackbot updates thread
self.slackbot_responses_thread = threading.Thread(
target=self.get_slackbot_updates,
)
self.slackbot_responses_thread.start()
# Initialize the state machine
self.state_machine_thread = threading.Thread(
target=self.state_machine_control_loop,
)
self.state_machine_thread.start()
self.has_centered_head = False
self.has_loaded = True
def database_updated(self, num_updates=1):
"""
Called everytime the database is updated. Saves the database every
self.database_save_interval updates
"""
self.database_updates_since_last_save += num_updates
if self.database_updates_since_last_save % self.database_save_interval == 0:
self.sent_messages_database.save(self.sent_messages_database_filepath)
rospy.logdebug("Saved sent_messages_database!")
def open_eyes(self, duration_secs=0.2):
"""
Open the robot's eyes
"""
rospy.logdebug("Open Eyes")
duration = rospy.Duration.from_sec(duration_secs)
goal = FollowJointTrajectoryGoal()
goal.trajectory.header.stamp = rospy.Time.now()
goal.trajectory.joint_names = ["eyelids_joint"]
point = JointTrajectoryPoint()
point.positions = [self.eye_open_position]
point.velocities = []
point.accelerations = []
point.effort = []
point.time_from_start = duration
goal.trajectory.points = [point]
# Send the goal
self.eyelid_controller_action.wait_for_server()
self.eyelid_controller_action.send_goal(goal)
self.eyelid_controller_action.wait_for_result(duration)
def close_eyes(self, duration_secs=0.2):
"""
Close the robot's eyes
"""
rospy.logdebug("Close Eyes")
duration = rospy.Duration.from_sec(duration_secs)
goal = FollowJointTrajectoryGoal()
goal.trajectory.header.stamp = rospy.Time.now()
goal.trajectory.joint_names = ["eyelids_joint"]
point = JointTrajectoryPoint()
point.positions = [self.eye_closed_position]
point.velocities = []
point.accelerations = []
point.effort = []
point.time_from_start = duration
goal.trajectory.points = [point]
# Send the goal
self.eyelid_controller_action.wait_for_server()
self.eyelid_controller_action.send_goal(goal)
self.eyelid_controller_action.wait_for_result(duration)
def head_state_callback(self, head_state_msg):
"""
Get the head's current position
"""
if not self.has_loaded:
return
if not self.has_centered_head:
self.center_head(head_state_msg.actual.positions[0], head_state_msg.actual.positions[1])
def center_head(self, current_pan, current_tilt):
"""
Center Kuri's head. This involves moving from the current pan and tilt
to the centered values of (0.0, -0.3)
"""
pan_endpoint = 0.0
tilt_endpoint = -0.3
n_waypoints = 10
# Compute the actual endpoint and duration_secs
duration_secs = max(
abs(pan_endpoint-current_pan)/self.head_pan_speed,
abs(tilt_endpoint-current_tilt)/self.head_tilt_speed)
duration = rospy.Duration.from_sec(duration_secs)
# Create the goal
goal = FollowJointTrajectoryGoal()
goal.trajectory.header.stamp = rospy.Time.now()
goal.trajectory.joint_names = ["head_1_joint", "head_2_joint"]
goal.trajectory.points = []
pan_interval = (pan_endpoint-current_pan)/(n_waypoints-1)
tilt_interval = (tilt_endpoint-current_tilt)/(n_waypoints-1)
time_interval = duration/n_waypoints
for i in range(n_waypoints):
point = JointTrajectoryPoint()
point.positions = [current_pan + i*pan_interval, current_tilt + i*tilt_interval]
point.velocities = []
point.accelerations = []
point.effort = []
point.time_from_start = (i+1)*time_interval
goal.trajectory.points.append(point)
# Send the goal
self.head_controller_action.wait_for_server()
self.head_controller_action.send_goal(goal)
self.head_controller_action.wait_for_result(duration)
self.has_centered_head = True
def state_machine_control_loop(self, rate_hz=10):
"""
The control loop for the state machine. All of the state machine logic
is handled in this function and the functions it calls.
During NORMAL, the base moves according to wandering_behavior.
During CHARGING, the robot's eyes are closed and it is charging. The
robot transitions from NORMAL to CHARGING if its battery is below a
threshold and it is on the charger. It transitions from CHARGING to
NORMAL if it's battery is above a threshold or it is off the charger.
"""
rate = rospy.Rate(rate_hz)
while not rospy.is_shutdown():
rate.sleep()
with self.state_lock:
state_at_start_of_loop = self.state
if (self.state == KuriWanderingRobotState.NORMAL):
goal_state = self.wandering_module_action.get_state()
if (self.state_changed or goal_state == GoalStatus.ABORTED or goal_state == GoalStatus.SUCCEEDED):
rospy.logdebug("Waiting for wandering_module_action server")
self.wandering_module_action.wait_for_server()
rospy.logdebug("Sending goal to wandering_module_action")
# Effort -1 means "don't stop unless preempted"
self.wandering_module_action.send_goal(WanderGoal(effort=-1))
self.open_eyes()
with self.previous_battery_lock:
if (self.previous_battery is not None and self.previous_battery < self.to_charge_threshold and self.previous_dock_present):
self.close_eyes()
self.state = KuriWanderingRobotState.CHARGING
self.wandering_module_action.cancel_all_goals()
rospy.loginfo("State: NORMAL ==> CHARGING")
elif self.state == KuriWanderingRobotState.CHARGING:
with self.previous_battery_lock:
if (self.previous_battery is None or not self.previous_dock_present or self.previous_battery >= self.charging_done_threshold):
self.state = KuriWanderingRobotState.NORMAL
rospy.loginfo("State: CHARGING ==> NORMAL")
state_at_end_of_loop = self.state
self.state_changed = (state_at_start_of_loop != state_at_end_of_loop)
def image_callback(self, img_msg):
"""
Store the latest image.
"""
if not self.has_loaded: return
with self.latest_image_lock:
self.latest_image = img_msg
def power_callback(self, msg):
"""
        Callback function for Kuri's power update. If Kuri's battery has crossed
a battery_notification_threshold, notify the Slackbot.
"""
if not self.has_loaded: return
with self.state_lock:
with self.previous_battery_lock:
self.previous_dock_present = msg.dock_present
if self.state == KuriWanderingRobotState.CHARGING:
self.previous_battery = msg.battery.pct
else:
update_previous_battery = True
if msg.battery.pct <= self.battery_notification_thresholds[0]:
# Send the low-battery helper notifications when the battery
# crosses the thresholds defined in self.battery_notification_thresholds
for i in range(len(self.battery_notification_thresholds)):
if (self.previous_battery is None or (self.previous_battery > self.battery_notification_thresholds[i]) and msg.battery.pct <= self.battery_notification_thresholds[i]):
try:
# Send a low_battery_alert
dict_to_send = {'battery_pct':msg.battery.pct}
if self.low_battery_message_include_image:
with self.latest_image_lock:
if self.latest_image is not None:
image_contents = base64.b64encode(bytearray(self.latest_image.data)).decode('ascii')
dict_to_send['image'] = image_contents
rospy.loginfo("Sending battery request for pct %s" % msg.battery.pct)
res = requests.post(
os.path.join(self.slackbot_url, 'low_battery'),
json=dict_to_send,
)
res_json = res.json()
if not res_json['success']:
update_previous_battery = False
except Exception as e:
rospy.logwarn("Error communicating with Slackbot /low_battery at URL %s." % self.slackbot_url)
if "res" in locals():
rospy.logwarn("Response text %s." % res.text)
rospy.logwarn(traceback.format_exc())
rospy.logwarn("Error %s." % e)
update_previous_battery = False
break
if (update_previous_battery and (self.previous_battery is None or msg.battery.pct < self.previous_battery)):
self.previous_battery = msg.battery.pct
def where_am_i_help_callback(self, msg):
"""
A dummy callback that triggers sending a where_am_i help message to the
Slackbot. This is merely intended to showcase some of the Slackbot's
capabilities. Users who want a robot that autonomously asks the human to
tell it where it is should implement their own anomaly detection system
for triggering this help request.
"""
with self.latest_image_lock:
if self.latest_image is None:
rospy.loginfo("Attempted to send where_am_i help request but have no image.")
return
try:
# Send a low_battery_alert
rospy.loginfo("Sending where_am_i help request")
with self.latest_image_lock:
image_contents = base64.b64encode(bytearray(self.latest_image.data)).decode('ascii')
res = requests.post(
os.path.join(self.slackbot_url, 'where_am_i'),
json={'image':image_contents, 'options':['Lounge', "Office#252", "200 Corridoor", "Atrium"]},
)
res_json = res.json()
message_id = res_json['message_id']
self.sent_messages_database.add_respondable_message(message_id)
self.database_updated()
except Exception as e:
rospy.logwarn("Error communicating with Slackbot /where_am_i at URL %s." % self.slackbot_url)
if "res" in locals():
rospy.logwarn("Response text %s." % res.text)
rospy.logwarn(traceback.format_exc())
rospy.logwarn("Error %s." % e)
def get_slackbot_updates(self, refresh_secs=5.0):
"""
Once every refresh_secs seconds, request updates (e.g., human responses)
from the Slackbot. Note that you can optionally request updates for
        particular message_ids (e.g., those that have not received responses yet)
"""
r = rospy.Rate(1.0/refresh_secs)
while not rospy.is_shutdown():
if not self.has_loaded: r.sleep()
try:
message_ids_and_action_ts = self.sent_messages_database.get_message_ids_and_latest_action_ts()
# Request responses for those message_ids
res = requests.post(
os.path.join(self.slackbot_url, 'get_updates'),
json={'message_ids_and_action_ts':message_ids_and_action_ts},
)
res_json = res.json()
rospy.logdebug("Got updates from Slackbot %s" % res_json)
message_id_to_responses = res_json["message_id_to_responses"]
if len(message_id_to_responses) > 0:
num_updates = 0
# Insert reactions into the database
for message_id in message_id_to_responses:
for action_ts, response in message_id_to_responses[message_id]:
rospy.loginfo("Got reaction %s from at ts %s" % (response, action_ts))
self.sent_messages_database.add_user_response(message_id, action_ts, response)
num_updates += 1
self.database_updated(num_updates)
except Exception as e:
rospy.logwarn("Error communicating with Slackbot /get_updates at URL %s." % self.slackbot_url)
if "res" in locals():
rospy.logwarn("Response text %s." % res.text)
rospy.logwarn(traceback.format_exc())
rospy.logwarn("Error %s." % e)
r.sleep()
if __name__ == "__main__":
rospy.init_node("kuri_wandering_robot")
kuri_wandering_robot = KuriWanderingRobot()
rospy.spin()
| [
"rospy.logwarn",
"rospy.init_node",
"rospy.Rate",
"threading.Lock",
"wandering_behavior.msg.WanderGoal",
"rospy.Duration.from_sec",
"rospy.spin",
"rospy.Subscriber",
"sent_messages_database.SentMessagesDatabase.load",
"actionlib.SimpleActionClient",
"rospy.get_param",
"rospy.Time.now",
"rospy.logdebug",
"rospy.loginfo",
"traceback.format_exc",
"control_msgs.msg.FollowJointTrajectoryGoal",
"rospy.is_shutdown",
"os.path.join",
"threading.Thread",
"trajectory_msgs.msg.JointTrajectoryPoint"
]
| [((19163, 19202), 'rospy.init_node', 'rospy.init_node', (['"""kuri_wandering_robot"""'], {}), "('kuri_wandering_robot')\n", (19178, 19202), False, 'import rospy\n'), ((19257, 19269), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (19267, 19269), False, 'import rospy\n'), ((2419, 2451), 'rospy.get_param', 'rospy.get_param', (['"""~slackbot_url"""'], {}), "('~slackbot_url')\n", (2434, 2451), False, 'import rospy\n'), ((2511, 2527), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2525, 2527), False, 'import threading\n'), ((2696, 2770), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['"""/wandering_behavior/navigate"""', 'WanderAction'], {}), "('/wandering_behavior/navigate', WanderAction)\n", (2724, 2770), False, 'import actionlib\n'), ((2852, 2960), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['"""/eyelids_controller/follow_joint_trajectory"""', 'FollowJointTrajectoryAction'], {}), "('/eyelids_controller/follow_joint_trajectory',\n FollowJointTrajectoryAction)\n", (2880, 2960), False, 'import actionlib\n'), ((3090, 3200), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/upward_looking_camera/compressed"""', 'CompressedImage', 'self.image_callback'], {'queue_size': '(1)'}), "('/upward_looking_camera/compressed', CompressedImage, self\n .image_callback, queue_size=1)\n", (3106, 3200), False, 'import rospy\n'), ((3275, 3291), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (3289, 3291), False, 'import threading\n'), ((3370, 3455), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/mobile_base/power"""', 'Power', 'self.power_callback'], {'queue_size': '(1)'}), "('/mobile_base/power', Power, self.power_callback, queue_size=1\n )\n", (3386, 3455), False, 'import rospy\n'), ((3501, 3517), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (3515, 3517), False, 'import threading\n'), ((3644, 3729), 'rospy.get_param', 'rospy.get_param', (['"""~battery_notification_thresholds"""', '[40, 20, 10, 5, 4, 3, 2, 1]'], {}), "('~battery_notification_thresholds', [40, 20, 10, 5, 4, 3, 2, 1]\n )\n", (3659, 3729), False, 'import rospy\n'), ((3830, 3873), 'rospy.get_param', 'rospy.get_param', (['"""~to_charge_threshold"""', '(50)'], {}), "('~to_charge_threshold', 50)\n", (3845, 3873), False, 'import rospy\n'), ((4002, 4049), 'rospy.get_param', 'rospy.get_param', (['"""~charging_done_threshold"""', '(90)'], {}), "('~charging_done_threshold', 90)\n", (4017, 4049), False, 'import rospy\n'), ((4184, 4243), 'rospy.get_param', 'rospy.get_param', (['"""~low_battery_message_include_image"""', '(True)'], {}), "('~low_battery_message_include_image', True)\n", (4199, 4243), False, 'import rospy\n'), ((4341, 4433), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/where_am_i_help"""', 'Empty', 'self.where_am_i_help_callback'], {'queue_size': '(1)'}), "('/where_am_i_help', Empty, self.where_am_i_help_callback,\n queue_size=1)\n", (4357, 4433), False, 'import rospy\n'), ((4543, 4594), 'rospy.get_param', 'rospy.get_param', (['"""~send_messages_database_filepath"""'], {}), "('~send_messages_database_filepath')\n", (4558, 4594), False, 'import rospy\n'), ((4633, 4696), 'sent_messages_database.SentMessagesDatabase.load', 'SentMessagesDatabase.load', (['self.sent_messages_database_filepath'], {}), '(self.sent_messages_database_filepath)\n', (4658, 4696), False, 'from sent_messages_database import SentMessagesDatabase\n'), ((4872, 4990), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/head_controller/state"""', 'JointTrajectoryControllerState', 'self.head_state_callback'], {'queue_size': 
'(1)'}), "('/head_controller/state', JointTrajectoryControllerState,\n self.head_state_callback, queue_size=1)\n", (4888, 4990), False, 'import rospy\n'), ((5038, 5143), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['"""/head_controller/follow_joint_trajectory"""', 'FollowJointTrajectoryAction'], {}), "('/head_controller/follow_joint_trajectory',\n FollowJointTrajectoryAction)\n", (5066, 5143), False, 'import actionlib\n'), ((5361, 5411), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.get_slackbot_updates'}), '(target=self.get_slackbot_updates)\n', (5377, 5411), False, 'import threading\n'), ((5558, 5614), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.state_machine_control_loop'}), '(target=self.state_machine_control_loop)\n', (5574, 5614), False, 'import threading\n'), ((6338, 6365), 'rospy.logdebug', 'rospy.logdebug', (['"""Open Eyes"""'], {}), "('Open Eyes')\n", (6352, 6365), False, 'import rospy\n'), ((6385, 6423), 'rospy.Duration.from_sec', 'rospy.Duration.from_sec', (['duration_secs'], {}), '(duration_secs)\n', (6408, 6423), False, 'import rospy\n'), ((6439, 6466), 'control_msgs.msg.FollowJointTrajectoryGoal', 'FollowJointTrajectoryGoal', ([], {}), '()\n', (6464, 6466), False, 'from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal\n'), ((6506, 6522), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (6520, 6522), False, 'import rospy\n'), ((6595, 6617), 'trajectory_msgs.msg.JointTrajectoryPoint', 'JointTrajectoryPoint', ([], {}), '()\n', (6615, 6617), False, 'from trajectory_msgs.msg import JointTrajectoryPoint\n'), ((7148, 7176), 'rospy.logdebug', 'rospy.logdebug', (['"""Close Eyes"""'], {}), "('Close Eyes')\n", (7162, 7176), False, 'import rospy\n'), ((7196, 7234), 'rospy.Duration.from_sec', 'rospy.Duration.from_sec', (['duration_secs'], {}), '(duration_secs)\n', (7219, 7234), False, 'import rospy\n'), ((7250, 7277), 'control_msgs.msg.FollowJointTrajectoryGoal', 'FollowJointTrajectoryGoal', ([], {}), '()\n', (7275, 7277), False, 'from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal\n'), ((7317, 7333), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (7331, 7333), False, 'import rospy\n'), ((7406, 7428), 'trajectory_msgs.msg.JointTrajectoryPoint', 'JointTrajectoryPoint', ([], {}), '()\n', (7426, 7428), False, 'from trajectory_msgs.msg import JointTrajectoryPoint\n'), ((8679, 8717), 'rospy.Duration.from_sec', 'rospy.Duration.from_sec', (['duration_secs'], {}), '(duration_secs)\n', (8702, 8717), False, 'import rospy\n'), ((8760, 8787), 'control_msgs.msg.FollowJointTrajectoryGoal', 'FollowJointTrajectoryGoal', ([], {}), '()\n', (8785, 8787), False, 'from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal\n'), ((8827, 8843), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (8841, 8843), False, 'import rospy\n'), ((10359, 10378), 'rospy.Rate', 'rospy.Rate', (['rate_hz'], {}), '(rate_hz)\n', (10369, 10378), False, 'import rospy\n'), ((17475, 17505), 'rospy.Rate', 'rospy.Rate', (['(1.0 / refresh_secs)'], {}), '(1.0 / refresh_secs)\n', (17485, 17505), False, 'import rospy\n'), ((6183, 6230), 'rospy.logdebug', 'rospy.logdebug', (['"""Saved sent_messages_database!"""'], {}), "('Saved sent_messages_database!')\n", (6197, 6230), False, 'import rospy\n'), ((9188, 9210), 'trajectory_msgs.msg.JointTrajectoryPoint', 'JointTrajectoryPoint', ([], {}), 
'()\n', (9208, 9210), False, 'from trajectory_msgs.msg import JointTrajectoryPoint\n'), ((10397, 10416), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (10414, 10416), False, 'import rospy\n'), ((16216, 16264), 'rospy.loginfo', 'rospy.loginfo', (['"""Sending where_am_i help request"""'], {}), "('Sending where_am_i help request')\n", (16229, 16264), False, 'import rospy\n'), ((17522, 17541), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (17539, 17541), False, 'import rospy\n'), ((16051, 16128), 'rospy.loginfo', 'rospy.loginfo', (['"""Attempted to send where_am_i help request but have no image."""'], {}), "('Attempted to send where_am_i help request but have no image.')\n", (16064, 16128), False, 'import rospy\n'), ((16456, 16501), 'os.path.join', 'os.path.join', (['self.slackbot_url', '"""where_am_i"""'], {}), "(self.slackbot_url, 'where_am_i')\n", (16468, 16501), False, 'import os\n'), ((16864, 16961), 'rospy.logwarn', 'rospy.logwarn', (["('Error communicating with Slackbot /where_am_i at URL %s.' % self.slackbot_url\n )"], {}), "('Error communicating with Slackbot /where_am_i at URL %s.' %\n self.slackbot_url)\n", (16877, 16961), False, 'import rospy\n'), ((17116, 17146), 'rospy.logwarn', 'rospy.logwarn', (["('Error %s.' % e)"], {}), "('Error %s.' % e)\n", (17129, 17146), False, 'import rospy\n'), ((18032, 18089), 'rospy.logdebug', 'rospy.logdebug', (["('Got updates from Slackbot %s' % res_json)"], {}), "('Got updates from Slackbot %s' % res_json)\n", (18046, 18089), False, 'import rospy\n'), ((17008, 17053), 'rospy.logwarn', 'rospy.logwarn', (["('Response text %s.' % res.text)"], {}), "('Response text %s.' % res.text)\n", (17021, 17053), False, 'import rospy\n'), ((17080, 17102), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (17100, 17102), False, 'import traceback\n'), ((17830, 17876), 'os.path.join', 'os.path.join', (['self.slackbot_url', '"""get_updates"""'], {}), "(self.slackbot_url, 'get_updates')\n", (17842, 17876), False, 'import os\n'), ((18821, 18919), 'rospy.logwarn', 'rospy.logwarn', (["('Error communicating with Slackbot /get_updates at URL %s.' % self.\n slackbot_url)"], {}), "('Error communicating with Slackbot /get_updates at URL %s.' %\n self.slackbot_url)\n", (18834, 18919), False, 'import rospy\n'), ((19078, 19108), 'rospy.logwarn', 'rospy.logwarn', (["('Error %s.' % e)"], {}), "('Error %s.' % e)\n", (19091, 19108), False, 'import rospy\n'), ((10813, 10873), 'rospy.logdebug', 'rospy.logdebug', (['"""Waiting for wandering_module_action server"""'], {}), "('Waiting for wandering_module_action server')\n", (10827, 10873), False, 'import rospy\n'), ((10969, 11026), 'rospy.logdebug', 'rospy.logdebug', (['"""Sending goal to wandering_module_action"""'], {}), "('Sending goal to wandering_module_action')\n", (10983, 11026), False, 'import rospy\n'), ((18968, 19013), 'rospy.logwarn', 'rospy.logwarn', (["('Response text %s.' % res.text)"], {}), "('Response text %s.' 
% res.text)\n", (18981, 19013), False, 'import rospy\n'), ((19041, 19063), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (19061, 19063), False, 'import traceback\n'), ((11162, 11183), 'wandering_behavior.msg.WanderGoal', 'WanderGoal', ([], {'effort': '(-1)'}), '(effort=-1)\n', (11172, 11183), False, 'from wandering_behavior.msg import WanderAction, WanderGoal\n'), ((11651, 11694), 'rospy.loginfo', 'rospy.loginfo', (['"""State: NORMAL ==> CHARGING"""'], {}), "('State: NORMAL ==> CHARGING')\n", (11664, 11694), False, 'import rospy\n'), ((18494, 18564), 'rospy.loginfo', 'rospy.loginfo', (["('Got reaction %s from at ts %s' % (response, action_ts))"], {}), "('Got reaction %s from at ts %s' % (response, action_ts))\n", (18507, 18564), False, 'import rospy\n'), ((12068, 12111), 'rospy.loginfo', 'rospy.loginfo', (['"""State: CHARGING ==> NORMAL"""'], {}), "('State: CHARGING ==> NORMAL')\n", (12081, 12111), False, 'import rospy\n'), ((14250, 14319), 'rospy.loginfo', 'rospy.loginfo', (["('Sending battery request for pct %s' % msg.battery.pct)"], {}), "('Sending battery request for pct %s' % msg.battery.pct)\n", (14263, 14319), False, 'import rospy\n'), ((14417, 14463), 'os.path.join', 'os.path.join', (['self.slackbot_url', '"""low_battery"""'], {}), "(self.slackbot_url, 'low_battery')\n", (14429, 14463), False, 'import os\n'), ((14847, 14945), 'rospy.logwarn', 'rospy.logwarn', (["('Error communicating with Slackbot /low_battery at URL %s.' % self.\n slackbot_url)"], {}), "('Error communicating with Slackbot /low_battery at URL %s.' %\n self.slackbot_url)\n", (14860, 14945), False, 'import rospy\n'), ((15196, 15226), 'rospy.logwarn', 'rospy.logwarn', (["('Error %s.' % e)"], {}), "('Error %s.' % e)\n", (15209, 15226), False, 'import rospy\n'), ((15040, 15085), 'rospy.logwarn', 'rospy.logwarn', (["('Response text %s.' % res.text)"], {}), "('Response text %s.' % res.text)\n", (15053, 15085), False, 'import rospy\n'), ((15136, 15158), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (15156, 15158), False, 'import traceback\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 <NAME> <<EMAIL>>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking transformation algorithms (the models package).
"""
import logging
import unittest
import numpy as np
from gensim.corpora.mmcorpus import MmCorpus
from gensim.models import rpmodel
from gensim import matutils
from gensim.test.utils import datapath, get_tmpfile
class TestRpModel(unittest.TestCase):
def setUp(self):
self.corpus = MmCorpus(datapath('testcorpus.mm'))
def test_transform(self):
# create the transformation model
# HACK; set fixed seed so that we always get the same random matrix (and can compare against expected results)
np.random.seed(13)
model = rpmodel.RpModel(self.corpus, num_topics=2)
# transform one document
doc = list(self.corpus)[0]
transformed = model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests
expected = np.array([-0.70710677, 0.70710677])
self.assertTrue(np.allclose(vec, expected)) # transformed entries must be equal up to sign
def test_persistence(self):
fname = get_tmpfile('gensim_models.tst')
model = rpmodel.RpModel(self.corpus, num_topics=2)
model.save(fname)
model2 = rpmodel.RpModel.load(fname)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.projection, model2.projection))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def test_persistence_compressed(self):
fname = get_tmpfile('gensim_models.tst.gz')
model = rpmodel.RpModel(self.corpus, num_topics=2)
model.save(fname)
model2 = rpmodel.RpModel.load(fname, mmap=None)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.projection, model2.projection))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
| [
"logging.basicConfig",
"gensim.models.rpmodel.RpModel.load",
"gensim.matutils.sparse2full",
"numpy.allclose",
"gensim.test.utils.get_tmpfile",
"gensim.models.rpmodel.RpModel",
"numpy.array",
"numpy.random.seed",
"gensim.test.utils.datapath",
"unittest.main"
]
| [((2218, 2314), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s : %(levelname)s : %(message)s"""', 'level': 'logging.DEBUG'}), "(format='%(asctime)s : %(levelname)s : %(message)s',\n level=logging.DEBUG)\n", (2237, 2314), False, 'import logging\n'), ((2315, 2330), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2328, 2330), False, 'import unittest\n'), ((779, 797), 'numpy.random.seed', 'np.random.seed', (['(13)'], {}), '(13)\n', (793, 797), True, 'import numpy as np\n'), ((814, 856), 'gensim.models.rpmodel.RpModel', 'rpmodel.RpModel', (['self.corpus'], {'num_topics': '(2)'}), '(self.corpus, num_topics=2)\n', (829, 856), False, 'from gensim.models import rpmodel\n'), ((973, 1009), 'gensim.matutils.sparse2full', 'matutils.sparse2full', (['transformed', '(2)'], {}), '(transformed, 2)\n', (993, 1009), False, 'from gensim import matutils\n'), ((1084, 1119), 'numpy.array', 'np.array', (['[-0.70710677, 0.70710677]'], {}), '([-0.70710677, 0.70710677])\n', (1092, 1119), True, 'import numpy as np\n'), ((1269, 1301), 'gensim.test.utils.get_tmpfile', 'get_tmpfile', (['"""gensim_models.tst"""'], {}), "('gensim_models.tst')\n", (1280, 1301), False, 'from gensim.test.utils import datapath, get_tmpfile\n'), ((1318, 1360), 'gensim.models.rpmodel.RpModel', 'rpmodel.RpModel', (['self.corpus'], {'num_topics': '(2)'}), '(self.corpus, num_topics=2)\n', (1333, 1360), False, 'from gensim.models import rpmodel\n'), ((1404, 1431), 'gensim.models.rpmodel.RpModel.load', 'rpmodel.RpModel.load', (['fname'], {}), '(fname)\n', (1424, 1431), False, 'from gensim.models import rpmodel\n'), ((1750, 1785), 'gensim.test.utils.get_tmpfile', 'get_tmpfile', (['"""gensim_models.tst.gz"""'], {}), "('gensim_models.tst.gz')\n", (1761, 1785), False, 'from gensim.test.utils import datapath, get_tmpfile\n'), ((1802, 1844), 'gensim.models.rpmodel.RpModel', 'rpmodel.RpModel', (['self.corpus'], {'num_topics': '(2)'}), '(self.corpus, num_topics=2)\n', (1817, 1844), False, 'from gensim.models import rpmodel\n'), ((1888, 1926), 'gensim.models.rpmodel.RpModel.load', 'rpmodel.RpModel.load', (['fname'], {'mmap': 'None'}), '(fname, mmap=None)\n', (1908, 1926), False, 'from gensim.models import rpmodel\n'), ((552, 577), 'gensim.test.utils.datapath', 'datapath', (['"""testcorpus.mm"""'], {}), "('testcorpus.mm')\n", (560, 577), False, 'from gensim.test.utils import datapath, get_tmpfile\n'), ((1144, 1170), 'numpy.allclose', 'np.allclose', (['vec', 'expected'], {}), '(vec, expected)\n', (1155, 1170), True, 'import numpy as np\n'), ((1518, 1566), 'numpy.allclose', 'np.allclose', (['model.projection', 'model2.projection'], {}), '(model.projection, model2.projection)\n', (1529, 1566), True, 'import numpy as np\n'), ((1612, 1654), 'numpy.allclose', 'np.allclose', (['model[tstvec]', 'model2[tstvec]'], {}), '(model[tstvec], model2[tstvec])\n', (1623, 1654), True, 'import numpy as np\n'), ((2013, 2061), 'numpy.allclose', 'np.allclose', (['model.projection', 'model2.projection'], {}), '(model.projection, model2.projection)\n', (2024, 2061), True, 'import numpy as np\n'), ((2107, 2149), 'numpy.allclose', 'np.allclose', (['model[tstvec]', 'model2[tstvec]'], {}), '(model[tstvec], model2[tstvec])\n', (2118, 2149), True, 'import numpy as np\n')] |
import tensorflow as tf
from tensorflow import keras
class CondGeneratorModel(keras.Model):
def __init__(self):
super(CondGeneratorModel, self).__init__()
        # Project the latent vector to 7*7*256 features, reshaped below to a (7, 7, 256) tensor
self.dense_1 = keras.layers.Dense(7*7*256)
self.reshape_1 = keras.layers.Reshape((7, 7, 256))
        # Embed the class label (0-9) and project it to 7*7*256 features to match the latent features
self.embedder = keras.layers.Embedding(10, 100)
self.dense_2 = keras.layers.Dense(7*7*256)
# From (7,7,256) to (7,7,128)
self.convt_1 = keras.layers.Conv2DTranspose(
128, (5, 5), strides=1, padding='same', use_bias=False)
self.convt_bn_1 = keras.layers.BatchNormalization()
self.convt_relu_1 = keras.layers.LeakyReLU()
# From (7,7,128) to (14,14,64)
self.convt_2 = keras.layers.Conv2DTranspose(
64, (5, 5), strides=2, padding='same', use_bias=False)
self.convt_bn_2 = keras.layers.BatchNormalization()
self.convt_relu_2 = keras.layers.LeakyReLU()
# From (14,14,64) to (28,28,1)
self.convt_out = keras.layers.Conv2DTranspose(
1, (5, 5), strides=2, padding='same', use_bias=False)
def call(self, inputs):
feat_x = inputs[0]
label = inputs[2]
        # Expand the label input to the same size as the latent features
label_x = self.embedder(label)
label_x = self.dense_2(label_x)
label_x = tf.squeeze(label_x, 1)
        # Project the latent features to 7*7*256
feat_x = self.dense_1(feat_x)
# Combine latent feature and label input
x = tf.math.multiply(feat_x, label_x)
x = self.reshape_1(x)
# From (7,7,256) to (7,7,128)
x = self.convt_1(x)
x = self.convt_bn_1(x)
x = self.convt_relu_1(x)
# From (7,7,128) to (14,14,64)
x = self.convt_2(x)
x = self.convt_bn_2(x)
x = self.convt_relu_2(x)
# From (14,14,64) to (28,28,1)
x = self.convt_out(x)
return [x, None, label]
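
# Rough usage sketch (not part of the original file; shapes inferred from the
# layers above). The generator reads inputs[0] (latent noise) and inputs[2]
# (integer class labels in [0, 10)) and returns [image, None, labels]:
#   noise = tf.random.normal((16, 100))         # any latent width works here
#   labels = tf.random.uniform((16, 1), minval=0, maxval=10, dtype=tf.int32)
#   images, _, _ = CondGeneratorModel()([noise, None, labels])  # images: (16, 28, 28, 1)
# Depending on the TF/Keras version, the unused middle slot may need a real
# tensor rather than None when calling through keras.Model.__call__.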
class CondDiscriminatorModel(keras.Model):
def __init__(self):
super(CondDiscriminatorModel, self).__init__()
self.embedder = keras.layers.Embedding(10, 100)
self.expand_layer = keras.layers.Dense(28*28*1)
self.reshape_layer = keras.layers.Reshape((28, 28, 1))
self.conv_1 = keras.layers.Conv2D(
64, (5, 5), strides=2, padding='same', input_shape=(28, 28, 1))
self.relu_1 = keras.layers.LeakyReLU()
self.drop_1 = keras.layers.Dropout(0.3)
self.conv_2 = keras.layers.Conv2D(
128, (5, 5), strides=2, padding='same')
self.relu_2 = keras.layers.LeakyReLU()
self.drop_2 = keras.layers.Dropout(0.3)
self.flatten = keras.layers.Flatten()
self.out = keras.layers.Dense(1)
def call(self, inputs):
images_x = inputs[0]
labels = inputs[2]
labels_x = self.embedder(labels)
labels_x = self.expand_layer(labels_x)
labels_x = self.reshape_layer(labels_x)
x = tf.math.multiply(images_x, labels_x)
x = self.conv_1(x)
x = self.relu_1(x)
x = self.drop_1(x)
x = self.conv_2(x)
x = self.relu_2(x)
x = self.drop_2(x)
x = self.flatten(x)
x = self.out(x)
return x
| [
"tensorflow.keras.layers.Reshape",
"tensorflow.keras.layers.Conv2DTranspose",
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.LeakyReLU",
"tensorflow.keras.layers.BatchNormalization",
"tensorflow.keras.layers.Embedding",
"tensorflow.math.multiply",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.layers.Flatten",
"tensorflow.squeeze"
]
| [((250, 281), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(7 * 7 * 256)'], {}), '(7 * 7 * 256)\n', (268, 281), False, 'from tensorflow import keras\n'), ((303, 336), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['(7, 7, 256)'], {}), '((7, 7, 256))\n', (323, 336), False, 'from tensorflow import keras\n'), ((395, 426), 'tensorflow.keras.layers.Embedding', 'keras.layers.Embedding', (['(10)', '(100)'], {}), '(10, 100)\n', (417, 426), False, 'from tensorflow import keras\n'), ((450, 481), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(7 * 7 * 256)'], {}), '(7 * 7 * 256)\n', (468, 481), False, 'from tensorflow import keras\n'), ((539, 627), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(128)', '(5, 5)'], {'strides': '(1)', 'padding': '"""same"""', 'use_bias': '(False)'}), "(128, (5, 5), strides=1, padding='same',\n use_bias=False)\n", (567, 627), False, 'from tensorflow import keras\n'), ((663, 696), 'tensorflow.keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {}), '()\n', (694, 696), False, 'from tensorflow import keras\n'), ((725, 749), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (747, 749), False, 'from tensorflow import keras\n'), ((812, 899), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(64)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""', 'use_bias': '(False)'}), "(64, (5, 5), strides=2, padding='same',\n use_bias=False)\n", (840, 899), False, 'from tensorflow import keras\n'), ((935, 968), 'tensorflow.keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {}), '()\n', (966, 968), False, 'from tensorflow import keras\n'), ((997, 1021), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (1019, 1021), False, 'from tensorflow import keras\n'), ((1086, 1173), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(1)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""', 'use_bias': '(False)'}), "(1, (5, 5), strides=2, padding='same', use_bias\n =False)\n", (1114, 1173), False, 'from tensorflow import keras\n'), ((1423, 1445), 'tensorflow.squeeze', 'tf.squeeze', (['label_x', '(1)'], {}), '(label_x, 1)\n', (1433, 1445), True, 'import tensorflow as tf\n'), ((1589, 1622), 'tensorflow.math.multiply', 'tf.math.multiply', (['feat_x', 'label_x'], {}), '(feat_x, label_x)\n', (1605, 1622), True, 'import tensorflow as tf\n'), ((2163, 2194), 'tensorflow.keras.layers.Embedding', 'keras.layers.Embedding', (['(10)', '(100)'], {}), '(10, 100)\n', (2185, 2194), False, 'from tensorflow import keras\n'), ((2223, 2254), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(28 * 28 * 1)'], {}), '(28 * 28 * 1)\n', (2241, 2254), False, 'from tensorflow import keras\n'), ((2280, 2313), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['(28, 28, 1)'], {}), '((28, 28, 1))\n', (2300, 2313), False, 'from tensorflow import keras\n'), ((2336, 2423), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(64)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""', 'input_shape': '(28, 28, 1)'}), "(64, (5, 5), strides=2, padding='same', input_shape=(28,\n 28, 1))\n", (2355, 2423), False, 'from tensorflow import keras\n'), ((2455, 2479), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (2477, 2479), False, 'from tensorflow import keras\n'), ((2502, 2527), 'tensorflow.keras.layers.Dropout', 'keras.layers.Dropout', (['(0.3)'], 
{}), '(0.3)\n', (2522, 2527), False, 'from tensorflow import keras\n'), ((2550, 2609), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(128)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""'}), "(128, (5, 5), strides=2, padding='same')\n", (2569, 2609), False, 'from tensorflow import keras\n'), ((2645, 2669), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (2667, 2669), False, 'from tensorflow import keras\n'), ((2692, 2717), 'tensorflow.keras.layers.Dropout', 'keras.layers.Dropout', (['(0.3)'], {}), '(0.3)\n', (2712, 2717), False, 'from tensorflow import keras\n'), ((2741, 2763), 'tensorflow.keras.layers.Flatten', 'keras.layers.Flatten', ([], {}), '()\n', (2761, 2763), False, 'from tensorflow import keras\n'), ((2783, 2804), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {}), '(1)\n', (2801, 2804), False, 'from tensorflow import keras\n'), ((3038, 3074), 'tensorflow.math.multiply', 'tf.math.multiply', (['images_x', 'labels_x'], {}), '(images_x, labels_x)\n', (3054, 3074), True, 'import tensorflow as tf\n')] |
# -*- coding: utf-8 -*-
from selectable.decorators import login_required
from maestros.models import TiposMedidasActuacion, TiposLimitesCriticos, TiposMedidasVigilancia, TiposTemperaturas, TiposFrecuencias, Zonas, Terceros, CatalogoEquipos, Personal, Consumibles, ParametrosAnalisis, Actividades, Etapas, Peligros, TiposCursos, TiposLegislacion, Unidades, Firmas, HorarioTurnos
from selectable.base import ModelLookup
from selectable.registry import registry
from maestros_generales.models import Empresas
from siva import settings
__author__ = 'julian'
@login_required
class TPActuacionPrevLookup(ModelLookup):
model = TiposMedidasActuacion
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TPActuacionPrevLookup, self).get_query(request, term)
results = results.filter(tipo="P",empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TPActuacionPrevLookup)
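
# Illustrative wiring on the form side (names below are examples, not from this
# project): a registered lookup is normally consumed through django-selectable's
# form fields, e.g.
#   from django import forms
#   from selectable.forms import AutoCompleteSelectField
#   class PlanAPPCCForm(forms.Form):
#       medida_preventiva = AutoCompleteSelectField(lookup_class=TPActuacionPrevLookup)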
@login_required
class TPActuacionCorrLookup(ModelLookup):
model = TiposMedidasActuacion
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TPActuacionCorrLookup, self).get_query(request, term)
results = results.filter(tipo="C",empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TPActuacionCorrLookup)
@login_required
class TPLimitesCritLookup(ModelLookup):
model = TiposLimitesCriticos
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TPLimitesCritLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TPLimitesCritLookup)
@login_required
class ActividadesLookup(ModelLookup):
model = Actividades
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ActividadesLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ActividadesLookup)
@login_required
class TipoMedidasVigilanciaLookup(ModelLookup):
model = TiposMedidasVigilancia
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TipoMedidasVigilanciaLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TipoMedidasVigilanciaLookup)
@login_required
class TiposTemperaturasLookup(ModelLookup):
model = TiposTemperaturas
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposTemperaturasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposTemperaturasLookup)
@login_required
class TiposFrecuenciasLookup(ModelLookup):
model = TiposFrecuencias
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposFrecuenciasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposFrecuenciasLookup)
@login_required
class ZonasLookup(ModelLookup):
model = Zonas
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ZonasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ZonasLookup)
@login_required
class TercerosLookup(ModelLookup):
model = Terceros
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TercerosLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TercerosLookup)
@login_required
class TercerosTiposLookup(ModelLookup):
model = Terceros
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TercerosTiposLookup, self).get_query(request, term)
results = results.filter(tipotercero__descripcion=settings.ASESORSANITARIO, empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TercerosTiposLookup)
@login_required
class CatalogoEquiposLookup(ModelLookup):
model = CatalogoEquipos
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(CatalogoEquiposLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(CatalogoEquiposLookup)
@login_required
class PersonalLookup(ModelLookup):
model = Personal
search_fields = ('apellidos__icontains',)
def get_query(self, request, term):
results = super(PersonalLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.apellidos
def get_item_label(self, item):
return "%s %s" % (item.apellidos, item.nombres)
registry.register(PersonalLookup)
@login_required
class TiposCursosLookup(ModelLookup):
model = TiposCursos
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposCursosLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposCursosLookup)
@login_required
class TiposLegislacionLookup(ModelLookup):
model = TiposLegislacion
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposLegislacionLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposLegislacionLookup)
@login_required
class ConsumiblesLookup(ModelLookup):
model = Consumibles
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ConsumiblesLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ConsumiblesLookup)
@login_required
class ParametrosAnalisisLookup(ModelLookup):
model = ParametrosAnalisis
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ParametrosAnalisisLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ParametrosAnalisisLookup)
@login_required
class EtapasLookup(ModelLookup):
model = Etapas
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(EtapasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(EtapasLookup)
@login_required
class PeligrosLookup(ModelLookup):
model = Peligros
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(PeligrosLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(PeligrosLookup)
@login_required
class UnidadesLookup(ModelLookup):
model = Unidades
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(UnidadesLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(UnidadesLookup)
@login_required
class FirmasLookup(ModelLookup):
model = Firmas
search_fields = ('personal__apellidos__icontains',)
def get_query(self, request, term):
results = super(FirmasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.personal.apellidos
def get_item_label(self, item):
return "%s %s" % (item.personal__apellidos, item.personal__nombres)
registry.register(FirmasLookup)
@login_required
class HorarioTurnoLookup(ModelLookup):
model = HorarioTurnos
search_fields = ('ihora__icontains','fhora__icontains')
def get_query(self, request, term):
results = super(HorarioTurnoLookup, self).get_query(request, term)
idtpturno = request.GET.get('idtpturno', '')
if idtpturno:
results = results.filter(tpturnos_id=idtpturno)
return results
def get_item_value(self, item):
return "%s - %s" % (item.ihora, item.fhora)
def get_item_label(self, item):
return "%s - %s" % (item.ihora, item.fhora)
registry.register(HorarioTurnoLookup)
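# Minimal usage sketch, assuming django-selectable's AutoCompleteSelectField accepts a
# lookup_class argument: it binds one of the lookups above to an autocomplete form field.
# The form class and field name below are hypothetical.
from django import forms
from selectable.forms import AutoCompleteSelectField
class TerceroSearchForm(forms.Form):
    # Autocompletes against TercerosLookup, which already filters results per-user in get_query().
    tercero = AutoCompleteSelectField(lookup_class=TercerosLookup, required=False)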
| [
"selectable.registry.registry.register",
"maestros_generales.models.Empresas.objects.filter"
]
| [((1100, 1140), 'selectable.registry.registry.register', 'registry.register', (['TPActuacionPrevLookup'], {}), '(TPActuacionPrevLookup)\n', (1117, 1140), False, 'from selectable.registry import registry\n'), ((1686, 1726), 'selectable.registry.registry.register', 'registry.register', (['TPActuacionCorrLookup'], {}), '(TPActuacionCorrLookup)\n', (1703, 1726), False, 'from selectable.registry import registry\n'), ((2258, 2296), 'selectable.registry.registry.register', 'registry.register', (['TPLimitesCritLookup'], {}), '(TPLimitesCritLookup)\n', (2275, 2296), False, 'from selectable.registry import registry\n'), ((2816, 2852), 'selectable.registry.registry.register', 'registry.register', (['ActividadesLookup'], {}), '(ActividadesLookup)\n', (2833, 2852), False, 'from selectable.registry import registry\n'), ((3402, 3448), 'selectable.registry.registry.register', 'registry.register', (['TipoMedidasVigilanciaLookup'], {}), '(TipoMedidasVigilanciaLookup)\n', (3419, 3448), False, 'from selectable.registry import registry\n'), ((3985, 4027), 'selectable.registry.registry.register', 'registry.register', (['TiposTemperaturasLookup'], {}), '(TiposTemperaturasLookup)\n', (4002, 4027), False, 'from selectable.registry import registry\n'), ((4561, 4602), 'selectable.registry.registry.register', 'registry.register', (['TiposFrecuenciasLookup'], {}), '(TiposFrecuenciasLookup)\n', (4578, 4602), False, 'from selectable.registry import registry\n'), ((5103, 5133), 'selectable.registry.registry.register', 'registry.register', (['ZonasLookup'], {}), '(ZonasLookup)\n', (5120, 5133), False, 'from selectable.registry import registry\n'), ((5643, 5676), 'selectable.registry.registry.register', 'registry.register', (['TercerosLookup'], {}), '(TercerosLookup)\n', (5660, 5676), False, 'from selectable.registry import registry\n'), ((6247, 6285), 'selectable.registry.registry.register', 'registry.register', (['TercerosTiposLookup'], {}), '(TercerosTiposLookup)\n', (6264, 6285), False, 'from selectable.registry import registry\n'), ((6817, 6857), 'selectable.registry.registry.register', 'registry.register', (['CatalogoEquiposLookup'], {}), '(CatalogoEquiposLookup)\n', (6834, 6857), False, 'from selectable.registry import registry\n'), ((7375, 7408), 'selectable.registry.registry.register', 'registry.register', (['PersonalLookup'], {}), '(PersonalLookup)\n', (7392, 7408), False, 'from selectable.registry import registry\n'), ((7927, 7963), 'selectable.registry.registry.register', 'registry.register', (['TiposCursosLookup'], {}), '(TiposCursosLookup)\n', (7944, 7963), False, 'from selectable.registry import registry\n'), ((8498, 8539), 'selectable.registry.registry.register', 'registry.register', (['TiposLegislacionLookup'], {}), '(TiposLegislacionLookup)\n', (8515, 8539), False, 'from selectable.registry import registry\n'), ((9059, 9095), 'selectable.registry.registry.register', 'registry.register', (['ConsumiblesLookup'], {}), '(ConsumiblesLookup)\n', (9076, 9095), False, 'from selectable.registry import registry\n'), ((9636, 9679), 'selectable.registry.registry.register', 'registry.register', (['ParametrosAnalisisLookup'], {}), '(ParametrosAnalisisLookup)\n', (9653, 9679), False, 'from selectable.registry import registry\n'), ((10183, 10214), 'selectable.registry.registry.register', 'registry.register', (['EtapasLookup'], {}), '(EtapasLookup)\n', (10200, 10214), False, 'from selectable.registry import registry\n'), ((10724, 10757), 'selectable.registry.registry.register', 'registry.register', (['PeligrosLookup'], 
{}), '(PeligrosLookup)\n', (10741, 10757), False, 'from selectable.registry import registry\n'), ((11268, 11301), 'selectable.registry.registry.register', 'registry.register', (['UnidadesLookup'], {}), '(UnidadesLookup)\n', (11285, 11301), False, 'from selectable.registry import registry\n'), ((11853, 11884), 'selectable.registry.registry.register', 'registry.register', (['FirmasLookup'], {}), '(FirmasLookup)\n', (11870, 11884), False, 'from selectable.registry import registry\n'), ((12482, 12519), 'selectable.registry.registry.register', 'registry.register', (['HorarioTurnoLookup'], {}), '(HorarioTurnoLookup)\n', (12499, 12519), False, 'from selectable.registry import registry\n'), ((870, 925), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (893, 925), False, 'from maestros_generales.models import Empresas\n'), ((1456, 1511), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (1479, 1511), False, 'from maestros_generales.models import Empresas\n'), ((2028, 2083), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (2051, 2083), False, 'from maestros_generales.models import Empresas\n'), ((2586, 2641), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (2609, 2641), False, 'from maestros_generales.models import Empresas\n'), ((3172, 3227), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (3195, 3227), False, 'from maestros_generales.models import Empresas\n'), ((3755, 3810), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (3778, 3810), False, 'from maestros_generales.models import Empresas\n'), ((4331, 4386), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (4354, 4386), False, 'from maestros_generales.models import Empresas\n'), ((4873, 4928), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (4896, 4928), False, 'from maestros_generales.models import Empresas\n'), ((5413, 5468), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (5436, 5468), False, 'from maestros_generales.models import Empresas\n'), ((6017, 6072), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (6040, 6072), False, 'from maestros_generales.models import Empresas\n'), ((6587, 6642), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (6610, 6642), False, 'from maestros_generales.models import Empresas\n'), ((7134, 7189), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', 
([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (7157, 7189), False, 'from maestros_generales.models import Empresas\n'), ((7697, 7752), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (7720, 7752), False, 'from maestros_generales.models import Empresas\n'), ((8268, 8323), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (8291, 8323), False, 'from maestros_generales.models import Empresas\n'), ((8829, 8884), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (8852, 8884), False, 'from maestros_generales.models import Empresas\n'), ((9406, 9461), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (9429, 9461), False, 'from maestros_generales.models import Empresas\n'), ((9953, 10008), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (9976, 10008), False, 'from maestros_generales.models import Empresas\n'), ((10494, 10549), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (10517, 10549), False, 'from maestros_generales.models import Empresas\n'), ((11038, 11093), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (11061, 11093), False, 'from maestros_generales.models import Empresas\n'), ((11583, 11638), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (11606, 11638), False, 'from maestros_generales.models import Empresas\n')] |
"""Define commands for Python 2.7"""
import argparse
import traceback
from . import util
from .cmd import run
from .cmd import extractpipenv
def main():
"""Main function"""
print("This version is not supported! It has limitted analysis features")
parser = argparse.ArgumentParser(description='Analyze Jupyter Notebooks')
subparsers = parser.add_subparsers()
run.create_subparsers(subparsers)
extractpipenv.create_subparsers(subparsers)
args, rest = parser.parse_known_args()
try:
if not getattr(args, 'func', None):
parser.print_help()
else:
args.func(args, rest)
if not util.EXITED:
util.do_exit(0)
except: # pylint: disable=bare-except
if not util.EXITED:
traceback.print_exc()
util.do_exit(1)
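# Assumed convenience guard: the command is presumably exposed through a console-script
# entry point, but this guard also allows launching the module with `python -m`.
if __name__ == "__main__":
    main()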
| [
"traceback.print_exc",
"argparse.ArgumentParser"
]
| [((270, 334), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Analyze Jupyter Notebooks"""'}), "(description='Analyze Jupyter Notebooks')\n", (293, 334), False, 'import argparse\n'), ((778, 799), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (797, 799), False, 'import traceback\n')] |
# -*- coding: utf-8 -*-
"""This python module aims to manage
`DokuWiki <https://www.dokuwiki.org/dokuwiki>`_ wikis by using the
provided `XML-RPC API <https://www.dokuwiki.org/devel:xmlrpc>`_. It is
compatible with python2.7 and python3+.
Installation
------------
It is on `PyPi <https://pypi.python.org/pypi/dokuwiki>`_ so you can use
the ``pip`` command to install it::
pip install dokuwiki
Otherwise sources are in `github <https://github.com/fmenabe/python-dokuwiki>`_
"""
import re
import sys
import base64
import weakref
from xml.parsers.expat import ExpatError
if sys.version_info[0] == 3:
from xmlrpc.client import ServerProxy, Binary, Fault, Transport
from urllib.parse import urlencode
else:
from xmlrpclib import ServerProxy, Binary, Fault, Transport
from urllib import urlencode
from datetime import datetime, timedelta
ERR = 'XML or text declaration not at start of entity: line 2, column 0'
_URL_RE = re.compile(r'(?P<proto>https?)://(?P<host>[^/]*)(?P<uri>/.*)?')
def date(date):
"""DokuWiki returns dates of `xmlrpclib`/`xmlrpc.client` ``DateTime``
type and the format changes between DokuWiki versions ... This function
    converts *date* to a `datetime` object.
"""
date = date.value
return (datetime.strptime(date[:-5], '%Y-%m-%dT%H:%M:%S')
if len(date) == 24
else datetime.strptime(date, '%Y%m%dT%H:%M:%S'))
def utc2local(date):
"""DokuWiki returns date with a +0000 timezone. This function convert *date*
to the local time.
"""
date_offset = (datetime.now() - datetime.utcnow())
    # Python < 2.7 doesn't have the 'total_seconds' method so calculate it by hand!
date_offset = (date_offset.microseconds +
(date_offset.seconds + date_offset.days * 24 * 3600) * 1e6) / 1e6
date_offset = int(round(date_offset / 60 / 60))
return date + timedelta(hours=date_offset)
class DokuWikiError(Exception):
"""Exception raised by this module when there is an error."""
pass
class CookiesTransport(Transport):
"""A Python3 xmlrpc.client.Transport subclass that retains cookies."""
def __init__(self):
Transport.__init__(self)
self._cookies = dict()
def send_headers(self, connection, headers):
if self._cookies:
cookies = map(lambda x: x[0] + '=' + x[1], self._cookies.items())
connection.putheader("Cookie", "; ".join(cookies))
Transport.send_headers(self, connection, headers)
def parse_response(self, response):
"""parse and store cookie"""
try:
for header in response.msg.get_all("Set-Cookie"):
cookie = header.split(";", 1)[0]
cookieKey, cookieValue = cookie.split("=", 1)
self._cookies[cookieKey] = cookieValue
finally:
return Transport.parse_response(self, response)
class CookiesTransport2(Transport):
"""A Python2 xmlrpclib.Transport subclass that retains cookies."""
def __init__(self):
Transport.__init__(self)
self._cookies = dict()
def send_request(self, connection, handler, request_body):
Transport.send_request(self, connection, handler, request_body)
# set cookie below handler
if self._cookies:
cookies = map(lambda x: x[0] + '=' + x[1], self._cookies.items())
connection.putheader("Cookie", "; ".join(cookies))
def parse_response(self, response):
"""parse and store cookie"""
try:
for header in response.getheader("set-cookie").split(", "):
# filter 'expire' information
if not header.startswith("D"):
continue
cookie = header.split(";", 1)[0]
cookieKey, cookieValue = cookie.split("=", 1)
self._cookies[cookieKey] = cookieValue
finally:
return Transport.parse_response(self, response)
class DokuWiki(object):
"""Initialize a connection to a DokuWiki wiki. *url*, *user* and
*password* are respectively the URL, the login and the password for
connecting to the wiki. *kwargs* are `xmlrpclib`/`xmlrpc.client`
**ServerProxy** parameters.
    The exception `DokuWikiError` is raised if the authentication
    fails, but other exceptions (like ``gaierror`` for an invalid domain or
    ``ProtocolError`` for an invalid wiki, ...) are not caught.
.. code::
try:
wiki = dokuwiki.DokuWiki('URL', 'USER', 'PASSWORD', cookieAuth=False)
except (DokuWikiError, Exception) as err:
print('unable to connect: %s' % err)
"""
def __init__(self, url, user, password, cookieAuth=False, **kwargs):
"""Initialize the object by connecting to the XMLRPC server."""
# Initialize XMLRPC client.
try:
params = _URL_RE.search(url).groupdict()
if cookieAuth == False:
url = '%s://%s:%s@%s%s/lib/exe/xmlrpc.php' % (
params['proto'], user, password, params['host'], params['uri'] or '')
else:
url = '%s://%s%s/lib/exe/xmlrpc.php' % (
params['proto'], params['host'], params['uri'] or '')
except AttributeError:
raise DokuWikiError("invalid url '%s'" % url)
if cookieAuth == False:
self.proxy = ServerProxy(url, **kwargs)
else:
if sys.version_info[0] == 3:
self.proxy = ServerProxy(url, CookiesTransport(), **kwargs)
else:
self.proxy = ServerProxy(url, CookiesTransport2(), **kwargs)
# Force login to check the connection.
if not self.login(user, password):
raise DokuWikiError('invalid login or password!')
# Set "namespaces" for pages and medias functions.
self.pages = _Pages(weakref.ref(self)())
self.medias = _Medias(weakref.ref(self)())
def send(self, command, *args, **kwargs):
"""Generic method for executing an XML-RPC *command*. *args* and
*kwargs* are the arguments and parameters needed by the command.
"""
args = list(args)
if kwargs:
args.append(kwargs)
method = self.proxy
for elt in command.split('.'):
method = getattr(method, elt)
try:
return method(*args)
except Fault as err:
if err.faultCode == 121:
return {}
elif err.faultCode == 321:
return []
raise DokuWikiError(err)
except ExpatError as err:
if str(err) != ERR:
raise DokuWikiError(err)
@property
def version(self):
"""Property that returns the DokuWiki version of the remote Wiki."""
return self.send('dokuwiki.getVersion')
@property
def time(self):
"""Property that returns the current time at the remote wiki server as
Unix timestamp.
"""
return self.send('dokuwiki.getTime')
@property
def xmlrpc_version(self):
"""Property that returns the XML RPC interface version of the remote
Wiki. This is DokuWiki implementation specific and independent of the
supported standard API version returned by ``wiki.getRPCVersionSupported``.
"""
return self.send('dokuwiki.getXMLRPCAPIVersion')
@property
def xmlrpc_supported_version(self):
"""Property that returns *2* with the supported RPC API version."""
return self.send('wiki.getRPCVersionSupported')
@property
def title(self):
"""Property that returns the title of the wiki."""
return self.send('dokuwiki.getTitle')
def login(self, user, password):
"""Log to the wiki using *user* and *password* credentials. It returns
a boolean that indicates if the user succesfully authenticate."""
return self.send('dokuwiki.login', user, password)
def add_acl(self, scope, user, permission):
"""Add an `ACL <https://www.dokuwiki.org/acl>`_ rule that restricts
the page/namespace *scope* to *user* (use *@group* syntax for groups)
        with *permission* level. It returns a boolean that indicates whether the rule
was correctly added.
"""
return self.send('plugin.acl.addAcl', scope, user, permission)
def del_acl(self, scope, user):
"""Delete any ACL matching the given *scope* and *user* (or group if
        *@group* syntax is used). It returns a boolean that indicates whether the rule
was correctly removed.
"""
return self.send('plugin.acl.delAcl', scope, user)
class _Pages(object):
"""This object regroup methods for managing pages of a DokuWiki. This object
is accessible from the ``pages`` property of an `DokuWiki` instance::
wiki = dokuwiki.DokuWiki('URL', 'User', 'Password')
wiki.pages.list()
"""
def __init__(self, dokuwiki):
self._dokuwiki = dokuwiki
def list(self, namespace='/', **options):
"""List all pages of the given *namespace*.
Valid *options* are:
* *depth*: (int) recursion level, 0 for all
* *hash*: (bool) do an md5 sum of content
* *skipacl*: (bool) list everything regardless of ACL
"""
return self._dokuwiki.send('dokuwiki.getPagelist', namespace, options)
def changes(self, timestamp):
"""Returns a list of changes since given *timestamp*.
For example, for returning all changes since *2016-01-01*::
from datetime import datetime
wiki.pages.changes(datetime(2016, 1, 1).timestamp())
"""
return self._dokuwiki.send('wiki.getRecentChanges', timestamp)
def search(self, string):
"""Performs a fulltext search on *string* and returns the first 15
results.
"""
return self._dokuwiki.send('dokuwiki.search', string)
def versions(self, page, offset=0):
"""Returns the available versions of *page*. *offset* can be used to
list earlier versions in the history.
"""
return self._dokuwiki.send('wiki.getPageVersions', page, offset)
def info(self, page, version=None):
"""Returns informations of *page*. Informations of the last version
is returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageInfoVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPageInfo', page))
def get(self, page, version=None):
"""Returns the content of *page*. The content of the last version is
returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPage', page))
def append(self, page, content, **options):
"""Appends *content* text to *page*.
Valid *options* are:
* *sum*: (str) change summary
* *minor*: (bool) whether this is a minor change
"""
return self._dokuwiki.send('dokuwiki.appendPage', page, content, options)
def html(self, page, version=None):
"""Returns HTML content of *page*. The HTML content of the last version
of the page is returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageHTMLVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPageHTML', page))
def set(self, page, content, **options):
"""Set/replace the *content* of *page*.
Valid *options* are:
* *sum*: (str) change summary
* *minor*: (bool) whether this is a minor change
"""
try:
return self._dokuwiki.send('wiki.putPage', page, content, options)
except ExpatError as err:
# Sometime the first line of the XML response is blank which raise
# the 'ExpatError' exception although the change has been done. This
# allow to ignore the error.
if str(err) != ERR:
raise DokuWikiError(err)
def delete(self, page):
"""Delete *page* by setting an empty content."""
return self.set(page, '')
def lock(self, page):
"""Locks *page*."""
result = self._dokuwiki.send('dokuwiki.setLocks',
lock=[page], unlock=[])
if result['lockfail']:
raise DokuWikiError('unable to lock page')
def unlock(self, page):
"""Unlocks *page*."""
result = self._dokuwiki.send('dokuwiki.setLocks',
lock=[], unlock=[page])
if result['unlockfail']:
raise DokuWikiError('unable to unlock page')
def permission(self, page):
"""Returns the permission level of *page*."""
return self._dokuwiki.send('wiki.aclCheck', page)
def links(self, page):
"""Returns a list of all links contained in *page*."""
return self._dokuwiki.send('wiki.listLinks', page)
def backlinks(self, page):
"""Returns a list of all links referencing *page*."""
return self._dokuwiki.send('wiki.getBackLinks', page)
class _Medias(object):
"""This object regroup methods for managing medias of a DokuWiki. This
object is accessible from the ``medias`` property of an `DokuWiki`
instance::
wiki = dokuwiki.DokuWiki('URL', 'User', 'Password')
wiki.medias.list()
"""
def __init__(self, dokuwiki):
self._dokuwiki = dokuwiki
def list(self, namespace='/', **options):
"""Returns all medias of the given *namespace*.
Valid *options* are:
* *depth*: (int) recursion level, 0 for all
* *skipacl*: (bool) skip acl checking
* *pattern*: (str) check given pattern
* *hash*: (bool) add hashes to result list
"""
return self._dokuwiki.send('wiki.getAttachments', namespace, options)
def changes(self, timestamp):
"""Returns the list of medias changed since given *timestamp*.
For example, for returning all changes since *2016-01-01*::
from datetime import datetime
wiki.medias.changes(datetime(2016, 1, 1).timestamp())
"""
return self._dokuwiki.send('wiki.getRecentMediaChanges', timestamp)
def get(self, media, dirpath=None, filename=None, overwrite=False, b64decode=False):
"""Returns the binary data of *media* or save it to a file. If *dirpath*
is not set the binary data is returned, otherwise the data is saved
to a file. By default, the filename is the name of the media but it can
be changed with *filename* parameter. *overwrite* parameter allow to
overwrite the file if it already exists locally.
"""
import os
data = self._dokuwiki.send('wiki.getAttachment', media)
data = base64.b64decode(data) if b64decode else data.data
if dirpath is None:
return data
if filename is None:
filename = media.replace('/', ':').split(':')[-1]
if not os.path.exists(dirpath):
os.makedirs(dirpath)
filepath = os.path.join(dirpath, filename)
if os.path.exists(filepath) and not overwrite:
raise FileExistsError("[Errno 17] File exists: '%s'" % filepath)
with open(filepath, 'wb') as fhandler:
fhandler.write(data)
def info(self, media):
"""Returns informations of *media*."""
return self._dokuwiki.send('wiki.getAttachmentInfo', media)
def add(self, media, filepath, overwrite=True):
"""Set *media* from local file *filepath*. *overwrite* parameter specify
if the media must be overwrite if it exists remotely.
"""
with open(filepath, 'rb') as fhandler:
self._dokuwiki.send('wiki.putAttachment', media,
Binary(fhandler.read()), ow=overwrite)
def set(self, media, _bytes, overwrite=True, b64encode=False):
"""Set *media* from *_bytes*. *overwrite* parameter specify if the media
must be overwrite if it exists remotely.
"""
data = base64.b64encode(_bytes) if b64encode else Binary(_bytes)
self._dokuwiki.send('wiki.putAttachment', media, data, ow=overwrite)
def delete(self, media):
"""Delete *media*."""
return self._dokuwiki.send('wiki.deleteAttachment', media)
class Dataentry(object):
"""Object that manage `data entries <https://www.dokuwiki.org/plugin:data>`_."""
@staticmethod
def get(content, keep_order=False):
"""Get dataentry from *content*. *keep_order* indicates whether to
        return an ordered dictionary."""
if keep_order:
from collections import OrderedDict
dataentry = OrderedDict()
else:
dataentry = {}
found = False
for line in content.split('\n'):
if line.strip().startswith('---- dataentry'):
found = True
continue
elif line == '----':
break
elif not found:
continue
line_split = line.split(':')
key = line_split[0].strip()
value = re.sub('#.*$', '', ':'.join(line_split[1:])).strip()
dataentry.setdefault(key, value)
if not found:
raise DokuWikiError('no dataentry found')
return dataentry
@staticmethod
def gen(name, data):
"""Generate dataentry *name* from *data*."""
return '---- dataentry %s ----\n%s\n----' % (name, '\n'.join(
'%s:%s' % (attr, value) for attr, value in data.items()))
@staticmethod
def ignore(content):
"""Remove dataentry from *content*."""
page_content = []
start = False
for line in content.split('\n'):
if line == '----' and not start:
start = True
continue
if start:
page_content.append(line)
return '\n'.join(page_content) if page_content else content
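# Minimal end-to-end sketch using only the API defined above; the URL and credentials are
# placeholders (as in the class docstrings) and the page name and dataentry fields are hypothetical.
if __name__ == '__main__':
    wiki = DokuWiki('URL', 'USER', 'PASSWORD')
    print(wiki.version)
    # Write a generated dataentry block to a page, then read and parse it back.
    wiki.pages.set('playground:demo', Dataentry.gen('person', {'name': 'John'}))
    print(Dataentry.get(wiki.pages.get('playground:demo')))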
| [
"os.path.exists",
"collections.OrderedDict",
"xmlrpclib.Binary",
"os.makedirs",
"datetime.datetime.utcnow",
"datetime.datetime.strptime",
"re.compile",
"base64.b64encode",
"os.path.join",
"xmlrpclib.Transport.parse_response",
"base64.b64decode",
"datetime.datetime.now",
"xmlrpclib.Transport.send_request",
"xmlrpclib.Transport.send_headers",
"datetime.timedelta",
"weakref.ref",
"xmlrpclib.Transport.__init__",
"xmlrpclib.ServerProxy"
]
| [((945, 1007), 're.compile', 're.compile', (['"""(?P<proto>https?)://(?P<host>[^/]*)(?P<uri>/.*)?"""'], {}), "('(?P<proto>https?)://(?P<host>[^/]*)(?P<uri>/.*)?')\n", (955, 1007), False, 'import re\n'), ((1261, 1310), 'datetime.datetime.strptime', 'datetime.strptime', (['date[:-5]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(date[:-5], '%Y-%m-%dT%H:%M:%S')\n", (1278, 1310), False, 'from datetime import datetime, timedelta\n'), ((1359, 1401), 'datetime.datetime.strptime', 'datetime.strptime', (['date', '"""%Y%m%dT%H:%M:%S"""'], {}), "(date, '%Y%m%dT%H:%M:%S')\n", (1376, 1401), False, 'from datetime import datetime, timedelta\n'), ((1556, 1570), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1568, 1570), False, 'from datetime import datetime, timedelta\n'), ((1573, 1590), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1588, 1590), False, 'from datetime import datetime, timedelta\n'), ((1875, 1903), 'datetime.timedelta', 'timedelta', ([], {'hours': 'date_offset'}), '(hours=date_offset)\n', (1884, 1903), False, 'from datetime import datetime, timedelta\n'), ((2156, 2180), 'xmlrpclib.Transport.__init__', 'Transport.__init__', (['self'], {}), '(self)\n', (2174, 2180), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((2437, 2486), 'xmlrpclib.Transport.send_headers', 'Transport.send_headers', (['self', 'connection', 'headers'], {}), '(self, connection, headers)\n', (2459, 2486), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((3023, 3047), 'xmlrpclib.Transport.__init__', 'Transport.__init__', (['self'], {}), '(self)\n', (3041, 3047), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((3151, 3214), 'xmlrpclib.Transport.send_request', 'Transport.send_request', (['self', 'connection', 'handler', 'request_body'], {}), '(self, connection, handler, request_body)\n', (3173, 3214), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((15357, 15388), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (15369, 15388), False, 'import os\n'), ((2842, 2882), 'xmlrpclib.Transport.parse_response', 'Transport.parse_response', (['self', 'response'], {}), '(self, response)\n', (2866, 2882), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((3904, 3944), 'xmlrpclib.Transport.parse_response', 'Transport.parse_response', (['self', 'response'], {}), '(self, response)\n', (3928, 3944), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((5369, 5395), 'xmlrpclib.ServerProxy', 'ServerProxy', (['url'], {}), '(url, **kwargs)\n', (5380, 5395), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((15070, 15092), 'base64.b64decode', 'base64.b64decode', (['data'], {}), '(data)\n', (15086, 15092), False, 'import base64\n'), ((15280, 15303), 'os.path.exists', 'os.path.exists', (['dirpath'], {}), '(dirpath)\n', (15294, 15303), False, 'import os\n'), ((15317, 15337), 'os.makedirs', 'os.makedirs', (['dirpath'], {}), '(dirpath)\n', (15328, 15337), False, 'import os\n'), ((15400, 15424), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (15414, 15424), False, 'import os\n'), ((16357, 16381), 'base64.b64encode', 'base64.b64encode', (['_bytes'], {}), '(_bytes)\n', (16373, 16381), False, 'import base64\n'), ((16400, 16414), 'xmlrpclib.Binary', 'Binary', (['_bytes'], {}), '(_bytes)\n', (16406, 16414), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((17001, 17014), 
'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (17012, 17014), False, 'from collections import OrderedDict\n'), ((5863, 5880), 'weakref.ref', 'weakref.ref', (['self'], {}), '(self)\n', (5874, 5880), False, 'import weakref\n'), ((5914, 5931), 'weakref.ref', 'weakref.ref', (['self'], {}), '(self)\n', (5925, 5931), False, 'import weakref\n')] |
import setuptools
import re
with open("README.md", "r") as fh:
long_description = fh.read()
# get version from _version.py file, from below
# https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
VERSION_FILE = "test_aide/_version.py"
version_file_str = open(VERSION_FILE, "rt").read()
VERSION_STR_RE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VERSION_STR_RE, version_file_str, re.M)
if mo:
version = mo.group(1)
else:
raise RuntimeError("Unable to find version string in %s." % (VERSION_FILE,))
def list_reqs(fname="requirements.txt"):
with open(fname) as fd:
return fd.read().splitlines()
setuptools.setup(
name="test-aide",
version=version,
author="LV GI Data Science Team",
author_email="<EMAIL>",
description="Package of helper functions to be used for unit testing",
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=list_reqs(),
python_requires=">=3.6",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Operating System :: OS Independent",
"License :: OSI Approved :: BSD License",
],
)
| [
"setuptools.find_packages",
"re.search"
]
| [((391, 440), 're.search', 're.search', (['VERSION_STR_RE', 'version_file_str', 're.M'], {}), '(VERSION_STR_RE, version_file_str, re.M)\n', (400, 440), False, 'import re\n'), ((977, 1003), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (1001, 1003), False, 'import setuptools\n')] |
#! /usr/bin/env python3
import importlib
import logging
import os
import subprocess
from setuptools import setup
from setuptools.command.install import install as install
from setuptools.command.develop import develop as develop
logger = logging.getLogger(__name__)
stan_model_files = [
os.path.join("nonperiodic", "no-periodicity.stan"),
os.path.join("nonperiodic", "start-high-high-low.stan"),
os.path.join("nonperiodic", "start-high-low-high.stan"),
os.path.join("periodic", "start-high-low-low.stan"),
os.path.join("untranslated", "gaussian-naive-bayes.stan"),
os.path.join("translated", "periodic-gaussian-mixture.stan")
]
stan_pickle_files = [
os.path.join("nonperiodic", "no-periodicity.pkl"),
os.path.join("nonperiodic", "start-high-high-low.pkl"),
os.path.join("nonperiodic", "start-high-low-high.pkl"),
os.path.join("periodic", "start-high-low-low.pkl"),
os.path.join("untranslated", "gaussian-naive-bayes.pkl"),
os.path.join("translated", "periodic-gaussian-mixture.pkl")
]
def _pickle_it(stan, pickle):
import shlex
dirname = os.path.dirname(pickle)
if not os.path.exists(dirname):
os.makedirs(dirname)
cmd = "pickle-stan {} {}".format(shlex.quote(stan), shlex.quote(pickle))
logging.info(cmd)
subprocess.call(cmd, shell=True)
def _post_install(force_recompile):
import site
importlib.reload(site)
import pbio.ribo.ribo_filenames as filenames
import pbio.misc.shell_utils as shell_utils
smf = [os.path.join("rpbp_models", s) for s in stan_model_files]
models_base = filenames.get_default_models_base()
spf = [os.path.join(models_base, s) for s in stan_pickle_files]
# Compile and pickle the Stan models
if force_recompile:
for stan, pickle in zip(smf, spf):
_pickle_it(stan, pickle)
else: # default
for stan, pickle in zip(smf, spf):
if os.path.exists(pickle):
msg = "A model already exists at: {}. Skipping.".format(pickle)
logging.warning(msg)
continue
_pickle_it(stan, pickle)
# Check for the prerequisite programs
programs = ['flexbar']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='flexbar', logger=logger)
programs = ['STAR']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='STAR', logger=logger)
programs = ['bowtie2', 'bowtie2-build-s']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='bowtie2', logger=logger)
programs = ['samtools']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='SAMtools', logger=logger)
class SetupInstall(install):
user_options = install.user_options + [
('force-recompile', None, 'Set this flag to recompile the Stan models'),
]
def initialize_options(self):
install.initialize_options(self)
self.force_recompile = None
def finalize_options(self):
install.finalize_options(self)
def run(self):
force_recompile = self.force_recompile # 0 or 1
level = logging.getLevelName("INFO")
logging.basicConfig(level=level,
format='%(levelname)-8s : %(message)s')
install.run(self)
# skip if RTD
if not os.environ.get('READTHEDOCS') == 'True':
_post_install(force_recompile)
class SetupDevelop(develop):
user_options = develop.user_options + [
('force-recompile', None, 'Set this flag to recompile the Stan models'),
]
def initialize_options(self):
develop.initialize_options(self)
self.force_recompile = None
def finalize_options(self):
develop.finalize_options(self)
def run(self):
force_recompile = self.force_recompile # 0 or 1
level = logging.getLevelName("INFO")
logging.basicConfig(level=level,
format='%(levelname)-8s : %(message)s')
develop.run(self)
# skip if RTD
if not os.environ.get('READTHEDOCS') == 'True':
_post_install(force_recompile)
setup(
cmdclass={
'install': SetupInstall,
'develop': SetupDevelop
}
)
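# Assumed invocation sketch: the custom install/develop commands above add a
# --force-recompile option, so the Stan models can be rebuilt explicitly, e.g.:
#
#     python setup.py install --force-recompile
#     python setup.py develop --force-recompile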
| [
"logging.getLogger",
"setuptools.command.install.install.run",
"setuptools.command.install.install.finalize_options",
"logging.info",
"pbio.misc.shell_utils.check_programs_exist",
"os.path.exists",
"subprocess.call",
"shlex.quote",
"setuptools.setup",
"logging.warning",
"os.path.dirname",
"pbio.ribo.ribo_filenames.get_default_models_base",
"logging.getLevelName",
"logging.basicConfig",
"os.makedirs",
"setuptools.command.install.install.initialize_options",
"setuptools.command.develop.develop.initialize_options",
"os.path.join",
"os.environ.get",
"importlib.reload",
"setuptools.command.develop.develop.finalize_options",
"setuptools.command.develop.develop.run"
]
| [((242, 269), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (259, 269), False, 'import logging\n'), ((4350, 4416), 'setuptools.setup', 'setup', ([], {'cmdclass': "{'install': SetupInstall, 'develop': SetupDevelop}"}), "(cmdclass={'install': SetupInstall, 'develop': SetupDevelop})\n", (4355, 4416), False, 'from setuptools import setup\n'), ((297, 347), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""no-periodicity.stan"""'], {}), "('nonperiodic', 'no-periodicity.stan')\n", (309, 347), False, 'import os\n'), ((353, 408), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-high-low.stan"""'], {}), "('nonperiodic', 'start-high-high-low.stan')\n", (365, 408), False, 'import os\n'), ((414, 469), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-low-high.stan"""'], {}), "('nonperiodic', 'start-high-low-high.stan')\n", (426, 469), False, 'import os\n'), ((475, 526), 'os.path.join', 'os.path.join', (['"""periodic"""', '"""start-high-low-low.stan"""'], {}), "('periodic', 'start-high-low-low.stan')\n", (487, 526), False, 'import os\n'), ((532, 589), 'os.path.join', 'os.path.join', (['"""untranslated"""', '"""gaussian-naive-bayes.stan"""'], {}), "('untranslated', 'gaussian-naive-bayes.stan')\n", (544, 589), False, 'import os\n'), ((595, 655), 'os.path.join', 'os.path.join', (['"""translated"""', '"""periodic-gaussian-mixture.stan"""'], {}), "('translated', 'periodic-gaussian-mixture.stan')\n", (607, 655), False, 'import os\n'), ((686, 735), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""no-periodicity.pkl"""'], {}), "('nonperiodic', 'no-periodicity.pkl')\n", (698, 735), False, 'import os\n'), ((741, 795), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-high-low.pkl"""'], {}), "('nonperiodic', 'start-high-high-low.pkl')\n", (753, 795), False, 'import os\n'), ((801, 855), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-low-high.pkl"""'], {}), "('nonperiodic', 'start-high-low-high.pkl')\n", (813, 855), False, 'import os\n'), ((861, 911), 'os.path.join', 'os.path.join', (['"""periodic"""', '"""start-high-low-low.pkl"""'], {}), "('periodic', 'start-high-low-low.pkl')\n", (873, 911), False, 'import os\n'), ((917, 973), 'os.path.join', 'os.path.join', (['"""untranslated"""', '"""gaussian-naive-bayes.pkl"""'], {}), "('untranslated', 'gaussian-naive-bayes.pkl')\n", (929, 973), False, 'import os\n'), ((979, 1038), 'os.path.join', 'os.path.join', (['"""translated"""', '"""periodic-gaussian-mixture.pkl"""'], {}), "('translated', 'periodic-gaussian-mixture.pkl')\n", (991, 1038), False, 'import os\n'), ((1106, 1129), 'os.path.dirname', 'os.path.dirname', (['pickle'], {}), '(pickle)\n', (1121, 1129), False, 'import os\n'), ((1277, 1294), 'logging.info', 'logging.info', (['cmd'], {}), '(cmd)\n', (1289, 1294), False, 'import logging\n'), ((1299, 1331), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (1314, 1331), False, 'import subprocess\n'), ((1391, 1413), 'importlib.reload', 'importlib.reload', (['site'], {}), '(site)\n', (1407, 1413), False, 'import importlib\n'), ((1605, 1640), 'pbio.ribo.ribo_filenames.get_default_models_base', 'filenames.get_default_models_base', ([], {}), '()\n', (1638, 1640), True, 'import pbio.ribo.ribo_filenames as filenames\n'), ((2211, 2318), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""flexbar"""', 
'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='flexbar', logger=logger)\n", (2243, 2318), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((2389, 2493), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""STAR"""', 'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='STAR', logger=logger)\n", (2421, 2493), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((2578, 2685), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""bowtie2"""', 'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='bowtie2', logger=logger)\n", (2610, 2685), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((2752, 2860), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""SAMtools"""', 'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='SAMtools', logger=logger)\n", (2784, 2860), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((1141, 1164), 'os.path.exists', 'os.path.exists', (['dirname'], {}), '(dirname)\n', (1155, 1164), False, 'import os\n'), ((1174, 1194), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (1185, 1194), False, 'import os\n'), ((1233, 1250), 'shlex.quote', 'shlex.quote', (['stan'], {}), '(stan)\n', (1244, 1250), False, 'import shlex\n'), ((1252, 1271), 'shlex.quote', 'shlex.quote', (['pickle'], {}), '(pickle)\n', (1263, 1271), False, 'import shlex\n'), ((1528, 1558), 'os.path.join', 'os.path.join', (['"""rpbp_models"""', 's'], {}), "('rpbp_models', s)\n", (1540, 1558), False, 'import os\n'), ((1652, 1680), 'os.path.join', 'os.path.join', (['models_base', 's'], {}), '(models_base, s)\n', (1664, 1680), False, 'import os\n'), ((3100, 3132), 'setuptools.command.install.install.initialize_options', 'install.initialize_options', (['self'], {}), '(self)\n', (3126, 3132), True, 'from setuptools.command.install import install as install\n'), ((3210, 3240), 'setuptools.command.install.install.finalize_options', 'install.finalize_options', (['self'], {}), '(self)\n', (3234, 3240), True, 'from setuptools.command.install import install as install\n'), ((3335, 3363), 'logging.getLevelName', 'logging.getLevelName', (['"""INFO"""'], {}), "('INFO')\n", (3355, 3363), False, 'import logging\n'), ((3372, 3444), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'level', 'format': '"""%(levelname)-8s : %(message)s"""'}), "(level=level, format='%(levelname)-8s : %(message)s')\n", (3391, 3444), False, 'import logging\n'), ((3482, 3499), 'setuptools.command.install.install.run', 'install.run', (['self'], {}), '(self)\n', (3493, 3499), True, 'from setuptools.command.install import install as install\n'), ((3827, 3859), 'setuptools.command.develop.develop.initialize_options', 'develop.initialize_options', (['self'], {}), '(self)\n', (3853, 3859), True, 'from setuptools.command.develop import develop as develop\n'), ((3937, 3967), 'setuptools.command.develop.develop.finalize_options', 'develop.finalize_options', (['self'], {}), '(self)\n', (3961, 3967), True, 'from setuptools.command.develop import develop as develop\n'), ((4062, 4090), 'logging.getLevelName', 'logging.getLevelName', (['"""INFO"""'], {}), "('INFO')\n", (4082, 4090), False, 'import logging\n'), ((4099, 4171), 'logging.basicConfig', 
'logging.basicConfig', ([], {'level': 'level', 'format': '"""%(levelname)-8s : %(message)s"""'}), "(level=level, format='%(levelname)-8s : %(message)s')\n", (4118, 4171), False, 'import logging\n'), ((4209, 4226), 'setuptools.command.develop.develop.run', 'develop.run', (['self'], {}), '(self)\n', (4220, 4226), True, 'from setuptools.command.develop import develop as develop\n'), ((1934, 1956), 'os.path.exists', 'os.path.exists', (['pickle'], {}), '(pickle)\n', (1948, 1956), False, 'import os\n'), ((2054, 2074), 'logging.warning', 'logging.warning', (['msg'], {}), '(msg)\n', (2069, 2074), False, 'import logging\n'), ((3537, 3566), 'os.environ.get', 'os.environ.get', (['"""READTHEDOCS"""'], {}), "('READTHEDOCS')\n", (3551, 3566), False, 'import os\n'), ((4264, 4293), 'os.environ.get', 'os.environ.get', (['"""READTHEDOCS"""'], {}), "('READTHEDOCS')\n", (4278, 4293), False, 'import os\n')] |
import os
import h5py
import nibabel as nb
import numpy as np
import torch
import torch.utils.data as data
from torchvision import transforms
import utils.preprocessor as preprocessor
# transform_train = transforms.Compose([
# transforms.RandomCrop(200, padding=56),
# transforms.ToTensor(),
# ])
class ImdbData(data.Dataset):
def __init__(self, X, y, w, transforms=None):
self.X = X if len(X.shape) == 4 else X[:, np.newaxis, :, :]
self.y = y
self.w = w
self.transforms = transforms
def __getitem__(self, index):
img = torch.from_numpy(self.X[index])
label = torch.from_numpy(self.y[index])
weight = torch.from_numpy(self.w[index])
return img, label, weight
def __len__(self):
return len(self.y)
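# Minimal sketch: ImdbData is a regular torch Dataset, so it can be wrapped in a standard
# DataLoader. The helper name and the default batch size are assumptions for illustration.
def get_data_loader(dataset, batch_size=4, shuffle=True):
    # Batches yield (image, label, class-weight) tensors as produced by __getitem__ above.
    return data.DataLoader(dataset, batch_size=batch_size, shuffle=shuffle)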
def get_imdb_dataset(data_params):
data_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_data_file']), 'r')
label_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_label_file']), 'r')
class_weight_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_class_weights_file']), 'r')
weight_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_weights_file']), 'r')
data_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_data_file']), 'r')
label_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_label_file']), 'r')
class_weight_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_class_weights_file']), 'r')
weight_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_weights_file']), 'r')
return (ImdbData(data_train['data'][()], label_train['label'][()], class_weight_train['class_weights'][()]),
ImdbData(data_test['data'][()], label_test['label'][()], class_weight_test['class_weights'][()]))
def load_dataset(file_paths,
orientation,
remap_config,
return_weights=False,
reduce_slices=False,
remove_black=False):
print("Loading and preprocessing data...")
volume_list, labelmap_list, headers, class_weights_list, weights_list = [], [], [], [], []
for file_path in file_paths:
volume, labelmap, class_weights, weights, header = load_and_preprocess(file_path, orientation,
remap_config=remap_config,
reduce_slices=reduce_slices,
remove_black=remove_black,
return_weights=return_weights)
volume_list.append(volume)
labelmap_list.append(labelmap)
if return_weights:
class_weights_list.append(class_weights)
weights_list.append(weights)
headers.append(header)
print("#", end='', flush=True)
print("100%", flush=True)
if return_weights:
return volume_list, labelmap_list, class_weights_list, weights_list, headers
else:
return volume_list, labelmap_list, headers
def load_and_preprocess(file_path, orientation, remap_config, reduce_slices=False,
remove_black=False,
return_weights=False):
volume, labelmap, header = load_data(file_path, orientation)
volume, labelmap, class_weights, weights = preprocess(volume, labelmap, remap_config=remap_config,
reduce_slices=reduce_slices,
remove_black=remove_black,
return_weights=return_weights)
return volume, labelmap, class_weights, weights, header
def load_and_preprocess_eval(file_path, orientation, notlabel=True):
volume_nifty = nb.load(file_path[0])
header = volume_nifty.header
volume = volume_nifty.get_fdata()
if notlabel:
volume = (volume - np.min(volume)) / (np.max(volume) - np.min(volume))
else:
volume = np.round(volume)
if orientation == "COR":
volume = volume.transpose((2, 0, 1))
elif orientation == "AXI":
volume = volume.transpose((1, 2, 0))
return volume, header
def load_data(file_path, orientation):
volume_nifty, labelmap_nifty = nb.load(file_path[0]), nb.load(file_path[1])
volume, labelmap = volume_nifty.get_fdata(), labelmap_nifty.get_fdata()
volume = (volume - np.min(volume)) / (np.max(volume) - np.min(volume))
volume, labelmap = preprocessor.rotate_orientation(volume, labelmap, orientation)
return volume, labelmap, volume_nifty.header
def preprocess(volume, labelmap, remap_config, reduce_slices=False, remove_black=False, return_weights=False):
if reduce_slices:
volume, labelmap = preprocessor.reduce_slices(volume, labelmap)
if remap_config:
labelmap = preprocessor.remap_labels(labelmap, remap_config)
if remove_black:
volume, labelmap = preprocessor.remove_black(volume, labelmap)
if return_weights:
class_weights, weights = preprocessor.estimate_weights_mfb(labelmap)
return volume, labelmap, class_weights, weights
else:
return volume, labelmap, None, None
# def load_file_paths(data_dir, label_dir, volumes_txt_file=None):
# """
# This function returns the file paths combined as a list where each element is a 2 element tuple, 0th being data and 1st being label.
# It should be modified to suit the need of the project
# :param data_dir: Directory which contains the data files
# :param label_dir: Directory which contains the label files
# :param volumes_txt_file: (Optional) Path to the a csv file, when provided only these data points will be read
# :return: list of file paths as string
# """
#
# volume_exclude_list = ['IXI290', 'IXI423']
# if volumes_txt_file:
# with open(volumes_txt_file) as file_handle:
# volumes_to_use = file_handle.read().splitlines()
# else:
# volumes_to_use = [name for name in os.listdir(data_dir) if
# name.startswith('IXI') and name not in volume_exclude_list]
#
# file_paths = [
# [os.path.join(data_dir, vol, 'mri/orig.mgz'), os.path.join(label_dir, vol, 'mri/aseg.auto_noCCseg.mgz')]
# for
# vol in volumes_to_use]
# return file_paths
def load_file_paths(data_dir, label_dir, data_id, volumes_txt_file=None):
"""
    This function returns the file paths combined as a list where each element is a 2-element tuple: the data file path (index 0) and the label file path (index 1).
It should be modified to suit the need of the project
:param data_dir: Directory which contains the data files
:param label_dir: Directory which contains the label files
:param data_id: A flag indicates the name of Dataset for proper file reading
    :param volumes_txt_file: (Optional) Path to a csv file; when provided, only these data points will be read
:return: list of file paths as string
"""
if volumes_txt_file:
with open(volumes_txt_file) as file_handle:
volumes_to_use = file_handle.read().splitlines()
else:
volumes_to_use = [name for name in os.listdir(data_dir)]
if data_id == "MALC":
file_paths = [
[os.path.join(data_dir, vol, 'mri/orig.mgz'), os.path.join(label_dir, vol + '_glm.mgz')]
for
vol in volumes_to_use]
elif data_id == "ADNI":
file_paths = [
[os.path.join(data_dir, vol, 'orig.mgz'), os.path.join(label_dir, vol, 'Lab_con.mgz')]
for
vol in volumes_to_use]
elif data_id == "CANDI":
file_paths = [
[os.path.join(data_dir, vol + '/' + vol + '_1.mgz'),
os.path.join(label_dir, vol + '/' + vol + '_1_seg.mgz')]
for
vol in volumes_to_use]
elif data_id == "IBSR":
file_paths = [
[os.path.join(data_dir, vol, 'mri/orig.mgz'), os.path.join(label_dir, vol + '_map.nii.gz')]
for
vol in volumes_to_use]
elif data_id == "BORIS": #BORIS
file_paths = [
[os.path.join(data_dir, vol), os.path.join(label_dir, vol.replace('.nii', '_seg.nii'))]
for
vol in volumes_to_use]
else:
raise ValueError("Invalid entry, valid options are MALC, ADNI, CANDI and IBSR")
return file_paths
def load_file_paths_eval(data_dir, volumes_txt_file, dir_struct):
"""
    This function returns the file paths as a list where each element is a single-element list containing the data file path (no label paths are returned here).
It should be modified to suit the need of the project
:param data_dir: Directory which contains the data files
    :param volumes_txt_file: Path to a csv file; only these data points will be read
:param dir_struct: If the id_list is in FreeSurfer style or normal
:return: list of file paths as string
"""
with open(volumes_txt_file) as file_handle:
volumes_to_use = file_handle.read().splitlines()
if dir_struct == "FS":
file_paths = [
[os.path.join(data_dir, vol, 'mri/orig.mgz')]
for
vol in volumes_to_use]
elif dir_struct == "Linear":
file_paths = [
[os.path.join(data_dir, vol)]
for
vol in volumes_to_use]
elif dir_struct == "part_FS":
file_paths = [
[os.path.join(data_dir, vol, 'orig.mgz')]
for
vol in volumes_to_use]
else:
raise ValueError("Invalid entry, valid options are FS and Linear")
return file_paths
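# Minimal end-to-end sketch: the "MALC" data_id and the "COR" orientation appear in the code
# above; the directories, the volumes file and remap_config=None are hypothetical placeholders.
if __name__ == '__main__':
    example_paths = load_file_paths('/data/MALC', '/labels/MALC', 'MALC',
                                    volumes_txt_file='train_volumes.txt')
    volumes, labelmaps, headers = load_dataset(example_paths, 'COR', remap_config=None)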
| [
"os.listdir",
"nibabel.load",
"os.path.join",
"torch.from_numpy",
"utils.preprocessor.estimate_weights_mfb",
"utils.preprocessor.remap_labels",
"numpy.max",
"utils.preprocessor.rotate_orientation",
"numpy.min",
"utils.preprocessor.reduce_slices",
"utils.preprocessor.remove_black",
"numpy.round"
]
| [((4062, 4083), 'nibabel.load', 'nb.load', (['file_path[0]'], {}), '(file_path[0])\n', (4069, 4083), True, 'import nibabel as nb\n'), ((4766, 4828), 'utils.preprocessor.rotate_orientation', 'preprocessor.rotate_orientation', (['volume', 'labelmap', 'orientation'], {}), '(volume, labelmap, orientation)\n', (4797, 4828), True, 'import utils.preprocessor as preprocessor\n'), ((582, 613), 'torch.from_numpy', 'torch.from_numpy', (['self.X[index]'], {}), '(self.X[index])\n', (598, 613), False, 'import torch\n'), ((630, 661), 'torch.from_numpy', 'torch.from_numpy', (['self.y[index]'], {}), '(self.y[index])\n', (646, 661), False, 'import torch\n'), ((679, 710), 'torch.from_numpy', 'torch.from_numpy', (['self.w[index]'], {}), '(self.w[index])\n', (695, 710), False, 'import torch\n'), ((860, 929), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_data_file']"], {}), "(data_params['data_dir'], data_params['train_data_file'])\n", (872, 929), False, 'import os\n'), ((964, 1034), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_label_file']"], {}), "(data_params['data_dir'], data_params['train_label_file'])\n", (976, 1034), False, 'import os\n'), ((1076, 1154), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_class_weights_file']"], {}), "(data_params['data_dir'], data_params['train_class_weights_file'])\n", (1088, 1154), False, 'import os\n'), ((1190, 1262), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_weights_file']"], {}), "(data_params['data_dir'], data_params['train_weights_file'])\n", (1202, 1262), False, 'import os\n'), ((1296, 1364), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_data_file']"], {}), "(data_params['data_dir'], data_params['test_data_file'])\n", (1308, 1364), False, 'import os\n'), ((1398, 1467), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_label_file']"], {}), "(data_params['data_dir'], data_params['test_label_file'])\n", (1410, 1467), False, 'import os\n'), ((1508, 1585), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_class_weights_file']"], {}), "(data_params['data_dir'], data_params['test_class_weights_file'])\n", (1520, 1585), False, 'import os\n'), ((1620, 1691), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_weights_file']"], {}), "(data_params['data_dir'], data_params['test_weights_file'])\n", (1632, 1691), False, 'import os\n'), ((4278, 4294), 'numpy.round', 'np.round', (['volume'], {}), '(volume)\n', (4286, 4294), True, 'import numpy as np\n'), ((4547, 4568), 'nibabel.load', 'nb.load', (['file_path[0]'], {}), '(file_path[0])\n', (4554, 4568), True, 'import nibabel as nb\n'), ((4570, 4591), 'nibabel.load', 'nb.load', (['file_path[1]'], {}), '(file_path[1])\n', (4577, 4591), True, 'import nibabel as nb\n'), ((5040, 5084), 'utils.preprocessor.reduce_slices', 'preprocessor.reduce_slices', (['volume', 'labelmap'], {}), '(volume, labelmap)\n', (5066, 5084), True, 'import utils.preprocessor as preprocessor\n'), ((5126, 5175), 'utils.preprocessor.remap_labels', 'preprocessor.remap_labels', (['labelmap', 'remap_config'], {}), '(labelmap, remap_config)\n', (5151, 5175), True, 'import utils.preprocessor as preprocessor\n'), ((5225, 5268), 'utils.preprocessor.remove_black', 'preprocessor.remove_black', (['volume', 'labelmap'], {}), '(volume, labelmap)\n', (5250, 5268), True, 'import utils.preprocessor as 
preprocessor\n'), ((5326, 5369), 'utils.preprocessor.estimate_weights_mfb', 'preprocessor.estimate_weights_mfb', (['labelmap'], {}), '(labelmap)\n', (5359, 5369), True, 'import utils.preprocessor as preprocessor\n'), ((4691, 4705), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4697, 4705), True, 'import numpy as np\n'), ((4710, 4724), 'numpy.max', 'np.max', (['volume'], {}), '(volume)\n', (4716, 4724), True, 'import numpy as np\n'), ((4727, 4741), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4733, 4741), True, 'import numpy as np\n'), ((4199, 4213), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4205, 4213), True, 'import numpy as np\n'), ((4218, 4232), 'numpy.max', 'np.max', (['volume'], {}), '(volume)\n', (4224, 4232), True, 'import numpy as np\n'), ((4235, 4249), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4241, 4249), True, 'import numpy as np\n'), ((7469, 7489), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (7479, 7489), False, 'import os\n'), ((7554, 7597), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""mri/orig.mgz"""'], {}), "(data_dir, vol, 'mri/orig.mgz')\n", (7566, 7597), False, 'import os\n'), ((7599, 7640), 'os.path.join', 'os.path.join', (['label_dir', "(vol + '_glm.mgz')"], {}), "(label_dir, vol + '_glm.mgz')\n", (7611, 7640), False, 'import os\n'), ((9395, 9438), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""mri/orig.mgz"""'], {}), "(data_dir, vol, 'mri/orig.mgz')\n", (9407, 9438), False, 'import os\n'), ((7757, 7796), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""orig.mgz"""'], {}), "(data_dir, vol, 'orig.mgz')\n", (7769, 7796), False, 'import os\n'), ((7798, 7841), 'os.path.join', 'os.path.join', (['label_dir', 'vol', '"""Lab_con.mgz"""'], {}), "(label_dir, vol, 'Lab_con.mgz')\n", (7810, 7841), False, 'import os\n'), ((9560, 9587), 'os.path.join', 'os.path.join', (['data_dir', 'vol'], {}), '(data_dir, vol)\n', (9572, 9587), False, 'import os\n'), ((7959, 8009), 'os.path.join', 'os.path.join', (['data_dir', "(vol + '/' + vol + '_1.mgz')"], {}), "(data_dir, vol + '/' + vol + '_1.mgz')\n", (7971, 8009), False, 'import os\n'), ((8024, 8079), 'os.path.join', 'os.path.join', (['label_dir', "(vol + '/' + vol + '_1_seg.mgz')"], {}), "(label_dir, vol + '/' + vol + '_1_seg.mgz')\n", (8036, 8079), False, 'import os\n'), ((9710, 9749), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""orig.mgz"""'], {}), "(data_dir, vol, 'orig.mgz')\n", (9722, 9749), False, 'import os\n'), ((8196, 8239), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""mri/orig.mgz"""'], {}), "(data_dir, vol, 'mri/orig.mgz')\n", (8208, 8239), False, 'import os\n'), ((8241, 8285), 'os.path.join', 'os.path.join', (['label_dir', "(vol + '_map.nii.gz')"], {}), "(label_dir, vol + '_map.nii.gz')\n", (8253, 8285), False, 'import os\n'), ((8410, 8437), 'os.path.join', 'os.path.join', (['data_dir', 'vol'], {}), '(data_dir, vol)\n', (8422, 8437), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'design.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(650, 550)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(20, 10, 140, 13))
self.label.setObjectName("label")
self.song_title = QtWidgets.QLineEdit(self.centralwidget)
self.song_title.setGeometry(QtCore.QRect(90, 30, 113, 20))
self.song_title.setObjectName("song_title")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(20, 30, 60, 13))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(220, 30, 80, 13))
self.label_3.setObjectName("label_3")
self.song_autor = QtWidgets.QLineEdit(self.centralwidget)
self.song_autor.setGeometry(QtCore.QRect(310, 30, 113, 20))
self.song_autor.setObjectName("song_autor")
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(20, 90, 140, 13))
self.label_4.setObjectName("label_4")
self.orig_text = QtWidgets.QPlainTextEdit(self.centralwidget)
self.orig_text.setGeometry(QtCore.QRect(20, 150, 270, 340))
self.orig_text.setObjectName("orig_text")
self.label_5 = QtWidgets.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(20, 120, 60, 13))
self.label_5.setObjectName("label_5")
self.trans_text = QtWidgets.QPlainTextEdit(self.centralwidget)
self.trans_text.setGeometry(QtCore.QRect(320, 150, 270, 340))
self.trans_text.setObjectName("trans_text")
self.label_6 = QtWidgets.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(320, 120, 120, 13))
self.label_6.setObjectName("label_6")
self.get_text = QtWidgets.QPushButton(self.centralwidget)
self.get_text.setGeometry(QtCore.QRect(310, 70, 100, 23))
self.get_text.setObjectName("get_text")
self.pretty_flag = QtWidgets.QCheckBox(self.centralwidget)
self.pretty_flag.setGeometry(QtCore.QRect(20, 60, 250, 20))
self.pretty_flag.setObjectName("pretty_flag")
self.info = QtWidgets.QLabel(self.centralwidget)
self.info.setGeometry(QtCore.QRect(30, 500, 560, 13))
self.info.setText("")
self.info.setObjectName("info")
self.error_text = QtWidgets.QLabel(self.centralwidget)
self.error_text.setGeometry(QtCore.QRect(30, 520, 560, 20))
self.error_text.setText("")
self.error_text.setObjectName("error_text")
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Проект 1"))
self.label.setText(_translate("MainWindow", "Введите данные о песне:"))
self.label_2.setText(_translate("MainWindow", "Название:"))
self.label_3.setText(_translate("MainWindow", "Исполнитель:"))
self.label_4.setText(_translate("MainWindow", "Полученный текст песни:"))
self.label_5.setText(_translate("MainWindow", "Оригинал:"))
self.label_6.setText(_translate("MainWindow", "Перевод на русский:"))
self.get_text.setText(_translate("MainWindow", "Запрос текста"))
self.pretty_flag.setText(_translate("MainWindow", "Красивый текст (без указания на припев)"))
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QPlainTextEdit",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QCheckBox",
"PyQt5.QtWidgets.QLineEdit"
]
| [((527, 556), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (544, 556), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((638, 674), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (654, 674), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((808, 847), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (827, 847), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((993, 1029), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1009, 1029), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1165, 1201), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1181, 1201), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1341, 1380), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1360, 1380), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1527, 1563), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1543, 1563), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1702, 1746), 'PyQt5.QtWidgets.QPlainTextEdit', 'QtWidgets.QPlainTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1726, 1746), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1891, 1927), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1907, 1927), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2067, 2111), 'PyQt5.QtWidgets.QPlainTextEdit', 'QtWidgets.QPlainTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2091, 2111), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2260, 2296), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2276, 2296), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2436, 2477), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2457, 2477), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2622, 2661), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2641, 2661), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2807, 2843), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2823, 2843), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3006, 3042), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3022, 3042), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3310, 3359), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (3347, 3359), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((707, 736), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(10)', '(140)', '(13)'], {}), '(20, 10, 140, 13)\n', (719, 736), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((885, 914), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(90)', '(30)', '(113)', '(20)'], {}), '(90, 30, 113, 20)\n', (897, 914), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1064, 1092), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(30)', '(60)', '(13)'], {}), '(20, 30, 60, 13)\n', (1076, 1092), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1236, 1265), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(220)', '(30)', '(80)', '(13)'], {}), '(220, 30, 80, 13)\n', (1248, 1265), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1418, 1448), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(310)', '(30)', '(113)', '(20)'], {}), '(310, 30, 113, 20)\n', (1430, 1448), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1598, 1627), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(90)', '(140)', '(13)'], {}), '(20, 90, 140, 13)\n', (1610, 1627), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1783, 1814), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(150)', '(270)', '(340)'], {}), '(20, 150, 270, 340)\n', (1795, 1814), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1962, 1991), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(120)', '(60)', '(13)'], {}), '(20, 120, 60, 13)\n', (1974, 1991), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2149, 2181), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(320)', '(150)', '(270)', '(340)'], {}), '(320, 150, 270, 340)\n', (2161, 2181), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2331, 2362), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(320)', '(120)', '(120)', '(13)'], {}), '(320, 120, 120, 13)\n', (2343, 2362), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2513, 2543), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(310)', '(70)', '(100)', '(23)'], {}), '(310, 70, 100, 23)\n', (2525, 2543), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2700, 2729), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(60)', '(250)', '(20)'], {}), '(20, 60, 250, 20)\n', (2712, 2729), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2875, 2905), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(30)', '(500)', '(560)', '(13)'], {}), '(30, 500, 560, 13)\n', (2887, 2905), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3080, 3110), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(30)', '(520)', '(560)', '(20)'], {}), '(30, 520, 560, 20)\n', (3092, 3110), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
from simulation.car import spawn_drivers
from simulation.passenger import spawn_passengers
from simulation.core import World, Clock
conf = {
"x": 100,
"y": 100,
"drivers": 200,
"users": 1000,
"start": "2019-07-08T00:00:00",
"end": "2019-07-08T00:01:00"
}
clock = Clock(conf["start"], conf["end"])
if __name__ == '__main__':
world = World([conf['x'], conf['y']], clock=clock)
world.register_drivers(spawn_drivers(conf["drivers"], conf['x'], conf['y']))
world.register_passengers(spawn_passengers(conf["users"], conf['x'], conf['y']))
world.run(log=False)
| [
"simulation.passenger.spawn_passengers",
"simulation.car.spawn_drivers",
"simulation.core.World",
"simulation.core.Clock"
]
| [((289, 322), 'simulation.core.Clock', 'Clock', (["conf['start']", "conf['end']"], {}), "(conf['start'], conf['end'])\n", (294, 322), False, 'from simulation.core import World, Clock\n'), ((363, 405), 'simulation.core.World', 'World', (["[conf['x'], conf['y']]"], {'clock': 'clock'}), "([conf['x'], conf['y']], clock=clock)\n", (368, 405), False, 'from simulation.core import World, Clock\n'), ((433, 485), 'simulation.car.spawn_drivers', 'spawn_drivers', (["conf['drivers']", "conf['x']", "conf['y']"], {}), "(conf['drivers'], conf['x'], conf['y'])\n", (446, 485), False, 'from simulation.car import spawn_drivers\n'), ((517, 570), 'simulation.passenger.spawn_passengers', 'spawn_passengers', (["conf['users']", "conf['x']", "conf['y']"], {}), "(conf['users'], conf['x'], conf['y'])\n", (533, 570), False, 'from simulation.passenger import spawn_passengers\n')] |
import json
d1 = {}
with open("/home/qinyuan/zs/out/bart-large-with-description-grouped-1e-5-outerbsz4-innerbsz32-adapterdim4-unfreeze-dec29/test_predictions.jsonl") as fin:
for line in fin:
d = json.loads(line)
d1[d["id"]] = d["output"][0]["answer"]
d2 = {}
dq = {}
with open("/home/qinyuan/zs/out/bart-large-zsre-with-description-LR2e-5-FREQ32-dec27/test_predictions_submitted.jsonl") as fin:
for line in fin:
d = json.loads(line)
d2[d["id"]] = d["output"][0]["answer"]
dq[d["id"]] = d["input"]
d3 = {}
with open("/home/qinyuan/zs/data/structured_zeroshot-test.jsonl") as fin:
for line in fin:
d = json.loads(line)
d3[d["id"]] = [item["answer"] for item in d["output"]]
count = 0
win1 = 0
win2 = 0
for key in d1.keys():
    if d1[key] != d2[key]:
print("{}. {}. {}. {}. {}".format(key, dq[key], d1[key], d2[key], d3[key]))
count += 1
if d1[key] in d3[key] and d2[key] not in d3[key]:
win1 += 1
print(d1[key])
print(d2[key])
if d2[key] in d3[key] and d1[key] not in d3[key]:
win2 += 1
print(d1[key])
print(d2[key])
print(count)
print(win1)
print(win2)
| [
"json.loads"
]
| [((208, 224), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (218, 224), False, 'import json\n'), ((450, 466), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (460, 466), False, 'import json\n'), ((663, 679), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (673, 679), False, 'import json\n')] |