code (stringlengths 20-1.05M) | apis (sequence) | extract_api (stringlengths 75-5.24M) |
---|---|---|
from typing import List, Callable
from aoc2020.day5.main import SolverDay5, Seat
from tests.utils.puzzle_examples_checker import PuzzleExamplesChecker, Example
class TestSolverDay5(PuzzleExamplesChecker):
    day: int = 5
solver: SolverDay5 = SolverDay5
examples: List[Example] = [
Example(data="BFFFBBFRRR", solution_part1=567),
Example(data="FFFBBBFRRR", solution_part1=119),
Example(data="BBFFBBFRLL", solution_part1=820),
]
parser: Callable[[str], List[Seat]] = SolverDay5.parser
| [
"tests.utils.puzzle_examples_checker.Example"
] | [((301, 347), 'tests.utils.puzzle_examples_checker.Example', 'Example', ([], {'data': '"""BFFFBBFRRR"""', 'solution_part1': '(567)'}), "(data='BFFFBBFRRR', solution_part1=567)\n", (308, 347), False, 'from tests.utils.puzzle_examples_checker import PuzzleExamplesChecker, Example\n'), ((357, 403), 'tests.utils.puzzle_examples_checker.Example', 'Example', ([], {'data': '"""FFFBBBFRRR"""', 'solution_part1': '(119)'}), "(data='FFFBBBFRRR', solution_part1=119)\n", (364, 403), False, 'from tests.utils.puzzle_examples_checker import PuzzleExamplesChecker, Example\n'), ((413, 459), 'tests.utils.puzzle_examples_checker.Example', 'Example', ([], {'data': '"""BBFFBBFRLL"""', 'solution_part1': '(820)'}), "(data='BBFFBBFRLL', solution_part1=820)\n", (420, 459), False, 'from tests.utils.puzzle_examples_checker import PuzzleExamplesChecker, Example\n')] |
# -*- coding: utf-8 -*-
import unittest
from wbc.views.api.suggest import Suggest
class SuggestTest(unittest.TestCase):
@staticmethod
def test_trim_query():
assert Suggest.trim_query('') == ''
assert Suggest.trim_query('s') == 's'
assert Suggest.trim_query('sp') == 'sp'
assert Suggest.trim_query('spr') == 'spr'
assert Suggest.trim_query('sprawa') == 'sprawa'
assert Suggest.trim_query('sprawa ') == 'sprawa'
assert Suggest.trim_query('sprawa p') == 'sprawa'
assert Suggest.trim_query('sprawa pa') == 'sprawa'
assert Suggest.trim_query('sprawa pau') == 'sprawa pau'
assert Suggest.trim_query('sprawa paula von hi') == 'sprawa paula von'
| [
"wbc.views.api.suggest.Suggest.trim_query"
] | [((183, 205), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['""""""'], {}), "('')\n", (201, 205), False, 'from wbc.views.api.suggest import Suggest\n'), ((227, 250), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""s"""'], {}), "('s')\n", (245, 250), False, 'from wbc.views.api.suggest import Suggest\n'), ((273, 297), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""sp"""'], {}), "('sp')\n", (291, 297), False, 'from wbc.views.api.suggest import Suggest\n'), ((321, 346), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""spr"""'], {}), "('spr')\n", (339, 346), False, 'from wbc.views.api.suggest import Suggest\n'), ((372, 400), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""sprawa"""'], {}), "('sprawa')\n", (390, 400), False, 'from wbc.views.api.suggest import Suggest\n'), ((428, 457), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""sprawa """'], {}), "('sprawa ')\n", (446, 457), False, 'from wbc.views.api.suggest import Suggest\n'), ((485, 515), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""sprawa p"""'], {}), "('sprawa p')\n", (503, 515), False, 'from wbc.views.api.suggest import Suggest\n'), ((543, 574), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""sprawa pa"""'], {}), "('sprawa pa')\n", (561, 574), False, 'from wbc.views.api.suggest import Suggest\n'), ((603, 635), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""sprawa pau"""'], {}), "('sprawa pau')\n", (621, 635), False, 'from wbc.views.api.suggest import Suggest\n'), ((668, 709), 'wbc.views.api.suggest.Suggest.trim_query', 'Suggest.trim_query', (['"""sprawa paula von hi"""'], {}), "('sprawa paula von hi')\n", (686, 709), False, 'from wbc.views.api.suggest import Suggest\n')] |
# -*- coding: utf-8 -*-
# MIT License
# Copyright (c) 2021 Brokenwind
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import threading
import pandas as pd
class LocalizeThread(threading.Thread):
    '''
    Thread that persists data to a local file.
    '''
    def __init__(self, data, file_path, columns=None, sep='\t'):
        '''
        :param data: the data to persist locally
        :param file_path: destination file name and path
        '''
threading.Thread.__init__(self)
self.data = data
self.file_path = file_path
self.columns = columns
self.sep = sep
def run(self):
if isinstance(self.data, pd.DataFrame) or isinstance(self.data, pd.Series):
            # DataFrame or Series: call its to_csv method directly
if self.columns:
self.data.to_csv(self.file_path, encoding="utf-8", index=False, sep=self.sep, columns=self.columns)
else:
self.data.to_csv(self.file_path, encoding="utf-8", index=False, sep=self.sep, header=None)
elif isinstance(self.data, dict) or isinstance(self.data, list):
            # dict or list: serialize to JSON
data_str = json.dumps(self.data, indent=4, ensure_ascii=False)
self._localize_data(data_str, self.file_path)
else:
data_str = str(self.data)
self._localize_data(data_str, self.file_path)
def _localize_data(self, data: str, file_path):
        '''
        Persist data to a local file.
        :param data: the data to save
        :param file_path: destination path
        :return:
        '''
with open(file_path, 'w', encoding='utf-8') as file:
file.write(data)
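# NOTE (editor): illustrative usage sketch, not part of the original module;
# the frame contents and output file name are made up for demonstration.
if __name__ == '__main__':
    frame = pd.DataFrame({'name': ['a', 'b'], 'score': [1, 2]})
    worker = LocalizeThread(frame, 'scores.tsv', columns=['name', 'score'])
    worker.start()
    worker.join()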
| [
"threading.Thread.__init__",
"json.dumps"
] | [((1449, 1480), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (1474, 1480), False, 'import threading\n'), ((2160, 2211), 'json.dumps', 'json.dumps', (['self.data'], {'indent': '(4)', 'ensure_ascii': '(False)'}), '(self.data, indent=4, ensure_ascii=False)\n', (2170, 2211), False, 'import json\n')] |
import json
import os
import sys
import pathlib as pl
from flask import Flask, render_template, abort, Response, Markup
import mistune
app = Flask(__name__)
CONFIG_PATH, PROJECT_NAME = sys.argv[1:]
@app.route("/")
def todos():
todos = get_todos(project=PROJECT_NAME)
return render_template("todo.html", todos=todos)
def get_todos(project):
try:
with open(CONFIG_PATH) as c:
config = json.load(c)
project_dir = pl.Path(config["PROJECT_DIR"])
if project == "all":
return [
(p, project_html_todos(project_dir=project_dir / p))
for p in os.listdir(project_dir)
]
else:
return [
(project, project_html_todos(project_dir=project_dir / project))
]
except FileNotFoundError:
abort(Response("Config file was not found. Consider using 'vs init' first."))
except Exception as e:
print(e)
abort(Response(f"An unexpected error occurred."))
def project_html_todos(project_dir):
try:
with pl.Path.open(project_dir / ".todo") as todos:
return Markup(mistune.markdown(todos.read()))
except FileNotFoundError:
return Markup("<ul><li>Project doesn't contain .todo file.</li></ul>")
except Exception as e:
print(e)
abort(Response(f"An unexpected error occurred."))
@app.route("/call/<project>")
def call_back(project):
try:
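        # NOTE (editor): `project` comes straight from the URL path; interpolating
        # it into a shell command is injection-prone -- shlex.quote(project)
        # would be safer here.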
os.system(f"vs.py project {project}")
except Exception as e:
print(e)
abort(Response(f"An unexpected error occurred."))
else:
return "Success"
if __name__ == "__main__":
app.run(debug=True)
| [
"flask.render_template",
"os.listdir",
"pathlib.Path",
"flask.Flask",
"flask.Markup",
"pathlib.Path.open",
"flask.Response",
"json.load",
"os.system"
] | [((143, 158), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (148, 158), False, 'from flask import Flask, render_template, abort, Response, Markup\n'), ((286, 327), 'flask.render_template', 'render_template', (['"""todo.html"""'], {'todos': 'todos'}), "('todo.html', todos=todos)\n", (301, 327), False, 'from flask import Flask, render_template, abort, Response, Markup\n'), ((1507, 1544), 'os.system', 'os.system', (['f"""vs.py project {project}"""'], {}), "(f'vs.py project {project}')\n", (1516, 1544), False, 'import os\n'), ((421, 433), 'json.load', 'json.load', (['c'], {}), '(c)\n', (430, 433), False, 'import json\n'), ((460, 490), 'pathlib.Path', 'pl.Path', (["config['PROJECT_DIR']"], {}), "(config['PROJECT_DIR'])\n", (467, 490), True, 'import pathlib as pl\n'), ((1119, 1154), 'pathlib.Path.open', 'pl.Path.open', (["(project_dir / '.todo')"], {}), "(project_dir / '.todo')\n", (1131, 1154), True, 'import pathlib as pl\n'), ((1268, 1331), 'flask.Markup', 'Markup', (['"""<ul><li>Project doesn\'t contain .todo file.</li></ul>"""'], {}), '("<ul><li>Project doesn\'t contain .todo file.</li></ul>")\n', (1274, 1331), False, 'from flask import Flask, render_template, abort, Response, Markup\n'), ((884, 954), 'flask.Response', 'Response', (['"""Config file was not found. Consider using \'vs init\' first."""'], {}), '("Config file was not found. Consider using \'vs init\' first.")\n', (892, 954), False, 'from flask import Flask, render_template, abort, Response, Markup\n'), ((1014, 1056), 'flask.Response', 'Response', (['f"""An unexpected error occurred."""'], {}), "(f'An unexpected error occurred.')\n", (1022, 1056), False, 'from flask import Flask, render_template, abort, Response, Markup\n'), ((1390, 1432), 'flask.Response', 'Response', (['f"""An unexpected error occurred."""'], {}), "(f'An unexpected error occurred.')\n", (1398, 1432), False, 'from flask import Flask, render_template, abort, Response, Markup\n'), ((1603, 1645), 'flask.Response', 'Response', (['f"""An unexpected error occurred."""'], {}), "(f'An unexpected error occurred.')\n", (1611, 1645), False, 'from flask import Flask, render_template, abort, Response, Markup\n'), ((652, 675), 'os.listdir', 'os.listdir', (['project_dir'], {}), '(project_dir)\n', (662, 675), False, 'import os\n')] |
import logging
import json
import hashlib
import random
import string
import time
from homeassistant.const import STATE_ON, STATE_OFF
from .const import (
DOMAIN,
LISTEN_TOPIC,
PUBLISH_TOPIC,
DEVICE_ON,
APP_CONTROL_TOGGLE,
APP_CONTROL_ELEC,
APP_METHOD_GET,
APP_METHOD_PUSH,
APP_METHOD_SET,
APP_SYS_ALL,
APP_SYS_CLOCK,
)
_LOGGER = logging.getLogger(__name__)
class MQTTDevice():
"""
    MQTTDevice represents a Meross MQTT device.
The intent is to use this for more devices as their use of the common protocol is discovered.
"""
def __init__(self, id, key, validate, callback):
self.id = id
self.key = key
self.validate = validate
self.callback = callback
def start(self, service):
""" starts listening to topic for device requests/responses """
self.service = service
topic = LISTEN_TOPIC.format(self.id)
_LOGGER.info("%s: device MQTT subscription to %s", DOMAIN, topic)
self.service.subscribe(topic, self.message_received)
return self
def message_received(self, msg):
""" handler for incoming messages from a device """
p = Packet(json.loads(msg.payload))
_LOGGER.debug("received %s", p)
if not p.validSignature(self.key):
if self.validate:
_LOGGER.error("invalid signature %s: %s", self.id, p.header.namespace)
return
else:
_LOGGER.info("ignoreing signature error: %s", self.id, p.header.namespace)
if p.header.method == "ERROR":
_LOGGER.error("error occured: %s", self.id, p.payload)
return
if p.header.method == APP_METHOD_PUSH and p.header.namespace == APP_CONTROL_TOGGLE:
self.callback(
ToggleState(
p.payload.get("channel", 0),
p.payload["toggle"]["onoff"],
)
)
# Respond to clock events with the current time.
if p.header.method == APP_METHOD_PUSH and p.header.namespace == APP_SYS_CLOCK:
self.sendPacket(
self.createPacket(
APP_METHOD_PUSH,
APP_SYS_CLOCK,
{
"clock": {
"timestamp": int(time.time()),
}
}
)
)
if p.header.namespace == APP_CONTROL_ELEC:
self.callback(
PowerUsage(p.payload["electricity"]["power"],p.payload["electricity"]["current"],p.payload["electricity"]["voltage"])
)
if p.header.namespace == APP_SYS_ALL:
# TODO(arandall): check for channel (if applicable)
self.callback(ToggleState(0, p.payload["all"]["control"]["toggle"]["onoff"]))
self.callback(
SystemState(
p.payload["all"]["system"]["hardware"]["macAddress"],
p.payload["all"]["system"]["firmware"]["innerIp"],
"{}-{} v{} - {} (fw v{})".format(
p.payload["all"]["system"]["hardware"]["type"],
p.payload["all"]["system"]["hardware"]["subType"],
p.payload["all"]["system"]["hardware"]["version"],
p.payload["all"]["system"]["hardware"]["chipType"],
p.payload["all"]["system"]["firmware"]["version"],
)
)
)
def createPacket(self, method, namespace, payload):
""" createPacket ready for transmission via MQTT or HTTP """
p = Packet({
"header": {
"from": "homeassistant/meross/subscribe",
"method": method,
"namespace": namespace,
},
"payload": payload
})
p.sign(self.key)
return p
def sendPacket(self, p):
""" sendPacket via MQTT """
_LOGGER.debug("sending %s", p)
self.service.publish(PUBLISH_TOPIC.format(self.id), json.dumps(p, default=serialize))
def SetOnOff(self, channel, state):
""" SetOnOff state for given channel """
self.sendPacket(
self.createPacket(
APP_METHOD_SET,
APP_CONTROL_TOGGLE,
{
"channel": channel,
"toggle": {
"onoff": state,
}
}
)
)
def GetElectricityUsage(self):
self.sendPacket(
self.createPacket(
APP_METHOD_GET,
APP_CONTROL_ELEC,
{}
)
)
def SystemAll(self):
""" Get all system parameters """
self.sendPacket(
self.createPacket(
APP_METHOD_GET,
APP_SYS_ALL,
{}
)
)
class ToggleState():
""" ToggleState represents the state of a switch """
def __init__(self, channel, state):
self.channel = channel
if state == DEVICE_ON:
self.state = STATE_ON
else:
self.state = STATE_OFF
def __str__(self):
return "ToggleState channel:{} state:{}".format(self.channel, self.state)
class SystemState():
""" SystemState represents the state of a Meross Appliance """
def __init__(self, mac, ip, version):
self.mac = mac
self.ip = ip
self.version = version
def __str__(self):
return "SystemState mac:{} ip:{} version:{}".format(self.mac, self.ip, self.version)
class PowerUsage():
""" PowerUsage represents the current power usage of a Meross Appliance """
def __init__(self, power, current, voltage):
self.power = power / 1000
self.current = current / 1000
self.voltage = voltage / 10
def __str__(self):
return "PowerUsage {}W".format(self.power/1000)
class Header():
def __init__(self, pk):
        if pk is None:
pk = {}
self.from_ = pk.get("from")
self.messageId = pk.get("messageId", ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(32)))
self.method = pk.get("method")
self.namespace = pk.get("namespace")
self.payloadVersion = pk.get("payloadVersion", 1)
self.sign = pk.get("sign")
self.timestamp = pk.get("timestamp", int(time.time()))
class Packet():
def __init__(self, pk):
self.header = Header(pk.get("header"))
self.payload = pk.get("payload")
def __str__(self):
return "meross-packet({}): {} - {} [{}] ".format(self.header.messageId, self.header.method, self.header.namespace, self.header.from_)
def calcSignature(self, key):
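        """MD5 over messageId + shared device key + timestamp; validSignature()
        checks incoming packets against the same digest."""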
signatureString = ""
for arg in [self.header.messageId, key, self.header.timestamp]:
signatureString += str(arg)
return hashlib.md5(signatureString.encode()).hexdigest()
def sign(self, key):
self.header.sign = self.calcSignature(key)
def validSignature(self, key):
return self.header.sign == self.calcSignature(key)
def serialize(obj):
    if isinstance(obj, Header):
        # copy so serializing a packet does not mutate the Header instance
        d = dict(obj.__dict__)
        d["from"] = d.pop("from_")
        return d
    return obj.__dict__
| [
"logging.getLogger",
"json.loads",
"json.dumps",
"random.SystemRandom",
"time.time"
] | [((375, 402), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (392, 402), False, 'import logging\n'), ((1209, 1232), 'json.loads', 'json.loads', (['msg.payload'], {}), '(msg.payload)\n', (1219, 1232), False, 'import json\n'), ((4138, 4170), 'json.dumps', 'json.dumps', (['p'], {'default': 'serialize'}), '(p, default=serialize)\n', (4148, 4170), False, 'import json\n'), ((6533, 6544), 'time.time', 'time.time', ([], {}), '()\n', (6542, 6544), False, 'import time\n'), ((6217, 6238), 'random.SystemRandom', 'random.SystemRandom', ([], {}), '()\n', (6236, 6238), False, 'import random\n'), ((2355, 2366), 'time.time', 'time.time', ([], {}), '()\n', (2364, 2366), False, 'import time\n')] |
from typing import Union
from boa3.builtin.nativecontract.stdlib import StdLib
def main(mem1: Union[bytes, str]) -> int:
return StdLib.memory_compare(mem1)
| [
"boa3.builtin.nativecontract.stdlib.StdLib.memory_compare"
] | [((135, 162), 'boa3.builtin.nativecontract.stdlib.StdLib.memory_compare', 'StdLib.memory_compare', (['mem1'], {}), '(mem1)\n', (156, 162), False, 'from boa3.builtin.nativecontract.stdlib import StdLib\n')] |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, deepsense.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import fnmatch
import os
import sys
import humanize
from kitchen.i18n import to_bytes, to_unicode
from pathlib2 import Path
from tqdm import tqdm
from neptune.internal.cli.commands.parsers.utils.validators import ArgumentsValidationException
from neptune.internal.common.utils.paths import join_paths
from neptune.internal.common.utils.system import IS_WINDOWS
to_unicode = to_unicode
to_bytestring = to_bytes
# pylint:disable=no-member
class CopySizeCounter(object):
def __init__(self, description):
self.size = 0
self.description = description
@staticmethod
def _print(msg, size):
print(u"\r{msg}: {size}".format(msg=msg,
size=humanize.naturalsize(size, format=u"%.2f").ljust(10)),
end=u'', file=sys.stderr)
sys.stderr.flush()
def add(self, src_path, dst_path):
# pylint: disable=unused-argument
self.size += os.path.getsize(src_path)
self._print(u"Calculating {} size".format(self.description), self.size)
def finalize(self):
self._print(u"\rCalculated {} size".format(self.description), self.size)
print(u"")
class CopyProgressBar(object):
def __init__(self, total_size, desc=u"Creating experiment snapshot"):
self.progress_bar = tqdm(desc=desc,
total=total_size,
unit=u'B',
unit_scale=True,
file=sys.stderr,
ascii=IS_WINDOWS)
def set_description(self, desc=None):
self.progress_bar.set_description(desc)
def update(self, block_size):
self.progress_bar.update(block_size)
def finalize(self):
self.progress_bar.close()
def _create_filter_list(exclude):
filter_list = []
if exclude:
for pattern in exclude:
if os.path.isabs(pattern):
raise ArgumentsValidationException(
u"Invalid exclude pattern: exclude patterns must be relative")
filter_list.append(pattern)
filter_list.append("*"+os.sep+pattern)
filter_list.append(pattern+os.sep+"*")
filter_list.append("*"+os.sep+pattern+os.sep+"*")
return filter_list
def collect_files(p=None, exclude=None, description=u"experiment snapshot"):
if p is None:
p = "."
# Remove trailing '/' chars.
if exclude is not None:
exclude = [e.rstrip(os.sep) for e in exclude]
counter = CopySizeCounter(description)
filter_list = _create_filter_list(exclude)
files_list = []
empty_dir_list = []
path = Path(p).resolve()
if path.is_dir():
# get list of all files and directories under target path
found_files = sorted([(
found_file.absolute(),
found_file.relative_to(path),
join_paths(p, str(found_file.relative_to(path)))
) for found_file in path.glob('**' + os.sep + '*')])
if not found_files:
empty_dir_list.append((str(path.resolve()), path.resolve().name))
for file_path in found_files:
# skip files that match any of the exclude patterns
if not any(fnmatch.fnmatch(str(file_path[1]), pattern) for pattern in filter_list):
if file_path[0].is_dir():
# add all directories - non-empty ones are removed later
empty_dir_list.append((str(file_path[0]), str(file_path[2])))
else:
files_list.append((str(file_path[0]), str(file_path[2])))
counter.add(str(file_path[0]), None)
else:
files_list.append((str(path.resolve()), path.resolve().name))
counter.add(str(path.resolve()), None)
counter.finalize()
return files_list, counter.size, empty_dir_list
| [
"neptune.internal.cli.commands.parsers.utils.validators.ArgumentsValidationException",
"os.path.getsize",
"os.path.isabs",
"humanize.naturalsize",
"sys.stderr.flush",
"tqdm.tqdm",
"pathlib2.Path"
] | [((1463, 1481), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (1479, 1481), False, 'import sys\n'), ((1585, 1610), 'os.path.getsize', 'os.path.getsize', (['src_path'], {}), '(src_path)\n', (1600, 1610), False, 'import os\n'), ((1952, 2053), 'tqdm.tqdm', 'tqdm', ([], {'desc': 'desc', 'total': 'total_size', 'unit': 'u"""B"""', 'unit_scale': '(True)', 'file': 'sys.stderr', 'ascii': 'IS_WINDOWS'}), "(desc=desc, total=total_size, unit=u'B', unit_scale=True, file=sys.\n stderr, ascii=IS_WINDOWS)\n", (1956, 2053), False, 'from tqdm import tqdm\n'), ((2564, 2586), 'os.path.isabs', 'os.path.isabs', (['pattern'], {}), '(pattern)\n', (2577, 2586), False, 'import os\n'), ((3328, 3335), 'pathlib2.Path', 'Path', (['p'], {}), '(p)\n', (3332, 3335), False, 'from pathlib2 import Path\n'), ((2610, 2706), 'neptune.internal.cli.commands.parsers.utils.validators.ArgumentsValidationException', 'ArgumentsValidationException', (['u"""Invalid exclude pattern: exclude patterns must be relative"""'], {}), "(\n u'Invalid exclude pattern: exclude patterns must be relative')\n", (2638, 2706), False, 'from neptune.internal.cli.commands.parsers.utils.validators import ArgumentsValidationException\n'), ((1360, 1402), 'humanize.naturalsize', 'humanize.naturalsize', (['size'], {'format': 'u"""%.2f"""'}), "(size, format=u'%.2f')\n", (1380, 1402), False, 'import humanize\n')] |
import torch
import torch.nn as nn
from .GroupLinearLayer import GroupLinearLayer
class SharedGroupLinearLayer(nn.Module):
"""All the parameters are shared using soft attention this layer is used for sharing Q,K,V parameters of MHA"""
def __init__(self, din, dout, n_templates):
super(SharedGroupLinearLayer, self).__init__()
self.w = nn.ModuleList([nn.Linear(din, dout, bias = False) for _ in range(0,n_templates)])
self.gll_write = GroupLinearLayer(dout,16, n_templates)
self.gll_read = GroupLinearLayer(din,16,1)
#self.register_buffer(self.w)
def forward(self,x):
#input size (bs,num_blocks,din), required matching num_blocks vs n_templates
bs_size = x.shape[0]
k = x.shape[1]
x= x.reshape(k*bs_size,-1)
x_read = self.gll_read((x*1.0).reshape((x.shape[0], 1, x.shape[1])))
x_next = []
for mod in self.w:
x_next_l = mod(x)
x_next.append(x_next_l)
x_next = torch.stack(x_next,1) #(k*bs,n_templates,dout)
x_write = self.gll_write(x_next)
sm = nn.Softmax(2)
att = sm(torch.bmm(x_read, x_write.permute(0, 2, 1)))
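        # att: (k*bs, 1, n_templates) -- soft attention weights over the templates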
x_next = torch.bmm(att, x_next)
x_next = x_next.mean(dim=1).reshape(bs_size,k,-1)
return x_next
if __name__ == "__main__":
GLN = SharedGroupLinearLayer(25,22,6)
x = torch.randn(64,12,25)
print(GLN(x).shape)
for p in GLN.parameters():
print(p.shape)
| [
"torch.nn.Softmax",
"torch.stack",
"torch.nn.Linear",
"torch.bmm",
"torch.randn"
] | [((1412, 1435), 'torch.randn', 'torch.randn', (['(64)', '(12)', '(25)'], {}), '(64, 12, 25)\n', (1423, 1435), False, 'import torch\n'), ((1003, 1025), 'torch.stack', 'torch.stack', (['x_next', '(1)'], {}), '(x_next, 1)\n', (1014, 1025), False, 'import torch\n'), ((1113, 1126), 'torch.nn.Softmax', 'nn.Softmax', (['(2)'], {}), '(2)\n', (1123, 1126), True, 'import torch.nn as nn\n'), ((1215, 1237), 'torch.bmm', 'torch.bmm', (['att', 'x_next'], {}), '(att, x_next)\n', (1224, 1237), False, 'import torch\n'), ((378, 410), 'torch.nn.Linear', 'nn.Linear', (['din', 'dout'], {'bias': '(False)'}), '(din, dout, bias=False)\n', (387, 410), True, 'import torch.nn as nn\n')] |
import logging
import os
import sys
import select
import stat
import scitag
import scitag.settings
from scitag.config import config
log = logging.getLogger('scitag')
def init():
log.debug('np_api init')
if os.path.exists(scitag.settings.NP_API_FILE) and stat.S_ISFIFO(os.stat(scitag.settings.NP_API_FILE).st_mode):
return
try:
os.mkfifo(scitag.settings.NP_API_FILE, mode=0o666)
except IOError as e:
log.error('Unable to create command pipe {}'.format(scitag.settings.NP_API_FILE))
sys.exit(1)
def run(flow_queue, term_event):
np_api_fd = os.open(scitag.settings.NP_API_FILE, os.O_RDWR | os.O_NONBLOCK)
sp = select.poll()
sp.register(np_api_fd, select.POLLIN | select.POLLPRI)
while not term_event.is_set():
try:
tr = sp.poll(3)
if not tr:
continue
np_content = os.read(np_api_fd, 65535)
except IOError as e:
log.exception('Failed to read command pipe {}'.format(scitag.settings.NP_API_FILE))
term_event.wait(3)
continue
log.debug(np_content)
flow_ids = np_content.decode('utf-8').splitlines()
log.debug(flow_ids)
for f_id in flow_ids:
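            # Each line is one flow identifier: exactly 8 whitespace-separated
            # fields, passed positionally to scitag.FlowID below.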
entry = f_id.strip().split(' ')
if len(entry) != 8:
log.error('Unable to parse flow identifier received {}'.format(entry))
continue
# todo: validate entries
flow_id = scitag.FlowID(entry[0].strip(), entry[1].strip(), entry[2].strip(), entry[3].strip(),
entry[4].strip(), entry[5].strip(), entry[6].strip(), entry[7].strip())
log.debug(' --> {}'.format(flow_id))
flow_queue.put(flow_id)
os.unlink(scitag.settings.NP_API_FILE)
| [
"logging.getLogger",
"os.path.exists",
"os.open",
"select.poll",
"os.unlink",
"os.mkfifo",
"sys.exit",
"os.stat",
"os.read"
] | [((140, 167), 'logging.getLogger', 'logging.getLogger', (['"""scitag"""'], {}), "('scitag')\n", (157, 167), False, 'import logging\n'), ((596, 659), 'os.open', 'os.open', (['scitag.settings.NP_API_FILE', '(os.O_RDWR | os.O_NONBLOCK)'], {}), '(scitag.settings.NP_API_FILE, os.O_RDWR | os.O_NONBLOCK)\n', (603, 659), False, 'import os\n'), ((669, 682), 'select.poll', 'select.poll', ([], {}), '()\n', (680, 682), False, 'import select\n'), ((1774, 1812), 'os.unlink', 'os.unlink', (['scitag.settings.NP_API_FILE'], {}), '(scitag.settings.NP_API_FILE)\n', (1783, 1812), False, 'import os\n'), ((218, 261), 'os.path.exists', 'os.path.exists', (['scitag.settings.NP_API_FILE'], {}), '(scitag.settings.NP_API_FILE)\n', (232, 261), False, 'import os\n'), ((359, 407), 'os.mkfifo', 'os.mkfifo', (['scitag.settings.NP_API_FILE'], {'mode': '(438)'}), '(scitag.settings.NP_API_FILE, mode=438)\n', (368, 407), False, 'import os\n'), ((533, 544), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (541, 544), False, 'import sys\n'), ((891, 916), 'os.read', 'os.read', (['np_api_fd', '(65535)'], {}), '(np_api_fd, 65535)\n', (898, 916), False, 'import os\n'), ((280, 316), 'os.stat', 'os.stat', (['scitag.settings.NP_API_FILE'], {}), '(scitag.settings.NP_API_FILE)\n', (287, 316), False, 'import os\n')] |
from django.contrib import admin
from django.urls import path, include
from django.conf.urls import url
from api.user.view import (
getUsers, getUser,
signin, user_api_root, signup,
email_signup
)
from api.user.root import user_api_root_detail
app_name = 'user'
urlpatterns = [
url('user-api-root/',user_api_root,name='user_api_root'),
url('user-api-root-detail/',user_api_root_detail,name='user_api_root_detail'),
url('getUsers/',getUsers,name='getUsers'),
url('getUser',getUser,name='getUser'),
url('signin',signin,name='signin'),
url('email-signup',email_signup,name='email_signup'),
url('signup',signup,name='signup'),
]
| [
"django.conf.urls.url"
] | [((296, 354), 'django.conf.urls.url', 'url', (['"""user-api-root/"""', 'user_api_root'], {'name': '"""user_api_root"""'}), "('user-api-root/', user_api_root, name='user_api_root')\n", (299, 354), False, 'from django.conf.urls import url\n'), ((358, 437), 'django.conf.urls.url', 'url', (['"""user-api-root-detail/"""', 'user_api_root_detail'], {'name': '"""user_api_root_detail"""'}), "('user-api-root-detail/', user_api_root_detail, name='user_api_root_detail')\n", (361, 437), False, 'from django.conf.urls import url\n'), ((441, 484), 'django.conf.urls.url', 'url', (['"""getUsers/"""', 'getUsers'], {'name': '"""getUsers"""'}), "('getUsers/', getUsers, name='getUsers')\n", (444, 484), False, 'from django.conf.urls import url\n'), ((488, 527), 'django.conf.urls.url', 'url', (['"""getUser"""', 'getUser'], {'name': '"""getUser"""'}), "('getUser', getUser, name='getUser')\n", (491, 527), False, 'from django.conf.urls import url\n'), ((531, 567), 'django.conf.urls.url', 'url', (['"""signin"""', 'signin'], {'name': '"""signin"""'}), "('signin', signin, name='signin')\n", (534, 567), False, 'from django.conf.urls import url\n'), ((571, 625), 'django.conf.urls.url', 'url', (['"""email-signup"""', 'email_signup'], {'name': '"""email_signup"""'}), "('email-signup', email_signup, name='email_signup')\n", (574, 625), False, 'from django.conf.urls import url\n'), ((629, 665), 'django.conf.urls.url', 'url', (['"""signup"""', 'signup'], {'name': '"""signup"""'}), "('signup', signup, name='signup')\n", (632, 665), False, 'from django.conf.urls import url\n')] |
# -*- coding: utf-8 -*-
#
# Copyright 2018 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import pytest
from gordon_introspection import __version__
from gordon_introspection import app
@pytest.fixture
def server_app(config):
return app.ServerApp(config)
# loop fixture is from aiohttp, not to be confused with pytest-asyncio's
# event_loop fixture
async def test_ping(aiohttp_client, loop, server_app):
"""Request /ping healthcheck endpoint."""
client = await aiohttp_client(server_app.app)
resp = await client.get('/ping')
assert 200 == resp.status
text = await resp.text()
assert 'pong' == text
async def test_version(aiohttp_client, loop, server_app):
"""Request /version metadata endpoint."""
client = await aiohttp_client(server_app.app)
resp = await client.get('/version')
assert 200 == resp.status
resp = json.loads(await resp.text())
assert __version__ == resp['gordon-introspection']
async def test_system(aiohttp_client, loop, server_app, mocker):
"""Request /system metadata endpoint."""
client = await aiohttp_client(server_app.app)
resp = await client.get('/system')
assert 200 == resp.status
resp = json.loads(await resp.text())
assert 'uptime' in resp
assert 'system_time' in resp
@pytest.fixture
def loggers(monkeypatch):
loggers = {
'testing1': logging.getLogger('testing1'),
'testing2': logging.getLogger('testing2'),
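        # logging's manager keeps PlaceHolder objects (not Logger instances) for
        # ancestor names that were never explicitly created; the view must reject these.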
'testing3': logging.PlaceHolder(logging.getLogger('placeholder')),
'root': logging.getLogger(''),
}
loggers['testing1'].setLevel('INFO')
loggers['testing2'].setLevel('DEBUG')
monkeypatch.setattr(
'gordon_introspection.views.logging.Logger.manager.loggerDict', loggers)
return loggers
@pytest.mark.parametrize('req,exp_status,exp_body', (
('', 200, json.dumps(
{'root': 'WARNING', 'testing1': 'INFO', 'testing2': 'DEBUG'})),
('?logger=testing1', 200, json.dumps({'testing1': 'INFO'})),
('?logger=""', 200, json.dumps({'root': 'WARNING'})),
('?logger=testing3', 400, '400: "testing3" is a PlaceHolder object.'),
('?logger=notalogger', 404, '404: Unknown logger: "notalogger".')
))
async def test_get_log_state(req, exp_status, exp_body, loggers,
aiohttp_client, loop, server_app):
"""Request /logging endpoint."""
client = await aiohttp_client(server_app.app)
resp = await client.get(f'/logging{req}')
assert exp_status == resp.status
text = await resp.text()
assert exp_body == text
@pytest.mark.parametrize('req,exp_status,exp_body', (
('', 400, '400: Query parameters "logger" and "level" are required.'),
('?logger=testing1', 400,
'400: Query parameters "logger" and "level" are required.'),
('?level=INFO', 400,
'400: Query parameters "logger" and "level" are required.'),
('?logger=notalogger&level=WARN', 404,
'404: Unknown logger: "notalogger".'),
('?logger=testing1&level=FUU', 404, '404: Unknown log level: "FUU".'),
('?logger=testing1&level=DEBUG', 204, ''),
('?logger=""&level=DEBUG', 204, ''),
('?logger=root&level=DEBUG', 204, ''),
))
async def test_set_log_state(req, exp_status, exp_body, loggers,
aiohttp_client, loop, server_app):
"""Update logger level via /logging?logger=NAME&level=LEVEL."""
client = await aiohttp_client(server_app.app)
resp = await client.post(f'/logging{req}')
assert exp_status == resp.status
text = await resp.text()
assert exp_body == text
async def test_set_log_state_fails(monkeypatch, loggers, aiohttp_client, loop,
server_app):
"""Level could not be updated."""
monkeypatch.setattr(
'gordon_introspection.views.logging.getLevelName', lambda x: 'ERROR')
client = await aiohttp_client(server_app.app)
resp = await client.post('/logging?logger=testing1&level=INFO')
assert 500 == resp.status
text = await resp.text()
    assert '500: Could not update log level of "testing1" to "INFO".' == text
| [
"gordon_introspection.app.ServerApp",
"pytest.mark.parametrize",
"json.dumps",
"logging.getLogger"
] | [((3095, 3700), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""req,exp_status,exp_body"""', '((\'\', 400, \'400: Query parameters "logger" and "level" are required.\'), (\n \'?logger=testing1\', 400,\n \'400: Query parameters "logger" and "level" are required.\'), (\n \'?level=INFO\', 400,\n \'400: Query parameters "logger" and "level" are required.\'), (\n \'?logger=notalogger&level=WARN\', 404,\n \'404: Unknown logger: "notalogger".\'), (\'?logger=testing1&level=FUU\', \n 404, \'404: Unknown log level: "FUU".\'), (\'?logger=testing1&level=DEBUG\',\n 204, \'\'), (\'?logger=""&level=DEBUG\', 204, \'\'), (\n \'?logger=root&level=DEBUG\', 204, \'\'))'], {}), '(\'req,exp_status,exp_body\', ((\'\', 400,\n \'400: Query parameters "logger" and "level" are required.\'), (\n \'?logger=testing1\', 400,\n \'400: Query parameters "logger" and "level" are required.\'), (\n \'?level=INFO\', 400,\n \'400: Query parameters "logger" and "level" are required.\'), (\n \'?logger=notalogger&level=WARN\', 404,\n \'404: Unknown logger: "notalogger".\'), (\'?logger=testing1&level=FUU\', \n 404, \'404: Unknown log level: "FUU".\'), (\'?logger=testing1&level=DEBUG\',\n 204, \'\'), (\'?logger=""&level=DEBUG\', 204, \'\'), (\n \'?logger=root&level=DEBUG\', 204, \'\')))\n', (3118, 3700), False, 'import pytest\n'), ((775, 796), 'gordon_introspection.app.ServerApp', 'app.ServerApp', (['config'], {}), '(config)\n', (788, 796), False, 'from gordon_introspection import app\n'), ((1901, 1930), 'logging.getLogger', 'logging.getLogger', (['"""testing1"""'], {}), "('testing1')\n", (1918, 1930), False, 'import logging\n'), ((1952, 1981), 'logging.getLogger', 'logging.getLogger', (['"""testing2"""'], {}), "('testing2')\n", (1969, 1981), False, 'import logging\n'), ((2074, 2095), 'logging.getLogger', 'logging.getLogger', (['""""""'], {}), "('')\n", (2091, 2095), False, 'import logging\n'), ((2023, 2055), 'logging.getLogger', 'logging.getLogger', (['"""placeholder"""'], {}), "('placeholder')\n", (2040, 2055), False, 'import logging\n'), ((2381, 2453), 'json.dumps', 'json.dumps', (["{'root': 'WARNING', 'testing1': 'INFO', 'testing2': 'DEBUG'}"], {}), "({'root': 'WARNING', 'testing1': 'INFO', 'testing2': 'DEBUG'})\n", (2391, 2453), False, 'import json\n'), ((2495, 2527), 'json.dumps', 'json.dumps', (["{'testing1': 'INFO'}"], {}), "({'testing1': 'INFO'})\n", (2505, 2527), False, 'import json\n'), ((2554, 2585), 'json.dumps', 'json.dumps', (["{'root': 'WARNING'}"], {}), "({'root': 'WARNING'})\n", (2564, 2585), False, 'import json\n')] |
from dawg_fsm_dictionary.dawg_fsm import DawgFsm
import unittest
class TestDawgFsm(unittest.TestCase):
def test_dawg_fsm(self):
"""Simple node verification no minimization has taking place."""
dawg_test = DawgFsm()
dawg_test.create_words("kat")
dawg_test.create_words("klap")
dawg_test.create_words("knaap")
dawg_test.clear_redundant()
self.assertEqual(dawg_test.is_word("klap"), True)
self.assertEqual(dawg_test.is_word("katten"), False)
self.assertEqual(dawg_test.is_word("klap"), True)
self.assertEqual(dawg_test.is_word("katt"), False)
self.assertEqual(dawg_test.is_word("ka"), False)
def test_dawg_fsm_two(self):
"""Multiple end node point are merged"""
dawg_test = DawgFsm()
dawg_test.create_words("blaf")
dawg_test.create_words("blafen")
dawg_test.create_words("kalf")
dawg_test.create_words("kat")
dawg_test.create_words("katten")
dawg_test.clear_redundant()
self.assertEqual(dawg_test.is_word("blaf"), True)
self.assertEqual(dawg_test.is_word("blafen"), True)
self.assertEqual(dawg_test.is_word("kat"), True)
self.assertEqual(dawg_test.is_word("katten"), True)
self.assertEqual(dawg_test.is_word("katt"), False)
self.assertEqual(dawg_test.is_word("blafe"), False)
self.assertEqual(dawg_test.is_word("kalf"), True)
| [
"dawg_fsm_dictionary.dawg_fsm.DawgFsm"
] | [((228, 237), 'dawg_fsm_dictionary.dawg_fsm.DawgFsm', 'DawgFsm', ([], {}), '()\n', (235, 237), False, 'from dawg_fsm_dictionary.dawg_fsm import DawgFsm\n'), ((788, 797), 'dawg_fsm_dictionary.dawg_fsm.DawgFsm', 'DawgFsm', ([], {}), '()\n', (795, 797), False, 'from dawg_fsm_dictionary.dawg_fsm import DawgFsm\n')] |
import json
oFile = open("datab.json", "w")
output = json.dumps(
{
"074410490577":"seaweed, roasted",
"070303022160":"sardines in olive oil"
}, sort_keys = True, indent = 4)
print(output)
oFile.write(output)
oFile.close()
| [
"json.dumps"
] | [((55, 174), 'json.dumps', 'json.dumps', (["{'074410490577': 'seaweed, roasted', '070303022160': 'sardines in olive oil'}"], {'sort_keys': '(True)', 'indent': '(4)'}), "({'074410490577': 'seaweed, roasted', '070303022160':\n 'sardines in olive oil'}, sort_keys=True, indent=4)\n", (65, 174), False, 'import json\n')] |
import os
import pickle
import copy
import cv2
import numpy as np
import streamlit as st
import tensorflow as tf
import grpc
from tensorflow_serving.apis import (
prediction_service_pb2_grpc,
predict_pb2
)
from consts import (
TRAIN_FD,
TRAIN_PKL_FP,
TRAIN_LABEL_FP,
CLASSIFIER_MODEL
)
@st.cache
def load_prec_embs():
with open(TRAIN_PKL_FP, "rb") as f:
train_embs = pickle.load(f)
with open(TRAIN_LABEL_FP, "rb") as f:
train_labels = pickle.load(f)
train_img_fps = wfile(TRAIN_FD)
assert len(train_img_fps) == train_embs.shape[0]
return train_img_fps, train_embs, train_labels
def wfile(root):
img_fps = []
for path, subdirs, files in os.walk(root):
for name in files:
img_fps.append(os.path.join(path, name))
return sorted(img_fps)
class FlowerArc:
def __init__(self,
host="192.168.19.37",
port=8700,
model_name="flower",
model_signature="flower_signature",
input_name="input_image",
output_name="emb_pred"):
self.host = host
self.port = port
self.channel = grpc.insecure_channel("{}:{}".format(
self.host, self.port
))
self.stub = prediction_service_pb2_grpc.PredictionServiceStub(
self.channel
)
self.input_name = input_name
self.output_name = output_name
self.request = predict_pb2.PredictRequest()
self.request.model_spec.name = model_name
self.request.model_spec.signature_name = model_signature
def norm_mean_std(self,
img):
img = img / 255
img = img.astype('float32')
mean = np.mean(img, axis=(0, 1, 2))
std = np.std(img, axis=(0, 1, 2))
img = (img - mean) / std
return img
def test_preprocess(self,
img,
img_size=(384, 384),
expand=True):
img = cv2.resize(img, img_size)
# normalize image
img = self.norm_mean_std(img)
if expand:
img = np.expand_dims(img, axis=0)
return img
def predict(self, img):
assert img.ndim == 3
img = self.test_preprocess(img)
self.request.inputs[self.input_name].CopyFrom(
tf.contrib.util.make_tensor_proto(
img,
dtype=tf.float32,
shape=img.shape
)
)
result = self.stub.Predict(self.request, 10.0)
emb_pred = tf.contrib.util.make_ndarray(
result.outputs[self.output_name]
)
return emb_pred
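# NOTE (editor): illustrative helper, not part of the original app. It shows how
# an embedding returned by FlowerArc.predict() could be matched against the
# precomputed train_embs loaded above; it assumes both are plain float vectors.
def top_k_similar(query_emb, train_embs, k=5):
    """Return indices of the k most cosine-similar precomputed embeddings."""
    query = query_emb.reshape(-1)
    query = query / np.linalg.norm(query)
    db = train_embs / np.linalg.norm(train_embs, axis=1, keepdims=True)
    scores = db @ query
    return np.argsort(-scores)[:k]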
class SaliencyDetection:
"""docstring for SaliencyDetection"""
def __init__(self,
host="192.168.19.37",
port=8700,
model_name="saliency",
model_signature="serving_default",
input_image="input_image",
pred_mask="pred_mask"):
self.host = host
self.port = port
self.channel = grpc.insecure_channel("{}:{}".format(
self.host, self.port
))
self.stub = prediction_service_pb2_grpc.PredictionServiceStub(
self.channel
)
self.input_image = input_image
self.pred_mask = pred_mask
self.request = predict_pb2.PredictRequest()
self.request.model_spec.name = model_name
self.request.model_spec.signature_name = model_signature
def saliency_predict(self, img):
img = cv2.resize(img, (320, 240))
img = np.expand_dims(img, axis = 0)
self.request.inputs[self.input_image].CopyFrom(
tf.contrib.util.make_tensor_proto(
img,
dtype=np.float32,
shape=img.shape
)
)
result = self.stub.Predict(self.request, 10.0)
pred_mask = tf.contrib.util.make_ndarray(result.outputs[self.pred_mask])[0]
return pred_mask
def bounding_box(self, img, map_img_source):
map_img = copy.deepcopy(map_img_source)
map_img = map_img.astype(np.float32)
thres = 0.02
map_img[map_img >= thres] = 1
map_img[map_img < thres] = 0
# crop bbox
horizontal_indicies = np.where(np.any(map_img, axis=0))[0]
vertical_indicies = np.where(np.any(map_img, axis=1))[0]
if horizontal_indicies.shape[0]:
x1, x2 = horizontal_indicies[[0, -1]]
y1, y2 = vertical_indicies[[0, -1]]
# x2 and y2 should not be part of the box. Increment by 1.
x2 += 1
y2 += 1
else:
# No mask for this instance. Might happen due to
# resizing or cropping. Set bbox to zeros
x1, x2, y1, y2 = 0, 0, 0, 0
img_arr_2 = copy.deepcopy(img)
height, width, channels = img.shape
h_ratio = height / 240
w_ratio = width / 320
cv2.rectangle(img_arr_2, (int(x1 * w_ratio), int(y1 * h_ratio)), (int(x2 * w_ratio), int(y2 * h_ratio)), (255,225,0), 4)
return img_arr_2
class Classifier(object):
"""docstring for ClassName"""
def __init__(self,
host="192.168.19.37",
port=8700,
model_name="classifier",
model_signature="classifier",
input_image="input_image",
y_pred="y_pred"):
self.host = host
self.port = port
self.channel = grpc.insecure_channel("{}:{}".format(
self.host, self.port
))
self.stub = prediction_service_pb2_grpc.PredictionServiceStub(
self.channel
)
self.input_image = input_image
self.y_pred = y_pred
self.request = predict_pb2.PredictRequest()
self.request.model_spec.name = model_name
self.request.model_spec.signature_name = model_signature
def norm_mean_std(self,
img):
img = img / 255
img = img.astype('float32')
mean = np.mean(img, axis=(0, 1, 2))
std = np.std(img, axis=(0, 1, 2))
img = (img - mean) / std
return img
def test_preprocess(self,
img,
img_size=(384, 384),
expand=True):
img = cv2.resize(img, img_size)
# normalize image
img = self.norm_mean_std(img)
if expand:
img = np.expand_dims(img, axis=0)
return img
def classification_predict(self, img, threshold):
img = self.test_preprocess(img, img_size=(224, 224),expand=True)
self.request.inputs[self.input_image].CopyFrom(
tf.contrib.util.make_tensor_proto(
img,
dtype=np.float32,
shape=img.shape
)
)
result = self.stub.Predict(self.request, 10.0)
y_pred = tf.contrib.util.make_ndarray(result.outputs[self.y_pred])[0]
        if y_pred[0] < threshold:
            result = 0
        else:
            result = 1
return result
| [
"numpy.mean",
"copy.deepcopy",
"tensorflow_serving.apis.predict_pb2.PredictRequest",
"tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub",
"pickle.load",
"os.path.join",
"numpy.any",
"numpy.expand_dims",
"numpy.std",
"tensorflow.contrib.util.make_tensor_proto",
"cv2.resize",
"tensorflow.contrib.util.make_ndarray",
"os.walk"
] | [((710, 723), 'os.walk', 'os.walk', (['root'], {}), '(root)\n', (717, 723), False, 'import os\n'), ((404, 418), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (415, 418), False, 'import pickle\n'), ((485, 499), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (496, 499), False, 'import pickle\n'), ((1296, 1359), 'tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub', 'prediction_service_pb2_grpc.PredictionServiceStub', (['self.channel'], {}), '(self.channel)\n', (1345, 1359), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((1482, 1510), 'tensorflow_serving.apis.predict_pb2.PredictRequest', 'predict_pb2.PredictRequest', ([], {}), '()\n', (1508, 1510), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((1760, 1788), 'numpy.mean', 'np.mean', (['img'], {'axis': '(0, 1, 2)'}), '(img, axis=(0, 1, 2))\n', (1767, 1788), True, 'import numpy as np\n'), ((1803, 1830), 'numpy.std', 'np.std', (['img'], {'axis': '(0, 1, 2)'}), '(img, axis=(0, 1, 2))\n', (1809, 1830), True, 'import numpy as np\n'), ((2042, 2067), 'cv2.resize', 'cv2.resize', (['img', 'img_size'], {}), '(img, img_size)\n', (2052, 2067), False, 'import cv2\n'), ((2610, 2672), 'tensorflow.contrib.util.make_ndarray', 'tf.contrib.util.make_ndarray', (['result.outputs[self.output_name]'], {}), '(result.outputs[self.output_name])\n', (2638, 2672), True, 'import tensorflow as tf\n'), ((3236, 3299), 'tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub', 'prediction_service_pb2_grpc.PredictionServiceStub', (['self.channel'], {}), '(self.channel)\n', (3285, 3299), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((3420, 3448), 'tensorflow_serving.apis.predict_pb2.PredictRequest', 'predict_pb2.PredictRequest', ([], {}), '()\n', (3446, 3448), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((3624, 3651), 'cv2.resize', 'cv2.resize', (['img', '(320, 240)'], {}), '(img, (320, 240))\n', (3634, 3651), False, 'import cv2\n'), ((3666, 3693), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (3680, 3693), True, 'import numpy as np\n'), ((4145, 4174), 'copy.deepcopy', 'copy.deepcopy', (['map_img_source'], {}), '(map_img_source)\n', (4158, 4174), False, 'import copy\n'), ((4909, 4927), 'copy.deepcopy', 'copy.deepcopy', (['img'], {}), '(img)\n', (4922, 4927), False, 'import copy\n'), ((5683, 5746), 'tensorflow_serving.apis.prediction_service_pb2_grpc.PredictionServiceStub', 'prediction_service_pb2_grpc.PredictionServiceStub', (['self.channel'], {}), '(self.channel)\n', (5732, 5746), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((5861, 5889), 'tensorflow_serving.apis.predict_pb2.PredictRequest', 'predict_pb2.PredictRequest', ([], {}), '()\n', (5887, 5889), False, 'from tensorflow_serving.apis import prediction_service_pb2_grpc, predict_pb2\n'), ((6139, 6167), 'numpy.mean', 'np.mean', (['img'], {'axis': '(0, 1, 2)'}), '(img, axis=(0, 1, 2))\n', (6146, 6167), True, 'import numpy as np\n'), ((6182, 6209), 'numpy.std', 'np.std', (['img'], {'axis': '(0, 1, 2)'}), '(img, axis=(0, 1, 2))\n', (6188, 6209), True, 'import numpy as np\n'), ((6421, 6446), 'cv2.resize', 'cv2.resize', (['img', 'img_size'], {}), '(img, img_size)\n', (6431, 6446), False, 'import cv2\n'), ((2171, 2198), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (2185, 2198), True, 'import numpy as np\n'), ((2388, 2461), 'tensorflow.contrib.util.make_tensor_proto', 'tf.contrib.util.make_tensor_proto', (['img'], {'dtype': 'tf.float32', 'shape': 'img.shape'}), '(img, dtype=tf.float32, shape=img.shape)\n', (2421, 2461), True, 'import tensorflow as tf\n'), ((3765, 3838), 'tensorflow.contrib.util.make_tensor_proto', 'tf.contrib.util.make_tensor_proto', (['img'], {'dtype': 'np.float32', 'shape': 'img.shape'}), '(img, dtype=np.float32, shape=img.shape)\n', (3798, 3838), True, 'import tensorflow as tf\n'), ((3987, 4047), 'tensorflow.contrib.util.make_ndarray', 'tf.contrib.util.make_ndarray', (['result.outputs[self.pred_mask]'], {}), '(result.outputs[self.pred_mask])\n', (4015, 4047), True, 'import tensorflow as tf\n'), ((6550, 6577), 'numpy.expand_dims', 'np.expand_dims', (['img'], {'axis': '(0)'}), '(img, axis=0)\n', (6564, 6577), True, 'import numpy as np\n'), ((6794, 6867), 'tensorflow.contrib.util.make_tensor_proto', 'tf.contrib.util.make_tensor_proto', (['img'], {'dtype': 'np.float32', 'shape': 'img.shape'}), '(img, dtype=np.float32, shape=img.shape)\n', (6827, 6867), True, 'import tensorflow as tf\n'), ((7013, 7070), 'tensorflow.contrib.util.make_ndarray', 'tf.contrib.util.make_ndarray', (['result.outputs[self.y_pred]'], {}), '(result.outputs[self.y_pred])\n', (7041, 7070), True, 'import tensorflow as tf\n'), ((779, 803), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (791, 803), False, 'import os\n'), ((4376, 4399), 'numpy.any', 'np.any', (['map_img'], {'axis': '(0)'}), '(map_img, axis=0)\n', (4382, 4399), True, 'import numpy as np\n'), ((4441, 4464), 'numpy.any', 'np.any', (['map_img'], {'axis': '(1)'}), '(map_img, axis=1)\n', (4447, 4464), True, 'import numpy as np\n')] |
def mock_api(path, file_path, query=None, data=None):
from httmock import urlmatch, response
import json
@urlmatch(scheme='https', netloc='api.chatwork.com', path='/v1' + path)
def cw_api_mock(url, request):
assert query is None or url.query == query
assert data is None or json.loads(request.body) == data
from os import path
import codecs
dump = path.dirname(path.abspath(__file__)) + '/mock/' + file_path
file = codecs.open(dump, 'r', 'utf-8')
lines = file.readlines()
        file.close()
status = 0
headers = {}
body = ''
body_started = False
for i in range(len(lines)):
line = lines[i]
if i == 0:
status = int(line.split(' ')[1])
elif body_started:
body += line
elif (line.strip() == ''):
body_started = True
else:
key, value = line.split(':', 1)
headers[key] = value.strip()
return response(status,
content=body.encode('utf-8'),
headers=headers,
request=request)
return cw_api_mock
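def _example_usage():
    # NOTE (editor): illustrative only -- the '/rooms' path and the
    # 'rooms_response.txt' dump name are hypothetical.
    import requests
    from httmock import HTTMock
    with HTTMock(mock_api('/rooms', 'rooms_response.txt')):
        return requests.get('https://api.chatwork.com/v1/rooms')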
| [
"os.path.abspath",
"codecs.open",
"json.loads",
"httmock.urlmatch"
] | [((119, 189), 'httmock.urlmatch', 'urlmatch', ([], {'scheme': '"""https"""', 'netloc': '"""api.chatwork.com"""', 'path': "('/v1' + path)"}), "(scheme='https', netloc='api.chatwork.com', path='/v1' + path)\n", (127, 189), False, 'from httmock import urlmatch, response\n'), ((481, 512), 'codecs.open', 'codecs.open', (['dump', '"""r"""', '"""utf-8"""'], {}), "(dump, 'r', 'utf-8')\n", (492, 512), False, 'import codecs\n'), ((307, 331), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (317, 331), False, 'import json\n'), ((419, 441), 'os.path.abspath', 'path.abspath', (['__file__'], {}), '(__file__)\n', (431, 441), False, 'from os import path\n')] |
from django.contrib.auth import login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMessage
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from .tokens import account_activation_token
from . import utils
from .forms import *
from .models import *
from pprint import pprint
import requests
import logging
# TODO: View stubs
def add_new_property(request):
return render(request, 'demo/add_new_property.html')
def description(request):
return render(request, 'demo/description.html')
def manager_profile(request):
return render(request, 'demo/manager_profile.html')
def survey(request):
return render(request, 'demo/survey.html')
def index(request):
if request.method == 'POST':
form = SearchForm(request.POST)
if form.is_valid():
# Filter all null values from filter set
filters = {
key: value for key, value in form.cleaned_data.items()
                if value != '' and value is not False and value is not None and ('%s' % value).lower() != 'all'
}
# Separate domicile filters
domicile_filters = {
key: value for key, value in filters.items() if key in Domicile.__dict__
}
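            # Pop the domicile keys out of `filters`; all(map(...)) merely
            # forces the pop calls to execute.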
all(map(filters.pop, domicile_filters))
logging.debug("Listing filters: %s" % filters)
for key, value in domicile_filters.items():
logging.debug("Domicile filter: (%s, %s)" % (key, value))
results = Domicile.objects.all()
# Filter domiciles
if domicile_filters:
# Case-insensitive city search
if 'city' in domicile_filters:
city_value = domicile_filters.pop('city')
results = results.filter(city__iexact=city_value).filter(**domicile_filters)
else:
results = results.filter(**domicile_filters)
listings = ValidListing.objects.all().filter(pk__in=results).filter(**filters)
# Filter domiciles from filtered listings
if filters:
results = results.filter(pk__in=listings)
searched_lat_lng = get_lat_long(results)
for key, value in filters.items():
logging.debug("Search filters: (%s , %s)" % (key, value))
for entry in listings:
try:
logging.debug(entry.photo.url)
except ValueError as exception:
logging.warning(exception)
context = {
'form': form,
'search_results': results,
'listing_results': listings,
'search_count': len(results),
'lat_lng': searched_lat_lng
}
return render(request, 'demo/listing.html', {'context': context})
else:
form = SearchForm()
context = {
'form': form,
'search_results': []
}
return render(request, 'demo/index.html', {'context': context})
# LISTING PAGES #
def listing(request):
if request.method == 'POST':
form = SearchForm(request.POST)
if form.is_valid():
# Filter all null values from filter set
filters = {
key: value for key, value in form.cleaned_data.items()
                if value != '' and value is not False and value is not None and ('%s' % value).lower() != 'all'
}
# Separate domicile filters
domicile_filters = {
key: value for key, value in filters.items() if key in Domicile.__dict__
}
all(map(filters.pop, domicile_filters))
logging.debug("Listing filters: %s" % filters)
for key, value in domicile_filters.items():
logging.debug("Domicile filter: (%s, %s)" % (key, value))
results = Domicile.objects.all()
# Filter domiciles
if domicile_filters:
# Case-insensitive city search
if 'city' in domicile_filters:
city_value = domicile_filters.pop('city')
results = results.filter(city__iexact=city_value)
# Filter for square footage
if 'size' in domicile_filters:
size = domicile_filters.pop('size')
results = results.filter(size__gte=size)
results = results.filter(**domicile_filters)
listings = ValidListing.objects.all().filter(pk__in=results).filter(**filters)
# Filter domiciles from filtered listings
if filters:
results = results.filter(pk__in=listings)
searched_lat_lng = get_lat_long(results)
for key, value in filters.items():
logging.debug("Search filters: (%s , %s)" % (key, value))
for entry in listings:
try:
logging.debug(entry.photo.url)
except ValueError as exception:
logging.warning(exception)
context = {
'form': form,
'search_results': results,
'listing_results': listings,
'search_count': len(results),
'lat_lng': searched_lat_lng
}
return render(request, 'demo/listing.html', {'context': context})
else:
form = SearchForm()
# Should have some default listings displayed (maybe most recent?)
context = {
'form': form,
'search_results': []
}
return render(request, 'demo/listing.html', {'context': context})
@login_required
def create_listing(request):
if request.method == 'POST':
domicile_form = CreateDomicileForm(request.POST)
listing_form = CreateListingForm(request.POST)
if domicile_form.is_valid() and listing_form.is_valid():
for key, value in domicile_form.cleaned_data.items():
logging.debug("(%s, %s)" % (key, value))
for key, value in listing_form.cleaned_data.items():
logging.debug("(%s, %s)" % (key, value))
try:
logging.info("Creating new residence...")
owner = listing_form.cleaned_data.pop('owner')
owner_exists = len(VerifiedUser.objects.all().filter(username=owner)) >= 1
if not owner_exists:
raise KeyError("Owner '%s' not found." % owner)
domicile = Domicile()
domicile.update(**domicile_form.cleaned_data)
domicile.save()
logging.info("Creating new listing...")
listing = ValidListing()
listing.update(**listing_form.cleaned_data)
listing.residence = domicile
listing.save()
except Exception as error_message:
logging.error("Operation failed: %s" % error_message)
else:
domicile_form = CreateDomicileForm()
listing_form = CreateListingForm()
context = {
'domicile_form': domicile_form,
'listing_form': listing_form
}
return render(request, 'demo/add_new_property.html', {'context': context})
@login_required
def edit_listing(request, listing_id):
listing = get_object_or_404(ValidListing, pk=listing_id)
if request.method == 'POST':
form = EditListingForm(request.POST)
if form.is_valid():
try:
logging.info("Editing listing '%s'..." % listing_id)
listing.update(**form.cleaned_data)
listing.save()
context = {
'form': form,
'update_success': True,
'error_message': ''
}
except Exception as error_message:
context = {
'form': form,
'update_success': False,
'error_message': '%s' % error_message
}
else:
context = {
'form': form,
'update_success': False,
'error_message': '%s' % form.errors
}
else:
form = EditListingForm(instance=listing)
context = {
'form': form,
'update_success': False,
'error_message': ''
}
if context['error_message']:
logging.error("Edit operation failed: %s" % context['error_message'])
return render(request, "demo/modify_listing.html", {'context': context})
@login_required
def view_listing(request, listing_id):
listing = get_object_or_404(ValidListing, pk=listing_id)
domicile = listing.residence
full_address = domicile.address + " " + domicile.city + " " + domicile.state + " " + str(domicile.zip_code)
# get_lat_long takes a list of listings as argument and returns a list of dicts
lat_long = get_lat_long([domicile])
# Get the single dictionary
single_lat_long = lat_long[0]
context = {
'listing': listing,
'domicile': domicile,
'address': full_address,
'lat_long': single_lat_long,
}
return render(request, 'demo/description.html', {'context': context})
# USER PAGES #
def create_account(request):
if request.method == 'POST':
form = CreateUserForm(request.POST)
if form.is_valid():
user_attributes = {
key: value for key, value in form.cleaned_data.items()
}
# Encrypt password before creating account
user_attributes.pop('confirm_password')
secret = '%s' % user_attributes.get('password', '')
secret = utils.encrypt_password(secret)
user_attributes['password'] = secret
try:
logging.info("Creating new user...")
user = RegisteredUser()
user.update(**user_attributes)
user.save()
logging.info("Created user '%s'." % user.email)
# User creation success, now send email to activate full account
current_site = get_current_site(request)
mail_subject = 'Pegasus account registration'
message = render_to_string('registration/signup_email.html', {
'user': user,
'domain': current_site.domain,
'uid': urlsafe_base64_encode(force_bytes(user.username)).decode(),
'token': account_activation_token.make_token(user),
})
email = EmailMessage(mail_subject, message, to=[user.email])
email.send()
logging.info("Sent confirmation email to user '%s' for activation." % email)
return render(request, 'demo/login_confirmation.html')
except Exception as error_message:
context = {
'form': form,
'creation_success': False,
'form_submitted': True,
'error_message': '%s' % error_message
}
else:
context = {
'form': form,
'creation_success': False,
'form_submitted': False,
'error_message': '%s' % form.errors
}
else:
form = CreateUserForm()
context = {
'form': form,
'creation_success': False,
'form_submitted': False,
'error_message': ''
}
return render(request, 'demo/create_account.html', {'context': context})
def activate(request, uidb64, token):
try:
uid = force_text(urlsafe_base64_decode(uidb64))
user = RegisteredUser.objects.get(username=uid)
except (TypeError, ValueError, RegisteredUser.DoesNotExist):
user = None
if user is not None and account_activation_token.check_token(user, token):
# Coerce registered user to verified user first. Then promote to either Student or Landlord
user.is_active = True
user.__class__ = VerifiedUser
user.save(force_insert=True)
if user.is_student:
user.__class__ = Student
else:
user.__class__ = Landlord
user.save(force_insert=True)
login(request, user, backend='demo.utils.AuthBackend')
return HttpResponse('Thank you for your email confirmation. You may now login with your account.')
else:
if user is not None:
logging.warning("Got invalid token activation from user '%s'." % user.username)
return HttpResponse('Activation link is invalid!')
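# Note: the activation link round-trips uidb64 = urlsafe_base64_encode(force_bytes(user.username))
# and an account_activation_token value built in create_account(); activate() decodes the uid
# and re-checks the token before promoting the account.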
def user_login(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
form_data = {
key: value for key, value in form.cleaned_data.items()
}
username = form_data['username']
password = form_data['password']
auth_backend = utils.AuthBackend()
user = auth_backend.authenticate(
username=username, password=password)
logging.info("Got login request from '%s'." % username)
# Login success
if user is not None:
logging.info("Login success.")
login(request, user, backend='demo.utils.AuthBackend')
# Check if user needs to redirect to another page other than index
next_url = request.POST.get('next', '')
if next_url:
return redirect(next_url)
else:
return HttpResponseRedirect(reverse('index'))
# Login failure
else:
logging.info("Login failure. Check username or password.")
context = {
'login_form': form,
'error_message': 'Username or password is incorrect.'
}
else:
context = {
'login_form': form,
'error_message': '%s' % form.errors
}
else:
form = LoginForm()
context = {
'login_form': form,
'error_message': ''
}
return render(request, 'demo/login.html', {'context': context})
@login_required
def compatibility_score(request):
if request.method == 'POST':
compatibility_form = CompatibilityScoreForm(request.POST)
if compatibility_form.is_valid():
pass
else:
compatibility_form = CompatibilityScoreForm()
context = {
'form': compatibility_form
}
return render(request, 'demo/compatibility_score.html', {'context': context})
@login_required
def user_logout(request):
logout(request)
return HttpResponseRedirect(reverse('index'))
@login_required
def view_profile(request, username=None):
try:
# If no username is provided, default to currently logged in account
if username is None:
username = request.user.username
user_instance = RegisteredUser.objects.get(username=username)
if user_instance:
context = {
'user_found': True,
'user': user_instance,
'error_message': ''
}
else:
context = {
'user_found': False,
'user': None,
'error_message': "User '%s' not found." % username
}
except Exception as error_message:
context = {
'user_found': False,
'user': None,
'error_message': "User '%s' not found." % username
}
return render(request, 'demo/view_profile.html', {'context': context})
@login_required
def modify_profile(request):
current_user = request.user.username
user_instance = RegisteredUser.objects.get(username=current_user)
if request.method == 'POST':
form = EditUserForm(request.POST)
if form.is_valid():
user_attributes = {
key: value for key, value in form.cleaned_data.items()
}
try:
user_instance.update(**user_attributes)
user_instance.save()
context = {
'form': form,
'update_success': True,
'error_message': ''
}
except Exception as error_message:
context = {
'form': form,
'update_success': False,
'error_message': '%s' % error_message
}
else:
context = {
'form': form,
'update_success': False,
'error_message': '%s.' % form.errors
}
else:
form = EditUserForm(instance=user_instance)
context = {
'form': form,
'update_success': False,
'error_message': ''
}
return render(request, 'demo/modify_profile.html', {'context': context})
@login_required
def delete_user(request):
if request.method == 'POST':
form = DeleteUserForm(request.POST)
if form.is_valid():
pass
else:
form = DeleteUserForm()
context = {
'form': form
}
return render(request, 'demo/delete_account.html', {'context': context})
def forgot_password(request):
if request.method == 'POST':
form = ForgotPasswordForm(request.POST)
if form.is_valid():
pass
else:
form = ForgotPasswordForm()
context = {
'form': form
}
return render(request, 'demo/forgot_password.html', {'context': context})
# Get geocoding data (lat / long) for searched listings
def get_lat_long(residences):
# List of dictionaries {'lat': xxx, 'lng':xxx}
all_lat_lng = []
for residence in residences:
geodata = {
'lat': 0,
'lng': 0
}
addr = residence.address
if addr:
            # Let requests handle URL-encoding of the address instead of raw concatenation.
            GOOGLE_MAPS_API_URL = 'https://maps.googleapis.com/maps/api/geocode/json'
            params = {'address': addr, 'key': '<KEY>'}
map_request = requests.get(GOOGLE_MAPS_API_URL, params=params)
response = map_request.json()
if len(response['results']) > 0:
result = response['results'][0]
geodata['lat'] = result['geometry']['location']['lat']
geodata['lng'] = result['geometry']['location']['lng']
all_lat_lng.append(geodata)
return all_lat_lng
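# e.g. get_lat_long([domicile]) -> [{'lat': 40.4406, 'lng': -79.9959}]
# (coordinates illustrative; an empty address yields the (0, 0) default above)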
#TODO: remove once functional
def maps(request):
listings = ValidListing.objects.all()
for listing in listings:
print(listing.residence.address)
# Holds geocoding data for all addresses (each of which is a dictionary)
all_lat_lng = []
for listing in listings:
geodata = dict()
geodata['lat'] = 0
geodata['lng'] = 0
addr = listing.residence.address
if addr:
            # Let requests handle URL-encoding of the address instead of raw concatenation.
            GOOGLE_MAPS_API_URL = 'https://maps.googleapis.com/maps/api/geocode/json'
            params = {'address': addr, 'key': '<KEY>'}
map_request = requests.get(GOOGLE_MAPS_API_URL, params=params)
response = map_request.json()
# print('response: ', response)
if len(response['results']) > 0:
result = response['results'][0]
geodata['lat'] = result['geometry']['location']['lat']
geodata['lng'] = result['geometry']['location']['lng']
all_lat_lng.append(geodata)
context = {
'addresses': listings,
'latitude': geodata['lat'],
'longitude': geodata['lng'],
'all_lat_lng': all_lat_lng,
}
pprint(context)
return render(request, 'demo/maps.html', context)
| [
"django.shortcuts.render",
"logging.debug",
"django.http.HttpResponse",
"django.shortcuts.get_object_or_404",
"django.contrib.auth.login",
"logging.warning",
"logging.info",
"requests.get",
"django.utils.encoding.force_bytes",
"django.shortcuts.redirect",
"django.urls.reverse",
"django.core.mail.EmailMessage",
"django.utils.http.urlsafe_base64_decode",
"logging.error",
"pprint.pprint",
"django.contrib.auth.logout",
"django.contrib.sites.shortcuts.get_current_site"
] | [((779, 824), 'django.shortcuts.render', 'render', (['request', '"""demo/add_new_property.html"""'], {}), "(request, 'demo/add_new_property.html')\n", (785, 824), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((864, 904), 'django.shortcuts.render', 'render', (['request', '"""demo/description.html"""'], {}), "(request, 'demo/description.html')\n", (870, 904), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((948, 992), 'django.shortcuts.render', 'render', (['request', '"""demo/manager_profile.html"""'], {}), "(request, 'demo/manager_profile.html')\n", (954, 992), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1027, 1062), 'django.shortcuts.render', 'render', (['request', '"""demo/survey.html"""'], {}), "(request, 'demo/survey.html')\n", (1033, 1062), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3388, 3444), 'django.shortcuts.render', 'render', (['request', '"""demo/index.html"""', "{'context': context}"], {}), "(request, 'demo/index.html', {'context': context})\n", (3394, 3444), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6019, 6077), 'django.shortcuts.render', 'render', (['request', '"""demo/listing.html"""', "{'context': context}"], {}), "(request, 'demo/listing.html', {'context': context})\n", (6025, 6077), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((7612, 7679), 'django.shortcuts.render', 'render', (['request', '"""demo/add_new_property.html"""', "{'context': context}"], {}), "(request, 'demo/add_new_property.html', {'context': context})\n", (7618, 7679), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((7751, 7797), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['ValidListing'], {'pk': 'listing_id'}), '(ValidListing, pk=listing_id)\n', (7768, 7797), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((8952, 9017), 'django.shortcuts.render', 'render', (['request', '"""demo/modify_listing.html"""', "{'context': context}"], {}), "(request, 'demo/modify_listing.html', {'context': context})\n", (8958, 9017), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((9089, 9135), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['ValidListing'], {'pk': 'listing_id'}), '(ValidListing, pk=listing_id)\n', (9106, 9135), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((9635, 9697), 'django.shortcuts.render', 'render', (['request', '"""demo/description.html"""', "{'context': context}"], {}), "(request, 'demo/description.html', {'context': context})\n", (9641, 9697), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((12017, 12082), 'django.shortcuts.render', 'render', (['request', '"""demo/create_account.html"""', "{'context': context}"], {}), "(request, 'demo/create_account.html', {'context': context})\n", (12023, 12082), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((14725, 14781), 'django.shortcuts.render', 'render', (['request', '"""demo/login.html"""', "{'context': context}"], {}), "(request, 'demo/login.html', {'context': context})\n", (14731, 14781), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15127, 15197), 'django.shortcuts.render', 'render', (['request', '"""demo/compatibility_score.html"""', "{'context': context}"], {}), "(request, 
'demo/compatibility_score.html', {'context': context})\n", (15133, 15197), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15246, 15261), 'django.contrib.auth.logout', 'logout', (['request'], {}), '(request)\n', (15252, 15261), False, 'from django.contrib.auth import login, logout\n'), ((16169, 16232), 'django.shortcuts.render', 'render', (['request', '"""demo/view_profile.html"""', "{'context': context}"], {}), "(request, 'demo/view_profile.html', {'context': context})\n", (16175, 16232), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((17495, 17560), 'django.shortcuts.render', 'render', (['request', '"""demo/modify_profile.html"""', "{'context': context}"], {}), "(request, 'demo/modify_profile.html', {'context': context})\n", (17501, 17560), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((17826, 17891), 'django.shortcuts.render', 'render', (['request', '"""demo/delete_account.html"""', "{'context': context}"], {}), "(request, 'demo/delete_account.html', {'context': context})\n", (17832, 17891), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((18153, 18219), 'django.shortcuts.render', 'render', (['request', '"""demo/forgot_password.html"""', "{'context': context}"], {}), "(request, 'demo/forgot_password.html', {'context': context})\n", (18159, 18219), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((20381, 20396), 'pprint.pprint', 'pprint', (['context'], {}), '(context)\n', (20387, 20396), False, 'from pprint import pprint\n'), ((20408, 20450), 'django.shortcuts.render', 'render', (['request', '"""demo/maps.html"""', 'context'], {}), "(request, 'demo/maps.html', context)\n", (20414, 20450), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((8871, 8940), 'logging.error', 'logging.error', (["('Edit operation failed: %s' % context['error_message'])"], {}), "('Edit operation failed: %s' % context['error_message'])\n", (8884, 8940), False, 'import logging\n'), ((12779, 12833), 'django.contrib.auth.login', 'login', (['request', 'user'], {'backend': '"""demo.utils.AuthBackend"""'}), "(request, user, backend='demo.utils.AuthBackend')\n", (12784, 12833), False, 'from django.contrib.auth import login, logout\n'), ((12849, 12950), 'django.http.HttpResponse', 'HttpResponse', (['"""Thank you for your email confirmation. You may now login with your account."""'], {}), "(\n 'Thank you for your email confirmation. 
You may now login with your account.'\n )\n", (12861, 12950), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((13087, 13130), 'django.http.HttpResponse', 'HttpResponse', (['"""Activation link is invalid!"""'], {}), "('Activation link is invalid!')\n", (13099, 13130), False, 'from django.http import HttpResponseRedirect, HttpResponse\n'), ((15294, 15310), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (15301, 15310), False, 'from django.urls import reverse\n'), ((1704, 1750), 'logging.debug', 'logging.debug', (["('Listing filters: %s' % filters)"], {}), "('Listing filters: %s' % filters)\n", (1717, 1750), False, 'import logging\n'), ((3206, 3264), 'django.shortcuts.render', 'render', (['request', '"""demo/listing.html"""', "{'context': context}"], {}), "(request, 'demo/listing.html', {'context': context})\n", (3212, 3264), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4106, 4152), 'logging.debug', 'logging.debug', (["('Listing filters: %s' % filters)"], {}), "('Listing filters: %s' % filters)\n", (4119, 4152), False, 'import logging\n'), ((5765, 5823), 'django.shortcuts.render', 'render', (['request', '"""demo/listing.html"""', "{'context': context}"], {}), "(request, 'demo/listing.html', {'context': context})\n", (5771, 5823), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((12157, 12186), 'django.utils.http.urlsafe_base64_decode', 'urlsafe_base64_decode', (['uidb64'], {}), '(uidb64)\n', (12178, 12186), False, 'from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode\n'), ((12992, 13071), 'logging.warning', 'logging.warning', (['("Got invalid token activation from user \'%s\'." % user.username)'], {}), '("Got invalid token activation from user \'%s\'." % user.username)\n', (13007, 13071), False, 'import logging\n'), ((13620, 13675), 'logging.info', 'logging.info', (['("Got login request from \'%s\'." % username)'], {}), '("Got login request from \'%s\'." 
% username)\n', (13632, 13675), False, 'import logging\n'), ((18791, 18839), 'requests.get', 'requests.get', (['GOOGLE_MAPS_API_URL'], {'params': 'params'}), '(GOOGLE_MAPS_API_URL, params=params)\n', (18803, 18839), False, 'import requests\n'), ((19799, 19847), 'requests.get', 'requests.get', (['GOOGLE_MAPS_API_URL'], {'params': 'params'}), '(GOOGLE_MAPS_API_URL, params=params)\n', (19811, 19847), False, 'import requests\n'), ((1824, 1881), 'logging.debug', 'logging.debug', (["('Domicile filter: (%s, %s)' % (key, value))"], {}), "('Domicile filter: (%s, %s)' % (key, value))\n", (1837, 1881), False, 'import logging\n'), ((2679, 2736), 'logging.debug', 'logging.debug', (["('Search filters: (%s , %s)' % (key, value))"], {}), "('Search filters: (%s , %s)' % (key, value))\n", (2692, 2736), False, 'import logging\n'), ((4226, 4283), 'logging.debug', 'logging.debug', (["('Domicile filter: (%s, %s)' % (key, value))"], {}), "('Domicile filter: (%s, %s)' % (key, value))\n", (4239, 4283), False, 'import logging\n'), ((5238, 5295), 'logging.debug', 'logging.debug', (["('Search filters: (%s , %s)' % (key, value))"], {}), "('Search filters: (%s , %s)' % (key, value))\n", (5251, 5295), False, 'import logging\n'), ((6418, 6458), 'logging.debug', 'logging.debug', (["('(%s, %s)' % (key, value))"], {}), "('(%s, %s)' % (key, value))\n", (6431, 6458), False, 'import logging\n'), ((6540, 6580), 'logging.debug', 'logging.debug', (["('(%s, %s)' % (key, value))"], {}), "('(%s, %s)' % (key, value))\n", (6553, 6580), False, 'import logging\n'), ((6615, 6656), 'logging.info', 'logging.info', (['"""Creating new residence..."""'], {}), "('Creating new residence...')\n", (6627, 6656), False, 'import logging\n'), ((7067, 7106), 'logging.info', 'logging.info', (['"""Creating new listing..."""'], {}), "('Creating new listing...')\n", (7079, 7106), False, 'import logging\n'), ((7939, 7991), 'logging.info', 'logging.info', (['("Editing listing \'%s\'..." % listing_id)'], {}), '("Editing listing \'%s\'..." % listing_id)\n', (7951, 7991), False, 'import logging\n'), ((10273, 10309), 'logging.info', 'logging.info', (['"""Creating new user..."""'], {}), "('Creating new user...')\n", (10285, 10309), False, 'import logging\n'), ((10441, 10488), 'logging.info', 'logging.info', (['("Created user \'%s\'." % user.email)'], {}), '("Created user \'%s\'." % user.email)\n', (10453, 10488), False, 'import logging\n'), ((10602, 10627), 'django.contrib.sites.shortcuts.get_current_site', 'get_current_site', (['request'], {}), '(request)\n', (10618, 10627), False, 'from django.contrib.sites.shortcuts import get_current_site\n'), ((11056, 11108), 'django.core.mail.EmailMessage', 'EmailMessage', (['mail_subject', 'message'], {'to': '[user.email]'}), '(mail_subject, message, to=[user.email])\n', (11068, 11108), False, 'from django.core.mail import EmailMessage\n'), ((11154, 11230), 'logging.info', 'logging.info', (['("Sent confirmation email to user \'%s\' for activation." % email)'], {}), '("Sent confirmation email to user \'%s\' for activation." 
% email)\n', (11166, 11230), False, 'import logging\n'), ((11254, 11301), 'django.shortcuts.render', 'render', (['request', '"""demo/login_confirmation.html"""'], {}), "(request, 'demo/login_confirmation.html')\n", (11260, 11301), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((13754, 13784), 'logging.info', 'logging.info', (['"""Login success."""'], {}), "('Login success.')\n", (13766, 13784), False, 'import logging\n'), ((13801, 13855), 'django.contrib.auth.login', 'login', (['request', 'user'], {'backend': '"""demo.utils.AuthBackend"""'}), "(request, user, backend='demo.utils.AuthBackend')\n", (13806, 13855), False, 'from django.contrib.auth import login, logout\n'), ((14222, 14280), 'logging.info', 'logging.info', (['"""Login failure. Check username or password."""'], {}), "('Login failure. Check username or password.')\n", (14234, 14280), False, 'import logging\n'), ((2814, 2844), 'logging.debug', 'logging.debug', (['entry.photo.url'], {}), '(entry.photo.url)\n', (2827, 2844), False, 'import logging\n'), ((5373, 5403), 'logging.debug', 'logging.debug', (['entry.photo.url'], {}), '(entry.photo.url)\n', (5386, 5403), False, 'import logging\n'), ((7348, 7401), 'logging.error', 'logging.error', (["('Operation failed: %s' % error_message)"], {}), "('Operation failed: %s' % error_message)\n", (7361, 7401), False, 'import logging\n'), ((14052, 14070), 'django.shortcuts.redirect', 'redirect', (['next_url'], {}), '(next_url)\n', (14060, 14070), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2913, 2939), 'logging.warning', 'logging.warning', (['exception'], {}), '(exception)\n', (2928, 2939), False, 'import logging\n'), ((5472, 5498), 'logging.warning', 'logging.warning', (['exception'], {}), '(exception)\n', (5487, 5498), False, 'import logging\n'), ((14141, 14157), 'django.urls.reverse', 'reverse', (['"""index"""'], {}), "('index')\n", (14148, 14157), False, 'from django.urls import reverse\n'), ((10903, 10929), 'django.utils.encoding.force_bytes', 'force_bytes', (['user.username'], {}), '(user.username)\n', (10914, 10929), False, 'from django.utils.encoding import force_bytes, force_text\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
class Migration(migrations.Migration):
dependencies = [
('widget_def', '0025_geodataset_geom_type'),
('widget_data', '0009_rawdatarecord_csv'),
]
operations = [
migrations.CreateModel(
name='GeoFeature',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('geometry', django.contrib.gis.db.models.fields.GeometryField(srid=4326)),
('dataset', models.ForeignKey(to='widget_def.GeoDataset')),
],
),
migrations.CreateModel(
name='GeoProperty',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('intval', models.IntegerField(null=True, blank=True)),
('decval', models.DecimalField(null=True, max_digits=10, decimal_places=4, blank=True)),
('strval', models.CharField(max_length=400, null=True, blank=True)),
('dateval', models.DateField(null=True, blank=True)),
('timeval', models.TimeField(null=True, blank=True)),
('datetimeval', models.DateTimeField(null=True, blank=True)),
('feature', models.ForeignKey(to='widget_data.GeoFeature')),
('prop', models.ForeignKey(to='widget_def.GeoPropertyDefinition')),
],
),
migrations.AlterUniqueTogether(
name='geoproperty',
unique_together=set([('feature', 'prop')]),
),
]
| [
"django.db.models.DateField",
"django.db.models.TimeField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.DecimalField",
"django.db.models.CharField"
] | [((449, 542), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (465, 542), False, 'from django.db import models, migrations\n'), ((661, 706), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""widget_def.GeoDataset"""'}), "(to='widget_def.GeoDataset')\n", (678, 706), False, 'from django.db import models, migrations\n'), ((843, 936), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (859, 936), False, 'from django.db import models, migrations\n'), ((962, 1004), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (981, 1004), False, 'from django.db import models, migrations\n'), ((1034, 1109), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'null': '(True)', 'max_digits': '(10)', 'decimal_places': '(4)', 'blank': '(True)'}), '(null=True, max_digits=10, decimal_places=4, blank=True)\n', (1053, 1109), False, 'from django.db import models, migrations\n'), ((1139, 1194), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(400)', 'null': '(True)', 'blank': '(True)'}), '(max_length=400, null=True, blank=True)\n', (1155, 1194), False, 'from django.db import models, migrations\n'), ((1225, 1264), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1241, 1264), False, 'from django.db import models, migrations\n'), ((1295, 1334), 'django.db.models.TimeField', 'models.TimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1311, 1334), False, 'from django.db import models, migrations\n'), ((1369, 1412), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1389, 1412), False, 'from django.db import models, migrations\n'), ((1443, 1489), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""widget_data.GeoFeature"""'}), "(to='widget_data.GeoFeature')\n", (1460, 1489), False, 'from django.db import models, migrations\n'), ((1517, 1573), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""widget_def.GeoPropertyDefinition"""'}), "(to='widget_def.GeoPropertyDefinition')\n", (1534, 1573), False, 'from django.db import models, migrations\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import sys
sys.dont_write_bytecode = True
MISSING_DEPENDENCY = False
try:
from django.conf import settings
except ImportError:
sys.stdout.write("You'll need to `pip install Django>=1.4` to run this demo\n")
MISSING_DEPENDENCY = True
if MISSING_DEPENDENCY:
sys.exit(1)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
if __name__ == "__main__":
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
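# Usage sketch (commands illustrative): serve via WSGI, e.g.
#   gunicorn <module_name>:application
# or run management commands directly, e.g.
#   python <this_file>.py runserver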
| [
"os.environ.setdefault",
"django.core.wsgi.get_wsgi_application",
"django.core.management.execute_from_command_line",
"sys.exit",
"sys.stdout.write"
] | [((379, 443), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""test_settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'test_settings')\n", (400, 443), False, 'import os\n'), ((509, 531), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (529, 531), False, 'from django.core.wsgi import get_wsgi_application\n'), ((625, 660), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (['sys.argv'], {}), '(sys.argv)\n', (650, 660), False, 'from django.core.management import execute_from_command_line\n'), ((366, 377), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (374, 377), False, 'import sys\n'), ((231, 310), 'sys.stdout.write', 'sys.stdout.write', (['"""You\'ll need to `pip install Django>=1.4` to run this demo\n"""'], {}), '("You\'ll need to `pip install Django>=1.4` to run this demo\\n")\n', (247, 310), False, 'import sys\n')] |
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import copy
import logging
from abc import ABCMeta
from functools import lru_cache
from typing import Callable, Dict, List, Optional, Tuple
import torch
import torch.distributions as dist
import torch.nn as nn
import torch.optim
from torch import Tensor
from tqdm.auto import tqdm
from ...legacy.inference.abstract_infer import AbstractInference
from ...model.rv_identifier import RVIdentifier
from ...model.utils import LogLevel
from .mean_field_variational_approximation import MeanFieldVariationalApproximation
from .optim import BMMultiOptimizer, BMOptim
LOGGER = logging.getLogger("beanmachine.vi")
cpu_device = torch.device("cpu")
default_params = {}
class MeanFieldVariationalInference(AbstractInference, metaclass=ABCMeta):
"""Inference class for mean-field variational inference.
Fits a mean-field reparameterized guide on unconstrained latent space
following ADVI (https://arxiv.org/pdf/1603.00788.pdf). The mean-field
factors are IAF transforms (https://arxiv.org/pdf/1606.04934.pdf) of a
given `base_dist`.
"""
def infer( # noqa: C901
self,
queries: List[RVIdentifier],
observations: Dict[RVIdentifier, Tensor],
num_iter: int = 100,
lr: float = 1e-3,
base_dist: Optional[dist.Distribution] = None,
base_args: Optional[dict] = None,
random_seed: Optional[int] = None,
num_elbo_mc_samples=100,
) -> Callable[[RVIdentifier], MeanFieldVariationalApproximation]:
"""
Trains a set of mean-field variational approximation (one per site).
All tensors in `queries` and `observations` must be allocated on the
same `torch.device`. Inference algorithms will attempt to allocate
intermediate tensors on the same device.
:param queries: queried random variables
:param observations: observations dict
:param num_iter: number of worlds to train over
:param lr: learning rate
:param base_dist: constructor fn for base distribution for flow
:param base_args: arguments to base_dist (will optimize any `nn.Parameter`s)
"""
if not base_dist:
base_dist = dist.Normal
base_args = {"loc": torch.tensor([0.0]), "scale": torch.tensor([1.0])}
# TODO: reinterpret batch dimension?
if not base_args:
base_args = {}
try:
if not random_seed:
random_seed = (
torch.randint(MeanFieldVariationalInference._rand_int_max, (1,))
.int()
.item()
)
self.set_seed(random_seed)
self.queries_ = queries
self.observations_ = observations
def _get_var_approx(rvid):
target_dist = self.world_.get_node_in_world_raise_error(
rvid
).distribution
# NOTE: this assumes the target distribution's event_shape and
# support do not change
return MeanFieldVariationalApproximation(
lr=lr,
target_dist=target_dist,
base_dist=base_dist,
base_args=copy.deepcopy(base_args),
)
vi_dicts = lru_cache(maxsize=None)(_get_var_approx)
for _ in tqdm(iterable=range(num_iter), desc="Training iterations"):
# sample world x ~ q_t
self.initialize_world(False, vi_dicts)
nodes = self.world_.get_all_world_vars()
latent_rvids = list(
filter(lambda rvid: rvid not in self.observations_, nodes.keys())
)
loss = torch.zeros(1)
# decompose mean-field ELBO expectation E_x = E_s E_\s and
# iterate over latent sites x_s.
for rvid in latent_rvids:
v_approx = vi_dicts(rvid)
# Form single-site Gibbs density approximating E_\s using
# previously sampled x_\s, i.e. x_s -> E_\s log p(x_s, x_\s)
# ~= x_s -> log p(x_s, z) with z ~ p(x_\s)
def _target_log_prob(x):
(
_,
_,
_,
proposed_score,
) = self.world_.propose_change_transformed_value(
rvid, x, start_new_diff=False
)
self.world_.reject_diff()
return proposed_score
# MC approximate E_s using `num_elbo_mc_samples` (reparameterized)
# samples x_{s,i} ~ q_t(x_s) i.e.
# ELBO ~= E_s log p(x_s, x_\s) / q(x_s)
# ~= (1/N) \sum_i^N log p(x_{s,i}, x_\s) / q(x_{s,i})
loss -= v_approx.elbo(
_target_log_prob,
num_elbo_mc_samples,
)
if not torch.isnan(loss) and not torch.isinf(loss):
for rvid in latent_rvids:
v_approx = vi_dicts(rvid)
v_approx.optim.zero_grad()
loss.backward(retain_graph=True)
for rvid in latent_rvids:
v_approx = vi_dicts(rvid)
v_approx.optim.step()
v_approx.recompute_transformed_distribution()
else:
# TODO: caused by e.g. negative scales in `dist.Normal`;
# fix using pytorch's `constraint_registry` to account for
# `Distribution.arg_constraints`
LOGGER.log(
LogLevel.INFO.value, "Encountered NaNs in loss, skipping epoch"
)
except BaseException as x:
raise x
finally:
self.reset()
return vi_dicts
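# Illustrative usage of MeanFieldVariationalInference above (hypothetical
# two-level model; assumes beanmachine's @bm.random_variable decorator):
#
#   @bm.random_variable
#   def mu():
#       return dist.Normal(torch.tensor(0.0), torch.tensor(1.0))
#
#   @bm.random_variable
#   def x(i):
#       return dist.Normal(mu(), torch.tensor(1.0))
#
#   vi_dicts = MeanFieldVariationalInference().infer(
#       queries=[mu()],
#       observations={x(i): torch.tensor(1.0) for i in range(10)},
#       num_iter=200,
#   )
#   mu_approx = vi_dicts(mu())  # per-site MeanFieldVariationalApproximation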
class VariationalInference(AbstractInference, metaclass=ABCMeta):
"""
Stochastic Variational Inference.
Fits a variational approximation represented as a guide program by
Monte-Carlo approximating ELBO and optimizing over any `bm.param`s
used in the guide.
"""
def __init__(self):
super().__init__()
def infer(
self,
model_to_guide_ids: Dict[RVIdentifier, RVIdentifier],
observations: Dict[RVIdentifier, Tensor],
num_iter: int = 100,
lr: float = 1e-3,
random_seed: Optional[int] = None,
on_iter: Optional[Callable] = None,
params: Dict[RVIdentifier, nn.Parameter] = default_params,
optimizer: Optional[BMMultiOptimizer] = None,
progress_bar: Optional[bool] = True,
device: Optional[torch.device] = cpu_device,
) -> Dict[RVIdentifier, nn.Parameter]:
"""
A multiple-step version of `.step()` to perform Stochastic Variational Inference.
This is convenient for full-batch training.
:param model_to_guide_ids: mapping from latent variables to their
respective guide random variables
:param observations: observed random variables with their values
:param num_iter: number of iterations of optimizer steps
:param lr: learning rate
:param random_seed: random seed
:param on_iter: callable executed after each optimizer iteration
:param params: parameter random_variable keys and their values, used
to initialize optimization if present
:param optimizer: BMOptim (wrapped torch optimizer) instance to reuse
:param progress_bar: flag for tqdm progress, disable for less output
when minibatching
:param device: default torch device for tensor allocations
:returns: mapping from all `bm.param` `RVIdentifier`s encountered
to their optimized values
"""
try:
if not random_seed:
random_seed = (
torch.randint(MeanFieldVariationalInference._rand_int_max, (1,))
.int()
.item()
)
self.set_seed(random_seed)
if not optimizer:
# initialize world so guide params available
self.queries_ = list(model_to_guide_ids.keys())
self.observations_ = observations
self.initialize_world(
False,
model_to_guide_ids=model_to_guide_ids,
params=params,
)
# optimizer = torch.optim.Adam(self.world_.params_.values(), lr=lr)
optimizer = BMMultiOptimizer(
BMOptim(
torch.optim.Adam,
{"lr": lr},
)
)
for it in (
tqdm(iterable=range(num_iter), desc="Training iterations")
if progress_bar
else range(num_iter)
):
loss, params, optimizer = self.step(
model_to_guide_ids, observations, optimizer, params, device
)
if on_iter:
on_iter(it, loss, params)
except BaseException as x:
raise x
finally:
self.reset()
return params
def step(
self,
model_to_guide_ids: Dict[RVIdentifier, RVIdentifier],
observations: Dict[RVIdentifier, Tensor],
optimizer: BMMultiOptimizer,
params: Dict[RVIdentifier, nn.Parameter] = default_params,
device: Optional[torch.device] = cpu_device,
) -> Tuple[torch.Tensor, Dict[RVIdentifier, nn.Parameter], BMMultiOptimizer]:
"""
Perform one step of stochastic variational inference.
All `bm.param`s referenced in guide random variables are optimized to
minimize a Monte Carlo approximation of negative ELBO loss. The
negative ELBO loss is Monte-Carlo approximated by sampling the guide
`bm.random_variable`s (i.e. values of `model_to_guide_ids`) to draw
trace samples from the variational approximation and scored against
the model `bm.random_variables` (i.e. keys of `model_to_guide_ids`).
It is the end-user's responsibility to interpret the optimized values
for the `bm.param`s returned.
:param model_to_guide_ids: mapping from latent variables to their
respective guide random variables
:param observations: observed random variables with their values
:param lr: learning rate
:param random_seed: random seed
:param params: parameter random_variable keys and their values, used
to initialize optimization if present
:param optimizer: optimizer state (e.g. momentum and weight decay)
to reuse
:param device: default torch device for tensor allocations
:returns: loss value, mapping from all `bm.param` `RVIdentifier`s
encountered to their optimized values, optimizer for the respective
`bm.param` tensors
"""
self.queries_ = list(model_to_guide_ids.keys())
self.observations_ = observations
# sample world x ~ q_t
self.initialize_world(
False,
model_to_guide_ids=model_to_guide_ids,
params=params,
)
# TODO: add new `self.world_.params_` not already in optimizer
nodes = self.world_.get_all_world_vars()
latent_rvids = list(
filter(
lambda rvid: (
rvid not in self.observations_
and rvid not in model_to_guide_ids.values()
),
nodes.keys(),
)
)
loss = torch.zeros(1).to(device)
# -ELBO == E[log p(obs, x) - log q(x)] ~= log p(obs | x) +
# \sum_s (log p(x_s) - log q(x_s)) where x_s ~ q_t were sampled
# during `initialize_world`.
# Here we compute the second term suming over latent sites x_s.
for rvid in latent_rvids:
assert (
rvid in model_to_guide_ids
), f"expected every latent to have a guide, but did not find one for {rvid}"
node_var = nodes[rvid]
v_approx = nodes[model_to_guide_ids[rvid]]
if isinstance(node_var.distribution, dist.Bernoulli) and isinstance(
v_approx.distribution, dist.Bernoulli
):
# binary cross entropy, analytical ELBO
# TODO: more general enumeration
loss += nn.BCELoss()(
# pyre-fixme[16]: `Distribution` has no attribute `probs`.
v_approx.distribution.probs,
node_var.distribution.probs,
)
# TODO: downstream observation likelihoods p(obs | rvid)
else:
# MC ELBO
loss += v_approx.distribution.log_prob(node_var.value).sum()
loss -= node_var.distribution.log_prob(node_var.value).sum()
# Add the remaining likelihood term log p(obs | x)
for obs_rvid in self.observations_:
obs_var = nodes[obs_rvid]
loss -= obs_var.distribution.log_prob(
self.observations_[obs_rvid]
).sum()
if not torch.isnan(loss):
# loss.backward()
optimizer.step(loss, self.world_.params_)
# optimizer.zero_grad()
params = self.world_.params_
else:
# TODO: caused by e.g. negative scales in `dist.Normal`;
# fix using pytorch's `constraint_registry` to account for
# `Distribution.arg_constraints`
LOGGER.log(LogLevel.INFO.value, "Encountered NaNs in loss, skipping epoch")
return loss, params, optimizer
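# Illustrative SVI sketch for VariationalInference (hypothetical model and
# guide random variables; full-batch callers can use .infer() directly,
# minibatch callers can drive .step() themselves):
#
#   vi = VariationalInference()
#   params = vi.infer(
#       model_to_guide_ids={mu(): q_mu()},
#       observations={x(i): data[i] for i in range(n)},
#       num_iter=500,
#       lr=1e-2,
#   )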
| [
"logging.getLogger",
"torch.tensor",
"torch.nn.BCELoss",
"torch.randint",
"copy.deepcopy",
"functools.lru_cache",
"torch.isinf",
"torch.isnan",
"torch.zeros",
"torch.device"
] | [((752, 787), 'logging.getLogger', 'logging.getLogger', (['"""beanmachine.vi"""'], {}), "('beanmachine.vi')\n", (769, 787), False, 'import logging\n'), ((801, 820), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (813, 820), False, 'import torch\n'), ((13710, 13727), 'torch.isnan', 'torch.isnan', (['loss'], {}), '(loss)\n', (13721, 13727), False, 'import torch\n'), ((2404, 2423), 'torch.tensor', 'torch.tensor', (['[0.0]'], {}), '([0.0])\n', (2416, 2423), False, 'import torch\n'), ((2434, 2453), 'torch.tensor', 'torch.tensor', (['[1.0]'], {}), '([1.0])\n', (2446, 2453), False, 'import torch\n'), ((3470, 3493), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': 'None'}), '(maxsize=None)\n', (3479, 3493), False, 'from functools import lru_cache\n'), ((3908, 3922), 'torch.zeros', 'torch.zeros', (['(1)'], {}), '(1)\n', (3919, 3922), False, 'import torch\n'), ((12076, 12090), 'torch.zeros', 'torch.zeros', (['(1)'], {}), '(1)\n', (12087, 12090), False, 'import torch\n'), ((12907, 12919), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (12917, 12919), True, 'import torch.nn as nn\n'), ((3402, 3426), 'copy.deepcopy', 'copy.deepcopy', (['base_args'], {}), '(base_args)\n', (3415, 3426), False, 'import copy\n'), ((5277, 5294), 'torch.isnan', 'torch.isnan', (['loss'], {}), '(loss)\n', (5288, 5294), False, 'import torch\n'), ((5303, 5320), 'torch.isinf', 'torch.isinf', (['loss'], {}), '(loss)\n', (5314, 5320), False, 'import torch\n'), ((2654, 2718), 'torch.randint', 'torch.randint', (['MeanFieldVariationalInference._rand_int_max', '(1,)'], {}), '(MeanFieldVariationalInference._rand_int_max, (1,))\n', (2667, 2718), False, 'import torch\n'), ((8275, 8339), 'torch.randint', 'torch.randint', (['MeanFieldVariationalInference._rand_int_max', '(1,)'], {}), '(MeanFieldVariationalInference._rand_int_max, (1,))\n', (8288, 8339), False, 'import torch\n')] |
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from hkjournalist import Journalist
from fbprophet import Prophet
# data and models from fbprophet tutorials https://facebook.github.io/prophet/docs/quick_start.html#python-api
# markdown report template './reports/simple_model_report_template.md'
sns.set(style="darkgrid")
# maintain a dict to record all variables used in reports
config = {}
# load data
df = pd.read_csv('./data/example_wp_log_peyton_manning.csv')
df['ds'] = pd.to_datetime(df['ds'])
# plot full data set (time series)
ax = df.set_index('ds').plot(figsize=(20, 10))
plt.tight_layout()
config['data_plot'] = ax
# train/test split
train_end_date = '20131231'
config['train_end_date'] = train_end_date
train_df = df[df['ds'] <= train_end_date]
test_df = df[df['ds'] > train_end_date].copy()  # .copy() avoids SettingWithCopyWarning below
test_df['year'] = test_df['ds'].dt.year
test_df['month'] = test_df['ds'].dt.month
# model build
model = Prophet(weekly_seasonality=True, yearly_seasonality=True)
model.add_seasonality('monthly', period=30.5, fourier_order=12, prior_scale=10)
model.add_seasonality('quarter', period=364.5 / 4, fourier_order=10, prior_scale=5)
# model train & test
model.fit(train_df)
config['seasonality'] = pd.DataFrame(model.seasonalities)
test_df['y_pred'] = model.predict(test_df[['ds']])['yhat'].values
ax = test_df[['ds', 'y', 'y_pred']].set_index('ds').plot(figsize=(20, 10)) # plot predict result
plt.tight_layout()
config['pred_plot'] = ax
# define a new kpi metric according to rules
def kpi_mape(df, y_true, y_pred):
    # Aggregate absolute error relative to actuals per (year, month), pivoted to a month x year table
df[y_pred] = df[y_pred].clip(0, None)
df['diff'] = abs(df[y_true] - df[y_pred])
mape_df = df.groupby(['year', 'month']).agg({'diff': 'sum', y_true: 'sum'}).reset_index()
mape_df['mape'] = mape_df['diff'] / mape_df[y_true]
res_df = mape_df.pivot(index='month', columns='year', values='mape')
return res_df
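# e.g. kpi_mape(test_df, 'y', 'y_pred') returns a month x year error grid such as
# (values illustrative):
#   year       2014   2015
#   month
#   1          0.08   0.11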
config['metric_func'] = kpi_mape
# evaluate prediction on test set
kpi_df = kpi_mape(test_df, 'y', 'y_pred')
plt.figure(figsize=(4, 6))
ax = sns.heatmap(kpi_df, annot=True, cmap='YlGn', linewidth=.5, fmt='.2f')
plt.tight_layout()
config['error_plot'] = ax
# record extra info about model
config['note'] = "Prophet with no holidays info"
# Besides maintaining config dict, all code you need to generate reports is simply 3 lines below:
report_journalist = Journalist(template_file='./reports/1_prophet_report_template.md')
report_journalist.hear(config)
report_journalist.report(output_file='./reports/1_prophet_report.pdf', beamer=True, overwrite=False)
| [
"seaborn.set",
"pandas.read_csv",
"hkjournalist.Journalist",
"seaborn.heatmap",
"matplotlib.pyplot.figure",
"fbprophet.Prophet",
"matplotlib.pyplot.tight_layout",
"pandas.DataFrame",
"pandas.to_datetime"
] | [((324, 349), 'seaborn.set', 'sns.set', ([], {'style': '"""darkgrid"""'}), "(style='darkgrid')\n", (331, 349), True, 'import seaborn as sns\n'), ((439, 494), 'pandas.read_csv', 'pd.read_csv', (['"""./data/example_wp_log_peyton_manning.csv"""'], {}), "('./data/example_wp_log_peyton_manning.csv')\n", (450, 494), True, 'import pandas as pd\n'), ((506, 530), 'pandas.to_datetime', 'pd.to_datetime', (["df['ds']"], {}), "(df['ds'])\n", (520, 530), True, 'import pandas as pd\n'), ((614, 632), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (630, 632), True, 'import matplotlib.pyplot as plt\n'), ((936, 993), 'fbprophet.Prophet', 'Prophet', ([], {'weekly_seasonality': '(True)', 'yearly_seasonality': '(True)'}), '(weekly_seasonality=True, yearly_seasonality=True)\n', (943, 993), False, 'from fbprophet import Prophet\n'), ((1224, 1257), 'pandas.DataFrame', 'pd.DataFrame', (['model.seasonalities'], {}), '(model.seasonalities)\n', (1236, 1257), True, 'import pandas as pd\n'), ((1422, 1440), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (1438, 1440), True, 'import matplotlib.pyplot as plt\n'), ((2018, 2044), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4, 6)'}), '(figsize=(4, 6))\n', (2028, 2044), True, 'import matplotlib.pyplot as plt\n'), ((2050, 2120), 'seaborn.heatmap', 'sns.heatmap', (['kpi_df'], {'annot': '(True)', 'cmap': '"""YlGn"""', 'linewidth': '(0.5)', 'fmt': '""".2f"""'}), "(kpi_df, annot=True, cmap='YlGn', linewidth=0.5, fmt='.2f')\n", (2061, 2120), True, 'import seaborn as sns\n'), ((2120, 2138), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2136, 2138), True, 'import matplotlib.pyplot as plt\n'), ((2366, 2432), 'hkjournalist.Journalist', 'Journalist', ([], {'template_file': '"""./reports/1_prophet_report_template.md"""'}), "(template_file='./reports/1_prophet_report_template.md')\n", (2376, 2432), False, 'from hkjournalist import Journalist\n')] |
import sys
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
# Declarative System used for mapping database
Base = declarative_base()
#F5Device holds information specific to each F5 device
class F5Device(Base):
__tablename__ = 'f5_device'
id = Column(Integer, primary_key=True)
ipAddress = Column(String(20), nullable=False)
hostName = Column(String(50))
details = Column(String(250))
apiUserName = Column(String(50))
apiPassword = Column(String(50))
#AFMStat holds information about the various AFM stats and contains the URI to gather the data through the F5 API.
class AFMStat(Base):
__tablename__ = 'afm_stat'
id = Column(Integer, primary_key=True)
f5Device_Id = Column(Integer, ForeignKey('f5_device.id'))
statType = Column(String(20))
statDescription = Column(String(250))
statURL = Column(String(250), nullable=False)
#StatValue holds date, time and value information for the various AFM stats.
class StatValue(Base):
__tablename__ = 'stat_value'
id = Column(Integer, primary_key=True)
afmStat_id = Column(Integer, ForeignKey('afm_stat.id'))
dateTime = Column(DateTime)
statValue = Column(Integer)
#Create database named F5AFM_App.db
engine = create_engine('sqlite:///F5AFM_App.db')
#Apply all metadata to database, F5AFM_App.db.
Base.metadata.create_all(engine)
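#Example usage sketch (standard SQLAlchemy session pattern; values illustrative):
#   from sqlalchemy.orm import sessionmaker
#   session = sessionmaker(bind=engine)()
#   device = F5Device(ipAddress='10.0.0.1', hostName='afm-01',
#                     apiUserName='admin', apiPassword='secret')
#   session.add(device)
#   session.commit()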
| [
"sqlalchemy.create_engine",
"sqlalchemy.ForeignKey",
"sqlalchemy.String",
"sqlalchemy.ext.declarative.declarative_base",
"sqlalchemy.Column"
] | [((268, 286), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (284, 286), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((1372, 1411), 'sqlalchemy.create_engine', 'create_engine', (['"""sqlite:///F5AFM_App.db"""'], {}), "('sqlite:///F5AFM_App.db')\n", (1385, 1411), False, 'from sqlalchemy import create_engine\n'), ((407, 440), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (413, 440), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((812, 845), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (818, 845), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((1168, 1201), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1174, 1201), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((1277, 1293), 'sqlalchemy.Column', 'Column', (['DateTime'], {}), '(DateTime)\n', (1283, 1293), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((1310, 1325), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (1316, 1325), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((464, 474), 'sqlalchemy.String', 'String', (['(20)'], {}), '(20)\n', (470, 474), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((514, 524), 'sqlalchemy.String', 'String', (['(50)'], {}), '(50)\n', (520, 524), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((547, 558), 'sqlalchemy.String', 'String', (['(250)'], {}), '(250)\n', (553, 558), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((585, 595), 'sqlalchemy.String', 'String', (['(50)'], {}), '(50)\n', (591, 595), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((622, 632), 'sqlalchemy.String', 'String', (['(50)'], {}), '(50)\n', (628, 632), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((880, 906), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""f5_device.id"""'], {}), "('f5_device.id')\n", (890, 906), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((930, 940), 'sqlalchemy.String', 'String', (['(20)'], {}), '(20)\n', (936, 940), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((971, 982), 'sqlalchemy.String', 'String', (['(250)'], {}), '(250)\n', (977, 982), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((1005, 1016), 'sqlalchemy.String', 'String', (['(250)'], {}), '(250)\n', (1011, 1016), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n'), ((1235, 1260), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""afm_stat.id"""'], {}), "('afm_stat.id')\n", (1245, 1260), False, 'from sqlalchemy import Column, Integer, String, DateTime, ForeignKey\n')] |
import logging
from .utils import chunked
log = logging.getLogger('gmailsync')
# Sending batches larger than 50 requests is not recommended.
# https://developers.google.com/gmail/api/v1/reference/quota
CHUNK_SIZE = 50
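# For reference, a minimal `chunked` helper consistent with its use below
# (the real implementation lives in .utils; this is only a sketch):
#
#   def chunked(items, size):
#       for i in range(0, len(items), size):
#           yield items[i:i + size]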
class Synchronizer:
def __init__(self, client, channels):
self.client = client
self.channels = channels
def sync(self):
# TODO: optimize routing for messages tagged with multiple labels
for channel in self.channels:
self.sync_channel(channel)
def sync_channel(self, channel):
last_timestamp = channel.mailbox.get_last_timestamp()
log.debug('Channel [%s] - Getting new messages', channel.name)
msg_descs = self.client.list(query=channel.query, since=last_timestamp)
msg_descs.reverse() # ASC order, oldest first
log.debug('Channel [%s] - Fetching %s new messages', channel.name, len(msg_descs))
total = 0
for chunk in chunked(msg_descs, CHUNK_SIZE):
messages = self.client.fetch(chunk)
for message in messages:
channel.mailbox.add(message)
total += 1
log.debug('Channel [%s] - %s new messages stored', channel.name, total)
log.info('Channel [%s] - %s new messages synchronized', channel.name, total) | [
"logging.getLogger"
] | [((51, 81), 'logging.getLogger', 'logging.getLogger', (['"""gmailsync"""'], {}), "('gmailsync')\n", (68, 81), False, 'import logging\n')] |
from operator import itemgetter
class MaximumScoredNumber:
def getNumber(self, lowerBound, upperBound):
def score(n):
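            # Counts unordered pairs (i, j) of non-negative integers with
            # i**2 + j**2 == n, e.g. score(50) == 2 since 50 = 1+49 = 25+25.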
c, s = 0, set()
for i in xrange(int(n ** 0.5) + 1):
j = (n - i ** 2) ** 0.5
if j == int(j) and i not in s:
s.add(j)
c += 1
return c
return sorted(
[(i, score(i)) for i in xrange(lowerBound, upperBound + 1)],
key=itemgetter(1, 0),
reverse=True,
)[0][0]
| [
"operator.itemgetter"
] | [((485, 501), 'operator.itemgetter', 'itemgetter', (['(1)', '(0)'], {}), '(1, 0)\n', (495, 501), False, 'from operator import itemgetter\n')] |
# Copyright (c) 2020 Lightricks. All rights reserved.
import imageio
from os.path import join, split, isdir, isfile
from os import listdir, makedirs, remove
import numpy as np
import cv2
def put_text(img, text, location=(10, 50)):
    # location is an (x, y) text origin; negative values anchor relative to the
    # opposite edge. img.shape is (height, width, ...), so x wraps by shape[1]
    # and y by shape[0].
    if location[0] < 0:
        location = (location[0] + img.shape[1], location[1])
    if location[1] < 0:
        location = (location[0], location[1] + img.shape[0])
    font = cv2.FONT_HERSHEY_SIMPLEX
    bottomLeftCornerOfText = location
    fontScale = 1
    fontColor = (255, 255, 255)
    thickness = 2  # cv2.putText's 7th positional argument is thickness, not lineType
    cv2.putText(img, text,
                bottomLeftCornerOfText,
                font,
                fontScale,
                fontColor,
                thickness)
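# e.g. (arguments illustrative):
#   put_text(frame, 'model A')             -> label near the top-left corner
#   put_text(frame, 'model B', (-300, 50)) -> label anchored to the right edge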
def merge_videos(src1, src2, name1, name2, dst):
reader1 = imageio.get_reader(src1)
reader2 = imageio.get_reader(src2)
fps = reader1.get_meta_data()['fps']
writer = imageio.get_writer(dst, fps=fps)
for f1, f2 in zip(reader1, reader2):
put_text(f1, name1)
put_text(f2, name2)
f_merged = np.concatenate([f1, f2], axis=1)
writer.append_data(f_merged)
writer.close()
def merge_videos_dir(path1, path2, dst_path):
name1 = split(path1)[1]
name2 = split(path2)[1]
if not isdir(dst_path):
makedirs(dst_path)
for f_name in listdir(path1):
if f_name not in listdir(path2) or not f_name.endswith('.mp4'):
continue
src1 = join(path1, f_name)
src2 = join(path2, f_name)
dst = join(dst_path, f_name)
try:
merge_videos(src1, src2, name1, name2, dst)
        except Exception as exc:
            print('Error in:', f_name, '-', exc)
if isfile(dst):
remove(dst)
path1 = '/Users/ravid/Pictures/face_videos/YOLACT/output/orig_yolact'
path2 = '/Users/ravid/Pictures/face_videos/YOLACT/output/person_yolact'
dst = '/Users/ravid/Pictures/face_videos/YOLACT/output/tolact_orig_vs_person'
merge_videos_dir(path1, path2, dst) | [
"os.listdir",
"os.makedirs",
"os.path.join",
"cv2.putText",
"os.path.split",
"os.path.isfile",
"os.remove",
"os.path.isdir",
"numpy.concatenate",
"imageio.get_writer",
"imageio.get_reader"
] | [((550, 638), 'cv2.putText', 'cv2.putText', (['img', 'text', 'bottomLeftCornerOfText', 'font', 'fontScale', 'fontColor', 'lineType'], {}), '(img, text, bottomLeftCornerOfText, font, fontScale, fontColor,\n lineType)\n', (561, 638), False, 'import cv2\n'), ((780, 804), 'imageio.get_reader', 'imageio.get_reader', (['src1'], {}), '(src1)\n', (798, 804), False, 'import imageio\n'), ((819, 843), 'imageio.get_reader', 'imageio.get_reader', (['src2'], {}), '(src2)\n', (837, 843), False, 'import imageio\n'), ((898, 930), 'imageio.get_writer', 'imageio.get_writer', (['dst'], {'fps': 'fps'}), '(dst, fps=fps)\n', (916, 930), False, 'import imageio\n'), ((1314, 1328), 'os.listdir', 'listdir', (['path1'], {}), '(path1)\n', (1321, 1328), False, 'from os import listdir, makedirs, remove\n'), ((1048, 1080), 'numpy.concatenate', 'np.concatenate', (['[f1, f2]'], {'axis': '(1)'}), '([f1, f2], axis=1)\n', (1062, 1080), True, 'import numpy as np\n'), ((1197, 1209), 'os.path.split', 'split', (['path1'], {}), '(path1)\n', (1202, 1209), False, 'from os.path import join, split, isdir, isfile\n'), ((1225, 1237), 'os.path.split', 'split', (['path2'], {}), '(path2)\n', (1230, 1237), False, 'from os.path import join, split, isdir, isfile\n'), ((1252, 1267), 'os.path.isdir', 'isdir', (['dst_path'], {}), '(dst_path)\n', (1257, 1267), False, 'from os.path import join, split, isdir, isfile\n'), ((1277, 1295), 'os.makedirs', 'makedirs', (['dst_path'], {}), '(dst_path)\n', (1285, 1295), False, 'from os import listdir, makedirs, remove\n'), ((1438, 1457), 'os.path.join', 'join', (['path1', 'f_name'], {}), '(path1, f_name)\n', (1442, 1457), False, 'from os.path import join, split, isdir, isfile\n'), ((1473, 1492), 'os.path.join', 'join', (['path2', 'f_name'], {}), '(path2, f_name)\n', (1477, 1492), False, 'from os.path import join, split, isdir, isfile\n'), ((1507, 1529), 'os.path.join', 'join', (['dst_path', 'f_name'], {}), '(dst_path, f_name)\n', (1511, 1529), False, 'from os.path import join, split, isdir, isfile\n'), ((1355, 1369), 'os.listdir', 'listdir', (['path2'], {}), '(path2)\n', (1362, 1369), False, 'from os import listdir, makedirs, remove\n'), ((1669, 1680), 'os.path.isfile', 'isfile', (['dst'], {}), '(dst)\n', (1675, 1680), False, 'from os.path import join, split, isdir, isfile\n'), ((1698, 1709), 'os.remove', 'remove', (['dst'], {}), '(dst)\n', (1704, 1709), False, 'from os import listdir, makedirs, remove\n')] |
import boto3
import vcr
from botocore.exceptions import ClientError
from dlkit.runtime.primordium import DataInputStream, Type, Id, DateTime
from dlkit.runtime.configs import S3_TEST_BUCKET, S3_TEST_PUBLIC_KEY,\
S3_TEST_PRIVATE_KEY, CLOUDFRONT_TEST_DISTRO,\
CLOUDFRONT_SIGNING_KEYPAIR_ID
from dlkit.runtime import RUNTIME, PROXY_SESSION
from .utilities.testing import DLKitTestCase, TEST_REPOSITORY_GENUS
def simple_matcher(r1, r2):
# https://github.com/kevin1024/vcrpy/blob/master/docs/advanced.rst
return r1.uri == r2.uri and r1.method == r2.method
aws_test_recorder = vcr.VCR()
aws_test_recorder.register_matcher('simple', simple_matcher)
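# cassettes replay recorded HTTP interactions so these tests do not hit live AWS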
class AWSAdapterTests(DLKitTestCase):
def _get_aws_manager(self, manager_type):
condition = PROXY_SESSION.get_proxy_condition()
condition.set_http_request(self.req)
proxy = PROXY_SESSION.get_proxy(condition)
return RUNTIME.get_service_manager(manager_type.upper(),
implementation='TEST_SERVICE_AWS',
proxy=proxy)
def _get_test_repository(self):
rm = self._get_aws_manager('repository')
querier = rm.get_repository_query()
querier.match_genus_type(TEST_REPOSITORY_GENUS, True)
repo = next(rm.get_repositories_by_query(querier))
return rm.get_repository(repo.ident) # to make sure we get a services repository
def create_asset_with_content(self):
form = self._repo.get_asset_form_for_create([])
form.display_name = 'My new asset'
form.description = 'Asset container for'
new_asset = self._repo.create_asset(form)
asset_content_types = []
try:
config = self._repo._osid_object._runtime.get_configuration()
parameter_id = Id('parameter:assetContentRecordTypeForFiles@json')
asset_content_types.append(
config.get_value_by_parameter(parameter_id).get_type_value())
except AttributeError:
pass
content_form = self._repo.get_asset_content_form_for_create(new_asset.ident,
asset_content_types)
blob = DataInputStream(self.test_file1)
content_form.set_data(blob)
self._repo.create_asset_content(content_form)
asset = self._repo.get_asset(new_asset.ident)
return asset
def is_cloudfront_url(self, _url):
self.assertIn(
'https://{0}/'.format(CLOUDFRONT_TEST_DISTRO),
_url
)
expected_params = ['?Expires=',
'&Signature=',
'&Key-Pair-Id={0}'.format(CLOUDFRONT_SIGNING_KEYPAIR_ID)]
for param in expected_params:
self.assertIn(
param,
_url
)
def s3_file_exists(self, key):
client = boto3.client(
's3',
aws_access_key_id=S3_TEST_PUBLIC_KEY,
aws_secret_access_key=S3_TEST_PRIVATE_KEY
)
try:
client.get_object(
Bucket=S3_TEST_BUCKET,
Key=key
)
except ClientError as ex:
if ex.response['Error']['Code'] == 'NoSuchKey':
return False
raise ex
return True
@aws_test_recorder.use_cassette('tests/fixtures/vcr_cassettes/aws/AWSAdapterTests/setUp.yaml',
record_mode='new_episodes',
match_on=['simple'],
filter_headers=['authorization'])
def setUp(self):
super(AWSAdapterTests, self).setUp()
self._repo = self._get_test_repository()
self._asset = self.create_asset_with_content()
def tearDown(self):
"""
Remove the test user from all groups in Membership
Start from the smallest groupId because need to
remove "parental" roles like for DepartmentAdmin / DepartmentOfficer
"""
super(AWSAdapterTests, self).tearDown()
@aws_test_recorder.use_cassette('tests/fixtures/vcr_cassettes/aws/AWSAdapterTests/test_repository_assets_put_into_s3.yaml',
record_mode='new_episodes',
match_on=['simple'],
filter_headers=['authorization'])
def test_repository_assets_put_into_s3(self):
expected_filekey = self._repo.ident.identifier + '/' + self.test_file1.name.split('/')[-1]
self.assertTrue(self.s3_file_exists(expected_filekey))
def test_repository_assets_return_cloudfront_url_when_queried(self):
asset_content = next(self._asset.get_asset_contents())
url = asset_content.get_url()
self.is_cloudfront_url(url)
@aws_test_recorder.use_cassette(
'tests/fixtures/vcr_cassettes/aws/AWSAdapterTests/test_s3_files_deleted_when_asset_content_deleted.yaml',
record_mode='new_episodes',
match_on=['simple'],
filter_headers=['authorization'])
def test_s3_files_deleted_when_asset_content_deleted(self):
expected_filekey = self._repo.ident.identifier + '/' + self.test_file1.name.split('/')[-1]
# self.assertTrue(self.s3_file_exists(expected_filekey))
asset_content = next(self._asset.get_asset_contents())
self._repo.delete_asset_content(asset_content.ident)
self.assertFalse(self.s3_file_exists(expected_filekey))
| [
"dlkit.runtime.PROXY_SESSION.get_proxy_condition",
"boto3.client",
"dlkit.runtime.primordium.DataInputStream",
"dlkit.runtime.primordium.Id",
"vcr.VCR",
"dlkit.runtime.PROXY_SESSION.get_proxy"
] | [((669, 678), 'vcr.VCR', 'vcr.VCR', ([], {}), '()\n', (676, 678), False, 'import vcr\n'), ((846, 881), 'dlkit.runtime.PROXY_SESSION.get_proxy_condition', 'PROXY_SESSION.get_proxy_condition', ([], {}), '()\n', (879, 881), False, 'from dlkit.runtime import RUNTIME, PROXY_SESSION\n'), ((943, 977), 'dlkit.runtime.PROXY_SESSION.get_proxy', 'PROXY_SESSION.get_proxy', (['condition'], {}), '(condition)\n', (966, 977), False, 'from dlkit.runtime import RUNTIME, PROXY_SESSION\n'), ((2317, 2349), 'dlkit.runtime.primordium.DataInputStream', 'DataInputStream', (['self.test_file1'], {}), '(self.test_file1)\n', (2332, 2349), False, 'from dlkit.runtime.primordium import DataInputStream, Type, Id, DateTime\n'), ((3012, 3115), 'boto3.client', 'boto3.client', (['"""s3"""'], {'aws_access_key_id': 'S3_TEST_PUBLIC_KEY', 'aws_secret_access_key': 'S3_TEST_PRIVATE_KEY'}), "('s3', aws_access_key_id=S3_TEST_PUBLIC_KEY,\n aws_secret_access_key=S3_TEST_PRIVATE_KEY)\n", (3024, 3115), False, 'import boto3\n'), ((1908, 1959), 'dlkit.runtime.primordium.Id', 'Id', (['"""parameter:assetContentRecordTypeForFiles@json"""'], {}), "('parameter:assetContentRecordTypeForFiles@json')\n", (1910, 1959), False, 'from dlkit.runtime.primordium import DataInputStream, Type, Id, DateTime\n')] |
#==============================================================================
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==============================================================================
import os
import ConfigParser
import logging
from aws.cfn.bridge.resources import CustomResource
# Construct a logger to write messages about bridges
log = logging.getLogger("cfn.resourcebridge")
def _parse_config(config_file):
"""Parses the provided configuration; returns list of sections
When provided with a valid configuration file, will load all of the sections and return a list of
CustomResources that match the provided configuration. It is assumed the file was already checked
for existence before being passed in.
"""
config = ConfigParser.SafeConfigParser()
config.read(config_file)
resources = []
for resource_name in config.sections():
# Convert configuration options into dictionary (lowercasing all keys)
options = dict((i[0].lower(), i[1]) for i in config.items(resource_name))
# Construct a new CustomResource with the provided configuration
resources.append(CustomResource(resource_name, config_file, options))
return resources
def _parse_configurations(config_files):
"""Parses the provided configurations; returns a list of CustomResources
Iterates over the list of configuration files and creates a list of CustomResources matching
the sections in the configurations. It is assumed the files were already checked for existence.
"""
resources = []
# Iterate through the config files and try to parse them
for bridge_file in config_files:
# Attempt to parse the configuration
resources += _parse_config(bridge_file)
return resources
def load_resources_from_configuration(config_dir):
"""Locates and parses configuration files
Given a configuration directory, reads in the cfn-resource-bridge.conf file
and any configurations under the bridge.d/ directory. It requires at least
one configuration file to exist.
"""
config_file = os.path.join(config_dir, 'cfn-resource-bridge.conf')
bridge_files = []
# Add the default configuration file if it exists
if os.path.isfile(config_file):
bridge_files.append(config_file)
# Add any bridge hook files, if they exist
bridges_dir = os.path.join(config_dir, 'bridge.d')
if os.path.isdir(bridges_dir):
for hook_file in os.listdir(bridges_dir):
if os.path.isfile(os.path.join(bridges_dir, hook_file)) and hook_file.endswith('.conf'):
bridge_files.append(os.path.join(bridges_dir, hook_file))
# If we can't find any bridge files, error out.
if not bridge_files:
raise ValueError(u"Could not find default configuration file, %s, or additional"
u" configurations in the %s directory"
% (config_file, bridges_dir))
# Load our configurations and get the custom resource definitions
resources = _parse_configurations(bridge_files)
# Fail if we have not found any custom resources.
if not resources:
raise ValueError(u"No resources were defined in (%s)" % bridge_files)
return resources | [
"logging.getLogger",
"os.listdir",
"aws.cfn.bridge.resources.CustomResource",
"ConfigParser.SafeConfigParser",
"os.path.join",
"os.path.isfile",
"os.path.isdir"
] | [((938, 977), 'logging.getLogger', 'logging.getLogger', (['"""cfn.resourcebridge"""'], {}), "('cfn.resourcebridge')\n", (955, 977), False, 'import logging\n'), ((1347, 1378), 'ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (1376, 1378), False, 'import ConfigParser\n'), ((2687, 2739), 'os.path.join', 'os.path.join', (['config_dir', '"""cfn-resource-bridge.conf"""'], {}), "(config_dir, 'cfn-resource-bridge.conf')\n", (2699, 2739), False, 'import os\n'), ((2824, 2851), 'os.path.isfile', 'os.path.isfile', (['config_file'], {}), '(config_file)\n', (2838, 2851), False, 'import os\n'), ((2960, 2996), 'os.path.join', 'os.path.join', (['config_dir', '"""bridge.d"""'], {}), "(config_dir, 'bridge.d')\n", (2972, 2996), False, 'import os\n'), ((3004, 3030), 'os.path.isdir', 'os.path.isdir', (['bridges_dir'], {}), '(bridges_dir)\n', (3017, 3030), False, 'import os\n'), ((3057, 3080), 'os.listdir', 'os.listdir', (['bridges_dir'], {}), '(bridges_dir)\n', (3067, 3080), False, 'import os\n'), ((1732, 1783), 'aws.cfn.bridge.resources.CustomResource', 'CustomResource', (['resource_name', 'config_file', 'options'], {}), '(resource_name, config_file, options)\n', (1746, 1783), False, 'from aws.cfn.bridge.resources import CustomResource\n'), ((3112, 3148), 'os.path.join', 'os.path.join', (['bridges_dir', 'hook_file'], {}), '(bridges_dir, hook_file)\n', (3124, 3148), False, 'import os\n'), ((3219, 3255), 'os.path.join', 'os.path.join', (['bridges_dir', 'hook_file'], {}), '(bridges_dir, hook_file)\n', (3231, 3255), False, 'import os\n')] |
from django.utils.safestring import mark_safe
from django.conf import settings
from wagtail.admin.models import Page
from wagtail.core import hooks
class QuickLinksPanel:
order = 100
def render(self):
        # read the panel template; the context manager closes the file handle
        with open(
                settings.BASE_DIR + '/ckanorg/templates/snippets/admin_home_quick_links_block.html') as raw:
            html = raw.read()
blog_page_id = Page.objects.get(title="Blog").id
contact_us_page_id = Page.objects.get(title="Contact Us").id
return mark_safe(html.format(blog_page_id, contact_us_page_id))
@hooks.register('construct_homepage_panels')
def add_another_welcome_panel(request, panels):
panels.append(QuickLinksPanel())
| [
"wagtail.admin.models.Page.objects.get",
"wagtail.core.hooks.register"
] | [((596, 639), 'wagtail.core.hooks.register', 'hooks.register', (['"""construct_homepage_panels"""'], {}), "('construct_homepage_panels')\n", (610, 639), False, 'from wagtail.core import hooks\n'), ((419, 449), 'wagtail.admin.models.Page.objects.get', 'Page.objects.get', ([], {'title': '"""Blog"""'}), "(title='Blog')\n", (435, 449), False, 'from wagtail.admin.models import Page\n'), ((482, 518), 'wagtail.admin.models.Page.objects.get', 'Page.objects.get', ([], {'title': '"""Contact Us"""'}), "(title='Contact Us')\n", (498, 518), False, 'from wagtail.admin.models import Page\n')] |
"""
Module providing the deployment operation.
"""
from deployable.deployment.defaults import default_work_path # For default config options
from deployable.deployment.stage.stage import Stage # For stage
from deployable.deployment.stage.echo import Echo # For stage
from deployable.report.error import report_error # Error reporting
from typing import Any, Dict, List # For typing
class Deployment:
    """
    Class for the deployment process built from a deployment config.
    """
    def __init__(self, config: Dict[str, Any], system: Dict[str, str]) -> None:
        """
        Creates an instance of Deployment.
        """
        # Per-instance state (class-level mutable defaults would be shared
        # between instances, so everything is initialized here)
        self.config: Dict[str, Any] = dict()
        self.lifecycle: bool = True
        self.name: str = ""
        self.stage: List[Stage] = list()
        try:
            # Initialize the mapping sections, falling back to empty dicts
            for key in ("const", "options", "arg", "var"):
                value = config.get(key)
                if value is None:
                    self.config[key] = dict()
                elif isinstance(value, dict):
                    self.config[key] = value
                else:
                    raise TypeError
            # Initialize alias (a list, unlike the mapping sections)
            alias = config.get("alias")
            if alias is None:
                self.config["alias"] = list()
            elif isinstance(alias, list):
                self.config["alias"] = alias
            else:
                raise TypeError
            # Ensure a working directory is always configured
            if "work_dir" not in self.config["options"]:
                self.config["options"]["work_dir"] = default_work_path
            # Initialize system
            self.config["system"] = system
            # Create stages; a deployment without stages is invalid
            if "stage" not in config:
                raise ValueError
            if not isinstance(config["stage"], list):
                raise TypeError
            for stage in config["stage"]:
                # Verify the consistency of data to satisfy the stage
                if not isinstance(stage, dict):
                    raise TypeError
                # Append to self
                self.stage.append(Stage(stage["echo"]))
        except TypeError:
            self.lifecycle = False
            report_error("arg", "the deployment arguments failed the sanity test by type")
        except ValueError:
            self.lifecycle = False
            report_error("arg", "the deployment arguments failed the sanity test by value")
# Validator, that fails by default
def validate(self, id: str) -> bool:
"""
Dummy validator, fails by default.
To be replaced by classes extending it.
"""
return False
| [
"deployable.report.error.report_error",
"deployable.deployment.stage.stage.Stage"
] | [((2454, 2532), 'deployable.report.error.report_error', 'report_error', (['"""arg"""', '"""the deployment arguments failed the sanity test by type"""'], {}), "('arg', 'the deployment arguments failed the sanity test by type')\n", (2466, 2532), False, 'from deployable.report.error import report_error\n'), ((2583, 2662), 'deployable.report.error.report_error', 'report_error', (['"""arg"""', '"""the deployment arguments failed the sanity test by value"""'], {}), "('arg', 'the deployment arguments failed the sanity test by value')\n", (2595, 2662), False, 'from deployable.report.error import report_error\n'), ((2322, 2342), 'deployable.deployment.stage.stage.Stage', 'Stage', (["stage['echo']"], {}), "(stage['echo'])\n", (2327, 2342), False, 'from deployable.deployment.stage.stage import Stage\n')] |
import pandas as pd
import numpy as np
import re
import os
import glob
import matplotlib.pyplot as plt
from sklearn.naive_bayes import MultinomialNB
from sklearn.model_selection import train_test_split
from sklearn.externals import joblib
result = pd.read_csv('./disease-symptom-db.csv', encoding='utf-8', index_col=None, header=0)
def isValid(cui):
cui = str(cui)
pattern = re.compile("C\\d{7}")
if not pattern.match(cui):
return False
return True
def cuiToNumber(cui):
return cui.strip("C").strip("0")
def convertCUI(cui):
cui = str(cui)
if not isValid(cui):
return "C" + cui.zfill(7)
else:
return cui
def clean(the_string):
return str(the_string.encode('utf-8'))
result['Disease'] = result['Disease'].apply(convertCUI)
result['Symptom'] = result['Symptom'].apply(convertCUI)
result.to_csv("./disease-symptom-db.csv",index=False)
df_foreign = pd.read_csv('./DiseaseSymptomKB.csv', encoding='utf-8', index_col=None, header=0)
result = result.append(df_foreign)
result.to_csv("./disease-symptom-merged.csv",index=False)
result['Disease'] = result['Disease'].astype(str)
result['Symptom'] = result['Symptom'].astype(str)
result['Symptom'].replace('', np.nan, inplace=True)
result.dropna(subset=['Symptom'], inplace=True)
result['Disease'].replace('', np.nan, inplace=True)
result.dropna(subset=['Disease'], inplace=True)
df = pd.DataFrame(result)
df_1 = pd.get_dummies(df.Symptom)
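# df_1: one binary indicator column per symptom (one-hot encoding)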
df_s = df['Disease']
df_pivoted = pd.concat([df_s,df_1], axis=1)
df_pivoted.drop_duplicates(keep='first',inplace=True)
cols = df_pivoted.columns
cols = cols[1:] # skip 'Disease'
df_pivoted = df_pivoted.groupby('Disease').sum()
df_pivoted = df_pivoted.reset_index()
all_files_ml = "./data/all-files-for-ml"
df_pivoted.to_csv(os.path.join(all_files_ml, "all_pivoted.csv"), index=False)
cols = df_pivoted.columns
cols = cols[1:] # skip 'title'
x = df_pivoted[cols] # symptom rows
y = df_pivoted['Disease'] # diseases
x.to_csv(os.path.join(all_files_ml, "all_x.csv"), index=False)
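# hold out a third of the disease rows to estimate classifier accuracy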
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=42)
mnb = MultinomialNB()
mnb = mnb.fit(x_train, y_train)
print('test set accuracy:', mnb.score(x_test, y_test))
mnb_tot = MultinomialNB()
mnb_tot = mnb_tot.fit(x, y)
print('training set accuracy:', mnb_tot.score(x, y))
disease_pred = mnb_tot.predict(x)
disease_real = y.values
for i in range(0, len(disease_real)):
if disease_pred[i]!=disease_real[i]:
        print('Pred: {0} Actual: {1}'.format(disease_pred[i], disease_real[i]))
joblib.dump(mnb, os.path.join(all_files_ml, 'all_mnb.pkl'), protocol=2)
data = pd.read_csv(os.path.join(all_files_ml, "all_x.csv"))
df = pd.DataFrame(data)
cols = df.columns
features = cols # = symptoms
features_raw = [str(features[x]) for x in range(len(features))]
features_raw = ','.join(map(str, features_raw))
# convert feature array into dict of symptom: index
feature_dict = {}
for i,f in enumerate(features):
feature_dict[f] = i
def findFeatures(disease):
return result.loc[result['Disease'] == disease]["Symptom"].values.astype(str)
sample = np.zeros((len(features),), dtype=int)  # np.int is deprecated in modern numpy
sample = sample.tolist()
search = ["C0857794", "C0149793", "C0000786"]
for i,s in enumerate(search):
sample[feature_dict[s]] = 1
sample = np.array(sample).reshape(1,len(sample))
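# sample is now a single-row feature matrix, ready for predict_proba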
results = mnb.predict_proba(sample)[0]
# gets a dictionary of {'class_name': probability}
prob_per_class_dictionary = dict(zip(mnb.classes_, results))
# gets a list of ['most_probable_class', 'second_most_probable_class', ..., 'least_class']
results_ordered_by_probability = map(lambda x: {"disease": x[0],"prop": x[1] * 100, "sy": findFeatures(x[0])}, sorted(zip(mnb.classes_, results), key=lambda x: x[1], reverse=True))
print (list(results_ordered_by_probability))
# store the predicted probabilities for every class
y_pred_prob = mnb.predict_proba(sample)[0] | [
"pandas.read_csv",
"re.compile",
"sklearn.model_selection.train_test_split",
"os.path.join",
"pandas.get_dummies",
"numpy.array",
"sklearn.naive_bayes.MultinomialNB",
"pandas.DataFrame",
"pandas.concat"
] | [((272, 359), 'pandas.read_csv', 'pd.read_csv', (['"""./disease-symptom-db.csv"""'], {'encoding': '"""utf-8"""', 'index_col': 'None', 'header': '(0)'}), "('./disease-symptom-db.csv', encoding='utf-8', index_col=None,\n header=0)\n", (283, 359), True, 'import pandas as pd\n'), ((938, 1023), 'pandas.read_csv', 'pd.read_csv', (['"""./DiseaseSymptomKB.csv"""'], {'encoding': '"""utf-8"""', 'index_col': 'None', 'header': '(0)'}), "('./DiseaseSymptomKB.csv', encoding='utf-8', index_col=None,\n header=0)\n", (949, 1023), True, 'import pandas as pd\n'), ((1424, 1444), 'pandas.DataFrame', 'pd.DataFrame', (['result'], {}), '(result)\n', (1436, 1444), True, 'import pandas as pd\n'), ((1464, 1490), 'pandas.get_dummies', 'pd.get_dummies', (['df.Symptom'], {}), '(df.Symptom)\n', (1478, 1490), True, 'import pandas as pd\n'), ((1525, 1556), 'pandas.concat', 'pd.concat', (['[df_s, df_1]'], {'axis': '(1)'}), '([df_s, df_1], axis=1)\n', (1534, 1556), True, 'import pandas as pd\n'), ((2110, 2165), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.33)', 'random_state': '(42)'}), '(x, y, test_size=0.33, random_state=42)\n', (2126, 2165), False, 'from sklearn.model_selection import train_test_split\n'), ((2173, 2188), 'sklearn.naive_bayes.MultinomialNB', 'MultinomialNB', ([], {}), '()\n', (2186, 2188), False, 'from sklearn.naive_bayes import MultinomialNB\n'), ((2260, 2275), 'sklearn.naive_bayes.MultinomialNB', 'MultinomialNB', ([], {}), '()\n', (2273, 2275), False, 'from sklearn.naive_bayes import MultinomialNB\n'), ((2687, 2705), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (2699, 2705), True, 'import pandas as pd\n'), ((408, 429), 're.compile', 're.compile', (['"""C\\\\d{7}"""'], {}), "('C\\\\d{7}')\n", (418, 429), False, 'import re\n'), ((1820, 1865), 'os.path.join', 'os.path.join', (['all_files_ml', '"""all_pivoted.csv"""'], {}), "(all_files_ml, 'all_pivoted.csv')\n", (1832, 1865), False, 'import os\n'), ((2020, 2059), 'os.path.join', 'os.path.join', (['all_files_ml', '"""all_x.csv"""'], {}), "(all_files_ml, 'all_x.csv')\n", (2032, 2059), False, 'import os\n'), ((2565, 2606), 'os.path.join', 'os.path.join', (['all_files_ml', '"""all_mnb.pkl"""'], {}), "(all_files_ml, 'all_mnb.pkl')\n", (2577, 2606), False, 'import os\n'), ((2640, 2679), 'os.path.join', 'os.path.join', (['all_files_ml', '"""all_x.csv"""'], {}), "(all_files_ml, 'all_x.csv')\n", (2652, 2679), False, 'import os\n'), ((3297, 3313), 'numpy.array', 'np.array', (['sample'], {}), '(sample)\n', (3305, 3313), True, 'import numpy as np\n')] |
import connexion
import logging
import datetime
from connexion import NoContent
# our memory-only pet storage
PETS = {}
def get_pets(limit, animal_type=None):
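    # optionally filter by animal_type, then cap the result at 'limit' pets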
return {'pets': [pet for pet in PETS.values() if not animal_type or pet['animal_type'] == animal_type][:limit]}
def get_pet(pet_id):
pet = PETS.get(pet_id)
return pet or ('Not found', 404)
def put_pet(pet_id, pet):
exists = pet_id in PETS
pet['id'] = pet_id
if exists:
logging.info('Updating pet %s...', pet_id)
PETS[pet_id].update(pet)
else:
logging.info('Creating pet %s...', pet_id)
pet['created'] = datetime.datetime.utcnow()
PETS[pet_id] = pet
return NoContent, (200 if exists else 201)
def delete_pet(pet_id):
if pet_id in PETS:
logging.info('Deleting pet %s...', pet_id)
del PETS[pet_id]
return NoContent, 204
else:
return NoContent, 404
logging.basicConfig(level=logging.INFO)
app = connexion.FlaskApp(__name__, specification_dir='openapi/')
app.add_api('api.yaml')
# expose the underlying Flask app for WSGI servers
application = app.app
if __name__ == '__main__':
    app.run(port=8080)
"logging.basicConfig",
"logging.info",
"connexion.FlaskApp",
"datetime.datetime.utcnow"
] | [((869, 908), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (888, 908), False, 'import logging\n'), ((915, 973), 'connexion.FlaskApp', 'connexion.FlaskApp', (['__name__'], {'specification_dir': '"""openapi/"""'}), "(__name__, specification_dir='openapi/')\n", (933, 973), False, 'import connexion\n'), ((449, 491), 'logging.info', 'logging.info', (['"""Updating pet %s..."""', 'pet_id'], {}), "('Updating pet %s...', pet_id)\n", (461, 491), False, 'import logging\n'), ((533, 575), 'logging.info', 'logging.info', (['"""Creating pet %s..."""', 'pet_id'], {}), "('Creating pet %s...', pet_id)\n", (545, 575), False, 'import logging\n'), ((597, 623), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (621, 623), False, 'import datetime\n'), ((742, 784), 'logging.info', 'logging.info', (['"""Deleting pet %s..."""', 'pet_id'], {}), "('Deleting pet %s...', pet_id)\n", (754, 784), False, 'import logging\n')] |
import utime
import uos
from sonar import Ultrasonic
from buzzer import Buzzer
from temperature import celsius
# record the reset time and machine info at boot
with open('main.txt', 'w') as f:
f.write("{} at main.py said Hello World on {}".format(uos.uname().machine, utime.localtime()))
# take ur time
#utime.sleep(5)
# surprise! boot.py globals are visible in main (too)
# reconfigure the timer so the LED flashes a bit less often
timer.init(freq=1, mode=Timer.PERIODIC, callback=tick)
# instantiate all (3) distance sensors
uLeft = Ultrasonic(triggerPin=3, echoPin=2)
uRight = Ultrasonic(triggerPin=7, echoPin=6)
uCenter = Ultrasonic(triggerPin=11, echoPin=10)
hLeft = Buzzer(signalPin=18)
hRight = Buzzer(signalPin=13)
while True:
# start measuring
utime.sleep(0.100)
uCenter.measure()
utime.sleep(0.100)
uLeft.measure()
utime.sleep(0.100)
uRight.measure()
utime.sleep(0.100)
# sonars are ready
ucm = min(uLeft.cm, uCenter.cm, uRight.cm)
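    # beep on the side of the nearest obstacle; alert both buzzers if it is straight ahead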
if ucm == uLeft.cm: hLeft.beep(ucm)
if ucm == uRight.cm: hRight.beep(ucm)
if ucm == uCenter.cm:
hLeft.alert(ucm)
hRight.alert(ucm)
# a bit longer beep
utime.sleep(0.200)
hLeft.stop()
hRight.stop()
print("nearest obstacle detected at", ucm, "cm, air temperature cca", celsius(), "C")
print("measurements [in cm] from all existing sonars were", uLeft.cm, uCenter.cm, uRight.cm)
#utime.sleep(0.2)
| [
"utime.sleep",
"utime.localtime",
"buzzer.Buzzer",
"uos.uname",
"sonar.Ultrasonic",
"temperature.celsius"
] | [((523, 558), 'sonar.Ultrasonic', 'Ultrasonic', ([], {'triggerPin': '(3)', 'echoPin': '(2)'}), '(triggerPin=3, echoPin=2)\n', (533, 558), False, 'from sonar import Ultrasonic\n'), ((568, 603), 'sonar.Ultrasonic', 'Ultrasonic', ([], {'triggerPin': '(7)', 'echoPin': '(6)'}), '(triggerPin=7, echoPin=6)\n', (578, 603), False, 'from sonar import Ultrasonic\n'), ((614, 651), 'sonar.Ultrasonic', 'Ultrasonic', ([], {'triggerPin': '(11)', 'echoPin': '(10)'}), '(triggerPin=11, echoPin=10)\n', (624, 651), False, 'from sonar import Ultrasonic\n'), ((661, 681), 'buzzer.Buzzer', 'Buzzer', ([], {'signalPin': '(18)'}), '(signalPin=18)\n', (667, 681), False, 'from buzzer import Buzzer\n'), ((691, 711), 'buzzer.Buzzer', 'Buzzer', ([], {'signalPin': '(13)'}), '(signalPin=13)\n', (697, 711), False, 'from buzzer import Buzzer\n'), ((752, 768), 'utime.sleep', 'utime.sleep', (['(0.1)'], {}), '(0.1)\n', (763, 768), False, 'import utime\n'), ((797, 813), 'utime.sleep', 'utime.sleep', (['(0.1)'], {}), '(0.1)\n', (808, 813), False, 'import utime\n'), ((840, 856), 'utime.sleep', 'utime.sleep', (['(0.1)'], {}), '(0.1)\n', (851, 856), False, 'import utime\n'), ((884, 900), 'utime.sleep', 'utime.sleep', (['(0.1)'], {}), '(0.1)\n', (895, 900), False, 'import utime\n'), ((1188, 1204), 'utime.sleep', 'utime.sleep', (['(0.2)'], {}), '(0.2)\n', (1199, 1204), False, 'import utime\n'), ((1325, 1334), 'temperature.celsius', 'celsius', ([], {}), '()\n', (1332, 1334), False, 'from temperature import celsius\n'), ((259, 276), 'utime.localtime', 'utime.localtime', ([], {}), '()\n', (274, 276), False, 'import utime\n'), ((238, 249), 'uos.uname', 'uos.uname', ([], {}), '()\n', (247, 249), False, 'import uos\n')] |
import logging
import pandas as pd
def csv_to_dataframe(csv):
# read csv as pandas dataframe
df = pd.read_csv(csv)
    # log the column names as debug messages
logging.debug(df.keys())
return df
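# example usage (file name is illustrative):
# frame = csv_to_dataframe('measurements.csv')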
| [
"pandas.read_csv"
] | [((108, 124), 'pandas.read_csv', 'pd.read_csv', (['csv'], {}), '(csv)\n', (119, 124), True, 'import pandas as pd\n')] |
import tornado
import redis
import json
from random import shuffle, randint
from tornado.netutil import bind_sockets
from tornado.tcpserver import TCPServer
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from status_code import (
STATUS_NEW_GAME,
STATUS_GAME_OVER,
STATUS_GET_INIT_CARDS,
STATUS_EXCHANGE_CARDS,
STATUS_PLAY_CARD,
STATUS_GET_CARD,
STATUS_WAIT_FOR_INIT_CARDS,
STATUS_WAIT_FOR_EXCHANGE_CARDS,
STATUS_WAIT_FOR_GET_CARD,
RESPONSE_GAME_OVER,
RESPONSE_GAME_OVER_NO_CARD,
RESPONSE_PLAY_CARD,
RESPONSE_GET_SELF_CARD,
RESPONSE_GET_OTHER_CARD
)
class EchoServer(TCPServer):
def __init__(self):
super(EchoServer, self).__init__()
self.redis = self.get_redis_connection()
def hset_redis(self, name, key, value):
self.redis.hset(name, key, json.dumps(value))
def hget_redis(self, name, key):
if self.redis.hget(name, key):
return json.loads(self.redis.hget(name, key))
else:
return None
def set_redis(self, key, value):
self.redis.set(key, json.dumps(value))
def get_redis(self, key):
if self.redis.get(key):
return json.loads(self.redis.get(key))
else:
return None
def get_redis_connection(self):
pool = redis.ConnectionPool(
host='127.0.0.1', port=6379
)
r = redis.Redis(connection_pool=pool)
return r
    def get_current_table(self):
        table_num = self.get_redis('table_num')
        return table_num if table_num else 1
    def get_current_num(self):
        seat_num = self.get_redis('seat_num')
        return seat_num if seat_num else 1
def apply_table(self):
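        # allocate the next free seat; open a new table after every fourth player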
table_num = self.get_redis('table_num')
seat_num = self.get_redis('seat_num')
if not table_num:
table_num = 1
seat_num = 1
else:
if seat_num == 4:
table_num += 1
seat_num = 1
else:
seat_num += 1
self.set_redis('table_num', table_num)
self.set_redis('seat_num', seat_num)
return table_num, seat_num
def shuffle_cards(self, table_number):
        # shuffle the deck and deal the initial cards to the clients
table = str(table_number)
cards_list = [i + 1 for i in range(108)]
shuffle(cards_list)
cards = dict()
for i in range(53):
key = str(i % 4 + 1)
if key not in cards:
cards[key] = [cards_list[i]]
else:
cards[key].append(cards_list[i])
self.hset_redis('cards', table, cards)
self.hset_redis('left_cards', table, cards_list[53:])
def pop_card(self, table, num, card_list):
for one in card_list:
one = int(one)
cards = self.hget_redis('cards', str(table))
if one in cards[str(num)]:
cards[str(num)].remove(one)
self.hset_redis('cards', str(table), cards)
else:
print(str(one) + "is not in list")
def push_card(self, table, num, card_list):
cards = self.hget_redis('cards', str(table))
for one in card_list:
cards[str(num)].append(int(one))
self.hset_redis('cards', str(table), cards)
def exchange_cards(self, table, table_exchange_cards, exchange_mode):
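        # remove each player's selected cards, then hand them to the neighbour chosen by exchange_mode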
for one in table_exchange_cards:
self.pop_card(str(table), one, table_exchange_cards[one])
for one in table_exchange_cards:
exchange_num = (int(one) + exchange_mode) % 4
if exchange_num == 0:
exchange_num = 4
self.push_card(str(table), str(exchange_num), table_exchange_cards[one])
self.hset_redis('table_exchange_status', table, 1)
async def response_shuffle_cards(self, stream):
table_number, seat_number = self.apply_table()
if seat_number == 4:
self.shuffle_cards(table_number)
await stream.write(
bytes(
str(table_number) + " " + str(seat_number),
encoding="utf8"
)
)
async def response_init_cards(self, stream, data_list):
table = data_list[1]
num = data_list[2]
current_table = self.get_current_table()
current_num = self.get_current_num()
cards = self.hget_redis('cards', table)
if not cards:
await stream.write(
bytes(STATUS_WAIT_FOR_INIT_CARDS, encoding="utf8")
)
else:
if current_table > int(table) or \
(current_table == int(table) and current_num == 4):
cards = str(cards[str(num)])
await stream.write(bytes(cards, encoding="utf8"))
else:
await stream.write(
bytes(STATUS_WAIT_FOR_INIT_CARDS, encoding="utf8")
)
async def response_exchange_cards(self, stream, data_list):
table = data_list[1]
num = data_list[2]
exchange_mode = 0
table_exchange_mode = self.hget_redis('table_exchange_mode', table)
table_exchange_cards = self.hget_redis('table_exchange_cards', table)
table_exchange_status = self.hget_redis('table_exchange_status', table)
if table_exchange_mode:
exchange_mode = table_exchange_mode
else:
exchange_mode = randint(1, 3)
self.hset_redis('table_exchange_mode', table, exchange_mode)
if not table_exchange_cards:
table_exchange_cards = {}
table_exchange_cards[str(num)] = [data_list[i + 3] for i in range(3)]
self.hset_redis('table_exchange_cards', table, table_exchange_cards)
else:
if str(num) not in table_exchange_cards:
table_exchange_cards[str(num)] = [data_list[i + 3] for i in range(3)]
self.hset_redis('table_exchange_cards', table, table_exchange_cards)
if len(table_exchange_cards) == 4 and table_exchange_status != 1:
self.exchange_cards(table, table_exchange_cards, exchange_mode)
if table_exchange_status != 1:
await stream.write(bytes(STATUS_WAIT_FOR_EXCHANGE_CARDS, encoding="utf8"))
else:
exchange_num = (int(num) + 4 - exchange_mode) % 4
if exchange_num == 0:
exchange_num = 4
            exchange_list = table_exchange_cards[str(exchange_num)]
            exchange_list = [str(one) for one in exchange_list]
            await stream.write(bytes(','.join(exchange_list), encoding="utf8"))
async def response_play_card(self, stream, data_list):
table = data_list[1]
num = data_list[2]
card = int(data_list[3])
cards = self.hget_redis('cards', table)
cards[str(num)].remove(card)
self.hset_redis('cards', table, cards)
self.hset_redis('table_last_hand', table, num)
self.hset_redis('table_last_turn', table, card)
get_status_list = [i + 1 for i in range(4) if i + 1 != int(num)]
self.hset_redis('table_last_hand_status', table, get_status_list)
await stream.write(bytes(RESPONSE_PLAY_CARD, encoding="utf8"))
async def response_get_card(self, stream, data_list):
table = data_list[1]
num = data_list[2]
left_cards = self.hget_redis('left_cards', table)
table_last_hand_status = self.hget_redis('table_last_hand_status', table)
if not table_last_hand_status:
if not left_cards:
await stream.write(bytes(RESPONSE_GAME_OVER_NO_CARD, encoding="utf8"))
else:
card = left_cards[0]
left_cards = left_cards[1:]
table_last_hand = self.hget_redis('table_last_hand', table)
table_last_hand = int(table_last_hand) if table_last_hand else 0
table_last_hand += 1
if table_last_hand == 5:
table_last_hand = 1
# self.hset_redis('table_last_hand', table, table_last_hand)
if int(num) == table_last_hand:
cards = self.hget_redis('cards', table)
cards[str(num)].append(card)
self.hset_redis('cards', table, cards)
self.hset_redis('left_cards', table, left_cards)
res_str = RESPONSE_GET_SELF_CARD + " " + str(card)
await stream.write(bytes(res_str, encoding="utf8"))
else:
await stream.write(bytes(STATUS_WAIT_FOR_GET_CARD, encoding="utf8"))
else:
num = int(num)
table_last_turn = self.hget_redis('table_last_turn', table)
if table_last_hand_status and num in table_last_hand_status:
table_last_hand_status.remove(num)
self.hset_redis(
'table_last_hand_status', table, table_last_hand_status)
res_str = RESPONSE_GET_OTHER_CARD + " " + str(table_last_turn)
await stream.write(bytes(res_str, encoding="utf8"))
else:
await stream.write(bytes(STATUS_WAIT_FOR_GET_CARD, encoding="utf8"))
async def handle_stream(self, stream, address):
while True:
try:
data = await stream.read_until(b"\n")
data_str = str(data, encoding="utf8")
data_list = data_str.strip().split(' ')
if data_list[0] == STATUS_NEW_GAME:
                    # request a table and seat assignment; shuffle the deck
await self.response_shuffle_cards(stream)
elif data_list[0] == STATUS_GET_INIT_CARDS:
                    # deal cards once the table is full; otherwise wait
await self.response_init_cards(stream, data_list)
elif data_list[0] == STATUS_EXCHANGE_CARDS:
                    # exchange three cards
await self.response_exchange_cards(stream, data_list)
elif data_list[0] == STATUS_GAME_OVER:
                    # somebody has won; the game is over
# table = data_list[1]
# num = data_list[2]
await stream.write(bytes(RESPONSE_GAME_OVER, encoding="utf8"))
elif data_list[0] == STATUS_PLAY_CARD:
                    # play a card
await self.response_play_card(stream, data_list)
elif data_list[0] == STATUS_GET_CARD:
                    # push the played card to the user
await self.response_get_card(stream, data_list)
else:
pass
except StreamClosedError:
break
if __name__ == '__main__':
sockets = bind_sockets(8888)
tornado.process.fork_processes(0)
server = EchoServer()
server.add_sockets(sockets)
IOLoop.current().start()
| [
"random.shuffle",
"json.dumps",
"redis.ConnectionPool",
"tornado.ioloop.IOLoop.current",
"redis.Redis",
"tornado.netutil.bind_sockets",
"random.randint",
"tornado.process.fork_processes"
] | [((10717, 10735), 'tornado.netutil.bind_sockets', 'bind_sockets', (['(8888)'], {}), '(8888)\n', (10729, 10735), False, 'from tornado.netutil import bind_sockets\n'), ((10740, 10773), 'tornado.process.fork_processes', 'tornado.process.fork_processes', (['(0)'], {}), '(0)\n', (10770, 10773), False, 'import tornado\n'), ((1349, 1398), 'redis.ConnectionPool', 'redis.ConnectionPool', ([], {'host': '"""127.0.0.1"""', 'port': '(6379)'}), "(host='127.0.0.1', port=6379)\n", (1369, 1398), False, 'import redis\n'), ((1433, 1466), 'redis.Redis', 'redis.Redis', ([], {'connection_pool': 'pool'}), '(connection_pool=pool)\n', (1444, 1466), False, 'import redis\n'), ((2372, 2391), 'random.shuffle', 'shuffle', (['cards_list'], {}), '(cards_list)\n', (2379, 2391), False, 'from random import shuffle, randint\n'), ((868, 885), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (878, 885), False, 'import json\n'), ((1126, 1143), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (1136, 1143), False, 'import json\n'), ((5459, 5472), 'random.randint', 'randint', (['(1)', '(3)'], {}), '(1, 3)\n', (5466, 5472), False, 'from random import shuffle, randint\n'), ((10836, 10852), 'tornado.ioloop.IOLoop.current', 'IOLoop.current', ([], {}), '()\n', (10850, 10852), False, 'from tornado.ioloop import IOLoop\n')] |
#!/usr/bin/env python
# Created: Sun Dec 3 13:38:52 2000
# Last changed: Time-stamp: <01/09/04 09:51:21 thomas>
# <EMAIL>, http://www.cbs.dtu.dk/thomas
# File: xbb_search.py
import re
import os, sys, commands
sys.path.insert(0, '.')
from Tkinter import *
from tkColorChooser import askcolor
from Bio.Data.IUPACData import ambiguous_dna_values
from Bio.Seq import reverse_complement
class DNAsearch:
def __init__(self):
self.init_alphabet()
self.sequence = ''
def init_alphabet(self):
self.alphabet = ambiguous_dna_values
other = ''.join(self.alphabet.keys())
self.alphabet['N'] = self.alphabet['N'] + other
for key in self.alphabet.keys():
if key == 'N': continue
if key in self.alphabet[key]: continue
self.alphabet[key] = self.alphabet[key] + key
def SetSeq(self, seq): self.sequence = seq
def SetPattern(self, pattern):
self.pattern = pattern
self.rx_pattern = self.IUPAC2regex(pattern)
self.rx = re.compile(self.rx_pattern)
def IUPAC2regex(self, s):
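        # expand IUPAC ambiguity codes into regex character classes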
rx = ''
for i in s:
r = self.alphabet.get(i,i)
if len(r) > 1:
rx = '%s[%s]' % (rx, r)
else:
rx += r
return rx
def _Search(self, start = 0):
pos = self.rx.search(self.sequence, start)
return pos
def Search(self, start = 0):
pos = self.rx.search(self.sequence, start)
if pos:
return pos.start()
else:
return -1
def SearchAll(self):
pos = -1
positions = []
while 1:
m = self._Search(pos+1)
if not m: break
pos = m.start()
if pos == -1:
break
positions.append(pos)
return positions
class XDNAsearch(Toplevel, DNAsearch):
def __init__(self, seq= '', master= None, highlight = 0):
DNAsearch.__init__(self)
self.master = master
self.highlight = highlight
self.colors = []
self.init_graphics()
self.sequence = seq
self.cur_pos = 0
def init_graphics(self):
Toplevel.__init__(self, self.master)
self.frame = Frame(self)
self.frame.pack(fill = BOTH, expand = 1)
self.search_entry = Entry(self.frame)
self.search_entry.pack(fill = BOTH, expand = 1)
f2 = Frame(self.frame)
f2.pack(side = TOP, fill = BOTH, expand = 1)
f = f2
self.forward = Button(f, text = 'Search +', command = self.do_search)
self.forward.pack(side = LEFT)
self.forward = Button(f, text = 'Search -',
command = lambda x=self.do_search: x(other_strand=1))
self.forward.pack(side = LEFT)
self.cancel = Button(f, text = 'Cancel', command = self.exit)
self.cancel.pack(side = LEFT)
self.current_color = 'cyan'
self.colorb = Button(f, text = 'Color', command = self.change_color, foreground = self.current_color)
self.colorb.pack(side = LEFT)
self.config_color(self.current_color)
def config_color(self, color = None):
if not self.highlight: return
if not color:
try:
color = askcolor()[1]
            except Exception:
color = 'cyan'
self.current_color = color
self.current_tag = 'searched_%s' % self.current_color
self.master.tag_config(self.current_tag, background=self.current_color)
self.master.tag_config(self.current_tag+'R', background=self.current_color, underline = 1)
self.colors.append(color)
def change_color(self):
self.config_color()
self.colorb.configure(foreground = self.current_color)
self.colorb.update()
def get_pattern(self):
pattern = self.search_entry.get()
return pattern
def do_search(self, other_strand = 0):
pattern = self.get_pattern()
if other_strand: pattern = reverse_complement(pattern)
self.SetPattern(pattern)
pos = self.Search(self.cur_pos)
self.cur_pos = pos +1
w = self.master
if pos != -1:
if self.highlight:
start, stop = pos, pos + len(self.pattern)
if other_strand:
w.tag_add(self.current_tag+'R', '1.%d' % start, '1.%s' % stop)
else:
w.tag_add(self.current_tag, '1.%d' % start, '1.%s' % stop)
w.see('1.%d' % start)
def exit(self):
for c in self.colors:
self.master.tag_remove('searched_%s' % c, 1.0, END)
self.master.tag_remove('searched_%sR' % c, 1.0, END)
self.destroy()
del(self)
def showcolor(self):
pass
if __name__ == '__main__':
seq = 'ATGGTGTGTGTGTACGATCGCCCCCCCCAGTCGATCGATGCATCGTA'
win = Tk()
xtest = XDNAsearch(seq = seq, master = win)
win.mainloop()
| [
"tkColorChooser.askcolor",
"Bio.Seq.reverse_complement",
"sys.path.insert",
"re.compile"
] | [((211, 234), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""."""'], {}), "(0, '.')\n", (226, 234), False, 'import os, sys, commands\n'), ((1053, 1080), 're.compile', 're.compile', (['self.rx_pattern'], {}), '(self.rx_pattern)\n', (1063, 1080), False, 'import re\n'), ((4155, 4182), 'Bio.Seq.reverse_complement', 'reverse_complement', (['pattern'], {}), '(pattern)\n', (4173, 4182), False, 'from Bio.Seq import reverse_complement\n'), ((3394, 3404), 'tkColorChooser.askcolor', 'askcolor', ([], {}), '()\n', (3402, 3404), False, 'from tkColorChooser import askcolor\n')] |
from django.utils import timezone
from django.db import models
from .translations import Translation
from .attachment import Attachment
class Report(Attachment):
"""
    Reports ("Meldungen") are topic-related, self-contained messages.
    They carry one piece of information in German and may additionally
    contain fragments with translations into further languages.
"""
class Meta:
verbose_name = 'Meldung'
verbose_name_plural = 'Meldungen'
ordering = ['-created']
published = models.BooleanField(
'Freigegeben', null=False, default=False,
help_text='Solange dieser Haken nicht gesetzt ist, wird diese Meldung nicht versendet und/oder angezeigt.')
delivered = models.BooleanField(
'Versendet', null=False, default=False)
created = models.DateTimeField(
'Erstellt',
default=timezone.now)
headline = models.CharField('Titel', max_length=200, null=False)
german = models.BooleanField('Deutsch', null=False, blank=True, default=False)
arabic = models.BooleanField(
'Arabisch',
help_text="Soll auf Arabisch übersetzt werden",
null=False,
blank=True,
default=False)
persian = models.BooleanField(
'Persisch',
help_text="Soll auf Persisch übersetzt werden",
null=False,
blank=True,
default=False)
english = models.BooleanField(
'Englisch',
help_text="Soll auf Englisch übersetzt werden",
null=False,
blank=True,
default=False)
text = models.CharField('Text Deutsch', max_length=628, null=False)
link = models.CharField(
'Link', max_length=1024, null=True, blank=True,
help_text='Hier eine Link-URL eintragen, wird als Button an die Push-Nachricht angehängt.')
def __str__(self):
return self.headline
@classmethod
def last(cls, *, count=1, offset=0, only_published=True, delivered=False, by_date=True):
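        # newest first; by default returns only published, not-yet-delivered reports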
reports = cls.objects.all()
if only_published:
reports = reports.filter(published=True)
if not delivered:
reports = reports.filter(delivered=False)
if by_date:
reports = reports.order_by('-created')
else:
reports = reports.order_by('-id')
        return reports[offset:offset + count]
class ReportTranslation(Translation):
class Meta:
verbose_name = 'Meldungs-Übersetzung'
verbose_name_plural = 'Meldungs-Übersetzung'
ordering = ('id', )
report = models.ForeignKey('Report', on_delete=models.CASCADE, related_name='translations',
related_query_name='translation')
def __str__(self):
return self.report.headline
| [
"django.db.models.DateTimeField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.BooleanField"
] | [((532, 711), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Freigegeben"""'], {'null': '(False)', 'default': '(False)', 'help_text': '"""Solange dieser Haken nicht gesetzt ist, wird diese Meldung nicht versendet und/oder angezeigt."""'}), "('Freigegeben', null=False, default=False, help_text=\n 'Solange dieser Haken nicht gesetzt ist, wird diese Meldung nicht versendet und/oder angezeigt.'\n )\n", (551, 711), False, 'from django.db import models\n'), ((735, 794), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Versendet"""'], {'null': '(False)', 'default': '(False)'}), "('Versendet', null=False, default=False)\n", (754, 794), False, 'from django.db import models\n'), ((818, 872), 'django.db.models.DateTimeField', 'models.DateTimeField', (['"""Erstellt"""'], {'default': 'timezone.now'}), "('Erstellt', default=timezone.now)\n", (838, 872), False, 'from django.db import models\n'), ((906, 959), 'django.db.models.CharField', 'models.CharField', (['"""Titel"""'], {'max_length': '(200)', 'null': '(False)'}), "('Titel', max_length=200, null=False)\n", (922, 959), False, 'from django.db import models\n'), ((973, 1042), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Deutsch"""'], {'null': '(False)', 'blank': '(True)', 'default': '(False)'}), "('Deutsch', null=False, blank=True, default=False)\n", (992, 1042), False, 'from django.db import models\n'), ((1056, 1184), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Arabisch"""'], {'help_text': '"""Soll auf Arabisch übersetzt werden"""', 'null': '(False)', 'blank': '(True)', 'default': '(False)'}), "('Arabisch', help_text=\n 'Soll auf Arabisch übersetzt werden', null=False, blank=True, default=False\n )\n", (1075, 1184), False, 'from django.db import models\n'), ((1230, 1358), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Persisch"""'], {'help_text': '"""Soll auf Persisch übersetzt werden"""', 'null': '(False)', 'blank': '(True)', 'default': '(False)'}), "('Persisch', help_text=\n 'Soll auf Persisch übersetzt werden', null=False, blank=True, default=False\n )\n", (1249, 1358), False, 'from django.db import models\n'), ((1404, 1532), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Englisch"""'], {'help_text': '"""Soll auf Englisch übersetzt werden"""', 'null': '(False)', 'blank': '(True)', 'default': '(False)'}), "('Englisch', help_text=\n 'Soll auf Englisch übersetzt werden', null=False, blank=True, default=False\n )\n", (1423, 1532), False, 'from django.db import models\n'), ((1576, 1636), 'django.db.models.CharField', 'models.CharField', (['"""Text Deutsch"""'], {'max_length': '(628)', 'null': '(False)'}), "('Text Deutsch', max_length=628, null=False)\n", (1592, 1636), False, 'from django.db import models\n'), ((1648, 1814), 'django.db.models.CharField', 'models.CharField', (['"""Link"""'], {'max_length': '(1024)', 'null': '(True)', 'blank': '(True)', 'help_text': '"""Hier eine Link-URL eintragen, wird als Button an die Push-Nachricht angehängt."""'}), "('Link', max_length=1024, null=True, blank=True, help_text=\n 'Hier eine Link-URL eintragen, wird als Button an die Push-Nachricht angehängt.'\n )\n", (1664, 1814), False, 'from django.db import models\n'), ((2552, 2673), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""Report"""'], {'on_delete': 'models.CASCADE', 'related_name': '"""translations"""', 'related_query_name': '"""translation"""'}), "('Report', on_delete=models.CASCADE, related_name=\n 'translations', related_query_name='translation')\n", 
(2569, 2673), False, 'from django.db import models\n')] |
from decimal import Decimal, ROUND_UP
from logging.config import dictConfig
import json
import os
with open(os.path.join(os.path.dirname(__file__), '..', 'logging.json')) as f:
dictConfig(json.load(f))
from flask import Flask
from flask_babel import Babel
from flask_limiter import Limiter
from flask_limiter.util import get_remote_address
from flask_login import LoginManager
from flask_migrate import Migrate
from flask_navigation import Navigation
from flask_sqlalchemy import SQLAlchemy
from flask_wtf import CSRFProtect
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics
from werkzeug.middleware.proxy_fix import ProxyFix
from app.cli import register_commands
from app.json_serializer import SteuerlotseJSONEncoder, SteuerlotseJSONDecoder
app = Flask(__name__)
# This needs to happen before any extensions are used that may rely on config values.
app.config.from_object(f'app.config.{app.env.capitalize()}Config')
# Because the app runs behind an nginx proxy, trust the X-Forwarded-For header set by that single proxy
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1)
limiter = Limiter(
app,
key_func=get_remote_address,
strategy='moving-window'
)
babel = Babel(app)
nav = Navigation(app)
csrf = CSRFProtect(app)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = 'strong'
db = SQLAlchemy(app)
migrate = Migrate(app, db)
register_commands(app)
app.json_encoder = SteuerlotseJSONEncoder
app.json_decoder = SteuerlotseJSONDecoder
if app.config['PROMETHEUS_EXPORTER_ENABLED']:
metrics = GunicornInternalPrometheusMetrics(app)
metrics.info('up', 'WebApp is up')
@babel.localeselector
def get_locale():
return 'de'
@app.context_processor
def utility_processor():
def EUR(decimal):
return u"%s€" % decimal.quantize(Decimal('1.00'), rounding=ROUND_UP)
return dict(EUR=EUR)
@app.context_processor
def inject_template_globals():
return dict(plausible_domain=app.config['PLAUSIBLE_DOMAIN'])
from app import routes
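# 'routes' is imported last so its handlers can use the fully configured app (avoids a circular import)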
| [
"flask_login.LoginManager",
"flask.Flask",
"flask_navigation.Navigation",
"app.cli.register_commands",
"prometheus_flask_exporter.multiprocess.GunicornInternalPrometheusMetrics",
"flask_babel.Babel",
"werkzeug.middleware.proxy_fix.ProxyFix",
"os.path.dirname",
"flask_sqlalchemy.SQLAlchemy",
"flask_migrate.Migrate",
"json.load",
"flask_limiter.Limiter",
"flask_wtf.CSRFProtect",
"decimal.Decimal"
] | [((792, 807), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (797, 807), False, 'from flask import Flask\n'), ((1071, 1102), 'werkzeug.middleware.proxy_fix.ProxyFix', 'ProxyFix', (['app.wsgi_app'], {'x_for': '(1)'}), '(app.wsgi_app, x_for=1)\n', (1079, 1102), False, 'from werkzeug.middleware.proxy_fix import ProxyFix\n'), ((1113, 1180), 'flask_limiter.Limiter', 'Limiter', (['app'], {'key_func': 'get_remote_address', 'strategy': '"""moving-window"""'}), "(app, key_func=get_remote_address, strategy='moving-window')\n", (1120, 1180), False, 'from flask_limiter import Limiter\n'), ((1204, 1214), 'flask_babel.Babel', 'Babel', (['app'], {}), '(app)\n', (1209, 1214), False, 'from flask_babel import Babel\n'), ((1221, 1236), 'flask_navigation.Navigation', 'Navigation', (['app'], {}), '(app)\n', (1231, 1236), False, 'from flask_navigation import Navigation\n'), ((1245, 1261), 'flask_wtf.CSRFProtect', 'CSRFProtect', (['app'], {}), '(app)\n', (1256, 1261), False, 'from flask_wtf import CSRFProtect\n'), ((1279, 1293), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (1291, 1293), False, 'from flask_login import LoginManager\n'), ((1372, 1387), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (1382, 1387), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((1398, 1414), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (1405, 1414), False, 'from flask_migrate import Migrate\n'), ((1416, 1438), 'app.cli.register_commands', 'register_commands', (['app'], {}), '(app)\n', (1433, 1438), False, 'from app.cli import register_commands\n'), ((1584, 1622), 'prometheus_flask_exporter.multiprocess.GunicornInternalPrometheusMetrics', 'GunicornInternalPrometheusMetrics', (['app'], {}), '(app)\n', (1617, 1622), False, 'from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics\n'), ((193, 205), 'json.load', 'json.load', (['f'], {}), '(f)\n', (202, 205), False, 'import json\n'), ((122, 147), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (137, 147), False, 'import os\n'), ((1835, 1850), 'decimal.Decimal', 'Decimal', (['"""1.00"""'], {}), "('1.00')\n", (1842, 1850), False, 'from decimal import Decimal, ROUND_UP\n')] |
#!/usr/bin/env python3
import os
import shutil
import argparse
import numpy as np
# local imports
import extract_particles
import extract_star_meta
def gen_data():
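    """Parse CLI arguments, extract good/bad particles, and build train/test datasets."""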
# parse arguments
parser = argparse.ArgumentParser(description='Extract good and bad particles and split into test/validation and training datasets.')
parser.add_argument('-p', '--projpath', type=str, help='path for project', required=True)
parser.add_argument('-g', '--stargood', type=str, help='input star file with good particles', required=True)
parser.add_argument('-b', '--starbad', type=str, help='input star file with bad particles', required=True)
parser.add_argument('-c', '--cleardata', action='store_true', help='clear training/test data before extracting')
args = parser.parse_args()
# create directory structure
work_dir = args.projpath
if work_dir.endswith('/'):
work_dir = work_dir.rstrip('/')
data_dir = work_dir + '/ClassBin'
train_dir = work_dir + '/ClassBin/train'
test_dir = work_dir + '/ClassBin/test'
    if not os.path.exists(data_dir):
os.mkdir(data_dir)
# extract box size, box apix, and original image apix
meta_good = extract_star_meta.extract(args.stargood)
meta_bad = extract_star_meta.extract(args.starbad)
if meta_good != meta_bad:
print("Headers for good and bad particle star files do not match. Exiting.")
exit()
else:
print("\nMetadata from STAR files:\nImage apix: " + meta_good[2] + "\nBox size: " + meta_good[0] + "\nBox apix: " + meta_good[1] + "\n")
box = int(meta_good[0])
b_apix = float(meta_good[1])
i_apix = float(meta_good[2])
# option to clear particle cache and re-extract particles
    if args.cleardata:
if ('train' in os.listdir(data_dir) or 'test' in os.listdir(data_dir)):
print('Clearing particle cache...', end="")
try:
shutil.rmtree(train_dir)
except FileNotFoundError:
pass
try:
shutil.rmtree(test_dir)
except FileNotFoundError:
pass
print('done.\n')
else:
print('Cannot clear particle cache because no particles found. Exiting.')
exit()
# create directories to store training/test data
if ('train' in os.listdir(data_dir) or 'test' in os.listdir(data_dir)):
print('Particle extraction halted because existing particles found. Re-run with -c flag to clear existing particles. Exiting.\n')
exit()
else:
os.mkdir(train_dir)
os.mkdir(test_dir)
good_train_dir = train_dir + '/good'
os.mkdir(good_train_dir)
bad_train_dir = train_dir + '/bad'
os.mkdir(bad_train_dir)
good_test_dir = test_dir + '/good'
os.mkdir(good_test_dir)
bad_test_dir = test_dir + '/bad'
os.mkdir(bad_test_dir)
# extract good and bad particle data
extract_particles.extract(args.projpath, args.stargood, good_train_dir, good_test_dir, 'good')
extract_particles.extract(args.projpath, args.starbad, bad_train_dir, bad_test_dir, 'bad')
print('Particle extraction complete. Training and test datasets have been created.\n')
if __name__ == "__main__":
gen_data() | [
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"os.mkdir",
"shutil.rmtree",
"extract_star_meta.extract",
"extract_particles.extract"
] | [((207, 340), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Extract good and bad particles and split into test/validation and training datasets."""'}), "(description=\n 'Extract good and bad particles and split into test/validation and training datasets.'\n )\n", (230, 340), False, 'import argparse\n'), ((1210, 1250), 'extract_star_meta.extract', 'extract_star_meta.extract', (['args.stargood'], {}), '(args.stargood)\n', (1235, 1250), False, 'import extract_star_meta\n'), ((1266, 1305), 'extract_star_meta.extract', 'extract_star_meta.extract', (['args.starbad'], {}), '(args.starbad)\n', (1291, 1305), False, 'import extract_star_meta\n'), ((1073, 1097), 'os.path.exists', 'os.path.exists', (['data_dir'], {}), '(data_dir)\n', (1087, 1097), False, 'import os\n'), ((1116, 1134), 'os.mkdir', 'os.mkdir', (['data_dir'], {}), '(data_dir)\n', (1124, 1134), False, 'import os\n'), ((2634, 2653), 'os.mkdir', 'os.mkdir', (['train_dir'], {}), '(train_dir)\n', (2642, 2653), False, 'import os\n'), ((2662, 2680), 'os.mkdir', 'os.mkdir', (['test_dir'], {}), '(test_dir)\n', (2670, 2680), False, 'import os\n'), ((2734, 2758), 'os.mkdir', 'os.mkdir', (['good_train_dir'], {}), '(good_train_dir)\n', (2742, 2758), False, 'import os\n'), ((2811, 2834), 'os.mkdir', 'os.mkdir', (['bad_train_dir'], {}), '(bad_train_dir)\n', (2819, 2834), False, 'import os\n'), ((2887, 2910), 'os.mkdir', 'os.mkdir', (['good_test_dir'], {}), '(good_test_dir)\n', (2895, 2910), False, 'import os\n'), ((2961, 2983), 'os.mkdir', 'os.mkdir', (['bad_test_dir'], {}), '(bad_test_dir)\n', (2969, 2983), False, 'import os\n'), ((3038, 3136), 'extract_particles.extract', 'extract_particles.extract', (['args.projpath', 'args.stargood', 'good_train_dir', 'good_test_dir', '"""good"""'], {}), "(args.projpath, args.stargood, good_train_dir,\n good_test_dir, 'good')\n", (3063, 3136), False, 'import extract_particles\n'), ((3141, 3235), 'extract_particles.extract', 'extract_particles.extract', (['args.projpath', 'args.starbad', 'bad_train_dir', 'bad_test_dir', '"""bad"""'], {}), "(args.projpath, args.starbad, bad_train_dir,\n bad_test_dir, 'bad')\n", (3166, 3235), False, 'import extract_particles\n'), ((2406, 2426), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (2416, 2426), False, 'import os\n'), ((2440, 2460), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (2450, 2460), False, 'import os\n'), ((1830, 1850), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (1840, 1850), False, 'import os\n'), ((1864, 1884), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (1874, 1884), False, 'import os\n'), ((1976, 2000), 'shutil.rmtree', 'shutil.rmtree', (['train_dir'], {}), '(train_dir)\n', (1989, 2000), False, 'import shutil\n'), ((2093, 2116), 'shutil.rmtree', 'shutil.rmtree', (['test_dir'], {}), '(test_dir)\n', (2106, 2116), False, 'import shutil\n')] |
# coding: utf-8
from __future__ import unicode_literals
from copy import deepcopy
import json
from django.utils.safestring import mark_safe
from easy_thumbnails.files import Thumbnailer, ThumbnailOptions
from easy_thumbnails.utils import get_storage_hash
from django.forms.widgets import FileInput
from .cache import get_cache, set_cache, set_cache_by_key, build_key
from .utils.json import CustomJsonEncoder
def ThumbnailOptions__init__(self, *args, **kwargs):
ThumbnailOptions__init__.old(self, *args, **kwargs)
for key in ['thumbnail_option_id', 'admin']:
self.pop(key, None)
def Thumbnailer__get_full_options(self, alias):
from .models import ThumbnailOption
from easy_thumbnails.alias import aliases
options = aliases.get(alias, target=self.alias_target)
if not options:
raise KeyError(alias)
options = deepcopy(options)
name = self.name
storage_hash = get_storage_hash(self.storage)
try:
override_option = get_cache(alias, name, storage_hash)
if override_option is None:
thumbnail_option = ThumbnailOption.objects.get(
source__name=name,
source__storage_hash=storage_hash,
alias=alias)
set_cache(thumbnail_option)
override_option = thumbnail_option.options
override_option['thumbnail_option_id'] = thumbnail_option.id
options.update(override_option or {})
except ThumbnailOption.DoesNotExist:
set_cache_by_key(build_key(alias, name, storage_hash), False)
return options
def Thumbnailer____getitem__(self, alias):
"""
Retrieve a thumbnail matching the alias options (or raise a
``KeyError`` if no such alias exists).
"""
options = self.get_full_options(alias)
return self.get_thumbnail(options, silent_template_exception=True)
def FileInput__render(self, name, value, attrs=None):
from easy_thumbnails.files import Thumbnailer
from easy_thumbnails_admin.options import get_options
is_thumbnailer = isinstance(value, Thumbnailer)
if is_thumbnailer and value:
attrs = attrs or {}
attrs['data-easy-thumbnail-admin-input'] = 1
attrs['data-name'] = value.name
attrs['data-target'] = str(value.field)
rendered = FileInput__render.old(self, name, value, attrs)
if is_thumbnailer:
rendered += (
'<script>'
'window.easyThumbnailAdminOptions = {};'
' </script>'.format(json.dumps(get_options(), ensure_ascii=False, cls=CustomJsonEncoder)))
return rendered
def FileInput__media(self):
from ._version import VERSION
media = FileInput__media.old.fget(self)
    # TODO: include a content hash of the file so browser caches refresh
media.add_js(['easy_thumbnails_admin/js/app.js'])
return media
def patch():
Thumbnailer.__getitem__ = Thumbnailer____getitem__
Thumbnailer.get_full_options = Thumbnailer__get_full_options
ThumbnailOptions__init__.old = ThumbnailOptions.__init__
ThumbnailOptions.__init__ = ThumbnailOptions__init__
FileInput__render.old = FileInput.render
FileInput.render = FileInput__render
FileInput__media.old = FileInput.media
FileInput.media = property(FileInput__media)
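# Usage sketch (illustrative, not part of the original module): the patches
# must be applied exactly once at startup, before any thumbnail or admin
# widget is rendered -- typically from AppConfig.ready() in a Django project
# (the module path imported below is an assumption):
#
#     from django.apps import AppConfig
#
#     class EasyThumbnailsAdminConfig(AppConfig):
#         name = 'easy_thumbnails_admin'
#
#         def ready(self):
#             from easy_thumbnails_admin.patch import patch
#             patch()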
| [
"easy_thumbnails_admin.options.get_options",
"easy_thumbnails.utils.get_storage_hash",
"easy_thumbnails.alias.aliases.get",
"copy.deepcopy"
] | [((749, 793), 'easy_thumbnails.alias.aliases.get', 'aliases.get', (['alias'], {'target': 'self.alias_target'}), '(alias, target=self.alias_target)\n', (760, 793), False, 'from easy_thumbnails.alias import aliases\n'), ((859, 876), 'copy.deepcopy', 'deepcopy', (['options'], {}), '(options)\n', (867, 876), False, 'from copy import deepcopy\n'), ((917, 947), 'easy_thumbnails.utils.get_storage_hash', 'get_storage_hash', (['self.storage'], {}), '(self.storage)\n', (933, 947), False, 'from easy_thumbnails.utils import get_storage_hash\n'), ((2502, 2515), 'easy_thumbnails_admin.options.get_options', 'get_options', ([], {}), '()\n', (2513, 2515), False, 'from easy_thumbnails_admin.options import get_options\n')] |
import math
import requests
from django.utils.translation import ugettext_lazy as _
from drf_yasg import openapi
from drf_yasg.utils import swagger_auto_schema
from rest_framework.decorators import permission_classes
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST
from poolink_backend.apps.link.api.serializers import (
LinkDestroySerializer,
LinkSearchSerializer,
LinkSerializer,
)
from poolink_backend.apps.link.grabicon import Favicon
from poolink_backend.apps.link.models import Board, Link
from poolink_backend.apps.link.opengraph import LinkImage
from poolink_backend.apps.pagination import CustomPagination
from poolink_backend.apps.permissions import IsWriterOrReadonly, LinkDeletePermission
from poolink_backend.bases.api.serializers import MessageSerializer
from poolink_backend.bases.api.views import APIView as BaseAPIView
from poolink_backend.bases.api.viewsets import ModelViewSet
class LinkViewSet(ModelViewSet):
    permission_classes = [IsWriterOrReadonly]
    serializer_class = LinkSerializer
    queryset = Link.objects.filter(show=True)
def partial_update(self, request, *args, **kwargs):
if "url" in request.data:
favicon = Favicon().get_favicon(request.data["url"])
meta_image = LinkImage().get_link_image(request.data["url"])
self.get_object().update(favicon=favicon, meta_image=meta_image)
return super().partial_update(request)
class LinkView(BaseAPIView):
allowed_method = ["DELETE", "POST", "GET"]
filterset_fields = ["hide"]
@swagger_auto_schema(
operation_id=_("Get Link"),
operation_description=_("탐색 페이지에서 보여질 링크들입니다. 유저의 선호카테고리로 필터링 됩니다."),
manual_parameters=[
openapi.Parameter('page', openapi.IN_QUERY, type='integer')],
responses={200: openapi.Response(_("OK"), LinkSerializer, )},
tags=[_("링크"), ],
)
def get(self, request):
paginator = CustomPagination()
paginator.page_size = 50
user = self.request.user
filtered_board = Board.objects.filter(
category__in=user.prefer.through.objects.filter(user_id=user.id).values('category_id')
)
links = Link.objects.filter(board__in=filtered_board, show=True)
result = paginator.paginate_queryset(links, request)
data_count = len(links)
        page_count = math.ceil(data_count / paginator.page_size)
return Response(status=HTTP_200_OK, data={"dataCount": data_count,
"totalPageCount": page_count,
"results": LinkSerializer(result, many=True).data})
@swagger_auto_schema(
operation_id=_("Create Link"),
operation_description=_("링크를 추가합니다."),
request_body=openapi.Schema(type=openapi.TYPE_OBJECT,
properties={
'board': openapi.Schema(type=openapi.TYPE_INTEGER,
description="링크가 속한 보드 아이디를 입력하세요"),
'label': openapi.Schema(type=openapi.TYPE_STRING,
description="링크 라벨입니다"),
'url': openapi.Schema(type=openapi.TYPE_STRING,
description="링크 url을 저장하세요"),
'show': openapi.Schema(type=openapi.TYPE_BOOLEAN,
description="링크의 공개 여부를 나타냅니 ")
}),
responses={200: openapi.Response(_("OK"), MessageSerializer)},
tags=[_("링크"), ],
)
@permission_classes([IsWriterOrReadonly])
def post(self, request):
if request.user == Board.objects.get(id=request.data['board']).user:
serializer = LinkSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
response = requests.get(serializer.validated_data['url'])
favicon = None
meta_image = None
if response.headers['Content-Type'] == 'application/pdf':
pass
else:
favicon = Favicon().get_favicon(serializer.validated_data['url'])
meta_image = LinkImage().get_link_image(serializer.validated_data['url'])
Link.objects.create(
board=serializer.validated_data['board'],
label=serializer.validated_data['label'],
url=serializer.validated_data['url'],
show=serializer.validated_data['show'],
favicon=favicon,
meta_image=meta_image
)
return Response(
status=HTTP_200_OK,
data=MessageSerializer({"message": _("링크를 저장했습니다.")}).data,
)
else:
print("유저 다름")
return Response(data=MessageSerializer({"message": _("접근 권한이 없습니다.")}).data)
@swagger_auto_schema(
operation_id=_("Delete My Link"),
operation_description=_("링크를 삭제합니다."),
request_body=LinkDestroySerializer,
responses={200: openapi.Response(_("OK"), MessageSerializer)},
tags=[_("링크"), ]
)
@permission_classes([LinkDeletePermission])
def delete(self, request):
serializer = LinkDestroySerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
query = Link.objects.filter(
board_id__in=Board.objects.filter(user=request.user).values('id'),
id__in=serializer.validated_data["links"],
)
if not query:
return Response(status=HTTP_400_BAD_REQUEST,
data=MessageSerializer({"message": _("링크 삭제의 권한이 없거나 존재하지 않는 링크입니다.")}).data)
else:
query.delete()
return Response(status=HTTP_200_OK, data=MessageSerializer({"message": _("링크를 삭제했습니다.")}).data)
link_view = LinkView.as_view()
class LinkSearchView(BaseAPIView):
allowed_method = ["POST"]
filterset_fields = ["my"]
@swagger_auto_schema(
operation_id=_("Search Link"),
operation_description=_("링크를 검색합니다."),
manual_parameters=[
openapi.Parameter('my', openapi.IN_QUERY, type='bool')],
request_body=LinkSearchSerializer,
responses={200: openapi.Response(_("OK"), LinkSearchSerializer)},
tags=[_("링크"), ]
)
def post(self, request):
user = request.user
serializer = LinkSearchSerializer(data=request.data)
if serializer.is_valid(raise_exception=True):
my = request.query_params.get('my', None)
            # Search links from the save page (the user's own and scrapped boards)
if my:
scrapped_board = self.request.user.scrap.all()
my_board = Board.objects.filter(user_id=user.id)
boards = scrapped_board | my_board
links = Link.objects.filter(board__in=boards, label__contains=serializer.validated_data['text'])
            # Search links from the explore page
else:
text = serializer.validated_data['text']
if "".__eq__(text):
filtered_board = Board.objects.filter(
category__in=user.prefer.through.objects.values('category_id')
)
links = Link.objects.filter(board__in=filtered_board, show=True)
else:
links = Link.objects.filter(
label__contains=serializer.validated_data['text'],
show=True
)
return Response(status=HTTP_200_OK, data=LinkSerializer(links, many=True).data)
link_search_view = LinkSearchView.as_view()
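# Wiring sketch (illustrative; the URL paths are assumptions, not part of
# this module):
#
#     from django.urls import path
#     urlpatterns = [
#         path('links/', link_view),
#         path('links/search/', link_search_view),
#     ]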
| [
"rest_framework.decorators.permission_classes",
"django.utils.translation.ugettext_lazy",
"math.ceil",
"poolink_backend.apps.link.api.serializers.LinkSearchSerializer",
"poolink_backend.apps.pagination.CustomPagination",
"poolink_backend.apps.link.api.serializers.LinkSerializer",
"poolink_backend.apps.link.models.Link.objects.create",
"poolink_backend.apps.link.api.serializers.LinkDestroySerializer",
"requests.get",
"poolink_backend.apps.link.opengraph.LinkImage",
"poolink_backend.apps.link.models.Board.objects.get",
"poolink_backend.apps.link.grabicon.Favicon",
"drf_yasg.openapi.Schema",
"poolink_backend.apps.link.models.Board.objects.filter",
"drf_yasg.openapi.Parameter",
"poolink_backend.apps.link.models.Link.objects.filter"
] | [((1111, 1141), 'poolink_backend.apps.link.models.Link.objects.filter', 'Link.objects.filter', ([], {'show': '(True)'}), '(show=True)\n', (1130, 1141), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((3820, 3860), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[IsWriterOrReadonly]'], {}), '([IsWriterOrReadonly])\n', (3838, 3860), False, 'from rest_framework.decorators import permission_classes\n'), ((5470, 5512), 'rest_framework.decorators.permission_classes', 'permission_classes', (['[LinkDeletePermission]'], {}), '([LinkDeletePermission])\n', (5488, 5512), False, 'from rest_framework.decorators import permission_classes\n'), ((1999, 2017), 'poolink_backend.apps.pagination.CustomPagination', 'CustomPagination', ([], {}), '()\n', (2015, 2017), False, 'from poolink_backend.apps.pagination import CustomPagination\n'), ((2256, 2312), 'poolink_backend.apps.link.models.Link.objects.filter', 'Link.objects.filter', ([], {'board__in': 'filtered_board', 'show': '(True)'}), '(board__in=filtered_board, show=True)\n', (2275, 2312), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((2427, 2453), 'math.ceil', 'math.ceil', (['(data_count / 50)'], {}), '(data_count / 50)\n', (2436, 2453), False, 'import math\n'), ((5565, 5605), 'poolink_backend.apps.link.api.serializers.LinkDestroySerializer', 'LinkDestroySerializer', ([], {'data': 'request.data'}), '(data=request.data)\n', (5586, 5605), False, 'from poolink_backend.apps.link.api.serializers import LinkDestroySerializer, LinkSearchSerializer, LinkSerializer\n'), ((6781, 6820), 'poolink_backend.apps.link.api.serializers.LinkSearchSerializer', 'LinkSearchSerializer', ([], {'data': 'request.data'}), '(data=request.data)\n', (6801, 6820), False, 'from poolink_backend.apps.link.api.serializers import LinkDestroySerializer, LinkSearchSerializer, LinkSerializer\n'), ((1654, 1667), 'django.utils.translation.ugettext_lazy', '_', (['"""Get Link"""'], {}), "('Get Link')\n", (1655, 1667), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1699, 1745), 'django.utils.translation.ugettext_lazy', '_', (['"""탐색 페이지에서 보여질 링크들입니다. 유저의 선호카테고리로 필터링 됩니다."""'], {}), "('탐색 페이지에서 보여질 링크들입니다. 
유저의 선호카테고리로 필터링 됩니다.')\n", (1700, 1745), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3992, 4025), 'poolink_backend.apps.link.api.serializers.LinkSerializer', 'LinkSerializer', ([], {'data': 'request.data'}), '(data=request.data)\n', (4006, 4025), False, 'from poolink_backend.apps.link.api.serializers import LinkDestroySerializer, LinkSearchSerializer, LinkSerializer\n'), ((2760, 2776), 'django.utils.translation.ugettext_lazy', '_', (['"""Create Link"""'], {}), "('Create Link')\n", (2761, 2776), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2808, 2823), 'django.utils.translation.ugettext_lazy', '_', (['"""링크를 추가합니다."""'], {}), "('링크를 추가합니다.')\n", (2809, 2823), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5251, 5270), 'django.utils.translation.ugettext_lazy', '_', (['"""Delete My Link"""'], {}), "('Delete My Link')\n", (5252, 5270), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5302, 5317), 'django.utils.translation.ugettext_lazy', '_', (['"""링크를 삭제합니다."""'], {}), "('링크를 삭제합니다.')\n", (5303, 5317), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6393, 6409), 'django.utils.translation.ugettext_lazy', '_', (['"""Search Link"""'], {}), "('Search Link')\n", (6394, 6409), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6441, 6456), 'django.utils.translation.ugettext_lazy', '_', (['"""링크를 검색합니다."""'], {}), "('링크를 검색합니다.')\n", (6442, 6456), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1787, 1846), 'drf_yasg.openapi.Parameter', 'openapi.Parameter', (['"""page"""', 'openapi.IN_QUERY'], {'type': '"""integer"""'}), "('page', openapi.IN_QUERY, type='integer')\n", (1804, 1846), False, 'from drf_yasg import openapi\n'), ((1933, 1940), 'django.utils.translation.ugettext_lazy', '_', (['"""링크"""'], {}), "('링크')\n", (1934, 1940), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3917, 3960), 'poolink_backend.apps.link.models.Board.objects.get', 'Board.objects.get', ([], {'id': "request.data['board']"}), "(id=request.data['board'])\n", (3934, 3960), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((4111, 4157), 'requests.get', 'requests.get', (["serializer.validated_data['url']"], {}), "(serializer.validated_data['url'])\n", (4123, 4157), False, 'import requests\n'), ((4542, 4778), 'poolink_backend.apps.link.models.Link.objects.create', 'Link.objects.create', ([], {'board': "serializer.validated_data['board']", 'label': "serializer.validated_data['label']", 'url': "serializer.validated_data['url']", 'show': "serializer.validated_data['show']", 'favicon': 'favicon', 'meta_image': 'meta_image'}), "(board=serializer.validated_data['board'], label=\n serializer.validated_data['label'], url=serializer.validated_data['url'\n ], show=serializer.validated_data['show'], favicon=favicon, meta_image=\n meta_image)\n", (4561, 4778), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((3797, 3804), 'django.utils.translation.ugettext_lazy', '_', (['"""링크"""'], {}), "('링크')\n", (3798, 3804), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5448, 5455), 'django.utils.translation.ugettext_lazy', '_', (['"""링크"""'], {}), "('링크')\n", (5449, 5455), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7067, 7104), 'poolink_backend.apps.link.models.Board.objects.filter', 'Board.objects.filter', ([], {'user_id': 'user.id'}), '(user_id=user.id)\n', (7087, 7104), False, 
'from poolink_backend.apps.link.models import Board, Link\n'), ((7180, 7273), 'poolink_backend.apps.link.models.Link.objects.filter', 'Link.objects.filter', ([], {'board__in': 'boards', 'label__contains': "serializer.validated_data['text']"}), "(board__in=boards, label__contains=serializer.\n validated_data['text'])\n", (7199, 7273), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((6498, 6552), 'drf_yasg.openapi.Parameter', 'openapi.Parameter', (['"""my"""', 'openapi.IN_QUERY'], {'type': '"""bool"""'}), "('my', openapi.IN_QUERY, type='bool')\n", (6515, 6552), False, 'from drf_yasg import openapi\n'), ((6686, 6693), 'django.utils.translation.ugettext_lazy', '_', (['"""링크"""'], {}), "('링크')\n", (6687, 6693), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1256, 1265), 'poolink_backend.apps.link.grabicon.Favicon', 'Favicon', ([], {}), '()\n', (1263, 1265), False, 'from poolink_backend.apps.link.grabicon import Favicon\n'), ((1324, 1335), 'poolink_backend.apps.link.opengraph.LinkImage', 'LinkImage', ([], {}), '()\n', (1333, 1335), False, 'from poolink_backend.apps.link.opengraph import LinkImage\n'), ((1890, 1897), 'django.utils.translation.ugettext_lazy', '_', (['"""OK"""'], {}), "('OK')\n", (1891, 1897), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3753, 3760), 'django.utils.translation.ugettext_lazy', '_', (['"""OK"""'], {}), "('OK')\n", (3754, 3760), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5404, 5411), 'django.utils.translation.ugettext_lazy', '_', (['"""OK"""'], {}), "('OK')\n", (5405, 5411), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7604, 7660), 'poolink_backend.apps.link.models.Link.objects.filter', 'Link.objects.filter', ([], {'board__in': 'filtered_board', 'show': '(True)'}), '(board__in=filtered_board, show=True)\n', (7623, 7660), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((7712, 7798), 'poolink_backend.apps.link.models.Link.objects.filter', 'Link.objects.filter', ([], {'label__contains': "serializer.validated_data['text']", 'show': '(True)'}), "(label__contains=serializer.validated_data['text'], show\n =True)\n", (7731, 7798), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((7913, 7945), 'poolink_backend.apps.link.api.serializers.LinkSerializer', 'LinkSerializer', (['links'], {'many': '(True)'}), '(links, many=True)\n', (7927, 7945), False, 'from poolink_backend.apps.link.api.serializers import LinkDestroySerializer, LinkSearchSerializer, LinkSerializer\n'), ((6639, 6646), 'django.utils.translation.ugettext_lazy', '_', (['"""OK"""'], {}), "('OK')\n", (6640, 6646), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2671, 2704), 'poolink_backend.apps.link.api.serializers.LinkSerializer', 'LinkSerializer', (['result'], {'many': '(True)'}), '(result, many=True)\n', (2685, 2704), False, 'from poolink_backend.apps.link.api.serializers import LinkDestroySerializer, LinkSearchSerializer, LinkSerializer\n'), ((2985, 3062), 'drf_yasg.openapi.Schema', 'openapi.Schema', ([], {'type': 'openapi.TYPE_INTEGER', 'description': '"""링크가 속한 보드 아이디를 입력하세요"""'}), "(type=openapi.TYPE_INTEGER, description='링크가 속한 보드 아이디를 입력하세요')\n", (2999, 3062), False, 'from drf_yasg import openapi\n'), ((3177, 3241), 'drf_yasg.openapi.Schema', 'openapi.Schema', ([], {'type': 'openapi.TYPE_STRING', 'description': '"""링크 라벨입니다"""'}), "(type=openapi.TYPE_STRING, description='링크 라벨입니다')\n", (3191, 3241), False, 'from drf_yasg 
import openapi\n'), ((3354, 3423), 'drf_yasg.openapi.Schema', 'openapi.Schema', ([], {'type': 'openapi.TYPE_STRING', 'description': '"""링크 url을 저장하세요"""'}), "(type=openapi.TYPE_STRING, description='링크 url을 저장하세요')\n", (3368, 3423), False, 'from drf_yasg import openapi\n'), ((3535, 3608), 'drf_yasg.openapi.Schema', 'openapi.Schema', ([], {'type': 'openapi.TYPE_BOOLEAN', 'description': '"""링크의 공개 여부를 나타냅니 """'}), "(type=openapi.TYPE_BOOLEAN, description='링크의 공개 여부를 나타냅니 ')\n", (3549, 3608), False, 'from drf_yasg import openapi\n'), ((4375, 4384), 'poolink_backend.apps.link.grabicon.Favicon', 'Favicon', ([], {}), '()\n', (4382, 4384), False, 'from poolink_backend.apps.link.grabicon import Favicon\n'), ((4464, 4475), 'poolink_backend.apps.link.opengraph.LinkImage', 'LinkImage', ([], {}), '()\n', (4473, 4475), False, 'from poolink_backend.apps.link.opengraph import LinkImage\n'), ((5730, 5769), 'poolink_backend.apps.link.models.Board.objects.filter', 'Board.objects.filter', ([], {'user': 'request.user'}), '(user=request.user)\n', (5750, 5769), False, 'from poolink_backend.apps.link.models import Board, Link\n'), ((5177, 5194), 'django.utils.translation.ugettext_lazy', '_', (['"""접근 권한이 없습니다."""'], {}), "('접근 권한이 없습니다.')\n", (5178, 5194), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((5030, 5046), 'django.utils.translation.ugettext_lazy', '_', (['"""링크를 저장했습니다."""'], {}), "('링크를 저장했습니다.')\n", (5031, 5046), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6011, 6045), 'django.utils.translation.ugettext_lazy', '_', (['"""링크 삭제의 권한이 없거나 존재하지 않는 링크입니다."""'], {}), "('링크 삭제의 권한이 없거나 존재하지 않는 링크입니다.')\n", (6012, 6045), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6190, 6206), 'django.utils.translation.ugettext_lazy', '_', (['"""링크를 삭제했습니다."""'], {}), "('링크를 삭제했습니다.')\n", (6191, 6206), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
import ngs_tools as ngs
logger = ngs.logging.Logger(__name__)
ngs.logging.set_logger(logger)
| [
"ngs_tools.logging.Logger",
"ngs_tools.logging.set_logger"
] | [((34, 62), 'ngs_tools.logging.Logger', 'ngs.logging.Logger', (['__name__'], {}), '(__name__)\n', (52, 62), True, 'import ngs_tools as ngs\n'), ((63, 93), 'ngs_tools.logging.set_logger', 'ngs.logging.set_logger', (['logger'], {}), '(logger)\n', (85, 93), True, 'import ngs_tools as ngs\n')] |
"""
Setup for NFSyndication
See https://packaging.python.org/guides/distributing-packages-using-setuptools/ for how to package and distribute Python projects on PyPI
"""
from os.path import dirname, abspath, join, exists
from setuptools import setup, find_packages
from NFSyndication import __version__ as pkgversion
with open("README.rst", "r") as fh:
long_description = fh.read()
install_reqs = [req.strip() for req in open(abspath(join(dirname(__file__), 'requirements.txt'))) if req.strip()]
setup(
name = "NFSyndication",
version = pkgversion,
packages=find_packages(exclude=['test', 'test.*']),
entry_points = {
'console_scripts': [
'nfsyndication-src = NFSyndication.__main__:run',
]
},
package_data = {'NFSyndication': ['templates/_layout.html', 'templates/**/*.*']},
description= "News Feed Syndication - A package that read and fetch RSS feeds from the publications.",
long_description=long_description,
long_description_content_type="text/x-rst",
author = "Web SRC",
author_email = "<EMAIL>",
install_requires=install_reqs,
extras_require = {
        'test': ['multipart', 'flask', 'pre-commit', 'pytest', 'pytest-cov', 'pygments', 'requests-toolbelt', 'responses>=0.11.0', 'tornado', 'twine']
},
license = "BSD-2-Clause",
keywords = "rss, news",
project_urls={
'Source': 'https://github.com/web-sys1/NFSyndication/'
},
classifiers=[
'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'
]
)
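# Usage sketch (illustrative; assumes a standard pip workflow run from the
# project root):
#
#     pip install .            # package plus the requirements.txt dependencies
#     pip install '.[test]'    # additionally pulls the 'test' extras above
#     nfsyndication-src        # console script declared in entry_points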
| [
"os.path.dirname",
"setuptools.find_packages"
] | [((552, 593), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['test', 'test.*']"}), "(exclude=['test', 'test.*'])\n", (565, 593), False, 'from setuptools import setup, find_packages\n'), ((435, 452), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (442, 452), False, 'from os.path import dirname, abspath, join, exists\n')] |
import numpy as np
from pandas import Series, DataFrame
import pandas as pd
from numpy.random import randn
# Now we'll learn about Index Hierarchy
# pandas allows you to have multiple index levels, which is very clear
# with this example:
ser = Series(np.random.randn(6), index=[
[1, 1, 1, 2, 2, 2], ['a', 'b', 'c', 'a', 'b', 'c']])
# We can check the multiple levels
ser.index
# Now we can select specific subsets
ser[1]
# We can also select from an internal index level
ser[:, 'a']
# We can also create Data Frames from Series with multiple levels
dframe = ser.unstack()
# stack() reverses the operation, giving us back a multi-indexed Series
# (unstack() on the DataFrame would also collapse it, but with the level
# order swapped)
dframe.stack()
# We can also apply multiple level indexing to DataFrames
dframe2 = DataFrame(np.arange(16).reshape(4, 4),
index=[['a', 'a', 'b', 'b'], [1, 2, 1, 2]],
columns=[['NY', 'NY', 'LA', 'SF'],
['cold', 'hot', 'hot', 'cold']])
# We can also give these index levels names
# Name the index levels
dframe2.index.names = ['INDEX_1', 'INDEX_2']
# Name the column levels
dframe2.columns.names = ['Cities', 'Temp']
# We can also interchange level orders (note the axis=1 for columns)
dframe2.swaplevel('Cities', 'Temp', axis=1)
# We can also sort by an index level (sortlevel() was removed from modern
# pandas; sort_index(level=...) is its replacement)
dframe2.sort_index(level=1)
# Note the change in sorting: the DataFrame index is now ordered by INDEX_2
# We can also perform operations on particular levels; modern pandas spells
# the old dframe2.sum(level='Temp', axis=1) as a groupby over that level
dframe2.groupby(level='Temp', axis=1).sum()
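# For illustration (not in the original notes), xs() pulls a cross-section
# out of one level, e.g. every 'hot' column regardless of city
dframe2.xs('hot', level='Temp', axis=1)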
| [
"numpy.random.randn",
"numpy.arange"
] | [((256, 274), 'numpy.random.randn', 'np.random.randn', (['(6)'], {}), '(6)\n', (271, 274), True, 'import numpy as np\n'), ((712, 725), 'numpy.arange', 'np.arange', (['(16)'], {}), '(16)\n', (721, 725), True, 'import numpy as np\n')] |
# Copyright (c) 2016 Civic Knowledge. This file is licensed under the terms of the
# MIT License, included in this distribution as LICENSE.txt
"""
Tables and columns
"""
from itertools import zip_longest
from rowgenerators.exceptions import ConfigurationError
from rowgenerators.exceptions import SchemaError
from rowgenerators.table import Column as RGColumn
from rowgenerators.table import Table as RGTable
from rowgenerators.valuetype import resolve_value_type
class Table(RGTable):
def add_column(self, name, datatype=None, valuetype=None, transform=None, width=None):
c = Column(name, datatype, width, valuetype, transform)
self.columns.append(c)
return c
@property
def stage_transforms(self):
"""Expanded transforms, organized as stages. Each entry is a list of one stage of transforms for all columns.
        Each stage has an entry for every column, even when no transform is specified for that column, either
setting the datatype for the first stage, or a passthrough function for any others."""
stages = list(zip_longest(*[c.expanded_transform for c in self]))
new_stages = []
columns = list(self)
for i, stage in enumerate(stages):
new_stage = []
for j, tr in enumerate(stage):
if tr:
new_stage.append(tr)
elif i == 0:
new_stage.append(TransformSegment(datatype=columns[j].valuetype or columns[j].datatype,
column=columns[j]))
else:
new_stage.append(TransformSegment(transforms=['v'], column=columns[j]))
new_stages.append(new_stage)
return new_stages
def __str__(self):
from tabulate import tabulate
headers = 'name datatype valuetype transform width'.split()
rows = [(c.name, c.datatype.__name__, c.valuetype.__name__, c.transform, c.width) for c in self.columns]
return ('Table: {}\n'.format(self.name)) + tabulate(rows, headers)
class TransformSegment(object):
def __init__(self, init=None, datatype=None, transforms=None, exception=None, column=None):
self.init = init
self.transforms = transforms or []
self.datatype = datatype
self.exception = exception
self.column = column
def __getitem__(self, item):
return getattr(self, item)
def __setitem__(self, key, value):
return setattr(self, key, value)
def __iter__(self):
"""Iterate the sequence of parts, including the init, datatype and transforms"""
if self.init:
yield self.init
if self.datatype:
yield self.datatype
yield from self.transforms
def __repr__(self):
fields = []
for f in 'init datatype transforms exception'.split():
if self[f]:
if f == 'datatype':
fields.append('{}={}'.format(f, self[f].__name__))
else:
fields.append('{}={}'.format(f, self[f]))
return "<Transform {} {} >".format(self.column.name, ' '.join(fields))
def str(self, stage_n):
return '|'.join(e.__name__ if isinstance(e, type) else e for e in list(self))
class Column(RGColumn):
def __init__(self, name, datatype=None, width=None, valuetype=None, transform=None):
if valuetype is not None and datatype is None:
self.valuetype = resolve_value_type(valuetype)
elif datatype is not None:
self.valuetype = resolve_value_type(datatype)
else:
self.valuetype = None
if self.valuetype is None:
raise SchemaError("Could not resovle type for for column '{}' datatype='{}' and valuetype='{}' "
.format(name, datatype, valuetype))
self.transform = transform
if width is not None and width != '':
width = int(width)
super().__init__(name, self.valuetype.python_type(), width)
def __repr__(self):
return "<Column {name} dt={datatype} vt={valuetype} {transform}>" \
.format(name=self.name, datatype=self.datatype.__name__, valuetype=self.valuetype.__name__,
transform=self.transform)
@property
def dict(self):
return dict(
name=self.name,
datatype=self.datatype,
valuetype=self.valuetype,
transform=self.transform
)
@property
def expanded_transform(self):
"""Expands the transform string into segments """
transform = self.transform.rstrip('|') if self.transform else ''
segments = [TransformSegment(column=self)]
        for i, seg_str in enumerate(transform.split(';')):  # ';' separates pipe stages
            pipes = seg_str.split('|')  # separate pipes in each stage.
d = TransformSegment(column=self)
for pipe in pipes:
if not pipe.strip():
continue
if pipe[0] == '^': # First, the initializer
if segments[0].init:
raise ConfigurationError('Can only have one initializer in a pipeline segment')
if i != 0:
raise ConfigurationError('Can only have an initializer in the first pipeline segment')
segments[0].init = pipe[1:] # initializers only go on the first segment
elif pipe[0] == '!': # Exception Handler
d.exception = pipe[1:]
else: # Assume before the datatype
d['transforms'].append(pipe.strip())
if d['transforms'] or d.exception:
segments.append(d)
if not segments[0].init:
segments[0].init = self.valuetype or self.datatype
return segments
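    # Worked illustration (derived from the parsing rules above; the function
    # names are hypothetical): for
    #
    #     Column('n', datatype='int', transform='^seed|scale;!on_err|fmt')
    #
    # segment 0 gets init='seed', the first ';'-stage appends a segment with
    # transforms=['scale'], and the second stage appends a segment with
    # transforms=['fmt'] and exception='on_err'.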
| [
"tabulate.tabulate",
"rowgenerators.exceptions.ConfigurationError",
"itertools.zip_longest",
"rowgenerators.valuetype.resolve_value_type"
] | [((1088, 1138), 'itertools.zip_longest', 'zip_longest', (['*[c.expanded_transform for c in self]'], {}), '(*[c.expanded_transform for c in self])\n', (1099, 1138), False, 'from itertools import zip_longest\n'), ((2062, 2085), 'tabulate.tabulate', 'tabulate', (['rows', 'headers'], {}), '(rows, headers)\n', (2070, 2085), False, 'from tabulate import tabulate\n'), ((3512, 3541), 'rowgenerators.valuetype.resolve_value_type', 'resolve_value_type', (['valuetype'], {}), '(valuetype)\n', (3530, 3541), False, 'from rowgenerators.valuetype import resolve_value_type\n'), ((3607, 3635), 'rowgenerators.valuetype.resolve_value_type', 'resolve_value_type', (['datatype'], {}), '(datatype)\n', (3625, 3635), False, 'from rowgenerators.valuetype import resolve_value_type\n'), ((5208, 5281), 'rowgenerators.exceptions.ConfigurationError', 'ConfigurationError', (['"""Can only have one initializer in a pipeline segment"""'], {}), "('Can only have one initializer in a pipeline segment')\n", (5226, 5281), False, 'from rowgenerators.exceptions import ConfigurationError\n'), ((5343, 5428), 'rowgenerators.exceptions.ConfigurationError', 'ConfigurationError', (['"""Can only have an initializer in the first pipeline segment"""'], {}), "('Can only have an initializer in the first pipeline segment'\n )\n", (5361, 5428), False, 'from rowgenerators.exceptions import ConfigurationError\n')] |
# -*- coding: utf-8 -*-
from sqlalchemy import Column
from sqlalchemy.dialects.mysql import TINYINT, BIGINT, VARCHAR, TEXT, DECIMAL
from application.model.base_model import Base, BaseModelMixin
class ServiceTemplate(Base, BaseModelMixin):
__tablename__ = 'service_template'
__comment__ = '学术服务'
type = Column(TINYINT, nullable=False, comment='0 - 包月套餐,1 - 流量套餐')
title = Column(VARCHAR(64), nullable=False, comment='套餐名')
subtitle = Column(VARCHAR(64), nullable=False, comment='副标题')
description = Column(TEXT, nullable=False, comment='套餐描述')
package = Column(BIGINT, nullable=False, comment='总流量')
price = Column(DECIMAL(12, 2), nullable=False, comment='价格')
initialization_fee = Column(DECIMAL(12, 2), nullable=False, comment='初装费')
__required_columns_for_creation__ = ['type', 'title', 'subtitle', 'description', 'package', 'price',
'initialization_fee']
__allow_columns_for_creation__ = ['status']
class STATUS(object):
        # Initialized
        INITIALIZATION = 0
        # Valid
        VALID = 1
        # Deleted
        DELETED = 2
        # Taken off the shelf (suspended)
        SUSPEND = 3
    class TYPE(object):
        # Monthly subscription
        MONTHLY = 0
        # Metered data
        DATA = 1
        # Recommended
        RECOMMENDATION = 2
RECOMMENDATION = 2
# def __init__(self, *args, **kwargs):
# super().__init__(*args, **kwargs)
def __init__(self, service_type: int = None, title: str = None, subtitle: str = None, description: str = None,
package: int = None, price=None, initialization_fee=None, status=1, *args, **kwargs):
super().__init__(*args, **kwargs)
self.type = service_type
self.title = title
self.subtitle = subtitle
self.description = description
self.package = package
self.price = price
self.initialization_fee = initialization_fee
self.status = status
cacheable = ServiceTemplate
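# Usage sketch (illustrative; the SQLAlchemy session is assumed, not defined
# in this module):
#
#     plan = ServiceTemplate(service_type=ServiceTemplate.TYPE.MONTHLY,
#                            title='Standard', subtitle='Monthly plan',
#                            description='...', package=100 * 1024 ** 3,
#                            price=9.99, initialization_fee=0,
#                            status=ServiceTemplate.STATUS.VALID)
#     session.add(plan)
#     session.commit()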
| [
"sqlalchemy.dialects.mysql.VARCHAR",
"sqlalchemy.dialects.mysql.DECIMAL",
"sqlalchemy.Column"
] | [((318, 378), 'sqlalchemy.Column', 'Column', (['TINYINT'], {'nullable': '(False)', 'comment': '"""0 - 包月套餐,1 - 流量套餐"""'}), "(TINYINT, nullable=False, comment='0 - 包月套餐,1 - 流量套餐')\n", (324, 378), False, 'from sqlalchemy import Column\n'), ((526, 570), 'sqlalchemy.Column', 'Column', (['TEXT'], {'nullable': '(False)', 'comment': '"""套餐描述"""'}), "(TEXT, nullable=False, comment='套餐描述')\n", (532, 570), False, 'from sqlalchemy import Column\n'), ((585, 630), 'sqlalchemy.Column', 'Column', (['BIGINT'], {'nullable': '(False)', 'comment': '"""总流量"""'}), "(BIGINT, nullable=False, comment='总流量')\n", (591, 630), False, 'from sqlalchemy import Column\n'), ((398, 409), 'sqlalchemy.dialects.mysql.VARCHAR', 'VARCHAR', (['(64)'], {}), '(64)\n', (405, 409), False, 'from sqlalchemy.dialects.mysql import TINYINT, BIGINT, VARCHAR, TEXT, DECIMAL\n'), ((464, 475), 'sqlalchemy.dialects.mysql.VARCHAR', 'VARCHAR', (['(64)'], {}), '(64)\n', (471, 475), False, 'from sqlalchemy.dialects.mysql import TINYINT, BIGINT, VARCHAR, TEXT, DECIMAL\n'), ((650, 664), 'sqlalchemy.dialects.mysql.DECIMAL', 'DECIMAL', (['(12)', '(2)'], {}), '(12, 2)\n', (657, 664), False, 'from sqlalchemy.dialects.mysql import TINYINT, BIGINT, VARCHAR, TEXT, DECIMAL\n'), ((728, 742), 'sqlalchemy.dialects.mysql.DECIMAL', 'DECIMAL', (['(12)', '(2)'], {}), '(12, 2)\n', (735, 742), False, 'from sqlalchemy.dialects.mysql import TINYINT, BIGINT, VARCHAR, TEXT, DECIMAL\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""VM-related helper functions/classes."""
from __future__ import print_function
import distutils.version # pylint: disable=import-error,no-name-in-module
import errno
import fcntl
import glob
import multiprocessing
import os
import re
import shutil
import socket
import sys
import time
from chromite.cli.cros import cros_chrome_sdk
from chromite.lib import constants
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import device
from chromite.lib import image_lib
from chromite.lib import osutils
from chromite.lib import path_util
from chromite.lib import remote_access
from chromite.lib import retry_util
from chromite.utils import memoize
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
class VMError(device.DeviceError):
"""Exception for VM errors."""
def VMIsUpdatable(path):
"""Check if the existing VM image is updatable.
Args:
path: Path to the VM image.
Returns:
True if VM is updatable; False otherwise.
"""
table = {p.name: p for p in image_lib.GetImageDiskPartitionInfo(path)}
# Assume if size of the two root partitions match, the image
# is updatable.
return table['ROOT-B'].size == table['ROOT-A'].size
def CreateVMImage(image=None, board=None, updatable=True, dest_dir=None):
"""Returns the path of the image built to run in a VM.
By default, the returned VM is a test image that can run full update
testing on it. If there exists a VM image with the matching
|updatable| setting, this method returns the path to the existing
image. If |dest_dir| is set, it will copy/create the VM image to the
|dest_dir|.
Args:
image: Path to the (non-VM) image. Defaults to None to use the latest
image for the board.
board: Board that the image was built with. If None, attempts to use the
configured default board.
updatable: Create a VM image that supports AU.
dest_dir: If set, create/copy the VM image to |dest|; otherwise,
use the folder where |image| resides.
"""
if not image and not board:
raise VMError('Cannot create VM when both image and board are None.')
image_dir = os.path.dirname(image)
src_path = dest_path = os.path.join(image_dir, constants.VM_IMAGE_BIN)
if dest_dir:
dest_path = os.path.join(dest_dir, constants.VM_IMAGE_BIN)
  # Do not create a new VM image if a matching image already exists.
  exists = os.path.exists(src_path) and (
not updatable or VMIsUpdatable(src_path))
if exists and dest_dir:
# Copy the existing VM image to dest_dir.
shutil.copyfile(src_path, dest_path)
if not exists:
# No existing VM image that we can reuse. Create a new VM image.
logging.info('Creating %s', dest_path)
cmd = [os.path.join(constants.CROSUTILS_DIR, 'image_to_vm.sh'),
'--test_image']
if image:
cmd.append('--from=%s' % path_util.ToChrootPath(image_dir))
if updatable:
cmd.extend(['--disk_layout', '2gb-rootfs-updatable'])
if board:
cmd.extend(['--board', board])
# image_to_vm.sh only runs in chroot, but dest_dir may not be
# reachable from chroot. In that case, we copy it to a temporary
# directory in chroot, and then move it to dest_dir .
tempdir = None
if dest_dir:
# Create a temporary directory in chroot to store the VM
# image. This is to avoid the case where dest_dir is not
# reachable within chroot.
tempdir = cros_build_lib.run(
['mktemp', '-d'],
capture_output=True,
enter_chroot=True).output.strip()
cmd.append('--to=%s' % tempdir)
msg = 'Failed to create the VM image'
try:
cros_build_lib.run(cmd, enter_chroot=True, cwd=constants.SOURCE_ROOT)
except cros_build_lib.RunCommandError as e:
logging.error('%s: %s', msg, e)
if tempdir:
osutils.RmDir(
path_util.FromChrootPath(tempdir), ignore_missing=True)
raise VMError(msg)
if dest_dir:
# Move VM from tempdir to dest_dir.
shutil.move(
path_util.FromChrootPath(
os.path.join(tempdir, constants.VM_IMAGE_BIN)), dest_path)
osutils.RmDir(path_util.FromChrootPath(tempdir), ignore_missing=True)
if not os.path.exists(dest_path):
raise VMError(msg)
return dest_path
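# Usage sketch (illustrative; the board name and destination directory are
# assumptions):
#
#     vm_bin = CreateVMImage(board='amd64-generic', updatable=True,
#                            dest_dir='/tmp/cros-vm')
#
# vm_bin then points at the constants.VM_IMAGE_BIN file under dest_dir.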
class VM(device.Device):
"""Class for managing a VM."""
SSH_PORT = 9222
SSH_NON_KVM_CONNECT_TIMEOUT = 120
IMAGE_FORMAT = 'raw'
# kvm_* should match kvm_intel, kvm_amd, etc.
NESTED_KVM_GLOB = '/sys/module/kvm_*/parameters/nested'
def __init__(self, opts):
"""Initialize VM.
Args:
opts: command line options.
"""
super(VM, self).__init__(opts)
self.qemu_path = opts.qemu_path
self.qemu_img_path = opts.qemu_img_path
self.qemu_bios_path = opts.qemu_bios_path
self.qemu_m = opts.qemu_m
self.qemu_cpu = opts.qemu_cpu
self.qemu_smp = opts.qemu_smp
if self.qemu_smp == 0:
self.qemu_smp = min(8, multiprocessing.cpu_count())
self.qemu_hostfwd = opts.qemu_hostfwd
self.qemu_args = opts.qemu_args
if opts.enable_kvm is None:
self.enable_kvm = os.path.exists('/dev/kvm')
else:
self.enable_kvm = opts.enable_kvm
self.copy_on_write = opts.copy_on_write
# We don't need sudo access for software emulation or if /dev/kvm is
# writeable.
self.use_sudo = self.enable_kvm and not os.access('/dev/kvm', os.W_OK)
self.display = opts.display
self.image_path = opts.image_path
self.image_format = opts.image_format
self.device = remote_access.LOCALHOST
self.ssh_port = self.ssh_port or opts.ssh_port or VM.SSH_PORT
self.start = opts.start
self.stop = opts.stop
self.chroot_path = opts.chroot_path
self.cache_dir = os.path.abspath(opts.cache_dir)
assert os.path.isdir(self.cache_dir), "Cache directory doesn't exist"
self.vm_dir = opts.vm_dir
if not self.vm_dir:
self.vm_dir = os.path.join(osutils.GetGlobalTempDir(),
'cros_vm_%d' % self.ssh_port)
self._CreateVMDir()
self.pidfile = os.path.join(self.vm_dir, 'kvm.pid')
self.kvm_monitor = os.path.join(self.vm_dir, 'kvm.monitor')
self.kvm_pipe_in = '%s.in' % self.kvm_monitor # to KVM
self.kvm_pipe_out = '%s.out' % self.kvm_monitor # from KVM
self.kvm_serial = '%s.serial' % self.kvm_monitor
self.copy_image_on_shutdown = False
self.image_copy_dir = None
# Wait 2 min for non-KVM.
connect_timeout = (VM.SSH_CONNECT_TIMEOUT if self.enable_kvm else
VM.SSH_NON_KVM_CONNECT_TIMEOUT)
self.InitRemote(connect_timeout=connect_timeout)
def _CreateVMDir(self):
"""Safely create vm_dir."""
if not osutils.SafeMakedirs(self.vm_dir):
# For security, ensure that vm_dir is not a symlink, and is owned by us.
error_str = ('VM state dir is misconfigured; please recreate: %s'
% self.vm_dir)
assert os.path.isdir(self.vm_dir), error_str
assert not os.path.islink(self.vm_dir), error_str
assert os.stat(self.vm_dir).st_uid == os.getuid(), error_str
def _CreateQcow2Image(self):
"""Creates a qcow2-formatted image in the temporary VM dir.
This image will get removed on VM shutdown.
Returns:
Tuple of (path to qcow2 image, format of qcow2 image)
"""
cow_image_path = os.path.join(self.vm_dir, 'qcow2.img')
qemu_img_args = [
self.qemu_img_path,
'create', '-f', 'qcow2',
'-o', 'backing_file=%s' % self.image_path,
cow_image_path,
]
cros_build_lib.run(qemu_img_args, dryrun=self.dryrun)
logging.info('qcow2 image created at %s.', cow_image_path)
return cow_image_path, 'qcow2'
def _RmVMDir(self):
"""Cleanup vm_dir."""
osutils.RmDir(self.vm_dir, ignore_missing=True, sudo=self.use_sudo)
@memoize.MemoizedSingleCall
def QemuVersion(self):
"""Determine QEMU version.
Returns:
QEMU version.
"""
version_str = cros_build_lib.run([self.qemu_path, '--version'],
capture_output=True, dryrun=self.dryrun,
encoding='utf-8').stdout
# version string looks like one of these:
# QEMU emulator version 2.0.0 (Debian 2.0.0+dfsg-2ubuntu1.36), Copyright (c)
# 2003-2008 <NAME>
#
# QEMU emulator version 2.6.0, Copyright (c) 2003-2008 <NAME>
#
# qemu-x86_64 version 2.10.1
# Copyright (c) 2003-2017 <NAME> and the QEMU Project developers
m = re.search(r'version ([0-9.]+)', version_str)
if not m:
raise VMError('Unable to determine QEMU version from:\n%s.' % version_str)
return m.group(1)
def _CheckQemuMinVersion(self):
"""Ensure minimum QEMU version."""
if self.dryrun:
return
min_qemu_version = '2.6.0'
logging.info('QEMU version %s', self.QemuVersion())
LooseVersion = distutils.version.LooseVersion
if LooseVersion(self.QemuVersion()) < LooseVersion(min_qemu_version):
raise VMError('QEMU %s is the minimum supported version. You have %s.'
% (min_qemu_version, self.QemuVersion()))
def _SetQemuPath(self):
"""Find a suitable Qemu executable."""
qemu_exe = 'qemu-system-x86_64'
qemu_exe_path = os.path.join('usr/bin', qemu_exe)
# Check SDK cache.
if not self.qemu_path:
qemu_dir = cros_chrome_sdk.SDKFetcher.GetCachePath(
cros_chrome_sdk.SDKFetcher.QEMU_BIN_PATH, self.cache_dir, self.board)
if qemu_dir:
qemu_path = os.path.join(qemu_dir, qemu_exe_path)
if os.path.isfile(qemu_path):
self.qemu_path = qemu_path
# Check chroot.
if not self.qemu_path:
qemu_path = os.path.join(self.chroot_path, qemu_exe_path)
if os.path.isfile(qemu_path):
self.qemu_path = qemu_path
# Check system.
if not self.qemu_path:
logging.warning('Using system QEMU.')
self.qemu_path = osutils.Which(qemu_exe)
if not self.qemu_path or not os.path.isfile(self.qemu_path):
raise VMError('QEMU not found.')
if self.copy_on_write:
if not self.qemu_img_path:
# Look for qemu-img right next to qemu-system-x86_64.
self.qemu_img_path = os.path.join(
os.path.dirname(self.qemu_path), 'qemu-img')
if not os.path.isfile(self.qemu_img_path):
raise VMError('qemu-img not found. (Needed to create qcow2 image).')
logging.debug('QEMU path: %s', self.qemu_path)
self._CheckQemuMinVersion()
def _GetBuiltVMImagePath(self):
"""Get path of a locally built VM image."""
vm_image_path = os.path.join(
constants.SOURCE_ROOT, 'src/build/images', self.board,
'latest', constants.VM_IMAGE_BIN)
return vm_image_path if os.path.isfile(vm_image_path) else None
def _GetCacheVMImagePath(self):
"""Get path of a cached VM image."""
cache_path = cros_chrome_sdk.SDKFetcher.GetCachePath(
constants.VM_IMAGE_TAR, self.cache_dir, self.board)
if cache_path:
vm_image = os.path.join(cache_path, constants.VM_IMAGE_BIN)
if os.path.isfile(vm_image):
return vm_image
return None
def _SetVMImagePath(self):
"""Detect VM image path in SDK and chroot."""
if not self.image_path:
self.image_path = (self._GetCacheVMImagePath() or
self._GetBuiltVMImagePath())
if not self.image_path:
raise VMError('No VM image found. Use cros chrome-sdk --download-vm.')
if not os.path.isfile(self.image_path):
# Checks if the image path points to a directory containing the bin file.
image_path = os.path.join(self.image_path, constants.VM_IMAGE_BIN)
if os.path.isfile(image_path):
self.image_path = image_path
else:
raise VMError('VM image does not exist: %s' % self.image_path)
logging.debug('VM image path: %s', self.image_path)
def _SetBoard(self):
"""Sets the board.
Picks the first non-None board from the user-specified board,
SDK environment variable, cros default board.
Raises:
DieSystemExit: If a board cannot be found.
"""
if self.board:
return
sdk_board_env = os.environ.get(cros_chrome_sdk.SDKFetcher.SDK_BOARD_ENV)
self.board = cros_build_lib.GetBoard(sdk_board_env, strict=True)
def _WaitForSSHPort(self, sleep=5):
"""Wait for SSH port to become available."""
class _SSHPortInUseError(Exception):
"""Exception for _CheckSSHPortBusy to throw."""
def _CheckSSHPortBusy(ssh_port):
"""Check if the SSH port is in use."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.bind((remote_access.LOCALHOST_IP, ssh_port))
except socket.error as e:
if e.errno == errno.EADDRINUSE:
logging.info('SSH port %d in use...', self.ssh_port)
raise _SSHPortInUseError()
finally:
sock.close()
try:
retry_util.RetryException(
exception=_SSHPortInUseError,
max_retry=10,
functor=lambda: _CheckSSHPortBusy(self.ssh_port),
sleep=sleep)
except _SSHPortInUseError:
raise VMError('SSH port %d in use' % self.ssh_port)
def Run(self):
"""Performs an action, one of start, stop, or run a command in the VM."""
if not self.start and not self.stop and not self.cmd:
raise VMError('Must specify one of start, stop, or cmd.')
if self.start:
self.Start()
if self.cmd:
if not self.IsRunning():
raise VMError('VM not running.')
self.remote_run(self.cmd, stream_output=True)
if self.stop:
self.Stop()
def _GetQemuArgs(self, image_path, image_format):
"""Returns the args to qemu used to launch the VM.
Args:
image_path: Path to QEMU image.
image_format: Format of the image.
"""
# Append 'check' to warn if the requested CPU is not fully supported.
if 'check' not in self.qemu_cpu.split(','):
self.qemu_cpu += ',check'
# Append 'vmx=on' if the host supports nested virtualization. It can be
# enabled via 'vmx+' or 'vmx=on' (or similarly disabled) so just test for
# the presence of 'vmx'. For more details, see:
# https://www.kernel.org/doc/Documentation/virtual/kvm/nested-vmx.txt
if 'vmx' not in self.qemu_cpu and self.enable_kvm:
for f in glob.glob(self.NESTED_KVM_GLOB):
if cros_build_lib.BooleanShellValue(osutils.ReadFile(f).strip(), False):
self.qemu_cpu += ',vmx=on'
break
qemu_args = [self.qemu_path]
if self.qemu_bios_path:
if not os.path.isdir(self.qemu_bios_path):
raise VMError('Invalid QEMU bios path: %s' % self.qemu_bios_path)
qemu_args += ['-L', self.qemu_bios_path]
qemu_args += [
'-m', self.qemu_m, '-smp', str(self.qemu_smp), '-vga', 'virtio',
'-daemonize',
'-pidfile', self.pidfile,
'-chardev', 'pipe,id=control_pipe,path=%s' % self.kvm_monitor,
'-serial', 'file:%s' % self.kvm_serial,
'-mon', 'chardev=control_pipe',
'-cpu', self.qemu_cpu,
'-usb', '-device', 'usb-tablet',
'-device', 'virtio-net,netdev=eth0',
'-device', 'virtio-scsi-pci,id=scsi',
'-device', 'virtio-rng',
'-device', 'scsi-hd,drive=hd',
'-drive', 'if=none,id=hd,file=%s,cache=unsafe,format=%s'
% (image_path, image_format),
]
# netdev args, including hostfwds.
netdev_args = ('user,id=eth0,net=10.0.2.0/27,hostfwd=tcp:%s:%d-:%d'
% (remote_access.LOCALHOST_IP, self.ssh_port,
remote_access.DEFAULT_SSH_PORT))
if self.qemu_hostfwd:
for hostfwd in self.qemu_hostfwd:
netdev_args += ',hostfwd=%s' % hostfwd
qemu_args += ['-netdev', netdev_args]
if self.qemu_args:
for arg in self.qemu_args:
qemu_args += arg.split()
if self.enable_kvm:
qemu_args += ['-enable-kvm']
if not self.display:
qemu_args += ['-display', 'none']
return qemu_args
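  # Example (illustrative): with ssh_port=9222 and an extra
  # --qemu-hostfwd tcp:127.0.0.1:8080-:80, the netdev argument built above is
  #   user,id=eth0,net=10.0.2.0/27,hostfwd=tcp:127.0.0.1:9222-:22,hostfwd=tcp:127.0.0.1:8080-:80
  # (assuming remote_access.LOCALHOST_IP is 127.0.0.1 and
  # remote_access.DEFAULT_SSH_PORT is 22).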
def Start(self, retries=1):
"""Start the VM.
Args:
retries: Number of times to retry launching the VM if it fails to boot-up.
"""
if not self.enable_kvm:
logging.warning('KVM is not supported; Chrome VM will be slow')
self._SetBoard()
self._SetQemuPath()
self._SetVMImagePath()
logging.info('Pid file: %s', self.pidfile)
for attempt in range(0, retries + 1):
self.Stop()
logging.debug('Start VM, attempt #%d', attempt)
self._CreateVMDir()
image_path = self.image_path
image_format = self.image_format
if self.copy_on_write:
image_path, image_format = self._CreateQcow2Image()
qemu_args = self._GetQemuArgs(image_path, image_format)
# Make sure we can read these files later on by creating them as
# ourselves.
osutils.Touch(self.kvm_serial)
for pipe in [self.kvm_pipe_in, self.kvm_pipe_out]:
os.mkfifo(pipe, 0o600)
osutils.Touch(self.pidfile)
# Add use_sudo support to cros_build_lib.run.
run = cros_build_lib.sudo_run if self.use_sudo else cros_build_lib.run
run(qemu_args, dryrun=self.dryrun)
try:
self.WaitForBoot()
return
except device.DeviceError:
if attempt == retries:
raise
else:
logging.warning('Error when launching VM. Retrying...')
def _GetVMPid(self):
"""Get the pid of the VM.
Returns:
pid of the VM.
"""
if not os.path.exists(self.vm_dir):
logging.debug('%s not present.', self.vm_dir)
return 0
if not os.path.exists(self.pidfile):
logging.info('%s does not exist.', self.pidfile)
return 0
pid = osutils.ReadFile(self.pidfile).rstrip()
if not pid.isdigit():
# Ignore blank/empty files.
if pid:
logging.error('%s in %s is not a pid.', pid, self.pidfile)
return 0
return int(pid)
def IsRunning(self):
"""Returns True if there's a running VM.
Returns:
True if there's a running VM.
"""
pid = self._GetVMPid()
if not pid:
return False
# Make sure the process actually exists.
return os.path.isdir('/proc/%i' % pid)
def SaveVMImageOnShutdown(self, output_dir):
"""Takes a VM snapshot via savevm and signals to save the VM image later.
Args:
output_dir: A path specifying the directory that the VM image should be
saved to.
"""
logging.debug('Taking VM snapshot')
self.copy_image_on_shutdown = True
self.image_copy_dir = output_dir
if not self.copy_on_write:
logging.warning(
'Attempting to take a VM snapshot without --copy-on-write. Saved '
'VM image may not contain the desired snapshot.')
with open(self.kvm_pipe_in, 'w') as monitor_pipe:
# Saving the snapshot will take an indeterminate amount of time, so also
# send a fake command that the monitor will complain about so we can know
# when the snapshot saving is done.
monitor_pipe.write('savevm chromite_lib_vm_snapshot\n')
monitor_pipe.write('thisisafakecommand\n')
with open(self.kvm_pipe_out) as monitor_pipe:
# Set reads to be non-blocking
fd = monitor_pipe.fileno()
cur_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, cur_flags | os.O_NONBLOCK)
# 30 second timeout.
success = False
start_time = time.time()
while time.time() - start_time < 30:
try:
line = monitor_pipe.readline()
if 'thisisafakecommand' in line:
logging.debug('Finished attempting to take VM snapshot')
success = True
break
logging.debug('VM monitor output: %s', line)
except IOError:
time.sleep(1)
if not success:
logging.warning('Timed out trying to take VM snapshot')
def _KillVM(self):
"""Kill the VM process."""
pid = self._GetVMPid()
if pid:
# Add use_sudo support to cros_build_lib.run.
run = cros_build_lib.sudo_run if self.use_sudo else cros_build_lib.run
run(['kill', '-9', str(pid)], check=False, dryrun=self.dryrun)
def _MaybeCopyVMImage(self):
"""Saves the VM image to a location on disk if previously told to."""
if not self.copy_image_on_shutdown:
return
if not self.image_copy_dir:
logging.debug('Told to copy VM image, but no output directory set')
return
shutil.copy(self.image_path, os.path.join(
self.image_copy_dir, os.path.basename(self.image_path)))
def Stop(self):
"""Stop the VM."""
logging.debug('Stop VM')
self._KillVM()
self._WaitForSSHPort()
self._MaybeCopyVMImage()
self._RmVMDir()
def _WaitForProcs(self, sleep=2):
"""Wait for expected processes to launch."""
class _TooFewPidsException(Exception):
"""Exception for _GetRunningPids to throw."""
def _GetRunningPids(exe, numpids):
pids = self.remote.GetRunningPids(exe, full_path=False)
logging.info('%s pids: %s', exe, repr(pids))
if len(pids) < numpids:
raise _TooFewPidsException()
def _WaitForProc(exe, numpids):
try:
retry_util.RetryException(
exception=_TooFewPidsException,
max_retry=5,
functor=lambda: _GetRunningPids(exe, numpids),
sleep=sleep)
except _TooFewPidsException:
raise VMError('_WaitForProcs failed: timed out while waiting for '
'%d %s processes to start.' % (numpids, exe))
# We could also wait for session_manager, nacl_helper, etc, but chrome is
# the long pole. We expect the parent, 2 zygotes, gpu-process,
# utility-process, 3 renderers.
_WaitForProc('chrome', 8)
def WaitForBoot(self, max_retry=3, sleep=5):
"""Wait for the VM to boot up.
Wait for ssh connection to become active, and wait for all expected chrome
processes to be launched. Set max_retry to a lower value since we can easily
restart the VM if something is stuck and timing out.
"""
if not os.path.exists(self.vm_dir):
self.Start()
super(VM, self).WaitForBoot(sleep=sleep, max_retry=max_retry)
# Chrome can take a while to start with software emulation.
if not self.enable_kvm:
self._WaitForProcs()
@staticmethod
def GetParser():
"""Parse a list of args.
Args:
argv: list of command line arguments.
Returns:
List of parsed opts.
"""
parser = device.Device.GetParser()
parser.add_argument('--start', action='store_true', default=False,
help='Start the VM.')
parser.add_argument('--stop', action='store_true', default=False,
help='Stop the VM.')
parser.add_argument('--image-path', type='path',
help='Path to VM image to launch with --start.')
parser.add_argument('--image-format', default=VM.IMAGE_FORMAT,
help='Format of the VM image (raw, qcow2, ...).')
parser.add_argument('--qemu-path', type='path',
help='Path of qemu binary to launch with --start.')
parser.add_argument('--qemu-m', type=str, default='8G',
help='Memory argument that will be passed to qemu.')
parser.add_argument('--qemu-smp', type=int, default='0',
help='SMP argument that will be passed to qemu. (0 '
'means auto-detection.)')
# TODO(pwang): replace SandyBridge to Haswell-noTSX once lab machine
# running VMTest all migrate to GCE.
parser.add_argument('--qemu-cpu', type=str,
default='SandyBridge,-invpcid,-tsc-deadline',
help='CPU argument that will be passed to qemu.')
parser.add_argument('--qemu-bios-path', type='path',
help='Path of directory with qemu bios files.')
parser.add_argument('--qemu-hostfwd', action='append',
help='Ports to forward from the VM to the host in the '
'QEMU hostfwd format, eg tcp:127.0.0.1:12345-:54321 to '
'forward port 54321 on the VM to 12345 on the host.')
parser.add_argument('--qemu-args', action='append',
help='Additional args to pass to qemu.')
parser.add_argument('--copy-on-write', action='store_true', default=False,
help='Generates a temporary copy-on-write image backed '
'by the normal boot image. All filesystem changes '
'will instead be reflected in the temporary '
'image.')
parser.add_argument('--qemu-img-path', type='path',
help='Path to qemu-img binary used to create temporary '
'copy-on-write images.')
parser.add_argument('--disable-kvm', dest='enable_kvm',
action='store_false', default=None,
help='Disable KVM, use software emulation.')
parser.add_argument('--no-display', dest='display',
action='store_false', default=True,
help='Do not display video output.')
parser.add_argument('--ssh-port', type=int,
help='ssh port to communicate with VM.')
parser.add_argument('--chroot-path', type='path',
default=os.path.join(constants.SOURCE_ROOT,
constants.DEFAULT_CHROOT_DIR))
parser.add_argument('--cache-dir', type='path',
default=path_util.GetCacheDir(),
help='Cache directory to use.')
parser.add_argument('--vm-dir', type='path',
help='Temp VM directory to use.')
return parser
| [
"chromite.lib.osutils.Touch",
"os.getuid",
"chromite.lib.cros_logging.debug",
"fcntl.fcntl",
"multiprocessing.cpu_count",
"time.sleep",
"chromite.lib.osutils.Which",
"os.path.islink",
"chromite.lib.osutils.SafeMakedirs",
"re.search",
"os.path.exists",
"chromite.lib.cros_build_lib.run",
"chromite.cli.cros.cros_chrome_sdk.SDKFetcher.GetCachePath",
"chromite.lib.path_util.GetCacheDir",
"chromite.lib.osutils.GetGlobalTempDir",
"chromite.lib.path_util.ToChrootPath",
"os.path.isdir",
"chromite.lib.osutils.ReadFile",
"chromite.lib.cros_logging.warning",
"glob.glob",
"chromite.lib.image_lib.GetImageDiskPartitionInfo",
"os.access",
"os.path.isfile",
"os.path.dirname",
"shutil.copyfile",
"chromite.lib.device.Device.GetParser",
"chromite.lib.cros_logging.error",
"time.time",
"chromite.lib.path_util.FromChrootPath",
"chromite.lib.cros_build_lib.GetBoard",
"socket.socket",
"os.path.join",
"os.environ.get",
"chromite.lib.osutils.RmDir",
"os.path.basename",
"os.mkfifo",
"os.path.abspath",
"os.stat",
"chromite.lib.cros_logging.info"
] | [((2360, 2382), 'os.path.dirname', 'os.path.dirname', (['image'], {}), '(image)\n', (2375, 2382), False, 'import os\n'), ((2408, 2455), 'os.path.join', 'os.path.join', (['image_dir', 'constants.VM_IMAGE_BIN'], {}), '(image_dir, constants.VM_IMAGE_BIN)\n', (2420, 2455), False, 'import os\n'), ((2488, 2534), 'os.path.join', 'os.path.join', (['dest_dir', 'constants.VM_IMAGE_BIN'], {}), '(dest_dir, constants.VM_IMAGE_BIN)\n', (2500, 2534), False, 'import os\n'), ((2633, 2657), 'os.path.exists', 'os.path.exists', (['src_path'], {}), '(src_path)\n', (2647, 2657), False, 'import os\n'), ((2789, 2825), 'shutil.copyfile', 'shutil.copyfile', (['src_path', 'dest_path'], {}), '(src_path, dest_path)\n', (2804, 2825), False, 'import shutil\n'), ((2917, 2955), 'chromite.lib.cros_logging.info', 'logging.info', (['"""Creating %s"""', 'dest_path'], {}), "('Creating %s', dest_path)\n", (2929, 2955), True, 'from chromite.lib import cros_logging as logging\n'), ((4451, 4476), 'os.path.exists', 'os.path.exists', (['dest_path'], {}), '(dest_path)\n', (4465, 4476), False, 'import os\n'), ((5974, 6005), 'os.path.abspath', 'os.path.abspath', (['opts.cache_dir'], {}), '(opts.cache_dir)\n', (5989, 6005), False, 'import os\n'), ((6017, 6046), 'os.path.isdir', 'os.path.isdir', (['self.cache_dir'], {}), '(self.cache_dir)\n', (6030, 6046), False, 'import os\n'), ((6303, 6339), 'os.path.join', 'os.path.join', (['self.vm_dir', '"""kvm.pid"""'], {}), "(self.vm_dir, 'kvm.pid')\n", (6315, 6339), False, 'import os\n'), ((6363, 6403), 'os.path.join', 'os.path.join', (['self.vm_dir', '"""kvm.monitor"""'], {}), "(self.vm_dir, 'kvm.monitor')\n", (6375, 6403), False, 'import os\n'), ((7574, 7612), 'os.path.join', 'os.path.join', (['self.vm_dir', '"""qcow2.img"""'], {}), "(self.vm_dir, 'qcow2.img')\n", (7586, 7612), False, 'import os\n'), ((7781, 7834), 'chromite.lib.cros_build_lib.run', 'cros_build_lib.run', (['qemu_img_args'], {'dryrun': 'self.dryrun'}), '(qemu_img_args, dryrun=self.dryrun)\n', (7799, 7834), False, 'from chromite.lib import cros_build_lib\n'), ((7839, 7897), 'chromite.lib.cros_logging.info', 'logging.info', (['"""qcow2 image created at %s."""', 'cow_image_path'], {}), "('qcow2 image created at %s.', cow_image_path)\n", (7851, 7897), True, 'from chromite.lib import cros_logging as logging\n'), ((7986, 8053), 'chromite.lib.osutils.RmDir', 'osutils.RmDir', (['self.vm_dir'], {'ignore_missing': '(True)', 'sudo': 'self.use_sudo'}), '(self.vm_dir, ignore_missing=True, sudo=self.use_sudo)\n', (7999, 8053), False, 'from chromite.lib import osutils\n'), ((8729, 8772), 're.search', 're.search', (['"""version ([0-9.]+)"""', 'version_str'], {}), "('version ([0-9.]+)', version_str)\n", (8738, 8772), False, 'import re\n'), ((9474, 9507), 'os.path.join', 'os.path.join', (['"""usr/bin"""', 'qemu_exe'], {}), "('usr/bin', qemu_exe)\n", (9486, 9507), False, 'import os\n'), ((10630, 10676), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""QEMU path: %s"""', 'self.qemu_path'], {}), "('QEMU path: %s', self.qemu_path)\n", (10643, 10676), True, 'from chromite.lib import cros_logging as logging\n'), ((10812, 10917), 'os.path.join', 'os.path.join', (['constants.SOURCE_ROOT', '"""src/build/images"""', 'self.board', '"""latest"""', 'constants.VM_IMAGE_BIN'], {}), "(constants.SOURCE_ROOT, 'src/build/images', self.board,\n 'latest', constants.VM_IMAGE_BIN)\n", (10824, 10917), False, 'import os\n'), ((11092, 11188), 'chromite.cli.cros.cros_chrome_sdk.SDKFetcher.GetCachePath', 'cros_chrome_sdk.SDKFetcher.GetCachePath', (['constants.VM_IMAGE_TAR', 'self.cache_dir', 'self.board'], {}), '(constants.VM_IMAGE_TAR, self.\n cache_dir, self.board)\n', (11131, 11188), False, 'from chromite.cli.cros import cros_chrome_sdk\n'), ((12034, 12085), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""VM image path: %s"""', 'self.image_path'], {}), "('VM image path: %s', self.image_path)\n", (12047, 12085), True, 'from chromite.lib import cros_logging as logging\n'), ((12372, 12428), 'os.environ.get', 'os.environ.get', (['cros_chrome_sdk.SDKFetcher.SDK_BOARD_ENV'], {}), '(cros_chrome_sdk.SDKFetcher.SDK_BOARD_ENV)\n', (12386, 12428), False, 'import os\n'), ((12446, 12497), 'chromite.lib.cros_build_lib.GetBoard', 'cros_build_lib.GetBoard', (['sdk_board_env'], {'strict': '(True)'}), '(sdk_board_env, strict=True)\n', (12469, 12497), False, 'from chromite.lib import cros_build_lib\n'), ((16526, 16568), 'chromite.lib.cros_logging.info', 'logging.info', (['"""Pid file: %s"""', 'self.pidfile'], {}), "('Pid file: %s', self.pidfile)\n", (16538, 16568), True, 'from chromite.lib import cros_logging as logging\n'), ((18360, 18391), 'os.path.isdir', 'os.path.isdir', (["('/proc/%i' % pid)"], {}), "('/proc/%i' % pid)\n", (18373, 18391), False, 'import os\n'), ((18639, 18674), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""Taking VM snapshot"""'], {}), "('Taking VM snapshot')\n", (18652, 18674), True, 'from chromite.lib import cros_logging as logging\n'), ((20790, 20814), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""Stop VM"""'], {}), "('Stop VM')\n", (20803, 20814), True, 'from chromite.lib import cros_logging as logging\n'), ((22675, 22700), 'chromite.lib.device.Device.GetParser', 'device.Device.GetParser', ([], {}), '()\n', (22698, 22700), False, 'from chromite.lib import device\n'), ((1254, 1295), 'chromite.lib.image_lib.GetImageDiskPartitionInfo', 'image_lib.GetImageDiskPartitionInfo', (['path'], {}), '(path)\n', (1289, 1295), False, 'from chromite.lib import image_lib\n'), ((2967, 3022), 'os.path.join', 'os.path.join', (['constants.CROSUTILS_DIR', '"""image_to_vm.sh"""'], {}), "(constants.CROSUTILS_DIR, 'image_to_vm.sh')\n", (2979, 3022), False, 'import os\n'), ((3887, 3956), 'chromite.lib.cros_build_lib.run', 'cros_build_lib.run', (['cmd'], {'enter_chroot': '(True)', 'cwd': 'constants.SOURCE_ROOT'}), '(cmd, enter_chroot=True, cwd=constants.SOURCE_ROOT)\n', (3905, 3956), False, 'from chromite.lib import cros_build_lib\n'), ((5349, 5375), 'os.path.exists', 'os.path.exists', (['"""/dev/kvm"""'], {}), "('/dev/kvm')\n", (5363, 5375), False, 'import os\n'), ((6932, 6965), 'chromite.lib.osutils.SafeMakedirs', 'osutils.SafeMakedirs', (['self.vm_dir'], {}), '(self.vm_dir)\n', (6952, 6965), False, 'from chromite.lib import osutils\n'), ((7165, 7191), 'os.path.isdir', 'os.path.isdir', (['self.vm_dir'], {}), '(self.vm_dir)\n', (7178, 7191), False, 'import os\n'), ((8201, 8313), 'chromite.lib.cros_build_lib.run', 'cros_build_lib.run', (["[self.qemu_path, '--version']"], {'capture_output': '(True)', 'dryrun': 'self.dryrun', 'encoding': '"""utf-8"""'}), "([self.qemu_path, '--version'], capture_output=True,\n dryrun=self.dryrun, encoding='utf-8')\n", (8219, 8313), False, 'from chromite.lib import cros_build_lib\n'), ((9576, 9690), 'chromite.cli.cros.cros_chrome_sdk.SDKFetcher.GetCachePath', 'cros_chrome_sdk.SDKFetcher.GetCachePath', (['cros_chrome_sdk.SDKFetcher.QEMU_BIN_PATH', 'self.cache_dir', 'self.board'], {}), '(cros_chrome_sdk.SDKFetcher.\n QEMU_BIN_PATH, self.cache_dir, self.board)\n', (9615, 9690), False, 'from chromite.cli.cros import cros_chrome_sdk\n'), ((9915, 9960), 'os.path.join', 'os.path.join', (['self.chroot_path', 'qemu_exe_path'], {}), '(self.chroot_path, qemu_exe_path)\n', (9927, 9960), False, 'import os\n'), ((9970, 9995), 'os.path.isfile', 'os.path.isfile', (['qemu_path'], {}), '(qemu_path)\n', (9984, 9995), False, 'import os\n'), ((10086, 10123), 'chromite.lib.cros_logging.warning', 'logging.warning', (['"""Using system QEMU."""'], {}), "('Using system QEMU.')\n", (10101, 10123), True, 'from chromite.lib import cros_logging as logging\n'), ((10147, 10170), 'chromite.lib.osutils.Which', 'osutils.Which', (['qemu_exe'], {}), '(qemu_exe)\n', (10160, 10170), False, 'from chromite.lib import osutils\n'), ((10959, 10988), 'os.path.isfile', 'os.path.isfile', (['vm_image_path'], {}), '(vm_image_path)\n', (10973, 10988), False, 'import os\n'), ((11229, 11277), 'os.path.join', 'os.path.join', (['cache_path', 'constants.VM_IMAGE_BIN'], {}), '(cache_path, constants.VM_IMAGE_BIN)\n', (11241, 11277), False, 'import os\n'), ((11287, 11311), 'os.path.isfile', 'os.path.isfile', (['vm_image'], {}), '(vm_image)\n', (11301, 11311), False, 'import os\n'), ((11687, 11718), 'os.path.isfile', 'os.path.isfile', (['self.image_path'], {}), '(self.image_path)\n', (11701, 11718), False, 'import os\n'), ((11819, 11872), 'os.path.join', 'os.path.join', (['self.image_path', 'constants.VM_IMAGE_BIN'], {}), '(self.image_path, constants.VM_IMAGE_BIN)\n', (11831, 11872), False, 'import os\n'), ((11882, 11908), 'os.path.isfile', 'os.path.isfile', (['image_path'], {}), '(image_path)\n', (11896, 11908), False, 'import os\n'), ((12777, 12826), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (12790, 12826), False, 'import socket\n'), ((14527, 14558), 'glob.glob', 'glob.glob', (['self.NESTED_KVM_GLOB'], {}), '(self.NESTED_KVM_GLOB)\n', (14536, 14558), False, 'import glob\n'), ((16386, 16449), 'chromite.lib.cros_logging.warning', 'logging.warning', (['"""KVM is not supported; Chrome VM will be slow"""'], {}), "('KVM is not supported; Chrome VM will be slow')\n", (16401, 16449), True, 'from chromite.lib import cros_logging as logging\n'), ((16637, 16684), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""Start VM, attempt #%d"""', 'attempt'], {}), "('Start VM, attempt #%d', attempt)\n", (16650, 16684), True, 'from chromite.lib import cros_logging as logging\n'), ((17033, 17063), 'chromite.lib.osutils.Touch', 'osutils.Touch', (['self.kvm_serial'], {}), '(self.kvm_serial)\n', (17046, 17063), False, 'from chromite.lib import osutils\n'), ((17158, 17185), 'chromite.lib.osutils.Touch', 'osutils.Touch', (['self.pidfile'], {}), '(self.pidfile)\n', (17171, 17185), False, 'from chromite.lib import osutils\n'), ((17678, 17705), 'os.path.exists', 'os.path.exists', (['self.vm_dir'], {}), '(self.vm_dir)\n', (17692, 17705), False, 'import os\n'), ((17713, 17758), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""%s not present."""', 'self.vm_dir'], {}), "('%s not present.', self.vm_dir)\n", (17726, 17758), True, 'from chromite.lib import cros_logging as logging\n'), ((17786, 17814), 'os.path.exists', 'os.path.exists', (['self.pidfile'], {}), '(self.pidfile)\n', (17800, 17814), False, 'import os\n'), ((17822, 17870), 'chromite.lib.cros_logging.info', 'logging.info', (['"""%s does not exist."""', 'self.pidfile'], {}), "('%s does not exist.', self.pidfile)\n", (17834, 17870), True, 'from chromite.lib import cros_logging as logging\n'), ((18788, 18927), 'chromite.lib.cros_logging.warning', 'logging.warning', (['"""Attempting to take a VM snapshot without --copy-on-write. Saved VM image may not contain the desired snapshot."""'], {}), "(\n 'Attempting to take a VM snapshot without --copy-on-write. Saved VM image may not contain the desired snapshot.'\n )\n", (18803, 18927), True, 'from chromite.lib import cros_logging as logging\n'), ((19446, 19476), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_GETFL'], {}), '(fd, fcntl.F_GETFL)\n', (19457, 19476), False, 'import fcntl\n'), ((19483, 19540), 'fcntl.fcntl', 'fcntl.fcntl', (['fd', 'fcntl.F_SETFL', '(cur_flags | os.O_NONBLOCK)'], {}), '(fd, fcntl.F_SETFL, cur_flags | os.O_NONBLOCK)\n', (19494, 19540), False, 'import fcntl\n'), ((19609, 19620), 'time.time', 'time.time', ([], {}), '()\n', (19618, 19620), False, 'import time\n'), ((20551, 20618), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""Told to copy VM image, but no output directory set"""'], {}), "('Told to copy VM image, but no output directory set')\n", (20564, 20618), True, 'from chromite.lib import cros_logging as logging\n'), ((22258, 22285), 'os.path.exists', 'os.path.exists', (['self.vm_dir'], {}), '(self.vm_dir)\n', (22272, 22285), False, 'import os\n'), ((4011, 4042), 'chromite.lib.cros_logging.error', 'logging.error', (['"""%s: %s"""', 'msg', 'e'], {}), "('%s: %s', msg, e)\n", (4024, 4042), True, 'from chromite.lib import cros_logging as logging\n'), ((4385, 4418), 'chromite.lib.path_util.FromChrootPath', 'path_util.FromChrootPath', (['tempdir'], {}), '(tempdir)\n', (4409, 4418), False, 'from chromite.lib import path_util\n'), ((5185, 5212), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (5210, 5212), False, 'import multiprocessing\n'), ((5604, 5634), 'os.access', 'os.access', (['"""/dev/kvm"""', 'os.W_OK'], {}), "('/dev/kvm', os.W_OK)\n", (5613, 5634), False, 'import os\n'), ((6168, 6194), 'chromite.lib.osutils.GetGlobalTempDir', 'osutils.GetGlobalTempDir', ([], {}), '()\n', (6192, 6194), False, 'from chromite.lib import osutils\n'), ((7220, 7247), 'os.path.islink', 'os.path.islink', (['self.vm_dir'], {}), '(self.vm_dir)\n', (7234, 7247), False, 'import os\n'), ((7303, 7314), 'os.getuid', 'os.getuid', ([], {}), '()\n', (7312, 7314), False, 'import os\n'), ((9736, 9773), 'os.path.join', 'os.path.join', (['qemu_dir', 'qemu_exe_path'], {}), '(qemu_dir, qemu_exe_path)\n', (9748, 9773), False, 'import os\n'), ((9785, 9810), 'os.path.isfile', 'os.path.isfile', (['qemu_path'], {}), '(qemu_path)\n', (9799, 9810), False, 'import os\n'), ((10205, 10235), 'os.path.isfile', 'os.path.isfile', (['self.qemu_path'], {}), '(self.qemu_path)\n', (10219, 10235), False, 'import os\n'), ((10512, 10546), 'os.path.isfile', 'os.path.isfile', (['self.qemu_img_path'], {}), '(self.qemu_img_path)\n', (10526, 10546), False, 'import os\n'), ((14769, 14803), 'os.path.isdir', 'os.path.isdir', (['self.qemu_bios_path'], {}), '(self.qemu_bios_path)\n', (14782, 14803), False, 'import os\n'), ((17129, 17149), 'os.mkfifo', 'os.mkfifo', (['pipe', '(384)'], {}), '(pipe, 384)\n', (17138, 17149), False, 'import os\n'), ((17897, 17927), 'chromite.lib.osutils.ReadFile', 'osutils.ReadFile', (['self.pidfile'], {}), '(self.pidfile)\n', (17913, 17927), False, 'from chromite.lib import osutils\n'), ((18019, 18077), 'chromite.lib.cros_logging.error', 'logging.error', (['"""%s in %s is not a pid."""', 'pid', 'self.pidfile'], {}), "('%s in %s is not a pid.', pid, self.pidfile)\n", (18032, 18077), True, 'from chromite.lib import cros_logging as logging\n'), ((20008, 20063), 'chromite.lib.cros_logging.warning', 'logging.warning', (['"""Timed out trying to take VM snapshot"""'], {}), "('Timed out trying to take VM snapshot')\n", (20023, 20063), True, 'from chromite.lib import cros_logging as logging\n'), ((20708, 20741), 'os.path.basename', 'os.path.basename', (['self.image_path'], {}), '(self.image_path)\n', (20724, 20741), False, 'import os\n'), ((25623, 25688), 'os.path.join', 'os.path.join', (['constants.SOURCE_ROOT', 'constants.DEFAULT_CHROOT_DIR'], {}), '(constants.SOURCE_ROOT, constants.DEFAULT_CHROOT_DIR)\n', (25635, 25688), False, 'import os\n'), ((25819, 25842), 'chromite.lib.path_util.GetCacheDir', 'path_util.GetCacheDir', ([], {}), '()\n', (25840, 25842), False, 'from chromite.lib import path_util\n'), ((3097, 3130), 'chromite.lib.path_util.ToChrootPath', 'path_util.ToChrootPath', (['image_dir'], {}), '(image_dir)\n', (3119, 3130), False, 'from chromite.lib import path_util\n'), ((4306, 4351), 'os.path.join', 'os.path.join', (['tempdir', 'constants.VM_IMAGE_BIN'], {}), '(tempdir, constants.VM_IMAGE_BIN)\n', (4318, 4351), False, 'import os\n'), ((7272, 7292), 'os.stat', 'os.stat', (['self.vm_dir'], {}), '(self.vm_dir)\n', (7279, 7292), False, 'import os\n'), ((10454, 10485), 'os.path.dirname', 'os.path.dirname', (['self.qemu_path'], {}), '(self.qemu_path)\n', (10469, 10485), False, 'import os\n'), ((19633, 19644), 'time.time', 'time.time', ([], {}), '()\n', (19642, 19644), False, 'import time\n'), ((19885, 19929), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""VM monitor output: %s"""', 'line'], {}), "('VM monitor output: %s', line)\n", (19898, 19929), True, 'from chromite.lib import cros_logging as logging\n'), ((3668, 3744), 'chromite.lib.cros_build_lib.run', 'cros_build_lib.run', (["['mktemp', '-d']"], {'capture_output': '(True)', 'enter_chroot': '(True)'}), "(['mktemp', '-d'], capture_output=True, enter_chroot=True)\n", (3686, 3744), False, 'from chromite.lib import cros_build_lib\n'), ((4096, 4129), 'chromite.lib.path_util.FromChrootPath', 'path_util.FromChrootPath', (['tempdir'], {}), '(tempdir)\n', (4120, 4129), False, 'from chromite.lib import path_util\n'), ((12978, 13030), 'chromite.lib.cros_logging.info', 'logging.info', (['"""SSH port %d in use..."""', 'self.ssh_port'], {}), "('SSH port %d in use...', self.ssh_port)\n", (12990, 13030), True, 'from chromite.lib import cros_logging as logging\n'), ((17514, 17569), 'chromite.lib.cros_logging.warning', 'logging.warning', (['"""Error when launching VM. Retrying..."""'], {}), "('Error when launching VM. Retrying...')\n", (17529, 17569), True, 'from chromite.lib import cros_logging as logging\n'), ((19773, 19829), 'chromite.lib.cros_logging.debug', 'logging.debug', (['"""Finished attempting to take VM snapshot"""'], {}), "('Finished attempting to take VM snapshot')\n", (19786, 19829), True, 'from chromite.lib import cros_logging as logging\n'), ((19964, 19977), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (19974, 19977), False, 'import time\n'), ((14604, 14623), 'chromite.lib.osutils.ReadFile', 'osutils.ReadFile', (['f'], {}), '(f)\n', (14620, 14623), False, 'from chromite.lib import osutils\n')]
from django.conf.urls import url
from django.views.generic import TemplateView
app_name = 'core'
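# app_name enables URL namespacing, so these routes can be reversed as e.g.
# 'core:home' or 'core:vaccine'; each one serves a static template through
# TemplateView without needing a dedicated view function.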
urlpatterns = [
url(
regex=r'^$',
view=TemplateView.as_view(template_name='core/home.html'),
name='home'
),
url(
regex=r'^info/$',
view=TemplateView.as_view(template_name='core/info.html'),
name='about'
),
url(
regex=r'^vaccine/$',
view=TemplateView.as_view(template_name='core/vaccine_schedule.html'),
name='vaccine'
),
url(
regex=r'^vaccine/location/$',
view=TemplateView.as_view(
template_name='core/vaccination_locations.html'
),
name='locations'
),
url(
regex=r'^help-home/$',
view=TemplateView.as_view(template_name='core/help_page.html'),
name='help_home'
),
url(
regex=r'^help-side/$',
view=TemplateView.as_view(template_name='core/help_page.html'),
name='help_side'
),
]
| [
"django.views.generic.TemplateView.as_view"
] | [((158, 210), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""core/home.html"""'}), "(template_name='core/home.html')\n", (178, 210), False, 'from django.views.generic import TemplateView\n'), ((287, 339), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""core/info.html"""'}), "(template_name='core/info.html')\n", (307, 339), False, 'from django.views.generic import TemplateView\n'), ((420, 484), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""core/vaccine_schedule.html"""'}), "(template_name='core/vaccine_schedule.html')\n", (440, 484), False, 'from django.views.generic import TemplateView\n'), ((574, 643), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""core/vaccination_locations.html"""'}), "(template_name='core/vaccination_locations.html')\n", (594, 643), False, 'from django.views.generic import TemplateView\n'), ((750, 807), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""core/help_page.html"""'}), "(template_name='core/help_page.html')\n", (770, 807), False, 'from django.views.generic import TemplateView\n'), ((894, 951), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""core/help_page.html"""'}), "(template_name='core/help_page.html')\n", (914, 951), False, 'from django.views.generic import TemplateView\n')] |
# -*- coding:utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.wait import WebDriverWait
"""
定位元素的8种方法:
el = driver.find_element_by_id 使用id值定位
el = driver.find_elements_by_xpath 使用xpath定位
el = driver.find_elements_by_tag_name 使用标签名定位
el = driver.find_elements_by_link_text 使用文本定位
el = driver.find_elements_by_partial_link_text 使用部分文本定位
el = driver.find_elements_by_name 使用name属性值定位
el = driver.find_elements_by_class_name 使用class属性值定位
el = driver.find_elements_by_css_selector 使用css选择器定位
首选定位到iframe的XPATH然后继续XPATH操作
"""
options = Options()
# Headless Chrome has version requirements: per the official docs, macOS and Linux need Chrome 59+, Windows needs Chrome 60+, and chromedriver must be 2.30+
options.add_argument('--headless')  # headless flag
# Create the browser object (pass the configured options so --headless actually takes effect)
driver = webdriver.Chrome(chrome_options=options)
# driver = webdriver.PhantomJS()
print(dir(driver))
# Send the request and fetch the result
wait = WebDriverWait(driver, timeout=10)
driver.get("http://www.baidu.com")
print("打开浏览器了!!")
html = driver.page_source # 获取源码(是elements里面的源码不是network中的)
cookies = driver.get_cookies() # 获取所有的cookie
driver.delete_cookie("name") # 删除指定名字的cookie
driver.delete_all_cookies() # 删除所有的cookie
current_url = driver.current_url # 获取请求的url
title = driver.title # 获取title标签的text
# driver.maximize_window() # 全屏操作
# 保存快照
driver.save_screenshot("baidu.jpg")
driver.get_screenshot_as_file("baidu2.png")
driver.get_screenshot_as_png() # 直接保存为png
# res = driver.find_element_by_xpath("//div")
# Locate Baidu's search box; returns an element object
more_ele = driver.find_elements_by_tag_name("a")
input_text = driver.find_element_by_id("kw")
input_text.send_keys("哈哈")
input_text.get_attribute("value")  # Read an attribute of the element
input_text.text  # Read the element's text
input_text.submit()
ret1 = driver.find_element_by_link_text("新闻")
# Locate a link whose text contains the given substring; the element must be a link
ret2 = driver.find_element_by_partial_link_text("hao")  # Note: with several matches only the first is returned, because this is find_element (singular)
# Type a string into the target element
input_text.send_keys("斗鱼")
# Locate the search button
# submitBy = driver.find_element(By.XPATH, "//a")  # rarely used
submit = driver.find_element_by_xpath("//input[@type='submit']")
submit.click()
input_text.clear()
input_text.send_keys("哈哈")
submit.click()
# Close the browser
import time
time.sleep(10)
# close() is recommended here
driver.close()
# driver.quit()
| [
"selenium.webdriver.chrome.options.Options",
"selenium.webdriver.support.wait.WebDriverWait",
"selenium.webdriver.Chrome",
"time.sleep"
] | [((669, 678), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (676, 678), False, 'from selenium.webdriver.chrome.options import Options\n'), ((866, 884), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (882, 884), False, 'from selenium import webdriver\n'), ((956, 989), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['driver'], {'timeout': '(10)'}), '(driver, timeout=10)\n', (969, 989), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((2197, 2211), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (2207, 2211), False, 'import time\n')] |
__author__ = '<NAME>'
import iptools
import ssl
from datetime import datetime, timedelta
from flask import request
from pybitcointools import serialize_script, b58check_to_hex, hex_to_b58check, deserialize_script
from pycoin.key.BIP32Node import BIP32Node
from redis import Redis
from addressimo.paymentrequest.paymentrequest_pb2 import PaymentRequest, PaymentDetails, X509Certificates
from addressimo.config import config
from addressimo.plugin import PluginManager
from addressimo.util import LogUtil
####################
# Bitcoin OP_CODES #
####################
OP_DUP = 118
OP_HASH160 = 169
OP_EQUALVERIFY = 136
OP_CHECKSIG = 172
log = LogUtil.setup_logging()
def derive_branch():
branch = iptools.ipv4.ip2long(request.remote_addr)
# This method will have a collision occasionally between two IP addresses.
if branch > 2**31:
branch %= 2**31
# Use last two octets of the IP for branch uniqueness
return branch & 0x0000ffff
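# Worked example (illustrative only, not part of the original module): for
# request.remote_addr == '10.1.2.3', ip2long yields 0x0A010203, which is below
# 2**31, so no reduction applies; masking with 0x0000ffff keeps the last two
# octets, giving branch 0x0203 == 515.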
def generate_bip32_address_from_extended_pubkey(extended_pubkey, branch, index):
ext_key = BIP32Node.from_wallet_key(extended_pubkey)
return ext_key.subkey_for_path('%d/%d' % (branch, index)).address()
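# Usage sketch (any concrete xpub value is deliberately elided): with
# branch=515 and index=0 this derives the child key at path '515/0' under the
# extended public key and returns its base58 address, so each (branch, index)
# pair yields a fresh receiving address without ever touching a private key.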
def get_certs(x509_pem_format):
certs = []
loading_cert = ''
for line in x509_pem_format.split('\n'):
if not line:
            continue
loading_cert += line
if line == '-----END CERTIFICATE-----':
if loading_cert:
der_cert = ssl.PEM_cert_to_DER_cert(loading_cert)
certs.append(der_cert)
loading_cert = ''
return certs
def generate_payment_request(crypto_addr, x509_cert, signer=None, amount=0, expires=None, memo=None, payment_url=None, merchant_data=None):
# Setup & Populate PaymentDetails
payment_details = PaymentDetails()
# Setup Single PaymentDetails Output
output = payment_details.outputs.add()
output.amount = amount * 100000000 # BTC to Satoshis
if crypto_addr[0] == '1':
output.script = serialize_script([OP_DUP, OP_HASH160, b58check_to_hex(crypto_addr), OP_EQUALVERIFY, OP_CHECKSIG]).decode('hex')
else:
try:
int(crypto_addr, 16)
output.script = str(crypto_addr).decode('hex')
except ValueError:
output.script = str(crypto_addr)
# Add current and expiration epoch time values
payment_details.time = int(datetime.utcnow().strftime('%s'))
if expires:
if isinstance(expires, int) or isinstance(expires, long):
payment_details.expires = int((datetime.utcnow() + timedelta(seconds=expires)).strftime('%s'))
        elif isinstance(expires, datetime):
payment_details.expires = int(expires.strftime('%s'))
else:
payment_details.expires = int((datetime.utcnow() + timedelta(seconds=config.bip70_default_expiration)).strftime('%s'))
# Handle Various Optional Fields in PaymentDetails
payment_details.memo = memo if memo else ''
payment_details.payment_url = payment_url if payment_url else ''
payment_details.merchant_data = str(merchant_data) if merchant_data else ''
# Setup & Populate PaymentRequest
payment_request = PaymentRequest()
payment_request.payment_details_version = 1
payment_request.serialized_payment_details = payment_details.SerializeToString()
# Set PKI Type / Data
if not x509_cert or not signer:
payment_request.pki_type = 'none'
payment_request.pki_data = ''
else:
payment_request.pki_type = signer.get_pki_type()
pki_data = X509Certificates()
for cert in get_certs(x509_cert):
pki_data.certificate.append(cert)
payment_request.pki_data = pki_data.SerializeToString()
# Sign PaymentRequest
if signer and x509_cert:
payment_request.signature = ''
payment_request.signature = signer.sign(payment_request.SerializeToString())
# Log Payment Request to Logging System
logger = PluginManager.get_plugin('LOGGER', config.logger_type)
logger.log_payment_request(crypto_addr, signer.__class__.__name__, amount, expires, memo, payment_url, merchant_data)
log.debug('Generated Payment Request [Address: %s | Signer: %s | Amount: %s | Expires: %s | Memo: %s | Payment URL: %s | Merchant Data: %s]' %
(crypto_addr, signer.__class__.__name__, amount, expires, memo, payment_url, merchant_data))
return payment_request.SerializeToString()
def get_unused_presigned_payment_request(id_obj):
redis_conn = Redis.from_url(config.redis_addr_cache_uri)
return_pr = None
used_pr = []
for pr in id_obj.presigned_payment_requests:
if any([redis_conn.get(x) for x in get_addrs_from_paymentrequest(pr.decode('hex'))]):
used_pr.append(pr)
continue
return_pr = pr
break
for pr in used_pr:
id_obj.presigned_payment_requests.remove(pr)
if used_pr:
resolver = PluginManager.get_plugin('RESOLVER', config.resolver_type)
resolver.save(id_obj)
return return_pr
def get_addrs_from_paymentrequest(pr):
ret_list = []
pr_obj = PaymentRequest()
pr_obj.ParseFromString(pr)
pd = PaymentDetails()
pd.ParseFromString(pr_obj.serialized_payment_details)
for output in pd.outputs:
script = deserialize_script(output.script)
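        # A canonical P2PKH script deserializes to [OP_DUP, OP_HASH160,
        # <20-byte pubkey hash>, OP_EQUALVERIFY, OP_CHECKSIG]; when it matches,
        # the hash at index 2 is re-encoded below into a base58check address.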
if script[0] == OP_DUP and script[1] == OP_HASH160 and script[3] == OP_EQUALVERIFY and script[4] == OP_CHECKSIG:
ret_list.append(hex_to_b58check(script[2].encode('hex')))
return ret_list
| [
"redis.Redis.from_url",
"addressimo.util.LogUtil.setup_logging",
"datetime.datetime.utcnow",
"ssl.PEM_cert_to_DER_cert",
"addressimo.paymentrequest.paymentrequest_pb2.X509Certificates",
"pybitcointools.b58check_to_hex",
"addressimo.paymentrequest.paymentrequest_pb2.PaymentDetails",
"iptools.ipv4.ip2long",
"addressimo.paymentrequest.paymentrequest_pb2.PaymentRequest",
"pycoin.key.BIP32Node.BIP32Node.from_wallet_key",
"datetime.timedelta",
"addressimo.plugin.PluginManager.get_plugin",
"pybitcointools.deserialize_script"
] | [((645, 668), 'addressimo.util.LogUtil.setup_logging', 'LogUtil.setup_logging', ([], {}), '()\n', (666, 668), False, 'from addressimo.util import LogUtil\n'), ((704, 745), 'iptools.ipv4.ip2long', 'iptools.ipv4.ip2long', (['request.remote_addr'], {}), '(request.remote_addr)\n', (724, 745), False, 'import iptools\n'), ((1060, 1102), 'pycoin.key.BIP32Node.BIP32Node.from_wallet_key', 'BIP32Node.from_wallet_key', (['extended_pubkey'], {}), '(extended_pubkey)\n', (1085, 1102), False, 'from pycoin.key.BIP32Node import BIP32Node\n'), ((1792, 1808), 'addressimo.paymentrequest.paymentrequest_pb2.PaymentDetails', 'PaymentDetails', ([], {}), '()\n', (1806, 1808), False, 'from addressimo.paymentrequest.paymentrequest_pb2 import PaymentRequest, PaymentDetails, X509Certificates\n'), ((3183, 3199), 'addressimo.paymentrequest.paymentrequest_pb2.PaymentRequest', 'PaymentRequest', ([], {}), '()\n', (3197, 3199), False, 'from addressimo.paymentrequest.paymentrequest_pb2 import PaymentRequest, PaymentDetails, X509Certificates\n'), ((3974, 4028), 'addressimo.plugin.PluginManager.get_plugin', 'PluginManager.get_plugin', (['"""LOGGER"""', 'config.logger_type'], {}), "('LOGGER', config.logger_type)\n", (3998, 4028), False, 'from addressimo.plugin import PluginManager\n'), ((4523, 4566), 'redis.Redis.from_url', 'Redis.from_url', (['config.redis_addr_cache_uri'], {}), '(config.redis_addr_cache_uri)\n', (4537, 4566), False, 'from redis import Redis\n'), ((5138, 5154), 'addressimo.paymentrequest.paymentrequest_pb2.PaymentRequest', 'PaymentRequest', ([], {}), '()\n', (5152, 5154), False, 'from addressimo.paymentrequest.paymentrequest_pb2 import PaymentRequest, PaymentDetails, X509Certificates\n'), ((5196, 5212), 'addressimo.paymentrequest.paymentrequest_pb2.PaymentDetails', 'PaymentDetails', ([], {}), '()\n', (5210, 5212), False, 'from addressimo.paymentrequest.paymentrequest_pb2 import PaymentRequest, PaymentDetails, X509Certificates\n'), ((3563, 3581), 'addressimo.paymentrequest.paymentrequest_pb2.X509Certificates', 'X509Certificates', ([], {}), '()\n', (3579, 3581), False, 'from addressimo.paymentrequest.paymentrequest_pb2 import PaymentRequest, PaymentDetails, X509Certificates\n'), ((4954, 5012), 'addressimo.plugin.PluginManager.get_plugin', 'PluginManager.get_plugin', (['"""RESOLVER"""', 'config.resolver_type'], {}), "('RESOLVER', config.resolver_type)\n", (4978, 5012), False, 'from addressimo.plugin import PluginManager\n'), ((5319, 5352), 'pybitcointools.deserialize_script', 'deserialize_script', (['output.script'], {}), '(output.script)\n', (5337, 5352), False, 'from pybitcointools import serialize_script, b58check_to_hex, hex_to_b58check, deserialize_script\n'), ((1464, 1502), 'ssl.PEM_cert_to_DER_cert', 'ssl.PEM_cert_to_DER_cert', (['loading_cert'], {}), '(loading_cert)\n', (1488, 1502), False, 'import ssl\n'), ((2388, 2405), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2403, 2405), False, 'from datetime import datetime, timedelta\n'), ((2044, 2072), 'pybitcointools.b58check_to_hex', 'b58check_to_hex', (['crypto_addr'], {}), '(crypto_addr)\n', (2059, 2072), False, 'from pybitcointools import serialize_script, b58check_to_hex, hex_to_b58check, deserialize_script\n'), ((2781, 2798), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2796, 2798), False, 'from datetime import datetime, timedelta\n'), ((2801, 2851), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'config.bip70_default_expiration'}), '(seconds=config.bip70_default_expiration)\n', (2810, 2851), False, 'from datetime import datetime, timedelta\n'), ((2548, 2565), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2563, 2565), False, 'from datetime import datetime, timedelta\n'), ((2568, 2594), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'expires'}), '(seconds=expires)\n', (2577, 2594), False, 'from datetime import datetime, timedelta\n')]
import sys
import time
import os
from PyQt5.QtWidgets import (QApplication, QWidget, QGridLayout, QGroupBox,
QToolButton, QSplitter, QVBoxLayout, QHBoxLayout,
QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView,
QLineEdit, QFileDialog, QToolTip, QComboBox)
from PyQt5.QtGui import QIcon, QFont
from PyQt5.QtCore import Qt, QSize
from model import database
# import database
class StudentPage(QWidget):
def __init__(self, stu_mes):
super().__init__()
self.focus = 0
self.stu_mes = stu_mes
self.initUI()
def initUI(self):
        # Title bar
self.titleBar = QWidget()
self.titleBar.setFixedSize(1250, 50)
self.setTitleBar()
        # Splitter
self.body = QSplitter()
self.setLeftMunu()
self.content = None
self.setContent()
self.bodyLayout = QGridLayout()
self.bodyLayout.addWidget(self.titleBar, 0, 0, 1, 7)
self.bodyLayout.addWidget(self.body, 1, 0, 7, 7)
self.setContentsMargins(0, 0, 0, 0)
self.setLayout(self.bodyLayout)
self.setFixedSize(1280, 720)
self.setMyStyle()
    # Set up the title bar
def setTitleBar(self):
self.title = QLabel()
self.title.setText('欢迎使用图书馆管理系统')
self.title.setFixedHeight(30)
self.account = QToolButton()
self.account.setIcon(QIcon('icon/person.png'))
self.account.setText(self.stu_mes['SID'])
self.account.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
self.account.setFixedHeight(20)
self.account.setEnabled(False)
self.out = QToolButton()
self.out.setText('退出')
self.out.setFixedHeight(30)
titleLayout = QHBoxLayout()
titleLayout.addSpacing(100)
titleLayout.addWidget(self.title)
titleLayout.addWidget(self.account)
titleLayout.addWidget(self.out)
self.titleBar.setLayout(titleLayout)
    # Left-hand menu bar
    def setLeftMunu(self):
        # Search button
self.bookSearch = QToolButton()
self.bookSearch.setText('图书查询')
self.bookSearch.setFixedSize(160, 50)
self.bookSearch.setIcon(QIcon('icon/book.png'))
self.bookSearch.setIconSize(QSize(30, 30))
self.bookSearch.clicked.connect(
lambda: self.switch(0, self.bookSearch))
self.bookSearch.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
        # Borrowing button
self.borrow = QToolButton()
self.borrow.setText('借阅信息')
self.borrow.setFixedSize(160, 50)
self.borrow.setIcon(QIcon('icon/borrowing.png'))
self.borrow.setIconSize(QSize(30, 30))
self.borrow.clicked.connect(lambda: self.switch(1, self.borrow))
self.borrow.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
        # Borrowing history
self.history = QToolButton()
self.history.setText('借阅历史')
self.history.setFixedSize(160, 50)
self.history.setIcon(QIcon('icon/history.png'))
self.history.setIconSize(QSize(30, 30))
self.history.clicked.connect(lambda: self.switch(2, self.history))
self.history.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
        # Personal information
self.detial = QToolButton()
self.detial.setText('个人信息')
self.detial.setFixedSize(160, 50)
self.detial.setIcon(QIcon('icon/detial.png'))
self.detial.setIconSize(QSize(30, 30))
self.detial.clicked.connect(lambda: self.switch(3, self.detial))
self.detial.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
self.btnList = [self.bookSearch,
self.borrow, self.history, self.detial]
self.layout = QVBoxLayout()
self.layout.addWidget(self.bookSearch)
self.layout.addWidget(self.borrow)
self.layout.addWidget(self.history)
self.layout.addWidget(self.detial)
self.layout.addStretch()
self.layout.setContentsMargins(0, 0, 0, 0)
self.layout.setSpacing(0)
self.menu = QGroupBox()
self.menu.setFixedSize(160, 500)
self.menu.setLayout(self.layout)
self.menu.setContentsMargins(0, 0, 0, 0)
self.body.addWidget(self.menu)
def switch(self, index, btn):
self.focus = index
for i in self.btnList:
i.setStyleSheet('''
*{
background: white;
}
QToolButton:hover{
background-color: rgba(230, 230, 230, 0.3);
}
''')
btn.setStyleSheet('''
QToolButton{
background-color: rgba(230, 230, 230, 0.7);
}
''')
self.setContent()
    # Set up the content page on the right
def setContent(self):
if self.content is not None:
self.content.deleteLater()
if self.focus == 0:
self.content = Books(self.stu_mes)
elif self.focus == 1:
self.content = BorrowingBooks(self.stu_mes)
elif self.focus == 2:
self.content = History(self.stu_mes)
else:
self.content = Detial(self.stu_mes)
self.body.addWidget(self.content)
def setMyStyle(self):
self.setStyleSheet('''
QWidget{
background-color: white;
}
''')
self.titleBar.setStyleSheet('''
QWidget{
background-color: rgba(44,44,44,1);
border:1px solid black;
border-radius: 10px;
}
''')
self.menu.setStyleSheet('''
QWidget{
border: 0px;
border-right: 1px solid rgba(227, 227, 227, 1);
}
QToolButton{
color: rgba(51, 90, 129, 1);
font-family: 微软雅黑;
font-size: 25px;
border-right: 1px solid rgba(227, 227, 227, 1);
}
QToolButton:hover{
background-color: rgba(230, 230, 230, 0.3);
}
''')
self.title.setStyleSheet('''
*{
color: white;
font-family: 微软雅黑;
font-size: 25px;
border: 0px;
}
''')
self.account.setStyleSheet('''
*{
color: white;
font-weight: 微软雅黑;
font-size: 25px;
border: 0px;
}
''')
self.out.setStyleSheet('''
QToolButton{
color: white;
border:0px;
font-size: 12px;
}
QToolButton:hover{
color: rgba(11, 145, 255, 1);
}
''')
class Books(QGroupBox):
def __init__(self, stu_mes):
super().__init__()
self.stu_mes = stu_mes
self.book_list = []
self.body = QVBoxLayout()
self.table = None
self.setTitleBar()
self.setSearchBar()
self.searchFunction()
self.setLayout(self.body)
self.setFixedSize(1100, 600)
self.setMyStyle()
    # Title bar
def setTitleBar(self):
self.title = QLabel()
self.title.setText('书籍信息')
self.title.setFixedHeight(25)
titleLayout = QHBoxLayout()
titleLayout.addSpacing(50)
titleLayout.addWidget(self.title)
self.titleBar = QWidget()
self.titleBar.setFixedSize(900, 50)
self.titleBar.setLayout(titleLayout)
self.body.addWidget(self.titleBar)
    # Set up the search bar
def setSearchBar(self):
self.selectBox = QComboBox()
self.selectBox.addItems(['书号', '分类', '出版社', '作者', '书名'])
self.selectBox.setFixedHeight(30)
self.searchTitle = QLabel()
self.searchTitle.setText('搜索书籍')
self.searchInput = QLineEdit()
self.searchInput.setText('')
self.searchInput.setClearButtonEnabled(True)
self.searchInput.setFixedSize(400, 40)
self.searchButton = QToolButton()
self.searchButton.setFixedSize(100, 40)
self.searchButton.setText('搜索')
self.searchButton.clicked.connect(self.searchFunction)
searchLayout = QHBoxLayout()
searchLayout.addStretch()
searchLayout.addWidget(self.selectBox)
searchLayout.addWidget(self.searchTitle)
searchLayout.addWidget(self.searchInput)
searchLayout.addWidget(self.searchButton)
searchLayout.addStretch()
self.searchWidget = QWidget()
self.searchWidget.setLayout(searchLayout)
self.body.addWidget(self.searchWidget)
    # Search handler
def searchFunction(self):
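        # Map the combo box's display label to the matching database column;
        # the empty-string key falls back to searching by book title (BNAME).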
convert = {'书号': 'BID', '分类': 'CLASSIFICATION', '出版社': 'PRESS', '作者': 'AUTHOR', '书名': 'BNAME', '': 'BNAME'}
self.book_list = database.search_book(self.searchInput.text(), convert[self.selectBox.currentText()], self.stu_mes['SID'])
if self.book_list == []:
            print('No results found')
if self.table is not None:
self.table.deleteLater()
self.setTable()
    # Set up the table
def setTable(self):
self.table = QTableWidget(1, 9)
self.table.setContentsMargins(10, 10, 10, 10)
self.table.verticalHeader().setVisible(False)
self.table.horizontalHeader().setVisible(False)
self.table.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.table.setFocusPolicy(Qt.NoFocus)
self.table.setColumnWidth(0, 80)
# self.table.setColumnWidth(1, 150)
# self.table.setColumnWidth(2, 125)
# self.table.setColumnWidth(3, 125)
# self.table.setColumnWidth(4, 100)
self.table.setColumnWidth(6, 80)
self.table.setItem(0, 0, QTableWidgetItem('书号'))
self.table.setItem(0, 1, QTableWidgetItem('书名'))
self.table.setItem(0, 2, QTableWidgetItem('作者'))
self.table.setItem(0, 3, QTableWidgetItem('出版日期'))
self.table.setItem(0, 4, QTableWidgetItem('出版社'))
self.table.setItem(0, 5, QTableWidgetItem('分类'))
self.table.setItem(0, 6, QTableWidgetItem('位置'))
self.table.setItem(0, 7, QTableWidgetItem('总数/剩余'))
self.table.setItem(0, 8, QTableWidgetItem('操作'))
for i in range(9):
self.table.item(0, i).setTextAlignment(Qt.AlignCenter)
self.table.item(0, i).setFont(QFont('微软雅黑', 15))
        # Populate a row for each matched book
for i in self.book_list:
self.insertRow(i)
self.body.addWidget(self.table)
    # Insert a row
def insertRow(self, val: list):
itemBID = QTableWidgetItem(val[0])
itemBID.setTextAlignment(Qt.AlignCenter)
itemNAME = QTableWidgetItem('《' + val[1] + '》')
itemNAME.setTextAlignment(Qt.AlignCenter)
itemAUTHOR = QTableWidgetItem(val[2])
itemAUTHOR.setTextAlignment(Qt.AlignCenter)
itemDATE = QTableWidgetItem(val[3])
itemDATE.setTextAlignment(Qt.AlignCenter)
itemPRESS = QTableWidgetItem(val[4])
itemPRESS.setTextAlignment(Qt.AlignCenter)
itemPOSITION = QTableWidgetItem(val[5])
itemPOSITION.setTextAlignment(Qt.AlignCenter)
itemSUM = QTableWidgetItem(str(val[6])+'/'+str(val[7]))
itemSUM.setTextAlignment(Qt.AlignCenter)
itemCLASSIFICATION = QTableWidgetItem(val[8])
itemCLASSIFICATION.setTextAlignment(Qt.AlignCenter)
itemOPERATE = QToolButton(self.table)
itemOPERATE.setFixedSize(70, 25)
if val[-1] == '借书':
itemOPERATE.setText('借书')
itemOPERATE.clicked.connect(lambda: self.borrowBook(val[0]))
itemOPERATE.setStyleSheet('''
*{
color: white;
font-family: 微软雅黑;
background: rgba(38, 175, 217, 1);
border: 0;
border-radius: 10px;
font-size:18px;
}
''')
else:
itemOPERATE.setText('不可借')
itemOPERATE.setEnabled(False)
itemOPERATE.setToolTip(val[-1])
QToolTip.setFont(QFont('微软雅黑', 15))
itemOPERATE.setStyleSheet('''
QToolButton{
color: white;
font-family: 微软雅黑;
background: rgba(200, 200, 200, 1);
border: 0;
border-radius: 10px;
font-size:18px;
}
QToolTip{
color: black;
border: 1px solid rgba(200, 200, 200, 1);
}
''')
itemLayout = QHBoxLayout()
itemLayout.setContentsMargins(0, 0, 0, 0)
itemLayout.addWidget(itemOPERATE)
itemWidget = QWidget()
itemWidget.setLayout(itemLayout)
self.table.insertRow(1)
self.table.setItem(1, 0, itemBID)
self.table.setItem(1, 1, itemNAME)
self.table.setItem(1, 2, itemAUTHOR)
self.table.setItem(1, 3, itemDATE)
self.table.setItem(1, 4, itemPRESS)
self.table.setItem(1, 5, itemCLASSIFICATION)
self.table.setItem(1, 6, itemPOSITION)
self.table.setItem(1, 7, itemSUM)
self.table.setCellWidget(1, 8, itemWidget)
def borrowBook(self, BID: str):
ans = database.borrow_book(BID, self.stu_mes['SID'])
        # Refresh the table
if ans:
self.searchFunction()
def setMyStyle(self):
self.setStyleSheet('''
*{
background-color: white;
border:0px;
}
''')
self.titleBar.setStyleSheet('''
QWidget {
border:0;
background-color: rgba(216, 216, 216, 1);
border-radius: 20px;
color: rgba(113, 118, 121, 1);
}
QLabel{
font-size: 25px;
font-family: 微软雅黑;
}
''')
self.searchTitle.setStyleSheet('''
QLabel{
font-size:20px;
color: black;
font-family: 微软雅黑;
}
''')
self.searchInput.setStyleSheet('''
QLineEdit{
border: 1px solid rgba(201, 201, 201, 1);
border-radius: 5px;
color: rgba(120, 120, 120, 1)
}
''')
self.searchButton.setStyleSheet('''
QToolButton{
border-radius: 10px;
background-color:rgba(52, 118, 176, 1);
color: white;
font-size: 25px;
font-family: 微软雅黑;
}
''')
self.selectBox.setStyleSheet('''
*{
border: 0px;
}
QComboBox{
border: 1px solid rgba(201, 201, 201, 1);
}
''')
# Books currently on loan
class BorrowingBooks(QGroupBox):
def __init__(self, stu_mes):
super().__init__()
self.stu_mes = stu_mes
self.body = QVBoxLayout()
self.setTitleBar()
self.setTable()
self.setLayout(self.body)
self.initUI()
    # Title bar
def setTitleBar(self):
self.title = QLabel()
self.title.setText('借阅信息')
self.title.setFixedHeight(25)
titleLayout = QHBoxLayout()
titleLayout.addSpacing(50)
titleLayout.addWidget(self.title)
self.titleBar = QWidget()
self.titleBar.setFixedSize(900, 50)
self.titleBar.setLayout(titleLayout)
self.body.addWidget(self.titleBar)
def setTable(self, val: dict = None):
self.table = QTableWidget(1, 6)
self.table.setContentsMargins(10, 10, 10, 10)
self.table.verticalHeader().setVisible(False)
self.table.horizontalHeader().setVisible(False)
self.table.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.table.setFocusPolicy(Qt.NoFocus)
self.table.setColumnWidth(0, 150)
self.table.setColumnWidth(1, 150)
self.table.setColumnWidth(2, 175)
self.table.setColumnWidth(3, 175)
self.table.setColumnWidth(4, 100)
self.table.setColumnWidth(5, 150)
self.table.setItem(0, 0, QTableWidgetItem('书号'))
self.table.setItem(0, 1, QTableWidgetItem('书名'))
self.table.setItem(0, 2, QTableWidgetItem('借书日期'))
self.table.setItem(0, 3, QTableWidgetItem('还书日期'))
self.table.setItem(0, 4, QTableWidgetItem('罚金'))
self.table.setItem(0, 5, QTableWidgetItem('操作'))
for i in range(6):
self.table.item(0, i).setTextAlignment(Qt.AlignCenter)
self.table.item(0, i).setFont(QFont('微软雅黑', 15))
self.body.addWidget(self.table)
        # Show the borrowing details
self.book_list = database.get_borrowing_books(self.stu_mes['SID'])
for i in self.book_list:
self.insertRow(i)
self.table.setStyleSheet('''
*{
font-size:18px;
color: black;
background-color: white;
font-family: 微软雅黑;
}
''')
    # Insert a row
def insertRow(self, val: list):
itemBID = QTableWidgetItem(val[1])
itemBID.setTextAlignment(Qt.AlignCenter)
itemNAME = QTableWidgetItem('《' + val[2] + '》')
itemNAME.setTextAlignment(Qt.AlignCenter)
itemBEGIN = QTableWidgetItem(val[3])
itemBEGIN.setTextAlignment(Qt.AlignCenter)
itemBACK = QTableWidgetItem(val[4])
itemBACK.setTextAlignment(Qt.AlignCenter)
itemPUNISHED = QLabel()
itemPUNISHED.setText('0')
itemPUNISHED.setAlignment(Qt.AlignCenter)
isPunished = database.days_between(
val[4], time.strftime("%Y-%m-%d-%H:%M"))
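        # days_between comes from the project's database module (not shown
        # here); it presumably returns how many days "now" is past the due
        # date val[4], and a positive result is used below both as the overdue
        # flag and as the fine amount shown in the table.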
if isPunished <= 0:
itemPUNISHED.setStyleSheet('''
*{
color: green;
font-size:20px;
font-family: 微软雅黑;
}
''')
else:
itemPUNISHED.setText(str(isPunished))
itemPUNISHED.setStyleSheet('''
*{
color: red;
font-size:20px;
font-family: 微软雅黑;
}
''')
itemOPERATE = QToolButton(self.table)
itemOPERATE.setFixedSize(70, 25)
if isPunished <= 0:
itemOPERATE.setText('还书')
itemOPERATE.clicked.connect(lambda: self.retrurnBook(val[1]))
itemOPERATE.setStyleSheet('''
*{
color: white;
font-family: 微软雅黑;
background: rgba(38, 175, 217, 1);
border: 0;
border-radius: 10px;
font-size:18px;
}
''')
else:
itemOPERATE.setText('交罚金')
itemOPERATE.clicked.connect(
lambda: self.pay(val[1], isPunished))
itemOPERATE.setStyleSheet('''
*{
color: white;
font-family: 微软雅黑;
background: rgba(222, 52, 65, 1);
border: 0;
border-radius: 10px;
font-size:18px;
}
''')
itemLayout = QHBoxLayout()
itemLayout.setContentsMargins(0, 0, 0, 0)
itemLayout.addWidget(itemOPERATE)
itemWidget = QWidget()
itemWidget.setLayout(itemLayout)
self.table.insertRow(1)
self.table.setItem(1, 0, itemBID)
self.table.setItem(1, 1, itemNAME)
self.table.setItem(1, 2, itemBEGIN)
self.table.setItem(1, 3, itemBACK)
self.table.setCellWidget(1, 4, itemPUNISHED)
self.table.setCellWidget(1, 5, itemWidget)
def retrurnBook(self, BID: str):
ans = database.return_book(BID, self.stu_mes['SID'])
        # Refresh the table
if ans:
self.book_list = database.get_borrowing_books(self.stu_mes['SID'])
self.table.deleteLater()
self.setTable()
def pay(self, BID: str, PUNISH):
ans = database.pay(BID, self.stu_mes['SID'], PUNISH)
        # Refresh the table
if ans:
self.book_list = database.get_borrowing_books(self.stu_mes['SID'])
self.table.deleteLater()
self.setTable()
def initUI(self):
self.setFixedSize(1000, 600)
self.setStyleSheet('''
*{
background-color: white;
border:0px;
}
''')
self.titleBar.setStyleSheet('''
QWidget {
border:0;
background-color: rgba(216, 216, 216, 1);
border-radius: 20px;
color: rgba(113, 118, 121, 1);
}
QLabel{
font-size: 25px;
font-family: 微软雅黑;
}
''')
class History(QGroupBox):
def __init__(self, stu_mes):
super().__init__()
self.stu_mes = stu_mes
self.body = QVBoxLayout()
self.setTitleBar()
self.setTable()
self.setOut()
self.body.addStretch()
self.setLayout(self.body)
self.initUI()
    # Title bar
def setTitleBar(self):
self.title = QLabel()
self.title.setText('借阅记录')
self.title.setFixedHeight(25)
titleLayout = QHBoxLayout()
titleLayout.addSpacing(50)
titleLayout.addWidget(self.title)
self.titleBar = QWidget()
self.titleBar.setFixedSize(900, 50)
self.titleBar.setLayout(titleLayout)
self.body.addWidget(self.titleBar)
    # Create the table
def setTable(self, val: dict = None):
self.table = QTableWidget(1, 5)
self.table.setFixedHeight(400)
self.table.setContentsMargins(10, 10, 10, 10)
self.table.verticalHeader().setVisible(False)
self.table.horizontalHeader().setVisible(False)
self.table.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.table.setFocusPolicy(Qt.NoFocus)
self.table.setColumnWidth(0, 200)
self.table.setColumnWidth(1, 250)
self.table.setColumnWidth(2, 175)
self.table.setColumnWidth(3, 175)
self.table.setColumnWidth(4, 100)
self.table.setItem(0, 0, QTableWidgetItem('书号'))
self.table.setItem(0, 1, QTableWidgetItem('书名'))
self.table.setItem(0, 2, QTableWidgetItem('借书日期'))
self.table.setItem(0, 3, QTableWidgetItem('还书日期'))
self.table.setItem(0, 4, QTableWidgetItem('罚金'))
for i in range(5):
self.table.item(0, i).setTextAlignment(Qt.AlignCenter)
self.table.item(0, i).setFont(QFont('微软雅黑', 15))
self.list = database.get_log(self.stu_mes['SID'])
for i in self.list:
self.insertRow(i)
self.body.addWidget(self.table)
    # Insert a row
def insertRow(self, val: list):
itemBID = QTableWidgetItem(val[1])
itemBID.setTextAlignment(Qt.AlignCenter)
itemNAME = QTableWidgetItem('《' + val[2] + '》')
itemNAME.setTextAlignment(Qt.AlignCenter)
itemBEGIN = QTableWidgetItem(val[3])
itemBEGIN.setTextAlignment(Qt.AlignCenter)
itemBACK = QTableWidgetItem(val[4])
itemBACK.setTextAlignment(Qt.AlignCenter)
itemPUNISHED = QLabel()
itemPUNISHED.setText(str(val[5]))
itemPUNISHED.setAlignment(Qt.AlignCenter)
if val[5] == 0:
itemPUNISHED.setStyleSheet('''
*{
color: green;
font-size: 20px;
}
''')
else:
itemPUNISHED.setStyleSheet('''
*{
color: red;
font-size: 20px;
}
''')
self.table.insertRow(1)
self.table.setItem(1, 0, itemBID)
self.table.setItem(1, 1, itemNAME)
self.table.setItem(1, 2, itemBEGIN)
self.table.setItem(1, 3, itemBACK)
self.table.setCellWidget(1, 4, itemPUNISHED)
    # Export to a file
def setOut(self):
self.outButton = QToolButton()
self.outButton.setText('导出')
self.outButton.clicked.connect(self.outFunction)
self.outButton.setFixedSize(100, 50)
outLayout = QHBoxLayout()
outLayout.addStretch()
outLayout.addWidget(self.outButton)
outWidget = QWidget()
outWidget.setLayout(outLayout)
self.body.addWidget(outWidget)
def outFunction(self):
import csv
dirName = QFileDialog.getExistingDirectory(self, '选择文件夹')
title = ['SID', 'BID', 'BNAME', 'BORROW_DATE', 'BACK_DATE', 'PUNISHED']
with open(os.path.join(dirName, self.stu_mes['SID'] + '.csv'), 'w', newline='') as f:
writer = csv.writer(f)
writer.writerow(title)
for row in self.list:
writer.writerow(row)
def initUI(self):
self.setFixedSize(1000, 600)
self.setStyleSheet('''
*{
background-color: white;
border:0px;
}
''')
self.titleBar.setStyleSheet('''
QWidget {
border:0;
background-color: rgba(216, 216, 216, 1);
border-radius: 20px;
color: rgba(113, 118, 121, 1);
}
QLabel{
font-size: 25px;
font-family: 微软雅黑;
}
''')
self.table.setStyleSheet('''
font-size:18px;
color: black;
background-color: white;
font-family: 微软雅黑;
''')
self.outButton.setStyleSheet('''
QToolButton{
border-radius: 10px;
background-color:rgba(52, 118, 176, 1);
color: white;
font-size: 25px;
font-family: 微软雅黑;
}
''')
class Detial(QWidget):
def __init__(self, stu_mes):
super().__init__()
self.stu_mes = database.get_student_info(stu_mes['SID'])
        # Student ID input
account = QLabel()
account.setText('学号')
self.accountInput = QLineEdit()
self.accountInput.setFixedSize(400, 40)
self.accountInput.setText(self.stu_mes['SID'])
self.accountInput.setTextMargins(5, 5, 5, 5)
self.accountInput.setEnabled(False)
accountLayout = QHBoxLayout()
accountLayout.addStretch()
accountLayout.addWidget(account)
accountLayout.addWidget(self.accountInput)
        # Name input
name = QLabel()
name.setText('姓名')
self.nameInput = QLineEdit()
self.nameInput.setFixedSize(400, 40)
self.nameInput.setText(self.stu_mes['SNAME'])
self.nameInput.setTextMargins(5, 5, 5, 5)
self.nameInput.setEnabled(False)
nameLayout = QHBoxLayout()
nameLayout.addStretch()
nameLayout.addWidget(name)
nameLayout.addWidget(self.nameInput)
        # Password
password = QLabel()
password.setText('密码')
self.passwordInput = QLineEdit()
self.passwordInput.setFixedSize(400, 40)
self.passwordInput.setText('******')
self.passwordInput.setEchoMode(QLineEdit.Password)
self.passwordInput.setTextMargins(5, 5, 5, 5)
self.passwordInput.setEnabled(False)
passwordLayout = QHBoxLayout()
passwordLayout.addStretch()
passwordLayout.addWidget(password)
passwordLayout.addWidget(self.passwordInput)
        # Repeat password
repPassword = QLabel()
repPassword.setText('重复密码')
self.repPasswordInput = QLineEdit()
self.repPasswordInput.setFixedSize(400, 40)
self.repPasswordInput.setText('******')
self.repPasswordInput.setEchoMode(QLineEdit.Password)
self.repPasswordInput.setTextMargins(5, 5, 5, 5)
self.repPasswordInput.setEnabled(False)
repPasswordLayout = QHBoxLayout()
repPasswordLayout.addStretch()
repPasswordLayout.addWidget(repPassword)
repPasswordLayout.addWidget(self.repPasswordInput)
        # Maximum number of books that can be borrowed
maxNum = QLabel()
maxNum.setText('最大借书数')
self.maxNumInput = QLineEdit()
self.maxNumInput.setFixedSize(400, 40)
self.maxNumInput.setText(str(self.stu_mes['MAX']))
self.maxNumInput.setTextMargins(5, 5, 5, 5)
self.maxNumInput.setEnabled(False)
maxNumLayout = QHBoxLayout()
maxNumLayout.addStretch()
maxNumLayout.addWidget(maxNum)
maxNumLayout.addWidget(self.maxNumInput)
        # College
dept = QLabel()
dept.setText('学院')
self.deptInput = QLineEdit()
self.deptInput.setFixedSize(400, 40)
self.deptInput.setText(self.stu_mes['DEPARTMENT'])
self.deptInput.setTextMargins(5, 5, 5, 5)
self.deptInput.setEnabled(False)
deptLayout = QHBoxLayout()
deptLayout.addStretch()
deptLayout.addWidget(dept)
deptLayout.addWidget(self.deptInput)
        # Major
major = QLabel()
major.setText('专业')
self.majorInput = QLineEdit()
self.majorInput.setFixedSize(400, 40)
self.majorInput.setText(self.stu_mes['MAJOR'])
self.majorInput.setTextMargins(5, 5, 5, 5)
self.majorInput.setEnabled(False)
majorLayout = QHBoxLayout()
majorLayout.addStretch()
majorLayout.addWidget(major)
majorLayout.addWidget(self.majorInput)
        # Save button
self.save = QToolButton()
self.save.setText('保存')
self.save.setFixedSize(100, 40)
self.save.setEnabled(False)
self.save.clicked.connect(self.saveFunction)
        # Modify button
self.modify = QToolButton()
self.modify.setText('修改')
self.modify.setFixedSize(100, 40)
self.modify.clicked.connect(self.modifyFunction)
btnLayout = QHBoxLayout()
btnLayout.addSpacing(130)
btnLayout.addWidget(self.modify)
btnLayout.addWidget(self.save)
btnLayout.addStretch()
self.bodyLayout = QVBoxLayout()
self.bodyLayout.addLayout(accountLayout)
self.bodyLayout.addLayout(nameLayout)
self.bodyLayout.addLayout(passwordLayout)
self.bodyLayout.addLayout(repPasswordLayout)
self.bodyLayout.addLayout(deptLayout)
self.bodyLayout.addLayout(majorLayout)
self.bodyLayout.addLayout(maxNumLayout)
self.bodyLayout.addLayout(btnLayout)
self.bodyLayout.addStretch()
self.setLayout(self.bodyLayout)
self.initUI()
def saveFunction(self):
        if self.passwordInput.text() != self.repPasswordInput.text():
            print('Passwords do not match')
            return
        if not self.maxNumInput.text().isdigit():
            print('Invalid value for the maximum borrow count')
            return
if self.passwordInput.text() != '******':
self.stu_mes['PASSWORD'] = database.encrypt(self.passwordInput.text())
self.stu_mes['SNAME'] = self.nameInput.text()
self.stu_mes['DEPARTMENT'] = self.deptInput.text()
self.stu_mes['MAJOR'] = self.majorInput.text()
self.stu_mes['MAX'] = int(self.maxNumInput.text())
if not database.update_student(self.stu_mes):
            print('Update failed')
return
self.save.setEnabled(False)
self.nameInput.setEnabled(False)
self.passwordInput.setEnabled(False)
self.repPasswordInput.setEnabled(False)
self.deptInput.setEnabled(False)
self.majorInput.setEnabled(False)
self.maxNumInput.setEnabled(False)
self.setMyStyle()
def modifyFunction(self):
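        # Unlock the input fields for editing and switch to the editable stylesheet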
self.save.setEnabled(True)
self.nameInput.setEnabled(True)
self.passwordInput.setEnabled(True)
self.repPasswordInput.setEnabled(True)
self.deptInput.setEnabled(True)
self.majorInput.setEnabled(True)
self.maxNumInput.setEnabled(True)
self.setStyleSheet('''
QWidget{
background-color: white;
}
QLabel{
font-size: 20px;
font-family: 微软雅黑;
}
QLineEdit{
border: 1px solid rgba(229, 229, 229, 1);
border-radius: 10px;
color: black;
}
QToolButton{
border-radius: 10px;
background-color:rgba(52, 118, 176, 1);
color: white;
font-size: 25px;
font-family: 微软雅黑;
}
''')
self.save.setStyleSheet('''
*{
background-color:rgba(52, 118, 176, 1);
}
''')
def initUI(self):
self.setFixedSize(550, 600)
self.setMyStyle()
def setMyStyle(self):
self.setStyleSheet('''
QWidget{
background-color: white;
}
QLabel{
font-size: 20px;
font-family: 微软雅黑;
}
QLineEdit{
border: 1px solid rgba(229, 229, 229, 1);
border-radius: 10px;
color: grey;
}
QToolButton{
border-radius: 10px;
background-color:rgba(52, 118, 176, 1);
color: white;
font-size: 25px;
font-family: 微软雅黑;
}
''')
self.save.setStyleSheet('''
*{
background-color: gray;
}
''')
if __name__ == '__main__':
app = QApplication(sys.argv)
user_message = {
'SID': '1',
'SNAME': '1',
'DEPARTMENT': '1',
'MAJOR': '1',
'MAX': 5
}
ex = StudentPage(user_message)
ex.show()
sys.exit(app.exec_())
| [
"PyQt5.QtGui.QIcon",
"PyQt5.QtWidgets.QApplication",
"model.database.borrow_book",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QTableWidget",
"PyQt5.QtWidgets.QComboBox",
"model.database.get_student_info",
"model.database.update_student",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QGroupBox",
"model.database.get_borrowing_books",
"model.database.get_log",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QToolButton",
"PyQt5.QtGui.QFont",
"csv.writer",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QGridLayout",
"PyQt5.QtWidgets.QFileDialog.getExistingDirectory",
"model.database.return_book",
"time.strftime",
"os.path.join",
"PyQt5.QtWidgets.QSplitter",
"model.database.pay",
"PyQt5.QtWidgets.QTableWidgetItem",
"PyQt5.QtCore.QSize"
] | [((32802, 32824), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (32814, 32824), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((694, 703), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (701, 703), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((810, 821), 'PyQt5.QtWidgets.QSplitter', 'QSplitter', ([], {}), '()\n', (819, 821), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((930, 943), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (941, 943), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((1270, 1278), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (1276, 1278), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((1383, 1396), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (1394, 1396), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((1670, 1683), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (1681, 1683), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((1774, 1787), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (1785, 1787), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((2076, 2089), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (2087, 2089), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((2487, 2500), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (2498, 2500), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((2863, 2876), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (2874, 2876), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, 
QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((3243, 3256), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (3254, 3256), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((3706, 3719), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (3717, 3719), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((4036, 4047), 'PyQt5.QtWidgets.QGroupBox', 'QGroupBox', ([], {}), '()\n', (4045, 4047), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((6693, 6706), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (6704, 6706), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((6975, 6983), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (6981, 6983), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((7079, 7092), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (7090, 7092), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((7194, 7203), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (7201, 7203), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((7402, 7413), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (7411, 7413), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((7548, 7556), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (7554, 7556), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((7625, 7636), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7634, 7636), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((7802, 7815), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (7813, 7815), False, 'from PyQt5.QtWidgets import 
QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((7990, 8003), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (8001, 8003), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((8295, 8304), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (8302, 8304), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((8902, 8920), 'PyQt5.QtWidgets.QTableWidget', 'QTableWidget', (['(1)', '(9)'], {}), '(1, 9)\n', (8914, 8920), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((10320, 10344), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[0]'], {}), '(val[0])\n', (10336, 10344), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((10414, 10450), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (["('《' + val[1] + '》')"], {}), "('《' + val[1] + '》')\n", (10430, 10450), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((10523, 10547), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[2]'], {}), '(val[2])\n', (10539, 10547), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((10620, 10644), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[3]'], {}), '(val[3])\n', (10636, 10644), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((10716, 10740), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[4]'], {}), '(val[4])\n', (10732, 10740), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((10816, 10840), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[5]'], {}), '(val[5])\n', (10832, 10840), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((11039, 11063), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[8]'], {}), '(val[8])\n', (11055, 11063), False, 'from 
PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((11147, 11170), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', (['self.table'], {}), '(self.table)\n', (11158, 11170), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((12295, 12308), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (12306, 12308), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((12422, 12431), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (12429, 12431), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((12967, 13013), 'model.database.borrow_book', 'database.borrow_book', (['BID', "self.stu_mes['SID']"], {}), "(BID, self.stu_mes['SID'])\n", (12987, 13013), False, 'from model import database\n'), ((14596, 14609), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (14607, 14609), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((14777, 14785), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (14783, 14785), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((14881, 14894), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (14892, 14894), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((14996, 15005), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (15003, 15005), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((15202, 15220), 'PyQt5.QtWidgets.QTableWidget', 'QTableWidget', (['(1)', '(6)'], {}), '(1, 6)\n', (15214, 15220), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((16338, 16387), 'model.database.get_borrowing_books', 'database.get_borrowing_books', (["self.stu_mes['SID']"], {}), "(self.stu_mes['SID'])\n", (16366, 16387), False, 'from model import database\n'), ((16709, 16733), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[1]'], {}), '(val[1])\n', (16725, 16733), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, 
QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((16802, 16838), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (["('《' + val[2] + '》')"], {}), "('《' + val[2] + '》')\n", (16818, 16838), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((16909, 16933), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[3]'], {}), '(val[3])\n', (16925, 16933), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((17004, 17028), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[4]'], {}), '(val[4])\n', (17020, 17028), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((17102, 17110), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (17108, 17110), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((17816, 17839), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', (['self.table'], {}), '(self.table)\n', (17827, 17839), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((18790, 18803), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (18801, 18803), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((18917, 18926), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (18924, 18926), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((19329, 19375), 'model.database.return_book', 'database.return_book', (['BID', "self.stu_mes['SID']"], {}), "(BID, self.stu_mes['SID'])\n", (19349, 19375), False, 'from model import database\n'), ((19603, 19649), 'model.database.pay', 'database.pay', (['BID', "self.stu_mes['SID']", 'PUNISH'], {}), "(BID, self.stu_mes['SID'], PUNISH)\n", (19615, 19649), False, 'from model import database\n'), ((20469, 20482), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (20480, 20482), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((20703, 20711), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (20709, 20711), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, 
QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((20807, 20820), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (20818, 20820), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((20922, 20931), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (20929, 20931), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((21139, 21157), 'PyQt5.QtWidgets.QTableWidget', 'QTableWidget', (['(1)', '(5)'], {}), '(1, 5)\n', (21151, 21157), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((22153, 22190), 'model.database.get_log', 'database.get_log', (["self.stu_mes['SID']"], {}), "(self.stu_mes['SID'])\n", (22169, 22190), False, 'from model import database\n'), ((22354, 22378), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[1]'], {}), '(val[1])\n', (22370, 22378), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((22447, 22483), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (["('《' + val[2] + '》')"], {}), "('《' + val[2] + '》')\n", (22463, 22483), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((22554, 22578), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[3]'], {}), '(val[3])\n', (22570, 22578), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((22649, 22673), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['val[4]'], {}), '(val[4])\n', (22665, 22673), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((22747, 22755), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (22753, 22755), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((23537, 23550), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (23548, 23550), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((23710, 23723), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), 
'()\n', (23721, 23723), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((23819, 23828), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (23826, 23828), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((23973, 24020), 'PyQt5.QtWidgets.QFileDialog.getExistingDirectory', 'QFileDialog.getExistingDirectory', (['self', '"""选择文件夹"""'], {}), "(self, '选择文件夹')\n", (24005, 24020), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((25378, 25419), 'model.database.get_student_info', 'database.get_student_info', (["stu_mes['SID']"], {}), "(stu_mes['SID'])\n", (25403, 25419), False, 'from model import database\n'), ((25455, 25463), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (25461, 25463), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((25522, 25533), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (25531, 25533), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((25758, 25771), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (25769, 25771), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((25931, 25939), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (25937, 25939), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((25992, 26003), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (26001, 26003), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((26215, 26228), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (26226, 26228), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((26374, 26382), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (26380, 26382), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((26443, 26454), 
'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (26452, 26454), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((26732, 26745), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (26743, 26745), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((26916, 26924), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (26922, 26924), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((26993, 27004), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (27002, 27004), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((27300, 27313), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (27311, 27313), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((27495, 27503), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (27501, 27503), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((27563, 27574), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (27572, 27574), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((27799, 27812), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (27810, 27812), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((27964, 27972), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (27970, 27972), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((28025, 28036), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (28034, 28036), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((28253, 28266), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (28264, 28266), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, 
QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((28409, 28417), 'PyQt5.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (28415, 28417), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((28472, 28483), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (28481, 28483), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((28700, 28713), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (28711, 28713), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((28865, 28878), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (28876, 28878), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((29076, 29089), 'PyQt5.QtWidgets.QToolButton', 'QToolButton', ([], {}), '()\n', (29087, 29089), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((29244, 29257), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (29255, 29257), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((29430, 29443), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (29441, 29443), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((1426, 1450), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""icon/person.png"""'], {}), "('icon/person.png')\n", (1431, 1450), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((2208, 2230), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""icon/book.png"""'], {}), "('icon/book.png')\n", (2213, 2230), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((2268, 2281), 'PyQt5.QtCore.QSize', 'QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (2273, 2281), False, 'from PyQt5.QtCore import Qt, QSize\n'), ((2607, 2634), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""icon/borrowing.png"""'], {}), "('icon/borrowing.png')\n", (2612, 2634), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((2668, 2681), 'PyQt5.QtCore.QSize', 'QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (2673, 2681), False, 'from PyQt5.QtCore import Qt, QSize\n'), ((2986, 3011), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""icon/history.png"""'], {}), "('icon/history.png')\n", (2991, 3011), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((3046, 3059), 'PyQt5.QtCore.QSize', 'QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (3051, 3059), False, 'from PyQt5.QtCore import Qt, 
QSize\n'), ((3363, 3387), 'PyQt5.QtGui.QIcon', 'QIcon', (['"""icon/detial.png"""'], {}), "('icon/detial.png')\n", (3368, 3387), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((3421, 3434), 'PyQt5.QtCore.QSize', 'QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (3426, 3434), False, 'from PyQt5.QtCore import Qt, QSize\n'), ((9492, 9514), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""书号"""'], {}), "('书号')\n", (9508, 9514), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9549, 9571), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""书名"""'], {}), "('书名')\n", (9565, 9571), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9606, 9628), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""作者"""'], {}), "('作者')\n", (9622, 9628), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9663, 9687), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""出版日期"""'], {}), "('出版日期')\n", (9679, 9687), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9722, 9745), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""出版社"""'], {}), "('出版社')\n", (9738, 9745), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9780, 9802), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""分类"""'], {}), "('分类')\n", (9796, 9802), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9837, 9859), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""位置"""'], {}), "('位置')\n", (9853, 9859), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9894, 9919), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""总数/剩余"""'], {}), "('总数/剩余')\n", (9910, 9919), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((9954, 9976), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""操作"""'], {}), "('操作')\n", (9970, 9976), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, 
QComboBox\n'), ((15786, 15808), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""书号"""'], {}), "('书号')\n", (15802, 15808), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((15843, 15865), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""书名"""'], {}), "('书名')\n", (15859, 15865), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((15900, 15924), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""借书日期"""'], {}), "('借书日期')\n", (15916, 15924), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((15959, 15983), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""还书日期"""'], {}), "('还书日期')\n", (15975, 15983), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((16018, 16040), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""罚金"""'], {}), "('罚金')\n", (16034, 16040), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((16075, 16097), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""操作"""'], {}), "('操作')\n", (16091, 16097), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((17259, 17290), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d-%H:%M"""'], {}), "('%Y-%m-%d-%H:%M')\n", (17272, 17290), False, 'import time\n'), ((19436, 19485), 'model.database.get_borrowing_books', 'database.get_borrowing_books', (["self.stu_mes['SID']"], {}), "(self.stu_mes['SID'])\n", (19464, 19485), False, 'from model import database\n'), ((19710, 19759), 'model.database.get_borrowing_books', 'database.get_borrowing_books', (["self.stu_mes['SID']"], {}), "(self.stu_mes['SID'])\n", (19738, 19759), False, 'from model import database\n'), ((21720, 21742), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""书号"""'], {}), "('书号')\n", (21736, 21742), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((21777, 21799), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""书名"""'], {}), "('书名')\n", (21793, 21799), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((21834, 21858), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', 
(['"""借书日期"""'], {}), "('借书日期')\n", (21850, 21858), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((21893, 21917), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""还书日期"""'], {}), "('还书日期')\n", (21909, 21917), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((21952, 21974), 'PyQt5.QtWidgets.QTableWidgetItem', 'QTableWidgetItem', (['"""罚金"""'], {}), "('罚金')\n", (21968, 21974), False, 'from PyQt5.QtWidgets import QApplication, QWidget, QGridLayout, QGroupBox, QToolButton, QSplitter, QVBoxLayout, QHBoxLayout, QLabel, QTableWidget, QTableWidgetItem, QAbstractItemView, QLineEdit, QFileDialog, QToolTip, QComboBox\n'), ((24217, 24230), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (24227, 24230), False, 'import csv\n'), ((30546, 30583), 'model.database.update_student', 'database.update_student', (['self.stu_mes'], {}), '(self.stu_mes)\n', (30569, 30583), False, 'from model import database\n'), ((10115, 10132), 'PyQt5.QtGui.QFont', 'QFont', (['"""微软雅黑"""', '(15)'], {}), "('微软雅黑', 15)\n", (10120, 10132), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((11819, 11836), 'PyQt5.QtGui.QFont', 'QFont', (['"""微软雅黑"""', '(15)'], {}), "('微软雅黑', 15)\n", (11824, 11836), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((16236, 16253), 'PyQt5.QtGui.QFont', 'QFont', (['"""微软雅黑"""', '(15)'], {}), "('微软雅黑', 15)\n", (16241, 16253), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((22113, 22130), 'PyQt5.QtGui.QFont', 'QFont', (['"""微软雅黑"""', '(15)'], {}), "('微软雅黑', 15)\n", (22118, 22130), False, 'from PyQt5.QtGui import QIcon, QFont\n'), ((24120, 24171), 'os.path.join', 'os.path.join', (['dirName', "(self.stu_mes['SID'] + '.csv')"], {}), "(dirName, self.stu_mes['SID'] + '.csv')\n", (24132, 24171), False, 'import os\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on 2017-12-22
@author: foxty
"""
import os
import unittest
from master_cli import download_py, _FILE_OF_PY27, parse_nodelist
class Tests(unittest.TestCase):
def test_download_py(self):
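        # download_py should fetch the Python 2.7 package and leave a file of the expected size on disk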
download_py()
self.assertTrue(os.path.exists(_FILE_OF_PY27))
self.assertEqual(17176758, os.stat(_FILE_OF_PY27).st_size)
def test_parsenodelist(self):
nodelist = parse_nodelist('./master_cli_test_nodes.txt')
self.assertEqual(4, len(nodelist))
self.assertEqual(('node1', 'root', '123456'), nodelist[0])
self.assertEqual(('node2', 'root', '123456@!#'), nodelist[1])
self.assertEqual(('node3', 'root', '0987654321!@#'), nodelist[2])
self.assertEqual(('node5', 'haha', 'passfree123'), nodelist[3])
if __name__ == '__main__':
    unittest.main()
| [
"os.path.exists",
"master_cli.download_py",
"master_cli.parse_nodelist",
"unittest.main",
"os.stat"
] | [((850, 865), 'unittest.main', 'unittest.main', ([], {}), '()\n', (863, 865), False, 'import unittest\n'), ((255, 268), 'master_cli.download_py', 'download_py', ([], {}), '()\n', (266, 268), False, 'from master_cli import download_py, _FILE_OF_PY27, parse_nodelist\n'), ((445, 490), 'master_cli.parse_nodelist', 'parse_nodelist', (['"""./master_cli_test_nodes.txt"""'], {}), "('./master_cli_test_nodes.txt')\n", (459, 490), False, 'from master_cli import download_py, _FILE_OF_PY27, parse_nodelist\n'), ((293, 322), 'os.path.exists', 'os.path.exists', (['_FILE_OF_PY27'], {}), '(_FILE_OF_PY27)\n', (307, 322), False, 'import os\n'), ((359, 381), 'os.stat', 'os.stat', (['_FILE_OF_PY27'], {}), '(_FILE_OF_PY27)\n', (366, 381), False, 'import os\n')] |
# Copyright (c) 2021, VRAI Labs and/or its affiliates. All rights reserved.
#
# This software is licensed under the Apache License, Version 2.0 (the
# "License") as published by the Apache Software Foundation.
#
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from typing import Any, Dict, Union
from supertokens_python.recipe.openid.interfaces import \
GetOpenIdDiscoveryConfigurationResult
from supertokens_python.recipe.openid.recipe import OpenIdRecipe
from ...jwt.interfaces import CreateJwtResult, GetJWKSResult
async def create_jwt(payload: Union[None, Dict[str, Any]] = None, validity_seconds: Union[None, int] = None, user_context: Union[Dict[str, Any], None] = None) -> CreateJwtResult:
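    # delegate to the OpenID recipe singleton's implementation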
if user_context is None:
user_context = {}
if payload is None:
payload = {}
return await OpenIdRecipe.get_instance().recipe_implementation.create_jwt(payload, validity_seconds, user_context)
async def get_jwks(user_context: Union[Dict[str, Any], None] = None) -> GetJWKSResult:
if user_context is None:
user_context = {}
return await OpenIdRecipe.get_instance().recipe_implementation.get_jwks(user_context)
async def get_open_id_discovery_configuration(user_context: Union[Dict[str, Any], None] = None) -> GetOpenIdDiscoveryConfigurationResult:
if user_context is None:
user_context = {}
return await OpenIdRecipe.get_instance().recipe_implementation.get_open_id_discovery_configuration(user_context)
| [
"supertokens_python.recipe.openid.recipe.OpenIdRecipe.get_instance"
] | [((1245, 1272), 'supertokens_python.recipe.openid.recipe.OpenIdRecipe.get_instance', 'OpenIdRecipe.get_instance', ([], {}), '()\n', (1270, 1272), False, 'from supertokens_python.recipe.openid.recipe import OpenIdRecipe\n'), ((1508, 1535), 'supertokens_python.recipe.openid.recipe.OpenIdRecipe.get_instance', 'OpenIdRecipe.get_instance', ([], {}), '()\n', (1533, 1535), False, 'from supertokens_python.recipe.openid.recipe import OpenIdRecipe\n'), ((1793, 1820), 'supertokens_python.recipe.openid.recipe.OpenIdRecipe.get_instance', 'OpenIdRecipe.get_instance', ([], {}), '()\n', (1818, 1820), False, 'from supertokens_python.recipe.openid.recipe import OpenIdRecipe\n')] |
import pickle
import os
import numpy as np
import matplotlib.pyplot as plt
from nltk.corpus import brown
from pprint import pprint
from collections import defaultdict
from tqdm import tqdm
from private_quotes.core.risk_evaluation import RiskEvaluator
def prepare_reference_probabilities():
""" This function is 'struggle coding',
and should be totally refactored.
"""
risk_evaluator = RiskEvaluator()
risk_evaluator.load_dataset()
all_privacy_scores = defaultdict(list)
for sent in tqdm(brown.sents()):
privacy_scores, privacy_percents, probabilities, ngram_tokens = risk_evaluator.evaluate_quote(' '.join(sent))
all_privacy_scores[1] += [privacy_scores[0]]
all_privacy_scores[2] += [privacy_scores[1]]
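    # sort each score list so it can serve as an empirical reference distribution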
all_privacy_scores[1] = np.sort(all_privacy_scores[1])
all_privacy_scores[2] = np.sort(all_privacy_scores[2])
# pprint(all_privacy_scores[1])
# plt.hist(all_privacy_scores[1], normed=True, bins=30)
# plt.ylabel('Probability')
# plt.show()
output_filepath = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../data/reference_scores.pkl')
with open(output_filepath, 'wb') as f:
pickle.dump(all_privacy_scores, f)
return
if __name__ == '__main__':
    prepare_reference_probabilities()
| [
"pickle.dump",
"numpy.sort",
"private_quotes.core.risk_evaluation.RiskEvaluator",
"nltk.corpus.brown.sents",
"os.path.realpath",
"collections.defaultdict"
] | [((411, 426), 'private_quotes.core.risk_evaluation.RiskEvaluator', 'RiskEvaluator', ([], {}), '()\n', (424, 426), False, 'from private_quotes.core.risk_evaluation import RiskEvaluator\n'), ((487, 504), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (498, 504), False, 'from collections import defaultdict\n'), ((795, 825), 'numpy.sort', 'np.sort', (['all_privacy_scores[1]'], {}), '(all_privacy_scores[1])\n', (802, 825), True, 'import numpy as np\n'), ((854, 884), 'numpy.sort', 'np.sort', (['all_privacy_scores[2]'], {}), '(all_privacy_scores[2])\n', (861, 884), True, 'import numpy as np\n'), ((526, 539), 'nltk.corpus.brown.sents', 'brown.sents', ([], {}), '()\n', (537, 539), False, 'from nltk.corpus import brown\n'), ((1198, 1232), 'pickle.dump', 'pickle.dump', (['all_privacy_scores', 'f'], {}), '(all_privacy_scores, f)\n', (1209, 1232), False, 'import pickle\n'), ((1082, 1108), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1098, 1108), False, 'import os\n')] |
import sqlite3, json, base64, sys
from decimal import Decimal, ROUND_DOWN, InvalidOperation
from stellar_base.asset import Asset
from stellar_base.operation import Payment
from stellar_base.transaction import Transaction
from stellar_base.transaction_envelope import TransactionEnvelope as Te
from stellar_base.horizon import horizon_testnet, horizon_livenet
from stellar_base.utils import AccountNotExistError
pool_address = "GCCD6AJOYZCUAQLX32ZJF2MKFFAUJ53PVCFQI3RHWKL3V47QYE2BNAUT"
db_address = "../core/stellar.db"
select_donations_op = """
SELECT * FROM accountdata WHERE
`dataname` LIKE 'Lumenaut.net donation%'"""
select_accounts_op = """
SELECT `accounts`.`accountid`, `balance` FROM `accounts`
WHERE `inflationdest`='%s'""" % pool_address
select_total_balance = "SELECT Sum(balance) FROM accounts WHERE `inflationdest`=?"
select_num_accounts = "SELECT Count(*) FROM accounts WHERE `inflationdest`=?"
select_sequence_num = "SELECT seqnum FROM accounts WHERE `accountid`=?"
network = "PUBLIC"
horizon = horizon_livenet()
BASE_FEE = 100
XLM_STROOP = 10000000
XLM_FEE = Decimal(BASE_FEE) / Decimal(XLM_STROOP)  # divide as Decimals so the fee value is exact
donation_payouts = {}
def writeflushed(out):
sys.stdout.write(out)
sys.stdout.flush()
def XLM_Decimal(n):
# 7 decimal places is the longest supported
return Decimal(n).quantize(Decimal('.0000001'), rounding=ROUND_DOWN)
def query_one(cursor, query_str, args):
cursor.execute(query_str, args)
return cursor.fetchone()[0]
def parse_donation(donation_data):
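	# A donation entry decodes to "<percentage>%<destination address>" (56-char address starting with 'G')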
donation_data = base64.b64decode(donation_data).decode("utf-8")
# Get the donation percentage and destination address from the
# base64 data string
try:
pct, address = donation_data.split("%")
pct = Decimal(pct)
if pct < 0 or pct > 100 or len(address) != 56 or address[0] != 'G':
return None
else:
return (address, pct / 100)
except ValueError:
		# Split didn't produce exactly two elements (not exactly one '%' in donation_data)
return None
except InvalidOperation:
		# Decimal() can't produce a valid value (malformed string)
return None
def accounts_payouts(cur, pool_addr, inflation, size=100):
total_balance = XLM_Decimal(query_one(cur, select_total_balance, (pool_address, )))
num_accounts = query_one(cur, select_num_accounts, (pool_address, ))
cur.execute(select_donations_op)
donations = {}
for row in cur:
donor = row[0]
donation = parse_donation(row[2])
if donation != None:
donation_address, percentage = donation
if donor not in donations:
donations[donor] = {}
donations[donor][donation_address] = percentage
payouts = []
batch = []
cur.execute(select_accounts_op)
i = 1.0
for row in cur:
writeflushed("\rCalculating donation amounts: %d%%" % ((i / num_accounts) * 100))
i += 1
accountid = row[0]
account_balance = XLM_Decimal(row[1])
account_inflation = (account_balance / total_balance) * inflation
if accountid in donations:
inflation_sub = 0
for address in donations[accountid]:
pct = donations[accountid][address]
donation_amt = account_inflation * pct
donation_payouts[address] = XLM_Decimal(donation_payouts.get(address, 0) + donation_amt)
inflation_sub += donation_amt + XLM_FEE # take the transaction fee from donations (even though they will be bundled)
account_inflation -= inflation_sub
batch.append((accountid, XLM_Decimal(account_inflation - XLM_FEE)))
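		# a Stellar transaction holds at most 100 operations, so flush full batches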
if len(batch) >= 100:
payouts.append(batch)
batch = []
writeflushed("\rCalculated donation amounts successfully.\n")
for address in donation_payouts:
batch.append((address, donation_payouts[address]))
if len(batch) >= 100:
payouts.append(batch)
batch = []
if len(batch) > 0:
payouts.append(batch)
batch = []
return payouts, total_balance, num_accounts
def make_payment_op(account_id, amount):
return Payment({
'destination': account_id,
'source': pool_address,
'amount': str(amount),
'asset': Asset('XLM')
})
def main(inflation):
# TODO: Let user select the connection type
# The stellar/quickstart Docker image uses PostgreSQL
conn = sqlite3.connect(db_address)
cur = conn.cursor()
# Get the next sequence number for the transactions
sequence = query_one(cur, select_sequence_num, (pool_address, ))
inflation = XLM_Decimal(inflation)
transactions = []
num_payments = 0
total_payments_cost = XLM_Decimal(0)
total_fee_cost = XLM_Decimal(0)
batches, total_balance, num_accounts = accounts_payouts(cur, pool_address, inflation)
num_batches = len(batches)
dest_sequence = sequence + num_batches
i = 1.0
# Create one transaction for each batch
for batch in batches:
writeflushed("\rCreating and encoding transactions: %d%%" % (i / num_batches * 100))
i += 1
operations = []
for aid, amount in batch:
# Include payment operation on ops{}
payment_op = make_payment_op(aid, amount)
operations.append(payment_op)
total_payments_cost += amount
# Build transaction
tx = Transaction(
source=pool_address,
opts={"sequence": sequence, "operations": operations, "fee": len(batch) * BASE_FEE}
)
# Bundle transaction into an envelope to be encoded to xdr
envelope = Te(tx=tx, opts={"network_id": network})
# Append the transaction plain-text (JSON) on the list
transactions.append(envelope.xdr().decode("utf-8"))
# Calculate stats
total_fee_cost += tx.fee
num_payments += len(operations)
# Prepare the next sequence number for the transactions
sequence += 1
with open("transactions.json", 'w') as outf:
json.dump(transactions, outf)
writeflushed("\rSuccessfully built transaction file: written to transactions.json.\n\n")
print((
"Stats: \n"
"\tInflation received: %s\n"
"\tNumber of accounts voting for Lumenaut: %d (%s XLM)\n"
"\tA total of %s XLM paid over %s inflation payments using %s XLM in fees.\n"
"\tNumber of transactions needed: %s\n"
"\tNumber of unique donation addresses: %s\n") % (
inflation,
num_accounts,
(total_balance / XLM_STROOP),
total_payments_cost,
num_payments,
(total_fee_cost / XLM_STROOP),
len(transactions),
len(donation_payouts)
)
)
if __name__ == '__main__':
	main(49855.2650163) # test amount
| [
"stellar_base.transaction_envelope.TransactionEnvelope",
"sqlite3.connect",
"json.dump",
"base64.b64decode",
"stellar_base.asset.Asset",
"stellar_base.horizon.horizon_livenet",
"sys.stdout.flush",
"decimal.Decimal",
"sys.stdout.write"
] | [((1021, 1038), 'stellar_base.horizon.horizon_livenet', 'horizon_livenet', ([], {}), '()\n', (1036, 1038), False, 'from stellar_base.horizon import horizon_testnet, horizon_livenet\n'), ((1087, 1117), 'decimal.Decimal', 'Decimal', (['(BASE_FEE / XLM_STROOP)'], {}), '(BASE_FEE / XLM_STROOP)\n', (1094, 1117), False, 'from decimal import Decimal, ROUND_DOWN, InvalidOperation\n'), ((1166, 1187), 'sys.stdout.write', 'sys.stdout.write', (['out'], {}), '(out)\n', (1182, 1187), False, 'import sqlite3, json, base64, sys\n'), ((1189, 1207), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1205, 1207), False, 'import sqlite3, json, base64, sys\n'), ((4054, 4081), 'sqlite3.connect', 'sqlite3.connect', (['db_address'], {}), '(db_address)\n', (4069, 4081), False, 'import sqlite3, json, base64, sys\n'), ((1303, 1322), 'decimal.Decimal', 'Decimal', (['""".0000001"""'], {}), "('.0000001')\n", (1310, 1322), False, 'from decimal import Decimal, ROUND_DOWN, InvalidOperation\n'), ((1693, 1705), 'decimal.Decimal', 'Decimal', (['pct'], {}), '(pct)\n', (1700, 1705), False, 'from decimal import Decimal, ROUND_DOWN, InvalidOperation\n'), ((5129, 5168), 'stellar_base.transaction_envelope.TransactionEnvelope', 'Te', ([], {'tx': 'tx', 'opts': "{'network_id': network}"}), "(tx=tx, opts={'network_id': network})\n", (5131, 5168), True, 'from stellar_base.transaction_envelope import TransactionEnvelope as Te\n'), ((5487, 5516), 'json.dump', 'json.dump', (['transactions', 'outf'], {}), '(transactions, outf)\n', (5496, 5516), False, 'import sqlite3, json, base64, sys\n'), ((1283, 1293), 'decimal.Decimal', 'Decimal', (['n'], {}), '(n)\n', (1290, 1293), False, 'from decimal import Decimal, ROUND_DOWN, InvalidOperation\n'), ((1502, 1533), 'base64.b64decode', 'base64.b64decode', (['donation_data'], {}), '(donation_data)\n', (1518, 1533), False, 'import sqlite3, json, base64, sys\n'), ((3906, 3918), 'stellar_base.asset.Asset', 'Asset', (['"""XLM"""'], {}), "('XLM')\n", (3911, 3918), False, 'from stellar_base.asset import Asset\n')] |
from django.contrib import admin
from django.urls import path, include
from rest_framework import routers
from . import views
app_name = 'index'
urlpatterns = [
    path('', views.index, name='ind'),
    path('test/', views.AddView.as_view(), name='ind-add'),
    path('success/<int:pk>/', views.SuccessView.as_view(), name='ind-view'),
    path('api/data/', views.get_data),
    path('api/data/charts/', views.ChartData.as_view()),
    path('api/data/chartss/', views.ChartData1.as_view()),
    # path('', include('index.urls')),
    # path('admin/', admin.site.urls),
]
| [
"django.urls.path"
] | [((173, 206), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""ind"""'}), "('', views.index, name='ind')\n", (177, 206), False, 'from django.urls import path, include\n'), ((347, 380), 'django.urls.path', 'path', (['"""api/data/"""', 'views.get_data'], {}), "('api/data/', views.get_data)\n", (351, 380), False, 'from django.urls import path, include\n')] |
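The 'api/data/' route above points at a function-based view. A sketch of what views.get_data could look like; the payload shape is hypothetical, and django.http.JsonResponse handles the serialization:

from django.http import JsonResponse

def get_data(request):
    # Hypothetical static payload; the real view would query its models.
    data = {"labels": ["red", "blue", "green"], "values": [10, 20, 30]}
    return JsonResponse(data)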
import _multiprocessing as _mp
import multiprocessing.reduction as mp_r
import sys

from gevent.hub import _get_hub_noargs as get_hub


def recvfd(sockfd):
    return get_hub().threadpool.apply(_mp.recvfd, (sockfd,))


def sendfd(sockfd, fd):
    return get_hub().threadpool.apply(_mp.sendfd, (sockfd, fd))


SemLock = _mp.SemLock
address_of_buffer = _mp.address_of_buffer
flags = _mp.flags


class Connection(object):
    # Delegate every potentially blocking call on the C-level connection
    # to the hub's threadpool so other greenlets keep running.

    def __init__(self, handle, readable=True, writable=True):
        self._conn = _mp.Connection(handle, readable, writable)

    def poll(self, *args, **kwargs):
        return get_hub().threadpool.apply(self._conn.poll, args, kwargs)

    def recv(self):
        return get_hub().threadpool.apply(self._conn.recv)

    def recv_bytes(self, *args, **kwargs):
        return get_hub().threadpool.apply(self._conn.recv_bytes, args, kwargs)

    def recv_bytes_into(self, *args, **kwargs):
        return get_hub().threadpool.apply(self._conn.recv_bytes_into, args, kwargs)

    def send(self, *args, **kwargs):
        return get_hub().threadpool.apply(self._conn.send, args, kwargs)

    def send_bytes(self, *args, **kwargs):
        return get_hub().threadpool.apply(self._conn.send_bytes, args, kwargs)

    def close(self):
        return get_hub().threadpool.apply(self._conn.close)

    def fileno(self):
        return self._conn.fileno()

    def __repr__(self):
        return self._conn.__repr__()

    @property
    def closed(self):
        return self._conn.closed

    @property
    def readable(self):
        return self._conn.readable

    @property
    def writable(self):
        return self._conn.writable


if sys.platform == 'win32':
    PipeConnection = type("PipeConnection", (object,), dict(Connection.__dict__))
    del Connection
    mp_r.ForkingPickler.register(PipeConnection, mp_r.reduce_pipe_connection)
else:
    mp_r.ForkingPickler.register(Connection, mp_r.reduce_connection)
| [
"gevent.hub._get_hub_noargs",
"_multiprocessing.Connection",
"multiprocessing.reduction.ForkingPickler.register"
] | [((1775, 1848), 'multiprocessing.reduction.ForkingPickler.register', 'mp_r.ForkingPickler.register', (['PipeConnection', 'mp_r.reduce_pipe_connection'], {}), '(PipeConnection, mp_r.reduce_pipe_connection)\n', (1803, 1848), True, 'import multiprocessing.reduction as mp_r\n'), ((1859, 1923), 'multiprocessing.reduction.ForkingPickler.register', 'mp_r.ForkingPickler.register', (['Connection', 'mp_r.reduce_connection'], {}), '(Connection, mp_r.reduce_connection)\n', (1887, 1923), True, 'import multiprocessing.reduction as mp_r\n'), ((496, 538), '_multiprocessing.Connection', '_mp.Connection', (['handle', 'readable', 'writable'], {}), '(handle, readable, writable)\n', (510, 538), True, 'import _multiprocessing as _mp\n'), ((167, 176), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (174, 176), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((254, 263), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (261, 263), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((592, 601), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (599, 601), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((686, 695), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (693, 695), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((789, 798), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (796, 798), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((917, 926), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (924, 926), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((1039, 1048), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (1046, 1048), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((1156, 1165), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (1163, 1165), True, 'from gevent.hub import _get_hub_noargs as get_hub\n'), ((1257, 1266), 'gevent.hub._get_hub_noargs', 'get_hub', ([], {}), '()\n', (1264, 1266), True, 'from gevent.hub import _get_hub_noargs as get_hub\n')] |
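The wrapper's core trick is delegating blocking C calls to the hub's threadpool, and the same pattern works for any blocking function. A minimal sketch using the public gevent.hub.get_hub (the private _get_hub_noargs import above is a gevent internal):

import time

from gevent.hub import get_hub

def run_blocking(func, *args):
    # The calling greenlet yields while the threadpool runs func.
    return get_hub().threadpool.apply(func, args)

run_blocking(time.sleep, 0.1)  # other greenlets keep running meanwhile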
from unittest.case import TestCase
try:
    from mock import MagicMock
except ImportError:
    from unittest.mock import MagicMock

from examples.handlers.country_handler import CountryHandler
from responsebot.models import Tweet
from responsebot.responsebot_client import ResponseBotClient


class CountryHandlerTestCase(TestCase):
    def test_reply_country_info(self):
        client = MagicMock(tweet=MagicMock())
        handler = CountryHandler(client)
        tweet = Tweet({'text': '@bot Andorra'})
        handler.on_tweet(tweet)
        client.tweet.assert_called_once_with(
            'Country: Andorra\n'
            'Population: 84000\n'
            'Languages: ca\n'
            'Continent: Europe'
        )

    def test_does_not_reply_non_existent_country(self):
        client = MagicMock(tweet=MagicMock())
        handler = CountryHandler(client)
        tweet = Tweet({'text': '@bot Azeroth'})
        handler.on_tweet(tweet)
        client.tweet.assert_not_called()
| [
"examples.handlers.country_handler.CountryHandler",
"unittest.mock.MagicMock",
"responsebot.models.Tweet"
] | [((437, 459), 'examples.handlers.country_handler.CountryHandler', 'CountryHandler', (['client'], {}), '(client)\n', (451, 459), False, 'from examples.handlers.country_handler import CountryHandler\n'), ((476, 507), 'responsebot.models.Tweet', 'Tweet', (["{'text': '@bot Andorra'}"], {}), "({'text': '@bot Andorra'})\n", (481, 507), False, 'from responsebot.models import Tweet\n'), ((848, 870), 'examples.handlers.country_handler.CountryHandler', 'CountryHandler', (['client'], {}), '(client)\n', (862, 870), False, 'from examples.handlers.country_handler import CountryHandler\n'), ((887, 918), 'responsebot.models.Tweet', 'Tweet', (["{'text': '@bot Azeroth'}"], {}), "({'text': '@bot Azeroth'})\n", (892, 918), False, 'from responsebot.models import Tweet\n'), ((406, 417), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (415, 417), False, 'from unittest.mock import MagicMock\n'), ((817, 828), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (826, 828), False, 'from unittest.mock import MagicMock\n')] |
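The handler under test is not shown. One plausible implementation consistent with these assertions; the country table, the tweet.text attribute, and the mention-stripping logic are all assumptions:

# Hypothetical data source; the real example likely ships a country table.
COUNTRIES = {
    'Andorra': {'population': 84000, 'languages': 'ca', 'continent': 'Europe'},
}

class CountryHandler(object):
    def __init__(self, client):
        self.client = client

    def on_tweet(self, tweet):
        name = tweet.text.split(' ', 1)[1]  # drop the "@bot" mention
        country = COUNTRIES.get(name)
        if country is None:
            return  # unknown country: no reply, matching assert_not_called()
        self.client.tweet(
            'Country: %s\n'
            'Population: %d\n'
            'Languages: %s\n'
            'Continent: %s' % (name, country['population'],
                               country['languages'], country['continent'])
        )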
#PUT LICENCE HERE!
"""
ArtemisPCIE Driver
"""

import sys
import os
import time
from array import array as Array

sys.path.append(os.path.join(os.path.dirname(__file__),
                             os.pardir))

from nysa.host.driver import driver

# Sub Module ID
# Use 'nysa devices' to get a list of different available devices
DEVICE_TYPE = "Experiment"
SDB_ABI_VERSION_MINOR = 0
SDB_VENDOR_ID = 0

try:
    SDB_ABI_VERSION_MINOR = 0
    SDB_VENDOR_ID = 0x800000000000C594
except SyntaxError:
    pass

# Register Constants
CONTROL = 0x000
STATUS = 0x001
CFG_READ_EXEC = 0x002
CFG_SM_STATE = 0x003
CTR_SM_STATE = 0x004
INGRESS_COUNT = 0x005
INGRESS_STATE = 0x006
INGRESS_RI_COUNT = 0x007
INGRESS_CI_COUNT = 0x008
INGRESS_ADDR = 0x009
INGRESS_CMPLT_COUNT = 0x00A
IH_STATE = 0x00B
OH_STATE = 0x00C
BRAM_NUM_READS = 0x00D
LOCAL_BUFFER_SIZE = 0x00E
DBG_ID_VALUE = 0x00F
DBG_COMMAND_VALUE = 0x010
DBG_COUNT_VALUE = 0x011
DBG_ADDRESS_VALUE = 0x012

CTRL_BIT_SOURCE_EN = 0
CTRL_BIT_CANCEL_WRITE = 1
CTRL_BIT_SINK_EN = 2

STS_BIT_LINKUP = 0
STS_BIT_READ_IDLE = 1
STS_PER_FIFO_SEL = 2
STS_MEM_FIFO_SEL = 3
STS_DMA_FIFO_SEL = 4
STS_WRITE_EN = 5
STS_READ_EN = 6

LOCAL_BUFFER_OFFSET = 0x100


class ArtemisPCIEDriver(driver.Driver):
    """ ArtemisPCIE

    Communication with a DutDriver ArtemisPCIE Core
    """

    @staticmethod
    def get_abi_class():
        return 0

    @staticmethod
    def get_abi_major():
        return driver.get_device_id_from_name(DEVICE_TYPE)

    @staticmethod
    def get_abi_minor():
        return SDB_ABI_VERSION_MINOR

    @staticmethod
    def get_vendor_id():
        return SDB_VENDOR_ID

    def __init__(self, nysa, urn, debug=False):
        super(ArtemisPCIEDriver, self).__init__(nysa, urn, debug)
        self.buffer_size = None

    def set_control(self, control):
        self.write_register(CONTROL, control)

    def get_control(self):
        return self.read_register(CONTROL)

    def get_config_state(self):
        return self.read_register(CFG_SM_STATE)

    def get_control_state(self):
        return self.read_register(CTR_SM_STATE)

    def get_config_state_read_count(self):
        return self.read_register(CFG_READ_EXEC)

    def get_ingress_state(self):
        return self.read_register(INGRESS_STATE)

    def get_ingress_count(self):
        return self.read_register(INGRESS_COUNT)

    def get_ingress_ri_count(self):
        return self.read_register(INGRESS_RI_COUNT)

    def get_ingress_ci_count(self):
        return self.read_register(INGRESS_CI_COUNT)

    def get_ingress_cmplt_count(self):
        return self.read_register(INGRESS_CMPLT_COUNT)

    def get_ingress_addr(self):
        return self.read_register(INGRESS_ADDR)

    def get_ih_state(self):
        return self.read_register(IH_STATE)

    def get_oh_state(self):
        return self.read_register(OH_STATE)

    def get_local_buffer_size(self):
        return self.read_register(LOCAL_BUFFER_SIZE)

    def read_local_buffer(self, address=0x00, size=None):
        """
        Read the local buffer within the core, if no size is specified
        read the entire buffer,
        if no address is specified read from the beginning

        Args:
            address (Integer): address of data (32-bit aligned) Default 0x00
            size (Integer): Size of read (32-bit words) Default 512

        Returns (Array of Bytes):
            Returns the data as an array of bytes

        Raises:
            Nothing
        """
        if size is None:
            if self.buffer_size is None:
                self.buffer_size = self.get_local_buffer_size()
            size = self.buffer_size
        return self.read(address + (LOCAL_BUFFER_OFFSET), length=size)

    def write_local_buffer(self, data, address=0x00):
        """
        Write data to the local buffer that will be used to send to the Hard Drive
        By Default the address is set to 0x00

        Args:
            data (Array of bytes): data
            address (Integer): Address within local buffer 0 - 511 (Default 0)

        Returns:
            Nothing

        Raises:
            Nothing
        """
        self.write(address + (LOCAL_BUFFER_OFFSET), data)

    def get_dbg_id_value(self):
        return self.read_register(DBG_ID_VALUE)

    def get_dbg_command_value(self):
        return self.read_register(DBG_COMMAND_VALUE)

    def get_dbg_count_value(self):
        return self.read_register(DBG_COUNT_VALUE)

    def get_dbg_address_value(self):
        return self.read_register(DBG_ADDRESS_VALUE)

    def is_link_up(self):
        return self.is_register_bit_set(STATUS, STS_BIT_LINKUP)

    def is_read_idle(self):
        return self.is_register_bit_set(STATUS, STS_BIT_READ_IDLE)

    def is_peripheral_bus_selected(self):
        return self.is_register_bit_set(STATUS, STS_PER_FIFO_SEL)

    def is_memory_bus_selected(self):
        return self.is_register_bit_set(STATUS, STS_MEM_FIFO_SEL)

    def is_dma_bus_selected(self):
        return self.is_register_bit_set(STATUS, STS_DMA_FIFO_SEL)

    def generate_dma_data(self):
        self.enable_register_bit(CONTROL, CTRL_BIT_SOURCE_EN, True)

    def is_write_enabled(self):
        return self.is_register_bit_set(STATUS, STS_WRITE_EN)

    def is_read_enabled(self):
        return self.is_register_bit_set(STATUS, STS_READ_EN)
| [
"os.path.dirname",
"nysa.host.driver.driver.get_device_id_from_name"
] | [((145, 170), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (160, 170), False, 'import os\n'), ((1985, 2028), 'nysa.host.driver.driver.get_device_id_from_name', 'driver.get_device_id_from_name', (['DEVICE_TYPE'], {}), '(DEVICE_TYPE)\n', (2015, 2028), False, 'from nysa.host.driver import driver\n')] |
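A short usage sketch built only from the driver's own methods; the 'nysa' and 'urn' values are placeholders that would normally come from the host's device discovery:

# Usage sketch: nysa and urn come from Nysa's device discovery (placeholders here).
dev = ArtemisPCIEDriver(nysa, urn, debug=False)
if dev.is_link_up():
    data = dev.read_local_buffer()   # whole buffer when size is None
    dev.write_local_buffer(data)     # echo it back
    print("ingress count: 0x%X" % dev.get_ingress_count())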
import json
from os.path import join

from algorithms.utils import experiment_dir
from utils.utils import log


# noinspection PyMethodMayBeStatic
class Params:
    def __init__(self, experiment_name):
        self.experiments_root = None

        # internal params, not for CLI
        self._experiment_name = experiment_name
        self._command_line = None
        self._params_serialized = False

    def filename_prefix(self):
        return ''

    def _params_file(self):
        params_filename = self.filename_prefix() + 'params.json'
        return join(self.experiment_dir(), params_filename)

    def experiment_dir(self):
        return experiment_dir(self._experiment_name, self.experiments_root)

    def set_command_line(self, argv):
        self._command_line = ' '.join(argv)

    def ensure_serialized(self):
        if not self._params_serialized:
            self.serialize()
            self._params_serialized = True

    def serialize(self):
        with open(self._params_file(), 'w') as json_file:
            json.dump(self.__dict__, json_file, indent=2)

    def load(self):
        with open(self._params_file()) as json_file:
            json_params = json.load(json_file)
            self.__dict__.update(json_params)
            return self

    @classmethod
    def add_cli_args(cls, parser):
        tmp_params = cls(None)

        def add_arg(argname, type_, default):
            parser.add_argument('--' + argname, type=type_, default=default)

        def str2bool(v):
            return v.lower() == 'true'

        for name, value in tmp_params.__dict__.items():
            # do not add "protected" attributes to CLI
            if name.startswith('_'):
                continue
            if value is None:
                add_arg(name, str, value)
            elif type(value) is bool:
                parser.add_argument('--' + name, type=str2bool, default=str(value))
            elif type(value) in (int, float, str):
                add_arg(name, type(value), value)

    def update(self, args):
        arg_attrs = args.__dict__.keys()
        for name, value in self.__dict__.items():
            if name in arg_attrs:
                new_value = args.__dict__[name]
                if value != new_value:
                    log.info('Replacing default value for %s (%r) with new value: %r', name, value, new_value)
                    setattr(self, name, new_value) | [
"json.load",
"algorithms.utils.experiment_dir",
"utils.utils.log.info",
"json.dump"
] | [((654, 714), 'algorithms.utils.experiment_dir', 'experiment_dir', (['self._experiment_name', 'self.experiments_root'], {}), '(self._experiment_name, self.experiments_root)\n', (668, 714), False, 'from algorithms.utils import experiment_dir\n'), ((1040, 1085), 'json.dump', 'json.dump', (['self.__dict__', 'json_file'], {'indent': '(2)'}), '(self.__dict__, json_file, indent=2)\n', (1049, 1085), False, 'import json\n'), ((1186, 1206), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (1195, 1206), False, 'import json\n'), ((2282, 2376), 'utils.utils.log.info', 'log.info', (['"""Replacing default value for %s (%r) with new value: %r"""', 'name', 'value', 'new_value'], {}), "('Replacing default value for %s (%r) with new value: %r', name,\n value, new_value)\n", (2290, 2376), False, 'from utils.utils import log\n')] |
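A sketch of how this class is meant to be used: public attributes of a subclass become CLI flags, and update() copies parsed overrides back onto the instance. The subclass name and attributes below are hypothetical:

import argparse

class TrainParams(Params):
    def __init__(self, experiment_name):
        super().__init__(experiment_name)
        # Public attributes automatically become CLI flags.
        self.learning_rate = 0.001
        self.optimizer = 'adam'

parser = argparse.ArgumentParser()
TrainParams.add_cli_args(parser)
args = parser.parse_args(['--learning_rate', '0.01'])

params = TrainParams('my_experiment')
params.update(args)          # logs the override and applies it
print(params.learning_rate)  # 0.01
# params.ensure_serialized() would write params.json into the experiment dir.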
import os
import unittest
import json
import jc.parsers.traceroute

THIS_DIR = os.path.dirname(os.path.abspath(__file__))


class MyTests(unittest.TestCase):

    def setUp(self):
        # input
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/traceroute.out'), 'r', encoding='utf-8') as f:
            self.centos_7_7_traceroute = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-no-header.out'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_noheader = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-asn.out'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_asn = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-mult-addresses.out'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_mult_addresses = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-q.out'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_q = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute.out'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute6-mult-addresses.out'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute6_mult_addresses = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute6.out'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute6 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute.out'), 'r', encoding='utf-8') as f:
            self.freebsd12_traceroute = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute6.out'), 'r', encoding='utf-8') as f:
            self.freebsd12_traceroute6 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute1.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute1 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute2.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute2 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute3.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute3 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute4.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute4 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute5.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute5 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute6.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute6 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute7.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute7 = f.read()
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute8.out'), 'r', encoding='utf-8') as f:
            self.generic_traceroute8 = f.read()

        # output
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-no-header.json'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_no_header_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/traceroute.json'), 'r', encoding='utf-8') as f:
            self.centos_7_7_traceroute_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-asn.json'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_asn_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-mult-addresses.json'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_mult_addresses_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-q.json'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_q_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute.json'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute6-mult-addresses.json'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute6_mult_addresses_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute6.json'), 'r', encoding='utf-8') as f:
            self.osx_10_14_6_traceroute6_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute.json'), 'r', encoding='utf-8') as f:
            self.freebsd12_traceroute_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute6.json'), 'r', encoding='utf-8') as f:
            self.freebsd12_traceroute6_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute1.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute1_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute2.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute2_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute3.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute3_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute4.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute4_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute5.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute5_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute6.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute6_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute7.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute7_json = json.loads(f.read())
        with open(os.path.join(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute8.json'), 'r', encoding='utf-8') as f:
            self.generic_traceroute8_json = json.loads(f.read())

    def test_traceroute_nodata(self):
        """
        Test 'traceroute' with no data
        """
        self.assertEqual(jc.parsers.traceroute.parse('', quiet=True), {})

    def test_traceroute_noheader(self):
        """
        Test 'traceroute' with missing header row
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.osx_10_14_6_traceroute_noheader, quiet=True), self.osx_10_14_6_traceroute_no_header_json)

    def test_traceroute_centos_7_7(self):
        """
        Test 'traceroute' on Centos 7.7
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.centos_7_7_traceroute, quiet=True), self.centos_7_7_traceroute_json)

    def test_traceroute_a_osx_10_14_6(self):
        """
        Test 'traceroute -a' on OSX 10.14.6
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.osx_10_14_6_traceroute_asn, quiet=True), self.osx_10_14_6_traceroute_asn_json)

    def test_traceroute_mult_addresses_osx_10_14_6(self):
        """
        Test 'traceroute' with multiple addresses returned via dns on OSX 10.14.6
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.osx_10_14_6_traceroute_mult_addresses, quiet=True), self.osx_10_14_6_traceroute_mult_addresses_json)

    def test_traceroute_q_osx_10_14_6(self):
        """
        Test 'traceroute -q' on OSX 10.14.6
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.osx_10_14_6_traceroute_q, quiet=True), self.osx_10_14_6_traceroute_q_json)

    def test_traceroute_osx_10_14_6(self):
        """
        Test 'traceroute' on OSX 10.14.6
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.osx_10_14_6_traceroute, quiet=True), self.osx_10_14_6_traceroute_json)

    def test_traceroute6_mult_addresses_osx_10_14_6(self):
        """
        Test 'traceroute6' with multiple addresses returned via dns on OSX 10.14.6
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.osx_10_14_6_traceroute6_mult_addresses, quiet=True), self.osx_10_14_6_traceroute6_mult_addresses_json)

    def test_traceroute6_osx_10_14_6(self):
        """
        Test 'traceroute6' on OSX 10.14.6
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.osx_10_14_6_traceroute6, quiet=True), self.osx_10_14_6_traceroute6_json)

    def test_traceroute_freebsd12(self):
        """
        Test 'traceroute' on freebsd12
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.freebsd12_traceroute, quiet=True), self.freebsd12_traceroute_json)

    def test_traceroute6_freebsd12(self):
        """
        Test 'traceroute6' on freebsd12
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.freebsd12_traceroute6, quiet=True), self.freebsd12_traceroute6_json)

    def test_traceroute1_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute1, quiet=True), self.generic_traceroute1_json)

    def test_traceroute2_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute2, quiet=True), self.generic_traceroute2_json)

    def test_traceroute3_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute3, quiet=True), self.generic_traceroute3_json)

    def test_traceroute4_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute4, quiet=True), self.generic_traceroute4_json)

    def test_traceroute5_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute5, quiet=True), self.generic_traceroute5_json)

    def test_traceroute6_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute6, quiet=True), self.generic_traceroute6_json)

    def test_traceroute7_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute7, quiet=True), self.generic_traceroute7_json)

    def test_traceroute8_generic(self):
        """
        Test 'traceroute'
        """
        self.assertEqual(jc.parsers.traceroute.parse(self.generic_traceroute8, quiet=True), self.generic_traceroute8_json)


if __name__ == '__main__':
    unittest.main()
| [
"os.path.abspath",
"os.path.join",
"unittest.main"
] | [((95, 120), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (110, 120), False, 'import os\n'), ((11571, 11586), 'unittest.main', 'unittest.main', ([], {}), '()\n', (11584, 11586), False, 'import unittest\n'), ((214, 291), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/centos-7.7/traceroute.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/traceroute.out')\n", (226, 291), False, 'import os\n'), ((391, 483), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-no-header.out"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute-no-header.out')\n", (403, 483), False, 'import os\n'), ((589, 675), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-asn.out"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute-asn.out')\n", (601, 675), False, 'import os\n'), ((776, 873), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-mult-addresses.out"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute-mult-addresses.out')\n", (788, 873), False, 'import os\n'), ((985, 1070), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-q.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute-q.out'\n )\n", (997, 1070), False, 'import os\n'), ((1168, 1246), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute.out')\n", (1180, 1246), False, 'import os\n'), ((1347, 1445), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute6-mult-addresses.out"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute6-mult-addresses.out')\n", (1359, 1445), False, 'import os\n'), ((1558, 1637), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute6.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute6.out')\n", (1570, 1637), False, 'import os\n'), ((1739, 1815), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/freebsd12/traceroute.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute.out')\n", (1751, 1815), False, 'import os\n'), ((1914, 1991), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/freebsd12/traceroute6.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute6.out')\n", (1926, 1991), False, 'import os\n'), ((2091, 2166), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute1.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute1.out')\n", (2103, 2166), False, 'import os\n'), ((2264, 2339), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute2.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute2.out')\n", (2276, 2339), False, 'import os\n'), ((2437, 2512), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute3.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute3.out')\n", (2449, 2512), False, 'import os\n'), ((2610, 2685), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute4.out"""'], {}), 
"(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute4.out')\n", (2622, 2685), False, 'import os\n'), ((2783, 2858), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute5.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute5.out')\n", (2795, 2858), False, 'import os\n'), ((2956, 3031), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute6.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute6.out')\n", (2968, 3031), False, 'import os\n'), ((3129, 3204), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute7.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute7.out')\n", (3141, 3204), False, 'import os\n'), ((3302, 3377), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute8.out"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute8.out')\n", (3314, 3377), False, 'import os\n'), ((3492, 3585), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-no-header.json"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute-no-header.json')\n", (3504, 3585), False, 'import os\n'), ((3709, 3787), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/centos-7.7/traceroute.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/centos-7.7/traceroute.json')\n", (3721, 3787), False, 'import os\n'), ((3904, 3991), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-asn.json"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute-asn.json')\n", (3916, 3991), False, 'import os\n'), ((4109, 4207), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-mult-addresses.json"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute-mult-addresses.json')\n", (4121, 4207), False, 'import os\n'), ((4336, 4421), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute-q.json"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute-q.json')\n", (4348, 4421), False, 'import os\n'), ((4537, 4616), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute.json')\n", (4549, 4616), False, 'import os\n'), ((4734, 4833), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute6-mult-addresses.json"""'], {}), "(THIS_DIR, os.pardir,\n 'tests/fixtures/osx-10.14.6/traceroute6-mult-addresses.json')\n", (4746, 4833), False, 'import os\n'), ((4963, 5048), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/osx-10.14.6/traceroute6.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/osx-10.14.6/traceroute6.json'\n )\n", (4975, 5048), False, 'import os\n'), ((5162, 5239), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/freebsd12/traceroute.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute.json')\n", (5174, 5239), False, 'import os\n'), ((5355, 5433), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/freebsd12/traceroute6.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/freebsd12/traceroute6.json')\n", (5367, 5433), False, 'import 
os\n'), ((5550, 5626), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute1.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute1.json')\n", (5562, 5626), False, 'import os\n'), ((5741, 5817), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute2.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute2.json')\n", (5753, 5817), False, 'import os\n'), ((5932, 6008), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute3.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute3.json')\n", (5944, 6008), False, 'import os\n'), ((6123, 6199), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute4.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute4.json')\n", (6135, 6199), False, 'import os\n'), ((6314, 6390), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute5.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute5.json')\n", (6326, 6390), False, 'import os\n'), ((6505, 6581), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute6.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute6.json')\n", (6517, 6581), False, 'import os\n'), ((6696, 6772), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute7.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute7.json')\n", (6708, 6772), False, 'import os\n'), ((6887, 6963), 'os.path.join', 'os.path.join', (['THIS_DIR', 'os.pardir', '"""tests/fixtures/generic/traceroute8.json"""'], {}), "(THIS_DIR, os.pardir, 'tests/fixtures/generic/traceroute8.json')\n", (6899, 6963), False, 'import os\n')] |
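Outside the fixture-driven tests, the parser is typically fed live command output. A sketch of that flow; the result schema (a dict with destination fields and a 'hops' list) is an assumption about jc's traceroute output:

import subprocess

import jc.parsers.traceroute

# Assumes a local traceroute binary is available (Python 3.7+ for text=True).
cmd_output = subprocess.run(
    ['traceroute', '-m', '3', 'example.com'],
    capture_output=True, text=True).stdout

result = jc.parsers.traceroute.parse(cmd_output, quiet=True)
print(result.get('destination_name'), len(result.get('hops', [])))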
from datetime import datetime

from hobbit_core.db import Column, BaseModel, reference_col, SurrogatePK
from sqlalchemy.orm import relationship, exc

from app.exts import bcrypt, db

# from app.models import Role  # (models in the ORM do not need to import each other; they can be used directly)


# class SubMenu(db.Model, SurrogatePK):
#     __tablename__ = 'submenus'
#     child_id = Column(db.Integer, db.ForeignKey('menus.id'), primary_key=True)
#     # child_id = reference_col('menus', nullable=False)
#     parent_id = Column(db.Integer, db.ForeignKey('menus.id'), primary_key=True)
#     # parent_id = reference_col('menus', nullable=False)
#
#     def __init__(self, **kwargs):
#         db.Model.__init__(self, **kwargs)


# class Menu(BaseModel):
#     __tablename__ = 'menus'
#     exclude_columns = ['created_at', 'updated_at']
#     auth_name = Column(db.String(32), nullable=False, index=True)
#     path = Column(db.String(256), nullable=False)
#     children = relationship('SubMenu',
#                             foreign_keys=[SubMenu.parent_id],
#                             backref = db.backref('parent', lazy='joined'),
#                             lazy = 'dynamic',
#                             cascade = 'all, delete-orphan'
#                             )
#     parents = relationship('SubMenu',
#                            foreign_keys=[SubMenu.child_id],
#                            backref = db.backref('child', lazy='joined'),
#                            lazy = 'dynamic',
#                            cascade = 'all, delete-orphan'
#                            )
#
#     def add_child(self, menu):
#         if not self.is_sub_menu(menu):
#             sub_menu = SubMenu(parent=self, child=menu)
#             db.session.add(sub_menu)
#
#     def remove_child(self, menu):
#         sub_menu = self.children.filter_by(child_id=menu.id).first()
#         if sub_menu:
#             db.session.delete(sub_menu)
#
#     def is_sub_menu(self, menu):
#         return self.children.filter_by(child_id=menu.id).first() is not None
#
#     def __init__(self, auth_name, path, **kwargs):
#         db.Model.__init__(self, auth_name=auth_name, path=path, **kwargs)


# class Menu(BaseModel):
#     __tablename__ = 'menus'
#     # exclude_columns = ['created_at', 'updated_at']
#     auth_name = Column(db.String(32), nullable=False, index=True)
#     path = Column(db.String(256), nullable=False)
#     level = Column(db.Integer, nullable=True)
#     # children = relationship("SubMenu", backref='parent')
#
#     def __init__(self, auth_name, path, **kwargs):
#         db.Model.__init__(self, auth_name=auth_name, path=path, **kwargs)


# class SubMenu(BaseModel):
#     __tablename__ = 'submenus'
#     exclude_columns = ['created_at', 'updated_at']
#     auth_name = Column(db.String(32), nullable=False, index=True)
#     path = Column(db.String(256), nullable=False)
#     level = Column(db.Integer, nullable=True)
#     parent_id = Column(db.Integer, db.ForeignKey('menus.id'))
#     parent = relationship('Menu', backref=db.backref('children', order_by=auth_name))
#
#     def __init__(self, auth_name, path, **kwargs):
#         db.Model.__init__(self, auth_name=auth_name, path=path, **kwargs)


association_table = db.Table(
    'association',
    # Column('menu_id', db.Integer, db.ForeignKey('menus.id', ondelete='CASCADE')),
    # Column('role_id', db.Integer, db.ForeignKey('roles.id', ondelete='CASCADE'))
    Column('menu_id', db.Integer, db.ForeignKey('menus.id')),
    Column('role_id', db.Integer, db.ForeignKey('roles.id'))
)


class Menu(db.Model):
    __tablename__ = 'menus'

    id = db.Column(db.Integer, primary_key=True)
    exclude_columns = ['created_at', 'updated_at']
    auth_name = Column(db.String(32), nullable=False, index=True)
    path = Column(db.String(256), nullable=False)
    level = Column(db.Integer, nullable=True)
    created_at = Column(db.Date, nullable=True, default=datetime.now)
    updated_at = Column(db.Date, nullable=True, default=datetime.now)
    parent_id = db.Column(db.Integer, db.ForeignKey('menus.id'))
    children = db.relationship('Menu', back_populates='parent')
    parent = db.relationship('Menu', back_populates='children', remote_side=[id])
    # role_id = Column(db.Integer, db.ForeignKey('roles.id'))
    # role = relationship('Role', backref=db.backref('menus', order_by=id))
    # roles = relationship('Role', secondary=association_table, back_populates="menus")
    # Use db.backref() with lazy='dynamic' instead of back_populates so the
    # queries returned on both sides of the relationship accept extra filters.
    roles = relationship('Role', secondary=association_table, backref=db.backref("menus", lazy='dynamic'))

    def __init__(self, auth_name, path, **kwargs):
        db.Model.__init__(self, auth_name=auth_name, path=path, **kwargs)


# association_table = Table('association', db.Model.metadata,
#     Column('menu_id', db.Integer, ForeignKey('menus.id')),
#     Column('role_id', db.Integer, ForeignKey('roles.id'))
# )
| [
"app.exts.db.Column",
"app.exts.db.backref",
"app.exts.db.ForeignKey",
"app.exts.db.String",
"hobbit_core.db.Column",
"app.exts.db.relationship",
"app.exts.db.Model.__init__"
] | [((3587, 3626), 'app.exts.db.Column', 'db.Column', (['db.Integer'], {'primary_key': '(True)'}), '(db.Integer, primary_key=True)\n', (3596, 3626), False, 'from app.exts import bcrypt, db\n'), ((3808, 3841), 'hobbit_core.db.Column', 'Column', (['db.Integer'], {'nullable': '(True)'}), '(db.Integer, nullable=True)\n', (3814, 3841), False, 'from hobbit_core.db import Column, BaseModel, reference_col, SurrogatePK\n'), ((3859, 3911), 'hobbit_core.db.Column', 'Column', (['db.Date'], {'nullable': '(True)', 'default': 'datetime.now'}), '(db.Date, nullable=True, default=datetime.now)\n', (3865, 3911), False, 'from hobbit_core.db import Column, BaseModel, reference_col, SurrogatePK\n'), ((3929, 3981), 'hobbit_core.db.Column', 'Column', (['db.Date'], {'nullable': '(True)', 'default': 'datetime.now'}), '(db.Date, nullable=True, default=datetime.now)\n', (3935, 3981), False, 'from hobbit_core.db import Column, BaseModel, reference_col, SurrogatePK\n'), ((4062, 4110), 'app.exts.db.relationship', 'db.relationship', (['"""Menu"""'], {'back_populates': '"""parent"""'}), "('Menu', back_populates='parent')\n", (4077, 4110), False, 'from app.exts import bcrypt, db\n'), ((4124, 4192), 'app.exts.db.relationship', 'db.relationship', (['"""Menu"""'], {'back_populates': '"""children"""', 'remote_side': '[id]'}), "('Menu', back_populates='children', remote_side=[id])\n", (4139, 4192), False, 'from app.exts import bcrypt, db\n'), ((3435, 3460), 'app.exts.db.ForeignKey', 'db.ForeignKey', (['"""menus.id"""'], {}), "('menus.id')\n", (3448, 3460), False, 'from app.exts import bcrypt, db\n'), ((3497, 3522), 'app.exts.db.ForeignKey', 'db.ForeignKey', (['"""roles.id"""'], {}), "('roles.id')\n", (3510, 3522), False, 'from app.exts import bcrypt, db\n'), ((3703, 3716), 'app.exts.db.String', 'db.String', (['(32)'], {}), '(32)\n', (3712, 3716), False, 'from app.exts import bcrypt, db\n'), ((3764, 3778), 'app.exts.db.String', 'db.String', (['(256)'], {}), '(256)\n', (3773, 3778), False, 'from app.exts import bcrypt, db\n'), ((4020, 4045), 'app.exts.db.ForeignKey', 'db.ForeignKey', (['"""menus.id"""'], {}), "('menus.id')\n", (4033, 4045), False, 'from app.exts import bcrypt, db\n'), ((4668, 4733), 'app.exts.db.Model.__init__', 'db.Model.__init__', (['self'], {'auth_name': 'auth_name', 'path': 'path'}), '(self, auth_name=auth_name, path=path, **kwargs)\n', (4685, 4733), False, 'from app.exts import bcrypt, db\n'), ((4570, 4605), 'app.exts.db.backref', 'db.backref', (['"""menus"""'], {'lazy': '"""dynamic"""'}), "('menus', lazy='dynamic')\n", (4580, 4605), False, 'from app.exts import bcrypt, db\n')] |
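A sketch of the self-referential relationship and the dynamic backref in use; it assumes an active Flask app context and a Role model elsewhere in the app that receives the appended 'menus' backref:

# Sketch: build a small menu tree and persist it (assumes an app context).
root = Menu(auth_name='system', path='/system', level=1)
child = Menu(auth_name='users', path='/system/users', level=2, parent=root)
db.session.add_all([root, child])
db.session.commit()

# Because the backref uses lazy='dynamic', role.menus is a query object,
# so extra filters chain onto it, e.g.:
#     role.menus.filter(Menu.level == 1).all()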
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
"""  # noqa
import json
from datetime import datetime

from fta import settings
from fta.www.apiservice import fta_api_page as app
from fta.www.utils import response
from project.utils.component import bk


@app.route("/status/process/")
@response.log
def health():
    monitor = HealthMonitor()
    results = monitor.get_data()
    return json.dumps(results)


class HealthMonitor(object):

    def __init__(self):
        self.status = []

    def get_data(self):
        checkers = {
            "redis": self.check_redis,
            "mysql": self.check_mysql,
            "beanstalk": self.check_beanstalk,
            "supervisor": self.check_supervisor,
            "cc": self.check_cc,
        }
        results = {}
        fine = True
        for namespace, checker in checkers.items():
            error = ""
            details = None
            try:
                error, details = checker()
            except Exception as err:
                error = str(err)
            if error:
                fine = False
            results.update({
                "%s:details" % namespace: details,
                "%s:error" % namespace: error,
            })
        results["ok"] = fine
        results['result'] = fine
        return results

    def check_redis(self):
        from fta.storage.cache import Cache
        redis = Cache('redis')
        cache_key = "fta_health_check"
        error = ""
        can_set = False
        can_expire = False
        try:
            can_set = redis.set(cache_key, 1)
            can_expire = redis.expire(cache_key, 0)
        except Exception as err:
            error = str(err)
        return error, {
            "can_set": can_set,
            "can_expire": can_expire,
        }

    def check_mysql(self):
        from fta.storage import tables
        from fta.storage.mysql import session
        error = ""
        try:
            session.query(tables.FtaSolutionsAppAlarminstancearchive).filter(
                tables.FtaSolutionsAppAlarminstancearchive.date >= datetime.today(),
            ).count()
        except Exception as err:
            error = str(err)
        return error, {}

    def check_beanstalk(self):
        import beanstalkc

        def _get_bean_stats(host, port, queues):
            error_info = ""
            bean = beanstalkc.Connection(host=host, port=port)
            bean_stats = {}
            try:
                main_stats = bean.stats()
            except Exception as e:
                error_info = str(e)
                main_stats = {"exception": str(e)}
            bean_stats['main'] = main_stats
            for q in queues:
                try:
                    q_stats = bean.stats_tube(q)
                except Exception as e:
                    error_info = str(e)
                    q_stats = {"exception": str(e)}
                bean_stats[q] = q_stats
            return error_info, bean_stats

        bean_queues = [
            settings.QUEUE_COLLECT,
            settings.QUEUE_CONVERGE,
            settings.QUEUE_SOLUTION,
            settings.QUEUE_JOB,
            settings.QUEUE_SCHEDULER,
            settings.QUEUE_POLLING,
            settings.QUEUE_MATCH
        ]
        stats = {}
        error = ""
        if isinstance(settings.BEANSTALKD_HOST, (list, set)):
            bean_hosts = settings.BEANSTALKD_HOST
        else:
            bean_hosts = [settings.BEANSTALKD_HOST]
        for host in bean_hosts:
            err, bean_stats = _get_bean_stats(
                host, settings.BEANSTALKD_PORT, bean_queues,
            )
            if err:
                error = err
            key = '%s:%s' % (host, settings.BEANSTALKD_PORT)
            stats[key] = bean_stats
        return error, stats

    def check_supervisor(self):
        from fta.utils.supervisorctl import get_supervisor_client
        error = ""
        result = {}
        proxy = get_supervisor_client()
        for process in proxy.supervisor.getAllProcessInfo():
            name = process.get("name")
            description = process.get("description")
            statename = process.get("statename")
            if statename != "RUNNING":
                error = "%s not running" % name
            result["%s_description" % name] = description
            result["%s_statename" % name] = statename
        return error, result

    def check_cc(self):
        error = ""
        try:
            bk.cc.get_plat_id()
        except Exception as err:
            error = str(err)
        if error.startswith("40"):  # 40x
            error = ""
        return error, {}
| [
"fta.storage.mysql.session.query",
"fta.utils.supervisorctl.get_supervisor_client",
"fta.storage.cache.Cache",
"json.dumps",
"datetime.datetime.today",
"fta.www.apiservice.fta_api_page.route",
"beanstalkc.Connection",
"project.utils.component.bk.cc.get_plat_id"
] | [((935, 964), 'fta.www.apiservice.fta_api_page.route', 'app.route', (['"""/status/process/"""'], {}), "('/status/process/')\n", (944, 964), True, 'from fta.www.apiservice import fta_api_page as app\n'), ((1067, 1086), 'json.dumps', 'json.dumps', (['results'], {}), '(results)\n', (1077, 1086), False, 'import json\n'), ((2070, 2084), 'fta.storage.cache.Cache', 'Cache', (['"""redis"""'], {}), "('redis')\n", (2075, 2084), False, 'from fta.storage.cache import Cache\n'), ((4637, 4660), 'fta.utils.supervisorctl.get_supervisor_client', 'get_supervisor_client', ([], {}), '()\n', (4658, 4660), False, 'from fta.utils.supervisorctl import get_supervisor_client\n'), ((3043, 3086), 'beanstalkc.Connection', 'beanstalkc.Connection', ([], {'host': 'host', 'port': 'port'}), '(host=host, port=port)\n', (3064, 3086), False, 'import beanstalkc\n'), ((5161, 5180), 'project.utils.component.bk.cc.get_plat_id', 'bk.cc.get_plat_id', ([], {}), '()\n', (5178, 5180), False, 'from project.utils.component import bk\n'), ((2628, 2685), 'fta.storage.mysql.session.query', 'session.query', (['tables.FtaSolutionsAppAlarminstancearchive'], {}), '(tables.FtaSolutionsAppAlarminstancearchive)\n', (2641, 2685), False, 'from fta.storage.mysql import session\n'), ((2761, 2777), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (2775, 2777), False, 'from datetime import datetime\n')] |
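A minimal client-side check of the endpoint this module exposes; the base URL is a placeholder:

import requests

# Hypothetical base URL for the FTA API service.
resp = requests.get("http://localhost:8000/status/process/")
status = resp.json()
if not status.get("ok"):
    failing = [k for k in status if k.endswith(":error") and status[k]]
    print("unhealthy components:", failing)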
#!/usr/bin/env python
"""
Server module
"""
from bokeh.server.server import Server
from tornado.ioloop import IOLoop

from .bokeh_server import modify_doc


def bk_worker(origin, bport):
    """A worker to run Bokeh application

    Args:
        origin - The origin of the websocket connections
        bport - The port of the Bokeh webapp
    """
    # Can't pass num_procs > 1 in this configuration. If you need to run
    # multiple processes, see e.g. flask_gunicorn_embed.py
    server = Server(
        {"/bokeh_server": modify_doc},
        io_loop=IOLoop(),
        allow_websocket_origin=[
            "localhost",  # localhost access
            "turingazureusagetest.azurewebsites.net",
            "turingazureusage.azurewebsites.net",
            "%s:%d" % ("localhost", int(bport)),  # localhost access
            "%s:%d" % (origin, int(bport)),  # origin access
        ],
        port=int(bport),
    )
    server.start()
    server.io_loop.start()
    return server
| [
"tornado.ioloop.IOLoop"
] | [((561, 569), 'tornado.ioloop.IOLoop', 'IOLoop', ([], {}), '()\n', (567, 569), False, 'from tornado.ioloop import IOLoop\n')] |
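Since server.io_loop.start() blocks, bk_worker is normally launched on a background thread while the web frontend keeps the main thread, as the flask_gunicorn_embed comment suggests. A sketch with placeholder origin and port values:

from threading import Thread

# Placeholder deployment values; the frontend process stays on the main thread.
Thread(target=bk_worker, args=("localhost", 5006), daemon=True).start()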
# -*- coding: UTF-8 -*-#
"""Serialize the scheduled action."""
from typing import Dict

from django.utils.translation import ugettext, ugettext_lazy as _
from rest_framework import serializers
from rest_framework.exceptions import APIException

from ontask import get_incorrect_email, models
from ontask.dataops import sql
from ontask.scheduler.services import schedule_crud_factory


class ScheduledOperationSerializer(serializers.ModelSerializer):
    """Serializer to take care of a few fields and the item column."""

    def extra_validation(self, validated_data: Dict):
        """Check for extra properties.

        Checking for extra properties in the information contained in the
        validated data. Namely:

        - The action name corresponds with a valid action for the user.
        - The execute time must be in the future
        - The received object has a payload
        - The item_column, if present, must be a correct column name
        - Exclude_values must be a list
        - Exclude_values can only be non-empty if item_column is given.

        :param validated_data:
        :return: Nothing. Exceptions are raised when anomalies are detected
        """
        this_user = self.context['request'].user

        # Workflow should be owned by the user
        workflow = validated_data['workflow']
        if workflow.user != this_user:
            raise APIException(
                _('Incorrect permission to manipulate workflow'))

        # If action is given, it should be owned by the user
        action = validated_data['action']
        if action is not None and action.workflow != workflow:
            # The action could not be found.
            raise APIException(_('Incorrect permission to manipulate action.'))

        # Execution date/times must be correct
        diagnostic_msg = models.ScheduledOperation.validate_times(
            validated_data.get('execute'),
            validated_data.get('frequency'),
            validated_data.get('execute_until'))
        if diagnostic_msg:
            raise APIException(diagnostic_msg)

        # Check that the received object has a payload
        payload = validated_data.get('payload', {})
        if not payload:
            raise APIException(_('Scheduled objects need a payload.'))

        # Item_column, if present, has to be a correct column name
        item_column = payload.get('item_column')
        if item_column:
            item_column = action.workflow.columns.filter(
                name=item_column,
            ).first()
            if not item_column:
                raise APIException(
                    _('Invalid column name for selecting items'))

        exclude_values = payload.get('exclude_values', [])
        # Exclude_values has to be a list
        if exclude_values is not None and not isinstance(exclude_values, list):
            raise APIException(_('Exclude_values must be a list'))

        # Exclude_values can only have content if item_column is given.
        if not item_column and payload.get('exclude_values'):
            raise APIException(
                _('Exclude items needs a column in item_column'))

    def create(self, validated_data, **kwargs) -> models.ScheduledOperation:
        """Create a new instance of the scheduled data."""
        del kwargs
        try:
            self.extra_validation(validated_data)
            scheduled_obj = schedule_crud_factory.crud_create_or_update(
                self.context['request'].user,
                validated_data)
        except Exception as exc:
            raise APIException(
                ugettext('Scheduled action could not be created: {0}').format(
                    str(exc)))

        return scheduled_obj

    def update(self, instance: models.ScheduledOperation, validated_data):
        """Update the information in the scheduled action."""
        try:
            self.extra_validation(validated_data)
            scheduled_obj = schedule_crud_factory.crud_create_or_update(
                self.context['request'].user,
                validated_data,
                instance)
        except Exception as exc:
            raise APIException(
                ugettext('Unable to update scheduled action: {0}').format(
                    str(exc)))

        return scheduled_obj

    class Meta:
        """Select model and define fields."""

        model = models.ScheduledOperation
        fields = (
            'id',
            'name',
            'description_text',
            'operation_type',
            'execute',
            'frequency',
            'execute_until',
            'workflow',
            'action',
            'payload')


class ScheduledEmailSerializer(ScheduledOperationSerializer):
    """Validate the presence of certain fields."""

    def extra_validation(self, validated_data: Dict):
        """Validate the presence of certain fields."""
        super().extra_validation(validated_data)

        action = validated_data['action']
        payload = validated_data['payload']
        item_column_name = payload.get('item_column')
        if not item_column_name:
            raise APIException(
                _('Personalized text need a column name in payload '
                  'field item_column.'))

        item_column = action.workflow.columns.filter(
            name=item_column_name).first()
        if not item_column:
            raise APIException(
                _('Incorrect column name in field item_column.'))

        if action.action_type != models.Action.PERSONALIZED_TEXT:
            raise APIException(_('Incorrect type of action to schedule.'))

        subject = payload.get('subject')
        if not subject:
            raise APIException(_('Personalized text needs a subject.'))

        # Check if the values in the email column are correct emails
        try:
            column_data = sql.get_rows(
                action.workflow.get_data_frame_table_name(),
                column_names=[item_column.name])
            incorrect_email = get_incorrect_email(
                [row[item_column.name] for row in column_data])
            if incorrect_email:
                # column has incorrect email addresses
                raise APIException(
                    _('Incorrect email value "{0}".').format(incorrect_email))
        except TypeError:
            raise APIException(
                _('The column with email addresses has incorrect values.'))

        payload['item_column'] = item_column.id

        try:
            incorrect_email = get_incorrect_email(
                [email for email in payload.get('cc_email', '').split()
                 if email])
            if incorrect_email:
                raise APIException(
                    _('Incorrect email value "{0}".').format(incorrect_email))
        except Exception:
            raise APIException(
                _('cc_email must be a space-separated list of emails.'))

        try:
            incorrect_email = get_incorrect_email(
                [email for email in payload.get('bcc_email', '').split()
                 if email])
            if incorrect_email:
                raise APIException(
                    _('Incorrect email value "{0}".').format(incorrect_email))
        except Exception:
            raise APIException(
                _('bcc_email must be a space-separated list of emails.'))


class ScheduledJSONSerializer(ScheduledOperationSerializer):
    """Class to add an extra check for the presence of a token."""

    def extra_validation(self, validated_data):
        """Check that the token is present before execution."""
        super().extra_validation(validated_data)

        action = validated_data['action']
        if action.action_type != models.Action.PERSONALIZED_JSON:
            raise APIException(_('Incorrect type of action to schedule.'))

        payload = validated_data['payload']
        item_column_name = payload.get('item_column')
        if not item_column_name:
            raise APIException(
                _('Personalized text need a column name in payload '
                  'field item_column.'))

        item_column = action.workflow.columns.filter(
            name=item_column_name).first()
        if not item_column:
            raise APIException(
                _('Incorrect column name in field item_column.'))
        payload['item_column'] = item_column.id

        if not payload.get('token'):
            raise APIException(_(
                'Personalized JSON needs a token in payload.'))
| [
"rest_framework.exceptions.APIException",
"django.utils.translation.ugettext_lazy",
"ontask.scheduler.services.schedule_crud_factory.crud_create_or_update",
"ontask.get_incorrect_email",
"django.utils.translation.ugettext"
] | [((2064, 2092), 'rest_framework.exceptions.APIException', 'APIException', (['diagnostic_msg'], {}), '(diagnostic_msg)\n', (2076, 2092), False, 'from rest_framework.exceptions import APIException\n'), ((3413, 3506), 'ontask.scheduler.services.schedule_crud_factory.crud_create_or_update', 'schedule_crud_factory.crud_create_or_update', (["self.context['request'].user", 'validated_data'], {}), "(self.context['request'].user,\n validated_data)\n", (3456, 3506), False, 'from ontask.scheduler.services import schedule_crud_factory\n'), ((3970, 4073), 'ontask.scheduler.services.schedule_crud_factory.crud_create_or_update', 'schedule_crud_factory.crud_create_or_update', (["self.context['request'].user", 'validated_data', 'instance'], {}), "(self.context['request'].user,\n validated_data, instance)\n", (4013, 4073), False, 'from ontask.scheduler.services import schedule_crud_factory\n'), ((6050, 6117), 'ontask.get_incorrect_email', 'get_incorrect_email', (['[row[item_column.name] for row in column_data]'], {}), '([row[item_column.name] for row in column_data])\n', (6069, 6117), False, 'from ontask import get_incorrect_email, models\n'), ((1425, 1473), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect permission to manipulate workflow"""'], {}), "('Incorrect permission to manipulate workflow')\n", (1426, 1473), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((1718, 1765), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect permission to manipulate action."""'], {}), "('Incorrect permission to manipulate action.')\n", (1719, 1765), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((2256, 2294), 'django.utils.translation.ugettext_lazy', '_', (['"""Scheduled objects need a payload."""'], {}), "('Scheduled objects need a payload.')\n", (2257, 2294), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((2897, 2931), 'django.utils.translation.ugettext_lazy', '_', (['"""Exclude_values must be a list"""'], {}), "('Exclude_values must be a list')\n", (2898, 2931), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3116, 3164), 'django.utils.translation.ugettext_lazy', '_', (['"""Exclude items needs a column in item_column"""'], {}), "('Exclude items needs a column in item_column')\n", (3117, 3164), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((5189, 5260), 'django.utils.translation.ugettext_lazy', '_', (['"""Personalized text need a column name in payload field item_column."""'], {}), "('Personalized text need a column name in payload field item_column.')\n", (5190, 5260), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((5457, 5505), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect column name in field item_column."""'], {}), "('Incorrect column name in field item_column.')\n", (5458, 5505), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((5605, 5647), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect type of action to schedule."""'], {}), "('Incorrect type of action to schedule.')\n", (5606, 5647), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((5746, 5785), 'django.utils.translation.ugettext_lazy', '_', (['"""Personalized text needs a subject."""'], {}), "('Personalized text needs a subject.')\n", (5747, 5785), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((7839, 7881), 
'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect type of action to schedule."""'], {}), "('Incorrect type of action to schedule.')\n", (7840, 7881), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((8063, 8134), 'django.utils.translation.ugettext_lazy', '_', (['"""Personalized text need a column name in payload field item_column."""'], {}), "('Personalized text need a column name in payload field item_column.')\n", (8064, 8134), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((8331, 8379), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect column name in field item_column."""'], {}), "('Incorrect column name in field item_column.')\n", (8332, 8379), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((8498, 8546), 'django.utils.translation.ugettext_lazy', '_', (['"""Personalized JSON needs a token in payload."""'], {}), "('Personalized JSON needs a token in payload.')\n", (8499, 8546), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((2638, 2682), 'django.utils.translation.ugettext_lazy', '_', (['"""Invalid column name for selecting items"""'], {}), "('Invalid column name for selecting items')\n", (2639, 2682), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((6411, 6469), 'django.utils.translation.ugettext_lazy', '_', (['"""The column with email addresses has incorrect values."""'], {}), "('The column with email addresses has incorrect values.')\n", (6412, 6469), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((6905, 6960), 'django.utils.translation.ugettext_lazy', '_', (['"""cc_email must be a space-separated list of emails."""'], {}), "('cc_email must be a space-separated list of emails.')\n", (6906, 6960), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((7349, 7405), 'django.utils.translation.ugettext_lazy', '_', (['"""bcc_email must be a space-separated list of emails."""'], {}), "('bcc_email must be a space-separated list of emails.')\n", (7350, 7405), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((3617, 3671), 'django.utils.translation.ugettext', 'ugettext', (['"""Scheduled action could not be created: {0}"""'], {}), "('Scheduled action could not be created: {0}')\n", (3625, 3671), False, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((4200, 4250), 'django.utils.translation.ugettext', 'ugettext', (['"""Unable to update scheduled action: {0}"""'], {}), "('Unable to update scheduled action: {0}')\n", (4208, 4250), False, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((6278, 6311), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect email value "{0}"."""'], {}), '(\'Incorrect email value "{0}".\')\n', (6279, 6311), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((6772, 6805), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect email value "{0}"."""'], {}), '(\'Incorrect email value "{0}".\')\n', (6773, 6805), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n'), ((7216, 7249), 'django.utils.translation.ugettext_lazy', '_', (['"""Incorrect email value "{0}"."""'], {}), '(\'Incorrect email value "{0}".\')\n', (7217, 7249), True, 'from django.utils.translation import ugettext, ugettext_lazy as _\n')] |
from swcf import dbConnect
from swcf.others.format import responseJSON, rows_to_dict, rows_to_dict_list
def selectOne():
conn = dbConnect()
try:
cursor = conn.cursor()
query = '''SELECT 1 '''
cursor.execute(query, )
data = rows_to_dict(cursor)
return responseJSON('T', 'sukses select', data)
except Exception as err:
        return responseJSON('F', str(err), None)
finally:
if(conn):
conn.close()
def insertPost(name, email, issue, content):
conn = dbConnect()
try:
cursor = conn.cursor()
query = '''INSERT INTO post VALUES(
(SELECT COALESCE(MAX(id)+1, 1) FROM post),
%s, %s, %s, %s); '''
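        # The %s placeholders plus the parameter tuple passed to execute()
        # below let the DB driver escape the values itself, so the form
        # fields cannot inject SQL.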
cursor.execute(query, (name, email, issue, content, ))
conn.commit()
return responseJSON('T', 'sukses insert', None)
except Exception as err:
return responseJSON('F', str(err), None)
finally:
if(conn):
        conn.close()
| [
"swcf.others.format.rows_to_dict",
"swcf.dbConnect",
"swcf.others.format.responseJSON"
] | [((133, 144), 'swcf.dbConnect', 'dbConnect', ([], {}), '()\n', (142, 144), False, 'from swcf import dbConnect\n'), ((533, 544), 'swcf.dbConnect', 'dbConnect', ([], {}), '()\n', (542, 544), False, 'from swcf import dbConnect\n'), ((264, 284), 'swcf.others.format.rows_to_dict', 'rows_to_dict', (['cursor'], {}), '(cursor)\n', (276, 284), False, 'from swcf.others.format import responseJSON, rows_to_dict, rows_to_dict_list\n'), ((300, 340), 'swcf.others.format.responseJSON', 'responseJSON', (['"""T"""', '"""sukses select"""', 'data'], {}), "('T', 'sukses select', data)\n", (312, 340), False, 'from swcf.others.format import responseJSON, rows_to_dict, rows_to_dict_list\n'), ((841, 881), 'swcf.others.format.responseJSON', 'responseJSON', (['"""T"""', '"""sukses insert"""', 'None'], {}), "('T', 'sukses insert', None)\n", (853, 881), False, 'from swcf.others.format import responseJSON, rows_to_dict, rows_to_dict_list\n')] |
from __future__ import division
from __future__ import print_function
import subprocess
import time
from string import ascii_uppercase
import numpy as np
import pandas as pd
import pandascharm as pc
import random
from pylab import *
import pickle as pic
from pathlib import Path
import Bio.Phylo as Phylo
from Bio.Phylo.TreeConstruction import DistanceCalculator, ParsimonyScorer
from Bio import AlignIO
from Bio.Align import MultipleSeqAlignment
from Bio.Phylo.TreeConstruction import DistanceTreeConstructor
import networkx as nx
from skbio import DistanceMatrix
from skbio.tree import nj
import scipy as sp
import sys
import os
import argparse
from cassiopeia.TreeSolver.lineage_solver.lineage_solver import *
from cassiopeia.TreeSolver.simulation_tools.simulation_utils import *
from cassiopeia.TreeSolver.utilities import (
fill_in_tree,
tree_collapse,
tree_collapse2,
convert_network_to_newick_format,
newick_to_network,
)
from cassiopeia.TreeSolver.Cassiopeia_Tree import Cassiopeia_Tree
from cassiopeia.TreeSolver.Node import Node
from numba import jit
import cassiopeia as sclt
SCLT_PATH = Path(sclt.__path__[0])
@jit(parallel=True)
def compute_distance_mat(cm, C, priors=None):
dm = np.zeros(C * (C - 1) // 2, dtype=float)
k = 0
for i in range(C - 1):
for j in range(i + 1, C):
s1 = cm[i]
s2 = cm[j]
dm[k] = pairwise_dist(s1, s2, priors)
k += 1
return dm
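# The loop above fills a condensed distance vector (pairs i < j in row-major
# order). This is the layout scipy.spatial.distance.squareform expects, which
# is how main() expands it back to a full C x C matrix for neighbor joining.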
def pairwise_dist(s1, s2, priors=None):
d = 0
num_present = 0
for i in range(len(s1)):
if s1[i] == "-" or s2[i] == "-":
continue
num_present += 1
if priors:
if s1[i] == s2[i]:
d += np.log(priors[i][str(s1[i])])
if s1[i] != s2[i]:
if s1[i] == "0" or s2[i] == "0":
if priors:
if s1[i] != "0":
d -= np.log(priors[i][str(s1[i])])
else:
d -= np.log(priors[i][str(s2[i])])
else:
d += 1
else:
if priors:
d -= np.log(priors[i][str(s1[i])]) + np.log(priors[i][str(s2[i])])
else:
d += 2
if num_present == 0:
return 0
return d / num_present
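# Worked example (no priors, toy character vectors):
#   pairwise_dist(['0', '1', '-'], ['0', '2', '1'])
#   i=0: '0' == '0'              -> no penalty, num_present = 1
#   i=1: '1' != '2', neither '0' -> d += 2,     num_present = 2
#   i=2: s1[i] == '-'            -> position skipped
#   returns d / num_present = 2 / 2 = 1.0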
def write_leaves_to_charmat(target_nodes, fn):
"""
    Helper function to write TARGET_NODES to a character matrix to convert to multistate;
needed to run camin-sokal.
"""
number_of_characters = len(target_nodes[0].char_string.split("|"))
with open(fn, "w") as f:
f.write("cellBC")
for i in range(number_of_characters):
f.write("\t" + str(i))
f.write("\n")
for n in target_nodes:
charstring, sname = n.char_string, n.name
f.write(sname)
chars = charstring.split("|")
for c in chars:
f.write("\t" + c)
f.write("\n")
def unique_alignments(aln):
new_aln = []
obs = []
for a in aln:
if a.seq in obs:
continue
new_aln.append(a)
obs.append(a.seq)
return MultipleSeqAlignment(new_aln)
def nx_to_charmat(target_nodes):
number_of_characters = len(target_nodes[0].split("|"))
cm = pd.DataFrame(np.zeros((len(target_nodes), number_of_characters)))
ind = []
for i in range(len(target_nodes)):
nr = []
n = target_nodes[i]
charstring, sname = n.split("_")
ind.append("s" + sname)
chars = charstring.split("|")
for c in chars:
nr.append(c)
cm.iloc[i] = np.array(nr)
cm.columns = [("r" + str(i)) for i in range(number_of_characters)]
cm.index = ind
return cm
def construct_weights(phy, weights_fn, write=True):
"""
    Given a binary phylip infile path, compute the character-wise log frequencies
and translate to the phylip scaling (0-Z) for the weights file.
"""
aln = AlignIO.read(phy, "phylip")
df = pc.from_bioalignment(aln)
abund = df.apply(lambda x: len(x[x == "1"]) / len(x), axis=1)
labund = np.array(
list(map(lambda x: float(-1 * np.log2(x)) if x > 1 else x, abund))
)
labund[labund == 0] = labund.min()
# scale linearly to range for phylip weights
_min = 0
_max = 35
scaled = (_max - _min) / (labund.max() - labund.min()) * (
labund - labund.max()
) + _max
scaled = list(map(lambda x: int(x), scaled))
weights_range = [str(i) for i in range(10)] + [l for l in ascii_uppercase]
weights_dict = dict(zip(range(36), weights_range))
scaled = list(map(lambda x: weights_dict[x], scaled))
if write:
with open(weights_fn, "w") as f:
f.write("".join(scaled))
return scaled
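# Example of the 0-Z weight alphabet built above: integer weights 0-35 map to
# the single characters '0'-'9' then 'A'-'Z', so weights_dict[10] == 'A' and
# weights_dict[35] == 'Z'.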
def main():
"""
    Takes in a character matrix, an algorithm, and an output file, and
    writes the reconstructed tree to the output file as a pickled network.
"""
parser = argparse.ArgumentParser()
parser.add_argument("netfp", type=str, help="character_matrix")
parser.add_argument("-nj", "--neighbor-joining", action="store_true", default=False)
parser.add_argument(
"--neighbor_joining_weighted", action="store_true", default=False
)
parser.add_argument("--ilp", action="store_true", default=False)
parser.add_argument("--hybrid", action="store_true", default=False)
parser.add_argument(
"--cutoff", type=int, default=80, help="Cutoff for ILP during Hybrid algorithm"
)
parser.add_argument(
"--hybrid_lca_mode",
action="store_true",
help="Use LCA distances to transition in hybrid mode, instead of number of cells",
)
parser.add_argument(
"--time_limit", type=int, default=-1, help="Time limit for ILP convergence"
)
parser.add_argument(
"--iter_limit",
type=int,
default=-1,
help="Max number of iterations for ILP solver",
)
parser.add_argument("--greedy", "-g", action="store_true", default=False)
parser.add_argument("--camin-sokal", "-cs", action="store_true", default=False)
parser.add_argument(
"--verbose", action="store_true", default=False, help="output verbosity"
)
parser.add_argument("--mutation_map", type=str, default="")
parser.add_argument("--num_threads", type=int, default=1)
parser.add_argument("--no_triplets", action="store_true", default=False)
parser.add_argument("--max_neighborhood_size", type=str, default=3000)
parser.add_argument("--out_fp", type=str, default=None, help="optional output file")
parser.add_argument(
"--seed", type=int, default=None, help="Random seed for ILP solver"
)
args = parser.parse_args()
netfp = args.netfp
outfp = args.out_fp
verbose = args.verbose
lca_mode = args.hybrid_lca_mode
if lca_mode:
lca_cutoff = args.cutoff
cell_cutoff = None
else:
cell_cutoff = args.cutoff
lca_cutoff = None
time_limit = args.time_limit
iter_limit = args.iter_limit
num_threads = args.num_threads
max_neighborhood_size = args.max_neighborhood_size
seed = args.seed
if seed is not None:
random.seed(seed)
np.random.seed(seed)
score_triplets = not args.no_triplets
prior_probs = None
if args.mutation_map != "":
prior_probs = pic.load(open(args.mutation_map, "rb"))
name = netfp.split("/")[-1]
stem = ".".join(name.split(".")[:-1])
true_network = nx.read_gpickle(netfp)
if isinstance(true_network, Cassiopeia_Tree):
true_network = true_network.get_network()
target_nodes = get_leaves_of_tree(true_network)
target_nodes_uniq = []
seen_charstrings = []
for t in target_nodes:
if t.char_string not in seen_charstrings:
seen_charstrings.append(t.char_string)
target_nodes_uniq.append(t)
if args.greedy:
if verbose:
print(
"Running Greedy Algorithm on " + str(len(target_nodes_uniq)) + " Cells"
)
reconstructed_network_greedy = solve_lineage_instance(
target_nodes_uniq, method="greedy", prior_probabilities=prior_probs
)
net = reconstructed_network_greedy[0]
if outfp is None:
outfp = name.replace("true", "greedy")
pic.dump(net, open(outfp, "wb"))
elif args.hybrid:
if verbose:
print(
"Running Hybrid Algorithm on " + str(len(target_nodes_uniq)) + " Cells"
)
print(
"Parameters: ILP on sets of "
                + str(args.cutoff)
+ " cells "
+ str(time_limit)
+ "s to complete optimization"
)
reconstructed_network_hybrid = solve_lineage_instance(
target_nodes_uniq,
method="hybrid",
hybrid_cell_cutoff=cell_cutoff,
hybrid_lca_cutoff=lca_cutoff,
prior_probabilities=prior_probs,
time_limit=time_limit,
threads=num_threads,
max_neighborhood_size=max_neighborhood_size,
seed=seed,
num_iter=iter_limit,
)
net = reconstructed_network_hybrid[0]
if outfp is None:
outfp = name.replace("true", "hybrid")
pic.dump(net, open(outfp, "wb"))
elif args.ilp:
if verbose:
print(
"Running Hybrid Algorithm on " + str(len(target_nodes_uniq)) + " Cells"
)
print(
"Parameters: ILP on sets of "
                + str(args.cutoff)
+ " cells "
+ str(time_limit)
+ "s to complete optimization"
)
reconstructed_network_ilp = solve_lineage_instance(
target_nodes_uniq,
method="ilp",
            hybrid_subset_cutoff=args.cutoff,
prior_probabilities=prior_probs,
time_limit=time_limit,
max_neighborhood_size=max_neighborhood_size,
seed=seed,
num_iter=iter_limit,
)
net = reconstructed_network_ilp[0]
# reconstructed_network_ilp = nx.relabel_nodes(reconstructed_network_ilp, string_to_sample)
if outfp is None:
outfp = name.replace("true", "ilp")
pic.dump(net, open(outfp, "wb"))
elif args.neighbor_joining:
if verbose:
print(
"Running Neighbor-Joining on "
+ str(len(target_nodes_uniq))
+ " Unique Cells"
)
infile = "".join(name.split(".")[:-1]) + "infile.txt"
fn = "".join(name.split(".")[:-1]) + "phylo.txt"
write_leaves_to_charmat(target_nodes_uniq, fn)
script = SCLT_PATH / "TreeSolver" / "binarize_multistate_charmat.py"
cmd = "python3.6 " + str(script) + " " + fn + " " + infile + " --relaxed"
p = subprocess.Popen(cmd, shell=True)
pid, ecode = os.waitpid(p.pid, 0)
aln = AlignIO.read(infile, "phylip-relaxed")
aln = unique_alignments(aln)
t0 = time.time()
calculator = DistanceCalculator("identity", skip_letters="?")
constructor = DistanceTreeConstructor(calculator, "nj")
tree = constructor.build_tree(aln)
tree.root_at_midpoint()
nj_net = Phylo.to_networkx(tree)
# convert labels to characters for writing to file
i = 0
rndict = {}
for n in nj_net:
if n.name is None:
rndict[n] = Node("state-node", [])
# n.name = "internal" + str(i)
# i += 1
else:
rndict[n] = Node(n.name, [])
nj_net = nx.relabel_nodes(nj_net, rndict)
# convert labels to strings, not Bio.Phylo.Clade objects
# c2str = map(lambda x: x.name, list(nj_net.nodes()))
# c2strdict = dict(zip(list(nj_net.nodes()), c2str))
# nj_net = nx.relabel_nodes(nj_net, c2strdict)
cm = pd.read_csv(fn, sep="\t", index_col=0)
cm_lookup = dict(
zip(
list(cm.apply(lambda x: "|".join([str(k) for k in x.values]), axis=1)),
cm.index.values,
)
)
nj_net = fill_in_tree(nj_net, cm)
nj_net = tree_collapse(nj_net)
for n in nj_net:
if n.char_string in cm_lookup.keys():
n.is_target = True
nj_net = Cassiopeia_Tree("neighbor-joining", network=nj_net)
if outfp is None:
outfp = name.replace("true", "nj")
pic.dump(nj_net, open(outfp, "wb"))
# Phylo.write(tree, out, 'newick')
os.system("rm " + infile)
os.system("rm " + fn)
elif args.neighbor_joining_weighted:
if verbose:
print(
"Running Neighbor-Joining with Weighted Scoring on "
+ str(len(target_nodes_uniq))
+ " Unique Cells"
)
target_node_charstrings = np.array(
[t.get_character_vec() for t in target_nodes_uniq]
)
dm = compute_distance_mat(
target_node_charstrings, len(target_node_charstrings), priors=prior_probs
)
ids = [t.name for t in target_nodes_uniq]
cm_uniq = pd.DataFrame(target_node_charstrings)
cm_uniq.index = ids
dm = sp.spatial.distance.squareform(dm)
dm = DistanceMatrix(dm, ids)
newick_str = nj(dm, result_constructor=str)
tree = newick_to_network(newick_str, cm_uniq)
nj_net = fill_in_tree(tree, cm_uniq)
nj_net = tree_collapse(nj_net)
cm_lookup = dict(
zip(
list(
cm_uniq.apply(
lambda x: "|".join([str(k) for k in x.values]), axis=1
)
),
cm_uniq.index.values,
)
)
rdict = {}
for n in nj_net:
if n.char_string in cm_lookup:
n.is_target = True
else:
n.is_target = False
nj_net = Cassiopeia_Tree("neighbor-joining", network=nj_net)
if outfp is None:
outfp = name.replace("true", "nj_weighted")
pic.dump(nj_net, open(outfp, "wb"))
elif args.camin_sokal:
if verbose:
print(
"Running Camin-Sokal Max Parsimony Algorithm on "
+ str(len(target_nodes_uniq))
+ " Unique Cells"
)
samples_to_cells = {}
indices = []
        for i, n in enumerate(target_nodes_uniq):
samples_to_cells["s" + str(i)] = n.name
indices.append(n.name)
n.name = str(i)
infile = "".join(name.split(".")[:-1]) + "_cs_infile.txt"
fn = "".join(name.split(".")[:-1]) + "_cs_phylo.txt"
weights_fn = "".join(name.split(".")[:-1]) + "_cs_weights.txt"
write_leaves_to_charmat(target_nodes_uniq, fn)
script = SCLT_PATH / "TreeSolver" / "binarize_multistate_charmat.py"
cmd = "python3.6 " + str(script) + " " + fn + " " + infile
pi = subprocess.Popen(cmd, shell=True)
pid, ecode = os.waitpid(pi.pid, 0)
weights = construct_weights(infile, weights_fn)
os.system("touch outfile")
os.system("touch outtree")
outfile = stem + "outfile.txt"
outtree = stem + "outtree.txt"
# run phylip mix with camin-sokal
responses = "." + stem + ".temp.txt"
FH = open(responses, "w")
current_dir = os.getcwd()
FH.write(infile + "\n")
FH.write("F\n" + outfile + "\n")
FH.write("P\n")
FH.write("W\n")
FH.write("Y\n")
FH.write(weights_fn + "\n")
FH.write("F\n" + outtree + "\n")
FH.close()
t0 = time.time()
cmd = "~/software/phylip-3.697/exe/mix"
cmd += " < " + responses + " > screenout1"
p = subprocess.Popen(cmd, shell=True)
pid, ecode = os.waitpid(p.pid, 0)
consense_outtree = stem + "consenseouttree.txt"
consense_outfile = stem + "consenseoutfile.txt"
FH = open(responses, "w")
FH.write(outtree + "\n")
FH.write("F\n" + consense_outfile + "\n")
FH.write("Y\n")
FH.write("F\n" + consense_outtree + "\n")
FH.close()
if verbose:
print("Computing Consensus Tree, elasped time: " + str(time.time() - t0))
cmd = "~/software/phylip-3.697/exe/consense"
cmd += " < " + responses + " > screenout"
p2 = subprocess.Popen(cmd, shell=True)
pid, ecode = os.waitpid(p2.pid, 0)
newick_str = ""
with open(consense_outtree, "r") as f:
for l in f:
l = l.strip()
newick_str += l
cm = pd.read_csv(fn, sep="\t", index_col=0, dtype=str)
cm.index = indices
cs_net = newick_to_network(newick_str, cm)
for n in cs_net:
if n.name in samples_to_cells:
n.name = samples_to_cells[n.name]
cs_net = fill_in_tree(cs_net, cm)
cs_net = tree_collapse2(cs_net)
cm_lookup = dict(
zip(
list(cm.apply(lambda x: "|".join([str(k) for k in x.values]), axis=1)),
cm.index.values,
)
)
for n in cs_net:
if n.char_string in cm_lookup.keys():
n.is_target = True
cs_net = Cassiopeia_Tree("camin-sokal", network=cs_net)
if outfp is None:
outfp = name.replace("true", "cs")
pic.dump(cs_net, open(outfp, "wb"))
os.system("rm " + outfile)
os.system("rm " + responses)
os.system("rm " + outtree)
os.system("rm " + consense_outfile)
os.system("rm " + infile)
os.system("rm " + fn)
else:
raise Exception(
"Please choose an algorithm from the list: greedy, hybrid, ilp, nj, or camin-sokal"
)
if __name__ == "__main__":
main()
| [
"Bio.AlignIO.read",
"pandas.read_csv",
"skbio.tree.nj",
"numpy.array",
"cassiopeia.TreeSolver.utilities.tree_collapse",
"Bio.Phylo.TreeConstruction.DistanceCalculator",
"Bio.Phylo.TreeConstruction.DistanceTreeConstructor",
"networkx.relabel_nodes",
"argparse.ArgumentParser",
"pathlib.Path",
"subprocess.Popen",
"cassiopeia.TreeSolver.utilities.fill_in_tree",
"numpy.random.seed",
"pandas.DataFrame",
"os.system",
"scipy.spatial.distance.squareform",
"os.waitpid",
"skbio.DistanceMatrix",
"Bio.Phylo.to_networkx",
"pandascharm.from_bioalignment",
"numba.jit",
"networkx.read_gpickle",
"numpy.log2",
"time.time",
"cassiopeia.TreeSolver.utilities.newick_to_network",
"random.seed",
"os.getcwd",
"numpy.zeros",
"cassiopeia.TreeSolver.Node.Node",
"Bio.Align.MultipleSeqAlignment",
"cassiopeia.TreeSolver.Cassiopeia_Tree.Cassiopeia_Tree",
"cassiopeia.TreeSolver.utilities.tree_collapse2"
] | [((1131, 1153), 'pathlib.Path', 'Path', (['sclt.__path__[0]'], {}), '(sclt.__path__[0])\n', (1135, 1153), False, 'from pathlib import Path\n'), ((1157, 1175), 'numba.jit', 'jit', ([], {'parallel': '(True)'}), '(parallel=True)\n', (1160, 1175), False, 'from numba import jit\n'), ((1232, 1271), 'numpy.zeros', 'np.zeros', (['(C * (C - 1) // 2)'], {'dtype': 'float'}), '(C * (C - 1) // 2, dtype=float)\n', (1240, 1271), True, 'import numpy as np\n'), ((3200, 3229), 'Bio.Align.MultipleSeqAlignment', 'MultipleSeqAlignment', (['new_aln'], {}), '(new_aln)\n', (3220, 3229), False, 'from Bio.Align import MultipleSeqAlignment\n'), ((4037, 4064), 'Bio.AlignIO.read', 'AlignIO.read', (['phy', '"""phylip"""'], {}), "(phy, 'phylip')\n", (4049, 4064), False, 'from Bio import AlignIO\n'), ((4075, 4100), 'pandascharm.from_bioalignment', 'pc.from_bioalignment', (['aln'], {}), '(aln)\n', (4095, 4100), True, 'import pandascharm as pc\n'), ((5002, 5027), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5025, 5027), False, 'import argparse\n'), ((7548, 7570), 'networkx.read_gpickle', 'nx.read_gpickle', (['netfp'], {}), '(netfp)\n', (7563, 7570), True, 'import networkx as nx\n'), ((3679, 3691), 'numpy.array', 'np.array', (['nr'], {}), '(nr)\n', (3687, 3691), True, 'import numpy as np\n'), ((7244, 7261), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (7255, 7261), False, 'import random\n'), ((7270, 7290), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (7284, 7290), True, 'import numpy as np\n'), ((10987, 11020), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (11003, 11020), False, 'import subprocess\n'), ((11042, 11062), 'os.waitpid', 'os.waitpid', (['p.pid', '(0)'], {}), '(p.pid, 0)\n', (11052, 11062), False, 'import os\n'), ((11078, 11116), 'Bio.AlignIO.read', 'AlignIO.read', (['infile', '"""phylip-relaxed"""'], {}), "(infile, 'phylip-relaxed')\n", (11090, 11116), False, 'from Bio import AlignIO\n'), ((11169, 11180), 'time.time', 'time.time', ([], {}), '()\n', (11178, 11180), False, 'import time\n'), ((11202, 11250), 'Bio.Phylo.TreeConstruction.DistanceCalculator', 'DistanceCalculator', (['"""identity"""'], {'skip_letters': '"""?"""'}), "('identity', skip_letters='?')\n", (11220, 11250), False, 'from Bio.Phylo.TreeConstruction import DistanceCalculator, ParsimonyScorer\n'), ((11273, 11314), 'Bio.Phylo.TreeConstruction.DistanceTreeConstructor', 'DistanceTreeConstructor', (['calculator', '"""nj"""'], {}), "(calculator, 'nj')\n", (11296, 11314), False, 'from Bio.Phylo.TreeConstruction import DistanceTreeConstructor\n'), ((11410, 11433), 'Bio.Phylo.to_networkx', 'Phylo.to_networkx', (['tree'], {}), '(tree)\n', (11427, 11433), True, 'import Bio.Phylo as Phylo\n'), ((11789, 11821), 'networkx.relabel_nodes', 'nx.relabel_nodes', (['nj_net', 'rndict'], {}), '(nj_net, rndict)\n', (11805, 11821), True, 'import networkx as nx\n'), ((12080, 12118), 'pandas.read_csv', 'pd.read_csv', (['fn'], {'sep': '"""\t"""', 'index_col': '(0)'}), "(fn, sep='\\t', index_col=0)\n", (12091, 12118), True, 'import pandas as pd\n'), ((12326, 12350), 'cassiopeia.TreeSolver.utilities.fill_in_tree', 'fill_in_tree', (['nj_net', 'cm'], {}), '(nj_net, cm)\n', (12338, 12350), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((12369, 12390), 'cassiopeia.TreeSolver.utilities.tree_collapse', 'tree_collapse', (['nj_net'], {}), '(nj_net)\n', (12382, 
12390), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((12520, 12571), 'cassiopeia.TreeSolver.Cassiopeia_Tree.Cassiopeia_Tree', 'Cassiopeia_Tree', (['"""neighbor-joining"""'], {'network': 'nj_net'}), "('neighbor-joining', network=nj_net)\n", (12535, 12571), False, 'from cassiopeia.TreeSolver.Cassiopeia_Tree import Cassiopeia_Tree\n'), ((12741, 12766), 'os.system', 'os.system', (["('rm ' + infile)"], {}), "('rm ' + infile)\n", (12750, 12766), False, 'import os\n'), ((12775, 12796), 'os.system', 'os.system', (["('rm ' + fn)"], {}), "('rm ' + fn)\n", (12784, 12796), False, 'import os\n'), ((13360, 13397), 'pandas.DataFrame', 'pd.DataFrame', (['target_node_charstrings'], {}), '(target_node_charstrings)\n', (13372, 13397), True, 'import pandas as pd\n'), ((13439, 13473), 'scipy.spatial.distance.squareform', 'sp.spatial.distance.squareform', (['dm'], {}), '(dm)\n', (13469, 13473), True, 'import scipy as sp\n'), ((13488, 13511), 'skbio.DistanceMatrix', 'DistanceMatrix', (['dm', 'ids'], {}), '(dm, ids)\n', (13502, 13511), False, 'from skbio import DistanceMatrix\n'), ((13534, 13564), 'skbio.tree.nj', 'nj', (['dm'], {'result_constructor': 'str'}), '(dm, result_constructor=str)\n', (13536, 13564), False, 'from skbio.tree import nj\n'), ((13581, 13619), 'cassiopeia.TreeSolver.utilities.newick_to_network', 'newick_to_network', (['newick_str', 'cm_uniq'], {}), '(newick_str, cm_uniq)\n', (13598, 13619), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((13638, 13665), 'cassiopeia.TreeSolver.utilities.fill_in_tree', 'fill_in_tree', (['tree', 'cm_uniq'], {}), '(tree, cm_uniq)\n', (13650, 13665), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((13683, 13704), 'cassiopeia.TreeSolver.utilities.tree_collapse', 'tree_collapse', (['nj_net'], {}), '(nj_net)\n', (13696, 13704), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((14183, 14234), 'cassiopeia.TreeSolver.Cassiopeia_Tree.Cassiopeia_Tree', 'Cassiopeia_Tree', (['"""neighbor-joining"""'], {'network': 'nj_net'}), "('neighbor-joining', network=nj_net)\n", (14198, 14234), False, 'from cassiopeia.TreeSolver.Cassiopeia_Tree import Cassiopeia_Tree\n'), ((11613, 11635), 'cassiopeia.TreeSolver.Node.Node', 'Node', (['"""state-node"""', '[]'], {}), "('state-node', [])\n", (11617, 11635), False, 'from cassiopeia.TreeSolver.Node import Node\n'), ((11754, 11770), 'cassiopeia.TreeSolver.Node.Node', 'Node', (['n.name', '[]'], {}), '(n.name, [])\n', (11758, 11770), False, 'from cassiopeia.TreeSolver.Node import Node\n'), ((15243, 15276), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (15259, 15276), False, 'import subprocess\n'), ((15298, 15319), 'os.waitpid', 'os.waitpid', (['pi.pid', '(0)'], {}), '(pi.pid, 0)\n', (15308, 15319), False, 'import os\n'), ((15386, 15412), 'os.system', 'os.system', (['"""touch outfile"""'], {}), "('touch outfile')\n", (15395, 15412), False, 'import os\n'), ((15421, 15447), 'os.system', 'os.system', (['"""touch outtree"""'], {}), "('touch outtree')\n", (15430, 15447), False, 'import os\n'), ((15670, 15681), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (15679, 15681), False, 
'import os\n'), ((15937, 15948), 'time.time', 'time.time', ([], {}), '()\n', (15946, 15948), False, 'import time\n'), ((16060, 16093), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (16076, 16093), False, 'import subprocess\n'), ((16115, 16135), 'os.waitpid', 'os.waitpid', (['p.pid', '(0)'], {}), '(p.pid, 0)\n', (16125, 16135), False, 'import os\n'), ((16684, 16717), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (16700, 16717), False, 'import subprocess\n'), ((16739, 16760), 'os.waitpid', 'os.waitpid', (['p2.pid', '(0)'], {}), '(p2.pid, 0)\n', (16749, 16760), False, 'import os\n'), ((16933, 16982), 'pandas.read_csv', 'pd.read_csv', (['fn'], {'sep': '"""\t"""', 'index_col': '(0)', 'dtype': 'str'}), "(fn, sep='\\t', index_col=0, dtype=str)\n", (16944, 16982), True, 'import pandas as pd\n'), ((17028, 17061), 'cassiopeia.TreeSolver.utilities.newick_to_network', 'newick_to_network', (['newick_str', 'cm'], {}), '(newick_str, cm)\n', (17045, 17061), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((17199, 17223), 'cassiopeia.TreeSolver.utilities.fill_in_tree', 'fill_in_tree', (['cs_net', 'cm'], {}), '(cs_net, cm)\n', (17211, 17223), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((17242, 17264), 'cassiopeia.TreeSolver.utilities.tree_collapse2', 'tree_collapse2', (['cs_net'], {}), '(cs_net)\n', (17256, 17264), False, 'from cassiopeia.TreeSolver.utilities import fill_in_tree, tree_collapse, tree_collapse2, convert_network_to_newick_format, newick_to_network\n'), ((17583, 17629), 'cassiopeia.TreeSolver.Cassiopeia_Tree.Cassiopeia_Tree', 'Cassiopeia_Tree', (['"""camin-sokal"""'], {'network': 'cs_net'}), "('camin-sokal', network=cs_net)\n", (17598, 17629), False, 'from cassiopeia.TreeSolver.Cassiopeia_Tree import Cassiopeia_Tree\n'), ((17756, 17782), 'os.system', 'os.system', (["('rm ' + outfile)"], {}), "('rm ' + outfile)\n", (17765, 17782), False, 'import os\n'), ((17791, 17819), 'os.system', 'os.system', (["('rm ' + responses)"], {}), "('rm ' + responses)\n", (17800, 17819), False, 'import os\n'), ((17828, 17854), 'os.system', 'os.system', (["('rm ' + outtree)"], {}), "('rm ' + outtree)\n", (17837, 17854), False, 'import os\n'), ((17863, 17898), 'os.system', 'os.system', (["('rm ' + consense_outfile)"], {}), "('rm ' + consense_outfile)\n", (17872, 17898), False, 'import os\n'), ((17907, 17932), 'os.system', 'os.system', (["('rm ' + infile)"], {}), "('rm ' + infile)\n", (17916, 17932), False, 'import os\n'), ((17941, 17962), 'os.system', 'os.system', (["('rm ' + fn)"], {}), "('rm ' + fn)\n", (17950, 17962), False, 'import os\n'), ((4230, 4240), 'numpy.log2', 'np.log2', (['x'], {}), '(x)\n', (4237, 4240), True, 'import numpy as np\n'), ((16548, 16559), 'time.time', 'time.time', ([], {}), '()\n', (16557, 16559), False, 'import time\n')] |
import torch
from torch.autograd import Variable
import sys
sys.path.append('./models')
sys.path.append('./utils')
from models.model import *
from utils.data import CelebA
G = Generator(num_channels=3, resolution=1024, fmap_max=512, fmap_base=8192, latent_size=512)
D = Discriminator(num_channels=3, resolution=1024, fmap_max=512, fmap_base=8192)
param_G = G.named_parameters()
print('G:')
for name, p in param_G:
print(name, p.size())
print('\n')
param_D = D.named_parameters()
print('D:')
for name, p in param_D:
print(name, p.size())
print(G)
print(D)
G.cuda(1)
D.cuda(1)
data = CelebA()
z = Variable((torch.rand(3, 512)-0.5)*2).cuda(1)
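# Assumption: cur_level=1.2 requests a fractional resolution level from the
# progressive-growing networks, presumably an alpha-blended fade-in between
# levels 1 and 2 as in PGGAN training; gdrop_strength below sets the strength
# of the discriminator's generalized dropout.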
x = G(z, cur_level=1.2)
# x = Variable(torch.from_numpy(data(3, size=8))).cuda(1)
print('x:', x.size())
d = D(x, cur_level=1.2, gdrop_strength=0.2)
d = torch.mean(d)
print(d)
d.backward()
print('G:')
for name, p in G.named_parameters():
if p.grad is not None:
print(name, p.size(), p.grad.mean().data[0])
print('D:')
for name, p in D.named_parameters():
if p.grad is not None:
print(name, p.size(), p.grad.mean().data[0])
| [
"torch.mean",
"sys.path.append",
"torch.rand",
"utils.data.CelebA"
] | [((60, 87), 'sys.path.append', 'sys.path.append', (['"""./models"""'], {}), "('./models')\n", (75, 87), False, 'import sys\n'), ((88, 114), 'sys.path.append', 'sys.path.append', (['"""./utils"""'], {}), "('./utils')\n", (103, 114), False, 'import sys\n'), ((591, 599), 'utils.data.CelebA', 'CelebA', ([], {}), '()\n', (597, 599), False, 'from utils.data import CelebA\n'), ((801, 814), 'torch.mean', 'torch.mean', (['d'], {}), '(d)\n', (811, 814), False, 'import torch\n'), ((614, 632), 'torch.rand', 'torch.rand', (['(3)', '(512)'], {}), '(3, 512)\n', (624, 632), False, 'import torch\n')] |
# This file is subject to the terms and conditions defined in
# file 'LICENSE', which is part of this source code package.
import numpy as np
import matplotlib.pyplot as plt
from src.rrt.rrt_star import RRTStar
from src.search_space.search_space import SearchSpace
from src.utilities.plotting import Plot
import test2
import pklexa
X_dimensions = np.array([(-20, 20), (0, 10), (0, 2)]) # dimensions of Search Space
x_init = (0, 0, 0) # starting location
# x_goal= (10, 20, 2) # goal location
x_goal = (0.1,0.1,0.1)
# x_goal= (4.5, 6.0, 1.575) # goal location
# [2.5, 3.0, 0.75], [4.5, 6.0, 1.575], [3.0, 8.25, 0.75], [4.5, 12.0, 1.25]
# # obstacles
# Obstacles = np.array(
# [(20, 20, 20, 40, 40, 40), (20, 20, 60, 40, 40, 80), (20, 60, 20, 40, 80, 40), (60, 60, 20, 80, 80, 40),
# (60, 20, 20, 80, 40, 40), (60, 20, 60, 80, 40, 80), (20, 60, 60, 40, 80, 80), (60, 60, 60, 80, 80, 80)])
# (20, 60, 20, 30, 65, 40)
# 55, 55,
# (60, 20, 20, 55, 55, 25)
# Obstacles = np.array(
# [(20, 20, 20, 25, 60, 25),(20, 55, 20, 60, 60, 25),(20, 20, 20,60, 25, 25),(55, 25, 20, 60, 60, 25)])
# goallist = [(10, 20, 40), (30, 40, 40)]
# Obstacles = test2.genSerObs(goallist)
goallist, Obstacles = pklexa.readfile()
print(goallist)
# print(Obstacles)
# input("111")
Obstacles = np.array(Obstacles)
############### Adding appropriate size factor here ######################################
size_factor = 0.15
for i in range(np.size(Obstacles,0)):
Obstacles[i][0]=Obstacles[i][0]-size_factor
Obstacles[i][1]=Obstacles[i][1]-size_factor
Obstacles[i][2]=Obstacles[i][2]-size_factor
Obstacles[i][3]=Obstacles[i][3]+size_factor
Obstacles[i][4]=Obstacles[i][4]+size_factor
Obstacles[i][5]=Obstacles[i][5]+size_factor
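# Equivalent vectorized form of the loop above (assuming Obstacles is a float
# array): shrink all min corners and grow all max corners at once.
#   Obstacles[:, :3] -= size_factor
#   Obstacles[:, 3:] += size_factor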
# Obstacles = np.array(
# [(20, 20, 20, 25, 60, 25),(20, 55, 20, 60, 60, 25),(60, 25, 25,20, 20, 20),(55, 25, 20, 60, 60, 25)])
# goal_points = np.array([x_init,(10, 20, 40),(30, 40, 40),(100,100,100)])
goal_points = np.array([x_init ])
################## Adding waypoints for Path to be planned #####################################
goal_points = np.vstack((goal_points, goallist))
# goal_points2 = np.array([x_goal ])
# goal_points = np.vstack((goal_points, goal_points2))
# goal_points2 = goallist[1]
# goal_points = np.vstack((goal_points, goal_points2))
# x_goal = goallist[1]
print(goal_points)
# goal_points = np.array([x_init,(-5.5 , 3.0 , 0.75),(-4. , 7. , 0.75),(1. , 10.5 , 0.75), (4.5 , 5. , 0.75), (2.5 ,2. , 0.75), x_init])
# input("111")
# [ 0. 0. 0. ]
# [ 2.5 2. 0.75]
# [ 4.5 5. 0.75]
# [-5.5 3. 0.75]
# [-4. 7. 0.75]
# [ 1. 10.5 0.75]
# [ 4.5 5. 0.75]]
# (60,60,60),(50,50,50),(75,75,75),
Q = np.array([(8, 4)]) # length of tree edges
r = 1 # length of smallest edge to check for intersection with obstacles
max_samples = 1024 # max number of samples to take before timing out
rewire_count = 32 # optional, number of nearby branches to rewire
prc = 1 # probability of checking for a connection to goal
# create Search Space
X = SearchSpace(X_dimensions, Obstacles)
# create rrt_search
# rrt = RRTStar(X, Q, x_init, x_goal, max_samples, r, prc, rewire_count)
# path = rrt.rrt_star()
# rrt = RRTStar(X, Q, x_goal, x_goal2, max_samples, r, prc, rewire_count)
# path2 = rrt.rrt_star()
# path = path + path2
path_final = []
for i in range(0,(np.size(goal_points,0)-1)):
a = (goal_points[i][0],goal_points[i][1],goal_points[i][2]) #goal_points[i]
b = (goal_points[i+1][0],goal_points[i+1][1],goal_points[i+1][2]) #goal_points[i+1]
rrt = RRTStar(X, Q, a, b, max_samples, r, prc, rewire_count)
path = rrt.rrt_star()
path_final = path_final + path
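# Note: assuming rrt_star() returns each segment's path inclusive of both
# endpoints, every shared waypoint appears twice in path_final (once ending
# one segment, once starting the next).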
path = path_final
# print(path)
# print(Obstacles)
# a1 = goal_points[1]
# print(a1)
# print(np.size(Obstacles,0))
# plot
plot = Plot("rrt_star_3d")
plot.plot_tree(X, rrt.trees)
# input("Press Enter to continue...")
if path is not None:
plot.plot_path(X, path)
plot.plot_obstacles(X, Obstacles)
# input("Press Enter to continue...")
plot.plot_start(X, x_init)
# plot.draw(auto_open=True)
# input("Press Enter to continue...")
# plt.plot3D(1,1,1)
for i in range(1,(np.size(goal_points,0))):
a = (goal_points[i][0],goal_points[i][1],goal_points[i][2])
plot.plot_goal(X, a)
# plot.plot_goal(X, x_goal)
# plot.plot_goal(X, x_goal2)
# for i in range(np.size(goal_points,0)-1)
np.savetxt("path3.csv", path, delimiter=",")
plot.draw(auto_open=True)
# input("Press Enter to continue...")
# plot.plot_start(X, x_init)
# plot.draw(auto_open=True)
| [
"src.search_space.search_space.SearchSpace",
"src.rrt.rrt_star.RRTStar",
"numpy.size",
"pklexa.readfile",
"numpy.array",
"numpy.vstack",
"numpy.savetxt",
"src.utilities.plotting.Plot"
] | [((349, 387), 'numpy.array', 'np.array', (['[(-20, 20), (0, 10), (0, 2)]'], {}), '([(-20, 20), (0, 10), (0, 2)])\n', (357, 387), True, 'import numpy as np\n'), ((1209, 1226), 'pklexa.readfile', 'pklexa.readfile', ([], {}), '()\n', (1224, 1226), False, 'import pklexa\n'), ((1289, 1308), 'numpy.array', 'np.array', (['Obstacles'], {}), '(Obstacles)\n', (1297, 1308), True, 'import numpy as np\n'), ((1973, 1991), 'numpy.array', 'np.array', (['[x_init]'], {}), '([x_init])\n', (1981, 1991), True, 'import numpy as np\n'), ((2111, 2145), 'numpy.vstack', 'np.vstack', (['(goal_points, goallist)'], {}), '((goal_points, goallist))\n', (2120, 2145), True, 'import numpy as np\n'), ((2737, 2755), 'numpy.array', 'np.array', (['[(8, 4)]'], {}), '([(8, 4)])\n', (2745, 2755), True, 'import numpy as np\n'), ((3078, 3114), 'src.search_space.search_space.SearchSpace', 'SearchSpace', (['X_dimensions', 'Obstacles'], {}), '(X_dimensions, Obstacles)\n', (3089, 3114), False, 'from src.search_space.search_space import SearchSpace\n'), ((3847, 3866), 'src.utilities.plotting.Plot', 'Plot', (['"""rrt_star_3d"""'], {}), "('rrt_star_3d')\n", (3851, 3866), False, 'from src.utilities.plotting import Plot\n'), ((4405, 4449), 'numpy.savetxt', 'np.savetxt', (['"""path3.csv"""', 'path'], {'delimiter': '""","""'}), "('path3.csv', path, delimiter=',')\n", (4415, 4449), True, 'import numpy as np\n'), ((1438, 1459), 'numpy.size', 'np.size', (['Obstacles', '(0)'], {}), '(Obstacles, 0)\n', (1445, 1459), True, 'import numpy as np\n'), ((3596, 3650), 'src.rrt.rrt_star.RRTStar', 'RRTStar', (['X', 'Q', 'a', 'b', 'max_samples', 'r', 'prc', 'rewire_count'], {}), '(X, Q, a, b, max_samples, r, prc, rewire_count)\n', (3603, 3650), False, 'from src.rrt.rrt_star import RRTStar\n'), ((4187, 4210), 'numpy.size', 'np.size', (['goal_points', '(0)'], {}), '(goal_points, 0)\n', (4194, 4210), True, 'import numpy as np\n'), ((3390, 3413), 'numpy.size', 'np.size', (['goal_points', '(0)'], {}), '(goal_points, 0)\n', (3397, 3413), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
#
# Author: <NAME> <syp at umd dot edu>
# License: BSD 2-clause
# Last Change: Tue Aug 31, 2021 at 08:49 PM +0200
from collections import defaultdict
from pyBabyMaker.dag_resolver import Variable, Node
from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, \
resolve_scope
##############
# Containers #
##############
def test_Variable_simple():
var = Variable('test')
assert var.name == 'test'
assert var.type is None
assert var.rvals == []
assert not var.literal
def test_Variable_typical():
var = Variable('test', rvals=['a+B', 'a'])
assert list(var) == [
('a+B', ['a', 'B']),
('a', ['a'])
]
def test_Variable_repr():
var1 = Variable('test', 'Double_t', ['a+b', 'a', 'b'])
var2 = Variable('pi', rvals=['3.14'], literal=True)
assert str(var1) == 'Double_t test = a+b|a|b'
assert str(var2) == 'pi := 3.14'
def test_Node_properties():
var1 = Node('test')
var2 = Node('test', literal='a')
var3 = Node('test', expr='a+b')
assert var1.fname == 'None_test'
assert var1.fake is False
assert var1.rval == 'test'
assert var2.fname == 'None_test'
assert var2.fake is False
assert var2.rval == 'a'
assert var3.fname == 'None_test'
assert var3.fake is True
assert var3.rval == 'a+b'
def test_Node_sub():
var = Node('test', expr='a+b')
var.children = [
Node('a', 'scope1'),
Node('b', 'scope2')
]
assert var.rval == 'scope1_a+scope2_b'
#############################
# Resolve a single variable #
#############################
def test_resolve_var_trivial():
var = Variable('id')
scopes = defaultdict(dict)
assert resolve_var(var, 'scope1', scopes, ['a', 'b']) == (
True,
Node('id', 'scope1'),
[Node('id', 'scope1')]
)
def test_resolve_var_simple():
var = Variable('A', rvals=['a'])
scopes = {'raw': {'a': Variable('a')}}
assert resolve_var(var, 'keep', scopes, ['raw']) == (
True,
Node('A', 'keep', expr='a'),
[
Node('a', 'raw'),
Node('A', 'keep', expr='a')
]
)
def test_resolve_var_simple_fail():
var = Variable('a', rvals=['b'])
scopes = {'raw': {'a': Variable('a')}}
assert resolve_var(var, 'keep', scopes, ['raw']) == (
False, Node('a', 'keep', expr='b'), [])
def test_resolve_var_multi_scope():
scopes = {
'rename': {
'x': Variable('x', rvals=['a'])
},
'raw': {
'a': Variable('a'),
'b': Variable('b')
}
}
var = Variable('a', rvals=['x+b'])
assert resolve_var(var, 'calc', scopes, ordering=['rename', 'raw']) == (
True,
Node('a', 'calc', expr='x+b'),
[
Node('a', 'raw'),
Node('x', 'rename', expr='a'),
Node('b', 'raw'),
Node('a', 'calc', expr='x+b')
]
)
def test_resolve_var_multi_scope_literal_shadow():
scopes = {
'rename': {
'x': Variable('x', rvals=['a'])
},
'raw': {
'a': Variable('a'),
'b': Variable('b')
},
'literals': {
'b': Variable('b', literal='343')
}
}
var = Variable('a', rvals=['x+b'])
result = resolve_var(
var, 'calc', scopes, ordering=['literals', 'rename', 'raw'])
assert result == (
True,
Node('a', 'calc', expr='x+b'),
[
Node('a', 'raw'),
Node('x', 'rename', expr='a'),
Node('a', 'calc', expr='x+b')
]
)
assert result[1].rval == 'rename_x+343'
def test_resolve_var_multi_scope_fail():
scopes = {
'rename': {
'x': Variable('x', rvals=['a'])
},
'raw': {
'a': Variable('a'),
'b': Variable('b')
}
}
var = Variable('a', rvals=['b+y'])
result = resolve_var(var, 'calc', scopes, ordering=['rename', 'raw'])
assert result == (
False,
Node('a', 'calc', expr='b+y'),
[]
)
assert result[1].children == [Node('b', 'raw')]
def test_resolve_var_multi_scope_alt_def():
scopes = {
'rename': {
'x': Variable('x', rvals=['a'])
},
'raw': {
'a': Variable('a'),
'b': Variable('b')
}
}
var = Variable('a', rvals=['b+y', 'x+b'])
result = resolve_var(var, 'calc', scopes, ordering=['rename', 'raw'])
assert result == (
True,
Node('a', 'calc', expr='x+b'),
[
Node('a', 'raw'),
Node('x', 'rename', expr='a'),
Node('b', 'raw'),
Node('a', 'calc', expr='x+b')
]
)
assert result[1].rval == 'rename_x+raw_b'
def test_resolve_var_existing_var():
resolved_vars = [Node('a', 'rename', expr='x', children=[Node('x', 'raw')])]
var = Variable('a', rvals=['x'])
result = resolve_var(
var, 'rename', {'raw': {}}, ['raw'], resolved_vars=resolved_vars)
assert result == (True, Node('a', 'rename', expr='x'), [])
assert result[1].rval == 'raw_x'
def test_resolve_var_circular():
var = Variable('x', rvals=['GEV2(x)'])
scopes = {
'calc': {
'x': var,
},
'raw': {
'x': Variable('x'),
}
}
result = resolve_var(var, 'calc', scopes, ordering=['calc', 'raw'])
assert result == (
True,
Node('x', 'calc', expr='GEV2(x)'),
[
Node('x', 'raw'),
Node('x', 'calc', expr='GEV2(x)')
]
)
assert result[1].rval == 'GEV2(raw_x)'
def test_resolve_var_full_fail():
scopes = {
'raw': {},
'rename': {'a': Variable('x', rvals=['c'])}
}
var = Variable('x', rvals=['a+b'])
assert resolve_var(var, 'calc', scopes, ordering=['rename', 'raw']) == (
False, Node('x', 'calc', expr='a+b'), []
)
def test_resolve_var_skip_names_simple():
var = Variable('x', rvals=['300*GeV'])
result = resolve_var(var, 'calc', {'raw': {}}, ['raw'], skip_names=['GeV'])
assert result == (
True,
Node('x', 'calc', expr='300*GeV'),
[Node('x', 'calc', expr='300*GeV')]
)
assert result[1].rval == '300*GeV'
def test_resolve_var_selection():
scopes = {
'raw': {
'k_PT': Variable('k_PT'),
'pi_PT': Variable('pi_PT')
}
}
var = Variable('sel0', rvals=['k_PT + pi_PT > 1400.0*MeV'])
result = resolve_var(var, 'sel', scopes, ['raw'], skip_names=['MeV'])
assert result == (
True,
Node('sel0', 'sel', expr='k_PT + pi_PT > 1400.0*MeV'),
[
Node('k_PT', 'raw'),
Node('pi_PT', 'raw'),
Node('sel0', 'sel', expr='k_PT + pi_PT > 1400.0*MeV'),
]
)
assert result[1].rval == 'raw_k_PT + raw_pi_PT > 1400.0*MeV'
assert result[1].fake is True
################################################
# Resolve multiple variables in a single scope #
################################################
def test_resolve_vars_in_scope_vars_simple():
scopes = {
'calc': {
'a': Variable('a', rvals=['b/c']),
'b': Variable('b', rvals=['GEV2(b)']),
},
'rename': {
'c': Variable('c', rvals=['x'])
},
'raw': {
'b': Variable('b'),
'x': Variable('x')
}
}
result = resolve_vars_in_scope(
scopes['calc'].values(), 'calc', scopes,
ordering=['calc', 'rename', 'raw'])
assert result == (
[
Node('b', 'raw'),
Node('b', 'calc', expr='GEV2(b)'),
Node('x', 'raw'),
Node('c', 'rename', expr='x'),
Node('a', 'calc', expr='b/c')
],
[]
)
assert result[0][-1].rval == 'calc_b/rename_c'
def test_reslove_vars_in_scope_vars_partial():
scopes = {
'calc': {
'a': Variable('a', rvals=['d/c']),
'b': Variable('b', rvals=['GEV2(b)']),
},
'rename': {
'c': Variable('c', rvals=['x'])
},
'raw': {
'b': Variable('b'),
'x': Variable('x')
}
}
result = resolve_vars_in_scope(
scopes['calc'].values(), 'calc', scopes,
ordering=['calc', 'rename', 'raw'])
assert result == (
[
Node('b', 'raw'),
Node('b', 'calc', expr='GEV2(b)'),
],
[
Node('a', 'calc', expr='d/c')
]
)
assert result[1][0].children == [
Node('c', 'rename', expr='x')
]
assert result[1][0].children[0].children == [
Node('x', 'raw')
]
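# i.e. a variable that fails to resolve still records the dependencies that
# did resolve as its (nested) children, so a caller can see exactly where the
# chain broke.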
###########################################
# Resolve all variables in a single scope #
###########################################
def test_resolve_scope_unknown():
scopes = {'raw': {}}
assert resolve_scope('nonexist', scopes, ['raw']) == ([], [])
def test_resolve_scope_simple():
scopes = {
'calc': {
'a': Variable('a', rvals=['c/b']),
'b': Variable('b', rvals=['GEV2(b)']),
'c': Variable('c', rvals=['b*b'])
},
'rename': {
'c': Variable('c', rvals=['x'])
},
'raw': {
'b': Variable('b'),
'x': Variable('x')
}
}
result = resolve_scope('calc', scopes, ['calc', 'rename', 'raw'])
assert result == (
[
Node('b', 'raw'),
Node('b', 'calc', expr='GEV2(b)'),
Node('c', 'calc', expr='b*b'),
Node('a', 'calc', expr='c/b')
],
[]
)
assert result[0][1].rval == 'GEV2(raw_b)'
assert result[0][2].rval == 'calc_b*calc_b'
assert result[0][3].rval == 'calc_c/calc_b'
# NOTE: In a real use case, the dependency variables in 'selection' were
#       resolved multiple times, so an error message was emitted.
def test_resolve_scope_duplicated_resolution():
scopes = {
'sel': {
'sel0': Variable('sel0', rvals=['mu_pid > 0']),
'sel1': Variable('sel1', rvals=['test > 0'])
},
'calc': {
'mu_pid': Variable('mu_pid',
rvals=['MU_PID(mu_true_id)',
'MU_PID(mu_is_mu, mu_pid_mu)']),
'test': Variable('test', rvals=['TEST(mu_pid_mu, mu_is_mu)'])
},
'rename': {
'mu_true_id': Variable('mu_true_id', rvals=['mu_TRUEID']),
'mu_is_mu': Variable('mu_is_mu', rvals=['mu_isMuon']),
'mu_pid_mu': Variable('mu_pid_mu', rvals=['mu_PIDmu'])
},
'raw': {
# 'mu_TRUEID': Variable('mu_TRUEID'),
'mu_isMuon': Variable('mu_isMuon'),
'mu_PIDmu': Variable('mu_PIDmu')
}
}
result = resolve_scope('sel', scopes, ordering=['calc', 'rename', 'raw'])
assert result == (
[
Node('mu_isMuon', 'raw'),
Node('mu_is_mu', 'rename', expr='mu_isMuon'),
Node('mu_PIDmu', 'raw'),
Node('mu_pid_mu', 'rename', expr='mu_PIDmu'),
Node('mu_pid', 'calc', expr='MU_PID(mu_is_mu, mu_pid_mu)'),
Node('sel0', 'sel', expr='mu_pid > 0'),
Node('test', 'calc', expr='TEST(mu_pid_mu, mu_is_mu)'),
Node('sel1', 'sel', expr='test > 0')
],
[]
)
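# Note that each dependency node appears exactly once in the resolved list
# even though 'sel0' and 'sel1' share 'mu_pid_mu' and 'mu_is_mu': the resolver
# evidently caches already-resolved variables within a single scope pass.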
# NOTE: In a real use case, two selection variables had common dependencies. If
#       the first one couldn't be fully resolved, the dependencies of the second
#       variable were not fully resolved either
# NOTE: The 'simple' case initially worked; the 'complex' one doesn't
def test_resolve_scope_partial_common_deps_simple():
scopes = {
'sel': {
'sel0': Variable('sel0', rvals=['flag_d0mu']),
'sel1': Variable('sel1', rvals=['flag_mu'])
},
'calc': {
'flag_d0mu': Variable(
'flag_d0mu',
rvals=['FLAG_D0MU(mu_isMuon, k_isMuon, pi_isMuon)']
),
'flag_mu': Variable('flag_mu', rvals=['FLAG_MU(mu_isMuon)'])
},
'raw': {
# 'mu_TRUEID': Variable('mu_TRUEID'),
'mu_isMuon': Variable('mu_isMuon'),
'k_isMuon': Variable('k_isMuon'),
}
}
result = resolve_scope('sel', scopes, ordering=['calc', 'raw'])
assert result == (
[
Node('mu_isMuon', 'raw'),
Node('flag_mu', 'calc', expr='FLAG_MU(mu_isMuon)'),
Node('sel1', 'sel', expr='flag_mu')
],
[
Node('sel0', 'sel', expr='flag_d0mu')
]
)
def test_resove_scope_partial_common_deps_complex():
scopes = {
'sel': {
'sel0': Variable('sel0', rvals=['flag_d0mu']),
'sel1': Variable('sel1', rvals=['flag_mu'])
},
'calc': {
'flag': Variable('flag', rvals=['FLAG(mu_PT)']),
'flag_d0mu': Variable(
'flag_d0mu',
rvals=['FLAG_D0MU(flag, mu_isMuon, k_isMuon, pi_isMuon)']
),
'flag_mu': Variable('flag_mu', rvals=['FLAG_MU(flag, mu_isMuon)'])
},
'raw': {
# 'mu_TRUEID': Variable('mu_TRUEID'),
'mu_isMuon': Variable('mu_isMuon'),
'k_isMuon': Variable('k_isMuon'),
'mu_PT': Variable('mu_PT')
}
}
result = resolve_scope('sel', scopes, ordering=['calc', 'raw'])
assert result == (
[
Node('mu_PT', 'raw'), # This one was missing!
Node('flag', 'calc', expr='FLAG(mu_PT)'),
Node('mu_isMuon', 'raw'),
Node('flag_mu', 'calc', expr='FLAG_MU(flag, mu_isMuon)'),
Node('sel1', 'sel', expr='flag_mu')
],
[
Node('sel0', 'sel', expr='flag_d0mu')
]
)
# NOTE: In one case, when an alternative rvalue was used, the dependencies of
#       its dependencies were not resolved correctly
def test_VariableResolver_alternative_rvalue_dep_deep():
scopes = {
'calc': {
'other_trk': Variable(
'other_trk', rvals=['VEC(trk_k, trk_pi, trk_spi)',
'VEC(trk_k, trk_pi)']),
'trk_k': Variable('trk_k', rvals=['FAKE(k_PT)']),
'trk_pi': Variable('trk_pi', rvals=['FAKE(pi_PT)']),
'trk_spi': Variable('trk_spi', rvals=['FAKE(spi_PT)'])
},
'raw': {
'k_PT': Variable('k_PT'),
'pi_PT': Variable('pi_PT'),
# 'spi_PT': Variable('spi_PT')
}
}
result = resolve_scope('calc', scopes, ordering=['calc', 'raw'])
assert result == (
[
Node('k_PT', 'raw', ),
Node('trk_k', 'calc', expr='FAKE(k_PT)'),
Node('pi_PT', 'raw'),
Node('trk_pi', 'calc', expr='FAKE(pi_PT)'),
Node('other_trk', 'calc', expr='VEC(trk_k, trk_pi)'),
],
[
Node('trk_spi', 'calc', expr='FAKE(spi_PT)')
]
)
| [
"pyBabyMaker.dag_resolver.Variable",
"pyBabyMaker.dag_resolver.resolve_scope",
"pyBabyMaker.dag_resolver.resolve_var",
"collections.defaultdict",
"pyBabyMaker.dag_resolver.Node"
] | [((406, 422), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""test"""'], {}), "('test')\n", (414, 422), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((577, 613), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""test"""'], {'rvals': "['a+B', 'a']"}), "('test', rvals=['a+B', 'a'])\n", (585, 613), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((736, 783), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""test"""', '"""Double_t"""', "['a+b', 'a', 'b']"], {}), "('test', 'Double_t', ['a+b', 'a', 'b'])\n", (744, 783), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((795, 839), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""pi"""'], {'rvals': "['3.14']", 'literal': '(True)'}), "('pi', rvals=['3.14'], literal=True)\n", (803, 839), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((969, 981), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""test"""'], {}), "('test')\n", (973, 981), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((993, 1018), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""test"""'], {'literal': '"""a"""'}), "('test', literal='a')\n", (997, 1018), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1030, 1054), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""test"""'], {'expr': '"""a+b"""'}), "('test', expr='a+b')\n", (1034, 1054), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1380, 1404), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""test"""'], {'expr': '"""a+b"""'}), "('test', expr='a+b')\n", (1384, 1404), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1668, 1682), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""id"""'], {}), "('id')\n", (1676, 1682), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1696, 1713), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (1707, 1713), False, 'from collections import defaultdict\n'), ((1902, 1928), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""A"""'], {'rvals': "['a']"}), "('A', rvals=['a'])\n", (1910, 1928), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2226, 2252), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['b']"}), "('a', rvals=['b'])\n", (2234, 2252), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2637, 2665), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['x+b']"}), "('a', rvals=['x+b'])\n", (2645, 2665), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3296, 3324), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['x+b']"}), "('a', rvals=['x+b'])\n", (3304, 3324), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3338, 3410), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""calc"""', 'scopes'], {'ordering': "['literals', 'rename', 'raw']"}), "(var, 'calc', scopes, ordering=['literals', 'rename', 'raw'])\n", (3349, 3410), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((3921, 3949), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['b+y']"}), "('a', rvals=['b+y'])\n", (3929, 3949), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3963, 4023), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""calc"""', 'scopes'], {'ordering': "['rename', 'raw']"}), "(var, 'calc', scopes, ordering=['rename', 'raw'])\n", (3974, 4023), False, 
'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((4413, 4448), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['b+y', 'x+b']"}), "('a', rvals=['b+y', 'x+b'])\n", (4421, 4448), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4462, 4522), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""calc"""', 'scopes'], {'ordering': "['rename', 'raw']"}), "(var, 'calc', scopes, ordering=['rename', 'raw'])\n", (4473, 4522), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((4947, 4973), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['x']"}), "('a', rvals=['x'])\n", (4955, 4973), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4987, 5064), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""rename"""', "{'raw': {}}", "['raw']"], {'resolved_vars': 'resolved_vars'}), "(var, 'rename', {'raw': {}}, ['raw'], resolved_vars=resolved_vars)\n", (4998, 5064), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((5220, 5252), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['GEV2(x)']"}), "('x', rvals=['GEV2(x)'])\n", (5228, 5252), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5397, 5455), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""calc"""', 'scopes'], {'ordering': "['calc', 'raw']"}), "(var, 'calc', scopes, ordering=['calc', 'raw'])\n", (5408, 5455), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((5820, 5848), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['a+b']"}), "('x', rvals=['a+b'])\n", (5828, 5848), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6036, 6068), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['300*GeV']"}), "('x', rvals=['300*GeV'])\n", (6044, 6068), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6082, 6148), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""calc"""', "{'raw': {}}", "['raw']"], {'skip_names': "['GeV']"}), "(var, 'calc', {'raw': {}}, ['raw'], skip_names=['GeV'])\n", (6093, 6148), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((6490, 6543), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""sel0"""'], {'rvals': "['k_PT + pi_PT > 1400.0*MeV']"}), "('sel0', rvals=['k_PT + pi_PT > 1400.0*MeV'])\n", (6498, 6543), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6558, 6618), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""sel"""', 'scopes', "['raw']"], {'skip_names': "['MeV']"}), "(var, 'sel', scopes, ['raw'], skip_names=['MeV'])\n", (6569, 6618), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((9432, 9488), 'pyBabyMaker.dag_resolver.resolve_scope', 'resolve_scope', (['"""calc"""', 'scopes', "['calc', 'rename', 'raw']"], {}), "('calc', scopes, ['calc', 'rename', 'raw'])\n", (9445, 9488), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((10906, 10970), 'pyBabyMaker.dag_resolver.resolve_scope', 'resolve_scope', (['"""sel"""', 'scopes'], {'ordering': "['calc', 'rename', 'raw']"}), "('sel', scopes, ordering=['calc', 'rename', 'raw'])\n", (10919, 10970), False, 'from pyBabyMaker.dag_resolver 
import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((12387, 12441), 'pyBabyMaker.dag_resolver.resolve_scope', 'resolve_scope', (['"""sel"""', 'scopes'], {'ordering': "['calc', 'raw']"}), "('sel', scopes, ordering=['calc', 'raw'])\n", (12400, 12441), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((13477, 13531), 'pyBabyMaker.dag_resolver.resolve_scope', 'resolve_scope', (['"""sel"""', 'scopes'], {'ordering': "['calc', 'raw']"}), "('sel', scopes, ordering=['calc', 'raw'])\n", (13490, 13531), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((14673, 14728), 'pyBabyMaker.dag_resolver.resolve_scope', 'resolve_scope', (['"""calc"""', 'scopes'], {'ordering': "['calc', 'raw']"}), "('calc', scopes, ordering=['calc', 'raw'])\n", (14686, 14728), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((1434, 1453), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""scope1"""'], {}), "('a', 'scope1')\n", (1438, 1453), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1463, 1482), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""scope2"""'], {}), "('b', 'scope2')\n", (1467, 1482), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1726, 1772), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""scope1"""', 'scopes', "['a', 'b']"], {}), "(var, 'scope1', scopes, ['a', 'b'])\n", (1737, 1772), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((1984, 2025), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""keep"""', 'scopes', "['raw']"], {}), "(var, 'keep', scopes, ['raw'])\n", (1995, 2025), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((2308, 2349), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""keep"""', 'scopes', "['raw']"], {}), "(var, 'keep', scopes, ['raw'])\n", (2319, 2349), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((2678, 2738), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""calc"""', 'scopes'], {'ordering': "['rename', 'raw']"}), "(var, 'calc', scopes, ordering=['rename', 'raw'])\n", (2689, 2738), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((5861, 5921), 'pyBabyMaker.dag_resolver.resolve_var', 'resolve_var', (['var', '"""calc"""', 'scopes'], {'ordering': "['rename', 'raw']"}), "(var, 'calc', scopes, ordering=['rename', 'raw'])\n", (5872, 5921), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((8970, 9012), 'pyBabyMaker.dag_resolver.resolve_scope', 'resolve_scope', (['"""nonexist"""', 'scopes', "['raw']"], {}), "('nonexist', scopes, ['raw'])\n", (8983, 9012), False, 'from pyBabyMaker.dag_resolver import resolve_var, resolve_vars_in_scope, resolve_scope\n'), ((1800, 1820), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""id"""', '"""scope1"""'], {}), "('id', 'scope1')\n", (1804, 1820), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1956, 1969), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {}), "('a')\n", (1964, 1969), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2053, 2080), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""A"""', '"""keep"""'], {'expr': '"""a"""'}), "('A', 'keep', expr='a')\n", 
(2057, 2080), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2280, 2293), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {}), "('a')\n", (2288, 2293), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2370, 2397), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""keep"""'], {'expr': '"""b"""'}), "('a', 'keep', expr='b')\n", (2374, 2397), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2493, 2519), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['a']"}), "('x', rvals=['a'])\n", (2501, 2519), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2565, 2578), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {}), "('a')\n", (2573, 2578), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2597, 2610), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {}), "('b')\n", (2605, 2610), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2766, 2795), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""x+b"""'}), "('a', 'calc', expr='x+b')\n", (2770, 2795), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3073, 3099), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['a']"}), "('x', rvals=['a'])\n", (3081, 3099), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3145, 3158), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {}), "('a')\n", (3153, 3158), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3177, 3190), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {}), "('b')\n", (3185, 3190), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3241, 3269), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {'literal': '"""343"""'}), "('b', literal='343')\n", (3249, 3269), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3466, 3495), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""x+b"""'}), "('a', 'calc', expr='x+b')\n", (3470, 3495), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3777, 3803), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['a']"}), "('x', rvals=['a'])\n", (3785, 3803), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3849, 3862), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {}), "('a')\n", (3857, 3862), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3881, 3894), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {}), "('b')\n", (3889, 3894), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4071, 4100), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""b+y"""'}), "('a', 'calc', expr='b+y')\n", (4075, 4100), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4153, 4169), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""raw"""'], {}), "('b', 'raw')\n", (4157, 4169), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4269, 4295), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['a']"}), "('x', rvals=['a'])\n", (4277, 4295), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4341, 4354), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {}), "('a')\n", (4349, 4354), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4373, 4386), 
'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {}), "('b')\n", (4381, 4386), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4569, 4598), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""x+b"""'}), "('a', 'calc', expr='x+b')\n", (4573, 4598), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5103, 5132), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""rename"""'], {'expr': '"""x"""'}), "('a', 'rename', expr='x')\n", (5107, 5132), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5353, 5366), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (5361, 5366), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5502, 5535), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""calc"""'], {'expr': '"""GEV2(x)"""'}), "('x', 'calc', expr='GEV2(x)')\n", (5506, 5535), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5776, 5802), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {'rvals': "['c']"}), "('x', rvals=['c'])\n", (5784, 5802), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5942, 5971), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""calc"""'], {'expr': '"""a+b"""'}), "('x', 'calc', expr='a+b')\n", (5946, 5971), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6195, 6228), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""calc"""'], {'expr': '"""300*GeV"""'}), "('x', 'calc', expr='300*GeV')\n", (6199, 6228), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6407, 6423), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""k_PT"""'], {}), "('k_PT')\n", (6415, 6423), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6446, 6463), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""pi_PT"""'], {}), "('pi_PT')\n", (6454, 6463), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6664, 6717), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel0"""', '"""sel"""'], {'expr': '"""k_PT + pi_PT > 1400.0*MeV"""'}), "('sel0', 'sel', expr='k_PT + pi_PT > 1400.0*MeV')\n", (6668, 6717), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7224, 7252), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['b/c']"}), "('a', rvals=['b/c'])\n", (7232, 7252), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7271, 7303), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {'rvals': "['GEV2(b)']"}), "('b', rvals=['GEV2(b)'])\n", (7279, 7303), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7353, 7379), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""c"""'], {'rvals': "['x']"}), "('c', rvals=['x'])\n", (7361, 7379), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7425, 7438), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {}), "('b')\n", (7433, 7438), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7457, 7470), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (7465, 7470), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8020, 8048), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['d/c']"}), "('a', rvals=['d/c'])\n", (8028, 8048), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8067, 8099), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {'rvals': "['GEV2(b)']"}), "('b', 
rvals=['GEV2(b)'])\n", (8075, 8099), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8149, 8175), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""c"""'], {'rvals': "['x']"}), "('c', rvals=['x'])\n", (8157, 8175), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8221, 8234), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {}), "('b')\n", (8229, 8234), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8253, 8266), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (8261, 8266), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8648, 8677), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""c"""', '"""rename"""'], {'expr': '"""x"""'}), "('c', 'rename', expr='x')\n", (8652, 8677), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8742, 8758), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""raw"""'], {}), "('x', 'raw')\n", (8746, 8758), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9110, 9138), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""a"""'], {'rvals': "['c/b']"}), "('a', rvals=['c/b'])\n", (9118, 9138), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9157, 9189), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {'rvals': "['GEV2(b)']"}), "('b', rvals=['GEV2(b)'])\n", (9165, 9189), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9208, 9236), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""c"""'], {'rvals': "['b*b']"}), "('c', rvals=['b*b'])\n", (9216, 9236), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9285, 9311), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""c"""'], {'rvals': "['x']"}), "('c', rvals=['x'])\n", (9293, 9311), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9357, 9370), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""b"""'], {}), "('b')\n", (9365, 9370), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9389, 9402), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (9397, 9402), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10098, 10136), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""sel0"""'], {'rvals': "['mu_pid > 0']"}), "('sel0', rvals=['mu_pid > 0'])\n", (10106, 10136), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10158, 10194), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""sel1"""'], {'rvals': "['test > 0']"}), "('sel1', rvals=['test > 0'])\n", (10166, 10194), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10246, 10325), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_pid"""'], {'rvals': "['MU_PID(mu_true_id)', 'MU_PID(mu_is_mu, mu_pid_mu)']"}), "('mu_pid', rvals=['MU_PID(mu_true_id)', 'MU_PID(mu_is_mu, mu_pid_mu)'])\n", (10254, 10325), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10416, 10469), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""test"""'], {'rvals': "['TEST(mu_pid_mu, mu_is_mu)']"}), "('test', rvals=['TEST(mu_pid_mu, mu_is_mu)'])\n", (10424, 10469), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10527, 10570), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_true_id"""'], {'rvals': "['mu_TRUEID']"}), "('mu_true_id', rvals=['mu_TRUEID'])\n", (10535, 10570), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10596, 10637), 'pyBabyMaker.dag_resolver.Variable', 
'Variable', (['"""mu_is_mu"""'], {'rvals': "['mu_isMuon']"}), "('mu_is_mu', rvals=['mu_isMuon'])\n", (10604, 10637), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10664, 10705), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_pid_mu"""'], {'rvals': "['mu_PIDmu']"}), "('mu_pid_mu', rvals=['mu_PIDmu'])\n", (10672, 10705), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10809, 10830), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_isMuon"""'], {}), "('mu_isMuon')\n", (10817, 10830), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((10856, 10876), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_PIDmu"""'], {}), "('mu_PIDmu')\n", (10864, 10876), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11842, 11879), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""sel0"""'], {'rvals': "['flag_d0mu']"}), "('sel0', rvals=['flag_d0mu'])\n", (11850, 11879), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11901, 11936), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""sel1"""'], {'rvals': "['flag_mu']"}), "('sel1', rvals=['flag_mu'])\n", (11909, 11936), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11991, 12065), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""flag_d0mu"""'], {'rvals': "['FLAG_D0MU(mu_isMuon, k_isMuon, pi_isMuon)']"}), "('flag_d0mu', rvals=['FLAG_D0MU(mu_isMuon, k_isMuon, pi_isMuon)'])\n", (11999, 12065), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12136, 12185), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""flag_mu"""'], {'rvals': "['FLAG_MU(mu_isMuon)']"}), "('flag_mu', rvals=['FLAG_MU(mu_isMuon)'])\n", (12144, 12185), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12289, 12310), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_isMuon"""'], {}), "('mu_isMuon')\n", (12297, 12310), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12336, 12356), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""k_isMuon"""'], {}), "('k_isMuon')\n", (12344, 12356), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12820, 12857), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""sel0"""'], {'rvals': "['flag_d0mu']"}), "('sel0', rvals=['flag_d0mu'])\n", (12828, 12857), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12879, 12914), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""sel1"""'], {'rvals': "['flag_mu']"}), "('sel1', rvals=['flag_mu'])\n", (12887, 12914), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12964, 13003), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""flag"""'], {'rvals': "['FLAG(mu_PT)']"}), "('flag', rvals=['FLAG(mu_PT)'])\n", (12972, 13003), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13030, 13115), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""flag_d0mu"""'], {'rvals': "['FLAG_D0MU(flag, mu_isMuon, k_isMuon, pi_isMuon)']"}), "('flag_d0mu', rvals=['FLAG_D0MU(flag, mu_isMuon, k_isMuon, pi_isMuon)']\n )\n", (13038, 13115), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13181, 13236), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""flag_mu"""'], {'rvals': "['FLAG_MU(flag, mu_isMuon)']"}), "('flag_mu', rvals=['FLAG_MU(flag, mu_isMuon)'])\n", (13189, 13236), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13340, 13361), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_isMuon"""'], {}), 
"('mu_isMuon')\n", (13348, 13361), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13387, 13407), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""k_isMuon"""'], {}), "('k_isMuon')\n", (13395, 13407), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13430, 13447), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""mu_PT"""'], {}), "('mu_PT')\n", (13438, 13447), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14164, 14250), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""other_trk"""'], {'rvals': "['VEC(trk_k, trk_pi, trk_spi)', 'VEC(trk_k, trk_pi)']"}), "('other_trk', rvals=['VEC(trk_k, trk_pi, trk_spi)',\n 'VEC(trk_k, trk_pi)'])\n", (14172, 14250), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14322, 14361), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""trk_k"""'], {'rvals': "['FAKE(k_PT)']"}), "('trk_k', rvals=['FAKE(k_PT)'])\n", (14330, 14361), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14385, 14426), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""trk_pi"""'], {'rvals': "['FAKE(pi_PT)']"}), "('trk_pi', rvals=['FAKE(pi_PT)'])\n", (14393, 14426), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14451, 14494), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""trk_spi"""'], {'rvals': "['FAKE(spi_PT)']"}), "('trk_spi', rvals=['FAKE(spi_PT)'])\n", (14459, 14494), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14543, 14559), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""k_PT"""'], {}), "('k_PT')\n", (14551, 14559), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14582, 14599), 'pyBabyMaker.dag_resolver.Variable', 'Variable', (['"""pi_PT"""'], {}), "('pi_PT')\n", (14590, 14599), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((1831, 1851), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""id"""', '"""scope1"""'], {}), "('id', 'scope1')\n", (1835, 1851), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2104, 2120), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""raw"""'], {}), "('a', 'raw')\n", (2108, 2120), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2134, 2161), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""A"""', '"""keep"""'], {'expr': '"""a"""'}), "('A', 'keep', expr='a')\n", (2138, 2161), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2819, 2835), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""raw"""'], {}), "('a', 'raw')\n", (2823, 2835), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2849, 2878), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""rename"""'], {'expr': '"""a"""'}), "('x', 'rename', expr='a')\n", (2853, 2878), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2892, 2908), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""raw"""'], {}), "('b', 'raw')\n", (2896, 2908), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((2922, 2951), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""x+b"""'}), "('a', 'calc', expr='x+b')\n", (2926, 2951), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3519, 3535), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""raw"""'], {}), "('a', 'raw')\n", (3523, 3535), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3549, 3578), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""rename"""'], {'expr': 
'"""a"""'}), "('x', 'rename', expr='a')\n", (3553, 3578), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((3592, 3621), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""x+b"""'}), "('a', 'calc', expr='x+b')\n", (3596, 3621), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4622, 4638), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""raw"""'], {}), "('a', 'raw')\n", (4626, 4638), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4652, 4681), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""rename"""'], {'expr': '"""a"""'}), "('x', 'rename', expr='a')\n", (4656, 4681), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4695, 4711), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""raw"""'], {}), "('b', 'raw')\n", (4699, 4711), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4725, 4754), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""x+b"""'}), "('a', 'calc', expr='x+b')\n", (4729, 4754), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5559, 5575), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""raw"""'], {}), "('x', 'raw')\n", (5563, 5575), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((5589, 5622), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""calc"""'], {'expr': '"""GEV2(x)"""'}), "('x', 'calc', expr='GEV2(x)')\n", (5593, 5622), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6239, 6272), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""calc"""'], {'expr': '"""300*GeV"""'}), "('x', 'calc', expr='300*GeV')\n", (6243, 6272), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6741, 6760), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""k_PT"""', '"""raw"""'], {}), "('k_PT', 'raw')\n", (6745, 6760), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6774, 6794), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""pi_PT"""', '"""raw"""'], {}), "('pi_PT', 'raw')\n", (6778, 6794), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((6808, 6861), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel0"""', '"""sel"""'], {'expr': '"""k_PT + pi_PT > 1400.0*MeV"""'}), "('sel0', 'sel', expr='k_PT + pi_PT > 1400.0*MeV')\n", (6812, 6861), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7662, 7678), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""raw"""'], {}), "('b', 'raw')\n", (7666, 7678), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7692, 7725), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""calc"""'], {'expr': '"""GEV2(b)"""'}), "('b', 'calc', expr='GEV2(b)')\n", (7696, 7725), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7739, 7755), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""raw"""'], {}), "('x', 'raw')\n", (7743, 7755), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7769, 7798), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""c"""', '"""rename"""'], {'expr': '"""x"""'}), "('c', 'rename', expr='x')\n", (7773, 7798), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((7812, 7841), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""b/c"""'}), "('a', 'calc', expr='b/c')\n", (7816, 7841), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8458, 8474), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""raw"""'], {}), 
"('b', 'raw')\n", (8462, 8474), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8488, 8521), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""calc"""'], {'expr': '"""GEV2(b)"""'}), "('b', 'calc', expr='GEV2(b)')\n", (8492, 8521), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((8556, 8585), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""d/c"""'}), "('a', 'calc', expr='d/c')\n", (8560, 8585), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9535, 9551), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""raw"""'], {}), "('b', 'raw')\n", (9539, 9551), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9565, 9598), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""b"""', '"""calc"""'], {'expr': '"""GEV2(b)"""'}), "('b', 'calc', expr='GEV2(b)')\n", (9569, 9598), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9612, 9641), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""c"""', '"""calc"""'], {'expr': '"""b*b"""'}), "('c', 'calc', expr='b*b')\n", (9616, 9641), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((9655, 9684), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""a"""', '"""calc"""'], {'expr': '"""c/b"""'}), "('a', 'calc', expr='c/b')\n", (9659, 9684), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11017, 11041), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_isMuon"""', '"""raw"""'], {}), "('mu_isMuon', 'raw')\n", (11021, 11041), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11055, 11099), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_is_mu"""', '"""rename"""'], {'expr': '"""mu_isMuon"""'}), "('mu_is_mu', 'rename', expr='mu_isMuon')\n", (11059, 11099), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11113, 11136), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_PIDmu"""', '"""raw"""'], {}), "('mu_PIDmu', 'raw')\n", (11117, 11136), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11150, 11194), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_pid_mu"""', '"""rename"""'], {'expr': '"""mu_PIDmu"""'}), "('mu_pid_mu', 'rename', expr='mu_PIDmu')\n", (11154, 11194), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11208, 11266), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_pid"""', '"""calc"""'], {'expr': '"""MU_PID(mu_is_mu, mu_pid_mu)"""'}), "('mu_pid', 'calc', expr='MU_PID(mu_is_mu, mu_pid_mu)')\n", (11212, 11266), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11280, 11318), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel0"""', '"""sel"""'], {'expr': '"""mu_pid > 0"""'}), "('sel0', 'sel', expr='mu_pid > 0')\n", (11284, 11318), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11332, 11386), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""test"""', '"""calc"""'], {'expr': '"""TEST(mu_pid_mu, mu_is_mu)"""'}), "('test', 'calc', expr='TEST(mu_pid_mu, mu_is_mu)')\n", (11336, 11386), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((11400, 11436), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel1"""', '"""sel"""'], {'expr': '"""test > 0"""'}), "('sel1', 'sel', expr='test > 0')\n", (11404, 11436), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12488, 12512), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_isMuon"""', '"""raw"""'], {}), "('mu_isMuon', 'raw')\n", (12492, 12512), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12526, 12576), 
'pyBabyMaker.dag_resolver.Node', 'Node', (['"""flag_mu"""', '"""calc"""'], {'expr': '"""FLAG_MU(mu_isMuon)"""'}), "('flag_mu', 'calc', expr='FLAG_MU(mu_isMuon)')\n", (12530, 12576), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12590, 12625), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel1"""', '"""sel"""'], {'expr': '"""flag_mu"""'}), "('sel1', 'sel', expr='flag_mu')\n", (12594, 12625), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((12659, 12696), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel0"""', '"""sel"""'], {'expr': '"""flag_d0mu"""'}), "('sel0', 'sel', expr='flag_d0mu')\n", (12663, 12696), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13578, 13598), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_PT"""', '"""raw"""'], {}), "('mu_PT', 'raw')\n", (13582, 13598), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13637, 13677), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""flag"""', '"""calc"""'], {'expr': '"""FLAG(mu_PT)"""'}), "('flag', 'calc', expr='FLAG(mu_PT)')\n", (13641, 13677), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13691, 13715), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""mu_isMuon"""', '"""raw"""'], {}), "('mu_isMuon', 'raw')\n", (13695, 13715), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13729, 13785), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""flag_mu"""', '"""calc"""'], {'expr': '"""FLAG_MU(flag, mu_isMuon)"""'}), "('flag_mu', 'calc', expr='FLAG_MU(flag, mu_isMuon)')\n", (13733, 13785), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13799, 13834), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel1"""', '"""sel"""'], {'expr': '"""flag_mu"""'}), "('sel1', 'sel', expr='flag_mu')\n", (13803, 13834), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((13868, 13905), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""sel0"""', '"""sel"""'], {'expr': '"""flag_d0mu"""'}), "('sel0', 'sel', expr='flag_d0mu')\n", (13872, 13905), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14775, 14794), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""k_PT"""', '"""raw"""'], {}), "('k_PT', 'raw')\n", (14779, 14794), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14810, 14850), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""trk_k"""', '"""calc"""'], {'expr': '"""FAKE(k_PT)"""'}), "('trk_k', 'calc', expr='FAKE(k_PT)')\n", (14814, 14850), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14864, 14884), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""pi_PT"""', '"""raw"""'], {}), "('pi_PT', 'raw')\n", (14868, 14884), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14898, 14940), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""trk_pi"""', '"""calc"""'], {'expr': '"""FAKE(pi_PT)"""'}), "('trk_pi', 'calc', expr='FAKE(pi_PT)')\n", (14902, 14940), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((14954, 15006), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""other_trk"""', '"""calc"""'], {'expr': '"""VEC(trk_k, trk_pi)"""'}), "('other_trk', 'calc', expr='VEC(trk_k, trk_pi)')\n", (14958, 15006), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((15041, 15085), 'pyBabyMaker.dag_resolver.Node', 'Node', (['"""trk_spi"""', '"""calc"""'], {'expr': '"""FAKE(spi_PT)"""'}), "('trk_spi', 'calc', expr='FAKE(spi_PT)')\n", (15045, 15085), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n'), ((4917, 4933), 
'pyBabyMaker.dag_resolver.Node', 'Node', (['"""x"""', '"""raw"""'], {}), "('x', 'raw')\n", (4921, 4933), False, 'from pyBabyMaker.dag_resolver import Variable, Node\n')] |
# -*- coding: utf-8 -*-
import chainer
from chainer import report, training, Chain, datasets, iterators, optimizers
import chainer.functions as F
import chainer.links as L
from chainer.training import extensions
from chainer.datasets import tuple_dataset
import numpy as np
import sys
sys.path.append("//tera/user/boku/study/nn")
import iomod as io
import csv
import pickle
import matplotlib.pyplot as plt
#argument
argvs = sys.argv
argc = len(argvs)
if argc != 4:
    print("Usage: train_path test_path output_path")
    sys.exit(1)
#parameter
image_side_size = 9
image_size = image_side_size * image_side_size * image_side_size
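# number of voxels in one cubic patch (side length cubed)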
train_data_size = 1000
test_data_size = 500
hidden1 = 100
hidden2 = 3
n_epoch = 5
#load_file_train
train = np.fromfile(argvs[1],np.float64)
train = train.astype(np.float32)
train = train.reshape(image_size,train_data_size)
train_max = np.max(train,axis = 0)
train_min = np.min(train,axis = 0)
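# min-max normalize each column (one sample per column) to the [0, 1] range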
train_mat = ( train - train_min[np.newaxis,:] ) / ( train_max[np.newaxis,:] - train_min[np.newaxis,:])
train_mat = train_mat.T
#load_file_test
test = np.fromfile(argvs[2],np.float64)
test = test.astype(np.float32)
test = test.reshape(image_size,test_data_size)
test_max = np.max(test,axis = 0)
test_min = np.min(test,axis = 0)
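# same per-sample min-max scaling for the test set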
test_mat = ( test - test_min[np.newaxis,:] ) / ( test_max[np.newaxis,:] - test_min[np.newaxis,:])
test_mat = test_mat.T
#save_input_test
trans_test = test.T
in_temp = trans_test.copy(order = 'C')
for t in range(test_data_size):
io.save_raw(in_temp[t,:], argvs[3] + "sae/input_test" + str(t) +".raw",np.float32)
class MyAE(Chain):
def __init__(self):
super(MyAE, self).__init__(
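            # encoder: image_size -> hidden1 -> hidden2 (bottleneck)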
l11=L.Linear(image_size,hidden1),
l12=L.Linear(hidden1,hidden2),
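            # decoder: hidden2 -> hidden1 -> image_size (mirror of the encoder)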
            l13=L.Linear(hidden2,hidden1),
            l14=L.Linear(hidden1,image_size),
)
def __call__(self,x,train = True):
fv1 = F.sigmoid(self.l11(x))
fv2 = F.sigmoid(self.l12(fv1))
bv1 = F.sigmoid(self.l13(fv2))
bv2 = F.sigmoid(self.l14(bv1))
return bv2
#model
model = L.Classifier(MyAE(),lossfun = F.mean_squared_error)
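# reconstruction is a regression task: train with MSE and skip the accuracy computation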
model.compute_accuracy = False
optimizer = optimizers.Adam()
optimizer.setup(model)
#train
print ("train")
xtrain = tuple_dataset.TupleDataset(train_mat,train_mat)
xtest = tuple_dataset.TupleDataset(test_mat,test_mat)
train_iter = iterators.SerialIterator(xtrain,10)
test_iter = iterators.SerialIterator(xtest,1,repeat = False,shuffle = False)
updater = training.StandardUpdater(train_iter, optimizer)
trainer = training.Trainer(updater, (n_epoch, 'epoch'), out='result')
trainer.extend(extensions.LogReport())
trainer.extend(extensions.PrintReport(['epoch', 'main/loss']))
trainer.extend(extensions.snapshot(), trigger=(n_epoch, 'epoch'))
trainer.extend(extensions.dump_graph('main/loss'))
trainer.extend(extensions.ProgressBar())
trainer.run()
#test
test_result = model.predictor(np.array(test_mat))
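# undo the min-max scaling to map reconstructions back to each sample's original intensity range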
test_result2 = ( test_result.data.T * ( test_max[np.newaxis,:] - test_min[np.newaxis,:])) + test_min[np.newaxis,:]
test_result_temp = test_result2.T
#save_output_test
for t in range(test_data_size):
io.save_raw(test_result_temp[t,:], argvs[3] + "sae/output_test" + str(t) + ".raw",np.float32)
#Generalization
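# generalization score: mean absolute reconstruction error averaged over voxels, then over samples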
dev_result = abs(test - test_result2)
gene = np.average(np.average(dev_result,axis = 0))
with open(argvs[3] + 'sae/generalization.csv', 'wt') as f:
writer = csv.writer(f)
writer.writerows(dev_result)
print ("Generalization")
print (gene)
| [
"chainer.training.extensions.ProgressBar",
"numpy.fromfile",
"chainer.training.extensions.PrintReport",
"chainer.training.extensions.dump_graph",
"chainer.training.extensions.LogReport",
"numpy.average",
"chainer.iterators.SerialIterator",
"chainer.optimizers.Adam",
"chainer.training.Trainer",
"chainer.training.StandardUpdater",
"csv.writer",
"numpy.max",
"numpy.array",
"chainer.links.Linear",
"numpy.min",
"sys.path.append",
"chainer.training.extensions.snapshot",
"chainer.datasets.tuple_dataset.TupleDataset"
] | [((300, 344), 'sys.path.append', 'sys.path.append', (['"""//tera/user/boku/study/nn"""'], {}), "('//tera/user/boku/study/nn')\n", (315, 344), False, 'import sys\n'), ((755, 788), 'numpy.fromfile', 'np.fromfile', (['argvs[1]', 'np.float64'], {}), '(argvs[1], np.float64)\n', (766, 788), True, 'import numpy as np\n'), ((883, 904), 'numpy.max', 'np.max', (['train'], {'axis': '(0)'}), '(train, axis=0)\n', (889, 904), True, 'import numpy as np\n'), ((918, 939), 'numpy.min', 'np.min', (['train'], {'axis': '(0)'}), '(train, axis=0)\n', (924, 939), True, 'import numpy as np\n'), ((1092, 1125), 'numpy.fromfile', 'np.fromfile', (['argvs[2]', 'np.float64'], {}), '(argvs[2], np.float64)\n', (1103, 1125), True, 'import numpy as np\n'), ((1214, 1234), 'numpy.max', 'np.max', (['test'], {'axis': '(0)'}), '(test, axis=0)\n', (1220, 1234), True, 'import numpy as np\n'), ((1247, 1267), 'numpy.min', 'np.min', (['test'], {'axis': '(0)'}), '(test, axis=0)\n', (1253, 1267), True, 'import numpy as np\n'), ((2180, 2197), 'chainer.optimizers.Adam', 'optimizers.Adam', ([], {}), '()\n', (2195, 2197), False, 'from chainer import report, training, Chain, datasets, iterators, optimizers\n'), ((2254, 2302), 'chainer.datasets.tuple_dataset.TupleDataset', 'tuple_dataset.TupleDataset', (['train_mat', 'train_mat'], {}), '(train_mat, train_mat)\n', (2280, 2302), False, 'from chainer.datasets import tuple_dataset\n'), ((2310, 2356), 'chainer.datasets.tuple_dataset.TupleDataset', 'tuple_dataset.TupleDataset', (['test_mat', 'test_mat'], {}), '(test_mat, test_mat)\n', (2336, 2356), False, 'from chainer.datasets import tuple_dataset\n'), ((2369, 2405), 'chainer.iterators.SerialIterator', 'iterators.SerialIterator', (['xtrain', '(10)'], {}), '(xtrain, 10)\n', (2393, 2405), False, 'from chainer import report, training, Chain, datasets, iterators, optimizers\n'), ((2417, 2480), 'chainer.iterators.SerialIterator', 'iterators.SerialIterator', (['xtest', '(1)'], {'repeat': '(False)', 'shuffle': '(False)'}), '(xtest, 1, repeat=False, shuffle=False)\n', (2441, 2480), False, 'from chainer import report, training, Chain, datasets, iterators, optimizers\n'), ((2493, 2540), 'chainer.training.StandardUpdater', 'training.StandardUpdater', (['train_iter', 'optimizer'], {}), '(train_iter, optimizer)\n', (2517, 2540), False, 'from chainer import report, training, Chain, datasets, iterators, optimizers\n'), ((2551, 2610), 'chainer.training.Trainer', 'training.Trainer', (['updater', "(n_epoch, 'epoch')"], {'out': '"""result"""'}), "(updater, (n_epoch, 'epoch'), out='result')\n", (2567, 2610), False, 'from chainer import report, training, Chain, datasets, iterators, optimizers\n'), ((2627, 2649), 'chainer.training.extensions.LogReport', 'extensions.LogReport', ([], {}), '()\n', (2647, 2649), False, 'from chainer.training import extensions\n'), ((2666, 2712), 'chainer.training.extensions.PrintReport', 'extensions.PrintReport', (["['epoch', 'main/loss']"], {}), "(['epoch', 'main/loss'])\n", (2688, 2712), False, 'from chainer.training import extensions\n'), ((2730, 2751), 'chainer.training.extensions.snapshot', 'extensions.snapshot', ([], {}), '()\n', (2749, 2751), False, 'from chainer.training import extensions\n'), ((2796, 2830), 'chainer.training.extensions.dump_graph', 'extensions.dump_graph', (['"""main/loss"""'], {}), "('main/loss')\n", (2817, 2830), False, 'from chainer.training import extensions\n'), ((2847, 2871), 'chainer.training.extensions.ProgressBar', 'extensions.ProgressBar', ([], {}), '()\n', (2869, 2871), False, 'from 
chainer.training import extensions\n'), ((2924, 2942), 'numpy.array', 'np.array', (['test_mat'], {}), '(test_mat)\n', (2932, 2942), True, 'import numpy as np\n'), ((3316, 3346), 'numpy.average', 'np.average', (['dev_result'], {'axis': '(0)'}), '(dev_result, axis=0)\n', (3326, 3346), True, 'import numpy as np\n'), ((3421, 3434), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (3431, 3434), False, 'import csv\n'), ((1682, 1711), 'chainer.links.Linear', 'L.Linear', (['image_size', 'hidden1'], {}), '(image_size, hidden1)\n', (1690, 1711), True, 'import chainer.links as L\n'), ((1728, 1754), 'chainer.links.Linear', 'L.Linear', (['hidden1', 'hidden2'], {}), '(hidden1, hidden2)\n', (1736, 1754), True, 'import chainer.links as L\n'), ((1772, 1798), 'chainer.links.Linear', 'L.Linear', (['hidden2', 'hidden1'], {}), '(hidden2, hidden1)\n', (1780, 1798), True, 'import chainer.links as L\n'), ((1816, 1845), 'chainer.links.Linear', 'L.Linear', (['hidden1', 'image_size'], {}), '(hidden1, image_size)\n', (1824, 1845), True, 'import chainer.links as L\n')] |
from typing import Callable, Optional, List, Union, Tuple
from pyjsg.jsglib import JSGObject
from pyjsg.jsglib import isinstance_
from rdflib import BNode, URIRef, Graph
from pyshex.shapemap_structure_and_language.p1_notation_and_terminology import RDFGraph, Node
from pyshex.utils.collection_utils import format_collection
from pyshex.utils.n3_mapper import N3Mapper
class ParseNode:
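    """A node in the ShEx evaluation tree.

    Records the expression being evaluated, the focus node or graph it is applied to,
    the boolean outcome, and enough context to report failure reasons.
    """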
def __init__(self,
function: Callable[["Context", Union[RDFGraph, Node], JSGObject], bool],
expr: JSGObject,
obj: Union[RDFGraph, Node],
cntxt: "Context"):
self.function = function
self.expr = expr
self.graph = obj if isinstance(obj, RDFGraph) else None
self.node = obj if isinstance_(obj, Node) else None
        self.result: Optional[bool] = None
self._fail_reason: Optional[str] = None
self.reason_stack: List[Tuple[Union[BNode, URIRef], Optional[str]]] = []
self.nodes: List[ParseNode] = []
self.n3m = cntxt.n3_mapper
def dump_bnodes(self, g: Graph, node: BNode, indent: str, top: bool = True) -> List[str]:
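        """ Render the triples around ``node`` as indented N3 lines, recursing into nested BNodes.
        On the outermost call (``top=True``) the incoming triples are emitted as well. """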
indent = indent + " "
collection = format_collection(g, node, 6)
if collection is not None:
return [indent + c for c in collection]
rval = []
if top:
for s, p in g.subject_predicates(node):
rval.append(f"{indent} {self.n3m.n3(s)} {self.n3m.n3(p)} {self.n3m.n3(node)} .")
for p, o in sorted(g.predicate_objects(node)):
rval += [f"{indent} {self.n3m.n3(node)} {self.n3m.n3(p)} {self.n3m.n3(o)} ."]
if isinstance(o, BNode):
rval += self.dump_bnodes(g, o, indent, top=False)
return rval
def fail_reasons(self, g: Graph, depth: int = 0) -> List[str]:
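        """ Build an indented, human-readable trace of every failure reason in this subtree """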
def follow_reasons(d: int) -> List[str]:
fr = []
if self._fail_reason:
fr.append(d * " " + f" {self._fail_reason}")
d += 1
for n in self.nodes:
fr += n.fail_reasons(g, d)
return fr
rval = []
for i in range(0, len(self.reason_stack)):
node, shape_name = self.reason_stack[i]
if not shape_name:
shape_name = '(unnamed shape)'
indent = (i+depth)*" "
rval.append(f"{indent} Testing {self.n3m.n3(node)} against shape {shape_name}")
if isinstance(node, BNode):
rval += [f"{indent} {self.n3m.n3(node)} context:"]
rval += self.dump_bnodes(g, node, indent)
rval[-1] = rval[-1] + '\n'
rval += follow_reasons(depth + len(self.reason_stack))
return rval
def set_result(self, rval: bool) -> None:
""" Set the result of the evaluation. If the result is true, prune all of the children that didn't cut it
:param rval: Result of evaluation
"""
self.result = rval
if self.result:
self.nodes = [pn for pn in self.nodes if pn.result]
| [
"pyjsg.jsglib.isinstance_",
"pyshex.utils.collection_utils.format_collection"
] | [((1184, 1213), 'pyshex.utils.collection_utils.format_collection', 'format_collection', (['g', 'node', '(6)'], {}), '(g, node, 6)\n', (1201, 1213), False, 'from pyshex.utils.collection_utils import format_collection\n'), ((766, 788), 'pyjsg.jsglib.isinstance_', 'isinstance_', (['obj', 'Node'], {}), '(obj, Node)\n', (777, 788), False, 'from pyjsg.jsglib import isinstance_\n')] |
import torch
import copy
from torch.utils.data import Dataset
from architecture import feature_tensor_encoding_201
from .nasbench201_database import NASBench201DataBase
class NASBench201Dataset(Dataset):
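    """Dataset view over a NAS-Bench-201 archive: each item maps a target dataset name to
    (feature tensor, val acc, test acc, params, FLOPs, edge-type counts, rank)."""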
def __init__(self, database: NASBench201DataBase, seed):
self.database = database
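        # seed a CPU generator so the architecture permutation is reproducible across runs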
g_cpu = torch.Generator()
g_cpu.manual_seed(seed)
self.index_list = torch.randperm(self.database.size,
generator=g_cpu).tolist()
self.keys_list = list(self.database.index_iterator())
def __getitem__(self, index):
arch_id = self.keys_list[self.index_list[index]]
arch = self.database.query_by_id(arch_id)
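        # encode the cell into a feature tensor; deepcopy keeps the cached record unmodified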
arch_feature, edges_type_counts = feature_tensor_encoding_201(copy.deepcopy(arch))
network_data = {}
for net_type in ['cifar10-valid', 'cifar100', 'ImageNet16-120']:
params = arch['{}_total_params'.format(net_type)]
flops = arch['{}_total_flops'.format(net_type)]
if net_type == 'cifar10-valid':
val_acc = arch['cifar10_val_acc']
test_acc = arch['cifar10_test_acc']
rank = arch['cifar10_rank']
network_data['cifar10'] = (arch_feature[net_type], val_acc, test_acc, params, flops, edges_type_counts, rank)
elif net_type == 'ImageNet16-120':
val_acc = arch['imagenet16_val_acc']
test_acc = arch['imagenet16_test_acc']
rank = arch['imagenet16_rank']
network_data['imagenet16'] = (arch_feature[net_type], val_acc, test_acc, params, flops, edges_type_counts, rank)
else:
val_acc = arch['cifar100_val_acc']
test_acc = arch['cifar100_test_acc']
rank = arch['cifar100_rank']
network_data['cifar100'] = (arch_feature[net_type], val_acc, test_acc, params, flops, edges_type_counts, rank)
return network_data
    def query_arch_by_str(self, arch_str: str, network_type):
        assert network_type in [
            'cifar10-valid', 'cifar100', 'ImageNet16-120'
        ], 'network_type must be one of: cifar10-valid, cifar100, ImageNet16-120'
arch_key = self.database.check_arch_inside_dataset(arch_str)
if arch_key is None:
return None, None, None
keys_list_idx = self.keys_list.index(arch_key)
index_list_idx = self.index_list.index(keys_list_idx)
flops = self.database.archs[arch_key]['{}_total_flops'.format(network_type)]
params = self.database.archs[arch_key]['{}_total_params'.format(network_type)]
return flops, params, index_list_idx
def __len__(self):
return self.database.size
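
# A minimal usage sketch (the NASBench201DataBase constructor arguments are assumed, not shown here):
#   db = NASBench201DataBase(...)              # load the benchmark archive
#   ds = NASBench201Dataset(db, seed=0)
#   feat, val_acc, test_acc, params, flops, counts, rank = ds[0]['cifar10']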
| [
"copy.deepcopy",
"torch.randperm",
"torch.Generator"
] | [((319, 336), 'torch.Generator', 'torch.Generator', ([], {}), '()\n', (334, 336), False, 'import torch\n'), ((772, 791), 'copy.deepcopy', 'copy.deepcopy', (['arch'], {}), '(arch)\n', (785, 791), False, 'import copy\n'), ((396, 447), 'torch.randperm', 'torch.randperm', (['self.database.size'], {'generator': 'g_cpu'}), '(self.database.size, generator=g_cpu)\n', (410, 447), False, 'import torch\n')] |
#
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
from unittest.mock import MagicMock
import pytest
from airbyte_cdk.models import (
AirbyteMessage,
AirbyteRecordMessage,
AirbyteStream,
ConfiguredAirbyteCatalog,
ConfiguredAirbyteStream,
ConnectorSpecification,
Type,
)
from source_acceptance_test.config import BasicReadTestConfig
from source_acceptance_test.tests.test_core import TestBasicRead as _TestBasicRead
from source_acceptance_test.tests.test_core import TestDiscovery as _TestDiscovery
from source_acceptance_test.tests.test_core import TestSpec as _TestSpec
@pytest.mark.parametrize(
"schema, cursors, should_fail",
[
({}, ["created"], True),
({"properties": {"created": {"type": "string"}}}, ["created"], False),
({"properties": {"created_at": {"type": "string"}}}, ["created"], True),
({"properties": {"created": {"type": "string"}}}, ["updated", "created"], True),
({"properties": {"updated": {"type": "object", "properties": {"created": {"type": "string"}}}}}, ["updated", "created"], False),
({"properties": {"created": {"type": "object", "properties": {"updated": {"type": "string"}}}}}, ["updated", "created"], True),
],
)
def test_discovery(schema, cursors, should_fail):
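    # every declared cursor must resolve, level by level, to a property in the discovered schema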
t = _TestDiscovery()
discovered_catalog = {
"test_stream": AirbyteStream.parse_obj({"name": "test_stream", "json_schema": schema, "default_cursor_field": cursors})
}
if should_fail:
with pytest.raises(AssertionError):
t.test_defined_cursors_exist_in_schema(None, discovered_catalog)
else:
t.test_defined_cursors_exist_in_schema(None, discovered_catalog)
@pytest.mark.parametrize(
"schema, record, should_fail",
[
({"type": "object"}, {"aa": 23}, False),
({"type": "object"}, {}, False),
({"type": "object", "properties": {"created": {"type": "string"}}}, {"aa": 23}, True),
({"type": "object", "properties": {"created": {"type": "string"}}}, {"created": "23"}, False),
({"type": "object", "properties": {"created": {"type": "string"}}}, {"root": {"created": "23"}}, True),
# Recharge shop stream case
(
{"type": "object", "properties": {"shop": {"type": ["null", "object"]}, "store": {"type": ["null", "object"]}}},
{"shop": {"a": "23"}, "store": {"b": "23"}},
False,
),
],
)
def test_read(schema, record, should_fail):
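    # the read should fail when no record field matches the properties declared in the stream schema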
catalog = ConfiguredAirbyteCatalog(
streams=[
ConfiguredAirbyteStream(
stream=AirbyteStream.parse_obj({"name": "test_stream", "json_schema": schema}),
sync_mode="full_refresh",
destination_sync_mode="overwrite",
)
]
)
input_config = BasicReadTestConfig()
docker_runner_mock = MagicMock()
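    # stub the connector container: every read returns a single record for "test_stream"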
docker_runner_mock.call_read.return_value = [
AirbyteMessage(type=Type.RECORD, record=AirbyteRecordMessage(stream="test_stream", data=record, emitted_at=111))
]
t = _TestBasicRead()
if should_fail:
with pytest.raises(AssertionError, match="stream should have some fields mentioned by json schema"):
t.test_read(None, catalog, input_config, [], docker_runner_mock, MagicMock())
else:
t.test_read(None, catalog, input_config, [], docker_runner_mock, MagicMock())
@pytest.mark.parametrize(
"connector_spec, expected_error",
[
# SUCCESS: no authSpecification specified
(ConnectorSpecification(connectionSpecification={}), ""),
# FAIL: Field specified in root object does not exist
(
ConnectorSpecification(
connectionSpecification={"type": "object"},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 0],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"Specified oauth fields are missed from spec schema:",
),
# SUCCESS: Empty root object
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": [],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
# FAIL: Some oauth fields missed
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
},
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 0],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"Specified oauth fields are missed from spec schema:",
),
# SUCCESS: case w/o oneOf property
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
},
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials"],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
# SUCCESS: case w/ oneOf property
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"oneOf": [
{
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
}
},
{
"properties": {
"api_key": {"type": "string"},
}
},
],
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 0],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
# FAIL: Wrong root object index
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"oneOf": [
{
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
}
},
{
"properties": {
"api_key": {"type": "string"},
}
},
],
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 1],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"Specified oauth fields are missed from spec schema:",
),
# SUCCESS: root object index equal to 1
(
ConnectorSpecification(
connectionSpecification={
"type": "object",
"properties": {
"credentials": {
"type": "object",
"oneOf": [
{
"properties": {
"api_key": {"type": "string"},
}
},
{
"properties": {
"client_id": {"type": "string"},
"client_secret": {"type": "string"},
"access_token": {"type": "string"},
"refresh_token": {"type": "string"},
}
},
],
}
},
},
authSpecification={
"auth_type": "oauth2.0",
"oauth2Specification": {
"rootObject": ["credentials", 1],
"oauthFlowInitParameters": [["client_id"], ["client_secret"]],
"oauthFlowOutputParameters": [["access_token"], ["refresh_token"]],
},
},
),
"",
),
],
)
def test_validate_oauth_flow(connector_spec, expected_error):
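    # oauth init/output parameters must exist in the spec schema under the advertised rootObject path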
t = _TestSpec()
if expected_error:
with pytest.raises(AssertionError, match=expected_error):
t.test_oauth_flow_parameters(connector_spec)
else:
t.test_oauth_flow_parameters(connector_spec)
| [
"unittest.mock.MagicMock",
"source_acceptance_test.config.BasicReadTestConfig",
"airbyte_cdk.models.AirbyteRecordMessage",
"pytest.mark.parametrize",
"source_acceptance_test.tests.test_core.TestSpec",
"pytest.raises",
"source_acceptance_test.tests.test_core.TestBasicRead",
"source_acceptance_test.tests.test_core.TestDiscovery",
"airbyte_cdk.models.ConnectorSpecification",
"airbyte_cdk.models.AirbyteStream.parse_obj"
] | [((614, 1210), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""schema, cursors, should_fail"""', "[({}, ['created'], True), ({'properties': {'created': {'type': 'string'}}},\n ['created'], False), ({'properties': {'created_at': {'type': 'string'}}\n }, ['created'], True), ({'properties': {'created': {'type': 'string'}}},\n ['updated', 'created'], True), ({'properties': {'updated': {'type':\n 'object', 'properties': {'created': {'type': 'string'}}}}}, ['updated',\n 'created'], False), ({'properties': {'created': {'type': 'object',\n 'properties': {'updated': {'type': 'string'}}}}}, ['updated', 'created'\n ], True)]"], {}), "('schema, cursors, should_fail', [({}, ['created'], \n True), ({'properties': {'created': {'type': 'string'}}}, ['created'], \n False), ({'properties': {'created_at': {'type': 'string'}}}, ['created'\n ], True), ({'properties': {'created': {'type': 'string'}}}, ['updated',\n 'created'], True), ({'properties': {'updated': {'type': 'object',\n 'properties': {'created': {'type': 'string'}}}}}, ['updated', 'created'\n ], False), ({'properties': {'created': {'type': 'object', 'properties':\n {'updated': {'type': 'string'}}}}}, ['updated', 'created'], True)])\n", (637, 1210), False, 'import pytest\n'), ((1708, 2325), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""schema, record, should_fail"""', "[({'type': 'object'}, {'aa': 23}, False), ({'type': 'object'}, {}, False),\n ({'type': 'object', 'properties': {'created': {'type': 'string'}}}, {\n 'aa': 23}, True), ({'type': 'object', 'properties': {'created': {'type':\n 'string'}}}, {'created': '23'}, False), ({'type': 'object',\n 'properties': {'created': {'type': 'string'}}}, {'root': {'created':\n '23'}}, True), ({'type': 'object', 'properties': {'shop': {'type': [\n 'null', 'object']}, 'store': {'type': ['null', 'object']}}}, {'shop': {\n 'a': '23'}, 'store': {'b': '23'}}, False)]"], {}), "('schema, record, should_fail', [({'type': 'object'},\n {'aa': 23}, False), ({'type': 'object'}, {}, False), ({'type': 'object',\n 'properties': {'created': {'type': 'string'}}}, {'aa': 23}, True), ({\n 'type': 'object', 'properties': {'created': {'type': 'string'}}}, {\n 'created': '23'}, False), ({'type': 'object', 'properties': {'created':\n {'type': 'string'}}}, {'root': {'created': '23'}}, True), ({'type':\n 'object', 'properties': {'shop': {'type': ['null', 'object']}, 'store':\n {'type': ['null', 'object']}}}, {'shop': {'a': '23'}, 'store': {'b':\n '23'}}, False)])\n", (1731, 2325), False, 'import pytest\n'), ((1303, 1319), 'source_acceptance_test.tests.test_core.TestDiscovery', '_TestDiscovery', ([], {}), '()\n', (1317, 1319), True, 'from source_acceptance_test.tests.test_core import TestDiscovery as _TestDiscovery\n'), ((2818, 2839), 'source_acceptance_test.config.BasicReadTestConfig', 'BasicReadTestConfig', ([], {}), '()\n', (2837, 2839), False, 'from source_acceptance_test.config import BasicReadTestConfig\n'), ((2865, 2876), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (2874, 2876), False, 'from unittest.mock import MagicMock\n'), ((3062, 3078), 'source_acceptance_test.tests.test_core.TestBasicRead', '_TestBasicRead', ([], {}), '()\n', (3076, 3078), True, 'from source_acceptance_test.tests.test_core import TestBasicRead as _TestBasicRead\n'), ((12280, 12291), 'source_acceptance_test.tests.test_core.TestSpec', '_TestSpec', ([], {}), '()\n', (12289, 12291), True, 'from source_acceptance_test.tests.test_core import TestSpec as _TestSpec\n'), ((1370, 1478), 
'airbyte_cdk.models.AirbyteStream.parse_obj', 'AirbyteStream.parse_obj', (["{'name': 'test_stream', 'json_schema': schema, 'default_cursor_field': cursors}"], {}), "({'name': 'test_stream', 'json_schema': schema,\n 'default_cursor_field': cursors})\n", (1393, 1478), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((1514, 1543), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1527, 1543), False, 'import pytest\n'), ((3112, 3211), 'pytest.raises', 'pytest.raises', (['AssertionError'], {'match': '"""stream should have some fields mentioned by json schema"""'}), "(AssertionError, match=\n 'stream should have some fields mentioned by json schema')\n", (3125, 3211), False, 'import pytest\n'), ((3381, 3392), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3390, 3392), False, 'from unittest.mock import MagicMock\n'), ((12328, 12379), 'pytest.raises', 'pytest.raises', (['AssertionError'], {'match': 'expected_error'}), '(AssertionError, match=expected_error)\n', (12341, 12379), False, 'import pytest\n'), ((3525, 3575), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': '{}'}), '(connectionSpecification={})\n', (3547, 3575), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((3666, 3986), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': "{'type': 'object'}", 'authSpecification': "{'auth_type': 'oauth2.0', 'oauth2Specification': {'rootObject': [\n 'credentials', 0], 'oauthFlowInitParameters': [['client_id'], [\n 'client_secret']], 'oauthFlowOutputParameters': [['access_token'], [\n 'refresh_token']]}}"}), "(connectionSpecification={'type': 'object'},\n authSpecification={'auth_type': 'oauth2.0', 'oauth2Specification': {\n 'rootObject': ['credentials', 0], 'oauthFlowInitParameters': [[\n 'client_id'], ['client_secret']], 'oauthFlowOutputParameters': [[\n 'access_token'], ['refresh_token']]}})\n", (3688, 3986), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((4307, 4778), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': "{'type': 'object', 'properties': {'client_id': {'type': 'string'},\n 'client_secret': {'type': 'string'}, 'access_token': {'type': 'string'},\n 'refresh_token': {'type': 'string'}}}", 'authSpecification': "{'auth_type': 'oauth2.0', 'oauth2Specification': {'rootObject': [],\n 'oauthFlowInitParameters': [['client_id'], ['client_secret']],\n 'oauthFlowOutputParameters': [['access_token'], ['refresh_token']]}}"}), "(connectionSpecification={'type': 'object',\n 'properties': {'client_id': {'type': 'string'}, 'client_secret': {\n 'type': 'string'}, 'access_token': {'type': 'string'}, 'refresh_token':\n {'type': 'string'}}}, authSpecification={'auth_type': 'oauth2.0',\n 'oauth2Specification': {'rootObject': [], 'oauthFlowInitParameters': [[\n 'client_id'], ['client_secret']], 'oauthFlowOutputParameters': [[\n 'access_token'], ['refresh_token']]}})\n", (4329, 4778), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, 
ConnectorSpecification, Type\n'), ((5222, 5721), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': "{'type': 'object', 'properties': {'credentials': {'type': 'object',\n 'properties': {'client_id': {'type': 'string'}, 'client_secret': {\n 'type': 'string'}, 'access_token': {'type': 'string'}}}}}", 'authSpecification': "{'auth_type': 'oauth2.0', 'oauth2Specification': {'rootObject': [\n 'credentials', 0], 'oauthFlowInitParameters': [['client_id'], [\n 'client_secret']], 'oauthFlowOutputParameters': [['access_token'], [\n 'refresh_token']]}}"}), "(connectionSpecification={'type': 'object',\n 'properties': {'credentials': {'type': 'object', 'properties': {\n 'client_id': {'type': 'string'}, 'client_secret': {'type': 'string'},\n 'access_token': {'type': 'string'}}}}}, authSpecification={'auth_type':\n 'oauth2.0', 'oauth2Specification': {'rootObject': ['credentials', 0],\n 'oauthFlowInitParameters': [['client_id'], ['client_secret']],\n 'oauthFlowOutputParameters': [['access_token'], ['refresh_token']]}})\n", (5244, 5721), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((6357, 6897), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': "{'type': 'object', 'properties': {'credentials': {'type': 'object',\n 'properties': {'client_id': {'type': 'string'}, 'client_secret': {\n 'type': 'string'}, 'access_token': {'type': 'string'}, 'refresh_token':\n {'type': 'string'}}}}}", 'authSpecification': "{'auth_type': 'oauth2.0', 'oauth2Specification': {'rootObject': [\n 'credentials'], 'oauthFlowInitParameters': [['client_id'], [\n 'client_secret']], 'oauthFlowOutputParameters': [['access_token'], [\n 'refresh_token']]}}"}), "(connectionSpecification={'type': 'object',\n 'properties': {'credentials': {'type': 'object', 'properties': {\n 'client_id': {'type': 'string'}, 'client_secret': {'type': 'string'},\n 'access_token': {'type': 'string'}, 'refresh_token': {'type': 'string'}\n }}}}, authSpecification={'auth_type': 'oauth2.0', 'oauth2Specification':\n {'rootObject': ['credentials'], 'oauthFlowInitParameters': [[\n 'client_id'], ['client_secret']], 'oauthFlowOutputParameters': [[\n 'access_token'], ['refresh_token']]}})\n", (6379, 6897), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((7506, 8116), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': "{'type': 'object', 'properties': {'credentials': {'type': 'object', 'oneOf':\n [{'properties': {'client_id': {'type': 'string'}, 'client_secret': {\n 'type': 'string'}, 'access_token': {'type': 'string'}, 'refresh_token':\n {'type': 'string'}}}, {'properties': {'api_key': {'type': 'string'}}}]}}}", 'authSpecification': "{'auth_type': 'oauth2.0', 'oauth2Specification': {'rootObject': [\n 'credentials', 0], 'oauthFlowInitParameters': [['client_id'], [\n 'client_secret']], 'oauthFlowOutputParameters': [['access_token'], [\n 'refresh_token']]}}"}), "(connectionSpecification={'type': 'object',\n 'properties': {'credentials': {'type': 'object', 'oneOf': [{\n 'properties': {'client_id': {'type': 'string'}, 'client_secret': {\n 'type': 'string'}, 'access_token': {'type': 'string'}, 'refresh_token':\n {'type': 'string'}}}, {'properties': {'api_key': {'type': 
'string'}}}]}\n }}, authSpecification={'auth_type': 'oauth2.0', 'oauth2Specification':\n {'rootObject': ['credentials', 0], 'oauthFlowInitParameters': [[\n 'client_id'], ['client_secret']], 'oauthFlowOutputParameters': [[\n 'access_token'], ['refresh_token']]}})\n", (7528, 8116), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((9072, 9682), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': "{'type': 'object', 'properties': {'credentials': {'type': 'object', 'oneOf':\n [{'properties': {'client_id': {'type': 'string'}, 'client_secret': {\n 'type': 'string'}, 'access_token': {'type': 'string'}, 'refresh_token':\n {'type': 'string'}}}, {'properties': {'api_key': {'type': 'string'}}}]}}}", 'authSpecification': "{'auth_type': 'oauth2.0', 'oauth2Specification': {'rootObject': [\n 'credentials', 1], 'oauthFlowInitParameters': [['client_id'], [\n 'client_secret']], 'oauthFlowOutputParameters': [['access_token'], [\n 'refresh_token']]}}"}), "(connectionSpecification={'type': 'object',\n 'properties': {'credentials': {'type': 'object', 'oneOf': [{\n 'properties': {'client_id': {'type': 'string'}, 'client_secret': {\n 'type': 'string'}, 'access_token': {'type': 'string'}, 'refresh_token':\n {'type': 'string'}}}, {'properties': {'api_key': {'type': 'string'}}}]}\n }}, authSpecification={'auth_type': 'oauth2.0', 'oauth2Specification':\n {'rootObject': ['credentials', 1], 'oauthFlowInitParameters': [[\n 'client_id'], ['client_secret']], 'oauthFlowOutputParameters': [[\n 'access_token'], ['refresh_token']]}})\n", (9094, 9682), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((10697, 11305), 'airbyte_cdk.models.ConnectorSpecification', 'ConnectorSpecification', ([], {'connectionSpecification': "{'type': 'object', 'properties': {'credentials': {'type': 'object', 'oneOf':\n [{'properties': {'api_key': {'type': 'string'}}}, {'properties': {\n 'client_id': {'type': 'string'}, 'client_secret': {'type': 'string'},\n 'access_token': {'type': 'string'}, 'refresh_token': {'type': 'string'}\n }}]}}}", 'authSpecification': "{'auth_type': 'oauth2.0', 'oauth2Specification': {'rootObject': [\n 'credentials', 1], 'oauthFlowInitParameters': [['client_id'], [\n 'client_secret']], 'oauthFlowOutputParameters': [['access_token'], [\n 'refresh_token']]}}"}), "(connectionSpecification={'type': 'object',\n 'properties': {'credentials': {'type': 'object', 'oneOf': [{\n 'properties': {'api_key': {'type': 'string'}}}, {'properties': {\n 'client_id': {'type': 'string'}, 'client_secret': {'type': 'string'},\n 'access_token': {'type': 'string'}, 'refresh_token': {'type': 'string'}\n }}]}}}, authSpecification={'auth_type': 'oauth2.0',\n 'oauth2Specification': {'rootObject': ['credentials', 1],\n 'oauthFlowInitParameters': [['client_id'], ['client_secret']],\n 'oauthFlowOutputParameters': [['access_token'], ['refresh_token']]}})\n", (10719, 11305), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((2975, 3046), 'airbyte_cdk.models.AirbyteRecordMessage', 'AirbyteRecordMessage', ([], {'stream': '"""test_stream"""', 'data': 'record', 'emitted_at': '(111)'}), "(stream='test_stream', data=record, 
emitted_at=111)\n", (2995, 3046), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n'), ((3285, 3296), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3294, 3296), False, 'from unittest.mock import MagicMock\n'), ((2603, 2674), 'airbyte_cdk.models.AirbyteStream.parse_obj', 'AirbyteStream.parse_obj', (["{'name': 'test_stream', 'json_schema': schema}"], {}), "({'name': 'test_stream', 'json_schema': schema})\n", (2626, 2674), False, 'from airbyte_cdk.models import AirbyteMessage, AirbyteRecordMessage, AirbyteStream, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, ConnectorSpecification, Type\n')] |
import numpy as np
from scikits.statsmodels.tsa.api import VAR
from scikits.statsmodels.api import datasets as ds
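# load the statsmodels macroeconomic dataset and keep real GDP, consumption and investment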
mdata = ds.macrodata.load().data[['realgdp', 'realcons', 'realinv']]
names = mdata.dtype.names
data = mdata.view((float,3))
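# log-difference the levels to work with approximate growth rates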
data = np.diff(np.log(data), axis=0)
model = VAR(data, names=names)
est = model.fit(maxlags=2)
def plot_input():
est.plot()
def plot_acorr():
est.plot_acorr()
def plot_irf():
est.irf().plot()
def plot_irf_cum():
irf = est.irf()
irf.plot_cum_effects()
def plot_forecast():
est.plot_forecast(10)
def plot_fevd():
est.fevd(20).plot()
| [
"numpy.log",
"scikits.statsmodels.tsa.api.VAR",
"scikits.statsmodels.api.datasets.macrodata.load"
] | [((286, 308), 'scikits.statsmodels.tsa.api.VAR', 'VAR', (['data'], {'names': 'names'}), '(data, names=names)\n', (289, 308), False, 'from scikits.statsmodels.tsa.api import VAR\n'), ((255, 267), 'numpy.log', 'np.log', (['data'], {}), '(data)\n', (261, 267), True, 'import numpy as np\n'), ((124, 143), 'scikits.statsmodels.api.datasets.macrodata.load', 'ds.macrodata.load', ([], {}), '()\n', (141, 143), True, 'from scikits.statsmodels.api import datasets as ds\n')] |
import os
from kivy.utils import platform
if platform == 'android':
os.environ["IMAGEIO_FFMPEG_EXE"] = "/usr/bin/ffmpeg"
import moviepy.editor as mpy
import glob, uuid, threading
from PIL import Image
from kivy.core.window import Window
from proglog import ProgressBarLogger
from math import floor
from functools import partial
# Globals
from pytikzgenerate import globales
#KIVY
from kivy.lang import Builder
from kivy.uix.relativelayout import RelativeLayout
from kivy.graphics import Color,Rectangle,Ellipse,Line
# KIVYMD FRAMEWORK
from kivymd.uix.dialog import MDDialog
from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.button import MDRaisedButton
# In-house libraries
from pytikzgenerate.modulos.limpiar_recursos import limpiar_recursos
class InfoProgreso(MDBoxLayout):
pass
# PROGRESS BAR - KIVY CODE
Builder.load_string('''
<InfoProgreso>:
orientation: "vertical"
size_hint_y: None
height: dp(100)
MDLabel:
id: porcentaje_de_realizacion
font_name: "media/fonts/OpenSans-ExtraBold"
font_style: "H6"
halign: "center"
size_hint: (1.0, .15)
MDLabel:
id: info
font_name: "media/fonts/OpenSans-ExtraBold"
font_style: "Body1"
halign: "center"
size_hint: (1.0, .75)
MDProgressBar:
id: porcentaje_actual
size_hint: (1.0, .1)
''')
# PROGRESS BAR - MOVIEPY API
class ProgresoCreacionImagen(ProgressBarLogger):
# `window` is the class where all the gui widgets are held
def __init__(self,clase_generar_archivo):
super().__init__(init_state=None, bars=None, ignored_bars=None,
logged_bars='all', min_time_interval=0, ignore_bars_under=0)
self.clase_generar_archivo = clase_generar_archivo
def callback(self, **changes):
# Every time the logger is updated, this function is called with
# the `changes` dictionnary of the form `parameter: new value`.
# the `try` is to avoid KeyErrors before moviepy generates a `'t'` dict
try:
index = self.state['bars']['t']['index']
total = self.state['bars']['t']['total']
porcentaje_de_realizacion = index / total * 100
if porcentaje_de_realizacion < 0:
porcentaje_de_realizacion = 0
if porcentaje_de_realizacion > 100:
porcentaje_de_realizacion = 100
self.clase_generar_archivo.actualizar_wid(porcentaje_de_realizacion, index=index, total=total,generar_dibujo_en_formato=True)
except KeyError as e:
print("ERROR")
print(e)
class GuardarDibujoEnImagen():
def __init__(self,area_de_dibujar):
        # CONFIGURATION OF THE WIDGET TO EXPORT AS AN IMAGE
w,h = Window.size
print("Window.size")
print(Window.size)
self.wid_gif = RelativeLayout(size=(w,h))
with self.wid_gif.canvas:
Color(1,1,1,0)
Rectangle(pos=self.wid_gif.pos,size=self.wid_gif.size)
def figura_a_png(self,generar_dibujo_en_formato,name_figure,size,pos,color_relleno,tipo_de_linea,coords_borde,color_borde,line_width,angle_start=0,angle_end=0):
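        # draw the requested figure (rectangle, arc or circle) on the off-screen canvas with its fill and border, and export it to PNG/JPG when generar_dibujo_en_formato is set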
        # APPLY FILL
with self.wid_gif.canvas:
Color(*color_relleno)
if name_figure == "rectangle":
with self.wid_gif.canvas:
figura = Rectangle(pos=pos,size=size)
elif name_figure == "arc":
with self.wid_gif.canvas:
figura = Ellipse(pos=pos,size=size,angle_start=angle_start,angle_end=angle_end)
elif name_figure == "circle":
with self.wid_gif.canvas:
figura = Ellipse(pos=pos,size=size)
        # APPLY BORDER
if tipo_de_linea:
with self.wid_gif.canvas:
Color(*color_borde)
if name_figure == "rectangle":
with self.wid_gif.canvas:
                    # RECTANGLE BORDER WITH DASHED LINES
Line(points=coords_borde, dash_offset=10, dash_length=5)
elif name_figure == "arc":
with self.wid_gif.canvas:
Line(circle=coords_borde, dash_offset=10, dash_length=5)
else:
if name_figure == "rectangle":
with self.wid_gif.canvas:
                    # RECTANGLE BORDER
Line(points=coords_borde,width=line_width)
elif name_figure == "arc":
with self.wid_gif.canvas:
Line(circle=coords_borde,width=line_width)
if generar_dibujo_en_formato:
            # THIS PROCESS ALSO TAKES TIME, BUT I DON'T KNOW HOW TO REFLECT IT IN A PROGRESS BAR
            # 1. Save images with transparency, so that the figure is the only non-transparent content...
id_figura = str(figura.uid)
nombre_img = 'figura_estandar_'+id_figura+".png"
ruta = os.path.join(globales.ruta_raiz,'recursos/crear_imagen/grafica_original/'+nombre_img)
self.wid_gif.export_to_png(ruta)
            # 2. Remove the image's transparency to keep only the figure
image_png = Image.open(ruta)
image_png.getbbox()
image_png = image_png.crop(image_png.getbbox())
            # 3. Convert PNG to JPG to be compatible as an image sequence for a .GIF (if required)
image_png.load()
background = Image.new("RGB", image_png.size, (255, 255, 255))
background.paste(image_png, mask=image_png.split()[3])
nombre_img = 'figura_estandar_'+id_figura+".jpg"
ruta = os.path.join(globales.ruta_raiz,'recursos/crear_imagen/grafica_recortada/'+nombre_img)
background.save(ruta, 'JPEG', quality=80)
def crear_imagen(self):
        # 1. CONFIGURATION - PROGRESS BAR
        self.old_value = 0  # IMPORTANT - PREVIOUS VALUE LEFT BY THE LAST RENDERED LOADING STEP [PROGRESS BAR]
        # 2. SHOW THE POP-UP, CONNECT TO THE MoviePy Logger API AND CREATE AN ANIMATED GIF...
self.contenido_progreso_wid = InfoProgreso()
btn_salir = MDRaisedButton(text="Vale",font_name=os.path.join(globales.ruta_raiz,"media/fonts/OpenSans-SemiBold"))
self.md_dialog = MDDialog(
title="Información del progreso de generación",
type="custom",
radius=[20, 7, 20, 7],
content_cls=self.contenido_progreso_wid,
buttons=[
btn_salir
]
)
        # Attach the corresponding behaviors
def cerrar_md_dialog(md_dialog,*args):
md_dialog.dismiss()
btn_salir.bind(on_release=partial(cerrar_md_dialog,self.md_dialog))
self.md_dialog.open()
        # Generate a GIF from a sequence of images
        threading.Thread(target=self.__crear_imagen).start()  # in this case the function through which the progress bar fills until finished is "self.onMul"
def __crear_imagen(self):
        # CONNECTION TO THE MoviePy Logger API
my_bar_logger = ProgresoCreacionImagen(self)
        # GENERATE THE FILE
id = str(uuid.uuid4())
        # Sort the files in ascending order
input_png_list = glob.glob(os.path.join(globales.ruta_raiz,"recursos/crear_imagen/grafica_recortada/*.jpg"))
input_png_list.sort()
clips = [mpy.ImageClip(i).set_duration(.1)
for i in input_png_list]
        # Is there an image sequence, or at least one image?
if (len(clips) > 0):
concat_clip = mpy.concatenate_videoclips(clips, method="compose")
            # Not an image sequence - GENERATE A JPG
if len(clips) == 1:
self.ruta_imagen_creado = os.path.join(globales.ruta_imagen,'Pytikz/imagen_generado_id-'+id+'.jpg').replace("/","\\")
concat_clip.write_gif(self.ruta_imagen_creado,fps=2, logger=my_bar_logger)
            # An image sequence - GENERATE A GIF
else:
self.ruta_imagen_creado = os.path.join(globales.ruta_imagen,'Pytikz/imagen_generado_id-'+id+'.gif').replace("/","\\")
concat_clip.write_gif(self.ruta_imagen_creado,fps=2, logger=my_bar_logger)
        # If there is none, it is an error...
else:
self.md_dialog.title = "¡ERROR!"
self.contenido_progreso_wid.ids.info.text = f"Ocurrio un error al momento de crear la imagen"
        # Clean up resources
limpiar_recursos()
def actualizar_wid(self, porcentaje_de_realizacion, info="",index=0, total=0, generar_dibujo_en_formato=False,*args):
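        # refresh the dialog's progress bar and labels; update only on whole 5% steps to limit redraws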
porcentaje_actual = floor(porcentaje_de_realizacion)
if porcentaje_actual != self.old_value and porcentaje_actual % 5 == 0:
            self.contenido_progreso_wid.ids.porcentaje_actual.value = porcentaje_actual  # IMPORTANT - [PROGRESS BAR]
self.contenido_progreso_wid.ids.porcentaje_de_realizacion.text = "PROGRESO: "+str(porcentaje_actual)
if generar_dibujo_en_formato:
self.contenido_progreso_wid.ids.info.text = f"{index} de {total} frames de la imagen completados... ({floor(porcentaje_de_realizacion)}%)"
else:
self.contenido_progreso_wid.ids.info.text = info
if(porcentaje_actual == 100):
self.md_dialog.title = "¡EXITO!"
self.contenido_progreso_wid.ids.porcentaje_de_realizacion.text = "La imagen se creo satisfactoriamente"
self.contenido_progreso_wid.ids.info.text = "Ruta: "+self.ruta_imagen_creado | [
"kivy.uix.relativelayout.RelativeLayout",
"PIL.Image.open",
"kivy.graphics.Ellipse",
"math.floor",
"kivy.graphics.Line",
"PIL.Image.new",
"kivy.lang.Builder.load_string",
"os.path.join",
"kivymd.uix.dialog.MDDialog",
"kivy.graphics.Rectangle",
"uuid.uuid4",
"moviepy.editor.concatenate_videoclips",
"pytikzgenerate.modulos.limpiar_recursos.limpiar_recursos",
"functools.partial",
"moviepy.editor.ImageClip",
"kivy.graphics.Color",
"threading.Thread"
] | [((831, 1381), 'kivy.lang.Builder.load_string', 'Builder.load_string', (['"""\n<InfoProgreso>:\n orientation: "vertical"\n size_hint_y: None\n height: dp(100)\n MDLabel:\n id: porcentaje_de_realizacion\n font_name: "media/fonts/OpenSans-ExtraBold"\n font_style: "H6"\n halign: "center"\n size_hint: (1.0, .15)\n MDLabel:\n id: info\n font_name: "media/fonts/OpenSans-ExtraBold"\n font_style: "Body1"\n halign: "center"\n size_hint: (1.0, .75)\n MDProgressBar:\n id: porcentaje_actual\n size_hint: (1.0, .1)\n"""'], {}), '(\n """\n<InfoProgreso>:\n orientation: "vertical"\n size_hint_y: None\n height: dp(100)\n MDLabel:\n id: porcentaje_de_realizacion\n font_name: "media/fonts/OpenSans-ExtraBold"\n font_style: "H6"\n halign: "center"\n size_hint: (1.0, .15)\n MDLabel:\n id: info\n font_name: "media/fonts/OpenSans-ExtraBold"\n font_style: "Body1"\n halign: "center"\n size_hint: (1.0, .75)\n MDProgressBar:\n id: porcentaje_actual\n size_hint: (1.0, .1)\n"""\n )\n', (850, 1381), False, 'from kivy.lang import Builder\n'), ((2846, 2873), 'kivy.uix.relativelayout.RelativeLayout', 'RelativeLayout', ([], {'size': '(w, h)'}), '(size=(w, h))\n', (2860, 2873), False, 'from kivy.uix.relativelayout import RelativeLayout\n'), ((6260, 6425), 'kivymd.uix.dialog.MDDialog', 'MDDialog', ([], {'title': '"""Información del progreso de generación"""', 'type': '"""custom"""', 'radius': '[20, 7, 20, 7]', 'content_cls': 'self.contenido_progreso_wid', 'buttons': '[btn_salir]'}), "(title='Información del progreso de generación', type='custom',\n radius=[20, 7, 20, 7], content_cls=self.contenido_progreso_wid, buttons\n =[btn_salir])\n", (6268, 6425), False, 'from kivymd.uix.dialog import MDDialog\n'), ((8494, 8512), 'pytikzgenerate.modulos.limpiar_recursos.limpiar_recursos', 'limpiar_recursos', ([], {}), '()\n', (8510, 8512), False, 'from pytikzgenerate.modulos.limpiar_recursos import limpiar_recursos\n'), ((8664, 8696), 'math.floor', 'floor', (['porcentaje_de_realizacion'], {}), '(porcentaje_de_realizacion)\n', (8669, 8696), False, 'from math import floor\n'), ((2919, 2936), 'kivy.graphics.Color', 'Color', (['(1)', '(1)', '(1)', '(0)'], {}), '(1, 1, 1, 0)\n', (2924, 2936), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((2946, 3001), 'kivy.graphics.Rectangle', 'Rectangle', ([], {'pos': 'self.wid_gif.pos', 'size': 'self.wid_gif.size'}), '(pos=self.wid_gif.pos, size=self.wid_gif.size)\n', (2955, 3001), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((3243, 3264), 'kivy.graphics.Color', 'Color', (['*color_relleno'], {}), '(*color_relleno)\n', (3248, 3264), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((4909, 5001), 'os.path.join', 'os.path.join', (['globales.ruta_raiz', "('recursos/crear_imagen/grafica_original/' + nombre_img)"], {}), "(globales.ruta_raiz, 'recursos/crear_imagen/grafica_original/' +\n nombre_img)\n", (4921, 5001), False, 'import os\n'), ((5145, 5161), 'PIL.Image.open', 'Image.open', (['ruta'], {}), '(ruta)\n', (5155, 5161), False, 'from PIL import Image\n'), ((5433, 5482), 'PIL.Image.new', 'Image.new', (['"""RGB"""', 'image_png.size', '(255, 255, 255)'], {}), "('RGB', image_png.size, (255, 255, 255))\n", (5442, 5482), False, 'from PIL import Image\n'), ((5630, 5723), 'os.path.join', 'os.path.join', (['globales.ruta_raiz', "('recursos/crear_imagen/grafica_recortada/' + nombre_img)"], {}), "(globales.ruta_raiz, 'recursos/crear_imagen/grafica_recortada/' +\n nombre_img)\n", (5642, 5723), False, 'import os\n'), ((7153, 7165), 
'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (7163, 7165), False, 'import glob, uuid, threading\n'), ((7252, 7337), 'os.path.join', 'os.path.join', (['globales.ruta_raiz', '"""recursos/crear_imagen/grafica_recortada/*.jpg"""'], {}), "(globales.ruta_raiz,\n 'recursos/crear_imagen/grafica_recortada/*.jpg')\n", (7264, 7337), False, 'import os\n'), ((7573, 7624), 'moviepy.editor.concatenate_videoclips', 'mpy.concatenate_videoclips', (['clips'], {'method': '"""compose"""'}), "(clips, method='compose')\n", (7599, 7624), True, 'import moviepy.editor as mpy\n'), ((3367, 3396), 'kivy.graphics.Rectangle', 'Rectangle', ([], {'pos': 'pos', 'size': 'size'}), '(pos=pos, size=size)\n', (3376, 3396), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((3796, 3815), 'kivy.graphics.Color', 'Color', (['*color_borde'], {}), '(*color_borde)\n', (3801, 3815), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((6169, 6234), 'os.path.join', 'os.path.join', (['globales.ruta_raiz', '"""media/fonts/OpenSans-SemiBold"""'], {}), "(globales.ruta_raiz, 'media/fonts/OpenSans-SemiBold')\n", (6181, 6234), False, 'import os\n'), ((6684, 6725), 'functools.partial', 'partial', (['cerrar_md_dialog', 'self.md_dialog'], {}), '(cerrar_md_dialog, self.md_dialog)\n', (6691, 6725), False, 'from functools import partial\n'), ((6831, 6875), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.__crear_imagen'}), '(target=self.__crear_imagen)\n', (6847, 6875), False, 'import glob, uuid, threading\n'), ((3494, 3567), 'kivy.graphics.Ellipse', 'Ellipse', ([], {'pos': 'pos', 'size': 'size', 'angle_start': 'angle_start', 'angle_end': 'angle_end'}), '(pos=pos, size=size, angle_start=angle_start, angle_end=angle_end)\n', (3501, 3567), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((3985, 4041), 'kivy.graphics.Line', 'Line', ([], {'points': 'coords_borde', 'dash_offset': '(10)', 'dash_length': '(5)'}), '(points=coords_borde, dash_offset=10, dash_length=5)\n', (3989, 4041), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((4357, 4400), 'kivy.graphics.Line', 'Line', ([], {'points': 'coords_borde', 'width': 'line_width'}), '(points=coords_borde, width=line_width)\n', (4361, 4400), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((7381, 7397), 'moviepy.editor.ImageClip', 'mpy.ImageClip', (['i'], {}), '(i)\n', (7394, 7397), True, 'import moviepy.editor as mpy\n'), ((3666, 3693), 'kivy.graphics.Ellipse', 'Ellipse', ([], {'pos': 'pos', 'size': 'size'}), '(pos=pos, size=size)\n', (3673, 3693), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((4143, 4199), 'kivy.graphics.Line', 'Line', ([], {'circle': 'coords_borde', 'dash_offset': '(10)', 'dash_length': '(5)'}), '(circle=coords_borde, dash_offset=10, dash_length=5)\n', (4147, 4199), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((4501, 4544), 'kivy.graphics.Line', 'Line', ([], {'circle': 'coords_borde', 'width': 'line_width'}), '(circle=coords_borde, width=line_width)\n', (4505, 4544), False, 'from kivy.graphics import Color, Rectangle, Ellipse, Line\n'), ((7758, 7836), 'os.path.join', 'os.path.join', (['globales.ruta_imagen', "('Pytikz/imagen_generado_id-' + id + '.jpg')"], {}), "(globales.ruta_imagen, 'Pytikz/imagen_generado_id-' + id + '.jpg')\n", (7770, 7836), False, 'import os\n'), ((8057, 8135), 'os.path.join', 'os.path.join', (['globales.ruta_imagen', "('Pytikz/imagen_generado_id-' + id + '.gif')"], {}), "(globales.ruta_imagen, 
'Pytikz/imagen_generado_id-' + id + '.gif')\n", (8069, 8135), False, 'import os\n'), ((9165, 9197), 'math.floor', 'floor', (['porcentaje_de_realizacion'], {}), '(porcentaje_de_realizacion)\n', (9170, 9197), False, 'from math import floor\n')] |
import math
import csv
with open("data.csv",newline="")as f:
reader = csv.reader(f)
file_data = list(reader)
data = file_data[0]
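# the first CSV row holds the sample values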
def mean(data):
n = len(data)
total = 0
for x in data:
total+=int(x)
mean = total/n
return mean
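# collect the squared deviation of each value from the mean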
squaredList=[]
m = mean(data)
for number in data:
    a = (int(number) - m) ** 2
    squaredList.append(a)
# sample variance: sum of the squared deviations divided by (n - 1)
total_sq = 0
for i in squaredList:
    total_sq = total_sq + i
result = total_sq/(len(data)-1)
std_deviation = math.sqrt(result)
print(std_deviation)
| [
"math.sqrt",
"csv.reader"
] | [((484, 501), 'math.sqrt', 'math.sqrt', (['result'], {}), '(result)\n', (493, 501), False, 'import math\n'), ((79, 92), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (89, 92), False, 'import csv\n')] |
import MeCab
from .sentence import Sentence
from .token import Morpheme
class Tokenizer:
def __init__(self, *mecab_kwargs):
self.__tagger = MeCab.Tagger(*mecab_kwargs)
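        # dummy parse up front; a common workaround for mecab-python3 issues where node surface strings get garbage-collected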
self.__tagger.parse("Initialize")
def tokenize(self, sentence):
return Sentence([morpheme for morpheme in self.__parse_to_tag(sentence)])
def __parse_to_tag(self, sentence):
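        # skip the BOS node, then yield a Morpheme for every node up to (but excluding) EOS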
node = self.__tagger.parseToNode(sentence)
node = node.next
while node.next:
yield Morpheme(node)
node = node.next
| [
"MeCab.Tagger"
] | [((155, 182), 'MeCab.Tagger', 'MeCab.Tagger', (['*mecab_kwargs'], {}), '(*mecab_kwargs)\n', (167, 182), False, 'import MeCab\n')] |
from flask_login import UserMixin
import sqlalchemy
from data.db_session import SqlAlchemyBase
class User(SqlAlchemyBase, UserMixin):
__tablename__ = 'users'
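    # telegram id, address, list contents and shop coordinates are all stored as nullable columns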
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True, autoincrement=True)
teleg_id = sqlalchemy.Column(sqlalchemy.Integer, nullable=True)
address = sqlalchemy.Column(sqlalchemy.String, nullable=True)
spisok = sqlalchemy.Column(sqlalchemy.String, nullable=True)
coordinates_shop = sqlalchemy.Column(sqlalchemy.String, nullable=True)
def __repr__(self):
return f'<User> {self.id} {self.teleg_id} {self.spisok}'
| [
"sqlalchemy.Column"
] | [((175, 250), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.Integer'], {'primary_key': '(True)', 'autoincrement': '(True)'}), '(sqlalchemy.Integer, primary_key=True, autoincrement=True)\n', (192, 250), False, 'import sqlalchemy\n'), ((266, 318), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.Integer'], {'nullable': '(True)'}), '(sqlalchemy.Integer, nullable=True)\n', (283, 318), False, 'import sqlalchemy\n'), ((333, 384), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.String'], {'nullable': '(True)'}), '(sqlalchemy.String, nullable=True)\n', (350, 384), False, 'import sqlalchemy\n'), ((398, 449), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.String'], {'nullable': '(True)'}), '(sqlalchemy.String, nullable=True)\n', (415, 449), False, 'import sqlalchemy\n'), ((474, 525), 'sqlalchemy.Column', 'sqlalchemy.Column', (['sqlalchemy.String'], {'nullable': '(True)'}), '(sqlalchemy.String, nullable=True)\n', (491, 525), False, 'import sqlalchemy\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# anited. publish - Python package with cli to turn markdown files into ebooks
# Copyright (c) 2014 <NAME>
#
# Distributed under the MIT License
# (license terms are at http://opensource.org/licenses/MIT).
"""Tests for `publish.output` module.
"""
# pylint: disable=missing-docstring,no-self-use,invalid-name,protected-access
# pylint: disable=too-few-public-methods
from typing import Iterable
from unittest.mock import patch, mock_open
import os
import pytest
from publish import __version__ as package_version
from publish.book import Book, Chapter
# noinspection PyProtectedMember
from publish.output import (SUPPORTED_EBOOKCONVERT_ATTRIBUTES,
_apply_template,
_yield_attributes_as_params,
_get_ebook_convert_params,
HtmlOutput,
NoChaptersFoundError,
EbookConvertOutput)
from publish.substitution import Substitution, SimpleSubstitution
from tests import get_test_book
def test_supported_ebookconvert_attrs():
assert len(SUPPORTED_EBOOKCONVERT_ATTRIBUTES) == 14
assert 'author_sort' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'authors' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'book_producer' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'comments' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'cover' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'isbn' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'language' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'pubdate' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'publisher' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'rating' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'series' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'series_index' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'tags' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
assert 'title' in SUPPORTED_EBOOKCONVERT_ATTRIBUTES
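# HTML skeleton the output module is expected to produce; the tests fill in the placeholders via str.format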
TEST_HTML_TEMPLATE = """<!DOCTYPE html>
<html lang="{language}">
<head>
<meta charset="UTF-8">
<meta name="generator" content="anited. publish v{package_version}" />
<title>{title}</title>
<style type="text/css">
{css}
</style>
</head>
<body>
{content}
</body>
</html>"""
# noinspection PyMissingOrEmptyDocstring
class HtmlOutputStub(HtmlOutput):
def make(self, book: Book, substitutions: Iterable[Substitution] = None):
pass
# noinspection PyMissingOrEmptyDocstring
class EbookConvertOutputStub(EbookConvertOutput):
def make(self, book: Book, substitutions: Iterable[Substitution] = None):
pass
class TestHtmlOutput:
def test_constructor(self):
output = HtmlOutputStub('a',
stylesheet='b',
force_publish=True)
assert output.path == 'a'
assert output.stylesheet == 'b'
assert output.force_publish
def test_constructor_default_values(self):
output = HtmlOutputStub('a')
assert output.path == 'a'
assert output.stylesheet is None
assert output.force_publish is not True
def test_get_chapters_to_be_published(self):
output = HtmlOutputStub('a')
chapters = [Chapter('1', publish=True),
Chapter('2', publish=False),
Chapter('3')] # defaults to True
expected = [chapters[0],
chapters[2]]
actual = output.get_chapters_to_be_published(chapters)
assert actual == expected
def test_get_chapters_to_be_published_force_publish_true(self):
output = HtmlOutputStub('a', force_publish=True)
chapters = [Chapter('1', publish=True),
Chapter('2', publish=False),
Chapter('3')] # defaults to True
expected = [chapters[0],
chapters[1],
chapters[2]]
actual = output.get_chapters_to_be_published(chapters)
assert actual == expected
def test_get_chapters_to_be_published_force_publish_false(self):
output = HtmlOutputStub('a', force_publish=False)
chapters = [Chapter('1', publish=True),
Chapter('2', publish=False),
Chapter('3')] # defaults to True
expected = [chapters[0],
chapters[2]]
actual = output.get_chapters_to_be_published(chapters)
assert actual == expected
def test_get_css(self):
with patch('builtins.open', mock_open(read_data='css')) as mock_file:
output = HtmlOutput('some.path', stylesheet='some.css')
actual = output._get_css()
expected = 'css'
assert actual == expected
mock_file.assert_called_once_with(os.path.join(os.getcwd(), 'some.css'), 'r')
def test_get_css_returns_empty_str_if_no_css_path(self):
output = HtmlOutput('some.path')
expected = ''
actual = output._get_css()
assert actual == expected
def test_make(self):
output = HtmlOutput('some.path')
book = Book('title')
substitution = SimpleSubstitution(old='a', new='b')
substitutions = [substitution]
with patch('builtins.open', mock_open(read_data='css'), create=True) as mock_file, \
patch.object(output, '_get_html_document') as mock_get_html_document:
mock_get_html_document.return_value = 'document'
output.make(book, substitutions)
mock_get_html_document.assert_called_once_with(book, substitutions)
mock_file.assert_called_once_with('some.path', 'w')
mock_file_handle = mock_file()
mock_file_handle.write.assert_called_once_with('document')
def test_make_without_substitutions(self):
output = HtmlOutput('some.path')
book = Book('title')
with patch('builtins.open', mock_open(read_data='css'), create=True) as mock_file, \
patch.object(output, '_get_html_document') as mock_get_html_document:
mock_get_html_document.return_value = 'document'
output.make(book)
mock_file.assert_called_once_with('some.path', 'w')
mock_file_handle = mock_file()
mock_file_handle.write.assert_called_once_with('document')
class TestEbookConvertOutput:
def test_constructor(self):
output = EbookConvertOutputStub('a',
stylesheet='b',
force_publish=True,
ebookconvert_params=['--param=value'])
assert output.path == 'a'
assert output.stylesheet == 'b'
assert output.force_publish
assert output.ebookconvert_params == ['--param=value']
def test_get_html_document():
title = 'Foo'
html_content = '<h1>This is the first file</h1>\n<p>With some content.</p>'
css = ''
language = 'en'
book = Book(title, language='en')
book.chapters.extend([Chapter('tests/resources/1.md')])
expected = TEST_HTML_TEMPLATE.format(
content=html_content,
title=title,
css=css,
language=language,
package_version=package_version)
actual = HtmlOutput('')._get_html_document(
book,
[SimpleSubstitution('text', 'content')]
)
assert actual == expected
def test_apply_template():
title = 'Foo'
html_content = '<p>Bar</p>'
css = 'p { font-style: italic }'
language = 'en'
expected = TEST_HTML_TEMPLATE.format(
content=html_content,
title=title,
css=css,
language=language,
package_version=package_version)
actual = _apply_template(
html_content=html_content,
title=title,
css=css,
language=language)
assert actual == expected
def test_yield_attributes_as_params_from_dict():
attributes = {attribute: attribute
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES}
expected = [f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES]
actual = list(_yield_attributes_as_params(attributes))
assert actual == expected
def test_yield_attributes_as_params_from_dict_omits_unsupported():
attributes = {attribute: attribute
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES}
attributes['unsupported'] = 'unsupported'
expected = [f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES]
actual = list(_yield_attributes_as_params(attributes))
assert actual == expected
def test_yield_attributes_as_params_from_object():
object_ = get_test_book()
expected = [f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES]
actual = list(_yield_attributes_as_params(object_))
assert actual == expected
def test_yield_attributes_as_params_from_object_omits_unsupported():
object_ = get_test_book()
object_.unsupported = 'unsupported'
expected = [f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES]
actual = list(_yield_attributes_as_params(object_))
assert actual == expected
def test_yield_attributes_as_params_from_dict_missing_supported_attribute_omits_attribute():
attributes = {attribute: attribute
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES[:-1]}
expected = [f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES[:-1]]
actual = list(_yield_attributes_as_params(attributes))
assert actual == expected
def test_yield_attributes_as_params_value_none_omits_attribute():
attributes = {attribute: attribute
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES[:-1]}
attributes[SUPPORTED_EBOOKCONVERT_ATTRIBUTES[-1]] = None
expected = [f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES[:-1]]
actual = list(_yield_attributes_as_params(attributes))
assert actual == expected
def test_get_html_content():
output = HtmlOutput('')
actual = output._get_html_content([Chapter('tests/resources/1.md'),
Chapter('tests/resources/2.md')],
[SimpleSubstitution('text', 'content')])
expected = '\n'.join(("<h1>This is the first file</h1>",
"<p>With some content.</p>",
"<h1>This is the second file</h1>",
"<p>With some more content.</p>"))
assert actual == expected
def test_get_markdown_content_no_chapters_raises_error():
output = HtmlOutput('')
with pytest.raises(NoChaptersFoundError):
output._get_markdown_content([])
def test_get_markdown_content_no_chapters_set_to_publish_raises_error():
output = HtmlOutput('')
with pytest.raises(NoChaptersFoundError):
output._get_markdown_content([Chapter(src='',
publish=False)])
def test_get_markdown_content_invalid_path_raises_error():
output = HtmlOutput('')
with pytest.raises(FileNotFoundError):
output._get_markdown_content([Chapter(src='',
publish=True)])
def test_get_markdown_content_joins_multiple_markdown_files():
output = HtmlOutput('')
actual = output._get_markdown_content([Chapter('tests/resources/1.md'),
Chapter('tests/resources/2.md')])
expected = '\n'.join(("# This is the first file",
"",
"With some text.",
"",
"# This is the second file",
"",
"With some more text."))
assert actual == expected
def test_get_markdown_content_omits_chapters_not_set_to_publish():
output = HtmlOutput('')
actual = output._get_markdown_content([Chapter('tests/resources/1.md'),
Chapter('tests/resources/2.md'),
Chapter('tests/resources/2.md',
publish=False)])
expected = '\n'.join(("# This is the first file",
"",
"With some text.",
"",
"# This is the second file",
"",
"With some more text."))
assert actual == expected
def test_get_ebook_convert_params_no_additional_params():
book = get_test_book()
input_path = 'input_path'
output_path = 'output_path'
actual = _get_ebook_convert_params(book, input_path, output_path)
expected = ['ebook-convert', input_path, output_path]
expected.extend([f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES])
assert actual == expected
def test_get_ebook_convert_params_additional_params():
book = get_test_book()
input_path = 'input_path'
output_path = 'output_path'
additional_params = ['--param1=value1', '--param2=value2']
actual = _get_ebook_convert_params(book, input_path, output_path, additional_params)
expected = ['ebook-convert', input_path, output_path]
expected.extend([f'--{attribute}={attribute}'
for attribute in SUPPORTED_EBOOKCONVERT_ATTRIBUTES])
expected.extend(additional_params)
assert actual == expected
| [
"publish.book.Book",
"publish.output._apply_template",
"publish.output._yield_attributes_as_params",
"unittest.mock.mock_open",
"publish.output.HtmlOutput",
"publish.book.Chapter",
"tests.get_test_book",
"os.getcwd",
"pytest.raises",
"publish.output._get_ebook_convert_params",
"unittest.mock.patch.object",
"publish.substitution.SimpleSubstitution"
] | [((6965, 6991), 'publish.book.Book', 'Book', (['title'], {'language': '"""en"""'}), "(title, language='en')\n", (6969, 6991), False, 'from publish.book import Book, Chapter\n'), ((7708, 7796), 'publish.output._apply_template', '_apply_template', ([], {'html_content': 'html_content', 'title': 'title', 'css': 'css', 'language': 'language'}), '(html_content=html_content, title=title, css=css, language=\n language)\n', (7723, 7796), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((8715, 8730), 'tests.get_test_book', 'get_test_book', ([], {}), '()\n', (8728, 8730), False, 'from tests import get_test_book\n'), ((9017, 9032), 'tests.get_test_book', 'get_test_book', ([], {}), '()\n', (9030, 9032), False, 'from tests import get_test_book\n'), ((10188, 10202), 'publish.output.HtmlOutput', 'HtmlOutput', (['""""""'], {}), "('')\n", (10198, 10202), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((10771, 10785), 'publish.output.HtmlOutput', 'HtmlOutput', (['""""""'], {}), "('')\n", (10781, 10785), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((10961, 10975), 'publish.output.HtmlOutput', 'HtmlOutput', (['""""""'], {}), "('')\n", (10971, 10975), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((11213, 11227), 'publish.output.HtmlOutput', 'HtmlOutput', (['""""""'], {}), "('')\n", (11223, 11227), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((11465, 11479), 'publish.output.HtmlOutput', 'HtmlOutput', (['""""""'], {}), "('')\n", (11475, 11479), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((12042, 12056), 'publish.output.HtmlOutput', 'HtmlOutput', (['""""""'], {}), "('')\n", (12052, 12056), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((12750, 12765), 'tests.get_test_book', 'get_test_book', ([], {}), '()\n', (12763, 12765), False, 'from tests import get_test_book\n'), ((12842, 12898), 'publish.output._get_ebook_convert_params', '_get_ebook_convert_params', (['book', 'input_path', 'output_path'], {}), '(book, input_path, output_path)\n', (12867, 12898), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((13181, 13196), 'tests.get_test_book', 'get_test_book', ([], {}), '()\n', (13194, 13196), False, 'from tests import get_test_book\n'), ((13336, 13411), 'publish.output._get_ebook_convert_params', '_get_ebook_convert_params', (['book', 'input_path', 'output_path', 'additional_params'], {}), 
'(book, input_path, output_path, additional_params)\n', (13361, 13411), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((4915, 4938), 'publish.output.HtmlOutput', 'HtmlOutput', (['"""some.path"""'], {}), "('some.path')\n", (4925, 4938), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((5075, 5098), 'publish.output.HtmlOutput', 'HtmlOutput', (['"""some.path"""'], {}), "('some.path')\n", (5085, 5098), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((5114, 5127), 'publish.book.Book', 'Book', (['"""title"""'], {}), "('title')\n", (5118, 5127), False, 'from publish.book import Book, Chapter\n'), ((5151, 5187), 'publish.substitution.SimpleSubstitution', 'SimpleSubstitution', ([], {'old': '"""a"""', 'new': '"""b"""'}), "(old='a', new='b')\n", (5169, 5187), False, 'from publish.substitution import Substitution, SimpleSubstitution\n'), ((5821, 5844), 'publish.output.HtmlOutput', 'HtmlOutput', (['"""some.path"""'], {}), "('some.path')\n", (5831, 5844), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((5860, 5873), 'publish.book.Book', 'Book', (['"""title"""'], {}), "('title')\n", (5864, 5873), False, 'from publish.book import Book, Chapter\n'), ((8148, 8187), 'publish.output._yield_attributes_as_params', '_yield_attributes_as_params', (['attributes'], {}), '(attributes)\n', (8175, 8187), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((8576, 8615), 'publish.output._yield_attributes_as_params', '_yield_attributes_as_params', (['attributes'], {}), '(attributes)\n', (8603, 8615), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((8863, 8899), 'publish.output._yield_attributes_as_params', '_yield_attributes_as_params', (['object_'], {}), '(object_)\n', (8890, 8899), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((9205, 9241), 'publish.output._yield_attributes_as_params', '_yield_attributes_as_params', (['object_'], {}), '(object_)\n', (9232, 9241), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((9620, 9659), 'publish.output._yield_attributes_as_params', '_yield_attributes_as_params', (['attributes'], {}), '(attributes)\n', (9647, 9659), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((10072, 10111), 'publish.output._yield_attributes_as_params', 
'_yield_attributes_as_params', (['attributes'], {}), '(attributes)\n', (10099, 10111), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((10795, 10830), 'pytest.raises', 'pytest.raises', (['NoChaptersFoundError'], {}), '(NoChaptersFoundError)\n', (10808, 10830), False, 'import pytest\n'), ((10985, 11020), 'pytest.raises', 'pytest.raises', (['NoChaptersFoundError'], {}), '(NoChaptersFoundError)\n', (10998, 11020), False, 'import pytest\n'), ((11237, 11269), 'pytest.raises', 'pytest.raises', (['FileNotFoundError'], {}), '(FileNotFoundError)\n', (11250, 11269), False, 'import pytest\n'), ((3260, 3286), 'publish.book.Chapter', 'Chapter', (['"""1"""'], {'publish': '(True)'}), "('1', publish=True)\n", (3267, 3286), False, 'from publish.book import Book, Chapter\n'), ((3308, 3335), 'publish.book.Chapter', 'Chapter', (['"""2"""'], {'publish': '(False)'}), "('2', publish=False)\n", (3315, 3335), False, 'from publish.book import Book, Chapter\n'), ((3357, 3369), 'publish.book.Chapter', 'Chapter', (['"""3"""'], {}), "('3')\n", (3364, 3369), False, 'from publish.book import Book, Chapter\n'), ((3702, 3728), 'publish.book.Chapter', 'Chapter', (['"""1"""'], {'publish': '(True)'}), "('1', publish=True)\n", (3709, 3728), False, 'from publish.book import Book, Chapter\n'), ((3750, 3777), 'publish.book.Chapter', 'Chapter', (['"""2"""'], {'publish': '(False)'}), "('2', publish=False)\n", (3757, 3777), False, 'from publish.book import Book, Chapter\n'), ((3799, 3811), 'publish.book.Chapter', 'Chapter', (['"""3"""'], {}), "('3')\n", (3806, 3811), False, 'from publish.book import Book, Chapter\n'), ((4179, 4205), 'publish.book.Chapter', 'Chapter', (['"""1"""'], {'publish': '(True)'}), "('1', publish=True)\n", (4186, 4205), False, 'from publish.book import Book, Chapter\n'), ((4227, 4254), 'publish.book.Chapter', 'Chapter', (['"""2"""'], {'publish': '(False)'}), "('2', publish=False)\n", (4234, 4254), False, 'from publish.book import Book, Chapter\n'), ((4276, 4288), 'publish.book.Chapter', 'Chapter', (['"""3"""'], {}), "('3')\n", (4283, 4288), False, 'from publish.book import Book, Chapter\n'), ((4603, 4649), 'publish.output.HtmlOutput', 'HtmlOutput', (['"""some.path"""'], {'stylesheet': '"""some.css"""'}), "('some.path', stylesheet='some.css')\n", (4613, 4649), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((5337, 5379), 'unittest.mock.patch.object', 'patch.object', (['output', '"""_get_html_document"""'], {}), "(output, '_get_html_document')\n", (5349, 5379), False, 'from unittest.mock import patch, mock_open\n'), ((5984, 6026), 'unittest.mock.patch.object', 'patch.object', (['output', '"""_get_html_document"""'], {}), "(output, '_get_html_document')\n", (5996, 6026), False, 'from unittest.mock import patch, mock_open\n'), ((7018, 7049), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/1.md"""'], {}), "('tests/resources/1.md')\n", (7025, 7049), False, 'from publish.book import Book, Chapter\n'), ((7245, 7259), 'publish.output.HtmlOutput', 'HtmlOutput', (['""""""'], {}), "('')\n", (7255, 7259), False, 'from publish.output import SUPPORTED_EBOOKCONVERT_ATTRIBUTES, _apply_template, _yield_attributes_as_params, _get_ebook_convert_params, HtmlOutput, NoChaptersFoundError, EbookConvertOutput\n'), ((7303, 
7340), 'publish.substitution.SimpleSubstitution', 'SimpleSubstitution', (['"""text"""', '"""content"""'], {}), "('text', 'content')\n", (7321, 7340), False, 'from publish.substitution import Substitution, SimpleSubstitution\n'), ((10242, 10273), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/1.md"""'], {}), "('tests/resources/1.md')\n", (10249, 10273), False, 'from publish.book import Book, Chapter\n'), ((10314, 10345), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/2.md"""'], {}), "('tests/resources/2.md')\n", (10321, 10345), False, 'from publish.book import Book, Chapter\n'), ((10387, 10424), 'publish.substitution.SimpleSubstitution', 'SimpleSubstitution', (['"""text"""', '"""content"""'], {}), "('text', 'content')\n", (10405, 10424), False, 'from publish.substitution import Substitution, SimpleSubstitution\n'), ((11523, 11554), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/1.md"""'], {}), "('tests/resources/1.md')\n", (11530, 11554), False, 'from publish.book import Book, Chapter\n'), ((11599, 11630), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/2.md"""'], {}), "('tests/resources/2.md')\n", (11606, 11630), False, 'from publish.book import Book, Chapter\n'), ((12100, 12131), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/1.md"""'], {}), "('tests/resources/1.md')\n", (12107, 12131), False, 'from publish.book import Book, Chapter\n'), ((12176, 12207), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/2.md"""'], {}), "('tests/resources/2.md')\n", (12183, 12207), False, 'from publish.book import Book, Chapter\n'), ((12252, 12298), 'publish.book.Chapter', 'Chapter', (['"""tests/resources/2.md"""'], {'publish': '(False)'}), "('tests/resources/2.md', publish=False)\n", (12259, 12298), False, 'from publish.book import Book, Chapter\n'), ((4540, 4566), 'unittest.mock.mock_open', 'mock_open', ([], {'read_data': '"""css"""'}), "(read_data='css')\n", (4549, 4566), False, 'from unittest.mock import patch, mock_open\n'), ((4805, 4816), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4814, 4816), False, 'import os\n'), ((5264, 5290), 'unittest.mock.mock_open', 'mock_open', ([], {'read_data': '"""css"""'}), "(read_data='css')\n", (5273, 5290), False, 'from unittest.mock import patch, mock_open\n'), ((5911, 5937), 'unittest.mock.mock_open', 'mock_open', ([], {'read_data': '"""css"""'}), "(read_data='css')\n", (5920, 5937), False, 'from unittest.mock import patch, mock_open\n'), ((11060, 11090), 'publish.book.Chapter', 'Chapter', ([], {'src': '""""""', 'publish': '(False)'}), "(src='', publish=False)\n", (11067, 11090), False, 'from publish.book import Book, Chapter\n'), ((11309, 11338), 'publish.book.Chapter', 'Chapter', ([], {'src': '""""""', 'publish': '(True)'}), "(src='', publish=True)\n", (11316, 11338), False, 'from publish.book import Book, Chapter\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
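# make detention_date nullable and sentence_insert optional with a 4096-character cap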
class Migration(migrations.Migration):
dependencies = [
('detention_notifier', '0004_auto_20160229_1131'),
]
operations = [
migrations.AlterField(
model_name='detention',
name='detention_date',
field=models.DateField(null=True),
),
migrations.AlterField(
model_name='offense',
name='sentence_insert',
field=models.CharField(blank=True, max_length=4096),
),
]
| [
"django.db.models.DateField",
"django.db.models.CharField"
] | [((374, 401), 'django.db.models.DateField', 'models.DateField', ([], {'null': '(True)'}), '(null=True)\n', (390, 401), False, 'from django.db import migrations, models\n'), ((533, 578), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(4096)'}), '(blank=True, max_length=4096)\n', (549, 578), False, 'from django.db import migrations, models\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from django.core.exceptions import ValidationError
from django.http import JsonResponse, Http404
# Create your views here.
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from .models import Agenda, Section, Topic
from .utils.agenda_json import agenda_json
from .utils.PositionFixing import fixPosition
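# JSON endpoints for creating, reading, updating, reordering and deleting agendas, sections and topics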
@csrf_exempt
def create_agenda(request):
json_data = json.loads(request.body)
if "date" in json_data and "lc" in json_data:
try:
a = Agenda(date=json_data["date"], lc=json_data["lc"])
a.save()
return JsonResponse({"status": 200, "id": a.id})
except ValidationError:
return JsonResponse({"status": 400, "msg": "Wrong format"})
else:
return JsonResponse({"status": 400, "msg": "Not all fields are given"})
def getAgendaByID(request, agenda_id: int):
agenda = get_object_or_404(Agenda, pk=agenda_id)
return JsonResponse({"status": 200, "agenda": agenda_json(agenda)})
@csrf_exempt
def createSection(request):
data = json.loads(request.body)
if "agenda_id" in data and "section_name" in data:
agenda = get_object_or_404(Agenda, pk=data["agenda_id"])
try:
if "position" not in data:
agenda.section_set.create(section_name=data["section_name"], position=agenda.section_set.count())
            else:
                # shift every section at or after the insert position one step down
                for offset, s in enumerate(list(agenda.section_set.all().order_by("position"))[data["position"]:]):
                    s.position = data["position"] + offset + 1
                    s.save()
                agenda.section_set.create(section_name=data["section_name"], position=data["position"])
agenda.save()
except ValidationError:
return JsonResponse({"status": 400, "msg": "wrong format"})
return JsonResponse({"status": 200, "agenda": agenda_json(agenda)})
else:
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
@csrf_exempt
def createTopic(request):
data = json.loads(request.body)
if "agenda_id" in data and "section_position" in data and "topic_position" in data and "topic_json" in data:
agenda = get_object_or_404(Agenda, pk=data["agenda_id"])
try:
section = agenda.section_set.get(position=data["section_position"])
except Section.DoesNotExist:
raise Http404("Section does not exist")
try:
for t in list(section.topic_set.all().order_by("position"))[data["topic_position"]:]:
t.position += 1
t.save()
topic = data["topic_json"]
section.topic_set.create(topic_name=topic["topic_name"], votable=topic["votable"],
yes_no_vote=topic["yes_no_vote"], open_ballot=topic["open_ballot"],
possible_answers=topic["possible_answers"])
section.save()
agenda.save()
except:
JsonResponse({"status": 400, "msg": "you didn't sent all the necessary information"})
return JsonResponse({"status": 200, "agenda": agenda_json(agenda)})
else:
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
@csrf_exempt
def updateAgenda(request):
data = json.loads(request.body)
if "agenda_id" in data and "new_agenda" in data:
agenda = get_object_or_404(Agenda, pk=data["agenda_id"])
newAgenda=data["new_agenda"]
try:
if "lc" in newAgenda:
agenda.lc = newAgenda["lc"]
if "date" in newAgenda:
agenda.date = newAgenda["date"]
agenda.save()
except:
return JsonResponse({"status": 400, "msg": "wrong format"})
return JsonResponse({"status":200,"agenda":agenda_json(agenda)})
else:
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
@csrf_exempt
def updateSection(request):
data = json.loads(request.body)
if "agenda_id" in data and "section_position" in data and "section_json" in data:
agenda = get_object_or_404(Agenda, pk=data["agenda_id"])
newSection = data["section_json"]
try:
if "section_name" in newSection:
section = get_object_or_404(Section,agenda=data["agenda_id"],position=data["section_position"])
section.section_name = newSection["section_name"]
section.save()
if "position" in newSection:
section = get_object_or_404(Section,agenda=data["agenda_id"],position=data["section_position"])
old = list(agenda.section_set.all().order_by("position"))
element = old[data["section_position"]]
newList = []
index = 0
for s in range(len(old)+1):
if s == newSection["position"]:
newList.append(element)
continue
newList.append(old[index])
index += 1
fixPosition(newList)
section.position = newSection["position"]
section.save()
agenda.save()
except:
return JsonResponse({"status": 400, "msg": "wrong format"})
return JsonResponse({"status": 200, "agenda": agenda_json(agenda)})
else:
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
@csrf_exempt
def updateTopic(request):
data = json.loads(request.body)
if "agenda_id" in data and "section_position" in data and "topic_position" in data:
agenda = get_object_or_404(Agenda, pk=data["agenda_id"])
section=get_object_or_404(Section,agenda=data["agenda_id"],position=data["section_position"])
topic=section.topic_set.get(position=data["topic_position"])
newTopic=data["topic_json"]
try:
if "topic_name" in newTopic:
topic.topic_name=newTopic["topic_name"]
if "votable" in newTopic:
topic.votable=newTopic["votable"]
if "yes_no_vote" in newTopic:
topic.yes_no_vote=newTopic["yes_no_vote"]
if "open_ballot" in newTopic:
topic.open_ballot=newTopic["open_ballot"]
if "possible_answers" in newTopic:
topic.set_answers(newTopic["possible_answers"])
topic.save()
if "position" in newTopic:
#Todo: make this a function and use it in everything
old = list(section.topic_set.all().order_by("position"))
element = old[data["topic_position"]]
newList = []
index = 0
for s in range(len(old)+1):
if s == newTopic["position"]:
newList.append(element)
continue
newList.append(old[index])
index += 1
fixPosition(newList)
except:
return JsonResponse({"status": 400, "msg": "wrong format"})
return JsonResponse({"status": 200, "agenda": agenda_json(agenda)})
else:
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
@csrf_exempt
def deleteAgenda(request):
data = json.loads(request.body)
if "agenda_id" in data:
agenda=get_object_or_404(Agenda,pk=data["agenda_id"])
try:
agenda.delete()
except:
return JsonResponse({"status":500,"msg":"Couldnt delete it due to internal problem"})
return JsonResponse({"status":200})
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
@csrf_exempt
def deleteSection(request):
data = json.loads(request.body)
if "agenda_id" in data and "section_position" in data:
agenda=get_object_or_404(Agenda,pk=data["agenda_id"])
section=get_object_or_404(Section,agenda=agenda.id,position=data["section_position"])
try:
section.delete()
fixPosition(agenda.section_set.all().order_by("position"))
except:
return JsonResponse({"status":500,"msg":"Couldnt delete it due to internal problem"})
return JsonResponse({"status":200})
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
@csrf_exempt
def deleteTopic(request):
data = json.loads(request.body)
if "agenda_id" in data and "section_position" in data and "topic_position" in data:
agenda=get_object_or_404(Agenda,pk=data["agenda_id"])
section=get_object_or_404(Section,agenda=agenda.id, position=data["section_position"])
topic=get_object_or_404(Topic,section=section.id,position=data["topic_position"])
try:
topic.delete()
fixPosition(section.topic_set.all().order_by("position"))
except:
return JsonResponse({"status":500,"msg":"Couldnt delete it due to internal problem"})
return JsonResponse({"status":200})
return JsonResponse({"status":400,"msg":"you didn't sent all the necessary information"})
| [
"json.loads",
"django.shortcuts.get_object_or_404",
"django.http.Http404",
"django.http.JsonResponse"
] | [((493, 517), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (503, 517), False, 'import json\n'), ((983, 1022), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': 'agenda_id'}), '(Agenda, pk=agenda_id)\n', (1000, 1022), False, 'from django.shortcuts import get_object_or_404\n'), ((1149, 1173), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (1159, 1173), False, 'import json\n'), ((2331, 2355), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (2341, 2355), False, 'import json\n'), ((3607, 3631), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (3617, 3631), False, 'import json\n'), ((4311, 4335), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (4321, 4335), False, 'import json\n'), ((5865, 5889), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (5875, 5889), False, 'import json\n'), ((7690, 7714), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (7700, 7714), False, 'import json\n'), ((8015, 8104), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (8027, 8104), False, 'from django.http import JsonResponse, Http404\n'), ((8152, 8176), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (8162, 8176), False, 'import json\n'), ((8674, 8763), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (8686, 8763), False, 'from django.http import JsonResponse, Http404\n'), ((8809, 8833), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (8819, 8833), False, 'import json\n'), ((9448, 9537), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (9460, 9537), False, 'from django.http import JsonResponse, Http404\n'), ((859, 923), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 200, 'id': a.id}"], {}), "({'status': 200, 'id': a.id})\n", (871, 923), False, 'from django.http import JsonResponse, Http404\n'), ((1246, 1293), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (1263, 1293), False, 'from django.shortcuts import get_object_or_404\n'), ((2196, 2285), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (2208, 2285), False, 'from django.http import JsonResponse, Http404\n'), ((2486, 2533), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (2503, 2533), False, 'from django.shortcuts import get_object_or_404\n'), ((3472, 3561), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (3484, 3561), False, 'from django.http import JsonResponse, Http404\n'), ((3702, 3749), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (3719, 3749), False, 'from django.shortcuts import get_object_or_404\n'), ((4174, 4263), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (4186, 4263), False, 'from django.http import JsonResponse, Http404\n'), ((4439, 4486), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (4456, 4486), False, 'from django.shortcuts import get_object_or_404\n'), ((5730, 5819), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (5742, 5819), False, 'from django.http import JsonResponse, Http404\n'), ((5995, 6042), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (6012, 6042), False, 'from django.shortcuts import get_object_or_404\n'), ((6059, 6151), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Section'], {'agenda': "data['agenda_id']", 'position': "data['section_position']"}), "(Section, agenda=data['agenda_id'], position=data[\n 'section_position'])\n", (6076, 6151), False, 'from django.shortcuts import get_object_or_404\n'), ((7554, 7643), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (7566, 7643), False, 'from django.http import JsonResponse, Http404\n'), ((7758, 7805), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (7775, 7805), False, 'from django.shortcuts import get_object_or_404\n'), ((7975, 8004), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 200}"], {}), "({'status': 200})\n", (7987, 8004), False, 'from django.http import JsonResponse, Http404\n'), ((8251, 8298), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (8268, 8298), False, 'from django.shortcuts import get_object_or_404\n'), ((8314, 8393), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Section'], {'agenda': 'agenda.id', 'position': "data['section_position']"}), "(Section, agenda=agenda.id, position=data['section_position'])\n", (8331, 8393), False, 'from django.shortcuts import get_object_or_404\n'), ((8634, 8663), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 200}"], {}), "({'status': 200})\n", (8646, 8663), False, 'from django.http import JsonResponse, Http404\n'), ((8937, 8984), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Agenda'], {'pk': "data['agenda_id']"}), "(Agenda, pk=data['agenda_id'])\n", (8954, 8984), False, 'from django.shortcuts import get_object_or_404\n'), ((9000, 9079), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Section'], {'agenda': 'agenda.id', 'position': "data['section_position']"}), "(Section, agenda=agenda.id, position=data['section_position'])\n", (9017, 9079), False, 'from django.shortcuts import get_object_or_404\n'), ((9093, 9170), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Topic'], {'section': 'section.id', 'position': "data['topic_position']"}), "(Topic, section=section.id, position=data['topic_position'])\n", (9110, 9170), False, 'from django.shortcuts import get_object_or_404\n'), ((9408, 9437), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 200}"], {}), "({'status': 200})\n", (9420, 9437), False, 'from django.http import JsonResponse, Http404\n'), ((688, 729), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 200, 'id': a.id}"], {}), "({'status': 200, 'id': a.id})\n", (700, 729), False, 'from django.http import JsonResponse, Http404\n'), ((781, 833), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 400, 'msg': 'Wrong format'}"], {}), "({'status': 400, 'msg': 'Wrong format'})\n", (793, 833), False, 'from django.http import JsonResponse, Http404\n'), ((2042, 2094), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 400, 'msg': 'wrong format'}"], {}), "({'status': 400, 'msg': 'wrong format'})\n", (2054, 2094), False, 'from django.http import JsonResponse, Http404\n'), ((2682, 2715), 'django.http.Http404', 'Http404', (['"""Section does not exist"""'], {}), "('Section does not exist')\n", (2689, 2715), False, 'from django.http import JsonResponse, Http404\n'), ((3285, 3374), 'django.http.JsonResponse', 'JsonResponse', (['{\'status\': 400, \'msg\': "you didn\'t sent all the necessary information"}'], {}), '({\'status\': 400, \'msg\':\n "you didn\'t sent all the necessary information"})\n', (3297, 3374), False, 'from django.http import JsonResponse, Http404\n'), ((4023, 4075), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 400, 'msg': 'wrong format'}"], {}), "({'status': 400, 'msg': 'wrong format'})\n", (4035, 4075), False, 'from django.http import JsonResponse, Http404\n'), ((4613, 4705), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Section'], {'agenda': "data['agenda_id']", 'position': "data['section_position']"}), "(Section, agenda=data['agenda_id'], position=data[\n 'section_position'])\n", (4630, 4705), False, 'from django.shortcuts import get_object_or_404\n'), ((4863, 4955), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Section'], {'agenda': "data['agenda_id']", 'position': "data['section_position']"}), "(Section, agenda=data['agenda_id'], position=data[\n 'section_position'])\n", (4880, 4955), False, 'from django.shortcuts import get_object_or_404\n'), ((5576, 5628), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 400, 'msg': 'wrong format'}"], {}), "({'status': 400, 'msg': 'wrong format'})\n", (5588, 5628), False, 'from django.http import JsonResponse, Http404\n'), ((7400, 7452), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 400, 'msg': 'wrong format'}"], {}), "({'status': 400, 'msg': 'wrong format'})\n", (7412, 7452), False, 'from django.http import JsonResponse, Http404\n'), ((7881, 7966), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 500, 'msg': 'Couldnt delete it due to internal problem'}"], {}), "({'status': 500, 'msg':\n 'Couldnt delete it due to internal problem'})\n", (7893, 7966), False, 'from django.http import JsonResponse, Http404\n'), ((8540, 8625), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 500, 'msg': 'Couldnt delete it due to internal problem'}"], {}), "({'status': 500, 'msg':\n 'Couldnt delete it due to internal problem'})\n", (8552, 8625), False, 'from django.http import JsonResponse, Http404\n'), ((9314, 9399), 'django.http.JsonResponse', 'JsonResponse', (["{'status': 500, 'msg': 'Couldnt delete it due to internal problem'}"], {}), "({'status': 500, 'msg':\n 'Couldnt delete it due to internal problem'})\n", (9326, 9399), False, 'from django.http import JsonResponse, Http404\n')]
import os
ls=["python main.py --configs configs/train_mosmed_unetplusplus_timm-regnetx_002_fold0_coarse_dropout.yml",
"python main.py --configs configs/train_mosmed_unetplusplus_timm-regnetx_002_fold1_coarse_dropout.yml",
"python main.py --configs configs/train_mosmed_unetplusplus_timm-regnetx_002_fold2_coarse_dropout.yml",
"python main.py --configs configs/train_mosmed_unetplusplus_timm-regnetx_002_fold3_coarse_dropout.yml",
"python main.py --configs configs/train_mosmed_unetplusplus_timm-regnetx_002_fold4_coarse_dropout.yml",
]
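# run each fold's training configuration one after another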
for l in ls:
os.system(l) | [
"os.system"
] | [((553, 565), 'os.system', 'os.system', (['l'], {}), '(l)\n', (562, 565), False, 'import os\n')] |
#!/usr/bin/env python
from distutils.core import setup, Command
from skos import __version__
class TestCommand(Command):
"""
Custom distutils command for running the test suite
"""
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
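        # discover every test module under ./test and run it with verbose output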
import os.path
from test import unittest
test_dir = os.path.join(os.path.dirname(__file__), 'test')
package_suite = unittest.TestLoader().discover(test_dir)
unittest.TextTestRunner(verbosity=2).run(package_suite)
setup(name='python-skos',
version=__version__,
description='A basic implementation of some core elements of the SKOS object model',
author='<NAME>',
author_email='<EMAIL>',
url='http://github.com/geo-data/python-skos',
license='BSD',
py_modules=['skos'],
cmdclass = { 'test': TestCommand }
)
| [
"test.unittest.TextTestRunner",
"test.unittest.TestLoader",
"distutils.core.setup"
] | [((586, 891), 'distutils.core.setup', 'setup', ([], {'name': '"""python-skos"""', 'version': '__version__', 'description': '"""A basic implementation of some core elements of the SKOS object model"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""http://github.com/geo-data/python-skos"""', 'license': '"""BSD"""', 'py_modules': "['skos']", 'cmdclass': "{'test': TestCommand}"}), "(name='python-skos', version=__version__, description=\n 'A basic implementation of some core elements of the SKOS object model',\n author='<NAME>', author_email='<EMAIL>', url=\n 'http://github.com/geo-data/python-skos', license='BSD', py_modules=[\n 'skos'], cmdclass={'test': TestCommand})\n", (591, 891), False, 'from distutils.core import setup, Command\n'), ((480, 501), 'test.unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (499, 501), False, 'from test import unittest\n'), ((529, 565), 'test.unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (552, 565), False, 'from test import unittest\n')] |
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
class NotePostForm(forms.Form):
content = forms.CharField(
label=_("Content"),
min_length=2,
widget=forms.Textarea(),
required=True,
)
| [
"django.forms.Textarea",
"django.utils.translation.ugettext_lazy"
] | [((211, 223), 'django.utils.translation.ugettext_lazy', '_', (['"""Content"""'], {}), "('Content')\n", (212, 223), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((262, 278), 'django.forms.Textarea', 'forms.Textarea', ([], {}), '()\n', (276, 278), False, 'from django import forms\n')] |
import os, sys
from interop import interop,interop2,sign_separate_interop
from multiprocessing import Pool
from missing_cds import missing_cds
from rev_interop import fasta_op
# program to process operon_prediction file from molquest to give initial transcription units
def interoper(org,ncbid):
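    # run the operon-prediction pipeline stages in order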
interop(org)
interop2(org)
sign_separate_interop(org)
missing_cds(org)
fasta_op(org,ncbid)
# print("done")
| [
"interop.sign_separate_interop",
"interop.interop2",
"interop.interop",
"rev_interop.fasta_op",
"missing_cds.missing_cds"
] | [((298, 310), 'interop.interop', 'interop', (['org'], {}), '(org)\n', (305, 310), False, 'from interop import interop, interop2, sign_separate_interop\n'), ((312, 325), 'interop.interop2', 'interop2', (['org'], {}), '(org)\n', (320, 325), False, 'from interop import interop, interop2, sign_separate_interop\n'), ((327, 353), 'interop.sign_separate_interop', 'sign_separate_interop', (['org'], {}), '(org)\n', (348, 353), False, 'from interop import interop, interop2, sign_separate_interop\n'), ((355, 371), 'missing_cds.missing_cds', 'missing_cds', (['org'], {}), '(org)\n', (366, 371), False, 'from missing_cds import missing_cds\n'), ((373, 393), 'rev_interop.fasta_op', 'fasta_op', (['org', 'ncbid'], {}), '(org, ncbid)\n', (381, 393), False, 'from rev_interop import fasta_op\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_cli
"""
import unittest
from mongoengine.connection import register_connection
from vulyk import settings
class BaseTest(unittest.TestCase):
DB_NAME = 'vulyk_test'
MONGO_URI = 'mongodb://localhost:27017/'
# override to run against test DB during tests
settings.MONGODB_SETTINGS['DB'] = DB_NAME
# allows us to run selected tests w/o need to launch the whole bunch
register_connection('default', name=DB_NAME, host=MONGO_URI)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"mongoengine.connection.register_connection"
] | [((447, 507), 'mongoengine.connection.register_connection', 'register_connection', (['"""default"""'], {'name': 'DB_NAME', 'host': 'MONGO_URI'}), "('default', name=DB_NAME, host=MONGO_URI)\n", (466, 507), False, 'from mongoengine.connection import register_connection\n'), ((541, 556), 'unittest.main', 'unittest.main', ([], {}), '()\n', (554, 556), False, 'import unittest\n')] |
# agent.py
# --------
import util
def scoreEvaluationFn(gameState, player):
'''
This default evaluation function just returns the score of the state.
'''
enemyScore, playerScore = gameState.getScore(player)
return playerScore - enemyScore
def betterEvaluationFn(gameState, player):
'''
Insane piece killing evaluation function
DESCRIPTION:
< Considering: Mobility, Number of Doubled, Isolated Pawns and Pinned Pieces,
King checked, Queen Trapped >
Pawn Structure:
- Penalise doubled, backward and blocked pawns.
- Encourage pawn advancement where adequately defended.
- Encourage control of the centre of the board.
Piece Placement:
- Encourage knights to occupy the centre of the board.
- Encourage bishops to occupy principal diagonals.
- Encourage queens and rooks to defend each other and attack.
- Encourage 7th rank attacks for rooks.
Passed Pawns:
- These deserve a special treatment as they are so important.
- Check for safety from opposing king and enemy pieces.
- Test pawn structure for weaknesses, such as hidden passed pawns.
- Add enormous incentives for passed pawns near promotion.
King Safety
- Encourage the king to stay to the corner in the middlegame.
- Try to retain an effective pawn shield.
- Try to stop enemy pieces from getting near to the king.
'''
mobility = len(player.getLegalActions(gameState))
enemyScore, playerScore = gameState.getScore(player)
pawns = [piece for piece in player.pieces if piece.toString() == "P"]
actions = player.getLegalActions(gameState)
NumOfDoubledPawns = 0
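    # count pawns with a friendly pawn directly ahead of them on the same file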
for pawn in pawns:
if player.color == "White":
if (pawn.pos[0], pawn.pos[1]+1) in [pawn.pos for pawn in pawns]:
NumOfDoubledPawns += 1
else:
if (pawn.pos[0], pawn.pos[1]-1) in [pawn.pos for pawn in pawns]:
NumOfDoubledPawns += 1
NumOfIsolatedPawns = 0
for pawn in pawns:
if util.computeMinDistFromOtherPieces(pawn, player.pieces) > 4:
NumOfIsolatedPawns += 1
NumOfPinnedPieces = gameState.pinnedPieces
centralControl = 0 # checking control over the coordinates: (2,3) (3,3) (4,3) (5,3) (2,4) (3,4) (4,4) (5,4)
central_coords = [(2,3),(3,3),(4,3),(5,3),(2,4),(3,4),(4,4),(5,4)]
pieces_pos = [piece.pos for piece in player.pieces]
actions_pos = [action.newPos for action in actions]
for coord in central_coords:
if coord in pieces_pos or coord in actions_pos:
centralControl += 1
    score = 10 * mobility - 20 * NumOfPinnedPieces - 5 * (NumOfIsolatedPawns + NumOfDoubledPawns) + 100 * (playerScore - enemyScore) + 50 * centralControl
    return score
class Agent(object):
"""An abstract class for AlphaBeta and Expectimax agents"""
def __init__(self, player, enemy, evalFn=scoreEvaluationFn, depth="2"):
super(Agent, self).__init__()
self.player = player
self.color = player.color
self.enemy = enemy
self.evaluationFunction = evalFn
self.depth = int(depth)
def getAction(self, args):
util.raiseNotDefined()
class AlphaBetAgent(Agent):
def getAction(self, gameState):
"""
Returns the minimax action using self.depth and self.evaluationFunction
"""
"*** YOUR CODE HERE ***"
value, index = self.max_value(gameState, self.depth, float('-inf'), float('inf'))
return self.player.getLegalActions(gameState)[index]
def max_value(self, gameState, curDepth, alpha, beta):
v = float('-inf')
# print "Max node"
# print "Current Depth:", curDepth
legalActions = self.player.getLegalActions(gameState)
counter = 0
# Check if this is an end state and whether the depth has been reached
if len(legalActions) == 0 or curDepth == 0:
# print "Returns: ", self.evaluationFunction(gameState, self.player)
return self.evaluationFunction(gameState, self.player), None
for i, action in enumerate(legalActions):
successor = gameState.getSuccessor(action)
# Recurse if depth has not been reached
newv = max(v, self.min_value(successor, curDepth, alpha, beta))
# keep track of the index of the best action
if newv != v: counter = i
v = newv
if v > beta: return v, counter # pruning
alpha = max(alpha, v)
return v, counter
def min_value(self, gameState, curDepth, alpha, beta):
v = float('inf')
# print "Min Node"
# print "Current Depth:", curDepth
legalActions = self.player.getLegalActions(gameState)
# Check if this is an end state
if len(legalActions) == 0:
# print "Returns: ", self.evaluationFunction(gameState, self.enemy)
return self.evaluationFunction(gameState, self.enemy)
for action in legalActions:
successor = gameState.getSuccessor(action)
# Switch to MAX agent
v = min(v, self.max_value(successor, curDepth-1, alpha, beta)[0])
if v < alpha: return v # pruning
beta = min(beta, v)
return v
class ExpectimaxAgent(Agent):
"""
A simple Expectimax Agent
"""
def getAction(self, gameState):
value, index = self.max_value(gameState, self.depth)
return self.player.getLegalActions(gameState)[index]
def max_value(self, gameState, curDepth):
legalActions = self.player.getLegalActions(gameState)
counter = 0
v = float('-inf')
if len(legalActions) == 0 or curDepth == 0:
return self.evaluationFunction(gameState, self.player), None
for i, action in enumerate(legalActions):
successor = gameState.getSuccessor(action)
newv = max(v, self.expect_value(successor, curDepth))
# keep track of the index of the best action
if newv != v: counter = i
v = newv
return v, counter
def expect_value(self, gameState, curDepth):
legalActions = self.player.getLegalActions(gameState)
total = 0
if len(legalActions) == 0:
return self.evaluationFunction(gameState, self.enemy)
for action in legalActions:
successor = gameState.getSuccessor(action)
# Switch to MAX agent
total = total + self.max_value(successor, curDepth-1)[0]
return ( float(total) / len(legalActions) )
| [
"util.raiseNotDefined",
"util.computeMinDistFromOtherPieces"
] | [((2928, 2950), 'util.raiseNotDefined', 'util.raiseNotDefined', ([], {}), '()\n', (2948, 2950), False, 'import util\n'), ((1889, 1944), 'util.computeMinDistFromOtherPieces', 'util.computeMinDistFromOtherPieces', (['pawn', 'player.pieces'], {}), '(pawn, player.pieces)\n', (1923, 1944), False, 'import util\n')] |
from PyDictionary import PyDictionary
from typing import Any
def get_dictionary_response(content, bot_handler: Any) -> str:
words = content.lower().split()
dictionary = PyDictionary()
res = dictionary.meaning(words[1])
if res is not None:
res = res['Noun']
ans = ""
for i in res:
ans = ans + i + "\n\n"
return ans
return "Unable to find meaning :("
| [
"PyDictionary.PyDictionary"
] | [((178, 192), 'PyDictionary.PyDictionary', 'PyDictionary', ([], {}), '()\n', (190, 192), False, 'from PyDictionary import PyDictionary\n')] |
import csv
from django.utils import timezone
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint, FeatureVector
def get_mapping(header):
keys = ('Name', 'Options', 'Occupied', 'HOMO', 'Virtual', 'LUMO',
'HomoOrbital', 'Dipole', 'Energy', 'ExcitationEnergy1',
'BandGap', 'Excited', 'Time')
mapping = {x: None for x in keys}
cleaned = [x.split('(')[0].strip() for x in header]
for j, value in enumerate(cleaned):
if value in mapping:
mapping[value] = j
duplicates = (
('HOMO', 'Occupied'),
('LUMO', 'Virtual'),
('ExcitationEnergy1', 'Excited', 'BandGap')
)
for groups in duplicates:
if all(mapping[x] is not None for x in groups):
first = mapping[groups[0]]
if any(first != mapping[x] for x in groups[1:]):
raise ValueError('The mapping values do not match.')
else:
values = [mapping[x] for x in groups if mapping[x] is not None]
if not len(values):
continue
for x in groups:
mapping[x] = values[0]
return mapping
def main(csvfile):
# TODO use Pandas
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
feature_vectors = []
idxs = set()
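    # indices of rows whose decay feature vector was computed; used below to build the many-to-many links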
names = set()
preexist = set(
FeatureVector.objects.all().values_list("exact_name", flat=True))
now = timezone.now()
count = 0
for i, row in enumerate(reader):
if not i:
mapping = get_mapping(row)
continue
if row == [] or len(row) < max(mapping.values()):
continue
try:
try:
exact_name = get_exact_name(row[mapping["Name"]])
try:
decay_feature = get_decay_feature_vector(exact_name)
feature_vector = True
if exact_name not in names and exact_name not in preexist:
temp = FeatureVector(
exact_name=exact_name,
type=FeatureVector.DECAY,
vector=decay_feature,
created=now)
temp.clean_fields()
feature_vectors.append(temp)
names.add(exact_name)
if len(feature_vectors) > 150:
FeatureVector.objects.bulk_create(feature_vectors)
feature_vectors = []
except Exception:
feature_vector = None
except Exception:
feature_vector = None
exact_name = None
band_gap = row[mapping["BandGap"]]
data = {
"name": row[mapping["Name"]],
"options": row[mapping["Options"]],
"homo": row[mapping["HOMO"]],
"lumo": row[mapping["LUMO"]],
"homo_orbital": row[mapping["HomoOrbital"]],
"dipole": row[mapping["Dipole"]],
"energy": row[mapping["Energy"]],
"band_gap": band_gap if band_gap != '---' else None,
"exact_name": exact_name,
"created": now,
}
point = DataPoint(**data)
point.clean_fields()
points.append(point)
if len(points) > 50:
DataPoint.objects.bulk_create(points)
points = []
if feature_vector is not None:
idxs.add(count)
count += 1
except Exception:
pass
DataPoint.objects.bulk_create(points)
FeatureVector.objects.bulk_create(feature_vectors)
Through = DataPoint.vectors.through
temp = DataPoint.objects.filter(
created=now).values_list("pk", "exact_name")
temp2 = FeatureVector.objects.all().values_list("exact_name", "pk")
groups = dict(temp2)
final = []
for i, (pk, name) in enumerate(temp):
if i in idxs:
final.append(
Through(datapoint_id=pk, featurevector_id=groups[name]))
if len(final) > 200:
Through.objects.bulk_create(final)
final = []
Through.objects.bulk_create(final)
return count
| [
"chemtools.mol_name.get_exact_name",
"models.DataPoint.objects.bulk_create",
"models.FeatureVector.objects.all",
"models.FeatureVector.objects.bulk_create",
"chemtools.ml.get_decay_feature_vector",
"django.utils.timezone.now",
"models.DataPoint.objects.filter",
"models.DataPoint",
"csv.reader",
"models.FeatureVector"
] | [((1306, 1355), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""', 'quotechar': '"""\\""""'}), '(csvfile, delimiter=\',\', quotechar=\'"\')\n', (1316, 1355), False, 'import csv\n'), ((1539, 1553), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1551, 1553), False, 'from django.utils import timezone\n'), ((3767, 3804), 'models.DataPoint.objects.bulk_create', 'DataPoint.objects.bulk_create', (['points'], {}), '(points)\n', (3796, 3804), False, 'from models import DataPoint, FeatureVector\n'), ((3809, 3859), 'models.FeatureVector.objects.bulk_create', 'FeatureVector.objects.bulk_create', (['feature_vectors'], {}), '(feature_vectors)\n', (3842, 3859), False, 'from models import DataPoint, FeatureVector\n'), ((3421, 3438), 'models.DataPoint', 'DataPoint', ([], {}), '(**data)\n', (3430, 3438), False, 'from models import DataPoint, FeatureVector\n'), ((3913, 3950), 'models.DataPoint.objects.filter', 'DataPoint.objects.filter', ([], {'created': 'now'}), '(created=now)\n', (3937, 3950), False, 'from models import DataPoint, FeatureVector\n'), ((4004, 4031), 'models.FeatureVector.objects.all', 'FeatureVector.objects.all', ([], {}), '()\n', (4029, 4031), False, 'from models import DataPoint, FeatureVector\n'), ((1462, 1489), 'models.FeatureVector.objects.all', 'FeatureVector.objects.all', ([], {}), '()\n', (1487, 1489), False, 'from models import DataPoint, FeatureVector\n'), ((1822, 1858), 'chemtools.mol_name.get_exact_name', 'get_exact_name', (["row[mapping['Name']]"], {}), "(row[mapping['Name']])\n", (1836, 1858), False, 'from chemtools.mol_name import get_exact_name\n'), ((3554, 3591), 'models.DataPoint.objects.bulk_create', 'DataPoint.objects.bulk_create', (['points'], {}), '(points)\n', (3583, 3591), False, 'from models import DataPoint, FeatureVector\n'), ((1916, 1952), 'chemtools.ml.get_decay_feature_vector', 'get_decay_feature_vector', (['exact_name'], {}), '(exact_name)\n', (1940, 1952), False, 'from chemtools.ml import get_decay_feature_vector\n'), ((2105, 2207), 'models.FeatureVector', 'FeatureVector', ([], {'exact_name': 'exact_name', 'type': 'FeatureVector.DECAY', 'vector': 'decay_feature', 'created': 'now'}), '(exact_name=exact_name, type=FeatureVector.DECAY, vector=\n decay_feature, created=now)\n', (2118, 2207), False, 'from models import DataPoint, FeatureVector\n'), ((2544, 2594), 'models.FeatureVector.objects.bulk_create', 'FeatureVector.objects.bulk_create', (['feature_vectors'], {}), '(feature_vectors)\n', (2577, 2594), False, 'from models import DataPoint, FeatureVector\n')] |
import telepot
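# 'API TOKEN' is a placeholder; replace it with the token issued for your bot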
bot = telepot.Bot('API TOKEN')
print(bot.getMe())
response = bot.getUpdates()
print(response)
| [
"telepot.Bot"
] | [((22, 46), 'telepot.Bot', 'telepot.Bot', (['"""API TOKEN"""'], {}), "('API TOKEN')\n", (33, 46), False, 'import telepot\n')] |
# @Author
# <NAME>
# <EMAIL>; <EMAIL>
# July 2013
import sys, os, h5py, sqlite3, json, numpy, itertools
import subprocess
# Input of primary and secondary datasets
from datatypes.ExpressionDataset import HDF5GroupToExpressionDataset
from datatypes.ExpressionDataset import MakeRandomFoldMap
from datatypes.GeneSetCollection import ReadGeneSetCollection
# Feature-extraction algorithms
from featureExtractors.SingleGenes.SingleGeneFeatureExtractor import SingleGeneFeatureExtractorFactory
from featureExtractors.SingleGenes.RandomGeneFeatureExtractor import RandomGeneFeatureExtractorFactory
from featureExtractors.Lee.LeeFeatureExtractor import LeeFeatureExtractorFactory
# Classifiers
from classifiers.BinaryNearestMeanClassifier import BinaryNearestMeanClassifierFactory, V1, V2a, V2b, V3
from statistics.PerformanceCurve import CalculateFeatureCountDependentPerformanceCurve, CalculateFeatureCountDependentPerformance
import random
def CombineData():
"""
Combines the data, methods and parameters for the Performance and Overlap evaluation.
"""
datasets = ['U133A_combat_DMFS']
pathways = ['nwGeneSetsKEGG', 'nwGeneSetsMsigDB']
PathwayFeatureExtractorSpecificParams = [('Lee', None)]
SingleGene = [('SingleGenes', None), ('RandomGenes', None)]
#Combine algos with sec data
NetworkDataAndFeatureExtractors = list(itertools.product(PathwayFeatureExtractorSpecificParams, pathways))
NetworkDataAndFeatureExtractors.extend(list(itertools.product(SingleGene, [None])))
#Combine FEs with data and number of shuffles
DataAndFeatureExtractors = list(itertools.product(datasets, NetworkDataAndFeatureExtractors, [None]))
return DataAndFeatureExtractors
def SetUpRun(dataset, network, method, datafile = "4851460", datapath = '..'):
#get data from figshare
# wget -P data/ https://ndownloader.figshare.com/files/4851460
#print("Downloading data from figshare.")
#PATH = "data/"
#wget = ["wget", "-P", PATH, "https://ndownloader.figshare.com/files/4851460"]
#proc = subprocess.Popen(wget, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
#stderr = subprocess.PIPE)
#output, err = proc.communicate()
#get dataset
f = h5py.File(datapath+'/'+datafile)
# The hdf5 file contains several datasets, fetch the one we indicated
data = [HDF5GroupToExpressionDataset(f[group]) for group in f.keys() if dataset in group][0]
f.close()
#get network
if network == "nwGeneSetsKEGG":
net = ReadGeneSetCollection("KEGGpw" , datapath+"/KEGG1210_PathwayGeneSets_Entrez.txt" , "Entrez_")
elif network == "nwGeneSetsMsigDB":
net = ReadGeneSetCollection("MsigDBpw" , datapath+"/C2V3_PathwayGeneSets_Entrez.txt" , "Entrez_")
elif network == "nwEdgesKEGG":
net = ReadSIF("KEGG" , datapath+"/KEGG_edges1210.sif" , "Entrez_")
elif network == None:
net = None
print("SG no network")
else:
raise Exception("Network not known. Add network in SetUpGrid/SetUpRun.")
#get featureselection method
if method == "Lee":
featureSelector = LeeFeatureExtractorFactory()
elif method.startswith('SingleGenes'):
featureSelector = SingleGeneFeatureExtractorFactory()
elif method.startswith('RandomGenes'):
featureSelector = RandomGeneFeatureExtractorFactory()
else:
raise Exception("Method not known. Add method in SetUpGrid/SetUpRun.")
#get classifier
classifiers = [
BinaryNearestMeanClassifierFactory(V1),
]
return (data, net, featureSelector, classifiers, None)
def RunInstance(data, net, featureSelector, special, classifiers, repeat, nrFolds, fold, shuffleNr, survTime = None, TaylorParam = True):
    # in the inner loop, the dataset name carries an additional tag
if "training" in data.name:
dName = "_".join(data.name.split('_')[:len(data.name.split('_'))-2])
else:
dName = data.name
#split datasets
dsTraining, dsTesting, foldMap = splitData(data, fold, repeat, nrFolds)
#select features
if featureSelector.productName in ["SingleGeneFeatureExtractor", "RandomGeneFeatureExtractor"]:
featureExtractor = featureSelector.train(dsTraining)
elif special == None: #Chuang, Dao, Lee
featureExtractor = featureSelector.train(dsTraining, net)
else:
raise Exception("Method not known. Add method in SetUpGrid/SetUpRun.")
#train classifiers and produce AUC values with the testing dataset
maxFeatureCount = 400
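    # evaluate each classifier's AUC as a function of the number of selected features (capped at 400)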
AucAndCi = {}
for CF in classifiers:
print("-->", CF.productName)
featureCounts = [fc for fc in featureExtractor.validFeatureCounts if fc <= maxFeatureCount]
nf_to_auc = CalculateFeatureCountDependentPerformanceCurve(
featureExtractor,
CF,
(dsTraining, dsTesting),
featureCounts
)
AucAndCi[CF.productName] = nf_to_auc
#return the resulting features, and classification results
if net == None:
return (data.name, featureExtractor.name, None, None, featureExtractor.toJsonExpression(), AucAndCi)
else:
return (data.name, featureExtractor.name, net.name, None, featureExtractor.toJsonExpression(), AucAndCi)
def splitData(data, fold, repeat, nrFolds):
#split datasets
foldMap = MakeRandomFoldMap(data, nrFolds, repeat)
foldList = range(0, nrFolds)
dsTraining = data.extractPatientsByIndices("%s_fold-%d-of-%d_training" % (data.name, fold,
len(foldList)), numpy.array(foldMap.foldAssignments)-1 != fold, checkNormalization = False)
dsTesting = data.extractPatientsByIndices("%s_fold-%d-of-%d_testing" % (data.name, fold,
len(foldList)), numpy.array(foldMap.foldAssignments)-1 == fold, checkNormalization = False)
return dsTraining, dsTesting, foldMap
def NextItem(iterable):
try:
first = next(iterable)
except StopIteration:
return None
it = itertools.chain([first], iterable)
return first, it
def getDoneTokens(db, experiment):
"""
Sort all done Tokens according to experiment.
db : dictionary item['_id']: item
"""
finishedTokens = []
pendingTokens = []
otherDocs = []
pending = []
for item in db:
doc = db[item]
if 'output' in doc.keys():
if item.startswith(experiment):
finishedTokens.append(doc)
elif "lock" in doc.keys():
if doc["lock"] > 0 and doc["done"] == 0:
pendingTokens.append(doc)
return finishedTokens, pendingTokens
def resetTokens(ids, db):
for ID in ids:
token = db.get(ID)
scrub = token['scrub_count']
updateContent = {'lock': 0,
'scrub_count' : scrub+1}
token.update(updateContent)
db.save(token)
| [
"itertools.chain",
"classifiers.BinaryNearestMeanClassifier.BinaryNearestMeanClassifierFactory",
"datatypes.ExpressionDataset.HDF5GroupToExpressionDataset",
"datatypes.GeneSetCollection.ReadGeneSetCollection",
"itertools.product",
"h5py.File",
"featureExtractors.Lee.LeeFeatureExtractor.LeeFeatureExtractorFactory",
"numpy.array",
"featureExtractors.SingleGenes.SingleGeneFeatureExtractor.SingleGeneFeatureExtractorFactory",
"datatypes.ExpressionDataset.MakeRandomFoldMap",
"statistics.PerformanceCurve.CalculateFeatureCountDependentPerformanceCurve",
"featureExtractors.SingleGenes.RandomGeneFeatureExtractor.RandomGeneFeatureExtractorFactory"
] | [((2275, 2311), 'h5py.File', 'h5py.File', (["(datapath + '/' + datafile)"], {}), "(datapath + '/' + datafile)\n", (2284, 2311), False, 'import sys, os, h5py, sqlite3, json, numpy, itertools\n'), ((5403, 5443), 'datatypes.ExpressionDataset.MakeRandomFoldMap', 'MakeRandomFoldMap', (['data', 'nrFolds', 'repeat'], {}), '(data, nrFolds, repeat)\n', (5420, 5443), False, 'from datatypes.ExpressionDataset import MakeRandomFoldMap\n'), ((6033, 6067), 'itertools.chain', 'itertools.chain', (['[first]', 'iterable'], {}), '([first], iterable)\n', (6048, 6067), False, 'import sys, os, h5py, sqlite3, json, numpy, itertools\n'), ((1394, 1460), 'itertools.product', 'itertools.product', (['PathwayFeatureExtractorSpecificParams', 'pathways'], {}), '(PathwayFeatureExtractorSpecificParams, pathways)\n', (1411, 1460), False, 'import sys, os, h5py, sqlite3, json, numpy, itertools\n'), ((1636, 1704), 'itertools.product', 'itertools.product', (['datasets', 'NetworkDataAndFeatureExtractors', '[None]'], {}), '(datasets, NetworkDataAndFeatureExtractors, [None])\n', (1653, 1704), False, 'import sys, os, h5py, sqlite3, json, numpy, itertools\n'), ((2564, 2661), 'datatypes.GeneSetCollection.ReadGeneSetCollection', 'ReadGeneSetCollection', (['"""KEGGpw"""', "(datapath + '/KEGG1210_PathwayGeneSets_Entrez.txt')", '"""Entrez_"""'], {}), "('KEGGpw', datapath +\n '/KEGG1210_PathwayGeneSets_Entrez.txt', 'Entrez_')\n", (2585, 2661), False, 'from datatypes.GeneSetCollection import ReadGeneSetCollection\n'), ((3170, 3198), 'featureExtractors.Lee.LeeFeatureExtractor.LeeFeatureExtractorFactory', 'LeeFeatureExtractorFactory', ([], {}), '()\n', (3196, 3198), False, 'from featureExtractors.Lee.LeeFeatureExtractor import LeeFeatureExtractorFactory\n'), ((3547, 3585), 'classifiers.BinaryNearestMeanClassifier.BinaryNearestMeanClassifierFactory', 'BinaryNearestMeanClassifierFactory', (['V1'], {}), '(V1)\n', (3581, 3585), False, 'from classifiers.BinaryNearestMeanClassifier import BinaryNearestMeanClassifierFactory, V1, V2a, V2b, V3\n'), ((4792, 4905), 'statistics.PerformanceCurve.CalculateFeatureCountDependentPerformanceCurve', 'CalculateFeatureCountDependentPerformanceCurve', (['featureExtractor', 'CF', '(dsTraining, dsTesting)', 'featureCounts'], {}), '(featureExtractor, CF, (\n dsTraining, dsTesting), featureCounts)\n', (4838, 4905), False, 'from statistics.PerformanceCurve import CalculateFeatureCountDependentPerformanceCurve, CalculateFeatureCountDependentPerformance\n'), ((1510, 1547), 'itertools.product', 'itertools.product', (['SingleGene', '[None]'], {}), '(SingleGene, [None])\n', (1527, 1547), False, 'import sys, os, h5py, sqlite3, json, numpy, itertools\n'), ((2395, 2433), 'datatypes.ExpressionDataset.HDF5GroupToExpressionDataset', 'HDF5GroupToExpressionDataset', (['f[group]'], {}), '(f[group])\n', (2423, 2433), False, 'from datatypes.ExpressionDataset import HDF5GroupToExpressionDataset\n'), ((2712, 2807), 'datatypes.GeneSetCollection.ReadGeneSetCollection', 'ReadGeneSetCollection', (['"""MsigDBpw"""', "(datapath + '/C2V3_PathwayGeneSets_Entrez.txt')", '"""Entrez_"""'], {}), "('MsigDBpw', datapath +\n '/C2V3_PathwayGeneSets_Entrez.txt', 'Entrez_')\n", (2733, 2807), False, 'from datatypes.GeneSetCollection import ReadGeneSetCollection\n'), ((3268, 3303), 'featureExtractors.SingleGenes.SingleGeneFeatureExtractor.SingleGeneFeatureExtractorFactory', 'SingleGeneFeatureExtractorFactory', ([], {}), '()\n', (3301, 3303), False, 'from featureExtractors.SingleGenes.SingleGeneFeatureExtractor import SingleGeneFeatureExtractorFactory\n'), ((3373, 3408), 'featureExtractors.SingleGenes.RandomGeneFeatureExtractor.RandomGeneFeatureExtractorFactory', 'RandomGeneFeatureExtractorFactory', ([], {}), '()\n', (3406, 3408), False, 'from featureExtractors.SingleGenes.RandomGeneFeatureExtractor import RandomGeneFeatureExtractorFactory\n'), ((5596, 5632), 'numpy.array', 'numpy.array', (['foldMap.foldAssignments'], {}), '(foldMap.foldAssignments)\n', (5607, 5632), False, 'import sys, os, h5py, sqlite3, json, numpy, itertools\n'), ((5790, 5826), 'numpy.array', 'numpy.array', (['foldMap.foldAssignments'], {}), '(foldMap.foldAssignments)\n', (5801, 5826), False, 'import sys, os, h5py, sqlite3, json, numpy, itertools\n')]
#!/usr/bin/env python3
# Avoid needing display if plots aren't being shown
import sys
if "--noninteractive" in sys.argv:
import matplotlib as mpl
mpl.use("svg")
import control as cnt
import frccontrol as frccnt
import matplotlib.pyplot as plt
import numpy as np
def drivetrain(motor, num_motors, m, r, rb, J, Gl, Gr):
"""Returns the state-space model for a drivetrain.
States: [[left velocity], [right velocity]]
Inputs: [[left voltage], [right voltage]]
Outputs: [[left velocity], [right velocity]]
Keyword arguments:
motor -- instance of DcBrushedMotor
    num_motors -- number of motors driving the mechanism
m -- mass of robot in kg
r -- radius of wheels in meters
rb -- radius of robot in meters
J -- moment of inertia of the drivetrain in kg-m^2
Gl -- gear ratio of left side of drivetrain
Gr -- gear ratio of right side of drivetrain
Returns:
StateSpace instance containing continuous model
"""
motor = frccnt.models.gearbox(motor, num_motors)
C1 = -Gl ** 2 * motor.Kt / (motor.Kv * motor.R * r ** 2)
C2 = Gl * motor.Kt / (motor.R * r)
C3 = -Gr ** 2 * motor.Kt / (motor.Kv * motor.R * r ** 2)
C4 = Gr * motor.Kt / (motor.R * r)
# fmt: off
A = np.array([[(1 / m + rb**2 / J) * C1, (1 / m - rb**2 / J) * C3],
[(1 / m - rb**2 / J) * C1, (1 / m + rb**2 / J) * C3]])
B = np.array([[(1 / m + rb**2 / J) * C2, (1 / m - rb**2 / J) * C4],
[(1 / m - rb**2 / J) * C2, (1 / m + rb**2 / J) * C4]])
C = np.array([[1, 0],
[0, 1]])
D = np.array([[0, 0],
[0, 0]])
# fmt: on
return cnt.ss(A, B, C, D)
class Drivetrain(frccnt.System):
def __init__(self, dt):
"""Drivetrain subsystem.
Keyword arguments:
dt -- time between model/controller updates
"""
state_labels = [("Left velocity", "m/s"), ("Right velocity", "m/s")]
u_labels = [("Left voltage", "V"), ("Right voltage", "V")]
self.set_plot_labels(state_labels, u_labels)
u_min = np.array([[-12.0], [-12.0]])
u_max = np.array([[12.0], [12.0]])
frccnt.System.__init__(self, u_min, u_max, dt, np.zeros((2, 1)), np.zeros((2, 1)))
def create_model(self, states, inputs):
self.in_low_gear = False
# Number of motors per side
num_motors = 2.0
# High and low gear ratios of drivetrain
Ghigh = 72.0 / 12.0
# Drivetrain mass in kg
m = 64
# Radius of wheels in meters
r = 0.0746125
# Radius of robot in meters
rb = 0.6096 / 2.0
# Moment of inertia of the drivetrain in kg-m^2
J = 4.0
# Gear ratios of left and right sides of drivetrain respectively
if self.in_low_gear:
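            # NOTE: Glow (the low-gear ratio) is not defined in this snippet, so this branch would raise a NameError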
Gl = Glow
Gr = Glow
else:
Gl = Ghigh
Gr = Ghigh
return drivetrain(frccnt.models.MOTOR_CIM, num_motors, m, r, rb, J, Gl, Gr)
def design_controller_observer(self):
if self.in_low_gear:
q_vel = 1.0
else:
q_vel = 0.95
q = [q_vel, q_vel]
r = [12.0, 12.0]
self.design_lqr(q, r)
qff_vel = 0.01
self.design_two_state_feedforward([qff_vel, qff_vel], [12.0, 12.0])
q_vel = 1.0
r_vel = 0.01
self.design_kalman_filter([q_vel, q_vel], [r_vel, r_vel])
print("ctrb cond =", np.linalg.cond(cnt.ctrb(self.sysd.A, self.sysd.B)))
def main():
dt = 0.00505
drivetrain = Drivetrain(dt)
drivetrain.export_cpp_coeffs("Drivetrain", "control/")
t, xprof, vprof, aprof = frccnt.generate_s_curve_profile(
max_v=4.0, max_a=3.5, time_to_max_a=1.0, dt=dt, goal=50.0
)
# Generate references for simulation
refs = []
for i in range(len(t)):
r = np.matrix([[vprof[i]], [vprof[i]]])
refs.append(r)
if "--save-plots" in sys.argv or "--noninteractive" not in sys.argv:
state_rec, ref_rec, u_rec, y_rec = drivetrain.generate_time_responses(t, refs)
drivetrain.plot_time_responses(t, state_rec, ref_rec, u_rec)
if "--save-plots" in sys.argv:
plt.savefig("drivetrain_response.svg")
if "--noninteractive" not in sys.argv:
plt.show()
if __name__ == "__main__":
main()
| [
"frccontrol.generate_s_curve_profile",
"frccontrol.models.gearbox",
"matplotlib.pyplot.savefig",
"control.ctrb",
"matplotlib.use",
"control.ss",
"numpy.array",
"numpy.zeros",
"numpy.matrix",
"matplotlib.pyplot.show"
] | [((157, 171), 'matplotlib.use', 'mpl.use', (['"""svg"""'], {}), "('svg')\n", (164, 171), True, 'import matplotlib as mpl\n'), ((990, 1030), 'frccontrol.models.gearbox', 'frccnt.models.gearbox', (['motor', 'num_motors'], {}), '(motor, num_motors)\n', (1011, 1030), True, 'import frccontrol as frccnt\n'), ((1255, 1385), 'numpy.array', 'np.array', (['[[(1 / m + rb ** 2 / J) * C1, (1 / m - rb ** 2 / J) * C3], [(1 / m - rb ** \n 2 / J) * C1, (1 / m + rb ** 2 / J) * C3]]'], {}), '([[(1 / m + rb ** 2 / J) * C1, (1 / m - rb ** 2 / J) * C3], [(1 / m -\n rb ** 2 / J) * C1, (1 / m + rb ** 2 / J) * C3]])\n', (1263, 1385), True, 'import numpy as np\n'), ((1400, 1530), 'numpy.array', 'np.array', (['[[(1 / m + rb ** 2 / J) * C2, (1 / m - rb ** 2 / J) * C4], [(1 / m - rb ** \n 2 / J) * C2, (1 / m + rb ** 2 / J) * C4]]'], {}), '([[(1 / m + rb ** 2 / J) * C2, (1 / m - rb ** 2 / J) * C4], [(1 / m -\n rb ** 2 / J) * C2, (1 / m + rb ** 2 / J) * C4]])\n', (1408, 1530), True, 'import numpy as np\n'), ((1545, 1571), 'numpy.array', 'np.array', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (1553, 1571), True, 'import numpy as np\n'), ((1598, 1624), 'numpy.array', 'np.array', (['[[0, 0], [0, 0]]'], {}), '([[0, 0], [0, 0]])\n', (1606, 1624), True, 'import numpy as np\n'), ((1669, 1687), 'control.ss', 'cnt.ss', (['A', 'B', 'C', 'D'], {}), '(A, B, C, D)\n', (1675, 1687), True, 'import control as cnt\n'), ((3664, 3759), 'frccontrol.generate_s_curve_profile', 'frccnt.generate_s_curve_profile', ([], {'max_v': '(4.0)', 'max_a': '(3.5)', 'time_to_max_a': '(1.0)', 'dt': 'dt', 'goal': '(50.0)'}), '(max_v=4.0, max_a=3.5, time_to_max_a=1.0, dt\n =dt, goal=50.0)\n', (3695, 3759), True, 'import frccontrol as frccnt\n'), ((2090, 2118), 'numpy.array', 'np.array', (['[[-12.0], [-12.0]]'], {}), '([[-12.0], [-12.0]])\n', (2098, 2118), True, 'import numpy as np\n'), ((2135, 2161), 'numpy.array', 'np.array', (['[[12.0], [12.0]]'], {}), '([[12.0], [12.0]])\n', (2143, 2161), True, 'import numpy as np\n'), ((3865, 3900), 'numpy.matrix', 'np.matrix', (['[[vprof[i]], [vprof[i]]]'], {}), '([[vprof[i]], [vprof[i]]])\n', (3874, 3900), True, 'import numpy as np\n'), ((4197, 4235), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""drivetrain_response.svg"""'], {}), "('drivetrain_response.svg')\n", (4208, 4235), True, 'import matplotlib.pyplot as plt\n'), ((4287, 4297), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4295, 4297), True, 'import matplotlib.pyplot as plt\n'), ((2217, 2233), 'numpy.zeros', 'np.zeros', (['(2, 1)'], {}), '((2, 1))\n', (2225, 2233), True, 'import numpy as np\n'), ((2235, 2251), 'numpy.zeros', 'np.zeros', (['(2, 1)'], {}), '((2, 1))\n', (2243, 2251), True, 'import numpy as np\n'), ((3475, 3509), 'control.ctrb', 'cnt.ctrb', (['self.sysd.A', 'self.sysd.B'], {}), '(self.sysd.A, self.sysd.B)\n', (3483, 3509), True, 'import control as cnt\n')] |
"""
Author :- <NAME>
Task :- Test for series connection. This is not a part of the network; it is for testing algorithms.
Begin DATE :- 01- APRIL- 2021
"""
from socket import *
from random import randint
# CIF: Customer Information File
# It maps the card info to the bank information
global CIF_number
CIF_number={
"1001 0110 2002 0011":"98765432011",
"1001 0110 2002 0026":"98765432026",
"1001 0110 2002 0006":"98765432006"
}
global accountDetails
accountDetails={
"98765432011":["00000000011", "RBIS0PFMS01"],
"98765432026":["00000000026", "RBIS0PFMS01"],
"98765432006":["00000000006", "RBIS0PFMS01"],
}
# function for converting the binary message into a list
# Input is the received message from the payment gateway
# Output is the full message as a list
def give_list(recvMessage):
recvMessage = recvMessage.decode()
recvMessage2 = eval(recvMessage)
return recvMessage2
def otp_gen():
otpgenerated = randint(100001, 999999)
print("Generated OTP is :- ", otpgenerated)
return otpgenerated
TpsPortNumber = 9988
TpsServer = socket(AF_INET, SOCK_STREAM)
TpsServer.bind(('', TpsPortNumber))
TpsServer.listen(1)
print("Server is connected...")
while 1:
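    # handle one payment-gateway connection per loop iteration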
ppInstance, ppAddress = TpsServer.accept()
print("Connection established...")
recvMsgFromPP = ppInstance.recv(2048)
recvInfo = give_list(recvMsgFromPP)
print("Received message...")
print(recvInfo)
cardNumber = recvInfo[0]
print(cardNumber)
print(CIF_number[str(cardNumber)])
OTP = otp_gen()
receivedOtp=ppInstance.recv(2048)
recvotp = receivedOtp.decode()
print("RECEIVED OTP from the user :- ",recvotp)
if recvotp == str(OTP):
ppInstance.send("True".encode())
else:
ppInstance.send("False".encode())
ppInstance.close()
# ppInstance.send("1".encode())
| [
"random.randint"
] | [((930, 953), 'random.randint', 'randint', (['(100001)', '(999999)'], {}), '(100001, 999999)\n', (937, 953), False, 'from random import randint\n')] |
from argparse import ArgumentParser
import os
import glob
import json
import pandas as pd
from pydicom import dcmread
from tqdm import tqdm
from joblib import Parallel, delayed
def crawl_one(folder):
database = {}
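    # nested mapping: patient ID -> study UID -> series UID -> series metadata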
for path, _, _ in os.walk(folder):
# find dicoms
dicoms = glob.glob(os.path.join(path, "*.dcm"))
# instance (slice) information
for dcm in dicoms:
try:
meta = dcmread(dcm, force=True)
patient = str(meta.PatientID)
study = str(meta.StudyInstanceUID)
series = str(meta.SeriesInstanceUID)
instance = str(meta.SOPInstanceUID)
reference_ct, reference_rs, reference_pl = " ", " ", " "
try: #RTSTRUCT
reference_ct = str(meta.ReferencedFrameOfReferenceSequence[0].RTReferencedStudySequence[0].RTReferencedSeriesSequence[0].SeriesInstanceUID)
except:
try: #RTDOSE
reference_rs = str(meta.ReferencedStructureSetSequence[0].ReferencedSOPInstanceUID)
except:
pass
try:
reference_ct = str(meta.ReferencedImageSequence[0].ReferencedSOPInstanceUID)
except:
pass
try:
reference_pl = str(meta.ReferencedRTPlanSequence[0].ReferencedSOPInstanceUID)
except:
pass
try:
reference_frame = str(meta.FrameOfReferenceUID)
except:
try:
reference_frame = str(meta.ReferencedFrameOfReferenceSequence[0].FrameOfReferenceUID)
except:
reference_frame = ""
try:
study_description = str(meta.StudyDescription)
except:
study_description = ""
try:
series_description = str(meta.SeriesDescription)
except:
series_description = ""
if patient not in database:
database[patient] = {}
if study not in database[patient]:
database[patient][study] = {'description': study_description}
if series not in database[patient][study]:
database[patient][study][series] = {'instances': [],
'instance_uid': instance,
'modality': meta.Modality,
'description': series_description,
'reference_ct': reference_ct,
'reference_rs': reference_rs,
'reference_pl': reference_pl,
'reference_frame': reference_frame,
'folder': path}
database[patient][study][series]['instances'].append(instance)
except:
pass
return database
def to_df(database_dict):
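    """Flatten the nested crawl dictionary into a DataFrame with one row per series."""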
df = pd.DataFrame()
for pat in database_dict:
for study in database_dict[pat]:
for series in database_dict[pat][study]:
if series != 'description':
df = df.append({'patient_ID': pat,
'study': study,
'study_description': database_dict[pat][study]['description'],
'series': series,
'series_description': database_dict[pat][study][series]['description'],
'modality': database_dict[pat][study][series]['modality'],
'instances': len(database_dict[pat][study][series]['instances']),
'instance_uid': database_dict[pat][study][series]['instance_uid'],
'reference_ct': database_dict[pat][study][series]['reference_ct'],
'reference_rs': database_dict[pat][study][series]['reference_rs'],
'reference_pl': database_dict[pat][study][series]['reference_pl'],
'reference_frame': database_dict[pat][study][series]['reference_frame'],
'folder': database_dict[pat][study][series]['folder']}, ignore_index=True)
return df
def crawl(top,
n_jobs: int = -1):
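    """Crawl each folder under `top` in parallel; save the index as JSON and CSV one level above `top`."""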
database_list = []
folders = glob.glob(os.path.join(top, "*"))
database_list = Parallel(n_jobs=n_jobs)(delayed(crawl_one)(os.path.join(top, folder)) for folder in tqdm(folders))
# convert list to dictionary
database_dict = {}
for db in database_list:
for key in db:
database_dict[key] = db[key]
# save one level above imaging folders
parent, dataset = os.path.split(top)
# save as json
with open(os.path.join(parent, f'imgtools_{dataset}.json'), 'w') as f:
json.dump(database_dict, f, indent=4)
# save as dataframe
df = to_df(database_dict)
df_path = os.path.join(parent, f'imgtools_{dataset}.csv')
df.to_csv(df_path)
return database_dict
if __name__ == "__main__":
parser = ArgumentParser("Dataset DICOM Crawler")
parser.add_argument("directory",
type=str,
help="Top-level directory of the dataset.")
parser.add_argument("--n_jobs",
type=int,
default=16,
help="Number of parallel processes for multiprocessing.")
args = parser.parse_args()
db = crawl(args.directory, n_jobs=args.n_jobs)
print("# patients:", len(db))
| [
"pydicom.dcmread",
"argparse.ArgumentParser",
"json.dump",
"tqdm.tqdm",
"os.path.join",
"os.path.split",
"joblib.Parallel",
"pandas.DataFrame",
"joblib.delayed",
"os.walk"
] | [((243, 258), 'os.walk', 'os.walk', (['folder'], {}), '(folder)\n', (250, 258), False, 'import os\n'), ((3406, 3420), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (3418, 3420), True, 'import pandas as pd\n'), ((5262, 5280), 'os.path.split', 'os.path.split', (['top'], {}), '(top)\n', (5275, 5280), False, 'import os\n'), ((5499, 5546), 'os.path.join', 'os.path.join', (['parent', 'f"""imgtools_{dataset}.csv"""'], {}), "(parent, f'imgtools_{dataset}.csv')\n", (5511, 5546), False, 'import os\n'), ((5641, 5680), 'argparse.ArgumentParser', 'ArgumentParser', (['"""Dataset DICOM Crawler"""'], {}), "('Dataset DICOM Crawler')\n", (5655, 5680), False, 'from argparse import ArgumentParser\n'), ((4893, 4915), 'os.path.join', 'os.path.join', (['top', '"""*"""'], {}), "(top, '*')\n", (4905, 4915), False, 'import os\n'), ((4942, 4965), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'n_jobs'}), '(n_jobs=n_jobs)\n', (4950, 4965), False, 'from joblib import Parallel, delayed\n'), ((5388, 5425), 'json.dump', 'json.dump', (['database_dict', 'f'], {'indent': '(4)'}), '(database_dict, f, indent=4)\n', (5397, 5425), False, 'import json\n'), ((309, 336), 'os.path.join', 'os.path.join', (['path', '"""*.dcm"""'], {}), "(path, '*.dcm')\n", (321, 336), False, 'import os\n'), ((5319, 5367), 'os.path.join', 'os.path.join', (['parent', 'f"""imgtools_{dataset}.json"""'], {}), "(parent, f'imgtools_{dataset}.json')\n", (5331, 5367), False, 'import os\n'), ((445, 469), 'pydicom.dcmread', 'dcmread', (['dcm'], {'force': '(True)'}), '(dcm, force=True)\n', (452, 469), False, 'from pydicom import dcmread\n'), ((4966, 4984), 'joblib.delayed', 'delayed', (['crawl_one'], {}), '(crawl_one)\n', (4973, 4984), False, 'from joblib import Parallel, delayed\n'), ((4985, 5010), 'os.path.join', 'os.path.join', (['top', 'folder'], {}), '(top, folder)\n', (4997, 5010), False, 'import os\n'), ((5026, 5039), 'tqdm.tqdm', 'tqdm', (['folders'], {}), '(folders)\n', (5030, 5039), False, 'from tqdm import tqdm\n')] |
import torch
from torch.nn import Module, MSELoss
class StructuredLoss(Module):
"""
Applies MSE loss to layers of TensorTree and sums results
"""
def __init__(self):
super().__init__()
self.loss = MSELoss(reduce=False)
def forward(self, a, b):
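        # Sum the per-sample losses accumulated over the whole tree into a scalar.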
loss = self._apply_loss(a, b)
# print(a, b, loss)
return loss.sum()
def _apply_loss(self, a, b):
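        # Element-wise MSE on this node's tensors, summed over the feature dimension.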
loss = self.loss(a.tensor, b.tensor).sum(1)
# Compute loss for children
if len(a.children) != len(b.children):
raise ValueError('Mismatching sizes of children: ' + str(len(a.children)) + ' and ' + str(len(b.children)))
for (cur, (a_child, b_child)) in enumerate(zip(a.children, b.children)):
if a_child is None:
continue
if b_child is None:
# Second children is missing - create fake one
rows, columns = a_child.tensor.size()
b_child = a_child.__class__(
torch.zeros(rows, columns),
[None] * columns
)
# child_loss = self._apply_loss(a_child.cmul(a.tensor[:, cur].view(a.rows(), 1)), b_child)
child_loss = self._apply_loss(a_child, b_child)
# Add to resulting loss
loss = loss + child_loss
return loss
| [
"torch.nn.MSELoss",
"torch.zeros"
] | [((232, 253), 'torch.nn.MSELoss', 'MSELoss', ([], {'reduce': '(False)'}), '(reduce=False)\n', (239, 253), False, 'from torch.nn import Module, MSELoss\n'), ((1017, 1043), 'torch.zeros', 'torch.zeros', (['rows', 'columns'], {}), '(rows, columns)\n', (1028, 1043), False, 'import torch\n')] |
from snippets import is_palindrome
def problem_4():
"Find the largest palindrome made from the product of two 3-digit numbers."
largest_product = 0
# With one factor between 100 and 1000...
for factor_one in range(100, 999 + 1):
# And with a second factor between 100 and 1000...
for factor_two in range(factor_one, 999 + 1):
# Calculate the product of the two factors
product = factor_one * factor_two
# If the product is a palindrome and is larger than the largest product...
            if is_palindrome(product) and product > largest_product:
# Set the largest product to the current product
largest_product = product
return largest_product
if __name__ == "__main__":
answer = problem_4()
print(answer)
| [
"snippets.is_palindrome"
] | [((564, 586), 'snippets.is_palindrome', 'is_palindrome', (['product'], {}), '(product)\n', (577, 586), False, 'from snippets import is_palindrome\n')] |
from labeling.lf import *
from labeling.apply import *
from labeling.preprocess import *
from labeling.continuous_scoring import *
from labeling.noisy_labels import *
from labeling.lf_set import *
from preprocessor import *
import numpy as np
import pickle  # used below to verify the generated pickle file
pre_resources={"r0":1.0}
@preprocessor(resources=pre_resources)
def square(x,**kwargs):
return {"value":x*x*kwargs["r0"]}
cf_resources={"r1":4, "r2":8, "len1":4}
lf_resources={"r3":4, "len2":5}
@continuous_scorer(resources=cf_resources)
def score(x, **kwargs):
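    # Exponential decay in the norm of the preprocessed input, scaled by resource-derived factors.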
t1=np.exp(-1*np.linalg.norm(x['value']))
t2=(kwargs["r1"]+kwargs["r2"])/(kwargs["len1"]*kwargs["len1"])
t3=kwargs["r3"]/kwargs["len2"]
return t1*t2*t3
@labeling_function(pre=[square], resources=lf_resources, cont_scorer=score, label=0)
def lf1(x, **kwargs):
if np.linalg.norm(x['value']) < 1 and kwargs["r3"]==4:
if (kwargs['continuous_score']>0.01): # can use continuous score inside now
return 0
return -1
@labeling_function(pre=[square], label=1) # no continuous scorer specified
def lf2(x, **kwargs):
if np.linalg.norm(x['value']) < 1:
return 1
return -1
## creating a RuleSet object with desired Labeling functions ##
lfs = [lf1, lf2]
# rules = LFSet("myrules", lfs)
rules = LFSet("testrules")
rules.add_lf_list(lfs)
# rules.add_lf(lf1)
# rules.add_lf(lf2)
## Data ##
dataX = np.array([[0.7659027, 0.07041862, 0.67856597, 0.58097795],
[0.98964838, 0.29277118, 0.67217224, 0.69125625],
[0.25344225, 0.72530643, 0.52627362, 0.08560926]])
dataY = np.array([0, 1, 1])
## Creating NoisyLabels class ##
test_data_noisy_labels = NoisyLabels("testdata", dataX, dataY, rules)
## Getting Noisy Labels ##
L,S = test_data_noisy_labels.get_labels()
## Generating pickle file ##
test_data_noisy_labels.generate_pickle()
## Checking correctness ##
if (next(iter(rules.get_lfs())) == lf1):
Lc=np.array([[0, 1],[-1, -1],[ 0, 1]])
Sc=np.array([[0.26463369, -1.],[-1.,-1.],[ 0.32993693, -1.]])
else:
Lc=np.array([[1, 0],[-1, -1],[1, 0]])
Sc=np.array([[-1., 0.26463369],[-1.,-1.],[-1., 0.32993693]])
f=open("testdata_pickle","rb")
noisy_data = pickle.load(f)
if np.allclose(Lc, noisy_data["l"]) and np.allclose(Sc, noisy_data["s"]):
print("works fine")
else:
print("something went wrong") | [
"numpy.array",
"numpy.allclose",
"numpy.linalg.norm"
] | [((1396, 1565), 'numpy.array', 'np.array', (['[[0.7659027, 0.07041862, 0.67856597, 0.58097795], [0.98964838, 0.29277118, \n 0.67217224, 0.69125625], [0.25344225, 0.72530643, 0.52627362, 0.08560926]]'], {}), '([[0.7659027, 0.07041862, 0.67856597, 0.58097795], [0.98964838, \n 0.29277118, 0.67217224, 0.69125625], [0.25344225, 0.72530643, \n 0.52627362, 0.08560926]])\n', (1404, 1565), True, 'import numpy as np\n'), ((1565, 1584), 'numpy.array', 'np.array', (['[0, 1, 1]'], {}), '([0, 1, 1])\n', (1573, 1584), True, 'import numpy as np\n'), ((1905, 1941), 'numpy.array', 'np.array', (['[[0, 1], [-1, -1], [0, 1]]'], {}), '([[0, 1], [-1, -1], [0, 1]])\n', (1913, 1941), True, 'import numpy as np\n'), ((1950, 2014), 'numpy.array', 'np.array', (['[[0.26463369, -1.0], [-1.0, -1.0], [0.32993693, -1.0]]'], {}), '([[0.26463369, -1.0], [-1.0, -1.0], [0.32993693, -1.0]])\n', (1958, 2014), True, 'import numpy as np\n'), ((2022, 2058), 'numpy.array', 'np.array', (['[[1, 0], [-1, -1], [1, 0]]'], {}), '([[1, 0], [-1, -1], [1, 0]])\n', (2030, 2058), True, 'import numpy as np\n'), ((2066, 2130), 'numpy.array', 'np.array', (['[[-1.0, 0.26463369], [-1.0, -1.0], [-1.0, 0.32993693]]'], {}), '([[-1.0, 0.26463369], [-1.0, -1.0], [-1.0, 0.32993693]])\n', (2074, 2130), True, 'import numpy as np\n'), ((2190, 2222), 'numpy.allclose', 'np.allclose', (['Lc', "noisy_data['l']"], {}), "(Lc, noisy_data['l'])\n", (2201, 2222), True, 'import numpy as np\n'), ((2227, 2259), 'numpy.allclose', 'np.allclose', (['Sc', "noisy_data['s']"], {}), "(Sc, noisy_data['s'])\n", (2238, 2259), True, 'import numpy as np\n'), ((1109, 1135), 'numpy.linalg.norm', 'np.linalg.norm', (["x['value']"], {}), "(x['value'])\n", (1123, 1135), True, 'import numpy as np\n'), ((531, 557), 'numpy.linalg.norm', 'np.linalg.norm', (["x['value']"], {}), "(x['value'])\n", (545, 557), True, 'import numpy as np\n'), ((796, 822), 'numpy.linalg.norm', 'np.linalg.norm', (["x['value']"], {}), "(x['value'])\n", (810, 822), True, 'import numpy as np\n')] |
from connexion.resolver import RestyResolver
import connexion
from injector import Binder, CallableProvider
from flask_injector import FlaskInjector
from services.provider import RestaurantsProvider
def configure(binder: Binder) -> None:
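    # Bind the provider interface to a concrete instance pre-populated with one item.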
binder.bind(
interface=RestaurantsProvider.RestaurantsProvider,
        to=RestaurantsProvider.RestaurantsProvider(items=[{"Name": "Fgh"}])
    )
if __name__ == '__main__':
app = connexion.App(__name__, port=2508, specification_dir='swagger/')
app.add_api('restaurants.yaml', resolver=RestyResolver('api'))
# app.run(ssl_context=('cert.pem','key.pem'))
FlaskInjector(app=app.app, modules=[configure])
app.run()
| [
"connexion.resolver.RestyResolver",
"connexion.App",
"flask_injector.FlaskInjector",
"services.provider.RestaurantsProvider.RestaurantsProvider"
] | [((440, 504), 'connexion.App', 'connexion.App', (['__name__'], {'port': '(2508)', 'specification_dir': '"""swagger/"""'}), "(__name__, port=2508, specification_dir='swagger/')\n", (453, 504), False, 'import connexion\n'), ((626, 673), 'flask_injector.FlaskInjector', 'FlaskInjector', ([], {'app': 'app.app', 'modules': '[configure]'}), '(app=app.app, modules=[configure])\n', (639, 673), False, 'from flask_injector import FlaskInjector\n'), ((328, 392), 'services.provider.RestaurantsProvider.RestaurantsProvider', 'RestaurantsProvider.RestaurantsProvider', ([], {'items': "[{'Name': 'Fgh'}]"}), "(items=[{'Name': 'Fgh'}])\n", (367, 392), False, 'from services.provider import RestaurantsProvider\n'), ((550, 570), 'connexion.resolver.RestyResolver', 'RestyResolver', (['"""api"""'], {}), "('api')\n", (563, 570), False, 'from connexion.resolver import RestyResolver\n')] |
# eventsourcing==7.2.4
from eventsourcing.exceptions import ConcurrencyError
from eventsourcing.application.sqlalchemy import SQLAlchemyApplication
import os
from eventsourcing.utils.random import encode_random_bytes
from eventsourcing.domain.model.aggregate import AggregateRoot
from eventsourcing.domain.model.decorators import attribute
class World(AggregateRoot):
def __init__(self, ruler=None, **kwargs):
super(World, self).__init__(**kwargs)
self._history = []
self._ruler = ruler
@property
def history(self):
return tuple(self._history)
@attribute
def ruler(self):
"""A mutable event-sourced attribute."""
def make_it_so(self, something):
self.__trigger_event__(World.SomethingHappened, what=something)
class SomethingHappened(AggregateRoot.Event):
def mutate(self, obj):
obj._history.append(self.what)
# Call library factory method.
world = World.__create__(ruler='gods')
assert world.ruler == 'gods'
# Execute commands.
world.make_it_so('dinosaurs')
world.make_it_so('trucks')
# Assign attribute.
world.ruler = 'money'
assert world.history == ('dinosaurs', 'trucks'), world.history
assert world.ruler == 'money'
# Generate cipher key (optional).
# Keep this safe.
cipher_key = encode_random_bytes(num_bytes=32)
# Configure environment variables.
# Optional cipher key (random bytes encoded with Base64).
os.environ['CIPHER_KEY'] = cipher_key
# SQLAlchemy-style database connection string.
os.environ['DB_URI'] = 'sqlite:///:memory:'
# Construct simple application (used here as a context manager).
with SQLAlchemyApplication(persist_event_type=World.Event) as app:
# Call library factory method.
world = World.__create__(ruler='gods')
# Execute commands.
world.make_it_so('dinosaurs')
world.make_it_so('trucks')
version = world.__version__ # note version at this stage
world.make_it_so('internet')
# Assign to event-sourced attribute.
world.ruler = 'money'
# View current state of aggregate.
assert world.ruler == 'money'
assert world.history[2] == 'internet'
assert world.history[1] == 'trucks'
assert world.history[0] == 'dinosaurs'
# Publish pending events (to persistence subscriber).
world.__save__()
# Retrieve aggregate (replay stored events).
copy = app.repository[world.id]
assert isinstance(copy, World)
# View retrieved state.
assert copy.ruler == 'money'
assert copy.history[2] == 'internet'
assert copy.history[1] == 'trucks'
assert copy.history[0] == 'dinosaurs'
# Verify retrieved state (cryptographically).
assert copy.__head__ == world.__head__
# Discard aggregate.
world.__discard__()
world.__save__()
# Discarded aggregate is not found.
assert world.id not in app.repository
try:
# Repository raises key error.
app.repository[world.id]
except KeyError:
pass
else:
raise Exception("Shouldn't get here")
# Get historical state (at version from above).
old = app.repository.get_entity(world.id, at=version)
assert old.history[-1] == 'trucks' # internet not happened
assert len(old.history) == 2
assert old.ruler == 'gods'
# Optimistic concurrency control (no branches).
old.make_it_so('future')
try:
old.__save__()
except ConcurrencyError:
pass
else:
raise Exception("Shouldn't get here")
# Check domain event data integrity (happens also during replay).
events = app.event_store.get_domain_events(world.id)
last_hash = ''
for event in events:
event.__check_hash__()
assert event.__previous_hash__ == last_hash
last_hash = event.__event_hash__
# Verify sequence of events (cryptographically).
assert last_hash == world.__head__
# Project application event notifications.
from eventsourcing.application.notificationlog import NotificationLogReader
reader = NotificationLogReader(app.notification_log)
notifications = reader.read()
notification_ids = [n['id'] for n in notifications]
assert notification_ids == [1, 2, 3, 4, 5, 6]
# Check records are encrypted (values not visible in database).
record_manager = app.event_store.record_manager
items = record_manager.get_items(world.id)
for item in items:
assert item.originator_id == world.id
assert 'dinosaurs' not in item.state
assert 'trucks' not in item.state
assert 'internet' not in item.state
| [
"eventsourcing.utils.random.encode_random_bytes",
"eventsourcing.application.notificationlog.NotificationLogReader",
"eventsourcing.application.sqlalchemy.SQLAlchemyApplication"
] | [((1300, 1333), 'eventsourcing.utils.random.encode_random_bytes', 'encode_random_bytes', ([], {'num_bytes': '(32)'}), '(num_bytes=32)\n', (1319, 1333), False, 'from eventsourcing.utils.random import encode_random_bytes\n'), ((1632, 1685), 'eventsourcing.application.sqlalchemy.SQLAlchemyApplication', 'SQLAlchemyApplication', ([], {'persist_event_type': 'World.Event'}), '(persist_event_type=World.Event)\n', (1653, 1685), False, 'from eventsourcing.application.sqlalchemy import SQLAlchemyApplication\n'), ((4011, 4054), 'eventsourcing.application.notificationlog.NotificationLogReader', 'NotificationLogReader', (['app.notification_log'], {}), '(app.notification_log)\n', (4032, 4054), False, 'from eventsourcing.application.notificationlog import NotificationLogReader\n')] |
from django.contrib import admin
from API.models import News
# Register your models here.
admin.site.register(News) | [
"django.contrib.admin.site.register"
] | [((91, 116), 'django.contrib.admin.site.register', 'admin.site.register', (['News'], {}), '(News)\n', (110, 116), False, 'from django.contrib import admin\n')] |
# Generated by Django 4.0.2 on 2022-02-09 18:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('utils', '0023_constellation_neighbors'),
]
operations = [
migrations.AlterField(
model_name='constellation',
name='other_map',
field=models.ImageField(blank=True, null=True, upload_to='constellation_maps', verbose_name='Other Map'),
),
]
| [
"django.db.models.ImageField"
] | [((349, 451), 'django.db.models.ImageField', 'models.ImageField', ([], {'blank': '(True)', 'null': '(True)', 'upload_to': '"""constellation_maps"""', 'verbose_name': '"""Other Map"""'}), "(blank=True, null=True, upload_to='constellation_maps',\n verbose_name='Other Map')\n", (366, 451), False, 'from django.db import migrations, models\n')] |