code
stringlengths 20
1.05M
| apis
sequence | extract_api
stringlengths 75
5.24M
|
---|---|---|
import os
import tempfile
from pathlib import Path
from typing import List
def get_list_of_all_files_in_dir(directory: str) -> List[str]:
"""
Returns a list containing all the file paths(absolute path) in a directory.
The list is sorted.
:return: List of absolute path of all files in a directory.
:rtype: List[str]
"""
return sorted([(directory + filename) for filename in os.listdir(directory)])
def does_path_exists(path: str) -> bool:
"""
If a directory is supplied then check if it exists.
If a file is supplied then check if it exists.
Directory ends with "/" on posix or "\" in windows and files do not.
If directory/file exists returns True else returns False
:return: True if dir or file exists else False.
:rtype: bool
"""
if path.endswith("/") or path.endswith("\\"):
# it's directory
return os.path.isdir(path)
else:
# it's file
return os.path.isfile(path)
def create_and_return_temporary_directory() -> str:
"""
create a temporary directory where we can store the video, frames and the
collage.
:return: Absolute path of the empty directory.
:rtype: str
"""
path = os.path.join(tempfile.mkdtemp(), ("temp_storage_dir" + os.path.sep))
Path(path).mkdir(parents=True, exist_ok=True)
return path
| [
"os.listdir",
"pathlib.Path",
"os.path.isfile",
"os.path.isdir",
"tempfile.mkdtemp"
] | [((893, 912), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (906, 912), False, 'import os\n'), ((959, 979), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (973, 979), False, 'import os\n'), ((1234, 1252), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1250, 1252), False, 'import tempfile\n'), ((1294, 1304), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (1298, 1304), False, 'from pathlib import Path\n'), ((406, 427), 'os.listdir', 'os.listdir', (['directory'], {}), '(directory)\n', (416, 427), False, 'import os\n')] |
from flaskcbv.url import Url, make_urls
from views import mainView, mainHandlersView
namespases = make_urls(
Url('', mainView(), name="main"),
Url('handlers/', mainHandlersView(), name="handlers"),
)
| [
"views.mainView",
"views.mainHandlersView"
] | [((122, 132), 'views.mainView', 'mainView', ([], {}), '()\n', (130, 132), False, 'from views import mainView, mainHandlersView\n'), ((169, 187), 'views.mainHandlersView', 'mainHandlersView', ([], {}), '()\n', (185, 187), False, 'from views import mainView, mainHandlersView\n')] |
from behave import when, then
from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey
@when('the internal user navigates to the home page')
def internal_user_views_home(_):
home.go_to()
@then('the user can view and click on a link to the surveys list page')
def should_be_able_to_click_survey_link(_):
home.click_surveys_link()
assert survey.get_page_title() == "Surveys | Survey Data Collection"
@then('the user can view and click on a link to the QBS survey page')
def should_be_able_to_click_qbs_survey_link(_):
survey.click_qbs_survey_link()
assert collection_exercise.get_page_title() == 'QBS | Surveys | Survey Data Collection'
@then('the user can view and click on a link to the 1803 QBS collection exercise page')
def should_be_able_to_click_qbs_collection_exercise_link(_):
collection_exercise.click_ce_link('1803')
assert collection_exercise_details.get_page_title() == '139 QBS 1803 | Surveys | Survey Data Collection'
| [
"acceptance_tests.features.pages.home.go_to",
"acceptance_tests.features.pages.collection_exercise_details.get_page_title",
"acceptance_tests.features.pages.home.click_surveys_link",
"acceptance_tests.features.pages.survey.click_qbs_survey_link",
"behave.when",
"acceptance_tests.features.pages.collection_exercise.click_ce_link",
"acceptance_tests.features.pages.collection_exercise.get_page_title",
"behave.then",
"acceptance_tests.features.pages.survey.get_page_title"
] | [((141, 193), 'behave.when', 'when', (['"""the internal user navigates to the home page"""'], {}), "('the internal user navigates to the home page')\n", (145, 193), False, 'from behave import when, then\n'), ((247, 317), 'behave.then', 'then', (['"""the user can view and click on a link to the surveys list page"""'], {}), "('the user can view and click on a link to the surveys list page')\n", (251, 317), False, 'from behave import when, then\n'), ((468, 536), 'behave.then', 'then', (['"""the user can view and click on a link to the QBS survey page"""'], {}), "('the user can view and click on a link to the QBS survey page')\n", (472, 536), False, 'from behave import when, then\n'), ((715, 811), 'behave.then', 'then', (['"""the user can view and click on a link to the 1803 QBS collection exercise page"""'], {}), "(\n 'the user can view and click on a link to the 1803 QBS collection exercise page'\n )\n", (719, 811), False, 'from behave import when, then\n'), ((231, 243), 'acceptance_tests.features.pages.home.go_to', 'home.go_to', ([], {}), '()\n', (241, 243), False, 'from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey\n'), ((366, 391), 'acceptance_tests.features.pages.home.click_surveys_link', 'home.click_surveys_link', ([], {}), '()\n', (389, 391), False, 'from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey\n'), ((589, 619), 'acceptance_tests.features.pages.survey.click_qbs_survey_link', 'survey.click_qbs_survey_link', ([], {}), '()\n', (617, 619), False, 'from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey\n'), ((867, 908), 'acceptance_tests.features.pages.collection_exercise.click_ce_link', 'collection_exercise.click_ce_link', (['"""1803"""'], {}), "('1803')\n", (900, 908), False, 'from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey\n'), ((403, 426), 'acceptance_tests.features.pages.survey.get_page_title', 'survey.get_page_title', ([], {}), '()\n', (424, 426), False, 'from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey\n'), ((631, 667), 'acceptance_tests.features.pages.collection_exercise.get_page_title', 'collection_exercise.get_page_title', ([], {}), '()\n', (665, 667), False, 'from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey\n'), ((920, 964), 'acceptance_tests.features.pages.collection_exercise_details.get_page_title', 'collection_exercise_details.get_page_title', ([], {}), '()\n', (962, 964), False, 'from acceptance_tests.features.pages import collection_exercise, collection_exercise_details, home, survey\n')] |
from datetime import timedelta
from functools import update_wrapper
from flask import request, make_response, current_app
def crossdomain(
origin=None,
methods=None,
headers=None,
max_age=21600,
attach_to_all=True,
automatic_options=True,
):
"""
2.1 spec
Servers should send the Access-Control-Allow-Origin header with the value * in
response to information requests. This header is required in order to allow the JSON
responses to be used by Web applications hosted on different servers.
"""
if methods is not None:
methods = ", ".join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, list):
headers = ", ".join(x.upper() for x in headers)
if not isinstance(origin, list):
origin = ", ".join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
def get_methods():
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers["allow"]
def decorator(f):
def wrapped_function(*args, **kwargs):
if automatic_options and request.method == "OPTIONS":
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != "OPTIONS":
return resp
h = resp.headers
h["Access-Control-Allow-Origin"] = origin
h["Access-Control-Allow-Methods"] = get_methods()
h["Access-Control-Max-Age"] = str(max_age)
h["Access-Control-Allow-Headers"] = "cache-control, pragma" # headers
# if headers is not None:
# h['Access-Control-Allow-Headers'] = headers
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
| [
"flask.current_app.make_default_options_response",
"functools.update_wrapper"
] | [((1014, 1057), 'flask.current_app.make_default_options_response', 'current_app.make_default_options_response', ([], {}), '()\n', (1055, 1057), False, 'from flask import request, make_response, current_app\n'), ((1944, 1979), 'functools.update_wrapper', 'update_wrapper', (['wrapped_function', 'f'], {}), '(wrapped_function, f)\n', (1958, 1979), False, 'from functools import update_wrapper\n'), ((1263, 1306), 'flask.current_app.make_default_options_response', 'current_app.make_default_options_response', ([], {}), '()\n', (1304, 1306), False, 'from flask import request, make_response, current_app\n')] |
import threading
import tkinter
from tkinter import scrolledtext
from PIL import ImageTk, Image
import subprocess as s
import requests
from configurations import theme
class GUI:
def __init__(self):
self.kill_code = None
self.name = theme.name
self.ascii_art = theme.art
self.gui_done = False
self.bg = Image.open(theme.bg_image)
self.conn_img = Image.open(
"assistant/configurations/images/connected.png")
self.disconn_img = Image.open(
"assistant/configurations/images/disconnected.png")
self.conn_state = False
gui_thread = threading.Thread(target=self.gui_loop)
connection_thread = threading.Thread(target=self.check_connection)
connection_thread.setDaemon(True)
self.check_conn_event = threading.Event()
gui_thread.start()
connection_thread.start()
def gui_loop(self):
self.win = tkinter.Tk()
self.win.title(f"Assistant {self.name}")
self.win.resizable(0, 0)
self.bgimage = ImageTk.PhotoImage(self.bg)
self.conn_img = ImageTk.PhotoImage(self.conn_img)
self.disconn_img = ImageTk.PhotoImage(self.disconn_img)
self.win.configure(bg=theme.label_bg_colour, relief="solid")
self.background = tkinter.Label(self.win, image=self.bgimage)
self.background.place(relx=0.5, rely=0.5, anchor="center")
self.title = tkinter.Label(
self.win, text=f"{self.name}", bg=theme.label_bg_colour, fg=theme.fg_colour)
self.title.config(font=(theme.title_font, 40, 'bold', 'underline'))
self.title.pack(padx=20, pady=0)
self.connection = tkinter.Label(self.win, image=self.disconn_img)
self.connection.place(x=365, y=8)
self.display_area = scrolledtext.ScrolledText(
self.win, width=52, height=35, bg=theme.scrolltext_bg_colour, fg=theme.fg_colour, bd="4", relief="solid")
self.display_area.config(font=("theme.base_font", 10))
self.display_area.pack(padx=20, pady=0)
for line in self.ascii_art:
self.display_area.insert('end', line)
self.display_area.config(state='disabled')
self.footer = tkinter.Label(
self.win, text="speak to give command...", bg=theme.label_bg_colour, fg=theme.fg_colour)
self.footer.place(x=25, y=645)
self.footer.config(font=("theme.base_font", 12, 'italic'))
self.clear_button = tkinter.Button(
self.win, text="Clear Screen", command=self.clear, relief="solid", bg=theme.button_colour, fg=theme.fg_colour)
self.clear_button.config(font=("theme.base_font", 10))
self.clear_button.pack(side='right', padx=20, pady=10)
self.gui_done = True
self.check_conn_event.set()
self.win.protocol("WM_DELETE_WINDOW", self.stop)
self.win.mainloop()
def clear(self):
self.display_area.config(state='normal')
self.display_area.delete('1.0', 'end')
self.display_area.config(state='disabled')
def stop(self):
if self.kill_code != None:
s.Popen(f'taskkill /F /PID {self.kill_code}', shell=True)
self.win.destroy()
exit(0)
def check_connection(self):
self.check_conn_event.wait()
while True:
try:
requests.get("https://www.google.com")
self.connection.configure(image=self.conn_img)
except Exception:
self.connection.configure(image=self.disconn_img)
def display(self, message):
if type(message) == int:
self.kill_code = message
message = "Ready to GO!\n"
elif "->>" not in message and "listening..." not in message:
message = f"{self.name} : {message}\n"
self.display_area.config(state='normal')
self.display_area.insert('end', message)
self.display_area.yview('end')
self.display_area.config(state='disabled')
gui = GUI()
| [
"PIL.Image.open",
"subprocess.Popen",
"tkinter.Button",
"requests.get",
"threading.Event",
"tkinter.scrolledtext.ScrolledText",
"tkinter.Tk",
"tkinter.Label",
"threading.Thread",
"PIL.ImageTk.PhotoImage"
] | [((354, 380), 'PIL.Image.open', 'Image.open', (['theme.bg_image'], {}), '(theme.bg_image)\n', (364, 380), False, 'from PIL import ImageTk, Image\n'), ((405, 464), 'PIL.Image.open', 'Image.open', (['"""assistant/configurations/images/connected.png"""'], {}), "('assistant/configurations/images/connected.png')\n", (415, 464), False, 'from PIL import ImageTk, Image\n'), ((505, 567), 'PIL.Image.open', 'Image.open', (['"""assistant/configurations/images/disconnected.png"""'], {}), "('assistant/configurations/images/disconnected.png')\n", (515, 567), False, 'from PIL import ImageTk, Image\n'), ((636, 674), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.gui_loop'}), '(target=self.gui_loop)\n', (652, 674), False, 'import threading\n'), ((703, 749), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.check_connection'}), '(target=self.check_connection)\n', (719, 749), False, 'import threading\n'), ((824, 841), 'threading.Event', 'threading.Event', ([], {}), '()\n', (839, 841), False, 'import threading\n'), ((948, 960), 'tkinter.Tk', 'tkinter.Tk', ([], {}), '()\n', (958, 960), False, 'import tkinter\n'), ((1066, 1093), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['self.bg'], {}), '(self.bg)\n', (1084, 1093), False, 'from PIL import ImageTk, Image\n'), ((1118, 1151), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['self.conn_img'], {}), '(self.conn_img)\n', (1136, 1151), False, 'from PIL import ImageTk, Image\n'), ((1179, 1215), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['self.disconn_img'], {}), '(self.disconn_img)\n', (1197, 1215), False, 'from PIL import ImageTk, Image\n'), ((1312, 1355), 'tkinter.Label', 'tkinter.Label', (['self.win'], {'image': 'self.bgimage'}), '(self.win, image=self.bgimage)\n', (1325, 1355), False, 'import tkinter\n'), ((1445, 1540), 'tkinter.Label', 'tkinter.Label', (['self.win'], {'text': 'f"""{self.name}"""', 'bg': 'theme.label_bg_colour', 'fg': 'theme.fg_colour'}), "(self.win, text=f'{self.name}', bg=theme.label_bg_colour, fg=\n theme.fg_colour)\n", (1458, 1540), False, 'import tkinter\n'), ((1693, 1740), 'tkinter.Label', 'tkinter.Label', (['self.win'], {'image': 'self.disconn_img'}), '(self.win, image=self.disconn_img)\n', (1706, 1740), False, 'import tkinter\n'), ((1812, 1948), 'tkinter.scrolledtext.ScrolledText', 'scrolledtext.ScrolledText', (['self.win'], {'width': '(52)', 'height': '(35)', 'bg': 'theme.scrolltext_bg_colour', 'fg': 'theme.fg_colour', 'bd': '"""4"""', 'relief': '"""solid"""'}), "(self.win, width=52, height=35, bg=theme.\n scrolltext_bg_colour, fg=theme.fg_colour, bd='4', relief='solid')\n", (1837, 1948), False, 'from tkinter import scrolledtext\n'), ((2230, 2337), 'tkinter.Label', 'tkinter.Label', (['self.win'], {'text': '"""speak to give command..."""', 'bg': 'theme.label_bg_colour', 'fg': 'theme.fg_colour'}), "(self.win, text='speak to give command...', bg=theme.\n label_bg_colour, fg=theme.fg_colour)\n", (2243, 2337), False, 'import tkinter\n'), ((2481, 2611), 'tkinter.Button', 'tkinter.Button', (['self.win'], {'text': '"""Clear Screen"""', 'command': 'self.clear', 'relief': '"""solid"""', 'bg': 'theme.button_colour', 'fg': 'theme.fg_colour'}), "(self.win, text='Clear Screen', command=self.clear, relief=\n 'solid', bg=theme.button_colour, fg=theme.fg_colour)\n", (2495, 2611), False, 'import tkinter\n'), ((3136, 3193), 'subprocess.Popen', 's.Popen', (['f"""taskkill /F /PID {self.kill_code}"""'], {'shell': '(True)'}), "(f'taskkill /F /PID {self.kill_code}', shell=True)\n", (3143, 3193), True, 'import subprocess as s\n'), ((3360, 3398), 'requests.get', 'requests.get', (['"""https://www.google.com"""'], {}), "('https://www.google.com')\n", (3372, 3398), False, 'import requests\n')] |
"""
phone_number wordlists
"""
import os
from .. import parse_assoc
NAME = 'phone_number'
raw_association_multipliers_1_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'raw',
'association_multipliers',
'phone_number.txt'
)
def get_wordlists(line_printer_cb):
return {**{'main': []}, **parse_assoc.read_assoc_data([raw_association_multipliers_1_path], line_printer_cb)} | [
"os.path.realpath"
] | [((165, 191), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (181, 191), False, 'import os\n')] |
# Copyright (C) 2011 <NAME>
# Copyright (C) 2010 <NAME>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from txgossip.detector import FailureDetector
class PeerState(object):
def __init__(self, clock, participant, name=None, PHI=8):
self.clock = clock
self.participant = participant
self.max_version_seen = 0
self.attrs = {}
self.detector = FailureDetector()
self.alive = False
self.heart_beat_version = 0
self.name = name
self.PHI = PHI
def set_name(self, name):
self.name = name
def update_with_delta(self, k, v, n):
"""."""
# It's possibly to get the same updates more than once if
# we're gossiping with multiple peers at once ignore them
if n > self.max_version_seen:
self.max_version_seen = n
self.set_key(k,v,n)
if k == '__heartbeat__':
self.detector.add(self.clock.seconds())
def update_local(self, k, v):
# This is used when the peerState is owned by this peer
self.max_version_seen += 1
self.set_key(k, v, self.max_version_seen)
def __iter__(self):
return iter(self.attrs)
def __len__(self):
return len(self.attrs)
def __contains__(self, key):
return key in self.attrs
def __setitem__(self, key, value):
self.update_local(key, value)
def set(self, key, value):
self.update_local(key, value)
def __getitem__(self, key):
return self.attrs[key][0]
def get(self, key, default=None):
if key in self.attrs:
return self.attrs[key][0]
return default
def has_key(self, key):
return key in self.attrs
def keys(self):
return self.attrs.keys()
def items(self):
for k, (v, n) in self.attrs.items():
yield k, v
def set_key(self, k, v, n):
self.attrs[k] = (v, n)
self.participant.value_changed(self, str(k), v)
def beat_that_heart(self):
self.heart_beat_version += 1
self.update_local('__heartbeat__', self.heart_beat_version);
def deltas_after_version(self, lowest_version):
"""
Return sorted by version.
"""
deltas = []
for key, (value, version) in self.attrs.items():
if version > lowest_version:
deltas.append((key, value, version))
deltas.sort(key=lambda kvv: kvv[2])
return deltas
def check_suspected(self):
phi = self.detector.phi(self.clock.seconds())
if phi > self.PHI or phi == 0:
self.mark_dead()
return True
else:
self.mark_alive()
return False
def mark_alive(self):
alive, self.alive = self.alive, True
if not alive:
self.participant.peer_alive(self)
def mark_dead(self):
if self.alive:
self.alive = False
self.participant.peer_dead(self)
| [
"txgossip.detector.FailureDetector"
] | [((1402, 1419), 'txgossip.detector.FailureDetector', 'FailureDetector', ([], {}), '()\n', (1417, 1419), False, 'from txgossip.detector import FailureDetector\n')] |
import unittest
from collections import Hashable
from pysome import default_name, SameState, Same, SameOutsideExpect
class DefaultNameTest(unittest.TestCase):
def test_basics(self):
self.assertTrue(isinstance(default_name, Hashable))
self.assertTrue(len({default_name, default_name}) == 1)
class SameStateTest(unittest.TestCase):
def test_basics(self):
self.assertTrue(set(SameState._state.keys()) == set(SameState._allow.keys()))
for key in SameState._allow.keys():
self.assertTrue(SameState._allow[key] is False)
self.assertTrue(SameState._state[key] == {})
def test_same_usage(self):
same = Same()
with self.assertRaises(SameOutsideExpect):
_ = same == 12
SameState._start()
self.assertTrue(same == 12)
self.assertTrue(same == 12)
self.assertFalse(same == 13)
SameState._end()
with self.assertRaises(SameOutsideExpect):
_ = same == 12
| [
"pysome.SameState._allow.keys",
"pysome.SameState._start",
"pysome.SameState._state.keys",
"pysome.SameState._end",
"pysome.Same"
] | [((488, 511), 'pysome.SameState._allow.keys', 'SameState._allow.keys', ([], {}), '()\n', (509, 511), False, 'from pysome import default_name, SameState, Same, SameOutsideExpect\n'), ((677, 683), 'pysome.Same', 'Same', ([], {}), '()\n', (681, 683), False, 'from pysome import default_name, SameState, Same, SameOutsideExpect\n'), ((771, 789), 'pysome.SameState._start', 'SameState._start', ([], {}), '()\n', (787, 789), False, 'from pysome import default_name, SameState, Same, SameOutsideExpect\n'), ((907, 923), 'pysome.SameState._end', 'SameState._end', ([], {}), '()\n', (921, 923), False, 'from pysome import default_name, SameState, Same, SameOutsideExpect\n'), ((410, 433), 'pysome.SameState._state.keys', 'SameState._state.keys', ([], {}), '()\n', (431, 433), False, 'from pysome import default_name, SameState, Same, SameOutsideExpect\n'), ((442, 465), 'pysome.SameState._allow.keys', 'SameState._allow.keys', ([], {}), '()\n', (463, 465), False, 'from pysome import default_name, SameState, Same, SameOutsideExpect\n')] |
import numpy as np
# Physical constants in cgs units
c = 3e10
G = 7e-8
# Calculate period given semimajor axis and total mass
def find_period(a, M, use_earth_units=False):
if use_earth_units:
return np.sqrt(a**3 / M)
else:
return np.sqrt(4 * np.pi**2 * a**3 / (G * M))
# Calculate total mass given semimajor axis and period
def find_mass(a, P, use_earth_units=False):
if use_earth_units:
return a**3 / P**2
else:
return 4 * np.pi**2 * a**3 / (G * P**2)
# Calculate semimajor axis given period and total mass
def find_semimajor_axis(P, M, use_earth_units=False):
if use_earth_units:
return np.power(M * P**2, 1/3)
else:
return np.power(G * M * P**2 / (4 * np.pi**2), 1/3)
print('Calculating the orbital radius of our Earth...')
orbital_radius_cm = find_semimajor_axis(60*60*24*365.25, 2e33)
orbital_radius_AU = find_semimajor_axis(1, 1, use_earth_units=True)
print('%s AU is equal to %2.2e cm.' % (orbital_radius_AU, orbital_radius_cm))
| [
"numpy.sqrt",
"numpy.power"
] | [((213, 232), 'numpy.sqrt', 'np.sqrt', (['(a ** 3 / M)'], {}), '(a ** 3 / M)\n', (220, 232), True, 'import numpy as np\n'), ((256, 298), 'numpy.sqrt', 'np.sqrt', (['(4 * np.pi ** 2 * a ** 3 / (G * M))'], {}), '(4 * np.pi ** 2 * a ** 3 / (G * M))\n', (263, 298), True, 'import numpy as np\n'), ((653, 680), 'numpy.power', 'np.power', (['(M * P ** 2)', '(1 / 3)'], {}), '(M * P ** 2, 1 / 3)\n', (661, 680), True, 'import numpy as np\n'), ((702, 752), 'numpy.power', 'np.power', (['(G * M * P ** 2 / (4 * np.pi ** 2))', '(1 / 3)'], {}), '(G * M * P ** 2 / (4 * np.pi ** 2), 1 / 3)\n', (710, 752), True, 'import numpy as np\n')] |
# Generated by Django 3.1.7 on 2021-04-05 13:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Pizza',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(choices=[('regular', 'Regular pizza'), ('square', 'Square pizza')], max_length=13)),
],
),
migrations.CreateModel(
name='PizzaSize',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='PizzaTopping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pizza', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='core.pizza')),
],
),
migrations.CreateModel(
name='Topping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=60)),
('on_top_of', models.ManyToManyField(through='core.PizzaTopping', to='core.Pizza')),
],
),
migrations.AddField(
model_name='pizzatopping',
name='topping',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='core.topping'),
),
migrations.AddField(
model_name='pizza',
name='size',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.pizzasize'),
),
migrations.AddField(
model_name='pizza',
name='toppings',
field=models.ManyToManyField(through='core.PizzaTopping', to='core.Topping'),
),
]
| [
"django.db.models.AutoField",
"django.db.models.ManyToManyField",
"django.db.models.CharField",
"django.db.models.ForeignKey"
] | [((1693, 1779), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""core.topping"""'}), "(on_delete=django.db.models.deletion.PROTECT, to=\n 'core.topping')\n", (1710, 1779), False, 'from django.db import migrations, models\n'), ((1891, 1979), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""core.pizzasize"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'core.pizzasize')\n", (1908, 1979), False, 'from django.db import migrations, models\n'), ((2095, 2165), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'through': '"""core.PizzaTopping"""', 'to': '"""core.Topping"""'}), "(through='core.PizzaTopping', to='core.Topping')\n", (2117, 2165), False, 'from django.db import migrations, models\n'), ((334, 427), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (350, 427), False, 'from django.db import migrations, models\n'), ((451, 554), 'django.db.models.CharField', 'models.CharField', ([], {'choices': "[('regular', 'Regular pizza'), ('square', 'Square pizza')]", 'max_length': '(13)'}), "(choices=[('regular', 'Regular pizza'), ('square',\n 'Square pizza')], max_length=13)\n", (467, 554), False, 'from django.db import migrations, models\n'), ((685, 778), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (701, 778), False, 'from django.db import migrations, models\n'), ((802, 833), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (818, 833), False, 'from django.db import migrations, models\n'), ((971, 1064), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (987, 1064), False, 'from django.db import migrations, models\n'), ((1089, 1168), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.PROTECT', 'to': '"""core.pizza"""'}), "(on_delete=django.db.models.deletion.PROTECT, to='core.pizza')\n", (1106, 1168), False, 'from django.db import migrations, models\n'), ((1301, 1394), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1317, 1394), False, 'from django.db import migrations, models\n'), ((1418, 1449), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)'}), '(max_length=60)\n', (1434, 1449), False, 'from django.db import migrations, models\n'), ((1482, 1550), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'through': '"""core.PizzaTopping"""', 'to': '"""core.Pizza"""'}), "(through='core.PizzaTopping', to='core.Pizza')\n", (1504, 1550), False, 'from django.db import migrations, models\n')] |
import json
retrieve_success = json.loads('''{
"id": "ch_1234",
"object": "charge",
"created": 1442723274,
"livemode": false,
"paid": true,
"status": "succeeded",
"amount": 100,
"currency": "aud",
"refunded": false,
"source": {
"id": "card_1234",
"object": "card",
"last4": "4242",
"brand": "Visa",
"funding": "credit",
"exp_month": 8,
"exp_year": 2016,
"country": "US",
"name": null,
"address_line1": null,
"address_line2": null,
"address_city": null,
"address_state": null,
"address_zip": null,
"address_country": null,
"cvc_check": null,
"address_line1_check": null,
"address_zip_check": null,
"tokenization_method": null,
"dynamic_last4": null,
"metadata": {
},
"customer": null
},
"captured": true,
"balance_transaction": "txn_234",
"failure_message": null,
"failure_code": null,
"amount_refunded": 0,
"customer": null,
"invoice": null,
"description": "Some refund",
"dispute": null,
"metadata": {},
"statement_descriptor": null,
"fraud_details": {},
"receipt_email": null,
"receipt_number": null,
"shipping": null,
"destination": null,
"application_fee": null,
"refunds": {
"object": "list",
"total_count": 0,
"has_more": false,
"url": "/v1/charges/ch_1234/refunds",
"data": [
]
}
}''')
| [
"json.loads"
] | [((33, 1381), 'json.loads', 'json.loads', (['"""{\n "id": "ch_1234",\n "object": "charge",\n "created": 1442723274,\n "livemode": false,\n "paid": true,\n "status": "succeeded",\n "amount": 100,\n "currency": "aud",\n "refunded": false,\n "source": {\n "id": "card_1234",\n "object": "card",\n "last4": "4242",\n "brand": "Visa",\n "funding": "credit",\n "exp_month": 8,\n "exp_year": 2016,\n "country": "US",\n "name": null,\n "address_line1": null,\n "address_line2": null,\n "address_city": null,\n "address_state": null,\n "address_zip": null,\n "address_country": null,\n "cvc_check": null,\n "address_line1_check": null,\n "address_zip_check": null,\n "tokenization_method": null,\n "dynamic_last4": null,\n "metadata": {\n },\n "customer": null\n },\n "captured": true,\n "balance_transaction": "txn_234",\n "failure_message": null,\n "failure_code": null,\n "amount_refunded": 0,\n "customer": null,\n "invoice": null,\n "description": "Some refund",\n "dispute": null,\n "metadata": {},\n "statement_descriptor": null,\n "fraud_details": {},\n "receipt_email": null,\n "receipt_number": null,\n "shipping": null,\n "destination": null,\n "application_fee": null,\n "refunds": {\n "object": "list",\n "total_count": 0,\n "has_more": false,\n "url": "/v1/charges/ch_1234/refunds",\n "data": [\n\n ]\n }\n}"""'], {}), '(\n """{\n "id": "ch_1234",\n "object": "charge",\n "created": 1442723274,\n "livemode": false,\n "paid": true,\n "status": "succeeded",\n "amount": 100,\n "currency": "aud",\n "refunded": false,\n "source": {\n "id": "card_1234",\n "object": "card",\n "last4": "4242",\n "brand": "Visa",\n "funding": "credit",\n "exp_month": 8,\n "exp_year": 2016,\n "country": "US",\n "name": null,\n "address_line1": null,\n "address_line2": null,\n "address_city": null,\n "address_state": null,\n "address_zip": null,\n "address_country": null,\n "cvc_check": null,\n "address_line1_check": null,\n "address_zip_check": null,\n "tokenization_method": null,\n "dynamic_last4": null,\n "metadata": {\n },\n "customer": null\n },\n "captured": true,\n "balance_transaction": "txn_234",\n "failure_message": null,\n "failure_code": null,\n "amount_refunded": 0,\n "customer": null,\n "invoice": null,\n "description": "Some refund",\n "dispute": null,\n "metadata": {},\n "statement_descriptor": null,\n "fraud_details": {},\n "receipt_email": null,\n "receipt_number": null,\n "shipping": null,\n "destination": null,\n "application_fee": null,\n "refunds": {\n "object": "list",\n "total_count": 0,\n "has_more": false,\n "url": "/v1/charges/ch_1234/refunds",\n "data": [\n\n ]\n }\n}"""\n )\n', (43, 1381), False, 'import json\n')] |
# Simple forward differentiation example: show how a perturbation
# of a single scene parameter changes the rendered image.
import enoki as ek
import mitsuba
mitsuba.set_variant('gpu_autodiff_rgb')
from mitsuba.core import Thread, Float
from mitsuba.core.xml import load_file
from mitsuba.python.util import traverse
from mitsuba.python.autodiff import render, write_bitmap
# Load the Cornell Box
Thread.thread().file_resolver().append('cbox')
scene = load_file('cbox/cbox.xml')
# Find differentiable scene parameters
params = traverse(scene)
# Keep track of derivatives with respect to one parameter
param_0 = params['red.reflectance.value']
ek.set_requires_gradient(param_0)
# Differentiable simulation
image = render(scene, spp=32)
# Assign the gradient [1, 1, 1] to the 'red.reflectance.value' input
ek.set_gradient(param_0, [1, 1, 1], backward=False)
# Forward-propagate previously assigned gradients
Float.forward()
# The gradients have been propagated to the output image
image_grad = ek.gradient(image)
# .. write them to a PNG file
crop_size = scene.sensors()[0].film().crop_size()
fname = 'out.png'
write_bitmap(fname, image_grad, crop_size)
print('Wrote forward differentiation image to: {}'.format(fname))
| [
"mitsuba.core.Float.forward",
"mitsuba.python.util.traverse",
"mitsuba.python.autodiff.write_bitmap",
"enoki.set_gradient",
"mitsuba.set_variant",
"enoki.gradient",
"mitsuba.core.xml.load_file",
"mitsuba.python.autodiff.render",
"enoki.set_requires_gradient",
"mitsuba.core.Thread.thread"
] | [((159, 198), 'mitsuba.set_variant', 'mitsuba.set_variant', (['"""gpu_autodiff_rgb"""'], {}), "('gpu_autodiff_rgb')\n", (178, 198), False, 'import mitsuba\n'), ((455, 481), 'mitsuba.core.xml.load_file', 'load_file', (['"""cbox/cbox.xml"""'], {}), "('cbox/cbox.xml')\n", (464, 481), False, 'from mitsuba.core.xml import load_file\n'), ((531, 546), 'mitsuba.python.util.traverse', 'traverse', (['scene'], {}), '(scene)\n', (539, 546), False, 'from mitsuba.python.util import traverse\n'), ((648, 681), 'enoki.set_requires_gradient', 'ek.set_requires_gradient', (['param_0'], {}), '(param_0)\n', (672, 681), True, 'import enoki as ek\n'), ((719, 740), 'mitsuba.python.autodiff.render', 'render', (['scene'], {'spp': '(32)'}), '(scene, spp=32)\n', (725, 740), False, 'from mitsuba.python.autodiff import render, write_bitmap\n'), ((811, 862), 'enoki.set_gradient', 'ek.set_gradient', (['param_0', '[1, 1, 1]'], {'backward': '(False)'}), '(param_0, [1, 1, 1], backward=False)\n', (826, 862), True, 'import enoki as ek\n'), ((914, 929), 'mitsuba.core.Float.forward', 'Float.forward', ([], {}), '()\n', (927, 929), False, 'from mitsuba.core import Thread, Float\n'), ((1001, 1019), 'enoki.gradient', 'ek.gradient', (['image'], {}), '(image)\n', (1012, 1019), True, 'import enoki as ek\n'), ((1119, 1161), 'mitsuba.python.autodiff.write_bitmap', 'write_bitmap', (['fname', 'image_grad', 'crop_size'], {}), '(fname, image_grad, crop_size)\n', (1131, 1161), False, 'from mitsuba.python.autodiff import render, write_bitmap\n'), ((400, 415), 'mitsuba.core.Thread.thread', 'Thread.thread', ([], {}), '()\n', (413, 415), False, 'from mitsuba.core import Thread, Float\n')] |
import os
import torch
from halite_rl.utils.rewards import calc_episode_reward
class HaliteStateActionHDF5Dataset(torch.utils.data.Dataset):
def __init__(self, hdf5_file):
self._hdf5_file = hdf5_file
self._example_paths = []
for episode_name, episode in self._hdf5_file.items():
for step_id in episode.keys():
self._example_paths.append(f"{episode_name}/{step_id}")
self._example_paths.sort()
def __len__(self):
return len(self._example_paths)
def __getitem__(self, idx):
example_path = self._example_paths[idx]
episode_path = os.path.dirname(example_path)
episode = self._hdf5_file[episode_path]
example = self._hdf5_file[example_path]
# ellipsis indexing converts from hdf5 dataset to np.ndarray.
state = example["state"][...]
ship_actions = example["ship_actions"][...]
shipyard_actions = example["shipyard_actions"][...]
state_value = calc_episode_reward(episode.attrs["cur_team_reward"], episode.attrs["other_team_reward"])
return (state, ship_actions, shipyard_actions, state_value)
| [
"os.path.dirname",
"halite_rl.utils.rewards.calc_episode_reward"
] | [((629, 658), 'os.path.dirname', 'os.path.dirname', (['example_path'], {}), '(example_path)\n', (644, 658), False, 'import os\n'), ((998, 1092), 'halite_rl.utils.rewards.calc_episode_reward', 'calc_episode_reward', (["episode.attrs['cur_team_reward']", "episode.attrs['other_team_reward']"], {}), "(episode.attrs['cur_team_reward'], episode.attrs[\n 'other_team_reward'])\n", (1017, 1092), False, 'from halite_rl.utils.rewards import calc_episode_reward\n')] |
#!/usr/local/opt/python@2/bin/python2.7
# -*- coding: utf-8 -*-
# vim: set nospell:
##############################################################################
# TermRecord.py #
# #
# This file can either run the 'script' command as a wrapper, or parse its #
# output with timing information. It produces self-contained or dynamic #
# HTML files capable of replaying the terminal session with just a modern #
# browser. #
# #
# #
# Authors: <NAME> <<EMAIL>> #
# #
# #
# Copyright 2014-2017 <NAME> and licensed under the MIT License. #
# #
##############################################################################
from __future__ import print_function
from argparse import ArgumentParser, FileType
from contextlib import closing
from codecs import open as copen
from json import dumps
from math import ceil
import os
from os.path import basename, dirname, exists, join
from struct import unpack
from subprocess import Popen
from sys import platform, stderr, stdout
from tempfile import NamedTemporaryFile
from termrecord import templated
from jinja2 import FileSystemLoader, Template
from jinja2.environment import Environment
DEFAULT_TEMPLATE = join(templated(), 'static.jinja2')
TTYREC = 'ttyrec'
# don't pause more the MAXPAUSE
# let's us handle gaps when appending ttyrec sessions
MAXPAUSE = 400
# http://blog.taz.net.au/2012/04/09/getting-the-terminal-size-in-python/
def probeDimensions(fd=1):
"""
Returns height and width of current terminal. First tries to get
size via termios.TIOCGWINSZ, then from environment. Defaults to 25
lines x 80 columns if both methods fail.
:param fd: file descriptor (default: 1=stdout)
"""
try:
import fcntl, termios, struct
hw = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
except:
try:
hw = (os.environ['LINES'], os.environ['COLUMNS'])
except:
hw = (24, 80)
return hw
# http://stackoverflow.com/a/8220141/3362361
def testOSX():
return platform == 'darwin'
def escapeString(string):
string = string.encode('unicode_escape').decode('utf-8')
string = string.replace("'", "\\'")
string = '\'' + string + '\''
return string
def runScript(command=None, tempfile=None):
timingfname = None
scriptfname = None
CMD = ['script']
if tempfile:
timingfname = "%s.timing" % str(tempfile)
scriptfname = "%s.log" % str(tempfile)
with open(timingfname, 'w'):
with open(scriptfname, 'w'):
pass
else:
with NamedTemporaryFile(delete=False) as timingf:
with NamedTemporaryFile(delete=False) as scriptf:
timingfname = timingf.name
scriptfname = scriptf.name
CMD.append('-t')
if command:
CMD.append('-c')
CMD.append(command)
CMD.append(scriptfname)
with open(timingfname, 'w') as timingf:
proc = Popen(CMD, stderr=timingf)
proc.wait()
return copen(scriptfname, encoding='utf-8', errors='replace'), \
open(timingfname, 'r')
def runTtyrec(command=None):
scriptfname = None
CMD = ['ttyrec']
with NamedTemporaryFile(delete=False) as scriptf:
scriptfname = scriptf.name
if command:
CMD.append('-e')
CMD.append(command)
CMD.append(scriptfname)
proc = Popen(CMD)
proc.wait()
return open(scriptfname, 'rb')
def getTiming(timef):
timing = None
with closing(timef):
timing = [l.strip().split(' ') for l in timef]
timing = [(int(ceil(float(r[0]) * 1000)), int(r[1])) for r in timing]
return timing
def scriptToJSON(scriptf, timing=None, offset = 0):
ret = []
last = 0
with closing(scriptf):
scriptf.readline() # ignore first header line from script file
for t in timing:
data = escapeString(scriptf.read(t[1]))
adjust = min(t[0] - last, MAXPAUSE)
offset += adjust
last = t[0]
if data == "'exit\\r\\n'":
data = "'\\r\\n'"
ret.append((data, offset))
return dumps(ret)
def parseTtyrec(scriptf):
pos = 0
offset = 0
oldtime = 0
ret = []
with closing(scriptf):
data = scriptf.read()
while pos < len(data):
secs,usecs,amount = unpack('iii', data[pos:pos+12])
pos += 12
timing = int(ceil(secs * 1000 + float(usecs) / 1000))
if oldtime:
offset += min(timing - oldtime, MAXPAUSE)
oldtime = timing
text = escapeString(data[pos:pos+amount].decode(
encoding='utf-8', errors='replace'))
if text == "'exit\\r\\n'":
text = "'\\r\\n'"
ret.append((text, offset))
pos += amount
return dumps(ret)
def renderTemplate(json, dimensions, templatename=None, outfname=None, contents=None):
if contents:
template = Template(contents)
else:
fsl = FileSystemLoader(dirname(templatename), 'utf-8')
e = Environment()
e.loader = fsl
templatename = basename(templatename)
template = e.get_template(templatename)
rendered = template.render(json=json, dimensions=dimensions)
if not outfname:
return rendered
with closing(outfname):
outfname.write(rendered)
if __name__ == '__main__':
argparser = ArgumentParser(description=
'Stores terminal sessions into HTML.')
argparser.add_argument('-b', '--backend', type=str,
choices=['script', 'ttyrec'],
help='use either script or ttyrec', required=False)
argparser.add_argument('-c', '--command', type=str,
help='run a command and quit', required=False)
argparser.add_argument('-d', '--dimensions', type=int,
metavar=('h','w'), nargs=2,
help='dimensions of terminal', required=False)
argparser.add_argument('--json',
help='output only JSON', action='store_true',
required=False)
argparser.add_argument('--js',
help='output only JavaScript', action='store_true',
required=False)
argparser.add_argument('-m', '--template-file', type=str,
default=DEFAULT_TEMPLATE,
help='file to use as HTML template', required=False)
argparser.add_argument('-o', '--output-file', type=FileType('w'),
help='file to output HTML to', required=False)
argparser.add_argument('-s', '--script-file', type=str,
help='script file to parse', required=False)
argparser.add_argument('-t', '--timing-file', type=FileType('r'),
help='timing file to parse', required=False)
argparser.add_argument('--tempfile', type=str,
help='full path for tempfiles (extensions will be added)', required=False)
ns = argparser.parse_args()
backend = ns.backend
command = ns.command
dimensions = ns.dimensions
tmpname = ns.template_file
scriptf = ns.script_file
outf = ns.output_file
timef = ns.timing_file
tempfile = ns.tempfile
json_only = ns.json
js_only = ns.js
isOSX = testOSX()
if not backend:
if isOSX:
backend = TTYREC
else:
backend = 'script'
if backend != TTYREC and ((scriptf and not timef) or \
(timef and not scriptf)):
argparser.error('Both SCRIPT_FILE and TIMING_FILE have to be ' +
'specified together.')
exit(1)
if not json_only and not js_only and tmpname and not exists(tmpname):
stderr.write('Error: Template ("%s") does not exist.\n' % (tmpname))
stderr.write('If you only wanted JSON output, use "--json"\n')
stderr.write('If you only wanted JavaScript output, use "--js"\n')
exit(1)
if not dimensions:
dimensions = probeDimensions() if not scriptf else (24,80)
if not scriptf:
if backend == TTYREC:
scriptf = runTtyrec(command)
else:
scriptf,timef = runScript(command=command, tempfile=tempfile)
else:
if backend == TTYREC:
scriptf = open(scriptf, 'rb')
else:
scriptf = copen(scriptf, encoding='utf-8', errors='replace')
if backend == TTYREC:
json = parseTtyrec(scriptf)
else:
timing = getTiming(timef)
json = scriptToJSON(scriptf, timing)
if json_only:
print(json, file=outf or stdout)
elif js_only:
out = renderTemplate(json, dimensions, None, outf, "var termData = {{ json }};")
if not outf:
print(out)
else:
print("wrote", outf.name)
elif tmpname and outf:
renderTemplate(json, dimensions, tmpname, outf)
elif tmpname:
print(renderTemplate(json, dimensions, tmpname))
| [
"os.path.exists",
"argparse.FileType",
"fcntl.ioctl",
"argparse.ArgumentParser",
"subprocess.Popen",
"json.dumps",
"jinja2.Template",
"tempfile.NamedTemporaryFile",
"sys.stderr.write",
"struct.unpack",
"os.path.dirname",
"contextlib.closing",
"jinja2.environment.Environment",
"termrecord.templated",
"os.path.basename",
"codecs.open"
] | [((1780, 1791), 'termrecord.templated', 'templated', ([], {}), '()\n', (1789, 1791), False, 'from termrecord import templated\n'), ((3991, 4001), 'subprocess.Popen', 'Popen', (['CMD'], {}), '(CMD)\n', (3996, 4001), False, 'from subprocess import Popen\n'), ((4748, 4758), 'json.dumps', 'dumps', (['ret'], {}), '(ret)\n', (4753, 4758), False, 'from json import dumps\n'), ((5477, 5487), 'json.dumps', 'dumps', (['ret'], {}), '(ret)\n', (5482, 5487), False, 'from json import dumps\n'), ((6071, 6136), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Stores terminal sessions into HTML."""'}), "(description='Stores terminal sessions into HTML.')\n", (6085, 6136), False, 'from argparse import ArgumentParser, FileType\n'), ((3565, 3591), 'subprocess.Popen', 'Popen', (['CMD'], {'stderr': 'timingf'}), '(CMD, stderr=timingf)\n', (3570, 3591), False, 'from subprocess import Popen\n'), ((3624, 3678), 'codecs.open', 'copen', (['scriptfname'], {'encoding': '"""utf-8"""', 'errors': '"""replace"""'}), "(scriptfname, encoding='utf-8', errors='replace')\n", (3629, 3678), True, 'from codecs import open as copen\n'), ((3800, 3832), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (3818, 3832), False, 'from tempfile import NamedTemporaryFile\n'), ((4103, 4117), 'contextlib.closing', 'closing', (['timef'], {}), '(timef)\n', (4110, 4117), False, 'from contextlib import closing\n'), ((4358, 4374), 'contextlib.closing', 'closing', (['scriptf'], {}), '(scriptf)\n', (4365, 4374), False, 'from contextlib import closing\n'), ((4852, 4868), 'contextlib.closing', 'closing', (['scriptf'], {}), '(scriptf)\n', (4859, 4868), False, 'from contextlib import closing\n'), ((5612, 5630), 'jinja2.Template', 'Template', (['contents'], {}), '(contents)\n', (5620, 5630), False, 'from jinja2 import FileSystemLoader, Template\n'), ((5716, 5729), 'jinja2.environment.Environment', 'Environment', ([], {}), '()\n', (5727, 5729), False, 'from jinja2.environment import Environment\n'), ((5776, 5798), 'os.path.basename', 'basename', (['templatename'], {}), '(templatename)\n', (5784, 5798), False, 'from os.path import basename, dirname, exists, join\n'), ((5969, 5986), 'contextlib.closing', 'closing', (['outfname'], {}), '(outfname)\n', (5976, 5986), False, 'from contextlib import closing\n'), ((8601, 8667), 'sys.stderr.write', 'stderr.write', (['(\'Error: Template ("%s") does not exist.\\n\' % tmpname)'], {}), '(\'Error: Template ("%s") does not exist.\\n\' % tmpname)\n', (8613, 8667), False, 'from sys import platform, stderr, stdout\n'), ((8678, 8740), 'sys.stderr.write', 'stderr.write', (['"""If you only wanted JSON output, use "--json\\"\n"""'], {}), '(\'If you only wanted JSON output, use "--json"\\n\')\n', (8690, 8740), False, 'from sys import platform, stderr, stdout\n'), ((8749, 8815), 'sys.stderr.write', 'stderr.write', (['"""If you only wanted JavaScript output, use "--js\\"\n"""'], {}), '(\'If you only wanted JavaScript output, use "--js"\\n\')\n', (8761, 8815), False, 'from sys import platform, stderr, stdout\n'), ((2380, 2423), 'fcntl.ioctl', 'fcntl.ioctl', (['fd', 'termios.TIOCGWINSZ', '"""1234"""'], {}), "(fd, termios.TIOCGWINSZ, '1234')\n", (2391, 2423), False, 'import fcntl, termios, struct\n'), ((3191, 3223), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (3209, 3223), False, 'from tempfile import NamedTemporaryFile\n'), ((4963, 4996), 'struct.unpack', 'unpack', (['"""iii"""', 'data[pos:pos + 12]'], {}), "('iii', data[pos:pos + 12])\n", (4969, 4996), False, 'from struct import unpack\n'), ((5672, 5693), 'os.path.dirname', 'dirname', (['templatename'], {}), '(templatename)\n', (5679, 5693), False, 'from os.path import basename, dirname, exists, join\n'), ((7249, 7262), 'argparse.FileType', 'FileType', (['"""w"""'], {}), "('w')\n", (7257, 7262), False, 'from argparse import ArgumentParser, FileType\n'), ((7525, 7538), 'argparse.FileType', 'FileType', (['"""r"""'], {}), "('r')\n", (7533, 7538), False, 'from argparse import ArgumentParser, FileType\n'), ((8576, 8591), 'os.path.exists', 'exists', (['tmpname'], {}), '(tmpname)\n', (8582, 8591), False, 'from os.path import basename, dirname, exists, join\n'), ((9221, 9271), 'codecs.open', 'copen', (['scriptf'], {'encoding': '"""utf-8"""', 'errors': '"""replace"""'}), "(scriptf, encoding='utf-8', errors='replace')\n", (9226, 9271), True, 'from codecs import open as copen\n'), ((3253, 3285), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (3271, 3285), False, 'from tempfile import NamedTemporaryFile\n')] |
from __future__ import print_function
import os
import cv2
import numpy as np
import random as rng
from math import hypot, pi
import time
import collections
path = '/home/robotics-verse/projects/felix/DataSet/doppler/seq1/'
class ContourProperties:
def __init__(self, center, radius, dummy_value=False):
self.center = center
self.rad = radius
self.dummyValue = dummy_value
self.area = pi * (radius ** 2)
def stabilize_doppler(array):
# Initialize count
count = 0
num_past_frames = 15
minimal_occurence_in_percent = 0.7
max_distance = 30
tracking_objects = {}
track_id = 0
# for image in sorted([int(num.split('.')[0]) for num in os.listdir(path)]):
for idx, image in enumerate(array):
# image = str(image) + '.png'
# Point current frame
contour_properties_cur_frame = []
# frame = cv2.imread(os.path.join(path, image))
# frame = cv2.imread(image)
src_gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
src_gray[src_gray > 0] = 255
# cv2.imshow("gray", src_gray)
contours, hierarchy = cv2.findContours(src_gray, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
for i, c in enumerate(contours):
contours_poly = cv2.approxPolyDP(c, 3, True)
cen, rad = cv2.minEnclosingCircle(contours_poly)
if rad < 5:
continue
contour_properties_cur_frame.append(ContourProperties(tuple(int(el) for el in cen), int(rad)))
tracking_objects_copy = tracking_objects.copy()
contour_properties_cur_frame_copy = contour_properties_cur_frame.copy()
for object_id, cp2 in tracking_objects_copy.items():
object_exists = False
for cp in contour_properties_cur_frame_copy:
distance = hypot(cp2[0].center[0] - cp.center[0], cp2[0].center[1] - cp.center[1])
# Update IDs position
if distance < max_distance:
if object_exists:
if tracking_objects[object_id][0].area > cp.area:
continue
else:
contour_properties_cur_frame.append(tracking_objects[object_id].popleft())
tracking_objects[object_id].appendleft(cp)
object_exists = True
if cp in contour_properties_cur_frame:
contour_properties_cur_frame.remove(cp)
# Pop element if it has not been visible recently or add a dummy value
if not object_exists:
if len([el for el in tracking_objects[object_id] if
el.dummyValue]) > num_past_frames * minimal_occurence_in_percent:
tracking_objects.pop(object_id)
else:
tracking_objects[object_id].appendleft(ContourProperties((0, 0), 0, True))
# Add new IDs found
for cp in contour_properties_cur_frame:
tracking_objects[track_id] = collections.deque([cp], maxlen=num_past_frames)
track_id += 1
annotations = np.zeros(image.shape)
for object_id, cp in tracking_objects.items():
if len([el for el in tracking_objects[object_id] if
not el.dummyValue]) < num_past_frames * minimal_occurence_in_percent:
continue
center_point = next(el.center for el in cp if not el.dummyValue)
max_radius = max(el.rad for el in cp if not el.dummyValue)
cv2.circle(annotations, center_point, max_radius, (255, 255, 255), -1)
# cv2.putText(annotations, str(object_id), (center_point[0], center_point[1] - max_radius - 7), 0, 1,
# (0, 0, 255), 2)
# cv2.imshow("annotations", annotations)
# cv2.imshow("Frame", frame)
array[idx] = annotations
count += 1
# key = cv2.waitKey(1)
# if key == 27:
# break
return array
| [
"collections.deque",
"cv2.minEnclosingCircle",
"numpy.zeros",
"cv2.circle",
"cv2.approxPolyDP",
"cv2.cvtColor",
"cv2.findContours",
"math.hypot"
] | [((988, 1027), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (1000, 1027), False, 'import cv2\n'), ((1135, 1205), 'cv2.findContours', 'cv2.findContours', (['src_gray', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(src_gray, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n', (1151, 1205), False, 'import cv2\n'), ((3164, 3185), 'numpy.zeros', 'np.zeros', (['image.shape'], {}), '(image.shape)\n', (3172, 3185), True, 'import numpy as np\n'), ((1276, 1304), 'cv2.approxPolyDP', 'cv2.approxPolyDP', (['c', '(3)', '(True)'], {}), '(c, 3, True)\n', (1292, 1304), False, 'import cv2\n'), ((1328, 1365), 'cv2.minEnclosingCircle', 'cv2.minEnclosingCircle', (['contours_poly'], {}), '(contours_poly)\n', (1350, 1365), False, 'import cv2\n'), ((3067, 3114), 'collections.deque', 'collections.deque', (['[cp]'], {'maxlen': 'num_past_frames'}), '([cp], maxlen=num_past_frames)\n', (3084, 3114), False, 'import collections\n'), ((3582, 3652), 'cv2.circle', 'cv2.circle', (['annotations', 'center_point', 'max_radius', '(255, 255, 255)', '(-1)'], {}), '(annotations, center_point, max_radius, (255, 255, 255), -1)\n', (3592, 3652), False, 'import cv2\n'), ((1839, 1910), 'math.hypot', 'hypot', (['(cp2[0].center[0] - cp.center[0])', '(cp2[0].center[1] - cp.center[1])'], {}), '(cp2[0].center[0] - cp.center[0], cp2[0].center[1] - cp.center[1])\n', (1844, 1910), False, 'from math import hypot, pi\n')] |
"""
The main module for training process
"""
import json
import torch
from torch.nn import Module
from neural_pipeline.data_processor import TrainDataProcessor
from neural_pipeline.utils import FileStructManager, CheckpointsManager
from neural_pipeline.train_config.train_config import TrainConfig
from neural_pipeline.monitoring import MonitorHub, ConsoleMonitor
__all__ = ['Trainer']
class LearningRate:
"""
Basic learning rate class
"""
def __init__(self, value: float):
self._value = value
def value(self) -> float:
"""
Get value of current learning rate
:return: current value
"""
return self._value
def set_value(self, value) -> None:
"""
Set lr value
:param value: lr value
"""
self._value = value
class DecayingLR(LearningRate):
"""
This class provide lr decaying by defined metric value (by :arg:`target_value_clbk`).
If metric value doesn't update minimum after defined number of steps (:arg:`patience`) - lr was decaying
by defined coefficient (:arg:`decay_coefficient`).
:param start_value: start value
:param decay_coefficient: coefficient of decaying
:param patience: steps before decay
:param target_value_clbk: callable, that return target value for lr decaying
"""
def __init__(self, start_value: float, decay_coefficient: float, patience: int, target_value_clbk: callable):
super().__init__(start_value)
self._decay_coefficient = decay_coefficient
self._patience = patience
self._cur_step = 1
self._target_value_clbk = target_value_clbk
self._cur_min_target_val = None
def value(self) -> float:
"""
Get value of current learning rate
:return: learning rate value
"""
metric_val = self._target_value_clbk()
if metric_val is None:
return self._value
if self._cur_min_target_val is None:
self._cur_min_target_val = metric_val
if metric_val < self._cur_min_target_val:
self._cur_step = 1
self._cur_min_target_val = metric_val
if self._cur_step > 0 and (self._cur_step % self._patience) == 0:
self._value *= self._decay_coefficient
self._cur_min_target_val = None
self._cur_step = 1
return self._value
self._cur_step += 1
return self._value
def set_value(self, value):
self._value = value
self._cur_step = 0
self._cur_min_target_val = None
class Trainer:
"""
Class, that run drive process.
Trainer get list of training stages and every epoch loop over it.
Training process looks like:
.. highlight:: python
.. code-block:: python
for epoch in epochs_num:
for stage in training_stages:
stage.run()
monitor_hub.update_metrics(stage.metrics_processor().get_metrics())
save_state()
on_epoch_end_callback()
:param model: model for training
:param train_config: :class:`TrainConfig` object
:param fsm: :class:`FileStructManager` object
:param device: device for training process
"""
class TrainerException(Exception):
def __init__(self, msg):
super().__init__()
self._msg = msg
def __str__(self):
return self._msg
def __init__(self, model: Module, train_config: TrainConfig, fsm: FileStructManager,
device: torch.device = None):
self._fsm = fsm
self.monitor_hub = MonitorHub()
self._checkpoint_manager = CheckpointsManager(self._fsm)
self.__epoch_num = 100
self._resume_from = None
self._on_epoch_end = []
self._best_state_rule = None
self.__train_config = train_config
self._device = device
self._data_processor = TrainDataProcessor(model, self.__train_config, self._device) \
.set_checkpoints_manager(self._checkpoint_manager)
self._lr = LearningRate(self._data_processor.get_lr())
def set_epoch_num(self, epoch_number: int) -> 'Trainer':
"""
Define number of epoch for training. One epoch - one iteration over all train stages
:param epoch_number: number of training epoch
:return: self object
"""
self.__epoch_num = epoch_number
return self
def resume(self, from_best_checkpoint: bool) -> 'Trainer':
"""
Resume train from last checkpoint
:param from_best_checkpoint: is need to continue from best checkpoint
:return: self object
"""
self._resume_from = 'last' if from_best_checkpoint is False else 'best'
return self
def enable_lr_decaying(self, coeff: float, patience: int, target_val_clbk: callable) -> 'Trainer':
"""
Enable rearing rate decaying. Learning rate decay when `target_val_clbk` returns doesn't update
minimum for `patience` steps
:param coeff: lr decay coefficient
:param patience: number of steps
:param target_val_clbk: callback which returns the value that is used for lr decaying
:return: self object
"""
self._lr = DecayingLR(self._data_processor.get_lr(), coeff, patience, target_val_clbk)
return self
def train(self) -> None:
"""
Run training process
"""
if len(self.__train_config.stages()) < 1:
raise self.TrainerException("There's no sages for training")
best_checkpoints_manager = None
cur_best_state = None
if self._best_state_rule is not None:
best_checkpoints_manager = CheckpointsManager(self._fsm, 'best')
start_epoch_idx = 1
if self._resume_from is not None:
start_epoch_idx += self._resume()
self.monitor_hub.add_monitor(ConsoleMonitor())
with self.monitor_hub:
for epoch_idx in range(start_epoch_idx, self.__epoch_num + start_epoch_idx):
self.monitor_hub.set_epoch_num(epoch_idx)
for stage in self.__train_config.stages():
stage.run(self._data_processor)
if stage.metrics_processor() is not None:
self.monitor_hub.update_metrics(stage.metrics_processor().get_metrics())
new_best_state = self._save_state(self._checkpoint_manager, best_checkpoints_manager, cur_best_state, epoch_idx)
if new_best_state is not None:
cur_best_state = new_best_state
self._data_processor.update_lr(self._lr.value())
for clbk in self._on_epoch_end:
clbk()
self._update_losses()
self.__iterate_by_stages(lambda s: s.on_epoch_end())
def _resume(self) -> int:
if self._resume_from == 'last':
ckpts_manager = self._checkpoint_manager
elif self._checkpoint_manager == 'best':
ckpts_manager = CheckpointsManager(self._fsm, 'best')
else:
raise NotImplementedError("Resume parameter may be only 'last' or 'best' not {}".format(self._resume_from))
ckpts_manager.unpack()
self._data_processor.load()
with open(ckpts_manager.trainer_file(), 'r') as file:
start_epoch_idx = json.load(file)['last_epoch'] + 1
ckpts_manager.pack()
return start_epoch_idx
def _save_state(self, ckpts_manager: CheckpointsManager, best_ckpts_manager: CheckpointsManager or None,
cur_best_state: float or None, epoch_idx: int) -> float or None:
"""
Internal method used for save states after epoch end
:param ckpts_manager: ordinal checkpoints manager
:param best_ckpts_manager: checkpoints manager, used for store best stages
:param cur_best_state: current best stage metric value
:return: new best stage metric value or None if it not update
"""
def save_trainer(ckp_manager):
with open(ckp_manager.trainer_file(), 'w') as out:
json.dump({'last_epoch': epoch_idx}, out)
if self._best_state_rule is not None:
new_best_state = self._best_state_rule()
if cur_best_state is None:
self._data_processor.save_state()
save_trainer(ckpts_manager)
ckpts_manager.pack()
return new_best_state
else:
if new_best_state <= cur_best_state:
self._data_processor.set_checkpoints_manager(best_ckpts_manager)
self._data_processor.save_state()
save_trainer(best_ckpts_manager)
best_ckpts_manager.pack()
self._data_processor.set_checkpoints_manager(ckpts_manager)
return new_best_state
self._data_processor.save_state()
save_trainer(ckpts_manager)
ckpts_manager.pack()
return None
def _update_losses(self) -> None:
"""
Update loses procedure
"""
losses = {}
for stage in self.__train_config.stages():
if stage.get_losses() is not None:
losses[stage.name()] = stage.get_losses()
self.monitor_hub.update_losses(losses)
def data_processor(self) -> TrainDataProcessor:
"""
Get data processor object
:return: data processor
"""
return self._data_processor
def enable_best_states_saving(self, rule: callable) -> 'Trainer':
"""
Enable best states saving
Best stages will save when return of `rule` update minimum
:param rule: callback which returns the value that is used for define when need store best metric
:return: self object
"""
self._best_state_rule = rule
return self
def disable_best_states_saving(self) -> 'Trainer':
"""
Enable best states saving
:return: self object
"""
self._best_state_rule = None
return self
def add_on_epoch_end_callback(self, callback: callable) -> 'Trainer':
"""
Add callback, that will be called after every epoch end
:param callback: method, that will be called. This method may not get any parameters
:return: self object
"""
self._on_epoch_end.append(callback)
return self
def __iterate_by_stages(self, func: callable) -> None:
"""
Internal method, that used for iterate by stages
:param func: callback, that calls for every stage
"""
for stage in self.__train_config.stages():
func(stage)
| [
"neural_pipeline.monitoring.MonitorHub",
"neural_pipeline.monitoring.ConsoleMonitor",
"json.load",
"neural_pipeline.utils.CheckpointsManager",
"neural_pipeline.data_processor.TrainDataProcessor",
"json.dump"
] | [((3632, 3644), 'neural_pipeline.monitoring.MonitorHub', 'MonitorHub', ([], {}), '()\n', (3642, 3644), False, 'from neural_pipeline.monitoring import MonitorHub, ConsoleMonitor\n'), ((3681, 3710), 'neural_pipeline.utils.CheckpointsManager', 'CheckpointsManager', (['self._fsm'], {}), '(self._fsm)\n', (3699, 3710), False, 'from neural_pipeline.utils import FileStructManager, CheckpointsManager\n'), ((5754, 5791), 'neural_pipeline.utils.CheckpointsManager', 'CheckpointsManager', (['self._fsm', '"""best"""'], {}), "(self._fsm, 'best')\n", (5772, 5791), False, 'from neural_pipeline.utils import FileStructManager, CheckpointsManager\n'), ((5946, 5962), 'neural_pipeline.monitoring.ConsoleMonitor', 'ConsoleMonitor', ([], {}), '()\n', (5960, 5962), False, 'from neural_pipeline.monitoring import MonitorHub, ConsoleMonitor\n'), ((3950, 4010), 'neural_pipeline.data_processor.TrainDataProcessor', 'TrainDataProcessor', (['model', 'self.__train_config', 'self._device'], {}), '(model, self.__train_config, self._device)\n', (3968, 4010), False, 'from neural_pipeline.data_processor import TrainDataProcessor\n'), ((7094, 7131), 'neural_pipeline.utils.CheckpointsManager', 'CheckpointsManager', (['self._fsm', '"""best"""'], {}), "(self._fsm, 'best')\n", (7112, 7131), False, 'from neural_pipeline.utils import FileStructManager, CheckpointsManager\n'), ((8194, 8235), 'json.dump', 'json.dump', (["{'last_epoch': epoch_idx}", 'out'], {}), "({'last_epoch': epoch_idx}, out)\n", (8203, 8235), False, 'import json\n'), ((7426, 7441), 'json.load', 'json.load', (['file'], {}), '(file)\n', (7435, 7441), False, 'import json\n')] |
"""Library of functions to support the Evaluator class."""
import numpy as np
from numpy.random import default_rng
from tqdm import tqdm
from ensemble_uncertainties.constants import RANDOM_SEED
def use_tqdm(iterable, use):
"""Returns tqdm(iterable) if use is True, else iterable."""
if use:
return tqdm(iterable)
else:
return iterable
def format_time_elapsed(time_elapsed):
"""Formats the elapsed time to a more human readable output.
Parameters
----------
time_elapsed : datetime.timedelta
The elapsed time
Returns
-------
str
A string where hours, minutes and seconds are separated.
"""
took = format(time_elapsed).split('.')[0]
hours, minutes, seconds = took.split(':')
display = ''
if int(hours) > 0:
display += f'{hours} Hour(s). '
if int(minutes) > 0:
display += f'{int(minutes)} Minute(s). '
if int(seconds) > 0:
display += f'{int(seconds)} Second(s). '
if display:
return display
else:
return '< 1 Second.'
def make_columns(repetitions, n_splits):
"""Provides column names for the tables of all predictions (from each
split of each rep).
Parameters
----------
repetitions : int
Repetitions of the n-fold validation
n_splits : int
number of splits in KFold
Returns
-------
list
List of the column names as strings
"""
reps = range(repetitions)
splits = range(n_splits)
pre_cols = [[f'rep{i}_split{j}' for j in splits] for i in reps]
cols = [item for sublist in pre_cols for item in sublist]
return cols
def make_array(dim):
"""Makes an empty table (dim[0] x dim[1]) to store objects into."""
return np.empty(dim, dtype=object)
def random_int32(size, seed=RANDOM_SEED):
"""Returns a <size>-long array of unique random 0+ 32-bit integers."""
generator = default_rng(seed=seed)
ints = generator.choice(2**32-1, size=size, replace=False)
return ints
def print_summary(overall_run_time, metric_name, train_quality, test_quality):
"""Show train performance, test performance, and over all runtime.
Parameters
----------
overall_run_time : datetime.timedelta
The elapsed time
metric_name : str
Name of the evaluation metric
train_quality : float
Train performance, measured in the provided metric
test_quality : float
Test performance, measured in the provided metric
"""
took = format_time_elapsed(overall_run_time)
print()
print(f'Ensemble train {metric_name}:', end=' ')
print(f'{train_quality:.3f}')
print(f'Ensemble test {metric_name}:', end=' ')
print(f'{test_quality:.3f}')
print(f'Overall runtime: {took}')
print()
| [
"tqdm.tqdm",
"numpy.empty",
"numpy.random.default_rng"
] | [((1763, 1790), 'numpy.empty', 'np.empty', (['dim'], {'dtype': 'object'}), '(dim, dtype=object)\n', (1771, 1790), True, 'import numpy as np\n'), ((1926, 1948), 'numpy.random.default_rng', 'default_rng', ([], {'seed': 'seed'}), '(seed=seed)\n', (1937, 1948), False, 'from numpy.random import default_rng\n'), ((321, 335), 'tqdm.tqdm', 'tqdm', (['iterable'], {}), '(iterable)\n', (325, 335), False, 'from tqdm import tqdm\n')] |
# asks the user qustions and saves the answers to
# the input folder of the chatbot.
# useful during the chatbot creation.
import os.path
from helper_funcs import get_random_element_of_list
from linguistic_engine import get_faq_tag
path2questions = "res/interview_qs.txt"
path2answers = "input_texts/interview_as.txt"
faq_tag = get_faq_tag()
def add_additional_answer(answer, question):
with open(path2answers) as aq:
aq_strings = aq.readlines()
aq_strings.pop(0) # delete tag
for i in range(len(aq_strings)):
aq_strings[i] = aq_strings[i].strip()
# print(aq_strings)
q_index = aq_strings.index("Q: " + question)
# print(q_index)
aq_strings.insert(q_index + 1, "A: " + answer)
# print(aq_strings)
with open(path2answers, 'w') as answers_file:
answers_file.write(faq_tag + "\n")
for fi in range(len(aq_strings)):
answers_file.write(aq_strings[fi] + "\n")
def load_answered_qs(answers_path):
answered = []
if os.path.isfile(answers_path):
with open(answers_path) as aq:
aq_strings = aq.readlines()
for i in range(len(aq_strings)):
st = aq_strings[i].strip()
if st[0:2] == "Q:":
answered.append(st[3:])
else:
print("no such file as", answers_path)
return answered
def save_answer(answer, question, first_answer7):
answered_l = load_answered_qs(path2answers)
already_answered7 = False
if question in answered_l:
already_answered7 = True
if already_answered7:
add_additional_answer(answer, question)
else: # append to the end of file
with open(path2answers, 'a') as f:
if first_answer7:
f.write(faq_tag + "\n")
f.write("Q: " + question + "\nA: " + answer + "\n")
def select_question(all_questions):
# try to find an unanswered
answered = load_answered_qs(path2answers)
print("\nAnswered:", len(answered), "of", len(all_questions), "\n")
found = False
c = 0
good_q_s = ""
while (found is False) and (c < len(all_questions)):
test_s = all_questions[c]
# print(test_s)
if test_s not in answered:
found = True
good_q_s = test_s
c += 1
# print(found)
if found is False: # all are answered. choose a random one
good_q_s = get_random_element_of_list(all_questions)
return good_q_s, found
# load questions
# with open(path2questions) as f:
# q_strings = f.readlines()
import questions_unpack as qun
q_strings = qun.recover("to_memory", path2questions)
# print("-------\nloaded these many questions:", len(q_strings))
for i in range(len(q_strings)):
temp_s = q_strings[i].strip()
q_strings[i] = temp_s
# check if some answers are already answered
first_answer7 = True
answered_l = []
if os.path.isfile(path2answers):
first_answer7 = False
answered_l = load_answered_qs(path2answers)
# print(answered_l)
# main q&a routine
print(
"this is a helper program. it helps to improve the bot. it asks the user (you) to answer some common questions. answers are immediately saved. you can exit at any time by typing exit and then pressing enter. \n")
answer = ""
q_count = 0
notified_about_all_ansered7 = False
while True:
question, unanswered_exist7 = select_question(q_strings)
if notified_about_all_ansered7 == False and unanswered_exist7 == False:
print(
"\n_congratulations, you've answered all questions. you can exit, or provide additional answers' variants to the same questions. the more variants there are, the more natural thr conversation will sound. be creative! \n")
notified_about_all_ansered7 = True
answer = input(question + "\n")
if answer != "exit":
save_answer(answer, question, first_answer7)
if q_count >= 0:
first_answer7 = False
q_count += 1
else:
break
| [
"helper_funcs.get_random_element_of_list",
"linguistic_engine.get_faq_tag",
"questions_unpack.recover"
] | [((332, 345), 'linguistic_engine.get_faq_tag', 'get_faq_tag', ([], {}), '()\n', (343, 345), False, 'from linguistic_engine import get_faq_tag\n'), ((2600, 2640), 'questions_unpack.recover', 'qun.recover', (['"""to_memory"""', 'path2questions'], {}), "('to_memory', path2questions)\n", (2611, 2640), True, 'import questions_unpack as qun\n'), ((2402, 2443), 'helper_funcs.get_random_element_of_list', 'get_random_element_of_list', (['all_questions'], {}), '(all_questions)\n', (2428, 2443), False, 'from helper_funcs import get_random_element_of_list\n')] |
from argparse import ArgumentParser, FileType
from json import loads, dumps
from tkinter import Menu
import threading, time, requests
import os
from tkgui.utils import startFile, Backend, thunkifySync
from tkgui.utils import guiCodegen as c
Backend.TTk.use()
from tkgui.ui import TkGUI, TkWin, nop, Timeout, callThreadSafe, delay, runAsync, rescueWidgetOption, bindYScrollBar, bindXScrollBar
from tkgui.widgets import MenuItem, TreeWidget
import tkgui.widgets as _
app = ArgumentParser(prog="hachi-groups", description="GUI tool for recording lyric sentences with hachi")
app.add_argument("music", type=FileType("r"), nargs="*", help="music BGM to play")
app.add_argument("-seek-minus", type=float, default=3.0, help="back-seek before playing the sentence")
app.add_argument("-mix-multi", action="store_true", default=False, help="give multi-track mix")
app.add_argument("-o", type=str, default="mix.mid", help="mixed output file")
app.add_argument("-replay", type=FileType("r"), default=None, help="MIDI File to replay")
app.add_argument("-import", type=str, default=None, help="import a sentence list")
#GUI: ($lyric @ $n s .Rec-Edit .Play)[] (input-lyric @ input-n s .Add .Remove_Last) (input-JSON .Mix .Delete .Export) (-) ($music) (slider-volume)
rescueWidgetOption["relief"] = lambda _: None
class GUI(TkGUI):
def __init__(self):
super().__init__()
z = self.shorthand
self.a=z.var(str, "some"); self.b=z.var(bool); self.c=z.var(int)
c.getAttr(self, "a"); c.getAttr(self, "b"); c.getAttr(self, "c")
def up(self):
self.a.set("wtf")
self.ui.removeChild(self.ui.lastChild)
a=GUI.ThreadDemo()
a.run("Thread Demo", compile_binding={"GUI_ThreadDemo":a})
def pr(self):
print(self.c.get())
self.ui.removeChild(self.ui.childs[5])
def addChild(self): self.ui.appendChild(_.text("hhh"))
def layout(self):
z = self.shorthand
return _.verticalLayout(
_.button("Yes", self.quit),
_.text(self.a),
_.button("Change", self.up),
_.horizontalLayout(_.text("ex"), _.text("wtf"), _.button("emmm",self.addChild), _.text("aa")),
_.input("hel"),
_.separator(),
_.withScroll(z.vert, z.by("ta", _.textarea("wtf"))),
z.by("ah", _.text("ah")),
_.checkBox("Some", self.b),
_.horizontalLayout(_.radioButton("Wtf", self.c, 1, self.pr), _.radioButton("emm", self.c, 2, self.pr)),
_.horizontalLayout(
z.by("sbar", _.scrollBar(z.vert)),
_.verticalLayout(
z.by("lbox", _.listBox(("1 2 3 apple juicy lamb clamp banana "*20).split(" "), z.chooseMulti)),
z.by("hsbar", _.scrollBar(z.hor))
)
),
_.withScroll(z.both, z.by("box", _.listBox(("1 2 3 apple juicy lamb clamp banana "*20).split(" ")))),
_.comboBox(self.a, "hello cruel world".split(" ")),
_.spinBox(range(0, 100+1, 10)),
_.slider(range(0, 100+1, 2), orient=z.hor),
_.button("hello", self.run1),
_.button("split", self.run2),
_.menuButton("kind", _.menu(MenuItem.CheckBox("wtf", self.b), MenuItem.RadioButton("emm", self.c, 9)), relief=z.raised),
_.labeledBox("emmm", _.button("Dangerous", self.run3))
)
def run1(self): GUI.Layout1().run("Hello", compile_binding={"GUI":GUI})
def run2(self): GUI.SplitWin().run("Split", compile_binding={})
def run3(self): print(self.ta.marker["insert"])
def setup(self):
z = self.shorthand
bindYScrollBar(self.lbox, self.sbar)
bindXScrollBar(self.lbox, self.hsbar)
themes = self.listThemes()
themez = iter(themes)
self.ah["text"] = ",".join(themes)
def nextTheme(event):
nonlocal themez
try: self.theme = next(themez)
except StopIteration:
themez = iter(themes)
self.ah.bind(z.Events.click, nextTheme)
self.ah.bind(z.Events.mouseR, z.makeMenuPopup(_.menu(*[MenuItem.named(it, nop) for it in "Cut Copy Paste Reload".split(" ")], MenuItem.sep, MenuItem.named("Rename", nop))))
self.initLooper()
class Layout1(TkWin):
def layout(self):
menubar = _.menu(self.tk,
MenuItem.named("New", nop),
MenuItem.named("Open", GUI.Layout1.run1),
MenuItem.SubMenu("Help", [MenuItem.named("Index...", nop), MenuItem.sep, MenuItem.named("About", nop)])
) # probably bug: menu (label='Open', command=GUI.Layout1.run1) works no matter command is correct or not
# possible: win.tk uses attribute assign(when getCode() ) bound to created menu and it's reused
self.setMenu(menubar)
self.setSizeBounds((200,100))
z = self.shorthand
return _.verticalLayout(
_.text("Hello world"),
z.by("can", _.canvas((250, 300)))
)
@staticmethod
def run1(): GUI.DoNothing().run("x", compile_binding={})
def setup(self):
self.addSizeGrip()
self.can["bg"] = "blue"
coord = (10, 50, 240, 210)
self.can.create_arc(coord, start=0, extent=150, fill="red")
class SplitWin(TkWin):
def layout(self):
z = self.shorthand
return _.withFill(_.splitter(z.hor,
_.text("left pane"),
_.splitter(z.vert,
_.text("top pane"),
_.text("bottom pane")
)
))
class DoNothing(TkWin):
def __init__(self):
super().__init__()
self.nodes = dict()
self.ftv:TreeWidget
def layout(self):
z = self.shorthand
return _.withFill(_.tabWidget(
("Tab 1", _.text("a")),
("Tab 2", _.verticalLayout(_.text("Lets dive into the world of computers"))),
("TabTree", z.by("tv", _.treeWidget())),
("File Man", z.by("ftv", _.treeWidget()))
))
def setup(self):
self.tv.makeTree(["Name", "Desc"], [
"GeeksforGeeks",
("Computer Science", [
["Algorithm", "too hard"],
["Data structure", "just right"]
]),
("GATE papers", [
"2018", "2019"
]),
("Programming Languages", [
"Python", "Java"
])
])
self.tv.item("GATE papers").moveTo("GeeksforGeeks")
abspath = os.path.abspath(".")
self.ftv.makeTree(["Project tree"], [])
self.insertNode(self.ftv.rootItem, abspath, abspath)
self.ftv.on(TreeWidget.open, self.openNode)
def insertNode(self, parent:TreeWidget.TreeItem, text, abspath):
node = parent.addChild((text,))
if os.path.isdir(abspath):
self.nodes[node[0]] = abspath
node.addChild((None,))
def openNode(self, event):
node = self.ftv.focusItem
abspath = self.nodes.pop(node[0], None) # This don't work for same-name opens, use multi-map (key-values) or multi column can fix this.
if abspath:
print(abspath)
node.removeChilds()
for p in os.listdir(abspath):
self.insertNode(node, p, os.path.join(abspath, p))
else: startFile(node[0])
class ThreadDemo(TkWin):
def __init__(self):
super().__init__()
self.ta = None
z = self.shorthand
self.active = z.var(str)
self.confirmed = z.var(str)
def layout(self):
z = self.shorthand
return _.verticalLayout(
z.by("ta", _.textarea()),
_.createLayout(z.hor, 0, [_.text("Total active cases: ~"), _.text(self.active)]),
_.createLayout(z.vert, 0, [_.text("Total confirmed cases:"), _.text(self.confirmed)]),
_.button("Refresh", self.on_refresh)
)
url = "https://api.covid19india.org/data.json"
def on_refresh(self):
runAsync(thunkifySync(requests.get, self.url), self.on_refreshed)
runAsync(delay(1000), lambda ms: self.ta.insert("end", "233"))
def on_refreshed(self, page):
data = loads(page.text)
#print(data)
self.active.set(data["statewise"][0]["active"])
self.confirmed.set(data["statewise"][0]["confirmed"])
self.btn_refresh["text"] = "Data refreshed"
def setup(self):
self.setSizeBounds((220, 70))
threading.Thread(target=self.thread_target).start()
def thread_target(self):
callThreadSafe(lambda: self.setSize(self.size, (0,0)))
def addText(text): callThreadSafe(lambda: self.ta.insert("end", text))
addText('doing things...\n')
time.sleep(1)
addText('doing more things...\n')
time.sleep(2)
addText('done')
from sys import argv
from tkgui.utils import Codegen
def main(args = argv[1:]):
cfg = app.parse_args(args)
gui = GUI()
Codegen.useDebug = True
gui.run("Widget Factory", compile_binding={"GUI":gui, "TkGUI":gui})
if __name__ == "__main__": main()
| [
"tkgui.widgets.treeWidget",
"tkgui.widgets.canvas",
"tkgui.widgets.MenuItem.CheckBox",
"time.sleep",
"tkgui.utils.Backend.TTk.use",
"tkgui.widgets.checkBox",
"tkgui.utils.thunkifySync",
"tkgui.widgets.input",
"os.listdir",
"tkgui.widgets.scrollBar",
"argparse.ArgumentParser",
"tkgui.widgets.MenuItem.RadioButton",
"os.path.isdir",
"tkgui.widgets.radioButton",
"tkgui.utils.guiCodegen.getAttr",
"tkgui.widgets.textarea",
"argparse.FileType",
"json.loads",
"tkgui.widgets.separator",
"tkgui.utils.startFile",
"tkgui.ui.delay",
"tkgui.widgets.button",
"tkgui.ui.bindXScrollBar",
"os.path.abspath",
"tkgui.widgets.text",
"os.path.join",
"tkgui.ui.bindYScrollBar",
"tkgui.widgets.MenuItem.named",
"threading.Thread"
] | [((244, 261), 'tkgui.utils.Backend.TTk.use', 'Backend.TTk.use', ([], {}), '()\n', (259, 261), False, 'from tkgui.utils import startFile, Backend, thunkifySync\n'), ((476, 581), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'prog': '"""hachi-groups"""', 'description': '"""GUI tool for recording lyric sentences with hachi"""'}), "(prog='hachi-groups', description=\n 'GUI tool for recording lyric sentences with hachi')\n", (490, 581), False, 'from argparse import ArgumentParser, FileType\n'), ((608, 621), 'argparse.FileType', 'FileType', (['"""r"""'], {}), "('r')\n", (616, 621), False, 'from argparse import ArgumentParser, FileType\n'), ((970, 983), 'argparse.FileType', 'FileType', (['"""r"""'], {}), "('r')\n", (978, 983), False, 'from argparse import ArgumentParser, FileType\n'), ((1464, 1484), 'tkgui.utils.guiCodegen.getAttr', 'c.getAttr', (['self', '"""a"""'], {}), "(self, 'a')\n", (1473, 1484), True, 'from tkgui.utils import guiCodegen as c\n'), ((1486, 1506), 'tkgui.utils.guiCodegen.getAttr', 'c.getAttr', (['self', '"""b"""'], {}), "(self, 'b')\n", (1495, 1506), True, 'from tkgui.utils import guiCodegen as c\n'), ((1508, 1528), 'tkgui.utils.guiCodegen.getAttr', 'c.getAttr', (['self', '"""c"""'], {}), "(self, 'c')\n", (1517, 1528), True, 'from tkgui.utils import guiCodegen as c\n'), ((3404, 3440), 'tkgui.ui.bindYScrollBar', 'bindYScrollBar', (['self.lbox', 'self.sbar'], {}), '(self.lbox, self.sbar)\n', (3418, 3440), False, 'from tkgui.ui import TkGUI, TkWin, nop, Timeout, callThreadSafe, delay, runAsync, rescueWidgetOption, bindYScrollBar, bindXScrollBar\n'), ((3445, 3482), 'tkgui.ui.bindXScrollBar', 'bindXScrollBar', (['self.lbox', 'self.hsbar'], {}), '(self.lbox, self.hsbar)\n', (3459, 3482), False, 'from tkgui.ui import TkGUI, TkWin, nop, Timeout, callThreadSafe, delay, runAsync, rescueWidgetOption, bindYScrollBar, bindXScrollBar\n'), ((1821, 1834), 'tkgui.widgets.text', '_.text', (['"""hhh"""'], {}), "('hhh')\n", (1827, 1834), True, 'import tkgui.widgets as _\n'), ((1914, 1940), 'tkgui.widgets.button', '_.button', (['"""Yes"""', 'self.quit'], {}), "('Yes', self.quit)\n", (1922, 1940), True, 'import tkgui.widgets as _\n'), ((1948, 1962), 'tkgui.widgets.text', '_.text', (['self.a'], {}), '(self.a)\n', (1954, 1962), True, 'import tkgui.widgets as _\n'), ((1970, 1997), 'tkgui.widgets.button', '_.button', (['"""Change"""', 'self.up'], {}), "('Change', self.up)\n", (1978, 1997), True, 'import tkgui.widgets as _\n'), ((2106, 2120), 'tkgui.widgets.input', '_.input', (['"""hel"""'], {}), "('hel')\n", (2113, 2120), True, 'import tkgui.widgets as _\n'), ((2128, 2141), 'tkgui.widgets.separator', '_.separator', ([], {}), '()\n', (2139, 2141), True, 'import tkgui.widgets as _\n'), ((2240, 2266), 'tkgui.widgets.checkBox', '_.checkBox', (['"""Some"""', 'self.b'], {}), "('Some', self.b)\n", (2250, 2266), True, 'import tkgui.widgets as _\n'), ((2908, 2936), 'tkgui.widgets.button', '_.button', (['"""hello"""', 'self.run1'], {}), "('hello', self.run1)\n", (2916, 2936), True, 'import tkgui.widgets as _\n'), ((2944, 2972), 'tkgui.widgets.button', '_.button', (['"""split"""', 'self.run2'], {}), "('split', self.run2)\n", (2952, 2972), True, 'import tkgui.widgets as _\n'), ((6033, 6053), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (6048, 6053), False, 'import os\n'), ((6325, 6347), 'os.path.isdir', 'os.path.isdir', (['abspath'], {}), '(abspath)\n', (6338, 6347), False, 'import os\n'), ((7625, 7641), 'json.loads', 'loads', (['page.text'], {}), '(page.text)\n', (7630, 7641), False, 'from json import loads, dumps\n'), ((8148, 8161), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (8158, 8161), False, 'import threading, time, requests\n'), ((8208, 8221), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (8218, 8221), False, 'import threading, time, requests\n'), ((2024, 2036), 'tkgui.widgets.text', '_.text', (['"""ex"""'], {}), "('ex')\n", (2030, 2036), True, 'import tkgui.widgets as _\n'), ((2038, 2051), 'tkgui.widgets.text', '_.text', (['"""wtf"""'], {}), "('wtf')\n", (2044, 2051), True, 'import tkgui.widgets as _\n'), ((2053, 2084), 'tkgui.widgets.button', '_.button', (['"""emmm"""', 'self.addChild'], {}), "('emmm', self.addChild)\n", (2061, 2084), True, 'import tkgui.widgets as _\n'), ((2085, 2097), 'tkgui.widgets.text', '_.text', (['"""aa"""'], {}), "('aa')\n", (2091, 2097), True, 'import tkgui.widgets as _\n'), ((2219, 2231), 'tkgui.widgets.text', '_.text', (['"""ah"""'], {}), "('ah')\n", (2225, 2231), True, 'import tkgui.widgets as _\n'), ((2293, 2333), 'tkgui.widgets.radioButton', '_.radioButton', (['"""Wtf"""', 'self.c', '(1)', 'self.pr'], {}), "('Wtf', self.c, 1, self.pr)\n", (2306, 2333), True, 'import tkgui.widgets as _\n'), ((2335, 2375), 'tkgui.widgets.radioButton', '_.radioButton', (['"""emm"""', 'self.c', '(2)', 'self.pr'], {}), "('emm', self.c, 2, self.pr)\n", (2348, 2375), True, 'import tkgui.widgets as _\n'), ((3128, 3160), 'tkgui.widgets.button', '_.button', (['"""Dangerous"""', 'self.run3'], {}), "('Dangerous', self.run3)\n", (3136, 3160), True, 'import tkgui.widgets as _\n'), ((4052, 4078), 'tkgui.widgets.MenuItem.named', 'MenuItem.named', (['"""New"""', 'nop'], {}), "('New', nop)\n", (4066, 4078), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((4088, 4128), 'tkgui.widgets.MenuItem.named', 'MenuItem.named', (['"""Open"""', 'GUI.Layout1.run1'], {}), "('Open', GUI.Layout1.run1)\n", (4102, 4128), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((4586, 4607), 'tkgui.widgets.text', '_.text', (['"""Hello world"""'], {}), "('Hello world')\n", (4592, 4607), True, 'import tkgui.widgets as _\n'), ((6709, 6728), 'os.listdir', 'os.listdir', (['abspath'], {}), '(abspath)\n', (6719, 6728), False, 'import os\n'), ((6803, 6821), 'tkgui.utils.startFile', 'startFile', (['node[0]'], {}), '(node[0])\n', (6812, 6821), False, 'from tkgui.utils import startFile, Backend, thunkifySync\n'), ((7314, 7350), 'tkgui.widgets.button', '_.button', (['"""Refresh"""', 'self.on_refresh'], {}), "('Refresh', self.on_refresh)\n", (7322, 7350), True, 'import tkgui.widgets as _\n'), ((7451, 7487), 'tkgui.utils.thunkifySync', 'thunkifySync', (['requests.get', 'self.url'], {}), '(requests.get, self.url)\n', (7463, 7487), False, 'from tkgui.utils import startFile, Backend, thunkifySync\n'), ((7523, 7534), 'tkgui.ui.delay', 'delay', (['(1000)'], {}), '(1000)\n', (7528, 7534), False, 'from tkgui.ui import TkGUI, TkWin, nop, Timeout, callThreadSafe, delay, runAsync, rescueWidgetOption, bindYScrollBar, bindXScrollBar\n'), ((2181, 2198), 'tkgui.widgets.textarea', '_.textarea', (['"""wtf"""'], {}), "('wtf')\n", (2191, 2198), True, 'import tkgui.widgets as _\n'), ((2425, 2444), 'tkgui.widgets.scrollBar', '_.scrollBar', (['z.vert'], {}), '(z.vert)\n', (2436, 2444), True, 'import tkgui.widgets as _\n'), ((3008, 3040), 'tkgui.widgets.MenuItem.CheckBox', 'MenuItem.CheckBox', (['"""wtf"""', 'self.b'], {}), "('wtf', self.b)\n", (3025, 3040), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((3042, 3080), 'tkgui.widgets.MenuItem.RadioButton', 'MenuItem.RadioButton', (['"""emm"""', 'self.c', '(9)'], {}), "('emm', self.c, 9)\n", (3062, 3080), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((3910, 3939), 'tkgui.widgets.MenuItem.named', 'MenuItem.named', (['"""Rename"""', 'nop'], {}), "('Rename', nop)\n", (3924, 3939), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((4629, 4649), 'tkgui.widgets.canvas', '_.canvas', (['(250, 300)'], {}), '((250, 300))\n', (4637, 4649), True, 'import tkgui.widgets as _\n'), ((5035, 5054), 'tkgui.widgets.text', '_.text', (['"""left pane"""'], {}), "('left pane')\n", (5041, 5054), True, 'import tkgui.widgets as _\n'), ((7106, 7118), 'tkgui.widgets.textarea', '_.textarea', ([], {}), '()\n', (7116, 7118), True, 'import tkgui.widgets as _\n'), ((7888, 7931), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.thread_target'}), '(target=self.thread_target)\n', (7904, 7931), False, 'import threading, time, requests\n'), ((2606, 2624), 'tkgui.widgets.scrollBar', '_.scrollBar', (['z.hor'], {}), '(z.hor)\n', (2617, 2624), True, 'import tkgui.widgets as _\n'), ((4164, 4195), 'tkgui.widgets.MenuItem.named', 'MenuItem.named', (['"""Index..."""', 'nop'], {}), "('Index...', nop)\n", (4178, 4195), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((4211, 4239), 'tkgui.widgets.MenuItem.named', 'MenuItem.named', (['"""About"""', 'nop'], {}), "('About', nop)\n", (4225, 4239), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((5093, 5111), 'tkgui.widgets.text', '_.text', (['"""top pane"""'], {}), "('top pane')\n", (5099, 5111), True, 'import tkgui.widgets as _\n'), ((5123, 5144), 'tkgui.widgets.text', '_.text', (['"""bottom pane"""'], {}), "('bottom pane')\n", (5129, 5144), True, 'import tkgui.widgets as _\n'), ((5393, 5404), 'tkgui.widgets.text', '_.text', (['"""a"""'], {}), "('a')\n", (5399, 5404), True, 'import tkgui.widgets as _\n'), ((6765, 6789), 'os.path.join', 'os.path.join', (['abspath', 'p'], {}), '(abspath, p)\n', (6777, 6789), False, 'import os\n'), ((7155, 7186), 'tkgui.widgets.text', '_.text', (['"""Total active cases: ~"""'], {}), "('Total active cases: ~')\n", (7161, 7186), True, 'import tkgui.widgets as _\n'), ((7188, 7207), 'tkgui.widgets.text', '_.text', (['self.active'], {}), '(self.active)\n', (7194, 7207), True, 'import tkgui.widgets as _\n'), ((7246, 7278), 'tkgui.widgets.text', '_.text', (['"""Total confirmed cases:"""'], {}), "('Total confirmed cases:')\n", (7252, 7278), True, 'import tkgui.widgets as _\n'), ((7280, 7302), 'tkgui.widgets.text', '_.text', (['self.confirmed'], {}), '(self.confirmed)\n', (7286, 7302), True, 'import tkgui.widgets as _\n'), ((3825, 3848), 'tkgui.widgets.MenuItem.named', 'MenuItem.named', (['it', 'nop'], {}), '(it, nop)\n', (3839, 3848), False, 'from tkgui.widgets import MenuItem, TreeWidget\n'), ((5442, 5489), 'tkgui.widgets.text', '_.text', (['"""Lets dive into the world of computers"""'], {}), "('Lets dive into the world of computers')\n", (5448, 5489), True, 'import tkgui.widgets as _\n'), ((5524, 5538), 'tkgui.widgets.treeWidget', '_.treeWidget', ([], {}), '()\n', (5536, 5538), True, 'import tkgui.widgets as _\n'), ((5575, 5589), 'tkgui.widgets.treeWidget', '_.treeWidget', ([], {}), '()\n', (5587, 5589), True, 'import tkgui.widgets as _\n')] |
import logging
from aiogram.types import Message
from app.loader import dp
from app.utils.notify_admin import on_start_cmd_notify
from app.data.texts import HELP_TEXT
@dp.message_handler(commands='start')
async def start_bot(msg: Message):
await on_start_cmd_notify(msg.from_user)
await msg.answer('Список команд бота доступен командой /help. Изучайте информацию полностью.')
logging.info(f'Start bot in chat [{msg.chat.type}:{msg.chat.id}]. User '
f'[{msg.from_user.id}:{msg.from_user.username}]')
@dp.message_handler(commands='help')
async def help_command(msg: Message):
await msg.answer(HELP_TEXT)
logging.info(f'Help command in chat [{msg.chat.type}:{msg.chat.id}]. User '
f'[{msg.from_user.id}:{msg.from_user.username}]')
| [
"app.loader.dp.message_handler",
"app.utils.notify_admin.on_start_cmd_notify",
"logging.info"
] | [((173, 209), 'app.loader.dp.message_handler', 'dp.message_handler', ([], {'commands': '"""start"""'}), "(commands='start')\n", (191, 209), False, 'from app.loader import dp\n'), ((537, 572), 'app.loader.dp.message_handler', 'dp.message_handler', ([], {'commands': '"""help"""'}), "(commands='help')\n", (555, 572), False, 'from app.loader import dp\n'), ((394, 522), 'logging.info', 'logging.info', (['f"""Start bot in chat [{msg.chat.type}:{msg.chat.id}]. User [{msg.from_user.id}:{msg.from_user.username}]"""'], {}), "(\n f'Start bot in chat [{msg.chat.type}:{msg.chat.id}]. User [{msg.from_user.id}:{msg.from_user.username}]'\n )\n", (406, 522), False, 'import logging\n'), ((647, 778), 'logging.info', 'logging.info', (['f"""Help command in chat [{msg.chat.type}:{msg.chat.id}]. User [{msg.from_user.id}:{msg.from_user.username}]"""'], {}), "(\n f'Help command in chat [{msg.chat.type}:{msg.chat.id}]. User [{msg.from_user.id}:{msg.from_user.username}]'\n )\n", (659, 778), False, 'import logging\n'), ((255, 289), 'app.utils.notify_admin.on_start_cmd_notify', 'on_start_cmd_notify', (['msg.from_user'], {}), '(msg.from_user)\n', (274, 289), False, 'from app.utils.notify_admin import on_start_cmd_notify\n')] |
import Model
import datetime
import Result
import Demand
import signal
import copy
class Meso_Simu:
def __init__(self,roadname,road_path,demand_path,signal_path,out_put_path,total_stime):
self.roadname = roadname
self.road_path =road_path
self.demand_path = demand_path
self.signal_path =signal_path
self.out_put_path=out_put_path
self.car_len = 5.6 # 5(vehicle length)+0.6(space headway)
self.total_stime=total_stime
def simulation(self,stime,etime):
print(stime, etime)
test_Network = Model.Model(self.roadname, self.car_len, stime, etime)
road_data = test_Network.get_roaddata(self.roadname, self.road_path) # initialize all road
signal_data = signal.signal(self.signal_path, road_data, stime, etime) # initialize signal
demand_data = Demand.Demand(self.demand_path, self.roadname, stime, etime) # # initialize vehicle(demand)
# initialize road network
test_Network.get_init_network(self.roadname, signal_data.signal_plan, road_data, demand_data.Demand, stime,etime) # 路网完成初始化
#run simulation
self.simu_result = test_Network.simulation(stime, etime)
simu_result = Result.Result().get_vehicle_result(self.simu_result, self.out_put_path, stime,etime)
return simu_result
| [
"Model.Model",
"signal.signal",
"Demand.Demand",
"Result.Result"
] | [((589, 643), 'Model.Model', 'Model.Model', (['self.roadname', 'self.car_len', 'stime', 'etime'], {}), '(self.roadname, self.car_len, stime, etime)\n', (600, 643), False, 'import Model\n'), ((768, 824), 'signal.signal', 'signal.signal', (['self.signal_path', 'road_data', 'stime', 'etime'], {}), '(self.signal_path, road_data, stime, etime)\n', (781, 824), False, 'import signal\n'), ((869, 929), 'Demand.Demand', 'Demand.Demand', (['self.demand_path', 'self.roadname', 'stime', 'etime'], {}), '(self.demand_path, self.roadname, stime, etime)\n', (882, 929), False, 'import Demand\n'), ((1245, 1260), 'Result.Result', 'Result.Result', ([], {}), '()\n', (1258, 1260), False, 'import Result\n')] |
import typing as tp
from abc import abstractmethod, ABCMeta
from satella.coding.concurrent import IntervalTerminableThread
from .collector import sleep_cpu_aware, CPUProfileBuilderThread
from satella.time import measure, parse_time_string
class CPUTimeAwareIntervalTerminableThread(IntervalTerminableThread, metaclass=ABCMeta):
"""
An IntervalTerminableThread that can call the loop a bit faster than usual,
based of current CPU time metrics.
:param seconds: time that a single looping through should take. This will
include the time spent on calling .loop(), the rest of this time will
be spent safe_sleep()ing.
Can be alternatively a time string
:param max_sooner: amount of seconds that is ok to call this earlier.
Default is 10% seconds.
:param percentile: percentile that CPU usage has to fall below to call it earlier.
:param wakeup_interval: amount of seconds to wake up between to check for _terminating status.
Can be also a time string
Same concerns for :code:`terminate_on` as in
:class:`~satella.coding.concurrent.TerminableThread` apply.
"""
def __init__(self, seconds: tp.Union[str, float], max_sooner: tp.Optional[float] = None,
percentile: float = 0.3,
wakeup_interval: tp.Union[str, float] = '3s', *args, **kwargs):
self.seconds = parse_time_string(seconds)
self.wakeup_interval = parse_time_string(wakeup_interval)
self.max_sooner = max_sooner or seconds * 0.1
cp_bt = CPUProfileBuilderThread()
cp_bt.request_percentile(percentile)
self.percentile = percentile
super().__init__(seconds, *args, **kwargs)
@abstractmethod
def loop(self) -> None:
"""
Override me!
"""
def _execute_measured(self) -> float:
with measure() as measurement:
self.loop()
return measurement()
def __sleep_waiting_for_cpu(self, how_long: float) -> None:
cp_bt = CPUProfileBuilderThread()
per_val = cp_bt.percentile(self.percentile)
while how_long > 0 and not self._terminating:
time_to_sleep = min(self.wakeup_interval, how_long)
if sleep_cpu_aware(time_to_sleep, per_val):
break
how_long -= time_to_sleep
def __sleep(self, how_long: float) -> None:
if how_long > self.max_sooner:
if self.safe_sleep(how_long - self.max_sooner):
return
how_long = self.max_sooner
self.__sleep_waiting_for_cpu(how_long)
def run(self):
try:
try:
self.prepare()
except Exception as e:
if self._terminate_on is not None:
if isinstance(e, self._terminate_on):
self.terminate()
return
raise
while not self.terminating:
try:
measured = self._execute_measured()
except Exception as e:
if self._terminate_on is not None:
if isinstance(e, self._terminate_on):
self.terminate()
return
raise
seconds_to_wait = self.seconds - measured
if seconds_to_wait > 0:
self.__sleep(seconds_to_wait)
elif seconds_to_wait < 0:
self.on_overrun(measured)
except SystemExit:
pass
finally:
self.cleanup()
| [
"satella.time.parse_time_string",
"satella.time.measure"
] | [((1380, 1406), 'satella.time.parse_time_string', 'parse_time_string', (['seconds'], {}), '(seconds)\n', (1397, 1406), False, 'from satella.time import measure, parse_time_string\n'), ((1438, 1472), 'satella.time.parse_time_string', 'parse_time_string', (['wakeup_interval'], {}), '(wakeup_interval)\n', (1455, 1472), False, 'from satella.time import measure, parse_time_string\n'), ((1852, 1861), 'satella.time.measure', 'measure', ([], {}), '()\n', (1859, 1861), False, 'from satella.time import measure, parse_time_string\n')] |
import json
import time
import requests
from Lib import DocSegmentation
DOCKER = False
ENDPOINT = "http://localhost:8080/api/v1"
DOCUMENT = 'Document2'
'''
Post request
'''
def post_request(path, json, headers=None):
url = F'{ENDPOINT}/{path}'
headers = headers or {}
headers['Content-Type'] = 'application/json'
response = requests.post(url, json=json, headers=headers)
if response.ok:
return response.json()
else:
raise Exception(str(response))
'''
Split Document Segments
'''
def do_segmentation(txt):
doc = {'Text': txt}
segs = post_request('document/segmentation?lan=es', doc)
jso = json.dumps(segs, indent=2, sort_keys=True)
print(jso)
with open(F'_output/web-{DOCUMENT}-segs.json', 'w') as fp:
fp.write(jso)
return segs
'''
MAIN
'''
def run():
with open(F'_input/{DOCUMENT}.txt', 'r', encoding='UTF-8') as fp:
text = fp.read()
segs = do_segmentation(text)
if __name__ == '__main__':
if DOCKER:
run()
else:
import threading
from app import main
threading.Thread(target=run).start()
main()
| [
"json.dumps",
"requests.post",
"app.main",
"threading.Thread"
] | [((349, 395), 'requests.post', 'requests.post', (['url'], {'json': 'json', 'headers': 'headers'}), '(url, json=json, headers=headers)\n', (362, 395), False, 'import requests\n'), ((654, 696), 'json.dumps', 'json.dumps', (['segs'], {'indent': '(2)', 'sort_keys': '(True)'}), '(segs, indent=2, sort_keys=True)\n', (664, 696), False, 'import json\n'), ((1144, 1150), 'app.main', 'main', ([], {}), '()\n', (1148, 1150), False, 'from app import main\n'), ((1099, 1127), 'threading.Thread', 'threading.Thread', ([], {'target': 'run'}), '(target=run)\n', (1115, 1127), False, 'import threading\n')] |
from factory.django import DjangoModelFactory
import factory
from django.utils import timezone
from . import models
class UserFactory(DjangoModelFactory):
class Meta:
model = models.User
first_name = factory.Faker('first_name')
last_name = factory.Faker('last_name')
affiliation = factory.Faker('company')
job_title = factory.Faker('job')
email = factory.LazyAttribute(lambda a: '{0}.{1}<EMAIL>'.format(a.first_name, a.last_name).lower())
# ldap_name = factory.LazyAttribute(lambda a: '{0}{1}'.format(a.first_name, a.last_name).lower())
class ProjectFactory(DjangoModelFactory):
class Meta:
model = models.Project
name = factory.Faker('job')
abstract = factory.Faker('job')
class DatasetFactory(DjangoModelFactory):
class Meta:
model = models.Dataset
name = factory.Faker('company')
dataset_id = factory.Sequence(lambda n: 'ADRF-%s' % n)
class DatasetAccessFactory(DjangoModelFactory):
class Meta:
model = models.DatasetAccess
project = factory.SubFactory(ProjectFactory)
dataset = factory.SubFactory(DatasetFactory)
start_at = factory.LazyFunction(lambda: timezone.now())
class DfRoleFactory(DjangoModelFactory):
class Meta:
model = models.DfRole
name = factory.Faker('job')
description = factory.Faker('job')
| [
"django.utils.timezone.now",
"factory.Faker",
"factory.Sequence",
"factory.SubFactory"
] | [((218, 245), 'factory.Faker', 'factory.Faker', (['"""first_name"""'], {}), "('first_name')\n", (231, 245), False, 'import factory\n'), ((262, 288), 'factory.Faker', 'factory.Faker', (['"""last_name"""'], {}), "('last_name')\n", (275, 288), False, 'import factory\n'), ((307, 331), 'factory.Faker', 'factory.Faker', (['"""company"""'], {}), "('company')\n", (320, 331), False, 'import factory\n'), ((348, 368), 'factory.Faker', 'factory.Faker', (['"""job"""'], {}), "('job')\n", (361, 368), False, 'import factory\n'), ((677, 697), 'factory.Faker', 'factory.Faker', (['"""job"""'], {}), "('job')\n", (690, 697), False, 'import factory\n'), ((713, 733), 'factory.Faker', 'factory.Faker', (['"""job"""'], {}), "('job')\n", (726, 733), False, 'import factory\n'), ((836, 860), 'factory.Faker', 'factory.Faker', (['"""company"""'], {}), "('company')\n", (849, 860), False, 'import factory\n'), ((878, 919), 'factory.Sequence', 'factory.Sequence', (["(lambda n: 'ADRF-%s' % n)"], {}), "(lambda n: 'ADRF-%s' % n)\n", (894, 919), False, 'import factory\n'), ((1037, 1071), 'factory.SubFactory', 'factory.SubFactory', (['ProjectFactory'], {}), '(ProjectFactory)\n', (1055, 1071), False, 'import factory\n'), ((1086, 1120), 'factory.SubFactory', 'factory.SubFactory', (['DatasetFactory'], {}), '(DatasetFactory)\n', (1104, 1120), False, 'import factory\n'), ((1281, 1301), 'factory.Faker', 'factory.Faker', (['"""job"""'], {}), "('job')\n", (1294, 1301), False, 'import factory\n'), ((1320, 1340), 'factory.Faker', 'factory.Faker', (['"""job"""'], {}), "('job')\n", (1333, 1340), False, 'import factory\n'), ((1165, 1179), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1177, 1179), False, 'from django.utils import timezone\n')] |
# Generated by Django 2.2 on 2020-04-24 08:05
from django.db import migrations, models
import django.db.models.deletion
import machina.apps.forum.abstract_models
import machina.models.fields
import mptt.fields
class Migration(migrations.Migration):
dependencies = [
('forum', '0011_auto_20190627_2132'),
]
operations = [
migrations.AlterModelOptions(
name='forum',
options={'ordering': ['tree_id', 'lft'], 'verbose_name': 'Forum', 'verbose_name_plural': 'Fora'},
),
migrations.AlterField(
model_name='forum',
name='created',
field=models.DateTimeField(auto_now_add=True, verbose_name='Data utworzenia'),
),
migrations.AlterField(
model_name='forum',
name='description',
field=machina.models.fields.MarkupTextField(blank=True, no_rendered_field=True, null=True, verbose_name='Opis'),
),
migrations.AlterField(
model_name='forum',
name='direct_posts_count',
field=models.PositiveIntegerField(blank=True, default=0, editable=False, verbose_name='Liczba postów'),
),
migrations.AlterField(
model_name='forum',
name='direct_topics_count',
field=models.PositiveIntegerField(blank=True, default=0, editable=False, verbose_name='Liczba tematów'),
),
migrations.AlterField(
model_name='forum',
name='display_sub_forum_list',
field=models.BooleanField(default=True, help_text='Wyświetlaj forum w liście forów rodzica.', verbose_name='Wyświetlaj w liście rodzica'),
),
migrations.AlterField(
model_name='forum',
name='image',
field=machina.models.fields.ExtendedImageField(blank=True, null=True, upload_to=machina.apps.forum.abstract_models.get_forum_image_upload_to, verbose_name='Logo forum'),
),
migrations.AlterField(
model_name='forum',
name='last_post',
field=models.ForeignKey(blank=True, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='forum_conversation.Post', verbose_name='Ostatni post'),
),
migrations.AlterField(
model_name='forum',
name='last_post_on',
field=models.DateTimeField(blank=True, null=True, verbose_name='Ostatni post dodano'),
),
migrations.AlterField(
model_name='forum',
name='link',
field=models.URLField(blank=True, null=True, verbose_name='Odnośnik forum'),
),
migrations.AlterField(
model_name='forum',
name='link_redirects',
field=models.BooleanField(default=False, help_text='Zliczaj liczbę kliknięć w odnośnik do forum', verbose_name='Licznik przekierowań'),
),
migrations.AlterField(
model_name='forum',
name='link_redirects_count',
field=models.PositiveIntegerField(blank=True, default=0, editable=False, verbose_name='Licznik przekierowań'),
),
migrations.AlterField(
model_name='forum',
name='name',
field=models.CharField(max_length=100, verbose_name='Nazwa'),
),
migrations.AlterField(
model_name='forum',
name='parent',
field=mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='forum.Forum', verbose_name='Rodzic'),
),
migrations.AlterField(
model_name='forum',
name='type',
field=models.PositiveSmallIntegerField(choices=[(0, 'Domyślne forum'), (1, 'Kategoria'), (2, 'Odnośnik')], db_index=True, verbose_name='Typ forum'),
),
migrations.AlterField(
model_name='forum',
name='updated',
field=models.DateTimeField(auto_now=True, verbose_name='Data edycji'),
),
]
| [
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.migrations.AlterModelOptions",
"django.db.models.PositiveIntegerField",
"django.db.models.DateTimeField",
"django.db.models.PositiveSmallIntegerField",
"django.db.models.URLField",
"django.db.models.CharField"
] | [((354, 498), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""forum"""', 'options': "{'ordering': ['tree_id', 'lft'], 'verbose_name': 'Forum',\n 'verbose_name_plural': 'Fora'}"}), "(name='forum', options={'ordering': ['tree_id',\n 'lft'], 'verbose_name': 'Forum', 'verbose_name_plural': 'Fora'})\n", (382, 498), False, 'from django.db import migrations, models\n'), ((640, 711), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Data utworzenia"""'}), "(auto_now_add=True, verbose_name='Data utworzenia')\n", (660, 711), False, 'from django.db import migrations, models\n'), ((1075, 1175), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'default': '(0)', 'editable': '(False)', 'verbose_name': '"""Liczba postów"""'}), "(blank=True, default=0, editable=False,\n verbose_name='Liczba postów')\n", (1102, 1175), False, 'from django.db import migrations, models\n'), ((1305, 1406), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'default': '(0)', 'editable': '(False)', 'verbose_name': '"""Liczba tematów"""'}), "(blank=True, default=0, editable=False,\n verbose_name='Liczba tematów')\n", (1332, 1406), False, 'from django.db import migrations, models\n'), ((1539, 1680), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'help_text': '"""Wyświetlaj forum w liście forów rodzica."""', 'verbose_name': '"""Wyświetlaj w liście rodzica"""'}), "(default=True, help_text=\n 'Wyświetlaj forum w liście forów rodzica.', verbose_name=\n 'Wyświetlaj w liście rodzica')\n", (1558, 1680), False, 'from django.db import migrations, models\n'), ((2076, 2265), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'editable': '(False)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""+"""', 'to': '"""forum_conversation.Post"""', 'verbose_name': '"""Ostatni post"""'}), "(blank=True, editable=False, null=True, on_delete=django.\n db.models.deletion.SET_NULL, related_name='+', to=\n 'forum_conversation.Post', verbose_name='Ostatni post')\n", (2093, 2265), False, 'from django.db import migrations, models\n'), ((2382, 2461), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Ostatni post dodano"""'}), "(blank=True, null=True, verbose_name='Ostatni post dodano')\n", (2402, 2461), False, 'from django.db import migrations, models\n'), ((2580, 2649), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Odnośnik forum"""'}), "(blank=True, null=True, verbose_name='Odnośnik forum')\n", (2595, 2649), False, 'from django.db import migrations, models\n'), ((2778, 2916), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'help_text': '"""Zliczaj liczbę kliknięć w odnośnik do forum"""', 'verbose_name': '"""Licznik przekierowań"""'}), "(default=False, help_text=\n 'Zliczaj liczbę kliknięć w odnośnik do forum', verbose_name=\n 'Licznik przekierowań')\n", (2797, 2916), False, 'from django.db import migrations, models\n'), ((3041, 3148), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'default': '(0)', 'editable': '(False)', 'verbose_name': '"""Licznik przekierowań"""'}), "(blank=True, default=0, editable=False,\n verbose_name='Licznik przekierowań')\n", (3068, 3148), False, 'from django.db import migrations, models\n'), ((3263, 3317), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'verbose_name': '"""Nazwa"""'}), "(max_length=100, verbose_name='Nazwa')\n", (3279, 3317), False, 'from django.db import migrations, models\n'), ((3717, 3862), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'choices': "[(0, 'Domyślne forum'), (1, 'Kategoria'), (2, 'Odnośnik')]", 'db_index': '(True)', 'verbose_name': '"""Typ forum"""'}), "(choices=[(0, 'Domyślne forum'), (1,\n 'Kategoria'), (2, 'Odnośnik')], db_index=True, verbose_name='Typ forum')\n", (3749, 3862), False, 'from django.db import migrations, models\n'), ((3980, 4043), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Data edycji"""'}), "(auto_now=True, verbose_name='Data edycji')\n", (4000, 4043), False, 'from django.db import migrations, models\n')] |
# coding: utf-8
import os
import pickle
import json
import requests
import warnings
from collections import OrderedDict
import pandas as pd
from tqdm import tqdm
from datetime import datetime, timedelta
from typing import List
from ..objects import RawBar
from ..utils.kline_generator import bar_end_time
import sqlite3
from binance_f import RequestClient
from binance_f.model import *
from binance_f.constant.test import *
import time
# 1m, 5m, 15m, 30m, 60m, 120m, 1d, 1w, 1M
freq_convert = {"1min": "1m", "5min": '5m', '15min': '15m',
"30min": "30m", "60min": '1h', "D": "1d", "W": '1w', "M": "1M"}
conn = sqlite3.connect("trade.db")
c = conn.cursor()
DEFAULT_START_TIME = datetime(2019, 1, 1)
request_client = RequestClient(api_key=g_api_key, secret_key=g_secret_key)
# # 从数据库获取k线
# def get_kline_from_sql(symbol, interval: 'CandlestickInterval',starttime:datetime):
# result = c.execute("SELECT * FROM MARKET_INFO WHERE SYMBOL=? AND INTERVAL=? AND DT>=? ORDER BY DT",
# (symbol, interval,starttime.timestamp()*1000))
# bars = []
# # 会有防止重复请求的
# for row in result:
# # if datetime.fromtimestamp(row[2]/1000)>=starttime:
# bars.append(RawBar(symbol=symbol, dt=datetime.fromtimestamp(row[2]/1000),
# open=round(float(row[3]), 2),
# close=round(float(row[4]), 2),
# high=round(float(row[6]), 2),
# low=round(float(row[5]), 2),
# vol=int(row[7])))
# return bars
def get_kline(symbol: str, end_date: [datetime, str], freq: str,
start_date: [datetime, str] = None, count=None, fq: bool = False) -> List[RawBar]:
"""获取K线数据
:param symbol: 币安期货的交易对 BTCUSDT/ETHUSDT
:param start_date: 开始日期
:param end_date: 截止日期
:param freq: K线级别,可选值 ['1min', '5min', '30min', '60min', 'D', 'W', 'M']
:param count: K线数量,最大值为 5000
:param fq: 是否进行复权
:return: pd.DataFrame
>>> start_date = datetime.strptime("20200101", "%Y%m%d")
>>> end_date = datetime.strptime("20210701", "%Y%m%d")
>>> df1 = get_kline(symbol="BTCUSDT", start_date=start_date, end_date=end_date, freq="1min")
>>> df2 = get_kline(symbol="000001.XSHG", end_date=end_date, freq="1min", count=1000)
>>> df3 = get_kline(symbol="000001.XSHG", start_date='20200701', end_date='20200719', freq="1min", fq=True)
>>> df4 = get_kline(symbol="000001.XSHG", end_date='20200719', freq="1min", count=1000)
"""
# 从币安获取k线数据
if count and count > 1300:
warnings.warn(f"count={count}, 超过5000的最大值限制,仅返回最后5000条记录")
end_date = datetime.now()
result = []
if start_date:
start_date = pd.to_datetime(start_date)
while len(result) == 0:
try:
result = request_client.get_candlestick_data(symbol=symbol,
interval=freq_convert[freq],
startTime=start_date.timestamp() * 1000,
endTime=end_date.timestamp() * 1000)
except:
print("重连了")
time.sleep(2)
elif count:
while len(result) == 0:
try:
result = request_client.get_candlestick_data(symbol=symbol,
interval=freq_convert[freq],
endTime=end_date.timestamp() * 1000,
limit=count)
except:
print("重连了")
time.sleep(2)
else:
raise ValueError("start_date 和 count 不能同时为空")
bars = []
for kline in result:
bars.append(RawBar(symbol=symbol, dt=datetime.fromtimestamp(kline.openTime / 1000),
open=round(float(kline.open), 2),
close=round(float(kline.close), 2),
high=round(float(kline.high), 2),
low=round(float(kline.low), 2),
vol=int(float(kline.volume))))
return bars
def get_kline_period(symbol: str, start_date: [datetime, str],
end_date: [datetime, str], freq: str, fq=False) -> List[RawBar]:
"""获取指定时间段的行情数据
:param symbol: 币安期货的交易对 BTCUSDT/ETHUSDT
:param start_date: 开始日期
:param end_date: 截止日期
:param freq: K线级别,可选值 ['1min', '5min', '30min', '60min', 'D', 'W', 'M']
:param fq: 是否进行复权
:return:
"""
return get_kline(symbol, end_date, freq, start_date)
| [
"datetime.datetime",
"datetime.datetime.fromtimestamp",
"sqlite3.connect",
"time.sleep",
"datetime.datetime.now",
"binance_f.RequestClient",
"warnings.warn",
"pandas.to_datetime"
] | [((627, 654), 'sqlite3.connect', 'sqlite3.connect', (['"""trade.db"""'], {}), "('trade.db')\n", (642, 654), False, 'import sqlite3\n'), ((694, 714), 'datetime.datetime', 'datetime', (['(2019)', '(1)', '(1)'], {}), '(2019, 1, 1)\n', (702, 714), False, 'from datetime import datetime, timedelta\n'), ((732, 789), 'binance_f.RequestClient', 'RequestClient', ([], {'api_key': 'g_api_key', 'secret_key': 'g_secret_key'}), '(api_key=g_api_key, secret_key=g_secret_key)\n', (745, 789), False, 'from binance_f import RequestClient\n'), ((2658, 2672), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2670, 2672), False, 'from datetime import datetime, timedelta\n'), ((2584, 2642), 'warnings.warn', 'warnings.warn', (['f"""count={count}, 超过5000的最大值限制,仅返回最后5000条记录"""'], {}), "(f'count={count}, 超过5000的最大值限制,仅返回最后5000条记录')\n", (2597, 2642), False, 'import warnings\n'), ((2729, 2755), 'pandas.to_datetime', 'pd.to_datetime', (['start_date'], {}), '(start_date)\n', (2743, 2755), True, 'import pandas as pd\n'), ((3236, 3249), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3246, 3249), False, 'import time\n'), ((3869, 3914), 'datetime.datetime.fromtimestamp', 'datetime.fromtimestamp', (['(kline.openTime / 1000)'], {}), '(kline.openTime / 1000)\n', (3891, 3914), False, 'from datetime import datetime, timedelta\n'), ((3706, 3719), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3716, 3719), False, 'import time\n')] |
# Generated by Django 3.2.7 on 2021-09-02 02:24
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('creepers', '0002_rename_owner_creeper_added'),
]
operations = [
migrations.AddField(
model_name='creeper',
name='survivability',
field=models.IntegerField(choices=[(1, 1), (2, 2), (3, 3), (4, 4)], default='1'),
),
]
| [
"django.db.models.IntegerField"
] | [((351, 425), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': '[(1, 1), (2, 2), (3, 3), (4, 4)]', 'default': '"""1"""'}), "(choices=[(1, 1), (2, 2), (3, 3), (4, 4)], default='1')\n", (370, 425), False, 'from django.db import migrations, models\n')] |
import shutil
import os
def end_report(pr_dictionary):
# copy sl logo to report folder
shutil.copy(os.path.join(pr_dictionary["html_bin_path"], "logo.png"), os.path.join(pr_dictionary["workflow_outpath"],"plots"))
# initialize a new report file
report = open(pr_dictionary["workflow_outpath"] + "/report.html", "w")
# replace the section tags in report_meta_data by the content generated in add_report.py
report_out = pr_dictionary["meta_data"].replace("<*side_bar*>", pr_dictionary["side_bar"])\
.replace("<*report_body*>", str(pr_dictionary["report_body"])).replace("<*report_header*>", pr_dictionary["report_header"])
# write to file
report.write(report_out)
| [
"os.path.join"
] | [((110, 166), 'os.path.join', 'os.path.join', (["pr_dictionary['html_bin_path']", '"""logo.png"""'], {}), "(pr_dictionary['html_bin_path'], 'logo.png')\n", (122, 166), False, 'import os\n'), ((168, 224), 'os.path.join', 'os.path.join', (["pr_dictionary['workflow_outpath']", '"""plots"""'], {}), "(pr_dictionary['workflow_outpath'], 'plots')\n", (180, 224), False, 'import os\n')] |
"""The promethues monitor page"""
from bareasgi import HttpRequest, HttpResponse, bytes_writer
from bareutils import header, response_code
from prometheus_client import CONTENT_TYPE_LATEST, generate_latest
async def prometheus_view(_request: HttpRequest) -> HttpResponse:
"""The endpoint for prometheus stats
Args:
_request (HttpRequest): The request.
Returns:
HttpResponse: The prometheus statistics
"""
headers = [
(header.CONTENT_TYPE, CONTENT_TYPE_LATEST.encode('ascii'))
]
body = generate_latest()
return HttpResponse(response_code.OK, headers, bytes_writer(body))
| [
"bareasgi.bytes_writer",
"prometheus_client.CONTENT_TYPE_LATEST.encode",
"prometheus_client.generate_latest"
] | [((542, 559), 'prometheus_client.generate_latest', 'generate_latest', ([], {}), '()\n', (557, 559), False, 'from prometheus_client import CONTENT_TYPE_LATEST, generate_latest\n'), ((611, 629), 'bareasgi.bytes_writer', 'bytes_writer', (['body'], {}), '(body)\n', (623, 629), False, 'from bareasgi import HttpRequest, HttpResponse, bytes_writer\n'), ((488, 523), 'prometheus_client.CONTENT_TYPE_LATEST.encode', 'CONTENT_TYPE_LATEST.encode', (['"""ascii"""'], {}), "('ascii')\n", (514, 523), False, 'from prometheus_client import CONTENT_TYPE_LATEST, generate_latest\n')] |
from abstract_esn import AbstractESN
import numpy as np
from pathlib import Path
path = Path('./results/santafe/noisy')
def mean_squared_error(y_true, y_pred):
try:
return np.mean(np.abs((y_true - y_pred)**2))
except:
return -1
def mean_absolute_percentage_error(y_true, y_pred):
try:
return np.mean(np.abs((y_true - y_pred) / y_true)) * 100
except:
return -1
if __name__ == '__main__':
path.mkdir(parents=True, exist_ok=True)
with open('dataset/SantaFe.D.txt', 'r') as f:
train = list(map(float, f.readlines()))
with open('dataset/SantaFe.D.cont.txt', 'r') as f:
test = list(map(float, f.readlines()))
n_neurons = 100
n_inputs = 1
connectivity = 0.2
washout = 1000
seed = 42
a = 0.8
spectral_radius = 0.1
np.random.seed(42)
with open(path/'results_open.csv', 'w') as f:
print('amplitude', 'n', 'type', 'MSE', 'MAPE', file=f, sep=',')
for du in np.arange(0.35, 1.0, 0.01):
esn = AbstractESN(n_neurons, n_inputs, 1, connectivity, washout, seed, a, spectral_radius)
X = np.zeros((1 + n_inputs + n_neurons, len(train) - washout - 1))
dX = np.zeros((1 + n_inputs + n_neurons, len(train) - washout - 1))
for i, u in enumerate(train[:-1]):
(x, dx) = esn.step(u, du)
if i >= washout:
X[:, i - washout] = np.hstack((1, u, x))
dX[:, i - washout] = np.hstack((1, du, dx))
esn.calculate_classical_weights(train[1 + washout:], X.T)
esn.calculate_abstract_weights(train[1 + washout:], du, X.T, dX.T)
x, _ = esn.step(train[-1], du)
x_pretest = np.copy(x)
for n in range(100):
esn.xc = np.copy(x_pretest)
Y_abstract = np.zeros((len(test) - 1, 1))
Y_classical = np.zeros((len(test) - 1, 1))
for i, u in enumerate(test[:-1]):
noise = (np.random.rand() - 0.5) * du
(x, _) = esn.step(u + noise, 0)
(Y_classical[i, ...], _) = esn.predict_classical(u + noise, 0)
(Y_abstract[i, ...], _) = esn.predict_abstract(u + noise, 0)
with open(path/'results_open.csv', 'a') as f:
print(du, n, 'abstract', mean_squared_error(test[1:], Y_abstract), mean_absolute_percentage_error(test[1:], Y_abstract), file=f, sep=',')
print(du, n, 'classical', mean_squared_error(test[1:], Y_classical), mean_absolute_percentage_error(test[1:], Y_classical), file=f, sep=',') | [
"numpy.copy",
"numpy.abs",
"numpy.random.rand",
"pathlib.Path",
"numpy.hstack",
"abstract_esn.AbstractESN",
"numpy.random.seed",
"numpy.arange"
] | [((89, 120), 'pathlib.Path', 'Path', (['"""./results/santafe/noisy"""'], {}), "('./results/santafe/noisy')\n", (93, 120), False, 'from pathlib import Path\n'), ((822, 840), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (836, 840), True, 'import numpy as np\n'), ((980, 1006), 'numpy.arange', 'np.arange', (['(0.35)', '(1.0)', '(0.01)'], {}), '(0.35, 1.0, 0.01)\n', (989, 1006), True, 'import numpy as np\n'), ((1022, 1110), 'abstract_esn.AbstractESN', 'AbstractESN', (['n_neurons', 'n_inputs', '(1)', 'connectivity', 'washout', 'seed', 'a', 'spectral_radius'], {}), '(n_neurons, n_inputs, 1, connectivity, washout, seed, a,\n spectral_radius)\n', (1033, 1110), False, 'from abstract_esn import AbstractESN\n'), ((1690, 1700), 'numpy.copy', 'np.copy', (['x'], {}), '(x)\n', (1697, 1700), True, 'import numpy as np\n'), ((194, 224), 'numpy.abs', 'np.abs', (['((y_true - y_pred) ** 2)'], {}), '((y_true - y_pred) ** 2)\n', (200, 224), True, 'import numpy as np\n'), ((1752, 1770), 'numpy.copy', 'np.copy', (['x_pretest'], {}), '(x_pretest)\n', (1759, 1770), True, 'import numpy as np\n'), ((339, 373), 'numpy.abs', 'np.abs', (['((y_true - y_pred) / y_true)'], {}), '((y_true - y_pred) / y_true)\n', (345, 373), True, 'import numpy as np\n'), ((1406, 1426), 'numpy.hstack', 'np.hstack', (['(1, u, x)'], {}), '((1, u, x))\n', (1415, 1426), True, 'import numpy as np\n'), ((1464, 1486), 'numpy.hstack', 'np.hstack', (['(1, du, dx)'], {}), '((1, du, dx))\n', (1473, 1486), True, 'import numpy as np\n'), ((1952, 1968), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (1966, 1968), True, 'import numpy as np\n')] |
import scipy.linalg as lg
import numpy as np
import utils as ut
import argparse
import pickle
np.set_printoptions(
formatter={'float': '{: 0.3f}'.format}
)
def get_saliency_key_factors(W, H):
'''Extract key factors based on saliency score'''
n_terms, _ = W.shape
_, n_sentences = H.shape
_, column_permutations = zip(*sorted(
zip(H.T, range(n_sentences)),
key=lambda t: -sum(t[0])
))
_, row_permutations = zip(*sorted(
zip(W, range(n_terms)),
key=lambda t: -sum(t[0])
))
return column_permutations, row_permutations
def get_qr_key_factors(W, H):
'''Extract key factors based on QR decomposition with column pivoting'''
_, _, column_permutations = lg.qr(H, pivoting=True)
_, _, row_permutations = lg.qr(W.T, pivoting=True)
return column_permutations, row_permutations
methods = {
'saliency': get_saliency_key_factors,
'qr': get_qr_key_factors,
}
parser = argparse.ArgumentParser()
parser.add_argument('parsed')
parser.add_argument('factorized')
parser.add_argument('--method', default=list(methods.keys())[0], choices=methods.keys())
parser.add_argument('--out', default=None)
if __name__ == '__main__':
args = parser.parse_args()
parsed_file_path = args.parsed
factorized_file_path = args.factorized
method_name = args.method
out_file_path = args.out if args.out else ut.replace_ext(parsed_file_path, '{}.extracted'.format(method_name))
parsed_file = open(parsed_file_path, mode='rb+')
factorized_file = open(factorized_file_path, mode='rb+')
out_file = open(out_file_path, mode='w+')
parsed_data = pickle.load(parsed_file)
factorized_data = pickle.load(factorized_file)
W = factorized_data.get('W')
H = factorized_data.get('H')
terms = parsed_data.get('terms')
sentences = parsed_data.get('sentences')
method_func = methods[method_name]
key_sent, key_words = method_func(W, H)
out_file.write('\n# Key sentences\n')
for index in key_sent:
out_file.write('{} - {}'.format(H.T[index], sentences[index]))
out_file.write('\n')
| [
"pickle.load",
"scipy.linalg.qr",
"argparse.ArgumentParser",
"numpy.set_printoptions"
] | [((96, 155), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'formatter': "{'float': '{: 0.3f}'.format}"}), "(formatter={'float': '{: 0.3f}'.format})\n", (115, 155), True, 'import numpy as np\n'), ((916, 941), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (939, 941), False, 'import argparse\n'), ((696, 719), 'scipy.linalg.qr', 'lg.qr', (['H'], {'pivoting': '(True)'}), '(H, pivoting=True)\n', (701, 719), True, 'import scipy.linalg as lg\n'), ((747, 772), 'scipy.linalg.qr', 'lg.qr', (['W.T'], {'pivoting': '(True)'}), '(W.T, pivoting=True)\n', (752, 772), True, 'import scipy.linalg as lg\n'), ((1585, 1609), 'pickle.load', 'pickle.load', (['parsed_file'], {}), '(parsed_file)\n', (1596, 1609), False, 'import pickle\n'), ((1630, 1658), 'pickle.load', 'pickle.load', (['factorized_file'], {}), '(factorized_file)\n', (1641, 1658), False, 'import pickle\n')] |
#!/usr/bin/env python3
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
import glob
import json
import yaml
from google.cloud import storage
from google.api_core.gapic_v1 import client_info as grpc_client_info
parser = argparse.ArgumentParser(
description='Export project charging codes to GCS')
parser.add_argument('--config',
type=str,
help='Location of Project Factory config.yaml')
parser.add_argument('projects',
type=str,
help='Location of directory with all project YAML files')
args = parser.parse_args()
project_dir = args.projects if args.projects.endswith(
'/') else '%s/' % args.projects
config = {}
with open(args.config, 'rt') as f:
config = yaml.load(f, Loader=yaml.SafeLoader)
if 'chargingCodesDestinationBucket' not in config:
sys.exit(0)
print('Reading projects...', file=sys.stderr)
charging_codes = []
for project_file in glob.glob('%s*.yaml' % project_dir):
with open(project_file, 'rt') as f:
_project = yaml.load(f, Loader=yaml.SafeLoader)
project = _project['project']
if project['chargingCode'] not in charging_codes:
charging_codes.append(project['chargingCode'])
client_info = grpc_client_info.ClientInfo(
user_agent='google-pso-tool/turbo-project-factory/1.0.0')
storage_client = storage.Client(client_info=client_info)
print('Writing charging codes to GCS (%s/%s)...' %
(config['chargingCodesDestinationBucket'],
config['chargingCodesDestinationObject']),
file=sys.stderr)
bucket = storage_client.bucket(config['chargingCodesDestinationBucket'])
blob = bucket.blob(config['chargingCodesDestinationObject'])
blob.upload_from_string(json.dumps(charging_codes))
print('All done.', file=sys.stderr)
| [
"google.cloud.storage.Client",
"argparse.ArgumentParser",
"json.dumps",
"yaml.load",
"google.api_core.gapic_v1.client_info.ClientInfo",
"sys.exit",
"glob.glob"
] | [((776, 851), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Export project charging codes to GCS"""'}), "(description='Export project charging codes to GCS')\n", (799, 851), False, 'import argparse\n'), ((1500, 1535), 'glob.glob', 'glob.glob', (["('%s*.yaml' % project_dir)"], {}), "('%s*.yaml' % project_dir)\n", (1509, 1535), False, 'import glob\n'), ((1803, 1893), 'google.api_core.gapic_v1.client_info.ClientInfo', 'grpc_client_info.ClientInfo', ([], {'user_agent': '"""google-pso-tool/turbo-project-factory/1.0.0"""'}), "(user_agent=\n 'google-pso-tool/turbo-project-factory/1.0.0')\n", (1830, 1893), True, 'from google.api_core.gapic_v1 import client_info as grpc_client_info\n'), ((1911, 1950), 'google.cloud.storage.Client', 'storage.Client', ([], {'client_info': 'client_info'}), '(client_info=client_info)\n', (1925, 1950), False, 'from google.cloud import storage\n'), ((1308, 1344), 'yaml.load', 'yaml.load', (['f'], {'Loader': 'yaml.SafeLoader'}), '(f, Loader=yaml.SafeLoader)\n', (1317, 1344), False, 'import yaml\n'), ((1401, 1412), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1409, 1412), False, 'import sys\n'), ((2282, 2308), 'json.dumps', 'json.dumps', (['charging_codes'], {}), '(charging_codes)\n', (2292, 2308), False, 'import json\n'), ((1596, 1632), 'yaml.load', 'yaml.load', (['f'], {'Loader': 'yaml.SafeLoader'}), '(f, Loader=yaml.SafeLoader)\n', (1605, 1632), False, 'import yaml\n')] |
"""Read material densities and compositions from the PNNL Compendium.
The report is:
Detwiler, <NAME>., <NAME>., <NAME>., <NAME>., & <NAME>. Compendium of Material Composition Data for Radiation
Transport Modeling. United States. PNNL-15870 Revision 2.
https://doi.org/10.2172/1782721
and it is available at:
https://compendium.cwmd.pnnl.gov
"""
import json
import os
import warnings
import numpy as np
import pandas as pd
from .materials_error import MaterialsWarning
FNAME = os.path.join(os.path.split(__file__)[0], "MaterialsCompendium.json")
def json_elements_to_weight_fractions(elements):
"""Calculate element weight fractions from the Elements data."""
results = []
for element in elements:
assert element["Element"].isalpha()
line = f"{element['Element']} {element['WeightFraction_whole']:.6f}"
results.append(line)
return results
def json_elements_to_atom_fractions(elements):
"""Calculate element atomic number fractions from the Elements data."""
results = []
for element in elements:
line = f"{element['Element']} {element['AtomFraction_whole']:.6f}"
results.append(line)
return results
def fetch_compendium_data():
"""Read material data from the Compendium."""
# read the file
if not os.path.exists(FNAME):
warnings.warn(
'Material data from the "Compendium of Material Composition Data for '
'Radiation Transport Modeling" cannot be found. If these data are '
"desired, please visit the following URL: "
'https://compendium.cwmd.pnnl.gov and select "Download JSON". Then '
f"move the resulting file to the following path: {FNAME}",
MaterialsWarning,
)
data = []
else:
with open(FNAME, "r") as f:
data = json.load(f)
# extract relevant data
names = [datum["Name"] for datum in data]
formulae = [datum["Formula"] if "Formula" in datum else "-" for datum in data]
densities = [datum["Density"] for datum in data]
weight_fracs = [
json_elements_to_weight_fractions(datum["Elements"]) for datum in data
]
# assemble data into a dataframe like the NIST data
df = pd.DataFrame()
df["Material"] = names
df["Formula"] = formulae
df["Density"] = np.array(densities, dtype=float)
df["Composition_symbol"] = weight_fracs
return df
| [
"os.path.exists",
"os.path.split",
"json.load",
"numpy.array",
"pandas.DataFrame",
"warnings.warn"
] | [((2246, 2260), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (2258, 2260), True, 'import pandas as pd\n'), ((2337, 2369), 'numpy.array', 'np.array', (['densities'], {'dtype': 'float'}), '(densities, dtype=float)\n', (2345, 2369), True, 'import numpy as np\n'), ((513, 536), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (526, 536), False, 'import os\n'), ((1310, 1331), 'os.path.exists', 'os.path.exists', (['FNAME'], {}), '(FNAME)\n', (1324, 1331), False, 'import os\n'), ((1341, 1681), 'warnings.warn', 'warnings.warn', (['f"""Material data from the "Compendium of Material Composition Data for Radiation Transport Modeling" cannot be found. If these data are desired, please visit the following URL: https://compendium.cwmd.pnnl.gov and select "Download JSON". Then move the resulting file to the following path: {FNAME}"""', 'MaterialsWarning'], {}), '(\n f\'Material data from the "Compendium of Material Composition Data for Radiation Transport Modeling" cannot be found. If these data are desired, please visit the following URL: https://compendium.cwmd.pnnl.gov and select "Download JSON". Then move the resulting file to the following path: {FNAME}\'\n , MaterialsWarning)\n', (1354, 1681), False, 'import warnings\n'), ((1850, 1862), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1859, 1862), False, 'import json\n')] |
# -*- coding: utf-8 -*-
""" Cost functions for video calibration, used with scipy. """
# pylint:disable=invalid-name
import numpy as np
import sksurgerycalibration.video.video_calibration_utils as vu
import sksurgerycalibration.video.video_calibration_metrics as vm
def stereo_2d_error_for_extrinsics(x_0,
common_object_points,
common_left_image_points,
common_right_image_points,
left_intrinsics,
left_distortion,
right_intrinsics,
right_distortion,
l2r_rmat,
l2r_tvec
):
"""
Computes a vector of residuals between projected image points
and actual image points, for left and right image. x_0 should
contain left camera extrinsic parameters.
"""
rvecs = []
tvecs = []
number_of_frames = len(common_object_points)
for i in range(0, number_of_frames):
rvec = np.zeros((3, 1))
rvec[0][0] = x_0[6 * i + 0]
rvec[1][0] = x_0[6 * i + 1]
rvec[2][0] = x_0[6 * i + 2]
tvec = np.zeros((3, 1))
tvec[0][0] = x_0[6 * i + 3]
tvec[1][0] = x_0[6 * i + 4]
tvec[2][0] = x_0[6 * i + 5]
rvecs.append(rvec)
tvecs.append(tvec)
residual = vm.compute_stereo_2d_err(l2r_rmat,
l2r_tvec,
common_object_points,
common_left_image_points,
left_intrinsics,
left_distortion,
common_object_points,
common_right_image_points,
right_intrinsics,
right_distortion,
rvecs,
tvecs,
return_residuals=True
)
return residual
def mono_proj_err_h2e(x_0,
object_points,
image_points,
intrinsics,
distortion,
pattern_tracking,
device_tracking,
pattern2marker_matrix
):
"""
Computes the SSE of projected
image points and actual image points, for a single camera,
where we have a tracked calibration pattern, and assume the
pattern2marker transform should remain fixed. Therefore we
only optimise hand-eye. So, x_0 should be of length 6.
"""
assert len(x_0) == 6
rvec = np.zeros((3, 1))
rvec[0] = x_0[0]
rvec[1] = x_0[1]
rvec[2] = x_0[2]
tvec = np.zeros((3, 1))
tvec[0] = x_0[3]
tvec[1] = x_0[4]
tvec[2] = x_0[5]
h2e = vu.extrinsic_vecs_to_matrix(rvec, tvec)
number_of_frames = len(object_points)
rvecs = []
tvecs = []
# Computes pattern2camera for each pose
for i in range(0, number_of_frames):
p2c = h2e @ np.linalg.inv(device_tracking[i]) @ \
pattern_tracking[i] @ pattern2marker_matrix
rvec, tvec = vu.extrinsic_matrix_to_vecs(p2c)
rvecs.append(rvec)
tvecs.append(tvec)
proj, _ = vm.compute_mono_2d_err(object_points,
image_points,
rvecs,
tvecs,
intrinsics,
distortion,
return_residuals=False)
return proj
def mono_proj_err_p2m_h2e(x_0,
object_points,
image_points,
intrinsics,
distortion,
pattern_tracking,
device_tracking
):
"""
Computes the SSE between projected
image points to actual image points, for a single camera,
where we have a tracked pattern. Both the
pattern2marker and hand2eye are optimised.
So, x_0 should be of length 12.
"""
assert len(x_0) == 12
rvec = np.zeros((3, 1))
rvec[0] = x_0[0]
rvec[1] = x_0[1]
rvec[2] = x_0[2]
tvec = np.zeros((3, 1))
tvec[0] = x_0[3]
tvec[1] = x_0[4]
tvec[2] = x_0[5]
p2m = vu.extrinsic_vecs_to_matrix(rvec, tvec)
rvec[0] = x_0[6]
rvec[1] = x_0[7]
rvec[2] = x_0[8]
tvec[0] = x_0[9]
tvec[1] = x_0[10]
tvec[2] = x_0[11]
h2e = vu.extrinsic_vecs_to_matrix(rvec, tvec)
number_of_frames = len(object_points)
rvecs = []
tvecs = []
# Computes pattern2camera for each pose
for i in range(0, number_of_frames):
p2c = h2e @ np.linalg.inv(device_tracking[i])\
@ pattern_tracking[i] @ p2m
rvec, tvec = vu.extrinsic_matrix_to_vecs(p2c)
rvecs.append(rvec)
tvecs.append(tvec)
proj, _ = vm.compute_mono_2d_err(object_points,
image_points,
rvecs,
tvecs,
intrinsics,
distortion,
return_residuals=False)
return proj
def mono_proj_err_h2e_g2w(x_0,
object_points,
image_points,
intrinsics,
distortion,
device_tracking
):
"""
Method to the SSE of projected
image points to actual image points, for a single camera,
where we have an untracked pattern. Both the
hand2eye and grid2world are optimised.
So, x_0 should be of length 12.
"""
assert len(x_0) == 12
rvec = np.zeros((3, 1))
rvec[0] = x_0[0]
rvec[1] = x_0[1]
rvec[2] = x_0[2]
tvec = np.zeros((3, 1))
tvec[0] = x_0[3]
tvec[1] = x_0[4]
tvec[2] = x_0[5]
h2e = vu.extrinsic_vecs_to_matrix(rvec, tvec)
rvec[0] = x_0[6]
rvec[1] = x_0[7]
rvec[2] = x_0[8]
tvec[0] = x_0[9]
tvec[1] = x_0[10]
tvec[2] = x_0[11]
g2w = vu.extrinsic_vecs_to_matrix(rvec, tvec)
number_of_frames = len(object_points)
rvecs = []
tvecs = []
# Computes pattern2camera for each pose
for i in range(0, number_of_frames):
p2c = h2e @ np.linalg.inv(device_tracking[i]) @ g2w
rvec, tvec = vu.extrinsic_matrix_to_vecs(p2c)
rvecs.append(rvec)
tvecs.append(tvec)
proj, _ = vm.compute_mono_2d_err(object_points,
image_points,
rvecs,
tvecs,
intrinsics,
distortion,
return_residuals=False)
return proj
def mono_proj_err_h2e_int_dist(x_0,
object_points,
image_points,
device_tracking,
pattern_tracking,
pattern2marker_matrix
):
"""
Computes the SSE between projected
image points to actual image points, for a single camera,
where we have a tracked pattern. The handeye, intrinsics and
distortion parameters are optimised.
So, x_0 should be of length 6+4+5 = 15.
"""
assert len(x_0) == 15
rvec = np.zeros((3, 1))
rvec[0] = x_0[0]
rvec[1] = x_0[1]
rvec[2] = x_0[2]
tvec = np.zeros((3, 1))
tvec[0] = x_0[3]
tvec[1] = x_0[4]
tvec[2] = x_0[5]
h2e = vu.extrinsic_vecs_to_matrix(rvec, tvec)
intrinsics = np.zeros((3, 3))
intrinsics[0][0] = x_0[6]
intrinsics[1][1] = x_0[7]
intrinsics[0][2] = x_0[8]
intrinsics[1][2] = x_0[9]
distortion = np.zeros((1, 5))
distortion[0][0] = x_0[10]
distortion[0][1] = x_0[11]
distortion[0][2] = x_0[12]
distortion[0][3] = x_0[13]
distortion[0][4] = x_0[14]
number_of_frames = len(object_points)
rvecs = []
tvecs = []
# Computes pattern2camera for each pose
for i in range(0, number_of_frames):
p2c = h2e @ np.linalg.inv(device_tracking[i]) @ \
pattern_tracking[i] @ pattern2marker_matrix
rvec, tvec = vu.extrinsic_matrix_to_vecs(p2c)
rvecs.append(rvec)
tvecs.append(tvec)
proj, _ = vm.compute_mono_2d_err(object_points,
image_points,
rvecs,
tvecs,
intrinsics,
distortion)
return proj
# pylint:disable=too-many-arguments
def stereo_proj_err_h2e(x_0,
common_object_points,
common_left_image_points,
common_right_image_points,
left_intrinsics,
left_distortion,
right_intrinsics,
right_distortion,
l2r_rmat,
l2r_tvec,
device_tracking_array,
pattern_tracking_array,
left_pattern2marker_matrix=None
):
"""
Computes the SSE of projected image points
and actual image points for left and right cameras. x_0 should contain
the 6DOF of hand-to-eye, and if left_pattern2marker_matrix is None,
then an additional 6DOF of pattern-to-marker. So, x_0 can be either
length 6 or length 12.
:param x_0:
:param common_object_points:
:param common_left_image_points:
:param common_right_image_points:
:param left_intrinsics:
:param left_distortion:
:param right_intrinsics:
:param right_distortion:
:param l2r_rmat:
:param l2r_tvec:
:param device_tracking_array:
:param pattern_tracking_array:
:param left_pattern2marker_matrix:
:return: matrix of residuals for Levenberg-Marquardt optimisation.
"""
rvecs = []
tvecs = []
number_of_frames = len(common_object_points)
h2e_rvec = np.zeros((3, 1))
h2e_rvec[0][0] = x_0[0]
h2e_rvec[1][0] = x_0[1]
h2e_rvec[2][0] = x_0[2]
h2e_tvec = np.zeros((3, 1))
h2e_tvec[0][0] = x_0[3]
h2e_tvec[1][0] = x_0[4]
h2e_tvec[2][0] = x_0[5]
h2e = vu.extrinsic_vecs_to_matrix(h2e_rvec, h2e_tvec)
if left_pattern2marker_matrix is None:
p2m_rvec = np.zeros((3, 1))
p2m_rvec[0][0] = x_0[6]
p2m_rvec[1][0] = x_0[7]
p2m_rvec[2][0] = x_0[8]
p2m_tvec = np.zeros((3, 1))
p2m_tvec[0][0] = x_0[9]
p2m_tvec[1][0] = x_0[10]
p2m_tvec[2][0] = x_0[11]
p2m = vu.extrinsic_vecs_to_matrix(p2m_rvec, p2m_tvec)
else:
p2m = left_pattern2marker_matrix
for i in range(0, number_of_frames):
p2c = h2e \
@ np.linalg.inv(device_tracking_array[i]) \
@ pattern_tracking_array[i] \
@ p2m
rvec, tvec = vu.extrinsic_matrix_to_vecs(p2c)
rvecs.append(rvec)
tvecs.append(tvec)
proj, _ = vm.compute_stereo_2d_err(l2r_rmat,
l2r_tvec,
common_object_points,
common_left_image_points,
left_intrinsics,
left_distortion,
common_object_points,
common_right_image_points,
right_intrinsics,
right_distortion,
rvecs,
tvecs
)
return proj
def stereo_proj_err_h2e_int_dist_l2r(x_0,
common_object_points,
common_left_image_points,
common_right_image_points,
device_tracking_array,
pattern_tracking_array,
left_pattern2marker_matrix
):
"""
Computes the SSE of projected image points against actual
image points. x_0 should be 30 DOF.
"""
h2e_rvec = np.zeros((3, 1))
h2e_rvec[0][0] = x_0[0]
h2e_rvec[1][0] = x_0[1]
h2e_rvec[2][0] = x_0[2]
h2e_tvec = np.zeros((3, 1))
h2e_tvec[0][0] = x_0[3]
h2e_tvec[1][0] = x_0[4]
h2e_tvec[2][0] = x_0[5]
h2e = vu.extrinsic_vecs_to_matrix(h2e_rvec, h2e_tvec)
l2r_rvec = np.zeros((3, 1))
l2r_rvec[0][0] = x_0[6]
l2r_rvec[1][0] = x_0[7]
l2r_rvec[2][0] = x_0[8]
l2r_tvec = np.zeros((3, 1))
l2r_tvec[0][0] = x_0[9]
l2r_tvec[1][0] = x_0[10]
l2r_tvec[2][0] = x_0[11]
l2r = vu.extrinsic_vecs_to_matrix(l2r_rvec, l2r_tvec)
left_intrinsics = np.zeros((3, 3))
left_intrinsics[0][0] = x_0[12]
left_intrinsics[1][1] = x_0[13]
left_intrinsics[0][2] = x_0[14]
left_intrinsics[1][2] = x_0[15]
left_distortion = np.zeros((1, 5))
left_distortion[0][0] = x_0[16]
left_distortion[0][1] = x_0[17]
left_distortion[0][2] = x_0[18]
left_distortion[0][3] = x_0[19]
left_distortion[0][4] = x_0[20]
right_intrinsics = np.zeros((3, 3))
right_intrinsics[0][0] = x_0[21]
right_intrinsics[1][1] = x_0[22]
right_intrinsics[0][2] = x_0[23]
right_intrinsics[1][2] = x_0[24]
right_distortion = np.zeros((1, 5))
right_distortion[0][0] = x_0[25]
right_distortion[0][1] = x_0[26]
right_distortion[0][2] = x_0[27]
right_distortion[0][3] = x_0[28]
right_distortion[0][4] = x_0[29]
rvecs = []
tvecs = []
number_of_frames = len(common_object_points)
for i in range(0, number_of_frames):
p2c = h2e \
@ np.linalg.inv(device_tracking_array[i]) \
@ pattern_tracking_array[i] \
@ left_pattern2marker_matrix
rvec, tvec = vu.extrinsic_matrix_to_vecs(p2c)
rvecs.append(rvec)
tvecs.append(tvec)
proj, _ = vm.compute_stereo_2d_err(l2r[0:3, 0:3],
l2r[0:3, 3],
common_object_points,
common_left_image_points,
left_intrinsics,
left_distortion,
common_object_points,
common_right_image_points,
right_intrinsics,
right_distortion,
rvecs,
tvecs
)
return proj
| [
"sksurgerycalibration.video.video_calibration_metrics.compute_mono_2d_err",
"sksurgerycalibration.video.video_calibration_metrics.compute_stereo_2d_err",
"numpy.zeros",
"numpy.linalg.inv",
"sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix",
"sksurgerycalibration.video.video_calibration_utils.extrinsic_matrix_to_vecs"
] | [((1492, 1752), 'sksurgerycalibration.video.video_calibration_metrics.compute_stereo_2d_err', 'vm.compute_stereo_2d_err', (['l2r_rmat', 'l2r_tvec', 'common_object_points', 'common_left_image_points', 'left_intrinsics', 'left_distortion', 'common_object_points', 'common_right_image_points', 'right_intrinsics', 'right_distortion', 'rvecs', 'tvecs'], {'return_residuals': '(True)'}), '(l2r_rmat, l2r_tvec, common_object_points,\n common_left_image_points, left_intrinsics, left_distortion,\n common_object_points, common_right_image_points, right_intrinsics,\n right_distortion, rvecs, tvecs, return_residuals=True)\n', (1516, 1752), True, 'import sksurgerycalibration.video.video_calibration_metrics as vm\n'), ((2936, 2952), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (2944, 2952), True, 'import numpy as np\n'), ((3028, 3044), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (3036, 3044), True, 'import numpy as np\n'), ((3119, 3158), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['rvec', 'tvec'], {}), '(rvec, tvec)\n', (3146, 3158), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((3560, 3677), 'sksurgerycalibration.video.video_calibration_metrics.compute_mono_2d_err', 'vm.compute_mono_2d_err', (['object_points', 'image_points', 'rvecs', 'tvecs', 'intrinsics', 'distortion'], {'return_residuals': '(False)'}), '(object_points, image_points, rvecs, tvecs,\n intrinsics, distortion, return_residuals=False)\n', (3582, 3677), True, 'import sksurgerycalibration.video.video_calibration_metrics as vm\n'), ((4501, 4517), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (4509, 4517), True, 'import numpy as np\n'), ((4593, 4609), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (4601, 4609), True, 'import numpy as np\n'), ((4684, 4723), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['rvec', 'tvec'], {}), '(rvec, tvec)\n', (4711, 4723), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((4865, 4904), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['rvec', 'tvec'], {}), '(rvec, tvec)\n', (4892, 4904), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((5287, 5404), 'sksurgerycalibration.video.video_calibration_metrics.compute_mono_2d_err', 'vm.compute_mono_2d_err', (['object_points', 'image_points', 'rvecs', 'tvecs', 'intrinsics', 'distortion'], {'return_residuals': '(False)'}), '(object_points, image_points, rvecs, tvecs,\n intrinsics, distortion, return_residuals=False)\n', (5309, 5404), True, 'import sksurgerycalibration.video.video_calibration_metrics as vm\n'), ((6179, 6195), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (6187, 6195), True, 'import numpy as np\n'), ((6271, 6287), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (6279, 6287), True, 'import numpy as np\n'), ((6362, 6401), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['rvec', 'tvec'], {}), '(rvec, tvec)\n', (6389, 6401), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((6543, 6582), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['rvec', 'tvec'], {}), '(rvec, tvec)\n', (6570, 6582), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((6928, 7045), 'sksurgerycalibration.video.video_calibration_metrics.compute_mono_2d_err', 'vm.compute_mono_2d_err', (['object_points', 'image_points', 'rvecs', 'tvecs', 'intrinsics', 'distortion'], {'return_residuals': '(False)'}), '(object_points, image_points, rvecs, tvecs,\n intrinsics, distortion, return_residuals=False)\n', (6950, 7045), True, 'import sksurgerycalibration.video.video_calibration_metrics as vm\n'), ((7898, 7914), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (7906, 7914), True, 'import numpy as np\n'), ((7990, 8006), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (7998, 8006), True, 'import numpy as np\n'), ((8081, 8120), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['rvec', 'tvec'], {}), '(rvec, tvec)\n', (8108, 8120), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((8139, 8155), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (8147, 8155), True, 'import numpy as np\n'), ((8294, 8310), 'numpy.zeros', 'np.zeros', (['(1, 5)'], {}), '((1, 5))\n', (8302, 8310), True, 'import numpy as np\n'), ((8867, 8960), 'sksurgerycalibration.video.video_calibration_metrics.compute_mono_2d_err', 'vm.compute_mono_2d_err', (['object_points', 'image_points', 'rvecs', 'tvecs', 'intrinsics', 'distortion'], {}), '(object_points, image_points, rvecs, tvecs,\n intrinsics, distortion)\n', (8889, 8960), True, 'import sksurgerycalibration.video.video_calibration_metrics as vm\n'), ((10648, 10664), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (10656, 10664), True, 'import numpy as np\n'), ((10765, 10781), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (10773, 10781), True, 'import numpy as np\n'), ((10877, 10924), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['h2e_rvec', 'h2e_tvec'], {}), '(h2e_rvec, h2e_tvec)\n', (10904, 10924), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((11663, 11900), 'sksurgerycalibration.video.video_calibration_metrics.compute_stereo_2d_err', 'vm.compute_stereo_2d_err', (['l2r_rmat', 'l2r_tvec', 'common_object_points', 'common_left_image_points', 'left_intrinsics', 'left_distortion', 'common_object_points', 'common_right_image_points', 'right_intrinsics', 'right_distortion', 'rvecs', 'tvecs'], {}), '(l2r_rmat, l2r_tvec, common_object_points,\n common_left_image_points, left_intrinsics, left_distortion,\n common_object_points, common_right_image_points, right_intrinsics,\n right_distortion, rvecs, tvecs)\n', (11687, 11900), True, 'import sksurgerycalibration.video.video_calibration_metrics as vm\n'), ((12962, 12978), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (12970, 12978), True, 'import numpy as np\n'), ((13079, 13095), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (13087, 13095), True, 'import numpy as np\n'), ((13191, 13238), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['h2e_rvec', 'h2e_tvec'], {}), '(h2e_rvec, h2e_tvec)\n', (13218, 13238), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((13255, 13271), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (13263, 13271), True, 'import numpy as np\n'), ((13372, 13388), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (13380, 13388), True, 'import numpy as np\n'), ((13486, 13533), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['l2r_rvec', 'l2r_tvec'], {}), '(l2r_rvec, l2r_tvec)\n', (13513, 13533), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((13557, 13573), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (13565, 13573), True, 'import numpy as np\n'), ((13741, 13757), 'numpy.zeros', 'np.zeros', (['(1, 5)'], {}), '((1, 5))\n', (13749, 13757), True, 'import numpy as np\n'), ((13962, 13978), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (13970, 13978), True, 'import numpy as np\n'), ((14151, 14167), 'numpy.zeros', 'np.zeros', (['(1, 5)'], {}), '((1, 5))\n', (14159, 14167), True, 'import numpy as np\n'), ((14766, 15011), 'sksurgerycalibration.video.video_calibration_metrics.compute_stereo_2d_err', 'vm.compute_stereo_2d_err', (['l2r[0:3, 0:3]', 'l2r[0:3, 3]', 'common_object_points', 'common_left_image_points', 'left_intrinsics', 'left_distortion', 'common_object_points', 'common_right_image_points', 'right_intrinsics', 'right_distortion', 'rvecs', 'tvecs'], {}), '(l2r[0:3, 0:3], l2r[0:3, 3], common_object_points,\n common_left_image_points, left_intrinsics, left_distortion,\n common_object_points, common_right_image_points, right_intrinsics,\n right_distortion, rvecs, tvecs)\n', (14790, 15011), True, 'import sksurgerycalibration.video.video_calibration_metrics as vm\n'), ((1157, 1173), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (1165, 1173), True, 'import numpy as np\n'), ((1297, 1313), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (1305, 1313), True, 'import numpy as np\n'), ((3457, 3489), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_matrix_to_vecs', 'vu.extrinsic_matrix_to_vecs', (['p2c'], {}), '(p2c)\n', (3484, 3489), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((5184, 5216), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_matrix_to_vecs', 'vu.extrinsic_matrix_to_vecs', (['p2c'], {}), '(p2c)\n', (5211, 5216), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((6825, 6857), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_matrix_to_vecs', 'vu.extrinsic_matrix_to_vecs', (['p2c'], {}), '(p2c)\n', (6852, 6857), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((8764, 8796), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_matrix_to_vecs', 'vu.extrinsic_matrix_to_vecs', (['p2c'], {}), '(p2c)\n', (8791, 8796), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((10989, 11005), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (10997, 11005), True, 'import numpy as np\n'), ((11122, 11138), 'numpy.zeros', 'np.zeros', (['(3, 1)'], {}), '((3, 1))\n', (11130, 11138), True, 'import numpy as np\n'), ((11252, 11299), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_vecs_to_matrix', 'vu.extrinsic_vecs_to_matrix', (['p2m_rvec', 'p2m_tvec'], {}), '(p2m_rvec, p2m_tvec)\n', (11279, 11299), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((11560, 11592), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_matrix_to_vecs', 'vu.extrinsic_matrix_to_vecs', (['p2c'], {}), '(p2c)\n', (11587, 11592), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((14663, 14695), 'sksurgerycalibration.video.video_calibration_utils.extrinsic_matrix_to_vecs', 'vu.extrinsic_matrix_to_vecs', (['p2c'], {}), '(p2c)\n', (14690, 14695), True, 'import sksurgerycalibration.video.video_calibration_utils as vu\n'), ((6763, 6796), 'numpy.linalg.inv', 'np.linalg.inv', (['device_tracking[i]'], {}), '(device_tracking[i])\n', (6776, 6796), True, 'import numpy as np\n'), ((3339, 3372), 'numpy.linalg.inv', 'np.linalg.inv', (['device_tracking[i]'], {}), '(device_tracking[i])\n', (3352, 3372), True, 'import numpy as np\n'), ((5085, 5118), 'numpy.linalg.inv', 'np.linalg.inv', (['device_tracking[i]'], {}), '(device_tracking[i])\n', (5098, 5118), True, 'import numpy as np\n'), ((8646, 8679), 'numpy.linalg.inv', 'np.linalg.inv', (['device_tracking[i]'], {}), '(device_tracking[i])\n', (8659, 8679), True, 'import numpy as np\n'), ((11432, 11471), 'numpy.linalg.inv', 'np.linalg.inv', (['device_tracking_array[i]'], {}), '(device_tracking_array[i])\n', (11445, 11471), True, 'import numpy as np\n'), ((14512, 14551), 'numpy.linalg.inv', 'np.linalg.inv', (['device_tracking_array[i]'], {}), '(device_tracking_array[i])\n', (14525, 14551), True, 'import numpy as np\n')] |
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
def create_mixed_plot(dataframe, groupby, primary_measure, secondary_measure, title):
"""
Description: This function can be used to create a plot with 2 measures
Arguments:
df: the dataframe
group: columns to be grouped of the dataframe (can be multiple ones)
primary_measure: the primary measure
secondary_measure: the secondary measure (will be on the secondary_y axis)
title: the title for the plot
Returns:
The plot
"""
ax = dataframe.groupby(groupby)[secondary_measure].mean().plot(secondary_y=True, color="r", ylabel=primary_measure, rot=90, legend=True)
dataframe.groupby(groupby)[primary_measure].mean().plot.bar(ylabel=secondary_measure, ax=ax, color='tab:blue', edgecolor="k", legend=True)
plt.title(title)
return plt
def create_mean_and_count_plot(dataframe, groupby, measure, secondary_ylabel, title):
"""
Description: This function can be used to create a plot with 1 measure and a count as a secondary_y axis
Arguments:
df: the dataframe
group: columns to be grouped of the dataframe (can be multiple ones)
measure: the measure
ylabel: will be used as the secondary ylabes (for the count)
title: the title for the plot
Returns:
The plot
"""
ax = dataframe.groupby(groupby).count().plot(secondary_y=True, color="r", ylabel=measure, rot=90, legend=False)
dataframe.groupby(groupby)[measure].mean().plot.bar(ylabel=secondary_ylabel, ax=ax, color='tab:blue', edgecolor="k", legend=True)
plt.title(title)
return plt | [
"matplotlib.pyplot.title"
] | [((868, 884), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (877, 884), True, 'import matplotlib.pyplot as plt\n'), ((1667, 1683), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (1676, 1683), True, 'import matplotlib.pyplot as plt\n')] |
#pwap8: A tool to add Progressive Web App elements to PICO-8 HTML exports
#Copyright (c) 2020 Loxodromic
#MIT License (see LICENSE)
#....
from bs4 import BeautifulSoup
from bs4 import Doctype
from bs4 import Comment
from PIL import Image
import os
import json
import shutil
import argparse
import base64
import sys
#....
class PWAP8:
def __init__(self):
self.projectName = None
self.projectNameShort = None
self.srcHTML = None
self.srcJS = None
self.srcICON = None
self.buildDir = None
self.faviconStyle = "png"
self.bInlineManifest = False
self.copyOriginal = False
self.index = None
self.appRootHTML = 'index.html'
self.iconSizes = [32, 128, 144, 152, 167, 180, 192, 256, 512]
def _findPaths(self):
#derived paths...
if self.buildDir is None:
self.buildDir = os.path.join(os.getcwd(), 'build')
self.imagesDir = os.path.join(self.buildDir, 'images')
self.srcHTML = os.path.abspath(self.srcHTML)
self.srcJS = os.path.abspath(self.srcJS)
(javaScriptDir, self.javascriptFile) = os.path.split(self.srcJS)
def _createDirs(self):
try:
os.mkdir(self.buildDir)
except OSError:
pass
try:
os.mkdir(self.imagesDir)
except OSError:
pass
def _tweakHTML(self, soup, manifest, swJS):
#TODO: adding a DOCTYPE seems to mess with the finished game's layout, a browser issue, quirks mode?...
#prefix with <!DOCTYPE html>...
#doctype = Doctype('html')
#soup.insert(0, doctype)
#tweak head...
head = soup.head
comment = Comment("This file has been modified by pwap8 (https://github.com/loxodromic/pwap8)")
head.insert(0, comment)
#add some meta tags for colours, icons, etc...
head.append(soup.new_tag('meta', attrs={'name': 'theme-color', 'content': '#cccccc'}))
head.append(soup.new_tag('meta', attrs={'name': 'apple-mobile-web-app-capable', 'content': 'yes'}))
head.append(soup.new_tag('meta', attrs={'name': 'apple-mobile-web-app-status-bar-style', 'content':'#222222'}))
head.append(soup.new_tag('meta', attrs={'name': 'apple-mobile-web-app-title', 'content':soup.title.string}))
head.append(soup.new_tag('meta', attrs={'name': 'msapplication-TileImage', 'content':"images/{name}-icon-144.png".format(name=self.projectNameShort)}))
head.append(soup.new_tag('meta', attrs={'name': 'msapplication-TileColor', 'content':'#cccccc'}))
#favicons...
head.append(soup.new_tag('link', attrs={'rel': 'apple-touch-icon', 'href': "images/{name}-icon-167.png.png".format(name=self.projectNameShort)}))
if self.faviconStyle == "png":
head.append(soup.new_tag('link', attrs={'rel':'icon', 'href':'favicon-32.png', 'type':'image/png'}))
elif self.faviconStyle == "ico":
head.append(soup.new_tag('link', attrs={'rel':'icon', 'href':'favicon.ico', 'type':'image/x-icon'}))
#manifest...
if self.bInlineManifest:
manifestStr = json.dumps(manifest, indent=4, sort_keys=False)
head.append(soup.new_tag('link', attrs={'rel':'manifest', 'href':'data:application/manifest+json,' + manifestStr}))
else:
head.append(soup.new_tag('link', attrs={'rel':'manifest', 'href':"{name}.manifest".format(name=self.projectNameShort)}))
#tweak body...
body = soup.body
#something for when JavaScrript is off...
fallbackContent = soup.new_tag("noscript")
fallbackContent.string = "This will much be more fun with JavaScript enabled."
body.append(fallbackContent)
#service worker...
#TODO: can we inline the service worker?...
startSW = soup.new_tag("script", attrs={'type':'text/javascript'})
startSW.string = "window.onload = () => { 'use strict'; if ('serviceWorker' in navigator) { navigator.serviceWorker.register('./sw.js');}}"
body.append(startSW)
def _createManifest(self):
manifest = {
'name': self.projectName,
'short_name': self.projectNameShort,
'start_url': self.appRootHTML,
'display': 'standalone',
'theme_color': '#cccccc',
'background_color': '#222222',
'lang': 'en-US'
}
manifest['icons'] = []
for size in self.iconSizes:
icon = {'src': "images/{name}-icon-{size}.png".format(name=self.projectNameShort, size=size), 'sizes': "{size}x{size}".format(size=size), 'type': 'image/png'}
manifest["icons"].append(icon)
return manifest
def _createServiceWorker(self, cachedThings):
cachedStr = json.dumps(cachedThings)
swJS = """//sw.js...
//see https://developer.mozilla.org/en-US/docs/Web/API/Service_Worker_API/Using_Service_Workers
var cacheName = '{name}';
self.addEventListener('install', function(event) {{
event.waitUntil(
caches.open(cacheName).then(function(cache) {{ return cache.addAll({cached}); }})
);
}});
self.addEventListener('fetch', function(event) {{
event.respondWith(
caches.match(event.request).then(function(response) {{ return response || fetch(event.request); }})
);
}});
"""
return swJS.format(name=self.projectName, cached=cachedStr)
def _createIcon(self, sourceImage, size):
icon = Image.new("RGBA", (size, size))
thumb = sourceImage.copy()
if thumb.width < 64: #...TODO: remove hack
method = Image.NEAREST
else:
method = Image.BICUBIC
#scale up, then down to force thumbnail to work as expected...
scale = 1024 / thumb.width
thumb = thumb.resize((int(thumb.width * scale), int(thumb.height * scale)), method)
thumb.thumbnail((size, size), method)
#...TODO: this is a horrible hack, please fix
offset = (size - thumb.width) / 2
icon.paste(thumb, (int(offset), 0))
return icon
def _iconPath(self, pathFilename):
(path, filename) = pathFilename
if path is not None:
logicalPath = os.path.join(path, filename)
else:
logicalPath = filename
return os.path.join(self.buildDir, logicalPath)
def _createIcons(self):
iconFilenames = []
#use the provided graphic, or use a default...
if self.srcICON is not None:
sourceImage = Image.open(self.srcICON)
else:
fallbackIconStr = 'Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_AAAAAAAAKa3_Ka3_Ka3_Ka3_Ka3_Ka3_AAAAAAAAKa3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_AAAAAAAAKa3_Ka3_Ka3_Ka3_Ka3_Ka3_AAAAAAAAKa3_Ka3_AAAAAAAAAAAAAAAAAAAAAAAAKa3_Ka3_AAAAAAAAAAAAAAAAAAAAAAAAKa3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_Ka3_'
fallbackIconImage = Image.frombytes("RGB", (8, 8), base64.urlsafe_b64decode(fallbackIconStr))
sourceImage = fallbackIconImage.copy()
#resize and save each of the icons...
for size in self.iconSizes:
icon = self._createIcon(sourceImage, size)
iconFilename = ('images', "{name}-icon-{size}.png".format(name=self.projectNameShort, size=size))
icon.save(self._iconPath(iconFilename), "PNG")
iconFilenames.append(iconFilename)
#...and a favicon...
if self.faviconStyle is not None:
#additionally a classic 32 x 32 favicon referenced in the HTML...
icon = self._createIcon(sourceImage, 32)
if self.faviconStyle == "png":
iconFilename = (None, 'favicon-32.png')
icon.save(self._iconPath(iconFilename), "PNG")
iconFilenames.append(iconFilename)
elif self.faviconStyle == "ico":
iconFilename = (None, 'favicon.ico')
icon.save(self._iconPath(iconFilename), "ICO")
iconFilenames.append(iconFilename)
return iconFilenames
def Run(self):
print("Running build\n")
self._findPaths()
print("PROJECT_NAME = {name}".format(name=self.projectName))
print("SHORT_NAME = {name}".format(name=self.projectNameShort))
print("HTML = {html}".format(html=self.srcHTML))
print("JAVASCRIPT = {js}".format(js=self.srcJS))
print("ICON = {icon}".format(icon=self.srcICON))
print("BUILD_DIR = {build}".format(build=self.buildDir))
if self.index is not None:
print("INDEX = {index}".format(index=self.index))
if self.copyOriginal:
print("Will copy original html")
self._createDirs()
if self.copyOriginal:
dstHTML = os.path.join(self.buildDir, 'original.html')
try:
shutil.copy(self.srcHTML, dstHTML)
except OSError:
print("\nERROR: unable to copy original html file ({html})".format(html=self.srcHTML))
sys.exit()
dstHTML = 'index.html'
if self.index is not None:
dstHTML = 'app.html'
self.appRootHTML = dstHTML
#create manifest, icons, service worker...
manifestFilename = "{name}.manifest".format(name=self.projectNameShort)
manifest = self._createManifest()
if not self.bInlineManifest:
with open(os.path.join(self.buildDir, manifestFilename), "w") as fout:
fout.write(json.dumps(manifest, indent=4, sort_keys=False))
iconFilenames = self._createIcons()
#cachedThings = ['/', '/index.html', '/' + self.javascriptFile, '/sw.js', '/' + manifestFilename]
cachedThings = ['index.html', self.javascriptFile, 'sw.js', manifestFilename]
for (path, filename) in iconFilenames:
if path is not None:
cachedThings.append("{path}/{filename}".format(path = path, filename = filename))
else:
cachedThings.append(filename)
swJS = self._createServiceWorker(cachedThings)
with open(os.path.join(self.buildDir, 'sw.js'), "w") as fout:
fout.write(swJS)
#open up the html exported from PICO-8...
exportHML = None
try:
with open(self.srcHTML, "r") as fin:
exportHML = fin.read()
except OSError:
print("\nERROR: unable to open exported HTML ({html})".format(html=self.srcJS))
sys.exit()
soup = BeautifulSoup(exportHML, 'html.parser') #, from_encoding="utf-8")
#mess with it...
self._tweakHTML(soup, manifest, swJS)
#write it out to the build dir...
with open(os.path.join(self.buildDir, dstHTML), "w") as fout:
fout.write(str(soup.prettify()))
dstJS = os.path.join(self.buildDir, self.javascriptFile)
try:
shutil.copy(self.srcJS, dstJS)
except OSError:
print("\nERROR: unable to find exported JavaScript ({js})".format(js=self.srcJS))
sys.exit()
if self.index is not None:
try:
dstIndex = os.path.join(self.buildDir, 'index.html')
shutil.copy(self.index, dstIndex)
except OSError:
print("\nERROR: unable to copy replacement index ({html})".format(html=self.index))
sys.exit()
#....
if __name__ == '__main__':
print("""pwap8: A hack to add Progressive Web App elements to PICO-8 HTML exports
Copyright (c) 2020 Loxodromic
MIT License (see LICENSE)\n""")
parser = argparse.ArgumentParser(description='')
parser.add_argument('--name', nargs=1, type=str, metavar='PROJECT_NAME', help='project name', required=True)
parser.add_argument('--short', nargs=1, type=str, metavar='SHORT_NAME', help='short project name', required=False)
parser.add_argument('--icon', nargs=1, type=str, metavar='<ICON>', help='an image to use for the icons', required=False)
parser.add_argument('--original', help='also copy the original html to the build directory', required=False, action='store_true')
srcGroup = parser.add_argument_group('source')
srcGroup.add_argument('--html', nargs=1, type=str, metavar='<EXPORT.html>', help='PICO-8 exported HTML', required=True)
srcGroup.add_argument('--js', nargs=1, type=str, metavar='<JAVASCRIPT.js>', help='PICO-8 exported JavsScript', required=True)
srcGroup.add_argument('--index', nargs=1, type=str, metavar='<INDEX.html>', help='use a different file for the index.html (perhaps a cookie question)', required=False)
#TODO: intelligently determine html and JS filenames from just a dir...
#srcGroup.add_argument('--dir', nargs=1, type=str, metavar='<DIRECTORY>', help='Directory containing PICO-8 exported HTML and JavaScript')
dstGroup = parser.add_argument_group('destination')
dstGroup.add_argument('--build', nargs=1, type=str, metavar='<BUILD_DIR>', help='Directory for build result (defaults to ./build)', required=False)
args = parser.parse_args()
pwap8 = PWAP8()
pwap8.projectName = ''.join(args.name)
pwap8.srcHTML = ''.join(args.html)
pwap8.srcJS = ''.join(args.js)
if args.icon is not None:
pwap8.srcICON = ''.join(args.icon)
pwap8.projectNameShort = pwap8.projectName
if args.short is not None:
pwap8.projectNameShort = ''.join(args.short)
if args.build is not None:
pwap8.buildDir = ''.join(args.build)
pwap8.copyOriginal = args.original
if args.index is not None:
pwap8.index = ''.join(args.index)
#if we're asking the question, then we need the original...
pwap8.copyOriginal = True
pwap8.Run()
print("\nEOL")
#....
| [
"PIL.Image.open",
"argparse.ArgumentParser",
"base64.urlsafe_b64decode",
"bs4.Comment",
"PIL.Image.new",
"json.dumps",
"os.path.join",
"os.path.split",
"bs4.BeautifulSoup",
"os.getcwd",
"os.mkdir",
"shutil.copy",
"sys.exit",
"os.path.abspath"
] | [((11638, 11677), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '""""""'}), "(description='')\n", (11661, 11677), False, 'import argparse\n'), ((964, 1001), 'os.path.join', 'os.path.join', (['self.buildDir', '"""images"""'], {}), "(self.buildDir, 'images')\n", (976, 1001), False, 'import os\n'), ((1026, 1055), 'os.path.abspath', 'os.path.abspath', (['self.srcHTML'], {}), '(self.srcHTML)\n', (1041, 1055), False, 'import os\n'), ((1077, 1104), 'os.path.abspath', 'os.path.abspath', (['self.srcJS'], {}), '(self.srcJS)\n', (1092, 1104), False, 'import os\n'), ((1153, 1178), 'os.path.split', 'os.path.split', (['self.srcJS'], {}), '(self.srcJS)\n', (1166, 1178), False, 'import os\n'), ((1729, 1824), 'bs4.Comment', 'Comment', (['"""This file has been modified by pwap8 (https://github.com/loxodromic/pwap8)"""'], {}), "(\n 'This file has been modified by pwap8 (https://github.com/loxodromic/pwap8)'\n )\n", (1736, 1824), False, 'from bs4 import Comment\n'), ((4827, 4851), 'json.dumps', 'json.dumps', (['cachedThings'], {}), '(cachedThings)\n', (4837, 4851), False, 'import json\n'), ((5507, 5538), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', '(size, size)'], {}), "('RGBA', (size, size))\n", (5516, 5538), False, 'from PIL import Image\n'), ((6350, 6390), 'os.path.join', 'os.path.join', (['self.buildDir', 'logicalPath'], {}), '(self.buildDir, logicalPath)\n', (6362, 6390), False, 'import os\n'), ((10551, 10590), 'bs4.BeautifulSoup', 'BeautifulSoup', (['exportHML', '"""html.parser"""'], {}), "(exportHML, 'html.parser')\n", (10564, 10590), False, 'from bs4 import BeautifulSoup\n'), ((10864, 10912), 'os.path.join', 'os.path.join', (['self.buildDir', 'self.javascriptFile'], {}), '(self.buildDir, self.javascriptFile)\n', (10876, 10912), False, 'import os\n'), ((1233, 1256), 'os.mkdir', 'os.mkdir', (['self.buildDir'], {}), '(self.buildDir)\n', (1241, 1256), False, 'import os\n'), ((1324, 1348), 'os.mkdir', 'os.mkdir', (['self.imagesDir'], {}), '(self.imagesDir)\n', (1332, 1348), False, 'import os\n'), ((3175, 3222), 'json.dumps', 'json.dumps', (['manifest'], {'indent': '(4)', 'sort_keys': '(False)'}), '(manifest, indent=4, sort_keys=False)\n', (3185, 3222), False, 'import json\n'), ((6256, 6284), 'os.path.join', 'os.path.join', (['path', 'filename'], {}), '(path, filename)\n', (6268, 6284), False, 'import os\n'), ((6567, 6591), 'PIL.Image.open', 'Image.open', (['self.srcICON'], {}), '(self.srcICON)\n', (6577, 6591), False, 'from PIL import Image\n'), ((8793, 8837), 'os.path.join', 'os.path.join', (['self.buildDir', '"""original.html"""'], {}), "(self.buildDir, 'original.html')\n", (8805, 8837), False, 'import os\n'), ((10939, 10969), 'shutil.copy', 'shutil.copy', (['self.srcJS', 'dstJS'], {}), '(self.srcJS, dstJS)\n', (10950, 10969), False, 'import shutil\n'), ((916, 927), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (925, 927), False, 'import os\n'), ((6958, 6999), 'base64.urlsafe_b64decode', 'base64.urlsafe_b64decode', (['fallbackIconStr'], {}), '(fallbackIconStr)\n', (6982, 6999), False, 'import base64\n'), ((8871, 8905), 'shutil.copy', 'shutil.copy', (['self.srcHTML', 'dstHTML'], {}), '(self.srcHTML, dstHTML)\n', (8882, 8905), False, 'import shutil\n'), ((10138, 10174), 'os.path.join', 'os.path.join', (['self.buildDir', '"""sw.js"""'], {}), "(self.buildDir, 'sw.js')\n", (10150, 10174), False, 'import os\n'), ((10524, 10534), 'sys.exit', 'sys.exit', ([], {}), '()\n', (10532, 10534), False, 'import sys\n'), ((10750, 10786), 'os.path.join', 'os.path.join', (['self.buildDir', 'dstHTML'], {}), '(self.buildDir, dstHTML)\n', (10762, 10786), False, 'import os\n'), ((11100, 11110), 'sys.exit', 'sys.exit', ([], {}), '()\n', (11108, 11110), False, 'import sys\n'), ((11191, 11232), 'os.path.join', 'os.path.join', (['self.buildDir', '"""index.html"""'], {}), "(self.buildDir, 'index.html')\n", (11203, 11232), False, 'import os\n'), ((11249, 11282), 'shutil.copy', 'shutil.copy', (['self.index', 'dstIndex'], {}), '(self.index, dstIndex)\n', (11260, 11282), False, 'import shutil\n'), ((9053, 9063), 'sys.exit', 'sys.exit', ([], {}), '()\n', (9061, 9063), False, 'import sys\n'), ((9447, 9492), 'os.path.join', 'os.path.join', (['self.buildDir', 'manifestFilename'], {}), '(self.buildDir, manifestFilename)\n', (9459, 9492), False, 'import os\n'), ((9535, 9582), 'json.dumps', 'json.dumps', (['manifest'], {'indent': '(4)', 'sort_keys': '(False)'}), '(manifest, indent=4, sort_keys=False)\n', (9545, 9582), False, 'import json\n'), ((11427, 11437), 'sys.exit', 'sys.exit', ([], {}), '()\n', (11435, 11437), False, 'import sys\n')] |
# Copyright: Copyright (c) 2020., <NAME>
#
# Author: <NAME> <adam at jakab dot pro>
# Created: 2/23/20, 10:50 PM
# License: See LICENSE.txt
from beets.plugins import BeetsPlugin
from beets.util import cpu_count
from beetsplug.bpmanalyser.command import BpmAnayserCommand
class BpmAnalyserPlugin(BeetsPlugin):
def __init__(self):
super(BpmAnalyserPlugin, self).__init__()
self.config.add({
'auto': False,
'dry-run': False,
'write': True,
'threads': cpu_count(),
'force': False,
'quiet': False
})
def commands(self):
return [BpmAnayserCommand(self.config)]
| [
"beets.util.cpu_count",
"beetsplug.bpmanalyser.command.BpmAnayserCommand"
] | [((645, 675), 'beetsplug.bpmanalyser.command.BpmAnayserCommand', 'BpmAnayserCommand', (['self.config'], {}), '(self.config)\n', (662, 675), False, 'from beetsplug.bpmanalyser.command import BpmAnayserCommand\n'), ((525, 536), 'beets.util.cpu_count', 'cpu_count', ([], {}), '()\n', (534, 536), False, 'from beets.util import cpu_count\n')] |
# -*- coding: utf-8 -*-
"""
Test the utils lib.
"""
from __future__ import absolute_import
import pytest # noqa
from pynsot.util import slugify, validate_cidr
def test_validate_cidr():
"""Test ``validate_cidr()``."""
# IPv4
assert validate_cidr('0.0.0.0/0')
assert validate_cidr('1.2.3.4/32')
# IPv6
assert validate_cidr('::/0')
assert validate_cidr('fe8::/10')
# Bad
assert not validate_cidr('bogus')
assert not validate_cidr(None)
assert not validate_cidr(object())
assert not validate_cidr({})
assert not validate_cidr([])
def test_slugify():
cases = [
('/', '_'),
('my cool string', 'my cool string'),
('Ethernet1/2', 'Ethernet1_2'),
(
'foo-bar1:xe-0/0/0.0_foo-bar2:xe-0/0/0.0',
'foo-bar1:xe-0_0_0.0_foo-bar2:xe-0_0_0.0'
),
]
for case, expected in cases:
assert slugify(case) == expected
| [
"pynsot.util.validate_cidr",
"pynsot.util.slugify"
] | [((249, 275), 'pynsot.util.validate_cidr', 'validate_cidr', (['"""0.0.0.0/0"""'], {}), "('0.0.0.0/0')\n", (262, 275), False, 'from pynsot.util import slugify, validate_cidr\n'), ((287, 314), 'pynsot.util.validate_cidr', 'validate_cidr', (['"""1.2.3.4/32"""'], {}), "('1.2.3.4/32')\n", (300, 314), False, 'from pynsot.util import slugify, validate_cidr\n'), ((338, 359), 'pynsot.util.validate_cidr', 'validate_cidr', (['"""::/0"""'], {}), "('::/0')\n", (351, 359), False, 'from pynsot.util import slugify, validate_cidr\n'), ((371, 396), 'pynsot.util.validate_cidr', 'validate_cidr', (['"""fe8::/10"""'], {}), "('fe8::/10')\n", (384, 396), False, 'from pynsot.util import slugify, validate_cidr\n'), ((423, 445), 'pynsot.util.validate_cidr', 'validate_cidr', (['"""bogus"""'], {}), "('bogus')\n", (436, 445), False, 'from pynsot.util import slugify, validate_cidr\n'), ((461, 480), 'pynsot.util.validate_cidr', 'validate_cidr', (['None'], {}), '(None)\n', (474, 480), False, 'from pynsot.util import slugify, validate_cidr\n'), ((535, 552), 'pynsot.util.validate_cidr', 'validate_cidr', (['{}'], {}), '({})\n', (548, 552), False, 'from pynsot.util import slugify, validate_cidr\n'), ((568, 585), 'pynsot.util.validate_cidr', 'validate_cidr', (['[]'], {}), '([])\n', (581, 585), False, 'from pynsot.util import slugify, validate_cidr\n'), ((913, 926), 'pynsot.util.slugify', 'slugify', (['case'], {}), '(case)\n', (920, 926), False, 'from pynsot.util import slugify, validate_cidr\n')] |
import os
import warnings
import numpy as np
import pytorch_lightning as pl
import toml
import torch
import wandb
from pytorch_lightning.loggers import WandbLogger
from sklearn.metrics import cohen_kappa_score, accuracy_score
from torch import nn, optim
from torchvision import models
from data.data_aptos import get_aptos_loaders
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
torch.multiprocessing.set_sharing_strategy("file_system")
warnings.filterwarnings("ignore", category=UserWarning)
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
CHECKPOINT_PATH = None
CHECKPOINTS_BASE_PATH = toml.load("paths.toml")["CHECKPOINTS_BASE_PATH"]
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_raw_risks_burdens_inner_none/model-epoch_99-valid_loss_5.68.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_raw_snps_gen_none/model_100.pth"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_raw_snps_gen_h1/model-epoch_99-valid_loss_6.62.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_raw_snps_gen_h12/model-epoch_99-valid_loss_5.20.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_risk_scores_gen_none/model-epoch_99-valid_loss_6.28.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_risk_scores_gen_h1/model-epoch_99-valid_loss_6.15.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_risk_scores_gen_h12/model-epoch_99-valid_loss_5.74.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_burden_scores_gen_none/model-epoch_99-valid_loss_5.29.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_burden_scores_gen_h1/model-epoch_99-valid_loss_4.61.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "cm_r50_burden_scores_gen_h12/model-epoch_99-valid_loss_4.93.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "barlow_r50_proj128/epoch_99-step_170399.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "byol_r50_proj128/epoch_99-step_170399.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "simsiam_r50_proj128/epoch_99-step_170399.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "simclr_r50_proj128/epoch_99-step_170399.ckpt"
# CHECKPOINT_PATH = CHECKPOINTS_BASE_PATH + "nnclr_r50_proj128/epoch_99-step_170399.ckpt"
PROJECT_NAME = "aptos-sweep"
defaults = {
"batch_size": 32,
"epochs": 10,
"img_size": 448,
"accumulate_grad_batches": 1,
"scheduler": "none",
"lr": 1e-3,
"tfms": "default",
}
wandb.init(config=defaults)
config = wandb.config
def transform_multilabel_to_continuous(y, threshold):
assert isinstance(y, np.ndarray), "invalid y"
y = y > threshold
y = y.astype(int).sum(axis=1) - 1
return y
def score_kappa_aptos(y, y_pred, threshold=0.5):
y = transform_multilabel_to_continuous(y, threshold)
y_pred = transform_multilabel_to_continuous(y_pred, threshold)
return cohen_kappa_score(y, y_pred, labels=[0, 1, 2, 3, 4], weights="quadratic")
def acc_aptos(y, y_pred, threshold=0.5):
y = transform_multilabel_to_continuous(y, threshold)
y_pred = transform_multilabel_to_continuous(y_pred, threshold)
return accuracy_score(y, y_pred)
def load_from_state_dict_gen_img(model, state_dict):
"""Loads the model weights from the state dictionary."""
# step 1: filter state dict
model_keys_prefixes = []
for okey, oitem in model.state_dict().items():
model_keys_prefixes.append(okey.split(".")[0])
new_state_dict = {}
index = 0
for key, item in state_dict.items():
if (
key.startswith("imaging_model")
or key.startswith("model.imaging_model")
or key.startswith("models.0.imaging_model")
):
# remove the "model." prefix from the state dict key
all_key_parts = [model_keys_prefixes[index]]
if key.startswith("imaging_model"):
all_key_parts.extend(key.split(".")[2:])
elif key.startswith("model.imaging_model"):
all_key_parts.extend(key.split(".")[3:])
else:
all_key_parts.extend(key.split(".")[4:])
index += 1
new_key = ".".join(all_key_parts)
if new_key in model.state_dict():
new_state_dict[new_key] = item
# step 2: load from checkpoint
model.load_state_dict(new_state_dict, strict=False)
def load_from_state_dict_img_only(model, state_dict):
"""Loads the model weights from the state dictionary."""
# step 1: filter state dict
model_keys_prefixes = []
for okey, oitem in model.state_dict().items():
model_keys_prefixes.append(okey.split(".")[0])
new_state_dict = {}
index = 0
for key, item in state_dict.items():
if (
(
key.startswith("resnet_simclr")
or key.startswith("resnet_simsiam")
or key.startswith("resnet_barlow_twins")
or key.startswith("resnet_byol")
or key.startswith("resnet_nnclr")
)
and "projection" not in key
and "prediction" not in key
and "momentum" not in key
):
# remove the "model." prefix from the state dict key
all_key_parts = [model_keys_prefixes[index]]
all_key_parts.extend(key.split(".")[3:])
index += 1
new_key = ".".join(all_key_parts)
if new_key in model.state_dict():
new_state_dict[new_key] = item
# step 2: load from checkpoint
model.load_state_dict(new_state_dict, strict=False)
class Model(pl.LightningModule):
def __init__(
self,
n_output,
loss_fct,
base_model=models.resnet18,
pretrained=True,
lr=1e-3,
total_steps=0,
set_scheduler="none",
opt_method="adam",
opt_param=dict(),
metrics=[score_kappa_aptos, acc_aptos],
checkpoint=CHECKPOINT_PATH,
):
super().__init__()
self.lr = lr
self.total_steps = total_steps
self.loss_fct = loss_fct
self.set_scheduler = set_scheduler
if checkpoint is None:
self.model = base_model(pretrained=pretrained)
else:
self.model = base_model(pretrained=pretrained)
state_dict = torch.load(checkpoint, map_location=DEVICE)
if (
"simclr" in checkpoint
or "byol" in checkpoint
or "barlow" in checkpoint
or "simsiam" in checkpoint
or "nnclr" in checkpoint
):
load_from_state_dict_img_only(self.model, state_dict["state_dict"])
else:
if "state_dict" in state_dict:
load_from_state_dict_gen_img(self.model, state_dict["state_dict"])
else:
load_from_state_dict_gen_img(self.model, state_dict)
self.model.fc = nn.Linear(self.model.fc.in_features, n_output)
self.opt_method = opt_method
self.opt_param = opt_param
self.metrics = metrics
def forward(self, x):
return self.model(x)
def configure_optimizers(self):
if self.opt_method == "adam":
optimizer = optim.Adam(self.parameters(), lr=self.lr, **self.opt_param)
elif self.opt_method == "sgd":
optimizer = optim.SGD(self.parameters(), lr=self.lr, **self.opt_param)
else:
raise NotImplementedError(
f"optimization method {self.opt_method} not set up"
)
if self.set_scheduler == "none":
return optimizer
elif self.set_scheduler == "steplr":
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.5)
elif self.set_scheduler == "onecycle":
scheduler = optim.lr_scheduler.OneCycleLR(
optimizer,
max_lr=self.lr,
total_steps=self.total_steps,
)
return [optimizer], [scheduler]
def training_step(self, batch, idx):
x, y = batch
y_hat = self(x)
loss = self.loss_fct(y_hat, y)
self.log("train_loss", loss, on_epoch=True)
return loss
def validation_step(self, batch, idx):
x, y = batch
y_hat = self(x)
loss = self.loss_fct(y_hat, y)
y_np = y.detach().cpu().numpy()
y_hat_np = y_hat.detach().cpu().numpy()
if self.metrics is not None:
for metric in self.metrics:
self.log(
f"valid_{metric.__name__}",
metric(y_np, y_hat_np),
on_epoch=True,
prog_bar=True,
)
self.log("valid_loss", loss, on_epoch=True, prog_bar=True)
return loss
def test_step(self, batch, idx):
x, y = batch
y_hat = self(x)
loss = self.loss_fct(y_hat, y)
y_np = y.detach().cpu().numpy()
y_hat_np = y_hat.detach().cpu().numpy()
if self.metrics is not None:
for metric in self.metrics:
self.log(
f"test_{metric.__name__}",
metric(y_np, y_hat_np),
on_epoch=True,
prog_bar=True,
)
self.log("test_loss", loss, on_epoch=True, prog_bar=True)
return loss
def main():
print(config)
bs = config["batch_size"]
max_bs = 16
if bs > max_bs:
accumulate_grad_batches = int(np.ceil(bs / max_bs))
bs = bs // accumulate_grad_batches
print(
f"set batch_size to {bs} and use accumulate_grad_batches every {accumulate_grad_batches}"
)
else:
accumulate_grad_batches = 1
tl, vl, ttl = get_aptos_loaders(
num_workers=10,
size=config["img_size"],
batch_size=bs,
)
n_classes = 5
ep = config["epochs"]
loss_fct = torch.nn.BCEWithLogitsLoss()
optimizer = "adam"
optimizer_dict = dict(weight_decay=config["weight_decay"])
basemodel = models.resnet50
model = Model(
n_classes,
loss_fct=loss_fct,
base_model=basemodel,
lr=config["lr"],
pretrained=False,
opt_method=optimizer,
opt_param=optimizer_dict,
checkpoint=CHECKPOINT_PATH,
)
logger = WandbLogger(project=PROJECT_NAME)
trainer = pl.Trainer(
gpus=1,
max_epochs=ep,
logger=logger,
accumulate_grad_batches=accumulate_grad_batches,
)
trainer.fit(model, tl, vl)
if __name__ == "__main__":
main()
| [
"sklearn.metrics.accuracy_score",
"numpy.ceil",
"torch.load",
"torch.optim.lr_scheduler.OneCycleLR",
"torch.optim.lr_scheduler.StepLR",
"sklearn.metrics.cohen_kappa_score",
"wandb.init",
"pytorch_lightning.loggers.WandbLogger",
"torch.cuda.is_available",
"pytorch_lightning.Trainer",
"toml.load",
"data.data_aptos.get_aptos_loaders",
"torch.multiprocessing.set_sharing_strategy",
"torch.nn.BCEWithLogitsLoss",
"torch.nn.Linear",
"warnings.filterwarnings"
] | [((375, 432), 'torch.multiprocessing.set_sharing_strategy', 'torch.multiprocessing.set_sharing_strategy', (['"""file_system"""'], {}), "('file_system')\n", (417, 432), False, 'import torch\n'), ((433, 488), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'UserWarning'}), "('ignore', category=UserWarning)\n", (456, 488), False, 'import warnings\n'), ((2388, 2415), 'wandb.init', 'wandb.init', ([], {'config': 'defaults'}), '(config=defaults)\n', (2398, 2415), False, 'import wandb\n'), ((608, 631), 'toml.load', 'toml.load', (['"""paths.toml"""'], {}), "('paths.toml')\n", (617, 631), False, 'import toml\n'), ((2804, 2877), 'sklearn.metrics.cohen_kappa_score', 'cohen_kappa_score', (['y', 'y_pred'], {'labels': '[0, 1, 2, 3, 4]', 'weights': '"""quadratic"""'}), "(y, y_pred, labels=[0, 1, 2, 3, 4], weights='quadratic')\n", (2821, 2877), False, 'from sklearn.metrics import cohen_kappa_score, accuracy_score\n'), ((3056, 3081), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['y', 'y_pred'], {}), '(y, y_pred)\n', (3070, 3081), False, 'from sklearn.metrics import cohen_kappa_score, accuracy_score\n'), ((9714, 9787), 'data.data_aptos.get_aptos_loaders', 'get_aptos_loaders', ([], {'num_workers': '(10)', 'size': "config['img_size']", 'batch_size': 'bs'}), "(num_workers=10, size=config['img_size'], batch_size=bs)\n", (9731, 9787), False, 'from data.data_aptos import get_aptos_loaders\n'), ((9878, 9906), 'torch.nn.BCEWithLogitsLoss', 'torch.nn.BCEWithLogitsLoss', ([], {}), '()\n', (9904, 9906), False, 'import torch\n'), ((10290, 10323), 'pytorch_lightning.loggers.WandbLogger', 'WandbLogger', ([], {'project': 'PROJECT_NAME'}), '(project=PROJECT_NAME)\n', (10301, 10323), False, 'from pytorch_lightning.loggers import WandbLogger\n'), ((10339, 10441), 'pytorch_lightning.Trainer', 'pl.Trainer', ([], {'gpus': '(1)', 'max_epochs': 'ep', 'logger': 'logger', 'accumulate_grad_batches': 'accumulate_grad_batches'}), '(gpus=1, max_epochs=ep, logger=logger, accumulate_grad_batches=\n accumulate_grad_batches)\n', (10349, 10441), True, 'import pytorch_lightning as pl\n'), ((521, 546), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (544, 546), False, 'import torch\n'), ((6874, 6920), 'torch.nn.Linear', 'nn.Linear', (['self.model.fc.in_features', 'n_output'], {}), '(self.model.fc.in_features, n_output)\n', (6883, 6920), False, 'from torch import nn, optim\n'), ((6237, 6280), 'torch.load', 'torch.load', (['checkpoint'], {'map_location': 'DEVICE'}), '(checkpoint, map_location=DEVICE)\n', (6247, 6280), False, 'import torch\n'), ((9457, 9477), 'numpy.ceil', 'np.ceil', (['(bs / max_bs)'], {}), '(bs / max_bs)\n', (9464, 9477), True, 'import numpy as np\n'), ((7635, 7696), 'torch.optim.lr_scheduler.StepLR', 'optim.lr_scheduler.StepLR', (['optimizer'], {'step_size': '(10)', 'gamma': '(0.5)'}), '(optimizer, step_size=10, gamma=0.5)\n', (7660, 7696), False, 'from torch import nn, optim\n'), ((7768, 7859), 'torch.optim.lr_scheduler.OneCycleLR', 'optim.lr_scheduler.OneCycleLR', (['optimizer'], {'max_lr': 'self.lr', 'total_steps': 'self.total_steps'}), '(optimizer, max_lr=self.lr, total_steps=self.\n total_steps)\n', (7797, 7859), False, 'from torch import nn, optim\n')] |
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.utils import flt, cstr, cint
from frappe import _, msgprint, scrub
from frappe.model.naming import make_autoname
from frappe.utils import has_common
from datetime import date
#------------------------------------------------------------------
#Permission Query
#------------------------------------------------------------------
def cust_get_permission_query_conditions(user):
if not user: user = frappe.session.user
cust_list = []
cust=frappe.db.sql("""select name from `tabCustomer` where user='{0}' """.format(frappe.session.user), as_dict=1)
cust_list = [ '"%s"'%c.get("name") for c in cust ]
roles = frappe.get_roles();
if user != "Administrator" and has_common(['Customer'],roles) :
if cust_list:
return """(`tabCustomer`.name in ({0}) )""".format(','.join(cust_list))
else:
return """(`tabCustomer`.name!='1')""" | [
"frappe.utils.has_common",
"frappe.get_roles"
] | [((725, 743), 'frappe.get_roles', 'frappe.get_roles', ([], {}), '()\n', (741, 743), False, 'import frappe\n'), ((777, 808), 'frappe.utils.has_common', 'has_common', (["['Customer']", 'roles'], {}), "(['Customer'], roles)\n", (787, 808), False, 'from frappe.utils import has_common\n')] |
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
consumer_key = 'Iqx8PEplFCeqMF0BYIjxnh2Xq'
consumer_secret = '<KEY>'
access_token = '<KEY>'
access_token_secret = '<KEY>'
class StdoutListener(StreamListener):
def on_status(self, status):
print("new status captured")
username = status.user.name
with open('tweet.csv', 'a', encoding='utf-8') as f:
if hasattr(status, 'retweeted_status'):
try:
data = status.retweeted_status.extended_tweet["full_text"].replace("\n", " ")
f.write(f'"{username}", "{data}"')
except AttributeError:
data = status.retweeted_status.text.replace("\n", " ")
f.write(f'"{username}", "{data}"')
else:
try:
data = status.extended_tweet["full_text"].replace("\n", " ")
f.write(f'"{username}", "{data}"')
except AttributeError:
data = status.text.replace("\n", " ")
f.write(f'"{username}", "{data}"')
f.write('\n')
print(username)
print(data)
def on_error(self,status):
print(f"Error happened = {status}")
print("Starting ...")
l = StdoutListener()
auth = OAuthHandler(consumer_key,consumer_secret)
auth.set_access_token(access_token,access_token_secret)
stream = Stream(auth, l, tweet_mode= 'extended')
stream.filter(track=['@paketmantap',
'@Pluus62',
'@VVIPTjanda',
'@kontenmalamm',
'@blogcrotin',
'@localpride69',
'@kontenlangka1',
'@megadriveigo2',
'@josie_6996',
'@indokoleksivip',
'video syur',
'@indokoleksivip',
'@tanjhennya',
'@stichhh18',
'@18PlusPlus',
'@Pemer1_bangsa',
'@tantelc69',
'@vipplatinume',
'@BawokPenikmat',
'@Sangee69548407'
'@susumurniid',
'@bokepviral77',
'@Nanda Vcs',
'@bokep vvip',
'@ReaddyVcs',
'@Salsanabilreal',
'@indosexvideo',
'@ngentotvideoabg',
'@Mirna72373335',
'@DirtyDaddyyyyy',
'@Silvia95918924',
'@Nadiaabby_',
'@RianiSintaa',
'@VcsVcs5',
'@indosia73523285',
'@bella72993251',
'@raniyp2',
'@nadiaabby_',
'@dahlia32651457',
'@flovcs',
'@vcsvcs5',
'@ayu22445553',
'@sari1235567',
'@vcs_booking',
'@dila250796',
'@cintaclaura1',
'@diana08226224',
'@meossela',
'@xxxvcsxx1',
'@juliaprw1',
'@woredone',
'@vcswiwit',
'@vcsreal41672818',
'@bbygiselle4',
'@medicinefromme',
'@newbie10549073',
'@rajanyapijat',
'@balqis58702678',
'@almerrdo1',
'@nova9099',
'@ayu22445553',
'@availlg',
'@wilsafitri',
'@ternaklonteori',
'openbo',
'cewekpanggilan',
'@barterfogil',
'jilbob',
'@jilbobcolmek',
'bokep',
'korantempo',
'jav_grandpa',
'dapoerbokep',
'jav_banget',
'jav_m1lf',
'ngentot',
'entot',
'gisel',
'@loveasiangirls',
'siskaeee_ofc',
'#malamjumat',
'korantempo',
'detikcom',
'berita',
'#morningSeex',
'#AyoMainLagi',
'Memek',
'Kontol',
'Coli',
'crot',
'ngocok',
'handjob',
'colmek',
'#openVCS',
'@susumurniid',
'#openBO',
'jilmek',
'#berita',
'memekperawan',
'ngewe',
'#bokepjilbab',
'#toketgede',
'#bokepindo',
'#bokepviral',
'#bokepKorea',
'#bokepterbaru',
'#bokepjilbab',
'#bokeppelajar',
'#bokepmahasiswi',
'#vcsreal',
'#videomesum',
'#bokep2020',
'#vcscrot',
'#bokepabg',
'#tantebispak',
'#bokepbarat',
'#mantapmantap',
'#sange',
'#sangeberat',
'#bugil',
'#ngaceng',
'#bugil',
'#bugilhot',
'#sangeberat',
'#SJY182',
'@korantempo',
'@detikcom',
'#BoikotSyariahMandiri',
'#SJ128',
'#LaskarFPITerbuktiBersenpi',
'#ngenton',
'kontol',
'memek',
'#berita',
'<NAME>',
'@jokowi',
'#beritaindonesia',
'#cewekbispak',
'#bispak',
'#cewebispak',
'#toketgede',
'BASARNAS',
'#Sumedang',
'CNN Indonesia',
'#ayamkampus'])
| [
"tweepy.Stream",
"tweepy.OAuthHandler"
] | [((1351, 1394), 'tweepy.OAuthHandler', 'OAuthHandler', (['consumer_key', 'consumer_secret'], {}), '(consumer_key, consumer_secret)\n', (1363, 1394), False, 'from tweepy import OAuthHandler\n'), ((1459, 1497), 'tweepy.Stream', 'Stream', (['auth', 'l'], {'tweet_mode': '"""extended"""'}), "(auth, l, tweet_mode='extended')\n", (1465, 1497), False, 'from tweepy import Stream\n')] |
import pickle
import os
import re
import numpy as np
import pandas as pd
from torch.utils.data import DataLoader, Dataset
# 35 attributes which contains enough non-values
attributes = ['DiasABP', 'HR', 'Na', 'Lactate', 'NIDiasABP', 'PaO2', 'WBC', 'pH', 'Albumin', 'ALT', 'Glucose', 'SaO2',
'Temp', 'AST', 'Bilirubin', 'HCO3', 'BUN', 'RespRate', 'Mg', 'HCT', 'SysABP', 'FiO2', 'K', 'GCS',
'Cholesterol', 'NISysABP', 'TroponinT', 'MAP', 'TroponinI', 'PaCO2', 'Platelets', 'Urine', 'NIMAP',
'Creatinine', 'ALP']
def extract_hour(x):
h, _ = map(int, x.split(":"))
return h
def parse_data(x):
# extract the last value for each attribute
x = x.set_index("Parameter").to_dict()["Value"]
values = []
for attr in attributes:
if x.__contains__(attr):
values.append(x[attr])
else:
values.append(np.nan)
return values
def parse_id(id_, missing_ratio=0.1):
data = pd.read_csv("./data/physio/set-a/{}.txt".format(id_))
# set hour
data["Time"] = data["Time"].apply(lambda x: extract_hour(x))
# create data for 48 hours x 35 attributes
observed_values = []
for h in range(48):
observed_values.append(parse_data(data[data["Time"] == h]))
observed_values = np.array(observed_values)
observed_masks = ~np.isnan(observed_values)
# randomly set some percentage as ground-truth
masks = observed_masks.reshape(-1).copy()
obs_indices = np.where(masks)[0].tolist()
miss_indices = np.random.choice(
obs_indices, (int)(len(obs_indices) * missing_ratio), replace=False
)
masks[miss_indices] = False
gt_masks = masks.reshape(observed_masks.shape)
observed_values = np.nan_to_num(observed_values)
observed_masks = observed_masks.astype("float32")
gt_masks = gt_masks.astype("float32")
return observed_values, observed_masks, gt_masks
def get_idlist():
patient_id = []
for filename in os.listdir("./data/physio/set-a"):
match = re.search("\d{6}", filename)
if match:
patient_id.append(match.group())
patient_id = np.sort(patient_id)
return patient_id
class Physio_Dataset(Dataset):
def __init__(self, eval_length=48, use_index_list=None, missing_ratio=0.0, seed=0):
self.eval_length = eval_length
np.random.seed(seed) # seed for ground truth choice
self.observed_values = []
self.observed_masks = []
self.gt_masks = []
path = (
"./data/physio_missing" + str(missing_ratio) + "_seed" + str(seed) + ".pk"
)
if os.path.isfile(path) == False: # if datasetfile is none, create
idlist = get_idlist()
for id_ in idlist:
try:
observed_values, observed_masks, gt_masks = parse_id(
id_, missing_ratio
)
self.observed_values.append(observed_values)
self.observed_masks.append(observed_masks)
self.gt_masks.append(gt_masks)
except Exception as e:
print(id_, e)
continue
self.observed_values = np.array(self.observed_values)
self.observed_masks = np.array(self.observed_masks)
self.gt_masks = np.array(self.gt_masks)
# calc mean and std and normalize values
# (it is the same normalization as Cao et al. (2018) (https://github.com/caow13/BRITS))
tmp_values = self.observed_values.reshape(-1, 35)
tmp_masks = self.observed_masks.reshape(-1, 35)
mean = np.zeros(35)
std = np.zeros(35)
for k in range(35):
c_data = tmp_values[:, k][tmp_masks[:, k] == 1]
mean[k] = c_data.mean()
std[k] = c_data.std()
self.observed_values = (
(self.observed_values - mean) / std * self.observed_masks
)
with open(path, "wb") as f:
pickle.dump(
[self.observed_values, self.observed_masks, self.gt_masks], f
)
else: # load datasetfile
with open(path, "rb") as f:
self.observed_values, self.observed_masks, self.gt_masks = pickle.load(
f
)
if use_index_list is None:
self.use_index_list = np.arange(len(self.observed_values))
else:
self.use_index_list = use_index_list
def __getitem__(self, org_index):
index = self.use_index_list[org_index]
s = {
"observed_data": self.observed_values[index],
"observed_mask": self.observed_masks[index],
"gt_mask": self.gt_masks[index],
"timepoints": np.arange(self.eval_length),
}
return s
def __len__(self):
return len(self.use_index_list)
def get_dataloader(seed=1, nfold=None, batch_size=16, missing_ratio=0.1):
# only to obtain total length of dataset
dataset = Physio_Dataset(missing_ratio=missing_ratio, seed=seed)
indlist = np.arange(len(dataset))
np.random.seed(seed)
np.random.shuffle(indlist)
# 5-fold test
start = (int)(nfold * 0.2 * len(dataset))
end = (int)((nfold + 1) * 0.2 * len(dataset))
test_index = indlist[start:end]
remain_index = np.delete(indlist, np.arange(start, end))
np.random.seed(seed)
np.random.shuffle(remain_index)
num_train = (int)(len(dataset) * 0.7)
train_index = remain_index[:num_train]
valid_index = remain_index[num_train:]
dataset = Physio_Dataset(
use_index_list=train_index, missing_ratio=missing_ratio, seed=seed
)
train_loader = DataLoader(dataset, batch_size=batch_size, shuffle=1)
valid_dataset = Physio_Dataset(
use_index_list=valid_index, missing_ratio=missing_ratio, seed=seed
)
valid_loader = DataLoader(valid_dataset, batch_size=batch_size, shuffle=0)
test_dataset = Physio_Dataset(
use_index_list=test_index, missing_ratio=missing_ratio, seed=seed
)
test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=0)
return train_loader, valid_loader, test_loader
| [
"re.search",
"os.listdir",
"pickle.dump",
"numpy.arange",
"numpy.where",
"numpy.sort",
"pickle.load",
"os.path.isfile",
"numpy.array",
"numpy.zeros",
"numpy.isnan",
"numpy.random.seed",
"torch.utils.data.DataLoader",
"numpy.nan_to_num",
"numpy.random.shuffle"
] | [((1296, 1321), 'numpy.array', 'np.array', (['observed_values'], {}), '(observed_values)\n', (1304, 1321), True, 'import numpy as np\n'), ((1739, 1769), 'numpy.nan_to_num', 'np.nan_to_num', (['observed_values'], {}), '(observed_values)\n', (1752, 1769), True, 'import numpy as np\n'), ((1980, 2013), 'os.listdir', 'os.listdir', (['"""./data/physio/set-a"""'], {}), "('./data/physio/set-a')\n", (1990, 2013), False, 'import os\n'), ((2140, 2159), 'numpy.sort', 'np.sort', (['patient_id'], {}), '(patient_id)\n', (2147, 2159), True, 'import numpy as np\n'), ((5196, 5216), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (5210, 5216), True, 'import numpy as np\n'), ((5221, 5247), 'numpy.random.shuffle', 'np.random.shuffle', (['indlist'], {}), '(indlist)\n', (5238, 5247), True, 'import numpy as np\n'), ((5465, 5485), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (5479, 5485), True, 'import numpy as np\n'), ((5490, 5521), 'numpy.random.shuffle', 'np.random.shuffle', (['remain_index'], {}), '(remain_index)\n', (5507, 5521), True, 'import numpy as np\n'), ((5781, 5834), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'shuffle': '(1)'}), '(dataset, batch_size=batch_size, shuffle=1)\n', (5791, 5834), False, 'from torch.utils.data import DataLoader, Dataset\n'), ((5971, 6030), 'torch.utils.data.DataLoader', 'DataLoader', (['valid_dataset'], {'batch_size': 'batch_size', 'shuffle': '(0)'}), '(valid_dataset, batch_size=batch_size, shuffle=0)\n', (5981, 6030), False, 'from torch.utils.data import DataLoader, Dataset\n'), ((6164, 6222), 'torch.utils.data.DataLoader', 'DataLoader', (['test_dataset'], {'batch_size': 'batch_size', 'shuffle': '(0)'}), '(test_dataset, batch_size=batch_size, shuffle=0)\n', (6174, 6222), False, 'from torch.utils.data import DataLoader, Dataset\n'), ((1344, 1369), 'numpy.isnan', 'np.isnan', (['observed_values'], {}), '(observed_values)\n', (1352, 1369), True, 'import numpy as np\n'), ((2031, 2060), 're.search', 're.search', (['"""\\\\d{6}"""', 'filename'], {}), "('\\\\d{6}', filename)\n", (2040, 2060), False, 'import re\n'), ((2350, 2370), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (2364, 2370), True, 'import numpy as np\n'), ((5437, 5458), 'numpy.arange', 'np.arange', (['start', 'end'], {}), '(start, end)\n', (5446, 5458), True, 'import numpy as np\n'), ((2624, 2644), 'os.path.isfile', 'os.path.isfile', (['path'], {}), '(path)\n', (2638, 2644), False, 'import os\n'), ((3230, 3260), 'numpy.array', 'np.array', (['self.observed_values'], {}), '(self.observed_values)\n', (3238, 3260), True, 'import numpy as np\n'), ((3295, 3324), 'numpy.array', 'np.array', (['self.observed_masks'], {}), '(self.observed_masks)\n', (3303, 3324), True, 'import numpy as np\n'), ((3353, 3376), 'numpy.array', 'np.array', (['self.gt_masks'], {}), '(self.gt_masks)\n', (3361, 3376), True, 'import numpy as np\n'), ((3672, 3684), 'numpy.zeros', 'np.zeros', (['(35)'], {}), '(35)\n', (3680, 3684), True, 'import numpy as np\n'), ((3703, 3715), 'numpy.zeros', 'np.zeros', (['(35)'], {}), '(35)\n', (3711, 3715), True, 'import numpy as np\n'), ((4842, 4869), 'numpy.arange', 'np.arange', (['self.eval_length'], {}), '(self.eval_length)\n', (4851, 4869), True, 'import numpy as np\n'), ((1486, 1501), 'numpy.where', 'np.where', (['masks'], {}), '(masks)\n', (1494, 1501), True, 'import numpy as np\n'), ((4072, 4146), 'pickle.dump', 'pickle.dump', (['[self.observed_values, self.observed_masks, self.gt_masks]', 'f'], {}), '([self.observed_values, self.observed_masks, self.gt_masks], f)\n', (4083, 4146), False, 'import pickle\n'), ((4334, 4348), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4345, 4348), False, 'import pickle\n')] |
import os;
pref = "impact"
i = 1
for filename in os.listdir("."):
if filename.startswith(pref):
os.rename(filename, str(i) + ".png")
i = i + 1
# os.rename(filename, filename.replace("0", ""))
# print(filename) | [
"os.listdir"
] | [((50, 65), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (60, 65), False, 'import os\n')] |
import logging
logging.basicConfig(
filename='govlytics.log', filemode='w',
format='[%(asctime)s - %(filename)s - %(levelname)s] %(message)s',
level=logging.INFO
)
from . import gov
from . import graph
gov.data_utils.create_govlytics_dirs()
| [
"logging.basicConfig"
] | [((16, 172), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""govlytics.log"""', 'filemode': '"""w"""', 'format': '"""[%(asctime)s - %(filename)s - %(levelname)s] %(message)s"""', 'level': 'logging.INFO'}), "(filename='govlytics.log', filemode='w', format=\n '[%(asctime)s - %(filename)s - %(levelname)s] %(message)s', level=\n logging.INFO)\n", (35, 172), False, 'import logging\n')] |
#ANN
#Preprocessing
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('Churn_Modelling.csv')
X = dataset.iloc[:,3:13].values
y = dataset.iloc[:,-1].values
#Encoding categorical data
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
labelencoder_X_1 = LabelEncoder()
X[:,1] = labelencoder_X_1.fit_transform(X[:,1])
labelencoder_X_2 = LabelEncoder()
X[:,2] = labelencoder_X_2.fit_transform(X[:,2])
onehotencoder = OneHotEncoder(categorical_features = [1])
#onehotencoder = OneHotEncoder(categorical_features = [2])
X = onehotencoder.fit_transform(X).toarray()
X = X[:,1:]
from sklearn.cross_validation import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X,y, test_size = 0.2, random_state = 0)
#Feature scaling
from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)
#Build model
import keras
from keras.models import Sequential
from keras.layers import Dense
classifier = Sequential()
classifier.add(Dense(activation="relu", input_dim=11, units=6, kernel_initializer="uniform"))
classifier.add(Dense(activation="relu", units=6, kernel_initializer="uniform"))
classifier.add(Dense(activation="sigmoid", units=1, kernel_initializer="uniform"))
classifier.compile(optimizer = 'adam', loss = 'binary_crossentropy', metrics = ['accuracy'])
#Train model
classifier.fit(X_train, y_train, batch_size = 10, epochs = 100)
#Predict
y_pred = classifier.predict(X_test)
y_pred = (y_pred > 0.5)
#Evaluate
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test,y_pred) | [
"sklearn.preprocessing.LabelEncoder",
"pandas.read_csv",
"sklearn.preprocessing.OneHotEncoder",
"keras.models.Sequential",
"sklearn.preprocessing.StandardScaler",
"sklearn.cross_validation.train_test_split",
"keras.layers.Dense",
"sklearn.metrics.confusion_matrix"
] | [((103, 137), 'pandas.read_csv', 'pd.read_csv', (['"""Churn_Modelling.csv"""'], {}), "('Churn_Modelling.csv')\n", (114, 137), True, 'import pandas as pd\n'), ((309, 323), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (321, 323), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((391, 405), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (403, 405), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((470, 509), 'sklearn.preprocessing.OneHotEncoder', 'OneHotEncoder', ([], {'categorical_features': '[1]'}), '(categorical_features=[1])\n', (483, 509), False, 'from sklearn.preprocessing import LabelEncoder, OneHotEncoder\n'), ((718, 771), 'sklearn.cross_validation.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)', 'random_state': '(0)'}), '(X, y, test_size=0.2, random_state=0)\n', (734, 771), False, 'from sklearn.cross_validation import train_test_split\n'), ((849, 865), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (863, 865), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1044, 1056), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1054, 1056), False, 'from keras.models import Sequential\n'), ((1617, 1649), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['y_test', 'y_pred'], {}), '(y_test, y_pred)\n', (1633, 1649), False, 'from sklearn.metrics import confusion_matrix\n'), ((1072, 1149), 'keras.layers.Dense', 'Dense', ([], {'activation': '"""relu"""', 'input_dim': '(11)', 'units': '(6)', 'kernel_initializer': '"""uniform"""'}), "(activation='relu', input_dim=11, units=6, kernel_initializer='uniform')\n", (1077, 1149), False, 'from keras.layers import Dense\n'), ((1166, 1229), 'keras.layers.Dense', 'Dense', ([], {'activation': '"""relu"""', 'units': '(6)', 'kernel_initializer': '"""uniform"""'}), "(activation='relu', units=6, kernel_initializer='uniform')\n", (1171, 1229), False, 'from keras.layers import Dense\n'), ((1246, 1312), 'keras.layers.Dense', 'Dense', ([], {'activation': '"""sigmoid"""', 'units': '(1)', 'kernel_initializer': '"""uniform"""'}), "(activation='sigmoid', units=1, kernel_initializer='uniform')\n", (1251, 1312), False, 'from keras.layers import Dense\n')] |
# <NAME>
# 2020
import numpy as np
import time
# TODO - There is also peak checking in the Midi class.
class PeakTrigger:
def __init__(self, must_exceed, cannot_exceed):
self.peak_value = 0
self.must_exceed = must_exceed
self.cannot_exceed = cannot_exceed
self.exceeded_max = False
self.time_last = time.time()
def is_in_trigger_range(self, data_l, data_r):
peak_l = max(np.absolute(data_l))
# If the peak exceeds the max, it must go below the min before
# being considered again. This covers the corner case of a
# note decaying into the range, when the requirement is to
# record the note attacking into the range.
return_value = True
if peak_l > self.cannot_exceed:
self.exceeded_max = True
return_value = False
if peak_l < self.must_exceed:
return_value = False
if peak_l < self.must_exceed/2:
self.exceeded_max = False
if return_value and not self.exceeded_max:
return True
else:
return False
# This is for display purposes only.
def peak(self, data_l, data_r):
present_peak = max([max(np.absolute(data_l)), max(np.absolute(data_r))])
if self.return_true_every_second():
self.set_peak(present_peak)
elif present_peak > self.peak_value:
self.peak_value = present_peak
self.last_time = time.time() # Looks weird to reset on a peak.
return self.peak_value
def return_true_every_second(self):
time_now = time.time()
time_diff = time_now - self.time_last
if time_diff > 1:
self.time_last = time_now
return True
else:
return False
def set_peak(self, value):
self.peak_value = value | [
"numpy.absolute",
"time.time"
] | [((345, 356), 'time.time', 'time.time', ([], {}), '()\n', (354, 356), False, 'import time\n'), ((1609, 1620), 'time.time', 'time.time', ([], {}), '()\n', (1618, 1620), False, 'import time\n'), ((431, 450), 'numpy.absolute', 'np.absolute', (['data_l'], {}), '(data_l)\n', (442, 450), True, 'import numpy as np\n'), ((1472, 1483), 'time.time', 'time.time', ([], {}), '()\n', (1481, 1483), False, 'import time\n'), ((1222, 1241), 'numpy.absolute', 'np.absolute', (['data_l'], {}), '(data_l)\n', (1233, 1241), True, 'import numpy as np\n'), ((1248, 1267), 'numpy.absolute', 'np.absolute', (['data_r'], {}), '(data_r)\n', (1259, 1267), True, 'import numpy as np\n')] |
"""
Mail Report
-----------
Mail creator and sender. Its a postprocess that sends a report with
the model forecasts.
"""
from email.mime.application import MIMEApplication
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import logging
from os.path import basename
from pathlib import Path
import smtplib
from typing import List, Optional, Tuple, Union
from soam.cfg import MAIL_TEMPLATE, get_smtp_cred
from soam.constants import PROJECT_NAME
from soam.core.step import Step
DEFAULT_SUBJECT = "[{end_date}]Forecast report for {metric_name}"
DEFAULT_SIGNATURE = PROJECT_NAME
logger = logging.getLogger(__name__)
class MailReport:
"""
Builds and sends reports via mail.
"""
def __init__(
self,
mail_recipients_list: List[str],
metric_name: str,
setting_path: Optional[str] = None,
):
"""
Create MailReport object.
Parameters
----------
mail_recipients_list : list of str
The mails of the recipients for the report.
metric_name : str
Name of the metric being forecasted.
setting_path : str, optional
The path for the .ini document with the settings.
"""
self.mail_recipients_list = mail_recipients_list
credentials = get_smtp_cred(setting_path)
self.credentials = credentials
self.metric_name = metric_name
def send(
self,
current_date: str,
plot_filename: Union[Path, str],
subject: str = DEFAULT_SUBJECT,
signature: str = DEFAULT_SIGNATURE,
):
"""
Send email report.
Parameters
----------
current_date : str
Date when the report will be sent.
plot_filename : str or pathlib.Path
Path of the forecast plot to send.
subject : str
Subject of the email.
signature : str
Signature for the email.
"""
logger.info(f"Sending email report to: {self.mail_recipients_list}")
mime_img, mime_img_name = self._get_mime_images(Path(plot_filename))
subject, msg_body = self._build_subject_n_msg_body(
subject, signature, self.metric_name, current_date, mime_img_name
)
self._send_mail(
self.credentials,
self.mail_recipients_list,
subject,
msg_body,
[mime_img],
[],
)
def _send_mail(
self,
smtp_credentials: dict,
mail_recipients: List[str],
subject: str,
mail_body: str,
mime_image_list: List[MIMEImage],
attachments: List[str],
):
"""
Send a report email.
TODO: review method, may be static
Parameters
----------
smtp_credentials : dict
Credentials for the SMTP service.
mail_recipients : list of str
The mails of the recipients for the report.
TODO: this data is on self.
subject : str
Subject of the email.
mail_body : str
The message to be sent
mime_image_list : list of email.mime.image.MIMEImage
List of images to sent
attachments : list of str
List of files to attach in the email.
"""
user = smtp_credentials.get("user_address")
password = smtp_credentials.get("password")
from_address = smtp_credentials["mail_from"]
host = smtp_credentials["host"]
port = smtp_credentials["port"]
logger.info(
f"""About to send the following email:
'From: ' {from_address}
'To: ' {mail_recipients}
'Subject: ' {subject}
'Using host': {host} and port: {port}"""
)
logger.error(f"With the following body: \n {mail_body}")
msg_root = MIMEMultipart("related")
msg_root["From"] = from_address
msg_root["Subject"] = subject
msg_root["To"] = ", ".join(mail_recipients)
msg_alt = MIMEMultipart("alternative")
msg_root.attach(msg_alt)
msg_text = MIMEText(mail_body, "html")
msg_alt.attach(msg_text)
for mim_img in mime_image_list:
msg_root.attach(mim_img)
for attachment in attachments:
with open(attachment, "rb") as f:
part = MIMEApplication(f.read(), Name=basename(attachment))
part["Content-Disposition"] = 'attachment; filename="%s"' % basename(
attachment
)
msg_root.attach(part)
with smtplib.SMTP(host, port) as server:
server.ehlo()
if user is not None and password is not None:
server.starttls()
server.ehlo()
server.login(user, password)
server.sendmail(from_address, mail_recipients, msg_root.as_string())
logger.info("Email sent succesfully")
def _build_subject_n_msg_body(
self, subject: str, signature: str, metric_name: str, end_date, mime_img: str
) -> Tuple[str, str]:
"""
Creates the subject and message body
TODO: review method, may be static
Parameters
----------
subject : str
The subject to format
signature : str
The message signature
metric_name : str
The name for the metric
end_date : ?
?
mime_img : str
The path to the mime image.
Returns
-------
str
The subject of the email.
str
The message body of the email.
"""
if subject == DEFAULT_SUBJECT:
subject = subject.format(end_date=end_date, metric_name=metric_name)
logger.debug(f"Mail subject:\n {subject}")
jparams = {
"signature": signature,
"metric_name": metric_name,
"end_date": end_date,
"mime_img": mime_img,
}
msg_body = getattr(MAIL_TEMPLATE, "mail_body")(**jparams)
logger.debug(f"html mail body:\n {msg_body}")
return subject, msg_body
def _get_mime_images(self, plot_filename: Path) -> Tuple[MIMEImage, str]:
"""
Extract images from local dir paths.
TODO: review method, may be static
Parameters
----------
plot_filename : pathlib.Path
The path to the plot image.
Returns
-------
MIMEImage
The image MIME document.
str
The plot filename.
"""
with plot_filename.open("rb") as img_file:
msg_image = MIMEImage(img_file.read())
img_name = str(plot_filename)
msg_image.add_header("Content-Id", f"<{img_name}>")
return msg_image, img_name
def _format_link(self, factor: str) -> str:
"""
TODO: review unused method
Parameters
----------
factor
Returns
-------
"""
return f"<a href=#{factor}>{factor}</a>"
class MailReportTask(Step, MailReport):
"""
Builds the task that sends reports via mail.
"""
def __init__(self, mail_recipients_list: List[str], metric_name: str, **kwargs):
"""
Initialization of the Mail Report Task.
Parameters
----------
mail_recipients_list: List[str]
List of the recipients of the email to be sent.
metric_name: str
Name of the performance metric being measured.
"""
Step.__init__(self, **kwargs) # type: ignore
MailReport.__init__(self, mail_recipients_list, metric_name)
def run( # type: ignore
self,
current_date: str,
plot_filename: Union[Path, str],
subject: str = DEFAULT_SUBJECT,
signature: str = DEFAULT_SIGNATURE,
):
"""
Run the Mail Report Task.
Parameters
----------
current_date: str,
Current datetime as string.
plot_filename: Union[Path, str],
The path and filename of the plot.
subject: str = DEFAULT_SUBJECT,
The subject for the email.
signature: str = DEFAULT_SIGNATURE,
Signature for the email.
Returns
-------
Mail Report Task
Sends the report via email.
"""
return self.send(current_date, plot_filename, subject, signature)
| [
"logging.getLogger",
"smtplib.SMTP",
"soam.core.step.Step.__init__",
"pathlib.Path",
"email.mime.multipart.MIMEMultipart",
"os.path.basename",
"soam.cfg.get_smtp_cred",
"email.mime.text.MIMEText"
] | [((660, 687), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (677, 687), False, 'import logging\n'), ((1363, 1390), 'soam.cfg.get_smtp_cred', 'get_smtp_cred', (['setting_path'], {}), '(setting_path)\n', (1376, 1390), False, 'from soam.cfg import MAIL_TEMPLATE, get_smtp_cred\n'), ((3989, 4013), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', (['"""related"""'], {}), "('related')\n", (4002, 4013), False, 'from email.mime.multipart import MIMEMultipart\n'), ((4163, 4191), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', (['"""alternative"""'], {}), "('alternative')\n", (4176, 4191), False, 'from email.mime.multipart import MIMEMultipart\n'), ((4245, 4272), 'email.mime.text.MIMEText', 'MIMEText', (['mail_body', '"""html"""'], {}), "(mail_body, 'html')\n", (4253, 4272), False, 'from email.mime.text import MIMEText\n'), ((7706, 7735), 'soam.core.step.Step.__init__', 'Step.__init__', (['self'], {}), '(self, **kwargs)\n', (7719, 7735), False, 'from soam.core.step import Step\n'), ((2164, 2183), 'pathlib.Path', 'Path', (['plot_filename'], {}), '(plot_filename)\n', (2168, 2183), False, 'from pathlib import Path\n'), ((4717, 4741), 'smtplib.SMTP', 'smtplib.SMTP', (['host', 'port'], {}), '(host, port)\n', (4729, 4741), False, 'import smtplib\n'), ((4618, 4638), 'os.path.basename', 'basename', (['attachment'], {}), '(attachment)\n', (4626, 4638), False, 'from os.path import basename\n'), ((4524, 4544), 'os.path.basename', 'basename', (['attachment'], {}), '(attachment)\n', (4532, 4544), False, 'from os.path import basename\n')] |
import re
import hashlib
from collections import OrderedDict
from PIL import ImageChops
import PIL.Image
import numpy as np
PARSE_REGEX = re.compile(r'\t(.*) = (.*)')
def int_or_float(value):
try:
value = int(value)
except ValueError:
value = float(value)
return value
def parse_metadata(img):
"""
Parses a DMI metadata,
returning an tuple array(icon_state, state dict).
img is a PIL.Image object
"""
img_dict = img.info
info = img_dict['Description'].split('\n')
if not 'version = 4.0' in info:
return None
meta_info = []
current_key = None
for entry in info:
if entry in ["# BEGIN DMI", "# END DMI", ""]:
continue
if '\t' not in entry:
current_key = entry.replace('state = ', '').replace('\"', '')
meta_info.append((current_key, {}))
else:
this_info = PARSE_REGEX.search(entry)
if this_info:
grp_1 = this_info.group(1)
grp_2 = this_info.group(2)
if grp_1 in ['delay', 'hotspot']:
entries = grp_2.split(',')
grp_2 = []
for thing in entries:
grp_2.append(int_or_float(thing))
else:
grp_2 = int_or_float(grp_2)
dict_to_add = {grp_1 : grp_2}
meta_info[len(meta_info) - 1][1].update(dict_to_add)
return meta_info
def get_icon_hash(fp, fp_name):
"""Returns a file's hashed, fp is passed as a string"""
sha1 = hashlib.sha1(fp)
return sha1.hexdigest()
def generate_icon_states(filename, save_each=False):
"""Generates every icon state into an Image object. Returning a dict with {name : (object, icon hash)}"""
img = PIL.Image.open(filename)
meta_data = parse_metadata(img)
if meta_data is None:
print("Failed to retreive metadata.")
return
image_width = img.width
image_height = img.height
icon_width = meta_data[0][1]['width']
icon_height = meta_data[0][1]['height']
meta_data = meta_data[1:] #We don't need the version info anymore
icons_per_line = int(image_width / icon_width)
total_lines = int(image_height / icon_height)
if img.mode != "RGBA":
img = img.convert("RGBA")
data = np.asarray(img)
img.close()
icon_count = 0
skip_naming = 0
name_count = 1
icons = {}
for line in range(0, total_lines):
icon = 0
while icon < icons_per_line:
this_icon = PIL.Image.new('RGBA', (icon_width, icon_height))
try:
state_tuple = meta_data[icon_count] # (name, {'dirs' : 1, 'frames' : 1})
name = state_tuple[0]
if skip_naming:
if name_count > 0:
name += "[{}]".format(str(name_count))
name_count += 1
skip_naming -= 1
if not skip_naming:
icon_count += 1
else:
amt_dirs = state_tuple[1]['dirs']
amt_frames = state_tuple[1]['frames']
skip_naming = (amt_dirs * amt_frames) - 1
if not skip_naming:
icon_count += 1
else:
name_count = 1
except IndexError:
break #IndexError means blank icon
icon_start_w = icon * icon_width
icon_end_w = icon_start_w + icon_width
icon_start_h = line * icon_height
icon_end_h = icon_start_h + icon_height
this_state_x = 0
for i in range(icon_start_w, icon_end_w):
this_state_y = 0
for j in range(icon_start_h, icon_end_h):
this_icon.putpixel((this_state_x, this_state_y), tuple(data[j, i]))
this_state_y += 1
this_state_x += 1
icon += 1
icons[name] = this_icon
if save_each:
this_icon.save("icon_dump/{}.png".format(name))
return icons
def check_icon_state_diff(image_a, image_b):
"""Compares two icons(passed as an Image object), returning True if the icons are equal, False in case of a difference."""
return ImageChops.difference(image_a, image_b).getbbox() is None
def compare_two_icon_files(file_a, file_b):
"""
Compares every icon state of two icons, returning a dict with the icon state status:
{state name : {
status : no_check/modified/created,
img_a : Image obj,
img_a_hash: sha1 of Image a obj,
img_b : Image obj,
img_b_hash: sha1 of Image b obj
}
}
"""
if file_a:
file_a_dict = generate_icon_states(file_a)
else:
file_a_dict = {}
file_b_dict = generate_icon_states(file_b)
final_dict = OrderedDict()
for key in file_a_dict:
final_dict[key] = {}
final_dict[key]["img_a_hash"] = get_icon_hash(file_a_dict[key].tobytes(), key)
if not file_b_dict.get(key):
final_dict[key]["status"] = "Removed"
final_dict[key]["img_a"] = file_a_dict[key]
elif check_icon_state_diff(file_a_dict[key], file_b_dict[key]):
final_dict[key]["status"] = "Equal"
file_a_dict[key].close()
file_b_dict[key].close()
else:
final_dict[key]["status"] = "Modified"
final_dict[key]["img_a"] = file_a_dict[key]
final_dict[key]["img_b"] = file_b_dict[key]
final_dict[key]["img_b_hash"] = get_icon_hash(file_b_dict[key].tobytes(), key)
for key in file_b_dict:
if not file_a_dict.get(key):
final_dict[key] = {
"status" : "Created",
"img_b" : file_b_dict[key],
"img_b_hash" : get_icon_hash(file_b_dict[key].tobytes(), key)
}
return final_dict
if __name__ == '__main__':
with open("./icon_dump/new_unary_devices.dmi", 'rb') as f:
generate_icon_states(f)
| [
"PIL.ImageChops.difference",
"collections.OrderedDict",
"re.compile",
"numpy.asarray",
"hashlib.sha1"
] | [((140, 168), 're.compile', 're.compile', (['"""\\\\t(.*) = (.*)"""'], {}), "('\\\\t(.*) = (.*)')\n", (150, 168), False, 'import re\n'), ((1588, 1604), 'hashlib.sha1', 'hashlib.sha1', (['fp'], {}), '(fp)\n', (1600, 1604), False, 'import hashlib\n'), ((2352, 2367), 'numpy.asarray', 'np.asarray', (['img'], {}), '(img)\n', (2362, 2367), True, 'import numpy as np\n'), ((4952, 4965), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4963, 4965), False, 'from collections import OrderedDict\n'), ((4361, 4400), 'PIL.ImageChops.difference', 'ImageChops.difference', (['image_a', 'image_b'], {}), '(image_a, image_b)\n', (4382, 4400), False, 'from PIL import ImageChops\n')] |
import torch
import pickle
from tqdm import tqdm
import json
from transformers import BertModel, BertTokenizer
import argparse
import os
try:
from .utils import check_path
except:
from utils import check_path
id2concept = None
def load_resources(cpnet_vocab_path):
global concept2id, id2concept, relation2id, id2relation
with open(cpnet_vocab_path, "r", encoding="utf8") as fin:
id2concept = [w.strip() for w in fin]
def convert_qa_concept_to_bert_input(tokenizer, question, answer, concept, max_seq_length):
qa_tokens = tokenizer.tokenize('Q: ' + question + ' A: ' + answer)
concept_tokens = tokenizer.tokenize(concept)
qa_tokens = qa_tokens[:max_seq_length - len(concept_tokens) - 3]
tokens = [tokenizer.cls_token] + qa_tokens + [tokenizer.sep_token] + concept_tokens + [tokenizer.sep_token]
input_ids = tokenizer.convert_tokens_to_ids(tokens)
segment_ids = [0] * (len(qa_tokens) + 2) + [1] * (len(concept_tokens) + 1)
assert len(input_ids) == len(segment_ids) == len(input_ids)
# padding
pad_len = max_seq_length - len(input_ids)
input_mask = [1] * len(input_ids) + [0] * pad_len
input_ids += [0] * pad_len
segment_ids += [0] * pad_len
span = (len(qa_tokens) + 2, len(qa_tokens) + 2 + len(concept_tokens))
assert span[1] + 1 == len(tokens)
assert max_seq_length == len(input_ids) == len(segment_ids) == len(input_mask)
return input_ids, input_mask, segment_ids, span
def extract_bert_node_features_from_adj(cpnet_vocab_path, statement_path, adj_path, output_path, max_seq_length, device, batch_size, layer_id=-1, cache_path=None, use_cache=True):
global id2concept
if id2concept is None:
load_resources(cpnet_vocab_path=cpnet_vocab_path)
check_path(output_path)
print('extracting from triple strings')
tokenizer = BertTokenizer.from_pretrained('bert-large-uncased', do_lower_case=True)
model = BertModel.from_pretrained('bert-large-uncased', output_hidden_states=True).to(device)
model.eval()
if use_cache and os.path.isfile(cache_path):
print('Loading cached inputs.')
with open(cache_path, 'rb') as fin:
all_input_ids, all_input_mask, all_segment_ids, all_span, offsets = pickle.load(fin)
print('Loaded')
else:
with open(adj_path, 'rb') as fin:
adj_data = pickle.load(fin)
offsets = [0]
all_input_ids, all_input_mask, all_segment_ids, all_span = [], [], [], []
n = sum(1 for _ in open(statement_path, 'r'))
with open(statement_path, 'r') as fin:
for line in tqdm(fin, total=n, desc='Calculating alignments'):
dic = json.loads(line)
question = dic['question']['stem']
for choice in dic['question']['choices']:
answer = choice['text']
adj, concepts, _, _ = adj_data.pop(0)
concepts = [id2concept[c].replace('_', ' ') for c in concepts]
offsets.append(offsets[-1] + len(concepts))
for concept in concepts:
input_ids, input_mask, segment_ids, span = convert_qa_concept_to_bert_input(tokenizer, question, answer, concept, max_seq_length)
all_input_ids.append(input_ids)
all_input_mask.append(input_mask)
all_segment_ids.append(segment_ids)
all_span.append(span)
assert len(adj_data) == 0
check_path(cache_path)
with open(cache_path, 'wb') as fout:
pickle.dump((all_input_ids, all_input_mask, all_segment_ids, all_span, offsets), fout)
print('Inputs dumped')
all_input_ids, all_input_mask, all_segment_ids, all_span = [torch.tensor(x, dtype=torch.long) for x in [all_input_ids, all_input_mask, all_segment_ids, all_span]]
all_span = all_span.to(device)
concept_vecs = []
n = all_input_ids.size(0)
with torch.no_grad():
for a in tqdm(range(0, n, batch_size), total=n // batch_size + 1, desc='Extracting features'):
b = min(a + batch_size, n)
batch = [x.to(device) for x in [all_input_ids[a:b], all_input_mask[a:b], all_segment_ids[a:b]]]
outputs = model(*batch)
hidden_states = outputs[-1][layer_id]
mask = torch.arange(max_seq_length, device=device)[None, :]
mask = (mask >= all_span[a:b, 0, None]) & (mask < all_span[a:b, 1, None])
pooled = (hidden_states * mask.float().unsqueeze(-1)).sum(1)
pooled = pooled / (all_span[a:b, 1].float() - all_span[a:b, 0].float() + 1e-5).unsqueeze(1)
concept_vecs.append(pooled.cpu())
concept_vecs = torch.cat(concept_vecs, 0).numpy()
res = [concept_vecs[offsets[i]:offsets[i + 1]] for i in range(len(offsets) - 1)]
with open(output_path, 'wb') as fout:
pickle.dump(res, fout)
print('done!')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--statement_path', default=None)
parser.add_argument('--adj_path', default=None)
parser.add_argument('--output_path', default=None)
parser.add_argument('--split', default='train', choices=['train', 'dev', 'test'], required=True)
parser.add_argument('-ds', '--dataset', default='csqa', choices=['csqa', 'socialiqa', 'obqa'], required=True)
parser.add_argument('--layer_id', type=int, default=-1)
parser.add_argument('--max_seq_length', type=int, required=True)
parser.add_argument('--batch_size', type=int, default=8)
parser.add_argument('--cpnet_vocab_path', default='./data/cpnet/concept.txt')
parser.add_argument('--cache_path', default=None)
args = parser.parse_args()
parser.set_defaults(statement_path=f'./data/{args.dataset}/statement/{args.split}.statement.jsonl',
adj_path=f'./data/{args.dataset}/graph/{args.split}.graph.adj.pk',
output_path=f'./data/{args.dataset}/concept_embs/{args.split}.bert-large-uncased.layer{args.layer_id}.pk',
cache_path=f'./data/{args.dataset}/concept_embs/{args.split}.inputs.pk')
args = parser.parse_args()
print(args)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
extract_bert_node_features_from_adj(cpnet_vocab_path=args.cpnet_vocab_path,
statement_path=args.statement_path,
adj_path=args.adj_path,
output_path=args.output_path,
max_seq_length=args.max_seq_length,
device=device,
batch_size=args.batch_size,
layer_id=args.layer_id,
cache_path=args.cache_path)
| [
"json.loads",
"pickle.dump",
"argparse.ArgumentParser",
"tqdm.tqdm",
"transformers.BertTokenizer.from_pretrained",
"pickle.load",
"transformers.BertModel.from_pretrained",
"os.path.isfile",
"torch.tensor",
"torch.cat",
"torch.cuda.is_available",
"utils.check_path",
"torch.no_grad",
"torch.arange"
] | [((1762, 1785), 'utils.check_path', 'check_path', (['output_path'], {}), '(output_path)\n', (1772, 1785), False, 'from utils import check_path\n'), ((1848, 1919), 'transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['"""bert-large-uncased"""'], {'do_lower_case': '(True)'}), "('bert-large-uncased', do_lower_case=True)\n", (1877, 1919), False, 'from transformers import BertModel, BertTokenizer\n'), ((5005, 5030), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (5028, 5030), False, 'import argparse\n'), ((2057, 2083), 'os.path.isfile', 'os.path.isfile', (['cache_path'], {}), '(cache_path)\n', (2071, 2083), False, 'import os\n'), ((3524, 3546), 'utils.check_path', 'check_path', (['cache_path'], {}), '(cache_path)\n', (3534, 3546), False, 'from utils import check_path\n'), ((3787, 3820), 'torch.tensor', 'torch.tensor', (['x'], {'dtype': 'torch.long'}), '(x, dtype=torch.long)\n', (3799, 3820), False, 'import torch\n'), ((3988, 4003), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4001, 4003), False, 'import torch\n'), ((4920, 4942), 'pickle.dump', 'pickle.dump', (['res', 'fout'], {}), '(res, fout)\n', (4931, 4942), False, 'import pickle\n'), ((1932, 2006), 'transformers.BertModel.from_pretrained', 'BertModel.from_pretrained', (['"""bert-large-uncased"""'], {'output_hidden_states': '(True)'}), "('bert-large-uncased', output_hidden_states=True)\n", (1957, 2006), False, 'from transformers import BertModel, BertTokenizer\n'), ((2249, 2265), 'pickle.load', 'pickle.load', (['fin'], {}), '(fin)\n', (2260, 2265), False, 'import pickle\n'), ((2366, 2382), 'pickle.load', 'pickle.load', (['fin'], {}), '(fin)\n', (2377, 2382), False, 'import pickle\n'), ((2614, 2663), 'tqdm.tqdm', 'tqdm', (['fin'], {'total': 'n', 'desc': '"""Calculating alignments"""'}), "(fin, total=n, desc='Calculating alignments')\n", (2618, 2663), False, 'from tqdm import tqdm\n'), ((3604, 3694), 'pickle.dump', 'pickle.dump', (['(all_input_ids, all_input_mask, all_segment_ids, all_span, offsets)', 'fout'], {}), '((all_input_ids, all_input_mask, all_segment_ids, all_span,\n offsets), fout)\n', (3615, 3694), False, 'import pickle\n'), ((6280, 6305), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6303, 6305), False, 'import torch\n'), ((2687, 2703), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (2697, 2703), False, 'import json\n'), ((4360, 4403), 'torch.arange', 'torch.arange', (['max_seq_length'], {'device': 'device'}), '(max_seq_length, device=device)\n', (4372, 4403), False, 'import torch\n'), ((4745, 4771), 'torch.cat', 'torch.cat', (['concept_vecs', '(0)'], {}), '(concept_vecs, 0)\n', (4754, 4771), False, 'import torch\n')] |
from main import evaluate, corpus
def test_evaluate():
assert corpus.test
assert evaluate(corpus.test)
| [
"main.evaluate"
] | [((91, 112), 'main.evaluate', 'evaluate', (['corpus.test'], {}), '(corpus.test)\n', (99, 112), False, 'from main import evaluate, corpus\n')] |
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets
from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo
from flask_login import current_user
from app.models import *
import datetime
class RegistrationForm(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
password2 = PasswordField('Password', validators=[
DataRequired(), EqualTo('password')])
fullname = StringField('Full Name', validators=[DataRequired()])
submit = SubmitField('Register')
class LoginForm(FlaskForm):
username = StringField('Username', validators=[DataRequired()])
password = PasswordField('Password', validators=[DataRequired()])
remember_me = BooleanField('Remember Me')
submit = SubmitField('Sign In')
class RoomChoiceIterable(object):
def __iter__(self):
rooms = Room.query.all()
choices = [(room.id, room.roomName) for room in rooms]
for choice in choices:
yield choice
class BookmeetingForm(FlaskForm):
title = StringField('EPL team', validators=[DataRequired()])
rooms = SelectField('Choose Pitch', coerce=int,
choices=RoomChoiceIterable())
date = DateField('Choose date', format="%m/%d/%Y",
validators=[DataRequired()])
startTime = SelectField('Choose starting time(in 24hr expression)',
coerce=int, choices=[(i, i) for i in range(9, 19)])
duration = SelectField('Choose duration of the meeting(in hours)',
coerce=int, choices=[(i, i) for i in range(1, 6)])
# participants_user=SelectMultipleField('Choose participants from company',coerce=int,choices=UserChoiceIterable(),option_widget=widgets.CheckboxInput(),widget=widgets.ListWidget(prefix_label=False),validators=[DataRequired()])
# participants_partner=SelectMultipleField('Choose participants from partners',coerce=int,choices=PartnerChoiceIterable(),option_widget=widgets.CheckboxInput(),widget=widgets.ListWidget(prefix_label=False))
submit = SubmitField('Book')
def validate_title(self, title):
meeting = Booking.query.filter_by(title=self.title.data).first()
if meeting is not None: # username exist
raise ValidationError('Please use another meeting title.')
def validate_date(self, date):
if self.date.data < datetime.datetime.now().date():
raise ValidationError('You can only book for day after today.')
class RoomavailableForm(FlaskForm):
date = DateField('Choose date', format="%m/%d/%Y",
validators=[DataRequired()])
startTime = SelectField('Choose starting time(in 24hr expression)',
coerce=int, choices=[(i, i) for i in range(9, 19)])
duration = SelectField('Choose duration of the meeting(in hours)',
coerce=int, choices=[(i, i) for i in range(1, 6)])
submit = SubmitField('Check')
class RoomoccupationForm(FlaskForm):
date = DateField('Choose date', format="%m/%d/%Y",
validators=[DataRequired()])
submit = SubmitField('Check')
class MeetingChoiceIterable(object):
def __iter__(self):
bookings = Booking.query.filter_by(bookerId=current_user.id).all()
choices = [
(meeting.id, f'{meeting.title} in {Room.query.filter_by(id=meeting.roomId).first().roomName} start at {meeting.date.date()} from {meeting.startTime}') for meeting in bookings]
for choice in choices:
yield choice
class CancelbookingForm(FlaskForm):
ids = SelectField('Choose booking to cancel', coerce=int,
choices=MeetingChoiceIterable())
submit = SubmitField('Cancel')
# class LoginForm(FlaskForm):
# username = StringField('Username', validators=[DataRequired()])
# password = PasswordField('Password', validators=[DataRequired()])
# remember_me = BooleanField('Remember Me')
# submit = SubmitField('Sign In')
# class RegistrationForm(FlaskForm):
# username = StringField('Username', validators=[DataRequired()])
# password = PasswordField('Password', validators=[DataRequired()])
# password2 = PasswordField('Password', validators=[DataRequired(), EqualTo('password')])
# fullname=StringField('Full Name',validators=[DataRequired()])
# position=StringField('Position',validators=[DataRequired()])
# teamId=IntegerField('Team number',validators=[DataRequired()])
# teamName=StringField('Team name',validators=[DataRequired()])
# submit=SubmitField('Register')
# def validate_username(self,username):
# user=User.query.filter_by(username=self.username.data).first()
# if user is not None: # username exist
# raise ValidationError('Please use a different username.')
# # def validate_teamId(self,teamId):
# # team=Team.query.filter_by(id=teamId.data).first()
# # if team is not None:
# # if team.teamName!=self.teamName.data:
# # raise ValidationError('Team name does not match, try again.')
# class AddteamForm(FlaskForm):
# id=IntegerField('Team number',validators=[DataRequired()])
# teamName=StringField('Team name',validators=[DataRequired()])
# submit=SubmitField('Add')
# def validate_id(self,id):
# team=Team.query.filter_by(id=id.data).first()
# if team is not None:
# raise ValidationError('Team Exist, try again')
# def validate_teamName(self,teamName):
# team=Team.query.filter_by(teamName=teamName.data).first()
# if team is not None:
# raise ValidationError('Team Name Exist, try again')
# class AdduserForm(FlaskForm):
# username = StringField('Username', validators=[DataRequired()])
# password = PasswordField('Password', validators=[DataRequired()])
# fullname=StringField('Full Name',validators=[DataRequired()])
# position=StringField('Position',validators=[DataRequired()])
# teamId=IntegerField('Team number',validators=[DataRequired()])
# teamName=StringField('Team name',validators=[DataRequired()])
# submit=SubmitField('Register')
# def validate_username(self,username):
# user=User.query.filter_by(username=self.username.data).first()
# if user is not None: # username exist
# raise ValidationError('Please use a different username.')
# def validate_teamId(self,teamId):
# team=Team.query.filter_by(id=teamId.data).first()
# if team is not None:
# if team.teamName!=self.teamName.data:
# raise ValidationError('Team name does not match, try again.')
# # use this so that the choice can be refreshed every time
# class TeamChoiceIterable(object):
# def __iter__(self):
# teams=Team.query.all()
# choices=[(team.id,team.teamName) for team in teams]
# choices=[choice for choice in choices if choice[1]!='Admin']
# for choice in choices:
# yield choice
# class DeleteteamForm(FlaskForm):
# ids=SelectField('Choose Team',choices=TeamChoiceIterable(),coerce=int)
# submit=SubmitField('Delete')
# class UserChoiceIterable(object):
# def __iter__(self):
# users=User.query.all()
# choices=[(user.id,f'{user.fullname}, team {Team.query.filter_by(id=user.teamId).first().teamName}') for user in users]
# choices=[choice for choice in choices if 'admin' not in choice[1]] # do not delete admin
# for choice in choices:
# yield choice
# class PartnerChoiceIterable(object):
# def __iter__(self):
# partners=Businesspartner.query.all()
# choices=[(partner.id,f'{partner.name} from {partner.representing}') for partner in partners]
# #choices=[choice for choice in choices if choice[1]!='admin'] # do not delete admin
# for choice in choices:
# yield choice
# class DeleteuserForm(FlaskForm):
# ids=SelectField('Choose User',coerce=int,choices=UserChoiceIterable())
# submit=SubmitField('Delete')
# class RoomChoiceIterable(object):
# def __iter__(self):
# rooms=Room.query.all()
# choices=[(room.id,room.roomName) for room in rooms]
# for choice in choices:
# yield choice
# class BookmeetingForm(FlaskForm):
# title=StringField('Meeting title',validators=[DataRequired()])
# rooms=SelectField('Choose room',coerce=int,choices=RoomChoiceIterable())
# date=DateField('Choose date', format="%m/%d/%Y",validators=[DataRequired()])
# startTime=SelectField('Choose starting time(in 24hr expression)',coerce=int,choices=[(i,i) for i in range(9,19)])
# duration=SelectField('Choose duration of the meeting(in hours)',coerce=int,choices=[(i,i) for i in range(1,6)])
# participants_user=SelectMultipleField('Choose participants from company',coerce=int,choices=UserChoiceIterable(),option_widget=widgets.CheckboxInput(),widget=widgets.ListWidget(prefix_label=False),validators=[DataRequired()])
# participants_partner=SelectMultipleField('Choose participants from partners',coerce=int,choices=PartnerChoiceIterable(),option_widget=widgets.CheckboxInput(),widget=widgets.ListWidget(prefix_label=False))
# submit=SubmitField('Book')
# def validate_title(self,title):
# meeting=Meeting.query.filter_by(title=self.title.data).first()
# if meeting is not None: # username exist
# raise ValidationError('Please use another meeting title.')
# def validate_date(self,date):
# if self.date.data<datetime.datetime.now().date():
# raise ValidationError('You can only book for day after today.')
# class MeetingChoiceIterable(object):
# def __iter__(self):
# meetings=Meeting.query.filter_by(bookerId=current_user.id).all()
# choices=[(meeting.id,f'{meeting.title} in {Room.query.filter_by(id=meeting.roomId).first().roomName} start at {meeting.date.date()} from {meeting.startTime}') for meeting in meetings]
# for choice in choices:
# yield choice
# class CancelbookingForm(FlaskForm):
# #def __init__(self,userId,**kw):
# # super(CancelbookingForm, self).__init__(**kw)
# # self.name.userId =userId
# ids=SelectField('Choose meeting to cancel',coerce=int,choices=MeetingChoiceIterable())
# submit=SubmitField('Cancel')
# class RoomavailableForm(FlaskForm):
# date=DateField('Choose date', format="%m/%d/%Y",validators=[DataRequired()])
# startTime=SelectField('Choose starting time(in 24hr expression)',coerce=int,choices=[(i,i) for i in range(9,19)])
# duration=SelectField('Choose duration of the meeting(in hours)',coerce=int,choices=[(i,i) for i in range(1,6)])
# submit=SubmitField('Check')
# class RoomoccupationForm(FlaskForm):
# date=DateField('Choose date', format="%m/%d/%Y",validators=[DataRequired()])
# submit=SubmitField('Check')
# class MeetingChoiceAllIterable(object):
# def __iter__(self):
# meetings=Meeting.query.all()
# choices=[(meeting.id,f'{meeting.title} in {Room.query.filter_by(id=meeting.roomId).first().roomName} start at {meeting.date.date()} from {meeting.startTime}') for meeting in meetings]
# for choice in choices:
# yield choice
# class MeetingparticipantsForm(FlaskForm):
# ids=SelectField('Choose meeting',coerce=int,choices=MeetingChoiceAllIterable())
# submit=SubmitField('Check')
# class CostaccruedForm(FlaskForm):
# startdate=DateField('Choose start date', format="%m/%d/%Y",validators=[DataRequired()])
# enddate=DateField('Choose end date', format="%m/%d/%Y",validators=[DataRequired()])
# submit=SubmitField('Check')
# def validate_enddate(self,enddate):
# if enddate.data<self.startdate.data:
# raise ValidationError('End Date must be after Start Date')
| [
"wtforms.validators.ValidationError",
"wtforms.BooleanField",
"wtforms.SubmitField",
"wtforms.validators.EqualTo",
"datetime.datetime.now",
"wtforms.validators.DataRequired"
] | [((734, 757), 'wtforms.SubmitField', 'SubmitField', (['"""Register"""'], {}), "('Register')\n", (745, 757), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets\n'), ((944, 971), 'wtforms.BooleanField', 'BooleanField', (['"""Remember Me"""'], {}), "('Remember Me')\n", (956, 971), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets\n'), ((985, 1007), 'wtforms.SubmitField', 'SubmitField', (['"""Sign In"""'], {}), "('Sign In')\n", (996, 1007), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets\n'), ((2289, 2308), 'wtforms.SubmitField', 'SubmitField', (['"""Book"""'], {}), "('Book')\n", (2300, 2308), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets\n'), ((3170, 3190), 'wtforms.SubmitField', 'SubmitField', (['"""Check"""'], {}), "('Check')\n", (3181, 3190), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets\n'), ((3348, 3368), 'wtforms.SubmitField', 'SubmitField', (['"""Check"""'], {}), "('Check')\n", (3359, 3368), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets\n'), ((3940, 3961), 'wtforms.SubmitField', 'SubmitField', (['"""Cancel"""'], {}), "('Cancel')\n", (3951, 3961), False, 'from wtforms import StringField, TextAreaField, SubmitField, SelectField, PasswordField, BooleanField, IntegerField, DateField, SelectMultipleField, widgets\n'), ((2488, 2540), 'wtforms.validators.ValidationError', 'ValidationError', (['"""Please use another meeting title."""'], {}), "('Please use another meeting title.')\n", (2503, 2540), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((2655, 2712), 'wtforms.validators.ValidationError', 'ValidationError', (['"""You can only book for day after today."""'], {}), "('You can only book for day after today.')\n", (2670, 2712), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((442, 456), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (454, 456), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((512, 526), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (524, 526), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((614, 628), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (626, 628), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((630, 649), 'wtforms.validators.EqualTo', 'EqualTo', (['"""password"""'], {}), "('password')\n", (637, 649), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((704, 718), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (716, 718), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((839, 853), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (851, 853), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((909, 923), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (921, 923), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((1304, 1318), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1316, 1318), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((1515, 1529), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (1527, 1529), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((2839, 2853), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (2851, 2853), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((3318, 3332), 'wtforms.validators.DataRequired', 'DataRequired', ([], {}), '()\n', (3330, 3332), False, 'from wtforms.validators import Required, ValidationError, DataRequired, Email, EqualTo\n'), ((2605, 2628), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2626, 2628), False, 'import datetime\n')] |
"""
Implementation of ListMetadataFormats verb
"""
import re
import validators
from lxml import etree
from .request import OAIRequest
from .response import OAIResponse
from .exceptions import (
OAIErrorIdDoesNotExist,
OAIErrorNoMetadataFormats,
OAIRepoInternalException
)
class MetadataFormatValidator:
"""Validator for the MetadataFormat class"""
def errors(self):
"""
Verify fields are valid and present where required. Returning a list of descriptive
errors if any issues were found.
"""
failures = []
failures.extend(self._metadata_prefix_failures())
failures.extend(self._schema_failures())
failures.extend(self._metadata_namespace_failures())
return failures
def _metadata_prefix_failures(self):
"""Return a list of metadata_prefix failures"""
pattern = re.compile(r"^[A-Za-z0-9-_.!~*'\(\)]+$")
return [] if pattern.search(self.metadata_prefix) is not None else \
["metadata_prefix contains invalid character(s); allowed chars: A-Za-z0-9-_.!~*'()"]
def _schema_failures(self):
"""Return a list of schema failures"""
return ["schema must be a valid URL"] \
if not validators.url(self.schema) else []
def _metadata_namespace_failures(self):
"""Return a list of metadata_namespace failures"""
return ["metadata_namespace must be a valid URL"] \
if not validators.url(self.metadata_namespace) else []
class ListMetadataFormatsRequest(OAIRequest):
"""
Parse a request for the ListMetadataFormats verb
raises:
OAIErrorBadArgument
"""
def __init__(self):
super().__init__()
self.optional_args = ['identifier']
self.identifier: str = None
def post_parse(self):
"""Runs after args are parsed"""
if self.args:
self.identifier = self.args["identifier"]
def __repr__(self):
return f"ListMetadataFormatsRequest(identifier={self.identifier})"
class ListMetadataFormatsResponse(OAIResponse):
"""
Generate a resposne for the ListMetadataFormats verb
raises:
OAIErrorIdDoesNotExist
OAIErrorNoMetadataFormats
"""
def __repr__(self):
return f"ListMetadataFormatsResponse(identifier={self.request.identifier})"
def body(self) -> etree.Element:
"""Response body"""
identifier = self.request.identifier
if identifier and not self.repository.data.is_valid_identifier(identifier):
raise OAIErrorIdDoesNotExist("The given identifier does not exist.")
mdformats = self.repository.data.get_metadata_formats(identifier)
if not mdformats:
raise OAIErrorNoMetadataFormats("No metadata fomats found for given identifier.")
xmlb = etree.Element("ListMetadataFormats")
for mdformat in mdformats:
# Report errors if any MetadataFormat object were invalid
errors = mdformat.errors()
if errors:
raise OAIRepoInternalException(f"Invalid MetadataFormat instance: {errors}")
self.add_format(xmlb, mdformat)
return xmlb
def add_format(self, xmlb: etree.Element, mdformat: dict):
"""
Add the given metadta format to the provided xml element
"""
mdf_elem = etree.SubElement(xmlb, "metadataFormat")
elem = etree.SubElement(mdf_elem, "metadataPrefix")
elem.text = mdformat.metadata_prefix
elem = etree.SubElement(mdf_elem, "schema")
elem.text = mdformat.schema
elem = etree.SubElement(mdf_elem, "metadataNamespace")
elem.text = mdformat.metadata_namespace
| [
"lxml.etree.Element",
"validators.url",
"lxml.etree.SubElement",
"re.compile"
] | [((875, 916), 're.compile', 're.compile', (['"""^[A-Za-z0-9-_.!~*\'\\\\(\\\\)]+$"""'], {}), '("^[A-Za-z0-9-_.!~*\'\\\\(\\\\)]+$")\n', (885, 916), False, 'import re\n'), ((2831, 2867), 'lxml.etree.Element', 'etree.Element', (['"""ListMetadataFormats"""'], {}), "('ListMetadataFormats')\n", (2844, 2867), False, 'from lxml import etree\n'), ((3364, 3404), 'lxml.etree.SubElement', 'etree.SubElement', (['xmlb', '"""metadataFormat"""'], {}), "(xmlb, 'metadataFormat')\n", (3380, 3404), False, 'from lxml import etree\n'), ((3420, 3464), 'lxml.etree.SubElement', 'etree.SubElement', (['mdf_elem', '"""metadataPrefix"""'], {}), "(mdf_elem, 'metadataPrefix')\n", (3436, 3464), False, 'from lxml import etree\n'), ((3525, 3561), 'lxml.etree.SubElement', 'etree.SubElement', (['mdf_elem', '"""schema"""'], {}), "(mdf_elem, 'schema')\n", (3541, 3561), False, 'from lxml import etree\n'), ((3613, 3660), 'lxml.etree.SubElement', 'etree.SubElement', (['mdf_elem', '"""metadataNamespace"""'], {}), "(mdf_elem, 'metadataNamespace')\n", (3629, 3660), False, 'from lxml import etree\n'), ((1237, 1264), 'validators.url', 'validators.url', (['self.schema'], {}), '(self.schema)\n', (1251, 1264), False, 'import validators\n'), ((1456, 1495), 'validators.url', 'validators.url', (['self.metadata_namespace'], {}), '(self.metadata_namespace)\n', (1470, 1495), False, 'import validators\n')] |
import setuptools
setuptools.setup(
name='locomotionbench',
version='0.1.0',
author='<NAME>',
author_email='<EMAIL>',
description='Compute metrics based on gait segmentation for periodic walking motions and export PI',
url='https://gitlab.com/orb-benchmarking/eb_hum_bench/',
packages=setuptools.find_packages(),
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
],
python_requires='>=3.6.0',
scripts=['script/run_i3sa.py'],
install_requires=[
"wheel",
"csaps",
"cycler",
"Cython",
"hdbscan",
"joblib",
"kiwisolver",
"llvmlite",
"matplotlib",
"numpy",
"pandas",
"Pillow",
"pyparsing",
"python-dateutil",
"pytz",
"PyYAML",
"scikit-learn",
"scipy",
"Shapely",
"six",
"sklearn",
"threadpoolctl"
],
entry_points ={
'console_scripts': ['run_i3sa = src.script.run_i3sa:entry_point']
}
)
| [
"setuptools.find_packages"
] | [((314, 340), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (338, 340), False, 'import setuptools\n')] |
# Generated by Django 2.2 on 2020-03-26 13:48
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0001_initial'),
]
atomic = False
operations = [
migrations.RenameModel(
old_name='Request',
new_name='ItemRequest',
),
]
| [
"django.db.migrations.RenameModel"
] | [((230, 296), 'django.db.migrations.RenameModel', 'migrations.RenameModel', ([], {'old_name': '"""Request"""', 'new_name': '"""ItemRequest"""'}), "(old_name='Request', new_name='ItemRequest')\n", (252, 296), False, 'from django.db import migrations\n')] |
import tensorflow as tf
import tensorflow_datasets as tfds
import numpy as np
from PIL import Image, ImageDraw
import PIL
import sys
import os
outAnnotationPath = sys.argv[1]
outDirPath = sys.argv[2]
split = sys.argv[3]
print("Will export the annotations to {0}".format(outAnnotationPath))
print("Will export images to {0}".format(outDirPath))
idx = 0
with open(outAnnotationPath, 'w') as outFile:
for ex in tfds.load('stanford_dogs', split=split):
bboxes = ex["objects"]["bbox"].numpy()
bboxesCount,_ = bboxes.shape
fname = os.path.basename(ex["image/filename"].numpy().decode('utf-8'))
npImage = ex["image"].numpy()
h,w,channels = npImage.shape
image = Image.fromarray(npImage)
outFilePath = os.path.join(outDirPath,fname)
image.save(outFilePath)
bboxStrs = []
for i in range(bboxesCount):
bboxn = np.squeeze(bboxes[i,:]) # hmin,wmin,hmax,wmax
(xmin,ymin,xmax,ymax) = int(round(w*bboxn[1])), int(round(h*bboxn[0])), int(round(w*bboxn[3])), int(round(h*bboxn[2]))
# print("{0} {1}; {2}; {3}".format(h,w,bboxn, (xmin,ymin,xmax,ymax)))
bboxStrs.append("{0},{1},{2},{3},0".format(xmin,ymin,xmax-xmin+1, ymax-ymin+1))
bboxStr = " ".join(bboxStrs)
outFile.write('{0} {1}\n'.format(fname,bboxStr))
idx += 1
print("{0}\t:{1} is ready".format(idx,fname))
print("Done") | [
"tensorflow_datasets.load",
"os.path.join",
"PIL.Image.fromarray",
"numpy.squeeze"
] | [((414, 453), 'tensorflow_datasets.load', 'tfds.load', (['"""stanford_dogs"""'], {'split': 'split'}), "('stanford_dogs', split=split)\n", (423, 453), True, 'import tensorflow_datasets as tfds\n'), ((709, 733), 'PIL.Image.fromarray', 'Image.fromarray', (['npImage'], {}), '(npImage)\n', (724, 733), False, 'from PIL import Image, ImageDraw\n'), ((756, 787), 'os.path.join', 'os.path.join', (['outDirPath', 'fname'], {}), '(outDirPath, fname)\n', (768, 787), False, 'import os\n'), ((898, 922), 'numpy.squeeze', 'np.squeeze', (['bboxes[i, :]'], {}), '(bboxes[i, :])\n', (908, 922), True, 'import numpy as np\n')] |
''' Some logic that made sense to move to a separate file '''
from django.core.exceptions import ValidationError
from django.shortcuts import redirect, render
from lobby.models import Lobby
from backend.models import User
from lobby.forms import JoinLobby, LobbyForm
def _try_add_to_lobby(request):
''' Validate the user and tries to add him to the lobby '''
pin = request.POST.get('pin')
form = JoinLobby(data=request.POST)
try:
lobby = Lobby.objects.get(id=pin)
lobby.add_user(request.user)
return redirect('/lobby/'+pin)
except ValidationError as e:
form.add_error('pin', e)
data = {'form': form, 'lobby_form': LobbyForm()}
return render(request, 'lobby/lobby.html', data)
| [
"django.shortcuts.render",
"lobby.forms.JoinLobby",
"lobby.forms.LobbyForm",
"django.shortcuts.redirect",
"lobby.models.Lobby.objects.get"
] | [((411, 439), 'lobby.forms.JoinLobby', 'JoinLobby', ([], {'data': 'request.POST'}), '(data=request.POST)\n', (420, 439), False, 'from lobby.forms import JoinLobby, LobbyForm\n'), ((465, 490), 'lobby.models.Lobby.objects.get', 'Lobby.objects.get', ([], {'id': 'pin'}), '(id=pin)\n', (482, 490), False, 'from lobby.models import Lobby\n'), ((543, 568), 'django.shortcuts.redirect', 'redirect', (["('/lobby/' + pin)"], {}), "('/lobby/' + pin)\n", (551, 568), False, 'from django.shortcuts import redirect, render\n'), ((705, 746), 'django.shortcuts.render', 'render', (['request', '"""lobby/lobby.html"""', 'data'], {}), "(request, 'lobby/lobby.html', data)\n", (711, 746), False, 'from django.shortcuts import redirect, render\n'), ((677, 688), 'lobby.forms.LobbyForm', 'LobbyForm', ([], {}), '()\n', (686, 688), False, 'from lobby.forms import JoinLobby, LobbyForm\n')] |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense,LSTM
x = [
[[0], [1], [1], [0], [0], [0]],
[[0], [0], [0], [2], [2], [0]],
[[0], [0], [0], [0], [3], [3]],
[[0], [2], [2], [0], [0], [0]],
[[0], [3], [3], [3], [0], [0]],
[[0], [0], [0], [0], [1], [1]],
]
x = np.array(x, dtype=np.float32)
y = np.array([1, 2, 3, 2, 3, 1], dtype=np.int32)
y2 = np.zeros((y.shape[0], 4), dtype=np.float32)
y2[np.arange(y.shape[0]), y] = 1.0
print(y2)
model = Sequential()
model.add(LSTM(128, dropout=0.2, recurrent_dropout=0.2, input_shape=(None, 1)))
model.add(Dense(4, activation='sigmoid'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model.fit(x, y2, epochs=75)
pd.DataFrame(history.history).plot(figsize=(8, 5))
plt.grid(True)
plt.gca().set_ylim(0, 1.25)
plt.show()
def runit(model, inp):
inp = np.array(inp,dtype=np.float32)
pred = model.predict(inp)
return np.argmax(pred[0])
print( runit( model, [[[0],[2],[2],[2],[2],[0]]] )) | [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.gca",
"numpy.argmax",
"numpy.array",
"numpy.zeros",
"tensorflow.keras.layers.LSTM",
"tensorflow.keras.layers.Dense",
"pandas.DataFrame",
"tensorflow.keras.models.Sequential",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((396, 425), 'numpy.array', 'np.array', (['x'], {'dtype': 'np.float32'}), '(x, dtype=np.float32)\n', (404, 425), True, 'import numpy as np\n'), ((430, 474), 'numpy.array', 'np.array', (['[1, 2, 3, 2, 3, 1]'], {'dtype': 'np.int32'}), '([1, 2, 3, 2, 3, 1], dtype=np.int32)\n', (438, 474), True, 'import numpy as np\n'), ((481, 524), 'numpy.zeros', 'np.zeros', (['(y.shape[0], 4)'], {'dtype': 'np.float32'}), '((y.shape[0], 4), dtype=np.float32)\n', (489, 524), True, 'import numpy as np\n'), ((581, 593), 'tensorflow.keras.models.Sequential', 'Sequential', ([], {}), '()\n', (591, 593), False, 'from tensorflow.keras.models import Sequential\n'), ((896, 910), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (904, 910), True, 'import matplotlib.pyplot as plt\n'), ((939, 949), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (947, 949), True, 'import matplotlib.pyplot as plt\n'), ((604, 672), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(128)'], {'dropout': '(0.2)', 'recurrent_dropout': '(0.2)', 'input_shape': '(None, 1)'}), '(128, dropout=0.2, recurrent_dropout=0.2, input_shape=(None, 1))\n', (608, 672), False, 'from tensorflow.keras.layers import Dense, LSTM\n'), ((684, 714), 'tensorflow.keras.layers.Dense', 'Dense', (['(4)'], {'activation': '"""sigmoid"""'}), "(4, activation='sigmoid')\n", (689, 714), False, 'from tensorflow.keras.layers import Dense, LSTM\n'), ((985, 1016), 'numpy.array', 'np.array', (['inp'], {'dtype': 'np.float32'}), '(inp, dtype=np.float32)\n', (993, 1016), True, 'import numpy as np\n'), ((1057, 1075), 'numpy.argmax', 'np.argmax', (['pred[0]'], {}), '(pred[0])\n', (1066, 1075), True, 'import numpy as np\n'), ((528, 549), 'numpy.arange', 'np.arange', (['y.shape[0]'], {}), '(y.shape[0])\n', (537, 549), True, 'import numpy as np\n'), ((845, 874), 'pandas.DataFrame', 'pd.DataFrame', (['history.history'], {}), '(history.history)\n', (857, 874), True, 'import pandas as pd\n'), ((911, 920), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (918, 920), True, 'import matplotlib.pyplot as plt\n')] |
""""Common pyparsing grammar patterns."""
from pyparsing import alphas, nums
from pyparsing import Group, OneOrMore, Optional, Regex, Suppress, Word
import re
pos_neg_int_number = Word('+-' + nums).setParseAction(lambda t: [int(t[0])]) # '+3' or '-2' are examples
# matching float w/ regex is ugly but is recommended by pyparsing
regex_after_decimal = r'([0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)'
float_number = Regex(r'[-+]?([0-9]+\.(?!([0-9]|[eE])))|{0}'.format(regex_after_decimal)) \
.setParseAction(lambda t: [float(t[0])])
chemical_formula = Group(OneOrMore(Word(alphas, min=1, max=2) + Optional(float_number, default=1.0))) + \
Optional(Suppress('/') + pos_neg_int_number, default=0)
reg_symbol = r'([A-z][A-z]?)'
reg_amount = r'([-+]?([0-9]+\.(?!([0-9]|[eE])))|([0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?))?'
reg_charge = r'/([+-]?[0-9]+)'
chem_regex = reg_symbol + reg_amount
def parse_chemical_formula(formula):
""""""
matches = re.findall(chem_regex, formula)
sym_amnts = [(m[0], float(m[1]) if m[1] != '' else 1.0) for m in matches]
charge = re.search(reg_charge, formula)
if charge is None:
charge = 0
else:
charge = int(charge.groups()[0])
return (sym_amnts, charge)
| [
"pyparsing.Suppress",
"pyparsing.Optional",
"pyparsing.Word",
"re.findall",
"re.search"
] | [((961, 992), 're.findall', 're.findall', (['chem_regex', 'formula'], {}), '(chem_regex, formula)\n', (971, 992), False, 'import re\n'), ((1084, 1114), 're.search', 're.search', (['reg_charge', 'formula'], {}), '(reg_charge, formula)\n', (1093, 1114), False, 'import re\n'), ((182, 199), 'pyparsing.Word', 'Word', (["('+-' + nums)"], {}), "('+-' + nums)\n", (186, 199), False, 'from pyparsing import Group, OneOrMore, Optional, Regex, Suppress, Word\n'), ((665, 678), 'pyparsing.Suppress', 'Suppress', (['"""/"""'], {}), "('/')\n", (673, 678), False, 'from pyparsing import Group, OneOrMore, Optional, Regex, Suppress, Word\n'), ((566, 592), 'pyparsing.Word', 'Word', (['alphas'], {'min': '(1)', 'max': '(2)'}), '(alphas, min=1, max=2)\n', (570, 592), False, 'from pyparsing import Group, OneOrMore, Optional, Regex, Suppress, Word\n'), ((595, 630), 'pyparsing.Optional', 'Optional', (['float_number'], {'default': '(1.0)'}), '(float_number, default=1.0)\n', (603, 630), False, 'from pyparsing import Group, OneOrMore, Optional, Regex, Suppress, Word\n')] |
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
app = Flask (__name__)
app.config['SECRET_KEY'] = '9886f73c12d19d30b78e41008e26e4f2'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///todo.db'
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
login_manager = LoginManager(app)
login_manager.login_view = 'login'
login_manager.login_message_category ='info'
from flask_api import routes
| [
"flask_sqlalchemy.SQLAlchemy",
"flask_login.LoginManager",
"flask_bcrypt.Bcrypt",
"flask.Flask"
] | [((142, 157), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (147, 157), False, 'from flask import Flask\n'), ((286, 301), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (296, 301), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((311, 322), 'flask_bcrypt.Bcrypt', 'Bcrypt', (['app'], {}), '(app)\n', (317, 322), False, 'from flask_bcrypt import Bcrypt\n'), ((340, 357), 'flask_login.LoginManager', 'LoginManager', (['app'], {}), '(app)\n', (352, 357), False, 'from flask_login import LoginManager\n')] |
# -*- coding: utf-8 -*-
""" p2p-streams (c) 2014 enen92 fightnight
This file manages the history of recent played p2p addon items
Functions:
list_history() -> Function list addon history. It grabs the info from history.txt in the userdata
add_to_history(name,url,mode,iconimage) -> Add to addon history. It appends a new line to history.txt
remove_history() -> delete history.txt if the file exists
"""
import xbmcvfs,xbmc,os,sys
from peertopeerutils.pluginxbmc import *
from peertopeerutils.iofile import *
from peertopeerutils.directoryhandle import addDir
history_file = os.path.join(pastaperfil,'history.txt')
def list_history():
if xbmcvfs.exists(history_file):
lines = open(history_file).readlines()
i=0
for line in lines:
info = line.split('|')
if i < int(settings.getSetting('items_per_page')):
try:
addDir(info[0],info[1],int(info[2]),info[3].replace('\n',''),1,False)
except: pass
i+=1
else:
sys.exit(0)
def add_to_history(name,url,mode,iconimage):
line = str(name) + '|' + str(url) + '|' +str(mode) +'|' + str(iconimage) + '\n'
if xbmcvfs.exists(history_file):
lines = open(history_file).readlines()
if len(lines) < int(settings.getSetting('items_per_page')):
if name in lines[0]: pass
else:
lines.insert(0,line)
open(history_file, 'w').writelines(lines)
else:
lines = open(history_file).readlines()
newlines = lines[0:-1*int(settings.getSetting('items_per_page'))-1]
newlines.insert(0,line)
open(history_file, 'w').writelines(newlines)
else:
save(history_file,line)
return
def remove_history():
if xbmcvfs.exists(history_file):
xbmcvfs.delete(history_file)
xbmc.executebuiltin("Notification(%s,%s,%i,%s)" % (translate(40000), translate(600026), 1,addonpath+"/icon.png"))
| [
"xbmcvfs.delete",
"xbmcvfs.exists",
"os.path.join",
"sys.exit"
] | [((612, 652), 'os.path.join', 'os.path.join', (['pastaperfil', '"""history.txt"""'], {}), "(pastaperfil, 'history.txt')\n", (624, 652), False, 'import xbmcvfs, xbmc, os, sys\n'), ((677, 705), 'xbmcvfs.exists', 'xbmcvfs.exists', (['history_file'], {}), '(history_file)\n', (691, 705), False, 'import xbmcvfs, xbmc, os, sys\n'), ((1118, 1146), 'xbmcvfs.exists', 'xbmcvfs.exists', (['history_file'], {}), '(history_file)\n', (1132, 1146), False, 'import xbmcvfs, xbmc, os, sys\n'), ((1625, 1653), 'xbmcvfs.exists', 'xbmcvfs.exists', (['history_file'], {}), '(history_file)\n', (1639, 1653), False, 'import xbmcvfs, xbmc, os, sys\n'), ((974, 985), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (982, 985), False, 'import xbmcvfs, xbmc, os, sys\n'), ((1657, 1685), 'xbmcvfs.delete', 'xbmcvfs.delete', (['history_file'], {}), '(history_file)\n', (1671, 1685), False, 'import xbmcvfs, xbmc, os, sys\n')] |
#!/usr/bin/env python3
import rospy
from std_msgs.msg import String
from std_msgs.msg import UInt8
from std_msgs.msg import Float32
import time
import random
class Eva(object):
move_eva = 0
pos_eva = Float32()
setPoint = 0
def __init__(self):
self.start_time = time.time()
print('*** ROS Node Init ***')
rospy.init_node('ROS_topics_test')
rospy.loginfo(' ROS_topics_test node started.')
# SUBSCRIBERS
print('\n *** SUBSCRIBERS ***')
print(' /robocol/sensorica/mover_servos_cajas -> UInt8')
rospy.Subscriber("/robocol/sensorica/mover_servos_cajas",UInt8,self.boxes_callback)
print(' /robocol/sensorica/dispensador_eva -> UInt8')
rospy.Subscriber("/robocol/sensorica/dispensador_eva",UInt8,self.leave_callback)
print(' /robocol/sensorica/celda_carga -> Float32')
rospy.Subscriber("/robocol/sensorica/celda_carga",Float32,self.load_cell_callback)
print(' /robocol/sensorica/posicion_actuador -> Float32')
rospy.Subscriber("/robocol/sensorica/posicion_actuador",Float32,self.pos_callback)
print(' /robocol/sensorica/vibracion_motores -> UInt8')
rospy.Subscriber("/robocol/sensorica/vibracion_motores",UInt8,self.vib_callback)
print(' /robocol/sensorica/sensors_test_data -> String')
rospy.Subscriber("/robocol/sensorica/sensors_test_data",String,self.sensors_data_callback)
# PUBLISHERS
print('*** PUBLISHERS ***')
print(' Publishing to /robocol/sensorica/temperatura as Float32')
self.temp_pub = rospy.Publisher("/robocol/sensorica/temperatura", Float32, queue_size=1)
print(' Publishing to /robocol/sensorica/ph as Float32')
self.ph_pub = rospy.Publisher("/robocol/sensorica/ph", Float32, queue_size=1)
print(' Publishing to /robocol/sensorica/humedad as Float32')
self.hum_pub = rospy.Publisher("/robocol/sensorica/humedad", Float32, queue_size=1)
print(' Publishing to /robocol/sensorica/monoxido as Float32')
self.co_pub = rospy.Publisher("/robocol/sensorica/monoxido", Float32, queue_size=1)
print(' Publishing to /robocol/sensorica/metano as Float32')
self.met_pub = rospy.Publisher("/robocol/sensorica/metano", Float32, queue_size=1)
print(' Publishing to /robocol/sensorica/hidrogeno as Float32')
self.hyd_pub = rospy.Publisher("/robocol/sensorica/hidrogeno", Float32, queue_size=1)
print(' Publishing /robocol/sensorica/posicion_actual_eva-> Float32')
self.pos_eva_pub = rospy.Publisher("/robocol/sensorica/posicion_actual_eva",Float32,queue_size=1)
# print(' Publishing to /robocol/sensorica/air as Float32')
# self.air_pub = rospy.Publisher("/robocol/sensorica/air", Float32, queue_size=1)
# print(' Publishing to /robocol/sensorica/co2 as Float32')
# self.co2_pub = rospy.Publisher("/robocol/sensorica/co2", Float32, queue_size=1)
def test_callback(self,param):
print(' test_callback -> Received: ',param.data)
def boxes_callback(self,param):
print(' boxes_callback -> Received: ',param.data)
def leave_callback(self,param):
print(' leave_callback -> Received: ',param.data)
def load_cell_callback(self,param):
print(' load_cell_callback -> Received: ',param.data)
def pos_callback(self,param):
self.setPoint = param.data
print(' pos_callback -> Received: ',self.setPoint)
# if(self.move_eva==0):
# if(self.setPoint>self.pos_eva.data):
# print('UP')
# self.move_eva = 1
# elif(self.setPoint<self.pos_eva.data):
# print('DOWN')
# self.move_eva = -1
# elif (self.setPoint==self.pos_eva.data):
# print('STOP')
# self.move_eva = 0
def vib_callback(self,param):
print(' vib_callback -> Received: ',param.data)
def sensors_data_callback(self,param):
print(' sensors_data_callback -> Received: ',param.data)
def publish(self, pub, value):
msg = Float32()
msg.data = value
pub.publish(msg)
if __name__ == '__main__':
try:
eva = Eva()
rate = rospy.Rate(5)
# c = 0
time.sleep(0.1)
print('\n\n Initial random position: ', end='')
rand = random.randint(-300,300)
print(rand)
eva.publish(eva.pos_eva_pub, rand)
while not rospy.is_shutdown():
# print(' EVA_POS -> Feedback: ',eva.pos_eva.data, ' Setpoint: ',eva.setPoint)
# if(eva.setPoint>eva.pos_eva.data):
# print('UP')
# eva.move_eva = 1
# eva.pos_eva.data += 1
# elif(eva.setPoint<eva.pos_eva.data):
# print('DOWN')
# eva.move_eva = -1
# eva.pos_eva.data -= 1
# elif (eva.setPoint==eva.pos_eva.data):
# print('STOP')
# eva.move_eva = 0
eva.publish(eva.temp_pub, float(random.randint(20, 23)))
eva.publish(eva.ph_pub, float(random.randint(6, 8)))
eva.publish(eva.hum_pub, float(random.randint(35, 55)))
eva.publish(eva.co_pub, float(random.randint(65, 75)))
eva.publish(eva.met_pub, float(random.randint(45, 55)))
eva.publish(eva.hyd_pub, float(random.randint(475, 525)))
# eva.publish(eva.air_pub, float(random.randint(0, 100)))
# eva.publish(eva.co2_pub, float(random.randint(0, 800)))
# if(eva.move_eva):
# eva.pos_eva.data = eva.pos_eva.data + 1
# eva.pos_eva_pub.publish(eva.pos_eva)
rate.sleep()
except rospy.ROSInterruptException:
pass
| [
"rospy.Publisher",
"random.randint",
"std_msgs.msg.Float32",
"rospy.is_shutdown",
"rospy.init_node",
"time.sleep",
"rospy.Rate",
"rospy.Subscriber",
"time.time",
"rospy.loginfo"
] | [((204, 213), 'std_msgs.msg.Float32', 'Float32', ([], {}), '()\n', (211, 213), False, 'from std_msgs.msg import Float32\n'), ((269, 280), 'time.time', 'time.time', ([], {}), '()\n', (278, 280), False, 'import time\n'), ((316, 350), 'rospy.init_node', 'rospy.init_node', (['"""ROS_topics_test"""'], {}), "('ROS_topics_test')\n", (331, 350), False, 'import rospy\n'), ((353, 400), 'rospy.loginfo', 'rospy.loginfo', (['""" ROS_topics_test node started."""'], {}), "(' ROS_topics_test node started.')\n", (366, 400), False, 'import rospy\n'), ((514, 604), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/robocol/sensorica/mover_servos_cajas"""', 'UInt8', 'self.boxes_callback'], {}), "('/robocol/sensorica/mover_servos_cajas', UInt8, self.\n boxes_callback)\n", (530, 604), False, 'import rospy\n'), ((658, 745), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/robocol/sensorica/dispensador_eva"""', 'UInt8', 'self.leave_callback'], {}), "('/robocol/sensorica/dispensador_eva', UInt8, self.\n leave_callback)\n", (674, 745), False, 'import rospy\n'), ((797, 886), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/robocol/sensorica/celda_carga"""', 'Float32', 'self.load_cell_callback'], {}), "('/robocol/sensorica/celda_carga', Float32, self.\n load_cell_callback)\n", (813, 886), False, 'import rospy\n'), ((944, 1033), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/robocol/sensorica/posicion_actuador"""', 'Float32', 'self.pos_callback'], {}), "('/robocol/sensorica/posicion_actuador', Float32, self.\n pos_callback)\n", (960, 1033), False, 'import rospy\n'), ((1089, 1176), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/robocol/sensorica/vibracion_motores"""', 'UInt8', 'self.vib_callback'], {}), "('/robocol/sensorica/vibracion_motores', UInt8, self.\n vib_callback)\n", (1105, 1176), False, 'import rospy\n'), ((1233, 1330), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/robocol/sensorica/sensors_test_data"""', 'String', 'self.sensors_data_callback'], {}), "('/robocol/sensorica/sensors_test_data', String, self.\n sensors_data_callback)\n", (1249, 1330), False, 'import rospy\n'), ((1455, 1527), 'rospy.Publisher', 'rospy.Publisher', (['"""/robocol/sensorica/temperatura"""', 'Float32'], {'queue_size': '(1)'}), "('/robocol/sensorica/temperatura', Float32, queue_size=1)\n", (1470, 1527), False, 'import rospy\n'), ((1603, 1666), 'rospy.Publisher', 'rospy.Publisher', (['"""/robocol/sensorica/ph"""', 'Float32'], {'queue_size': '(1)'}), "('/robocol/sensorica/ph', Float32, queue_size=1)\n", (1618, 1666), False, 'import rospy\n'), ((1748, 1816), 'rospy.Publisher', 'rospy.Publisher', (['"""/robocol/sensorica/humedad"""', 'Float32'], {'queue_size': '(1)'}), "('/robocol/sensorica/humedad', Float32, queue_size=1)\n", (1763, 1816), False, 'import rospy\n'), ((1898, 1967), 'rospy.Publisher', 'rospy.Publisher', (['"""/robocol/sensorica/monoxido"""', 'Float32'], {'queue_size': '(1)'}), "('/robocol/sensorica/monoxido', Float32, queue_size=1)\n", (1913, 1967), False, 'import rospy\n'), ((2048, 2115), 'rospy.Publisher', 'rospy.Publisher', (['"""/robocol/sensorica/metano"""', 'Float32'], {'queue_size': '(1)'}), "('/robocol/sensorica/metano', Float32, queue_size=1)\n", (2063, 2115), False, 'import rospy\n'), ((2199, 2269), 'rospy.Publisher', 'rospy.Publisher', (['"""/robocol/sensorica/hidrogeno"""', 'Float32'], {'queue_size': '(1)'}), "('/robocol/sensorica/hidrogeno', Float32, queue_size=1)\n", (2214, 2269), False, 'import rospy\n'), ((2363, 2448), 'rospy.Publisher', 'rospy.Publisher', (['"""/robocol/sensorica/posicion_actual_eva"""', 'Float32'], {'queue_size': '(1)'}), "('/robocol/sensorica/posicion_actual_eva', Float32, queue_size=1\n )\n", (2378, 2448), False, 'import rospy\n'), ((3703, 3712), 'std_msgs.msg.Float32', 'Float32', ([], {}), '()\n', (3710, 3712), False, 'from std_msgs.msg import Float32\n'), ((3810, 3823), 'rospy.Rate', 'rospy.Rate', (['(5)'], {}), '(5)\n', (3820, 3823), False, 'import rospy\n'), ((3836, 3851), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3846, 3851), False, 'import time\n'), ((3911, 3936), 'random.randint', 'random.randint', (['(-300)', '(300)'], {}), '(-300, 300)\n', (3925, 3936), False, 'import random\n'), ((4000, 4019), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (4017, 4019), False, 'import rospy\n'), ((4453, 4475), 'random.randint', 'random.randint', (['(20)', '(23)'], {}), '(20, 23)\n', (4467, 4475), False, 'import random\n'), ((4511, 4531), 'random.randint', 'random.randint', (['(6)', '(8)'], {}), '(6, 8)\n', (4525, 4531), False, 'import random\n'), ((4568, 4590), 'random.randint', 'random.randint', (['(35)', '(55)'], {}), '(35, 55)\n', (4582, 4590), False, 'import random\n'), ((4626, 4648), 'random.randint', 'random.randint', (['(65)', '(75)'], {}), '(65, 75)\n', (4640, 4648), False, 'import random\n'), ((4685, 4707), 'random.randint', 'random.randint', (['(45)', '(55)'], {}), '(45, 55)\n', (4699, 4707), False, 'import random\n'), ((4744, 4768), 'random.randint', 'random.randint', (['(475)', '(525)'], {}), '(475, 525)\n', (4758, 4768), False, 'import random\n')] |
import json
class Config:
""" Class used for operations on config file """
def __init__(self, path_to_config: str):
self.path = path_to_config
def read(self):
with open(self.path) as f:
j = json.load(f)
return j
def save(self, j: object):
with open(self.path, 'w') as f:
json.dump(j, f, indent=4)
def edit(self, key: str, value: any):
j = self.read()
j[key] = value
self.save(j)
| [
"json.load",
"json.dump"
] | [((232, 244), 'json.load', 'json.load', (['f'], {}), '(f)\n', (241, 244), False, 'import json\n'), ((350, 375), 'json.dump', 'json.dump', (['j', 'f'], {'indent': '(4)'}), '(j, f, indent=4)\n', (359, 375), False, 'import json\n')] |
from models.recurrent_model import Recurrent_QL_Model
import tensorflow as tf
class RecurrentQL_Model(Recurrent_QL_Model):
def _get_q_values_op(self, states, dynamic_states, scope):
'''
Args
- state: tf.Tensor, shape [batch_size, state_dim]
- scope: str, name of scope
Returns
- q: tf.Tensor, shape [batch_size, num_actions]
'''
h = states
num_actions = self.train_simulator.get_num_actions()
with tf.variable_scope(scope):
for width in self.config.widths:
h = tf.layers.dense(h, width, activation=tf.nn.relu)
q_values = tf.layers.dense(h, num_actions)
return q_values
| [
"tensorflow.layers.dense",
"tensorflow.variable_scope"
] | [((485, 509), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope'], {}), '(scope)\n', (502, 509), True, 'import tensorflow as tf\n'), ((648, 679), 'tensorflow.layers.dense', 'tf.layers.dense', (['h', 'num_actions'], {}), '(h, num_actions)\n', (663, 679), True, 'import tensorflow as tf\n'), ((576, 624), 'tensorflow.layers.dense', 'tf.layers.dense', (['h', 'width'], {'activation': 'tf.nn.relu'}), '(h, width, activation=tf.nn.relu)\n', (591, 624), True, 'import tensorflow as tf\n')] |
from app.main.graphing.graph_tools import generate_horizontal_line_chart_dict, generate_box_and_whisker_dict
def _get_title_and_df_keys_from_tab_value(chart_title):
"""Remove extra quotes from Jinja template and separate multi-column chart titles"""
chart_title = chart_title.replace('"', '')
axes_titles = chart_title.split(' / ')
df_keys = [item.lower().replace(' ', '_') for item in axes_titles]
return chart_title, df_keys, axes_titles
def get_graph_dict(df, chart_title, yaxis_title=None):
title, df_keys, axes_titles = _get_title_and_df_keys_from_tab_value(chart_title)
pd_series = df[df_keys[0]]
pd_series = pd_series.dropna()
if pd_series.dtype == 'float64' or pd_series.dtype == 'int64' or ('$' in list(pd_series.values)[0]):
return generate_box_and_whisker_dict(pd_series)
return generate_horizontal_line_chart_dict(title='', pd_series=pd_series, yaxis_title=yaxis_title)
| [
"app.main.graphing.graph_tools.generate_box_and_whisker_dict",
"app.main.graphing.graph_tools.generate_horizontal_line_chart_dict"
] | [((842, 937), 'app.main.graphing.graph_tools.generate_horizontal_line_chart_dict', 'generate_horizontal_line_chart_dict', ([], {'title': '""""""', 'pd_series': 'pd_series', 'yaxis_title': 'yaxis_title'}), "(title='', pd_series=pd_series,\n yaxis_title=yaxis_title)\n", (877, 937), False, 'from app.main.graphing.graph_tools import generate_horizontal_line_chart_dict, generate_box_and_whisker_dict\n'), ((790, 830), 'app.main.graphing.graph_tools.generate_box_and_whisker_dict', 'generate_box_and_whisker_dict', (['pd_series'], {}), '(pd_series)\n', (819, 830), False, 'from app.main.graphing.graph_tools import generate_horizontal_line_chart_dict, generate_box_and_whisker_dict\n')] |
import torch.nn as nn
import torch
import os
import numpy as np
from vilmedic.networks.blocks.rnn.textencoder import TextEncoder
from vilmedic.networks.blocks.rnn.decoder import ConditionalDecoder
from vilmedic.networks.blocks.rnn.visualdecoder import VisualConditionalDecoder
from vilmedic.networks.blocks.vision import *
from vilmedic.networks.blocks.rnn.evaluation import beam_search
from vilmedic.networks.models.utils import get_n_params
class SumRNN(nn.Module):
def __init__(self, encoder, decoder, cnn=None, **kwargs):
super().__init__()
self.kwargs = kwargs
encoder_func = encoder.proto
decoder_func = decoder.proto
self.enc = eval(encoder_func)(**encoder)
self.dec = eval(decoder_func)(**decoder, encoder_size=self.enc.ctx_size)
# Evaluation
self.eval_func = beam_search
self.reset_parameters()
self.cnn = None
if cnn is not None:
cnn_func = cnn.pop('proto')
self.visual_projection = nn.Linear(cnn.pop("visual_embedding_dim"), self.dec.hidden_size)
self.cnn = eval(cnn_func)(**cnn)
def reset_parameters(self):
for name, param in self.named_parameters():
# Skip 1-d biases and scalars
if param.requires_grad and param.dim() > 1:
nn.init.kaiming_normal_(param.data)
if 'src_emb' in self.kwargs:
self.set_embeddings(self.kwargs['src_emb'], self.enc.emb)
if 'tgt_emb' in self.kwargs:
self.set_embeddings(self.kwargs['tgt_emb'], self.dec.emb)
if hasattr(self, 'enc') and hasattr(self.enc, 'emb'):
# Reset padding embedding to 0
with torch.no_grad():
self.enc.emb.weight.data[0].fill_(0)
def encode(self, input_ids, feats=None, images=None, **kwargs):
# RNN model is batch_first = False
input_ids = input_ids.permute(1, 0)
if feats is not None:
feats = feats.cuda().permute(1, 0, 2) # RNN takes (n, bs, feat)
return {'enc': self.enc(input_ids, feats), 'feats': (feats, None)}
elif images is not None:
with torch.no_grad():
feats = self.cnn(images.cuda())
feats = self.visual_projection(feats)
feats = feats.permute(1, 0, 2) # RNN takes (n, bs, feat)
return {'enc': self.enc(input_ids), 'feats': (feats, None)}
else:
return {'enc': self.enc(input_ids)}
def decode(self, enc_outputs, decoder_input_ids):
# RNN model is batch_first = False
decoder_input_ids = decoder_input_ids.permute(1, 0)
result = self.dec(enc_outputs, decoder_input_ids)
result['n_items'] = torch.nonzero(decoder_input_ids[1:]).shape[0]
return result
def forward(self, input_ids, decoder_input_ids, **kwargs):
input_ids = input_ids.cuda()
decoder_input_ids = decoder_input_ids.cuda()
enc_outputs = self.encode(input_ids, **kwargs)
result = self.decode(enc_outputs, decoder_input_ids)
result['loss'] = result['loss'] / result['n_items']
return result
def set_embeddings(self, path, obj):
filename = os.path.basename(path)
embs = np.load(path, allow_pickle=True)
assert len(embs) == obj.weight.size(0)
success = 0
with torch.no_grad():
for i, emb in enumerate(embs):
if emb is None:
continue
obj.weight.data[i] = torch.from_numpy(emb)
success += 1
print(filename, success, '/', len(embs), 'words loaded')
def __repr__(self):
s = super().__repr__() + '\n'
s += "{}\n".format(get_n_params(self))
return s
| [
"vilmedic.networks.models.utils.get_n_params",
"torch.nn.init.kaiming_normal_",
"torch.from_numpy",
"torch.nonzero",
"os.path.basename",
"torch.no_grad",
"numpy.load"
] | [((3209, 3231), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (3225, 3231), False, 'import os\n'), ((3247, 3279), 'numpy.load', 'np.load', (['path'], {'allow_pickle': '(True)'}), '(path, allow_pickle=True)\n', (3254, 3279), True, 'import numpy as np\n'), ((3360, 3375), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3373, 3375), False, 'import torch\n'), ((3724, 3742), 'vilmedic.networks.models.utils.get_n_params', 'get_n_params', (['self'], {}), '(self)\n', (3736, 3742), False, 'from vilmedic.networks.models.utils import get_n_params\n'), ((1325, 1360), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['param.data'], {}), '(param.data)\n', (1348, 1360), True, 'import torch.nn as nn\n'), ((1699, 1714), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1712, 1714), False, 'import torch\n'), ((2725, 2761), 'torch.nonzero', 'torch.nonzero', (['decoder_input_ids[1:]'], {}), '(decoder_input_ids[1:])\n', (2738, 2761), False, 'import torch\n'), ((3518, 3539), 'torch.from_numpy', 'torch.from_numpy', (['emb'], {}), '(emb)\n', (3534, 3539), False, 'import torch\n'), ((2162, 2177), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2175, 2177), False, 'import torch\n')] |
import random
import os
import socket
import string
import sys
import numpy
variables = {}
def connect( ip, port ):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect( (ip, port) )
except:
print('Failed to connect to %s:%d' %(ip, port))
sys.exit(1)
return s
def rs(l):
z = ''
for _ in range(l):
z += random.choice( string.ascii_lowercase )
return z
def readuntil( s, u ):
z = ''
try:
while z.endswith( u ) == False:
z += s.recv(1).decode('utf-8')
except Exception as e:
print('[ERROR]')
print (e)
return None
return str(z)
def readline( s ):
return readuntil( s, '\n')
def readprompt(s):
return readuntil(s, '>>> ')
## used for writing logs
def sendwrapper(s, data ):
s.send(data.encode('utf-8'))
def create_named_scalar_matrix( s ):
global variables
varname = rs(7)
val = float(random.randint(-5, 5))
data = varname + ' = ' + str(val)
sendwrapper(s,data + '\n')
variables[varname] = [[val]];
print("[TEST] create_named_scalar_matrix -- %s" %(data) )
try:
x = readuntil(s, '= \n')
except:
print('[ERROR] create_named_scalar_matrix -- failed to read to "="')
return 0
expectation = varname + ' = \n'
if x != expectation:
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = '\n\t' + "{0:.3f}".format(val) + ' ' + '\n\n'
try:
x = readuntil(s, '\n\n')
except:
print('ERROR] create_named_scalar_matrix -- failed to read "\n\n"')
return 0
if x != expectation:
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
readprompt(s)
print('[SUCCESS] create_named_scalar_matrix')
return 1
## returns a tuple: list of lists and the string to declare it
def genmatrix( rows, cols, start_interval=-5, end_interval=5 ):
nm = []
data = "[ "
for i in range(rows):
c = []
for j in range(cols):
element = float(random.randint(start_interval, end_interval))
data += str(element) + " "
c.append(element)
if i < rows - 1:
data += '; '
nm.append(c)
data += ']'
return (nm, data)
def create_matrix(s):
global variables
varname = rs(7)
A, data = genmatrix( random.randint(2,5), random.randint(2,5) )
data = varname + ' = ' + data
print('[TEST] create_matrix %d x %d -- ' %(len(A), len(A[0])) + data)
sendwrapper(s, data + '\n')
variables[varname] = A
try:
x = readuntil(s, '= \n')
except:
print('[ERROR] create_matrix -- failed to read to "="')
return 0
expectation = varname + ' = \n'
if x != expectation:
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in A:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
readprompt(s)
print('[SUCCESS] create_matrix')
return 1
def check_named_matrix(s):
global variables
if len(variables) == 0:
check_named_matrix(s)
print('[TEST] check_named_matrix')
y = random.choice([*variables.keys()])
sendwrapper(s,y + '\n')
nm = variables[y]
expectation = ''
for row in nm:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] check_named_matrix')
def transpose_matrix(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
nm = variables[varname]
cmd += varname + "'"
else:
nm, data = genmatrix( random.randint(2,10), random.randint(2,10))
cmd += data + "'"
sendwrapper(s,cmd + '\n')
print('[TEST] transpose_matrix -- %s' %(cmd) )
tp = numpy.array(nm).transpose().tolist()
if store_matrix:
variables[newvarname] = tp
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] transpose_matrix -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in tp:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] transpose_matrix')
return 1
def scalar_matrix_exp(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
nm, data = genmatrix( random.randint(2,10), random.randint(2,10), -5, 5)
cmd += data + " ^ "
### I need a scalar
sc = random.randint(1, 3)
cmd += str(sc)
A = numpy.power( numpy.array(nm), sc ).tolist()
sendwrapper(s,cmd + '\n')
print('[TEST] scalar_matrix_exp -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = A
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] scalar_matrix_exp -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in A:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] scalar_matrix_exp')
return 1
def dot_hat_matrix(s):
global variables
cmd = ''
varname = ''
A, data = genmatrix( random.randint(2,5), random.randint(2,5))
cmd += data + " .^ "
## I need a matrix of the same size
B, data = genmatrix( len(A), len(A[0]), -3, 3 )
C = numpy.power( numpy.array(A), numpy.array(B) ).tolist()
cmd += data
sendwrapper(s,cmd + '\n')
print('[TEST] dot_hat_matrix -- %s' %(cmd) )
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
if val == -float('Inf'):
val = float('Inf')
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] dot_hat_matrix')
return 1
def scalar_multiply_matrix(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
else:
A, data = genmatrix( random.randint(2,10), random.randint(2,10))
## I need a scalar
B = float(random.randint(-5,5))
## decide which side the scalar is on
if random.randint(0,100) > 50:
## scalar first
cmd += str(B) + ' * '
if use_existing:
cmd += varname
else:
cmd += data
else:
if use_existing:
cmd += varname
else:
cmd += data
cmd += ' * ' + str(B)
C = ( numpy.array(A) * numpy.array(B) ).tolist()
sendwrapper(s,cmd + '\n')
print('[TEST] scalar_multiply_matrix -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] scalar_multiply_matrix -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0;
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] scalar_multiply_matrix')
return 1
def multiply_matrices(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
cmd += varname + ' * '
else:
A, data = genmatrix( random.randint(2,10), random.randint(2,10))
cmd += data + ' * '
## I need a new matrix of specific size
B, data = genmatrix( len(A[0]), random.randint(2,5) )
cmd += data
C = ( numpy.array(A).dot( numpy.array(B) )).tolist()
sendwrapper(s,cmd + '\n')
print('[TEST] multiply_matrices -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] multiply_matrices -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] multiply_matrices')
return 1
def scalar_add_matrix(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
else:
A, data = genmatrix( random.randint(2,10), random.randint(2,10))
## I need a scalar
B = float(random.randint(-5,5))
## decide which side the scalar is on
if random.randint(0,100) > 50:
## scalar first
cmd += str(B) + ' + '
if use_existing:
cmd += varname
else:
cmd += data
else:
if use_existing:
cmd += varname
else:
cmd += data
cmd += ' +' + str(B)
C = ( numpy.array(A) + numpy.array(B) ).tolist()
sendwrapper(s,cmd + '\n')
print('[TEST] scalar_add_matrix -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] scalar_add_matrix -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] scalar_add_matrix')
return 1
def add_func(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
Adata = varname
else:
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10))
size_selection = random.randint(0,100)
B, Bdata = genmatrix(len(A), len(A[0]))
## decide order
use_func = (random.randint(0,100) > 50)
if random.randint(0,100) > 50:
## A first
cmd += '%add( ' + Adata + ', ' + Bdata + ')'
else:
cmd += '%add( ' + Bdata + ', ' + Adata + ')'
C = (numpy.array(A) + numpy.array(B)).tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] add_func -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] add_func -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] add_func')
return 1
def add_matrices(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
Adata = varname
else:
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10))
size_selection = random.randint(0,100)
if size_selection > 66:
## same rows
B, Bdata = genmatrix( len(A), 1)
elif size_selection > 33:
## same cols
B, Bdata = genmatrix( 1, len(A[0]))
else:
## same both
B, Bdata = genmatrix(len(A), len(A[0]))
## decide order
if random.randint(0,100) > 50:
## A first
cmd += Adata + ' + ' + Bdata
else:
cmd += Bdata + ' + ' + Adata
C = (numpy.array(A) + numpy.array(B)).tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] add_matrices -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] add_matrices -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] add_matrices')
return 1
def scalar_sub_matrix(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
else:
A, data = genmatrix( random.randint(2,10), random.randint(2,10))
## I need a scalar
B = float(random.randint(-5,5))
## decide which side the scalar is on
if random.randint(0,100) > 50:
## scalar first
cmd += str(B) + ' - '
if use_existing:
cmd += varname
else:
cmd += data
C = ( numpy.array(B) - numpy.array(A) ).tolist()
else:
if use_existing:
cmd += varname
else:
cmd += data
cmd += ' - ' + str(B)
C = ( numpy.array(A) - numpy.array(B) ).tolist()
sendwrapper(s,cmd + '\n')
print('[TEST] scalar_sub_matrix -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] scalar_sub_matrix -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] scalar_sub_matrix')
return 1
def reshape(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
Adata = varname
else:
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10))
new_row = len(A[0])
new_col = len(A)
## create the reshape matrix
nv = rs(7)
rscmd = nv + ' = [' + str(new_row) + ' ' + str(new_col) + ' ] '
cmd += '%reshape( ' + Adata + ', ' + nv + ')'
sendwrapper( s, rscmd + '\n')
readprompt(s)
sendwrapper(s, cmd + '\n')
print('[TEST] reshape -- %s' %(cmd) )
C = numpy.reshape( numpy.array(A), (len(A[0]), len(A)))
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] reshape -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] reshape')
return 1
def sub_func(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
Adata = varname
else:
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10))
size_selection = random.randint(0,100)
B, Bdata = genmatrix(len(A), len(A[0]))
## decide order
if random.randint(0,100) > 50:
## A first
cmd += '%sub( ' + Adata + ', ' + Bdata + ' )'
C = (numpy.array(A) - numpy.array(B)).tolist()
else:
cmd += '%sub( ' + Bdata + ', ' + Adata + ' )'
C = (numpy.array(B) - numpy.array(A)).tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] sub_func -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] sub_func -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
if val == -float('Inf'):
val = float('Inf')
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] sub_func')
return 1
def sub_matrices(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
Adata = varname
else:
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10))
size_selection = random.randint(0,100)
if size_selection > 66:
## same rows
B, Bdata = genmatrix( len(A), 1)
elif size_selection > 33:
## same cols
B, Bdata = genmatrix( 1, len(A[0]))
else:
## same both
B, Bdata = genmatrix(len(A), len(A[0]))
## decide order
if random.randint(0,100) > 50:
## A first
cmd += Adata + ' - ' + Bdata
C = (numpy.array(A) - numpy.array(B)).tolist()
else:
cmd += Bdata + ' - ' + Adata
C = (numpy.array(B) - numpy.array(A)).tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] sub_matrices -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] sub_matrices -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
if val == -float('Inf'):
val = float('Inf')
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] sub_matrices')
return 1
def dot_multiply_matrices(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
Adata = varname
else:
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10))
size_selection = random.randint(0,100)
if size_selection > 66:
## same rows
B, Bdata = genmatrix( len(A), 1)
elif size_selection > 33:
## same cols
B, Bdata = genmatrix( 1, len(A[0]))
else:
## same both
B, Bdata = genmatrix(len(A), len(A[0]))
## decide order
if random.randint(0,100) > 50:
## A first
cmd += Adata + ' .* ' + Bdata
else:
cmd += Bdata + ' .* ' + Adata
C = (numpy.array(A) * numpy.array(B)).tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] dot_multiply_matrices -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] dot_multiply_matrices -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] dot_multiply_matrices')
return 1
def dot_divide_matrices(s):
global variables
newvarname = ''
cmd = ''
## I'm not going to use existing because I only want to gen non zero elements
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10), 1, 5)
size_selection = random.randint(0,100)
if size_selection > 66:
## same rows
B, Bdata = genmatrix( len(A), 1, 1, 5)
elif size_selection > 33:
## same cols
B, Bdata = genmatrix( 1, len(A[0]), 1, 5)
else:
## same both
B, Bdata = genmatrix(len(A), len(A[0]), 1, 5)
## decide order
if random.randint(0,100) > 50:
## A first
cmd += Adata + ' ./ ' + Bdata
C = (numpy.array(A) / numpy.array(B)).tolist()
else:
cmd += Bdata + ' ./ ' + Adata
C = (numpy.array(B) / numpy.array(A)).tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] dot_divide_matrices -- %s' %(cmd) )
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] dot_divide_matrices')
return 1
def sigmoid(s):
global variables
newvarname = ''
cmd = ''
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
Adata = varname
else:
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10))
cmd += '%sig( ' + Adata + ')'
C = numpy.array(A)
C = 1 / (1 + numpy.exp(-C))
C = C.tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] sigmoid -- %s' %(cmd) )
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] sigmoid')
return 1
def log_two(s):
global variables
cmd = ''
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10), 2, 8)
cmd += '%lg( ' + Adata + ')'
C = numpy.array(A)
C = numpy.log2(A)
C = C.tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] log_two -- %s' %(cmd) )
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
if val == -float('Inf'):
val = float('Inf')
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] log_two')
return 1
def log_ten(s):
global variables
cmd = ''
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10), 2, 8)
cmd += '%log( ' + Adata + ')'
C = numpy.array(A)
C = numpy.log10(A)
C = C.tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] log_ten -- %s' %(cmd) )
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
if val == -float('Inf'):
val = float('Inf')
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] log_ten')
return 1
def sqrtm(s):
global variables
cmd = ''
A, Adata = genmatrix( random.randint(2,10), random.randint(2,10), 2, 8)
cmd += '%sqrt( ' + Adata + ')'
C = numpy.array(A)
C = numpy.sqrt(A)
C = C.tolist()
sendwrapper(s, cmd + '\n')
print('[TEST] sqrtm -- %s' %(cmd) )
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
if val == -float('Inf'):
val = float('Inf')
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] sqrtm')
return 1
def get_columns(s):
global variables
cmd = ''
varname = random.choice( [*variables.keys()])
nm = variables[varname]
cmd += '%cols( ' + varname + ")"
sendwrapper(s,cmd + '\n')
print('[TEST] get_columns -- %s' %(cmd) )
expectation = '\n\t' + "{0:.3f}".format(len(nm[0])) + ' \n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] get_columns')
return 1
def get_rows(s):
global variables
cmd = ''
varname = random.choice( [*variables.keys()])
A = variables[varname]
cmd += '%rows( ' + varname + ")"
sendwrapper(s,cmd + '\n')
print('[TEST] get_rows -- %s' %(cmd) )
expectation = '\n\t' + "{0:.3f}".format(len(A)) + ' \n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] get_rows')
return 1
def identity(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
size_selection = random.randint(2,5)
cmd += '%I(' + str(size_selection) + ')'
C = []
for i in range(size_selection):
row = []
for j in range(size_selection):
if i == j:
row.append(float(1))
else:
row.append(float(0))
C.append(row)
sendwrapper(s, cmd + '\n')
print('[TEST] identity -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] identity -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] identity')
return 1
def magnitude(s):
global variables
newvarname = ''
cmd = ''
A, Adata = genmatrix( random.randint(2,10), 1)
mag = 0
for x in A:
mag += x[0] ** 2
mag = numpy.sqrt(mag)
C = [[mag]]
## decide to use function or bars
if random.randint(0,100) > 50:
cmd += '||' + Adata + '||'
else:
cmd += '%mag(' + Adata + ')'
sendwrapper(s, cmd + '\n')
print('[TEST] magnitude -- %s' %(cmd) )
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] magnitude')
return 1
def zeros(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
rows = random.randint(2,5)
cols = random.randint(2,5)
cmd += '%zeros(' + str(rows) + ',' + str(cols) + ')'
C = []
for i in range(rows):
row = []
for j in range(cols):
row.append(float(0))
C.append(row)
sendwrapper(s, cmd + '\n')
print('[TEST] zeros -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] zeros -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] zeros')
return 1
def ones(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
rows = random.randint(2,5)
cols = random.randint(2,5)
cmd += '%ones(' + str(rows) + ',' + str(cols) + ')'
C = []
for i in range(rows):
row = []
for j in range(cols):
row.append(float(1))
C.append(row)
sendwrapper(s, cmd + '\n')
print('[TEST] ones -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] ones -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] ones')
return 1
def sum_matrix(s):
global variables
newvarname = ''
cmd = ''
## decide to store or not
store_matrix = (random.randint(0, 100) > 50)
if store_matrix:
newvarname = rs(7)
cmd = newvarname + ' = '
## decide to use an existing variable or not
use_existing = (random.randint(0, 100) > 50)
varname = ''
if use_existing:
varname = random.choice( [*variables.keys()])
A = variables[varname]
data = varname
else:
A, data = genmatrix( random.randint(2,10), random.randint(2,10))
## decide to sum rows or sum cols
if random.randint(0,100) > 50:
## cols
C = numpy.array(A).sum(0).tolist()
row = []
for x in C:
row.append(x)
C = [row]
dimension = 2.0
else:
##rows
C = numpy.array(A).sum(1).tolist()
dimension = 1.0
B = []
for x in C:
B.append([x])
C = B[:]
cmd += '%sum( ' + data + ',' + str(dimension) + ')'
sendwrapper(s,cmd + '\n')
print('[TEST] sum_matrix -- %s' %(cmd) )
if store_matrix:
variables[newvarname] = C
expectation = newvarname + ' = \n'
x = ''
try:
x = readuntil( s, ' = \n')
except:
print('[ERROR] sum_matrix -- failed to read "="')
return 0
if ( x != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, x) )
return 0
expectation = ''
for row in C:
expectation += '\n\t'
for val in row:
if val == 0.0:
val = 0.0
expectation += "{0:.3f}".format(val) + ' '
expectation += '\n\n'
z = readuntil(s, '\n\n')
if ( z != expectation):
print('[ERROR] expected %s ; received %s' %(expectation, z) )
return 0
readprompt(s)
print('[SUCCESS] sum_matrix')
return 1
## TODO: specify timeout
def run_test(HOST, PORT):
s = connect( HOST, PORT )
### eat the initial prompt
readprompt(s)
## begin by creating a few named matrices
for _ in range(2):
create_matrix(s)
for _ in range(200):
test_func = random.choice( [scalar_matrix_exp, transpose_matrix, check_named_matrix,
create_named_scalar_matrix, create_matrix, dot_hat_matrix,
scalar_multiply_matrix, multiply_matrices, scalar_add_matrix,
add_matrices, reshape, scalar_sub_matrix, sub_matrices,
dot_multiply_matrices, add_func, dot_divide_matrices, sigmoid,
log_two, log_ten, get_columns, identity, magnitude,
zeros, ones, sub_func, sum_matrix] )
r = test_func(s)
if r == 0:
print('[FAILED] Test did not succeed')
s.close()
sys.exit()
sendwrapper(s,'exit\n')
s.close()
def main():
if len(sys.argv) == 3:
HOST = sys.argv[1]
PORT = int(sys.argv[2])
else:
if 'HOST' in os.environ and 'PORT' in os.environ:
HOST = os.environ['HOST']
PORT = int(os.environ['PORT'])
else:
print('[ERROR] target and port must be specified via arguments or variables.')
sys.exit(1)
if 'SEED' in os.environ:
SEED = int(os.environ['SEED'])
else:
SEED = random.randint(0, 2**64 - 1)
print('[INFO] Poller for the Pierrepont Challenge Binary')
print('[INFO] SEED: {0}'.format(SEED))
random.seed( a=SEED )
run_test(HOST, PORT)
print('[SUCCESS] Poller completed successfully')
if __name__ == '__main__':
main() | [
"numpy.log10",
"numpy.sqrt",
"random.choice",
"socket.socket",
"random.seed",
"numpy.exp",
"numpy.array",
"sys.exit",
"numpy.log2",
"random.randint"
] | [((126, 175), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (139, 175), False, 'import socket\n'), ((5991, 6011), 'random.randint', 'random.randint', (['(1)', '(3)'], {}), '(1, 3)\n', (6005, 6011), False, 'import random\n'), ((14114, 14136), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (14128, 14136), False, 'import random\n'), ((15926, 15948), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (15940, 15948), False, 'import random\n'), ((21713, 21735), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (21727, 21735), False, 'import random\n'), ((23682, 23704), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (23696, 23704), False, 'import random\n'), ((25850, 25872), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (25864, 25872), False, 'import random\n'), ((27572, 27594), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (27586, 27594), False, 'import random\n'), ((29161, 29175), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (29172, 29175), False, 'import numpy\n'), ((29934, 29948), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (29945, 29948), False, 'import numpy\n'), ((29958, 29971), 'numpy.log2', 'numpy.log2', (['A'], {}), '(A)\n', (29968, 29971), False, 'import numpy\n'), ((30767, 30781), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (30778, 30781), False, 'import numpy\n'), ((30791, 30805), 'numpy.log10', 'numpy.log10', (['A'], {}), '(A)\n', (30802, 30805), False, 'import numpy\n'), ((31600, 31614), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (31611, 31614), False, 'import numpy\n'), ((31624, 31637), 'numpy.sqrt', 'numpy.sqrt', (['A'], {}), '(A)\n', (31634, 31637), False, 'import numpy\n'), ((33562, 33582), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (33576, 33582), False, 'import random\n'), ((34936, 34951), 'numpy.sqrt', 'numpy.sqrt', (['mag'], {}), '(mag)\n', (34946, 34951), False, 'import numpy\n'), ((35912, 35932), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (35926, 35932), False, 'import random\n'), ((35943, 35963), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (35957, 35963), False, 'import random\n'), ((37270, 37290), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (37284, 37290), False, 'import random\n'), ((37301, 37321), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (37315, 37321), False, 'import random\n'), ((42031, 42050), 'random.seed', 'random.seed', ([], {'a': 'SEED'}), '(a=SEED)\n', (42042, 42050), False, 'import random\n'), ((380, 417), 'random.choice', 'random.choice', (['string.ascii_lowercase'], {}), '(string.ascii_lowercase)\n', (393, 417), False, 'import random\n'), ((957, 978), 'random.randint', 'random.randint', (['(-5)', '(5)'], {}), '(-5, 5)\n', (971, 978), False, 'import random\n'), ((2436, 2456), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (2450, 2456), False, 'import random\n'), ((2457, 2477), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (2471, 2477), False, 'import random\n'), ((4150, 4172), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (4164, 4172), False, 'import random\n'), ((4331, 4353), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (4345, 4353), False, 'import random\n'), ((5743, 5765), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (5757, 5765), False, 'import random\n'), ((5881, 5902), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (5895, 5902), False, 'import random\n'), ((5903, 5924), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (5917, 5924), False, 'import random\n'), ((7126, 7146), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (7140, 7146), False, 'import random\n'), ((7147, 7167), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (7161, 7167), False, 'import random\n'), ((8110, 8132), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (8124, 8132), False, 'import random\n'), ((8291, 8313), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (8305, 8313), False, 'import random\n'), ((8566, 8587), 'random.randint', 'random.randint', (['(-5)', '(5)'], {}), '(-5, 5)\n', (8580, 8587), False, 'import random\n'), ((8638, 8660), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (8652, 8660), False, 'import random\n'), ((10083, 10105), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (10097, 10105), False, 'import random\n'), ((10264, 10286), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (10278, 10286), False, 'import random\n'), ((10641, 10661), 'random.randint', 'random.randint', (['(2)', '(5)'], {}), '(2, 5)\n', (10655, 10661), False, 'import random\n'), ((11755, 11777), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (11769, 11777), False, 'import random\n'), ((11936, 11958), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (11950, 11958), False, 'import random\n'), ((12211, 12232), 'random.randint', 'random.randint', (['(-5)', '(5)'], {}), '(-5, 5)\n', (12225, 12232), False, 'import random\n'), ((12283, 12305), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (12297, 12305), False, 'import random\n'), ((13649, 13671), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (13663, 13671), False, 'import random\n'), ((13830, 13852), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (13844, 13852), False, 'import random\n'), ((14223, 14245), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (14237, 14245), False, 'import random\n'), ((14259, 14281), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (14273, 14281), False, 'import random\n'), ((15461, 15483), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (15475, 15483), False, 'import random\n'), ((15642, 15664), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (15656, 15664), False, 'import random\n'), ((16245, 16267), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (16259, 16267), False, 'import random\n'), ((17432, 17454), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (17446, 17454), False, 'import random\n'), ((17613, 17635), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (17627, 17635), False, 'import random\n'), ((17888, 17909), 'random.randint', 'random.randint', (['(-5)', '(5)'], {}), '(-5, 5)\n', (17902, 17909), False, 'import random\n'), ((17960, 17982), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (17974, 17982), False, 'import random\n'), ((19442, 19464), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (19456, 19464), False, 'import random\n'), ((19623, 19645), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (19637, 19645), False, 'import random\n'), ((20252, 20266), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (20263, 20266), False, 'import numpy\n'), ((21248, 21270), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (21262, 21270), False, 'import random\n'), ((21429, 21451), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (21443, 21451), False, 'import random\n'), ((21812, 21834), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (21826, 21834), False, 'import random\n'), ((23217, 23239), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (23231, 23239), False, 'import random\n'), ((23398, 23420), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (23412, 23420), False, 'import random\n'), ((24002, 24024), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (24016, 24024), False, 'import random\n'), ((25385, 25407), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (25399, 25407), False, 'import random\n'), ((25566, 25588), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (25580, 25588), False, 'import random\n'), ((26170, 26192), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (26184, 26192), False, 'import random\n'), ((27500, 27521), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (27514, 27521), False, 'import random\n'), ((27522, 27543), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (27536, 27543), False, 'import random\n'), ((27910, 27932), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (27924, 27932), False, 'import random\n'), ((28855, 28877), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (28869, 28877), False, 'import random\n'), ((29841, 29862), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (29855, 29862), False, 'import random\n'), ((29863, 29884), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (29877, 29884), False, 'import random\n'), ((30673, 30694), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (30687, 30694), False, 'import random\n'), ((30695, 30716), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (30709, 30716), False, 'import random\n'), ((31505, 31526), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (31519, 31526), False, 'import random\n'), ((31527, 31548), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (31541, 31548), False, 'import random\n'), ((33428, 33450), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (33442, 33450), False, 'import random\n'), ((34845, 34866), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (34859, 34866), False, 'import random\n'), ((35015, 35037), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (35029, 35037), False, 'import random\n'), ((35788, 35810), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (35802, 35810), False, 'import random\n'), ((37146, 37168), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (37160, 37168), False, 'import random\n'), ((38506, 38528), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (38520, 38528), False, 'import random\n'), ((38687, 38709), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (38701, 38709), False, 'import random\n'), ((38993, 39015), 'random.randint', 'random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (39007, 39015), False, 'import random\n'), ((40613, 41036), 'random.choice', 'random.choice', (['[scalar_matrix_exp, transpose_matrix, check_named_matrix,\n create_named_scalar_matrix, create_matrix, dot_hat_matrix,\n scalar_multiply_matrix, multiply_matrices, scalar_add_matrix,\n add_matrices, reshape, scalar_sub_matrix, sub_matrices,\n dot_multiply_matrices, add_func, dot_divide_matrices, sigmoid, log_two,\n log_ten, get_columns, identity, magnitude, zeros, ones, sub_func,\n sum_matrix]'], {}), '([scalar_matrix_exp, transpose_matrix, check_named_matrix,\n create_named_scalar_matrix, create_matrix, dot_hat_matrix,\n scalar_multiply_matrix, multiply_matrices, scalar_add_matrix,\n add_matrices, reshape, scalar_sub_matrix, sub_matrices,\n dot_multiply_matrices, add_func, dot_divide_matrices, sigmoid, log_two,\n log_ten, get_columns, identity, magnitude, zeros, ones, sub_func,\n sum_matrix])\n', (40626, 41036), False, 'import random\n'), ((41890, 41920), 'random.randint', 'random.randint', (['(0)', '(2 ** 64 - 1)'], {}), '(0, 2 ** 64 - 1)\n', (41904, 41920), False, 'import random\n'), ((294, 305), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (302, 305), False, 'import sys\n'), ((4555, 4576), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (4569, 4576), False, 'import random\n'), ((4577, 4598), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (4591, 4598), False, 'import random\n'), ((8484, 8505), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (8498, 8505), False, 'import random\n'), ((8506, 8527), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (8520, 8527), False, 'import random\n'), ((10488, 10509), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (10502, 10509), False, 'import random\n'), ((10510, 10531), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (10524, 10531), False, 'import random\n'), ((12129, 12150), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (12143, 12150), False, 'import random\n'), ((12151, 12172), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (12165, 12172), False, 'import random\n'), ((14048, 14069), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (14062, 14069), False, 'import random\n'), ((14070, 14091), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (14084, 14091), False, 'import random\n'), ((15860, 15881), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (15874, 15881), False, 'import random\n'), ((15882, 15903), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (15896, 15903), False, 'import random\n'), ((17806, 17827), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (17820, 17827), False, 'import random\n'), ((17828, 17849), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (17842, 17849), False, 'import random\n'), ((19841, 19862), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (19855, 19862), False, 'import random\n'), ((19863, 19884), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (19877, 19884), False, 'import random\n'), ((21647, 21668), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (21661, 21668), False, 'import random\n'), ((21669, 21690), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (21683, 21690), False, 'import random\n'), ((23616, 23637), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (23630, 23637), False, 'import random\n'), ((23638, 23659), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (23652, 23659), False, 'import random\n'), ((25784, 25805), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (25798, 25805), False, 'import random\n'), ((25806, 25827), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (25820, 25827), False, 'import random\n'), ((29073, 29094), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (29087, 29094), False, 'import random\n'), ((29095, 29116), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (29109, 29116), False, 'import random\n'), ((29194, 29207), 'numpy.exp', 'numpy.exp', (['(-C)'], {}), '(-C)\n', (29203, 29207), False, 'import numpy\n'), ((38903, 38924), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (38917, 38924), False, 'import random\n'), ((38925, 38946), 'random.randint', 'random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (38939, 38946), False, 'import random\n'), ((41363, 41373), 'sys.exit', 'sys.exit', ([], {}), '()\n', (41371, 41373), False, 'import sys\n'), ((41784, 41795), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (41792, 41795), False, 'import sys\n'), ((2115, 2159), 'random.randint', 'random.randint', (['start_interval', 'end_interval'], {}), '(start_interval, end_interval)\n', (2129, 2159), False, 'import random\n'), ((6054, 6069), 'numpy.array', 'numpy.array', (['nm'], {}), '(nm)\n', (6065, 6069), False, 'import numpy\n'), ((7309, 7323), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (7320, 7323), False, 'import numpy\n'), ((7325, 7339), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (7336, 7339), False, 'import numpy\n'), ((8953, 8967), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (8964, 8967), False, 'import numpy\n'), ((8970, 8984), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (8981, 8984), False, 'import numpy\n'), ((10710, 10724), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (10721, 10724), False, 'import numpy\n'), ((12597, 12611), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (12608, 12611), False, 'import numpy\n'), ((12614, 12628), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (12625, 12628), False, 'import numpy\n'), ((14432, 14446), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (14443, 14446), False, 'import numpy\n'), ((14449, 14463), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (14460, 14463), False, 'import numpy\n'), ((16386, 16400), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (16397, 16400), False, 'import numpy\n'), ((16403, 16417), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (16414, 16417), False, 'import numpy\n'), ((26313, 26327), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (26324, 26327), False, 'import numpy\n'), ((26330, 26344), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (26341, 26344), False, 'import numpy\n'), ((4718, 4733), 'numpy.array', 'numpy.array', (['nm'], {}), '(nm)\n', (4729, 4733), False, 'import numpy\n'), ((10690, 10704), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (10701, 10704), False, 'import numpy\n'), ((18148, 18162), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (18159, 18162), False, 'import numpy\n'), ((18165, 18179), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (18176, 18179), False, 'import numpy\n'), ((18337, 18351), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (18348, 18351), False, 'import numpy\n'), ((18354, 18368), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (18365, 18368), False, 'import numpy\n'), ((21935, 21949), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (21946, 21949), False, 'import numpy\n'), ((21952, 21966), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (21963, 21966), False, 'import numpy\n'), ((22056, 22070), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (22067, 22070), False, 'import numpy\n'), ((22073, 22087), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (22084, 22087), False, 'import numpy\n'), ((24099, 24113), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (24110, 24113), False, 'import numpy\n'), ((24116, 24130), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (24127, 24130), False, 'import numpy\n'), ((24203, 24217), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (24214, 24217), False, 'import numpy\n'), ((24220, 24234), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (24231, 24234), False, 'import numpy\n'), ((28009, 28023), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (28020, 28023), False, 'import numpy\n'), ((28026, 28040), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (28037, 28040), False, 'import numpy\n'), ((28113, 28127), 'numpy.array', 'numpy.array', (['B'], {}), '(B)\n', (28124, 28127), False, 'import numpy\n'), ((28130, 28144), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (28141, 28144), False, 'import numpy\n'), ((39049, 39063), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (39060, 39063), False, 'import numpy\n'), ((39228, 39242), 'numpy.array', 'numpy.array', (['A'], {}), '(A)\n', (39239, 39242), False, 'import numpy\n')] |
import sys
import re
import os
from collections import OrderedDict
from itertools import groupby
from sqlalchemy.orm import joinedload
from sqlalchemy import func, and_
from clld.cliutil import Data
from clld.db.meta import DBSession
from clld.db.models import common
from clld.lib.bibtex import EntryType
from clld.web.util.helpers import data_uri
from clldutils.color import qualitative_colors, rgb_as_hex
from clldutils.path import Path
from clldutils.misc import slug
from pycldf import Wordlist
from clld_phylogeny_plugin.models import Phylogeny, TreeLabel, LanguageTreeLabel
from clld_cognacy_plugin.models import Cognate, Cognateset
from csvw.dsv import reader
import cobl2
from cobl2 import models
import clld_cognacy_plugin.models
data_file_path = Path(cobl2.__file__).parent / '../..' / 'iecor'
ds = Wordlist.from_metadata(data_file_path / 'cldf' / 'cldf-metadata.json')
photos = {
p.stem: p.as_posix() for p in
(Path(cobl2.__file__).parent / '../..' / 'CoBL-public' / 'cobl' / 'static' / 'contributors').iterdir()
if p.suffix == '.jpg'}
for k, v in {
'Kümmel': 'Kuemmel',
'de Vaan': 'deVaan',
'Dewey-Findell': 'Dewey',
}.items():
photos[k] = photos[v]
def main(args):
data = Data()
dataset = common.Dataset(
id=cobl2.__name__,
name="IE-CoR",
publisher_name="Max Planck Institute for Evolutionary Anthropology",
publisher_place="Leipzig",
publisher_url="https://www.eva.mpg.de",
license="https://creativecommons.org/licenses/by/4.0/",
domain='iecor.clld.org',
contact='<EMAIL>',
jsondata={
'license_icon': 'cc-by.png',
'license_name': 'Creative Commons Attribution 4.0 International License'})
DBSession.add(dataset)
editors = OrderedDict([('Heggarty', None), ('Anderson', None), ('Scarborough', None)])
for row in sorted(ds['authors.csv'], key=lambda x: [
x['Last_Name'].lower(), x['First_Name'].lower()]):
if row['Last_Name'] in editors:
editors[row['Last_Name']] = row['ID']
data.add(
models.Author,
row['ID'],
id=row['ID'],
name='{0} {1}'.format(row['First_Name'], row['Last_Name']),
url=row['URL'],
photo=data_uri(photos[row['Last_Name']], 'image/jpg') if row['Last_Name'] in photos else None)
for i, cid in enumerate(editors.values()):
common.Editor(dataset=dataset, contributor=data['Author'][cid], ord=i + 1)
for src in ds.sources.items():
for invalid in ['isbn', 'part', 'institution']:
if invalid in src:
del src[invalid]
data.add(
common.Source,
src.id,
id=src.id,
name=src.get('author', src.get('editor')),
description=src.get('title', src.get('booktitle')),
bibtex_type=getattr(EntryType, src.genre, EntryType.misc),
**src)
re_links = re.compile(r'\[(?P<label>[^\]]+?)\]\((?P<type>.+?)-(?P<id>\d+)\)')
link_map = {
'cog': '/cognatesets/',
'lex': '/values/',
'src': '/sources/',
}
def parse_links(m):
try:
return '<a href="{}{}">{}</a>'.format(
link_map[m.group('type')], m.group('id'), m.group('label'))
except KeyError:
print("parse_links: type error in '{}'".format(":".join(m.groups())))
return '[{}]({}-{})'.format(m.group('label'), m.group('type'), m.group('id'))
for param in ds['ParameterTable']:
data.add(
models.Meaning,
param['ID'],
id=slug(param['Name']),
name=param['Name'],
description_md=param['Description_md'],
concepticon_id=int(param['Concepticon_ID']) if param['Concepticon_ID'] != '0' else None,
)
for row in ds['clades.csv']:
data.add(
models.Clade,
row['ID'],
id=row['ID'],
level0_name=row['level0_name'],
level1_name=row['level1_name'],
level2_name=row['level2_name'],
level3_name=row['level3_name'],
clade_level0=row['clade_level0'],
clade_level1=row['clade_level1'],
clade_level2=row['clade_level2'],
clade_level3=row['clade_level3'],
clade_name=row['clade_name'],
short_name=row['short_name'],
color=row['color'],
)
for row in ds['LanguageTable']:
c = data.add(
common.Contribution,
row['ID'],
id=row['ID'],
name=row['Name'],
)
for i, cid in enumerate(row['Author_ID']):
DBSession.add(common.ContributionContributor(
contribution=c, contributor=data['Author'][cid], ord=i + 1))
data.add(
models.Variety,
row['ID'],
id=slug(row['Name']),
name=row['Name'],
latitude=float(row['Latitude']) if row['Latitude'] is not None else None,
longitude=float(row['Longitude']) if row['Longitude'] is not None else None,
contribution=c,
color=rgb_as_hex(row['Color']),
clade=', '.join(filter(None, row['Clade'])),
clade_name=row['clade_name'],
glottocode=row['Glottocode'],
historical=row['historical'],
distribution=row['distribution'],
logNormalMean=row['logNormalMean'],
logNormalOffset=row['logNormalOffset'],
logNormalStDev=row['logNormalStDev'],
normalMean=row['normalMean'],
normalStDev=row['normalStDev'],
ascii_name=row['ascii_name'],
iso=row['ISO639P3code'],
lang_description=row['Description'],
variety=row['Variety'],
loc_justification=row['loc_justification'] or None,
sort_order=row['sort_order']
)
vsrs = set()
for row in ds['FormTable']:
vs = data['ValueSet'].get((row['Language_ID'], row['Parameter_ID']))
if not vs:
vs = data.add(
common.ValueSet,
(row['Language_ID'], row['Parameter_ID']),
id='{0}-{1}'.format(row['Language_ID'], row['Parameter_ID']),
language=data['Variety'][row['Language_ID']],
parameter=data['Meaning'][row['Parameter_ID']],
contribution=data['Contribution'][row['Language_ID']],
)
v = data.add(
models.Lexeme,
row['ID'],
id=row['ID'],
name=row['Form'],
native_script=row['native_script'],
phonetic=row['phon_form'],
phonemic=row['Phonemic'],
comment=re_links.sub(parse_links, row['Comment'] or ''),
url=row['url'],
gloss=row['Gloss'],
valueset=vs
)
for src in row['Source']:
sid, pages = ds.sources.parse(src)
key = (vs.id, sid, pages)
if pages:
pages = pages.replace('|', ';')
if key not in vsrs:
DBSession.add(common.ValueSetReference(
valueset=vs, source=data['Source'][sid], description=pages))
vsrs.add(key)
for row in ds['CognatesetTable']:
cc = data.add(
models.CognateClass,
row['ID'],
id=row['ID'],
name=row['ID'],
root_form=row['Root_Form_calc'] if row['Root_Form_calc'] is not None and len(row['Root_Form_calc']) else row['Root_Form'],
root_form_calc=row['Root_Form_calc'] or None,
root_gloss=row['Root_Gloss'] or None,
root_language=row['Root_Language_calc'] if row['Root_Language_calc'] is not None and len(row['Root_Language_calc']) else row['Root_Language'],
root_language_calc=row['Root_Language_calc'] or None,
comment=re_links.sub(parse_links, row['Comment'] or ''),
justification=re_links.sub(parse_links, row['Justification'] or ''),
ideophonic=row['Ideophonic'] or None,
parallel_derivation=row['parallelDerivation'] or None,
revised_by=','.join(row['revised_by']) or None,
superset_id=int(row['supersetid']) if row['supersetid'] else None,
)
for src in row['Source']:
sid, pages = ds.sources.parse(src)
if pages:
pages = pages.replace('|', ';')
DBSession.add(clld_cognacy_plugin.models.CognatesetReference(
cognateset=cc, source=data['Source'][sid], description=pages))
DBSession.flush()
cc_id_pk_map = {str(ccid): cc.pk for ccid, cc in data['CognateClass'].items()}
for row in ds['CognatesetTable']:
if row['proposedAsCognateTo_pk']:
DBSession.add(models.ProposedCognates(
cc1_pk=data['CognateClass'][row['ID']].pk,
cc2_pk=cc_id_pk_map[str(row['proposedAsCognateTo_pk'])],
scale=row['proposedAsCognateToScale']
))
DBSession.flush()
loans = {ln['Cognateset_ID']: ln for ln in ds['loans.csv']}
for ccid, cc in data['CognateClass'].items():
if ccid in loans:
le = loans[ccid]
if le['SourceCognateset_ID']:
cc.loan_source_pk = data['CognateClass'][le['SourceCognateset_ID']].pk
else:
cc.loan_source_pk = None
cc.loan_notes = le['Comment']
cc.loan_source_languoid = le['Source_languoid']
cc.loan_source_form = le['Source_form']
cc.parallel_loan_event = le['Parallel_loan_event']
cc.is_loan = True
for row in ds['CognateTable']:
cc = data['CognateClass'][row['Cognateset_ID']]
if cc.meaning_pk is None:
cc.meaning_pk = data['Lexeme'][row['Form_ID']].valueset.parameter_pk
else:
assert data['Lexeme'][row['Form_ID']].valueset.parameter_pk == cc.meaning_pk
data.add(
clld_cognacy_plugin.models.Cognate,
row['ID'],
cognateset=data['CognateClass'][row['Cognateset_ID']],
counterpart=data['Lexeme'][row['Form_ID']],
doubt=row['Doubt'],
)
l_by_gc = {}
for s in DBSession.query(models.Variety):
l_by_gc[s.glottocode] = s.pk
tree = Phylogeny(
id='1',
name='Bouckaert et al.',
description='',
newick=Path.read_text(data_file_path / 'raw' / 'bouckaert_et_al2012' / 'newick.txt'),
)
for k, taxon in enumerate(reader(data_file_path / 'raw' / 'bouckaert_et_al2012' / 'taxa.csv', namedtuples=True)):
label = TreeLabel(
id='{0}-{1}-{2}'.format(tree.id, slug(taxon.taxon), k + 1),
name=taxon.taxon,
phylogeny=tree,
description=taxon.glottocode)
if taxon.glottocode in l_by_gc:
LanguageTreeLabel(language_pk=l_by_gc[taxon.glottocode], treelabel=label)
DBSession.add(tree)
l_by_ascii = {}
for s in DBSession.query(models.Variety):
l_by_ascii[s.ascii_name] = s.pk
tree = Phylogeny(
id='2',
name='CoBL consensu',
description='',
newick=Path.read_text(data_file_path / 'raw' / 'ie122' / 'newick.txt'),
)
for k, taxon in enumerate(reader(data_file_path / 'raw' / 'ie122' / 'taxa.csv', namedtuples=True)):
label = TreeLabel(
id='{0}-{1}-{2}'.format(tree.id, slug(taxon.taxon), k + 1),
name=taxon.taxon,
phylogeny=tree)
if taxon.taxon in l_by_ascii:
LanguageTreeLabel(language_pk=l_by_ascii[taxon.taxon], treelabel=label)
DBSession.add(tree)
def prime_cache(args):
"""If data needs to be denormalized for lookup, do that here.
This procedure should be separate from the db initialization, because
it will have to be run periodically whenever data has been updated.
"""
ordered_clade_colors = {k: v for k, v in DBSession.query(models.Clade.clade_name, models.Clade.color)
.filter(models.Clade.short_name != '')
.order_by(models.Clade.clade_level0).all()}
for _, cc in groupby(
DBSession.query(models.CognateClass, models.Variety.clade_name)
.join(clld_cognacy_plugin.models.Cognate,
and_(models.CognateClass.pk == clld_cognacy_plugin.models.Cognate.cognateset_pk))
.join(models.Value,
and_(clld_cognacy_plugin.models.Cognate.counterpart_pk == models.Value.pk))
.join(common.ValueSet,
and_(models.Value.valueset_pk == common.ValueSet.pk))
.join(models.Variety,
and_(common.ValueSet.language_pk == models.Variety.pk))
.distinct().order_by(models.CognateClass.pk), lambda c: c[0].pk):
cc = sorted(list(cc))
cc[0][0].count_clades = len(cc)
involved_clades = [c[1] for c in cc]
r = []
for cl, col in ordered_clade_colors.items():
if cl in involved_clades:
r.append(col)
else:
r.append('0')
cc[0][0].involved_clade_colors = ' '.join(r)
cc[0][0].clades = ', '.join(involved_clades)
for c in DBSession.query(models.CognateClass, func.count(models.CognateClass.id)) \
.join(clld_cognacy_plugin.models.Cognate) \
.group_by(models.CognateClass.pk, models.Cognateset.pk, models.CognateClass.id):
c[0].count_lexemes = c[1]
for _, ccs in groupby(
DBSession.query(models.CognateClass).order_by(models.CognateClass.meaning_pk),
lambda c: c.meaning_pk
):
ccs = list(ccs)
colors = qualitative_colors(len(ccs))
for i, cc in enumerate(ccs):
cc.color = colors[i]
for meaning in DBSession.query(models.Meaning).options(
joinedload(models.Meaning.cognateclasses),
joinedload(common.Parameter.valuesets, common.ValueSet.language)
):
meaning.count_cognateclasses = len(meaning.cognateclasses)
meaning.count_languages = len([vs.language for vs in meaning.valuesets])
meaning.count_loan_cognateclasses = len([cc for cc in meaning.cognateclasses
if cc.is_loan])
for meaning in DBSession.query(
models.Meaning, func.count(common.Parameter.pk))\
.join(common.Parameter).join(common.ValueSet).join(common.Value).group_by(
models.Meaning.pk, common.Parameter.pk):
meaning[0].count_lexemes = meaning[1]
for language in DBSession.query(common.Language).options(
joinedload(common.Language.valuesets, common.ValueSet.references)
):
language.count_meanings = len(language.valuesets)
language.count_lexemes = len(DBSession.query(common.Value.id)
.filter(common.ValueSet.language_pk == language.pk)
.join(common.ValueSet).all())
spks = set()
for vs in language.valuesets:
for ref in vs.references:
spks.add(ref.source_pk)
for spk in spks:
DBSession.add(common.LanguageSource(language_pk=language.pk, source_pk=spk))
if __name__ == '__main__':
initializedb(create=main, prime_cache=prime_cache)
sys.exit(0)
| [
"re.compile",
"sys.exit",
"sqlalchemy.and_",
"clldutils.misc.slug",
"sqlalchemy.func.count",
"clld.db.meta.DBSession.query",
"clld.db.meta.DBSession.flush",
"clld.web.util.helpers.data_uri",
"clldutils.path.Path.read_text",
"clld_phylogeny_plugin.models.LanguageTreeLabel",
"clldutils.path.Path",
"collections.OrderedDict",
"clld.db.models.common.ContributionContributor",
"clld.cliutil.Data",
"sqlalchemy.orm.joinedload",
"clld.db.models.common.ValueSetReference",
"clld.db.meta.DBSession.add",
"clld.db.models.common.LanguageSource",
"clldutils.color.rgb_as_hex",
"csvw.dsv.reader",
"clld.db.models.common.Editor",
"pycldf.Wordlist.from_metadata",
"clld.db.models.common.Dataset"
] | [((816, 886), 'pycldf.Wordlist.from_metadata', 'Wordlist.from_metadata', (["(data_file_path / 'cldf' / 'cldf-metadata.json')"], {}), "(data_file_path / 'cldf' / 'cldf-metadata.json')\n", (838, 886), False, 'from pycldf import Wordlist\n'), ((1227, 1233), 'clld.cliutil.Data', 'Data', ([], {}), '()\n', (1231, 1233), False, 'from clld.cliutil import Data\n'), ((1249, 1670), 'clld.db.models.common.Dataset', 'common.Dataset', ([], {'id': 'cobl2.__name__', 'name': '"""IE-CoR"""', 'publisher_name': '"""Max Planck Institute for Evolutionary Anthropology"""', 'publisher_place': '"""Leipzig"""', 'publisher_url': '"""https://www.eva.mpg.de"""', 'license': '"""https://creativecommons.org/licenses/by/4.0/"""', 'domain': '"""iecor.clld.org"""', 'contact': '"""<EMAIL>"""', 'jsondata': "{'license_icon': 'cc-by.png', 'license_name':\n 'Creative Commons Attribution 4.0 International License'}"}), "(id=cobl2.__name__, name='IE-CoR', publisher_name=\n 'Max Planck Institute for Evolutionary Anthropology', publisher_place=\n 'Leipzig', publisher_url='https://www.eva.mpg.de', license=\n 'https://creativecommons.org/licenses/by/4.0/', domain='iecor.clld.org',\n contact='<EMAIL>', jsondata={'license_icon': 'cc-by.png',\n 'license_name': 'Creative Commons Attribution 4.0 International License'})\n", (1263, 1670), False, 'from clld.db.models import common\n'), ((1751, 1773), 'clld.db.meta.DBSession.add', 'DBSession.add', (['dataset'], {}), '(dataset)\n', (1764, 1773), False, 'from clld.db.meta import DBSession\n'), ((1789, 1865), 'collections.OrderedDict', 'OrderedDict', (["[('Heggarty', None), ('Anderson', None), ('Scarborough', None)]"], {}), "([('Heggarty', None), ('Anderson', None), ('Scarborough', None)])\n", (1800, 1865), False, 'from collections import OrderedDict\n'), ((2977, 3048), 're.compile', 're.compile', (['"""\\\\[(?P<label>[^\\\\]]+?)\\\\]\\\\((?P<type>.+?)-(?P<id>\\\\d+)\\\\)"""'], {}), "('\\\\[(?P<label>[^\\\\]]+?)\\\\]\\\\((?P<type>.+?)-(?P<id>\\\\d+)\\\\)')\n", (2987, 3048), False, 'import re\n'), ((8681, 8698), 'clld.db.meta.DBSession.flush', 'DBSession.flush', ([], {}), '()\n', (8696, 8698), False, 'from clld.db.meta import DBSession\n'), ((9119, 9136), 'clld.db.meta.DBSession.flush', 'DBSession.flush', ([], {}), '()\n', (9134, 9136), False, 'from clld.db.meta import DBSession\n'), ((10337, 10368), 'clld.db.meta.DBSession.query', 'DBSession.query', (['models.Variety'], {}), '(models.Variety)\n', (10352, 10368), False, 'from clld.db.meta import DBSession\n'), ((11050, 11069), 'clld.db.meta.DBSession.add', 'DBSession.add', (['tree'], {}), '(tree)\n', (11063, 11069), False, 'from clld.db.meta import DBSession\n'), ((11104, 11135), 'clld.db.meta.DBSession.query', 'DBSession.query', (['models.Variety'], {}), '(models.Variety)\n', (11119, 11135), False, 'from clld.db.meta import DBSession\n'), ((11743, 11762), 'clld.db.meta.DBSession.add', 'DBSession.add', (['tree'], {}), '(tree)\n', (11756, 11762), False, 'from clld.db.meta import DBSession\n'), ((15440, 15451), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (15448, 15451), False, 'import sys\n'), ((2433, 2507), 'clld.db.models.common.Editor', 'common.Editor', ([], {'dataset': 'dataset', 'contributor': "data['Author'][cid]", 'ord': '(i + 1)'}), "(dataset=dataset, contributor=data['Author'][cid], ord=i + 1)\n", (2446, 2507), False, 'from clld.db.models import common\n'), ((10633, 10722), 'csvw.dsv.reader', 'reader', (["(data_file_path / 'raw' / 'bouckaert_et_al2012' / 'taxa.csv')"], {'namedtuples': '(True)'}), "(data_file_path / 'raw' / 'bouckaert_et_al2012' / 'taxa.csv',\n namedtuples=True)\n", (10639, 10722), False, 'from csvw.dsv import reader\n'), ((11386, 11457), 'csvw.dsv.reader', 'reader', (["(data_file_path / 'raw' / 'ie122' / 'taxa.csv')"], {'namedtuples': '(True)'}), "(data_file_path / 'raw' / 'ie122' / 'taxa.csv', namedtuples=True)\n", (11392, 11457), False, 'from csvw.dsv import reader\n'), ((13967, 14008), 'sqlalchemy.orm.joinedload', 'joinedload', (['models.Meaning.cognateclasses'], {}), '(models.Meaning.cognateclasses)\n', (13977, 14008), False, 'from sqlalchemy.orm import joinedload\n'), ((14018, 14082), 'sqlalchemy.orm.joinedload', 'joinedload', (['common.Parameter.valuesets', 'common.ValueSet.language'], {}), '(common.Parameter.valuesets, common.ValueSet.language)\n', (14028, 14082), False, 'from sqlalchemy.orm import joinedload\n'), ((14744, 14809), 'sqlalchemy.orm.joinedload', 'joinedload', (['common.Language.valuesets', 'common.ValueSet.references'], {}), '(common.Language.valuesets, common.ValueSet.references)\n', (14754, 14809), False, 'from sqlalchemy.orm import joinedload\n'), ((762, 782), 'clldutils.path.Path', 'Path', (['cobl2.__file__'], {}), '(cobl2.__file__)\n', (766, 782), False, 'from clldutils.path import Path\n'), ((10518, 10595), 'clldutils.path.Path.read_text', 'Path.read_text', (["(data_file_path / 'raw' / 'bouckaert_et_al2012' / 'newick.txt')"], {}), "(data_file_path / 'raw' / 'bouckaert_et_al2012' / 'newick.txt')\n", (10532, 10595), False, 'from clldutils.path import Path\n'), ((10972, 11045), 'clld_phylogeny_plugin.models.LanguageTreeLabel', 'LanguageTreeLabel', ([], {'language_pk': 'l_by_gc[taxon.glottocode]', 'treelabel': 'label'}), '(language_pk=l_by_gc[taxon.glottocode], treelabel=label)\n', (10989, 11045), False, 'from clld_phylogeny_plugin.models import Phylogeny, TreeLabel, LanguageTreeLabel\n'), ((11285, 11348), 'clldutils.path.Path.read_text', 'Path.read_text', (["(data_file_path / 'raw' / 'ie122' / 'newick.txt')"], {}), "(data_file_path / 'raw' / 'ie122' / 'newick.txt')\n", (11299, 11348), False, 'from clldutils.path import Path\n'), ((11667, 11738), 'clld_phylogeny_plugin.models.LanguageTreeLabel', 'LanguageTreeLabel', ([], {'language_pk': 'l_by_ascii[taxon.taxon]', 'treelabel': 'label'}), '(language_pk=l_by_ascii[taxon.taxon], treelabel=label)\n', (11684, 11738), False, 'from clld_phylogeny_plugin.models import Phylogeny, TreeLabel, LanguageTreeLabel\n'), ((13918, 13949), 'clld.db.meta.DBSession.query', 'DBSession.query', (['models.Meaning'], {}), '(models.Meaning)\n', (13933, 13949), False, 'from clld.db.meta import DBSession\n'), ((14694, 14726), 'clld.db.meta.DBSession.query', 'DBSession.query', (['common.Language'], {}), '(common.Language)\n', (14709, 14726), False, 'from clld.db.meta import DBSession\n'), ((3642, 3661), 'clldutils.misc.slug', 'slug', (["param['Name']"], {}), "(param['Name'])\n", (3646, 3661), False, 'from clldutils.misc import slug\n'), ((4729, 4824), 'clld.db.models.common.ContributionContributor', 'common.ContributionContributor', ([], {'contribution': 'c', 'contributor': "data['Author'][cid]", 'ord': '(i + 1)'}), "(contribution=c, contributor=data['Author'][\n cid], ord=i + 1)\n", (4759, 4824), False, 'from clld.db.models import common\n'), ((4922, 4939), 'clldutils.misc.slug', 'slug', (["row['Name']"], {}), "(row['Name'])\n", (4926, 4939), False, 'from clldutils.misc import slug\n'), ((5192, 5216), 'clldutils.color.rgb_as_hex', 'rgb_as_hex', (["row['Color']"], {}), "(row['Color'])\n", (5202, 5216), False, 'from clldutils.color import qualitative_colors, rgb_as_hex\n'), ((13641, 13677), 'clld.db.meta.DBSession.query', 'DBSession.query', (['models.CognateClass'], {}), '(models.CognateClass)\n', (13656, 13677), False, 'from clld.db.meta import DBSession\n'), ((15289, 15350), 'clld.db.models.common.LanguageSource', 'common.LanguageSource', ([], {'language_pk': 'language.pk', 'source_pk': 'spk'}), '(language_pk=language.pk, source_pk=spk)\n', (15310, 15350), False, 'from clld.db.models import common\n'), ((2288, 2335), 'clld.web.util.helpers.data_uri', 'data_uri', (["photos[row['Last_Name']]", '"""image/jpg"""'], {}), "(photos[row['Last_Name']], 'image/jpg')\n", (2296, 2335), False, 'from clld.web.util.helpers import data_uri\n'), ((7183, 7271), 'clld.db.models.common.ValueSetReference', 'common.ValueSetReference', ([], {'valueset': 'vs', 'source': "data['Source'][sid]", 'description': 'pages'}), "(valueset=vs, source=data['Source'][sid],\n description=pages)\n", (7207, 7271), False, 'from clld.db.models import common\n'), ((10793, 10810), 'clldutils.misc.slug', 'slug', (['taxon.taxon'], {}), '(taxon.taxon)\n', (10797, 10810), False, 'from clldutils.misc import slug\n'), ((11532, 11549), 'clldutils.misc.slug', 'slug', (['taxon.taxon'], {}), '(taxon.taxon)\n', (11536, 11549), False, 'from clldutils.misc import slug\n'), ((13384, 13418), 'sqlalchemy.func.count', 'func.count', (['models.CognateClass.id'], {}), '(models.CognateClass.id)\n', (13394, 13418), False, 'from sqlalchemy import func, and_\n'), ((12794, 12848), 'sqlalchemy.and_', 'and_', (['(common.ValueSet.language_pk == models.Variety.pk)'], {}), '(common.ValueSet.language_pk == models.Variety.pk)\n', (12798, 12848), False, 'from sqlalchemy import func, and_\n'), ((12053, 12113), 'clld.db.meta.DBSession.query', 'DBSession.query', (['models.Clade.clade_name', 'models.Clade.color'], {}), '(models.Clade.clade_name, models.Clade.color)\n', (12068, 12113), False, 'from clld.db.meta import DBSession\n'), ((12688, 12740), 'sqlalchemy.and_', 'and_', (['(models.Value.valueset_pk == common.ValueSet.pk)'], {}), '(models.Value.valueset_pk == common.ValueSet.pk)\n', (12692, 12740), False, 'from sqlalchemy import func, and_\n'), ((14912, 14944), 'clld.db.meta.DBSession.query', 'DBSession.query', (['common.Value.id'], {}), '(common.Value.id)\n', (14927, 14944), False, 'from clld.db.meta import DBSession\n'), ((938, 958), 'clldutils.path.Path', 'Path', (['cobl2.__file__'], {}), '(cobl2.__file__)\n', (942, 958), False, 'from clldutils.path import Path\n'), ((14449, 14480), 'sqlalchemy.func.count', 'func.count', (['common.Parameter.pk'], {}), '(common.Parameter.pk)\n', (14459, 14480), False, 'from sqlalchemy import func, and_\n'), ((12559, 12633), 'sqlalchemy.and_', 'and_', (['(clld_cognacy_plugin.models.Cognate.counterpart_pk == models.Value.pk)'], {}), '(clld_cognacy_plugin.models.Cognate.counterpart_pk == models.Value.pk)\n', (12563, 12633), False, 'from sqlalchemy import func, and_\n'), ((12427, 12512), 'sqlalchemy.and_', 'and_', (['(models.CognateClass.pk == clld_cognacy_plugin.models.Cognate.cognateset_pk)'], {}), '(models.CognateClass.pk == clld_cognacy_plugin.models.Cognate.cognateset_pk\n )\n', (12431, 12512), False, 'from sqlalchemy import func, and_\n'), ((12291, 12354), 'clld.db.meta.DBSession.query', 'DBSession.query', (['models.CognateClass', 'models.Variety.clade_name'], {}), '(models.CognateClass, models.Variety.clade_name)\n', (12306, 12354), False, 'from clld.db.meta import DBSession\n')] |
import sys
import django
import django.db
import tsadmuser
from .base import TSAdmView
from ..log import TSAdmLogger
logger = TSAdmLogger(__name__)
class HomeView(TSAdmView):
template_name = 'tsadm/home.html'
def __init__(self):
logger.debug('HomeView init')
super(HomeView, self).__init__()
def get_context_data(self, **kwargs):
logger.debug('get_context_data')
context = super(HomeView, self).get_context_data(**kwargs)
context['tsadm']['userSites'] = tsadmuser.sitesAll(self.tsadm.user)
return context
class InfoView(TSAdmView):
template_name = 'tsadm/info.html'
def __init__(self):
logger.debug('InfoView init')
super(InfoView, self).__init__()
def _dbconns(self):
dbConnections = list()
for dbconn in django.db.connections.all():
dbinfo = {
'alias': dbconn.alias,
'version': dbconn.Database.version,
'name': dbconn.settings_dict['NAME'],
'vendor': dbconn.vendor,
'vendorVersion': None,
}
if dbconn.vendor == 'sqlite':
dbinfo['vendorVersion'] = dbconn.Database.sqlite_version
dbConnections.append(dbinfo)
return dbConnections
def get_context_data(self, **kwargs):
logger.debug('get_context_data')
context = super(InfoView, self).get_context_data(**kwargs)
context['tsadm']['config'] = self.tsadm.cfg.dumps()
context['tsadm']['dbConnections'] = self._dbconns()
context['python'] = {
'version': '{}.{}.{}'.format(sys.version_info.major, sys.version_info.minor, sys.version_info.micro),
}
context['django'] = {
'version': django.get_version(),
}
return context
| [
"tsadmuser.sitesAll",
"django.db.connections.all",
"django.get_version"
] | [((511, 546), 'tsadmuser.sitesAll', 'tsadmuser.sitesAll', (['self.tsadm.user'], {}), '(self.tsadm.user)\n', (529, 546), False, 'import tsadmuser\n'), ((819, 846), 'django.db.connections.all', 'django.db.connections.all', ([], {}), '()\n', (844, 846), False, 'import django\n'), ((1773, 1793), 'django.get_version', 'django.get_version', ([], {}), '()\n', (1791, 1793), False, 'import django\n')] |
"""
# Sample code to perform I/O:
name = input() # Reading input from STDIN
print('Hi, %s.' % name) # Writing output to STDOUT
# Warning: Printing unwanted or ill-formatted data to output will cause the test cases to fail
"""
# Write your code here
from bisect import bisect_left
t = int(input())
for _ in range(t):
n, q = map(int, input().strip().split())
ranges = []
for _ in range(n):
a, b = map(int, input().split())
ranges.append([a, b])
ranges.sort()
final_range = [ranges[0]]
j = 0
for i in range(1, n):
if ranges[i][0] <= final_range[j][1]:
final_range[j][1] = max(ranges[i][1], final_range[j][1])
else:
final_range.append(ranges[i])
j += 1
total = 0
prefix = []
for r in final_range:
total += r[1] - r[0] + 1
prefix.append(total)
for _ in range(q):
k = int(input())
if k > total:
print(-1)
else:
index = bisect_left(prefix, k)
if index:
offset = k - prefix[index - 1] - 1
else:
offset = k - 1
print(final_range[index][0] + offset)
| [
"bisect.bisect_left"
] | [((1018, 1040), 'bisect.bisect_left', 'bisect_left', (['prefix', 'k'], {}), '(prefix, k)\n', (1029, 1040), False, 'from bisect import bisect_left\n')] |
from sqlalchemy.orm import backref
from app import db
from app.models.event import UserEventBaseModel
class MissionEnd(UserEventBaseModel):
backref_base_name = "mission_ends"
mission_id = db.Column(
db.Integer, db.ForeignKey("mission.id"), index=True, nullable=False
)
mission = db.relationship("Mission", backref=backref("ends"))
__table_args__ = (db.Constraint(name="user_can_only_end_mission_once"),)
| [
"app.db.Constraint",
"app.db.ForeignKey",
"sqlalchemy.orm.backref"
] | [((231, 258), 'app.db.ForeignKey', 'db.ForeignKey', (['"""mission.id"""'], {}), "('mission.id')\n", (244, 258), False, 'from app import db\n'), ((382, 434), 'app.db.Constraint', 'db.Constraint', ([], {'name': '"""user_can_only_end_mission_once"""'}), "(name='user_can_only_end_mission_once')\n", (395, 434), False, 'from app import db\n'), ((342, 357), 'sqlalchemy.orm.backref', 'backref', (['"""ends"""'], {}), "('ends')\n", (349, 357), False, 'from sqlalchemy.orm import backref\n')] |
import Quandl
import pandas as pd
import pickle
# Not necessary, I just do this so I do not show my API key.
api_key = open('quandlapikey.txt','r').read()
def state_list():
fiddy_states = pd.read_html('https://simple.wikipedia.org/wiki/List_of_U.S._states')
return fiddy_states[0][0][1:]
def grab_initial_state_data():
states = state_list()
main_df = pd.DataFrame()
for abbv in states:
query = "FMAC/HPI_" + str(abbv)
df = Quandl.get(query, authtoken=api_key)
# NOTE: This is a fix that is not addressed in the tutorial
df.columns = [str(abbv)]
if main_df.empty:
main_df = df
else:
main_df = main_df.join(df)
print(main_df.head())
pickle_out = open('fiddy_states.pickle', 'wb')
pickle.dump(main_df, pickle_out)
pickle_out.close()
# grab_initial_state_data()
# pickle_in = open('fiddy_states.pickle', 'rb')
# HPI_data = pickle.load(pickle_in)
# print(HPI_data)
# HPI_data.to_pickle('pickle.pickle')
HPI_data2 = pd.read_pickle('pickle.pickle')
print(HPI_data2) | [
"pandas.read_pickle",
"pickle.dump",
"Quandl.get",
"pandas.DataFrame",
"pandas.read_html"
] | [((1032, 1063), 'pandas.read_pickle', 'pd.read_pickle', (['"""pickle.pickle"""'], {}), "('pickle.pickle')\n", (1046, 1063), True, 'import pandas as pd\n'), ((195, 264), 'pandas.read_html', 'pd.read_html', (['"""https://simple.wikipedia.org/wiki/List_of_U.S._states"""'], {}), "('https://simple.wikipedia.org/wiki/List_of_U.S._states')\n", (207, 264), True, 'import pandas as pd\n'), ((372, 386), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (384, 386), True, 'import pandas as pd\n'), ((793, 825), 'pickle.dump', 'pickle.dump', (['main_df', 'pickle_out'], {}), '(main_df, pickle_out)\n', (804, 825), False, 'import pickle\n'), ((465, 501), 'Quandl.get', 'Quandl.get', (['query'], {'authtoken': 'api_key'}), '(query, authtoken=api_key)\n', (475, 501), False, 'import Quandl\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
###########################
### The puZZle in a Mug ###
###########################
# importing the libraries
import scipy.stats as stats
# Adding the data
y = [1.90642, 2.22488, 2.10288, 1.69742, 1.52229, 3.15435, 2.61826, 1.98492, 1.42738, 1.99568]
# Calculating the Shapiro-Wilk test
shapiro_stat, shapiro_p_valor = stats.shapiro(y)
language = "portugues"
language = "english"
# Conclusion
if language == "english":
print("The calculated value for the Shapiro-Wilk test is = " + str(shapiro_stat))
print("The calculated p-value of the Shapiro-Wilk test is = " + str(shapiro_p_valor))
# Conclusion
if shapiro_p_valor >= 0.05:
print("At 95% confidence level, we have NO evidence to reject the hypothesis of data normality, according to the Shapiro-Wilk test")
else:
print("At 95% confidence level, we have evidence to reject the hypothesis of data normality, according to the Shapiro-Wilk test")
elif language == "portugues":
print("O valor calculado do teste de Shapiro-Wilk eh de = " + str(shapiro_stat))
print("O p-valor calculado para o teste de Shapiro-Wilk eh de = " + str(shapiro_p_valor))
# Conclusion
if shapiro_p_valor >= 0.05:
print("Com 95% de confianca, não temos evidências para rejeitar a hipótese de normalidade dos dados, segundo o teste de Shapiro-Wilk")
else:
print("Com 95% de confianca, temos evidências para rejeitar a hipótese de normalidade dos dados, segundo o teste de Shapiro-Wilk")
else:
print("Unsupported language")
| [
"scipy.stats.shapiro"
] | [((372, 388), 'scipy.stats.shapiro', 'stats.shapiro', (['y'], {}), '(y)\n', (385, 388), True, 'import scipy.stats as stats\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# bring in the ZSI-generated interface
from PUG_services import *
import re
# other modules/functions
from time import sleep
from urllib2 import urlopen
import zlib
def DownloadCIDs(cids):
# get a PUG SOAP port instance
loc = PUGLocator()
port = loc.getPUGSoap()
# start with a list of CIDs
req = InputListSoapIn()
req.set_element_ids(req.new_ids())
req.get_element_ids().set_element_int(cids)
req.set_element_idType('eID_CID')
listKey = port.InputList(req).get_element_ListKey()
# print 'ListKey =', listKey
# request download in SDF format, gzip-compressed
req = DownloadSoapIn()
req.set_element_ListKey(listKey)
req.set_element_eFormat('eFormat_SDF')
req.set_element_eCompress('eCompress_GZip')
downloadKey = port.Download(req).get_element_DownloadKey()
# print 'DownloadKey =', downloadKey
# call GetOperationStatus until the operation is finished
req = GetOperationStatusSoapIn()
req.set_element_AnyKey(downloadKey)
status = port.GetOperationStatus(req).get_element_status()
while status == 'eStatus_Queued' or status == 'eStatus_Running':
# print 'Waiting for operation to finish...'
sleep(10)
status = port.GetOperationStatus(req).get_element_status()
# check status
if status == 'eStatus_Success':
# get the url of the prepared file
req = GetDownloadUrlSoapIn()
req.set_element_DownloadKey(downloadKey)
url = port.GetDownloadUrl(req).get_element_url()
url = re.sub(r'^ftp://', 'http://', url)
# print 'Success! URL =', url
# download to a local file
sdfCompressed = urlopen(url, timeout=10)
sdf = zlib.decompress(sdfCompressed.read(), 31)
sdfCompressed.close()
return sdf
else:
# status indicates error
# see if there's some explanatory message
req = GetStatusMessageSoapIn()
req.set_element_AnyKey(downloadKey)
# print 'Error:', port.GetStatusMessage(req).get_element_message()
if __name__ == '__main__':
DownloadCIDs()
# $Id: DownloadCIDs.py 127842 2008-05-15 21:52:43Z thiessen $
| [
"re.sub",
"urllib2.urlopen",
"time.sleep"
] | [((1250, 1259), 'time.sleep', 'sleep', (['(10)'], {}), '(10)\n', (1255, 1259), False, 'from time import sleep\n'), ((1586, 1619), 're.sub', 're.sub', (['"""^ftp://"""', '"""http://"""', 'url'], {}), "('^ftp://', 'http://', url)\n", (1592, 1619), False, 'import re\n'), ((1720, 1744), 'urllib2.urlopen', 'urlopen', (['url'], {'timeout': '(10)'}), '(url, timeout=10)\n', (1727, 1744), False, 'from urllib2 import urlopen\n')] |
# -*- coding: utf-8 -*-
'''
############################
Acme::MetaSyntactic::colours
############################
****
NAME
****
Acme::MetaSyntactic::colours - The colours theme
***********
DESCRIPTION
***********
This theme list several colour names.
Note: this theme is aliased by \ ``Acme::MetaSyntactic::colors``\ .
************
CONTRIBUTORS
************
<NAME>, Abigail, <NAME> (BooK).
*******
CHANGES
*******
- \*
2012-06-04 - v1.003
Abigail added the color codes for resistors in English, Catalan, Danish,
Dutch, Estonian, Spanish and French
in Acme-MetaSyntactic-Themes version 1.004.
- \*
2012-05-21 - v1.002
Abigail added the seven colours of the rainbow in English, Dutch, German,
French, Spanish, Esperanto, Latin, Italian, Polish, Tagalog, Swedish,
Portuguese, Danish, and Interlingua
in Acme-MetaSyntactic-Themes version 1.002.
- \*
2012-05-14 - v1.001
Updated with an \ ``=encoding``\ pod command
in Acme-MetaSyntactic-Themes version 1.001.
- \*
2012-05-07 - v1.000
Received its own version number in Acme-MetaSyntactic-Themes version 1.000.
- \*
2006-06-05
Made multilingual in Acme-MetaSyntactic version 0.77,
with the large list of colours submitted by Abigail,
several translations by <NAME>,
and the X11 colors lying in \ */usr/X11R6/lib/X11/rgb.txt*\
by <NAME> (BooK).
- \*
2006-05-29
Introduced in Acme-MetaSyntactic version 0.76,
with the twelve colours proposed by <NAME>.
- \*
2005-10-20
Theme \ *colours*\ submitted by Abigail,
based on `http://en.wikipedia.org/wiki/List_of_colors <http://en.wikipedia.org/wiki/List_of_colors>`_.
- \*
2005-10-13
Theme \ *colors*\ submitted by <NAME>.
********
SEE ALSO
********
`Acme::MetaSyntactic <http://search.cpan.org/search?query=Acme%3a%3aMetaSyntactic&mode=module>`_, `Acme::MetaSyntactic::Locale <http://search.cpan.org/search?query=Acme%3a%3aMetaSyntactic%3a%3aLocale&mode=module>`_.
'''
name = 'colours'
DATA = '''\
# default
x-11
# names en
white black orange blue red yellow pink purple gray green magenta brown
amber amethyst asparagus aqua aquamarine azure beige bistre black blue
blaze_orange bondi_blue bright_green bright_turquoise bright_violet brown
buff burnt_orange burnt_sienna burnt_umber cadet_blue camouflage_green
cardinal caribbean_green carmine carrot celadon cerise cerulean
cerulean_blue chartreuse chestnut chocolate cobalt copper coral corn
cornflower_blue cream crimson cyan dark_brown dark_cerulean dark_chestnut
dark_coral dark_goldenrod dark_green dark_indigo dark_khaki dark_olive
dark_pastel_green dark_peach dark_pink dark_salmon dark_scarlet
dark_slate_gray dark_spring_green dark_tan dark_tangerine dark_tea_green
dark_turquoise dark_violet denim dodger_blue emerald eggplant fern_green
flax fuchsia gold goldenrod gray gray_asparagus gray_tea_green green
green_yellow heliotrope hot_pink indigo international_orange jade khaki
klein_blue lavender lavender_blush lemon lemon_cream light_brown lilac
lime linen magenta maroon mauve midnight_blue mint_green moss_green
mountbatten_pink mustard navajo_white navy_blue ochre old_gold
olive olive_drab orange orchid pale_blue pale_bright_green pale_brown
pale_carmine pale_chestnut pale_cornflower_blue pale_denim pale_magenta
pale_mauve pale_mint_green pale_ochre pale_olive pale_olive_drab
pale_orange pale_pink pale_pink_lavender pale_raw_umber pale_red_violet
pale_sandy_brown pale_sea_green pale_turquoise pale_wisteria pale_yellow
papaya_whip pastel_green pastel_pink peach peach_orange peach_yellow pear
periwinkle persian_blue pine_green pink pink_orange plum powder_blue
puce pumpkin purple raw_umber red red_violet robin_egg_blue royal_blue
safety_orange salmon sandy_brown scarlet school_bus_yellow sea_green
seashell sepia silver slate_gray spring_green steel_blue swamp_green
tan tangerine tea_green teal tenne thistle turquoise vermilion violet
violet_eggplant viridian wheat white wisteria yellow zinnwaldite
# names pt
preto azul castanho verde cinzento magenta laranja rosa roxo vermelho
branco amarelo
# names es
negro azul marron verde gris magenta anaranjado rosa purpura rojo blanco
amarillo
# names fr
noir bleu brun vert gris orange magenta rose pourpre rouge blanc jaune
# names x-11
snow ghost_white GhostWhite white_smoke WhiteSmoke gainsboro floral_white
FloralWhite old_lace OldLace linen antique_white AntiqueWhite papaya_whip
PapayaWhip blanched_almond BlanchedAlmond bisque peach_puff PeachPuff
navajo_white NavajoWhite moccasin cornsilk ivory lemon_chiffon
LemonChiffon seashell honeydew mint_cream MintCream azure alice_blue
AliceBlue lavender lavender_blush LavenderBlush misty_rose MistyRose white
black dark_slate_gray DarkSlateGray dark_slate_grey DarkSlateGrey dim_gray
DimGray dim_grey DimGrey slate_gray SlateGray slate_grey SlateGrey
light_slate_gray LightSlateGray light_slate_grey LightSlateGrey gray grey
light_grey LightGrey light_gray LightGray midnight_blue MidnightBlue
navy navy_blue NavyBlue cornflower_blue CornflowerBlue dark_slate_blue
DarkSlateBlue slate_blue SlateBlue medium_slate_blue MediumSlateBlue
light_slate_blue LightSlateBlue medium_blue MediumBlue royal_blue
RoyalBlue blue dodger_blue DodgerBlue deep_sky_blue DeepSkyBlue sky_blue
SkyBlue light_sky_blue LightSkyBlue steel_blue SteelBlue light_steel_blue
LightSteelBlue light_blue LightBlue powder_blue PowderBlue
pale_turquoise PaleTurquoise dark_turquoise DarkTurquoise medium_turquoise
MediumTurquoise turquoise cyan light_cyan LightCyan cadet_blue CadetBlue
medium_aquamarine MediumAquamarine aquamarine dark_green DarkGreen
dark_olive_green DarkOliveGreen dark_sea_green DarkSeaGreen sea_green
SeaGreen medium_sea_green MediumSeaGreen light_sea_green LightSeaGreen
pale_green PaleGreen spring_green SpringGreen lawn_green LawnGreen
green chartreuse medium_spring_green MediumSpringGreen green_yellow
GreenYellow lime_green LimeGreen yellow_green YellowGreen forest_green
ForestGreen olive_drab OliveDrab dark_khaki DarkKhaki khaki pale_goldenrod
PaleGoldenrod light_goldenrod_yellow LightGoldenrodYellow light_yellow
LightYellow yellow gold light_goldenrod LightGoldenrod goldenrod
dark_goldenrod DarkGoldenrod rosy_brown RosyBrown indian_red IndianRed
saddle_brown SaddleBrown sienna peru burlywood beige wheat sandy_brown
SandyBrown tan chocolate firebrick brown dark_salmon DarkSalmon salmon
light_salmon LightSalmon orange dark_orange DarkOrange coral light_coral
LightCoral tomato orange_red OrangeRed red hot_pink HotPink deep_pink
DeepPink pink light_pink LightPink pale_violet_red PaleVioletRed maroon
medium_violet_red MediumVioletRed violet_red VioletRed magenta violet
plum orchid medium_orchid MediumOrchid dark_orchid DarkOrchid dark_violet
DarkViolet blue_violet BlueViolet purple medium_purple MediumPurple
thistle snow1 snow2 snow3 snow4 seashell1 seashell2 seashell3 seashell4
AntiqueWhite1 AntiqueWhite2 AntiqueWhite3 AntiqueWhite4 bisque1 bisque2
bisque3 bisque4 PeachPuff1 PeachPuff2 PeachPuff3 PeachPuff4 NavajoWhite1
NavajoWhite2 NavajoWhite3 NavajoWhite4 LemonChiffon1 LemonChiffon2
LemonChiffon3 LemonChiffon4 cornsilk1 cornsilk2 cornsilk3 cornsilk4
ivory1 ivory2 ivory3 ivory4 honeydew1 honeydew2 honeydew3 honeydew4
LavenderBlush1 LavenderBlush2 LavenderBlush3 LavenderBlush4 MistyRose1
MistyRose2 MistyRose3 MistyRose4 azure1 azure2 azure3 azure4 SlateBlue1
SlateBlue2 SlateBlue3 SlateBlue4 RoyalBlue1 RoyalBlue2 RoyalBlue3
RoyalBlue4 blue1 blue2 blue3 blue4 DodgerBlue1 DodgerBlue2 DodgerBlue3
DodgerBlue4 SteelBlue1 SteelBlue2 SteelBlue3 SteelBlue4 DeepSkyBlue1
DeepSkyBlue2 DeepSkyBlue3 DeepSkyBlue4 SkyBlue1 SkyBlue2 SkyBlue3 SkyBlue4
LightSkyBlue1 LightSkyBlue2 LightSkyBlue3 LightSkyBlue4 SlateGray1
SlateGray2 SlateGray3 SlateGray4 LightSteelBlue1 LightSteelBlue2
LightSteelBlue3 LightSteelBlue4 LightBlue1 LightBlue2 LightBlue3
LightBlue4 LightCyan1 LightCyan2 LightCyan3 LightCyan4 PaleTurquoise1
PaleTurquoise2 PaleTurquoise3 PaleTurquoise4 CadetBlue1 CadetBlue2
CadetBlue3 CadetBlue4 turquoise1 turquoise2 turquoise3 turquoise4
cyan1 cyan2 cyan3 cyan4 DarkSlateGray1 DarkSlateGray2 DarkSlateGray3
DarkSlateGray4 aquamarine1 aquamarine2 aquamarine3 aquamarine4
DarkSeaGreen1 DarkSeaGreen2 DarkSeaGreen3 DarkSeaGreen4 SeaGreen1
SeaGreen2 SeaGreen3 SeaGreen4 PaleGreen1 PaleGreen2 PaleGreen3
PaleGreen4 SpringGreen1 SpringGreen2 SpringGreen3 SpringGreen4
green1 green2 green3 green4 chartreuse1 chartreuse2 chartreuse3
chartreuse4 OliveDrab1 OliveDrab2 OliveDrab3 OliveDrab4 DarkOliveGreen1
DarkOliveGreen2 DarkOliveGreen3 DarkOliveGreen4 khaki1 khaki2 khaki3
khaki4 LightGoldenrod1 LightGoldenrod2 LightGoldenrod3 LightGoldenrod4
LightYellow1 LightYellow2 LightYellow3 LightYellow4 yellow1 yellow2
yellow3 yellow4 gold1 gold2 gold3 gold4 goldenrod1 goldenrod2 goldenrod3
goldenrod4 DarkGoldenrod1 DarkGoldenrod2 DarkGoldenrod3 DarkGoldenrod4
RosyBrown1 RosyBrown2 RosyBrown3 RosyBrown4 IndianRed1 IndianRed2
IndianRed3 IndianRed4 sienna1 sienna2 sienna3 sienna4 burlywood1
burlywood2 burlywood3 burlywood4 wheat1 wheat2 wheat3 wheat4 tan1
tan2 tan3 tan4 chocolate1 chocolate2 chocolate3 chocolate4 firebrick1
firebrick2 firebrick3 firebrick4 brown1 brown2 brown3 brown4 salmon1
salmon2 salmon3 salmon4 LightSalmon1 LightSalmon2 LightSalmon3
LightSalmon4 orange1 orange2 orange3 orange4 DarkOrange1 DarkOrange2
DarkOrange3 DarkOrange4 coral1 coral2 coral3 coral4 tomato1 tomato2
tomato3 tomato4 OrangeRed1 OrangeRed2 OrangeRed3 OrangeRed4 red1 red2
red3 red4 DeepPink1 DeepPink2 DeepPink3 DeepPink4 HotPink1 HotPink2
HotPink3 HotPink4 pink1 pink2 pink3 pink4 LightPink1 LightPink2
LightPink3 LightPink4 PaleVioletRed1 PaleVioletRed2 PaleVioletRed3
PaleVioletRed4 maroon1 maroon2 maroon3 maroon4 VioletRed1 VioletRed2
VioletRed3 VioletRed4 magenta1 magenta2 magenta3 magenta4 orchid1 orchid2
orchid3 orchid4 plum1 plum2 plum3 plum4 MediumOrchid1 MediumOrchid2
MediumOrchid3 MediumOrchid4 DarkOrchid1 DarkOrchid2 DarkOrchid3
DarkOrchid4 purple1 purple2 purple3 purple4 MediumPurple1 MediumPurple2
MediumPurple3 MediumPurple4 thistle1 thistle2 thistle3 thistle4 gray0
grey0 gray1 grey1 gray2 grey2 gray3 grey3 gray4 grey4 gray5 grey5 gray6
grey6 gray7 grey7 gray8 grey8 gray9 grey9 gray10 grey10 gray11 grey11
gray12 grey12 gray13 grey13 gray14 grey14 gray15 grey15 gray16 grey16
gray17 grey17 gray18 grey18 gray19 grey19 gray20 grey20 gray21 grey21
gray22 grey22 gray23 grey23 gray24 grey24 gray25 grey25 gray26 grey26
gray27 grey27 gray28 grey28 gray29 grey29 gray30 grey30 gray31 grey31
gray32 grey32 gray33 grey33 gray34 grey34 gray35 grey35 gray36 grey36
gray37 grey37 gray38 grey38 gray39 grey39 gray40 grey40 gray41 grey41
gray42 grey42 gray43 grey43 gray44 grey44 gray45 grey45 gray46 grey46
gray47 grey47 gray48 grey48 gray49 grey49 gray50 grey50 gray51 grey51
gray52 grey52 gray53 grey53 gray54 grey54 gray55 grey55 gray56 grey56
gray57 grey57 gray58 grey58 gray59 grey59 gray60 grey60 gray61 grey61
gray62 grey62 gray63 grey63 gray64 grey64 gray65 grey65 gray66 grey66
gray67 grey67 gray68 grey68 gray69 grey69 gray70 grey70 gray71 grey71
gray72 grey72 gray73 grey73 gray74 grey74 gray75 grey75 gray76 grey76
gray77 grey77 gray78 grey78 gray79 grey79 gray80 grey80 gray81 grey81
gray82 grey82 gray83 grey83 gray84 grey84 gray85 grey85 gray86 grey86
gray87 grey87 gray88 grey88 gray89 grey89 gray90 grey90 gray91 grey91
gray92 grey92 gray93 grey93 gray94 grey94 gray95 grey95 gray96 grey96
gray97 grey97 gray98 grey98 gray99 grey99 gray100 grey100 dark_grey
DarkGrey dark_gray DarkGray dark_blue DarkBlue dark_cyan DarkCyan
dark_magenta DarkMagenta dark_red DarkRed light_green LightGreen
# names rainbow en
red orange yellow green blue indigo violet
# names rainbow nl
rood oranje geel groen blauw indigo violet
# names rainbow de
rot orange gelb grun blau indigo violett
# names rainbow fr
rouge orange jaune vert bleu indigo violet
# names rainbow es
rojo naranja amarillo verde azul anil violeta
# names rainbow eo
ruga oranga flava verda blua indiga viola
# names rainbow la
ruber aurantius flavus viridus caeruleus indicum violaceus
# names rainbow it
rosso arancione giallo verde azzurro indaco violetto
# names rainbow pl
czerwona pomaranczowa zolta zielona niebieska indygo fioletowa
# names rainbow ia
rubie orange jalne verde blau anil violetto
# names rainbow sv
rod orange gul gron bla indigo violett
# names rainbow pt
vermelho laranja amarelo verde azul anil violeta
# names rainbow dk
rod orange gul gron bla indigo violet
# names rainbow tl
pula kahel dilaw lunti bughaw indigo lila
# names resistor en
black brown red orange yellow green blue violet gray white gold silver
# names resistor ca
negre marro vermell taronja groc vert blau violeta gris blanc daurat argent
# names resistor dk
sort brun rod orange gul gron bla violet gra hvid guld solv
# names resistor nl
zwart bruin rood oranje geel groen blauw violet grijs wit goud zilver
# names resistor et
must pruun punane oranz kollane reheline sinine violetne hall valge
kuldne hobedane
# names resistor es
negro marron rojo naranja amarillo verde azul violeta gris blanco dorado plata
# names resistor fr
noir marron rouge orange jaune vert blue violet gris blanc or argent\
'''
from metasyntactic.base import parse_data
from random import choice, shuffle
from six import iteritems
data = parse_data(DATA)
def default():
try:
if 'default' in data:
return data['default'][0]
except (KeyError, IndexError):
pass
return 'en'
def all():
acc = set()
for category, names in iteritems(data['names']):
if names:
acc |= names
return acc
def names(category=None):
if not category:
category = default()
if category == ':all':
return list(all())
category = category.replace('/', ' ')
return list(data['names'][category])
def random(n=1, category=None):
got = names(category)
if got:
shuffle(got)
if n == 1:
return choice(got)
return got[:n]
def categories():
return set(data['names'])
| [
"random.choice",
"six.iteritems",
"random.shuffle",
"metasyntactic.base.parse_data"
] | [((13295, 13311), 'metasyntactic.base.parse_data', 'parse_data', (['DATA'], {}), '(DATA)\n', (13305, 13311), False, 'from metasyntactic.base import parse_data\n'), ((13526, 13550), 'six.iteritems', 'iteritems', (["data['names']"], {}), "(data['names'])\n", (13535, 13550), False, 'from six import iteritems\n'), ((13905, 13917), 'random.shuffle', 'shuffle', (['got'], {}), '(got)\n', (13912, 13917), False, 'from random import choice, shuffle\n'), ((13956, 13967), 'random.choice', 'choice', (['got'], {}), '(got)\n', (13962, 13967), False, 'from random import choice, shuffle\n')] |
"""
@brief test log(time=10s)
"""
import os
import unittest
from ensae_teaching_cs.helpers import enumerate_inspect_source_code
class TestCodeHelper(unittest.TestCase):
def test_size_object_in_folder(self):
folder = os.path.abspath(os.path.dirname(__file__))
patterns = "from ensae_teaching_cs[._a-zA-Z0-9]* import ([a-zA-Z0-9_]+)"
res = []
nb = 0
for obs in enumerate_inspect_source_code(folder, line_patterns=patterns):
res.append(obs)
if obs['group'] == "enumerate_inspect_source_code":
nb += 1
self.assertGreater(len(res), 0)
self.assertEqual(nb, 1)
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"os.path.dirname",
"ensae_teaching_cs.helpers.enumerate_inspect_source_code"
] | [((696, 711), 'unittest.main', 'unittest.main', ([], {}), '()\n', (709, 711), False, 'import unittest\n'), ((412, 473), 'ensae_teaching_cs.helpers.enumerate_inspect_source_code', 'enumerate_inspect_source_code', (['folder'], {'line_patterns': 'patterns'}), '(folder, line_patterns=patterns)\n', (441, 473), False, 'from ensae_teaching_cs.helpers import enumerate_inspect_source_code\n'), ((253, 278), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (268, 278), False, 'import os\n')] |
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: MIT
from __future__ import absolute_import
from mock import patch, MagicMock
import pytest
from module_build_service.common.errors import IgnoreMessage
from module_build_service.scheduler import events
from module_build_service.scheduler.consumer import MBSConsumer
class TestConsumer:
def test_get_abstracted_msg_fedmsg(self):
"""
Test the output of get_abstracted_msg() when using the
fedmsg backend.
"""
hub = MagicMock(config={})
consumer = MBSConsumer(hub)
msg = {
"username": "apache",
"source_name": "datanommer",
"i": 1,
"timestamp": 1505492681.0,
"msg_id": "2017-0627b798-f241-4230-b365-8a8a111a8ec5",
"crypto": "x509",
"topic": "org.fedoraproject.prod.buildsys.tag",
"headers": {},
"source_version": "0.8.1",
"msg": {
"build_id": 962861,
"name": "python3-virtualenv",
"tag_id": 263,
"instance": "primary",
"tag": "epel7-pending",
"user": "bodhi",
"version": "15.1.0",
"owner": "orion",
"release": "1.el7",
},
}
event_info = consumer.get_abstracted_event_info(msg)
assert event_info["event"] == events.KOJI_TAG_CHANGE
assert event_info["msg_id"] == msg["msg_id"]
assert event_info["tag_name"] == msg["msg"]["tag"]
@patch("module_build_service.scheduler.consumer.models")
@patch.object(MBSConsumer, "process_message")
def test_consume_fedmsg(self, process_message, models):
"""
Test the MBSConsumer.consume() method when using the
fedmsg backend.
"""
hub = MagicMock(config={})
consumer = MBSConsumer(hub)
msg = {
"topic": "org.fedoraproject.prod.buildsys.repo.done",
"headers": {},
"body": {
"username": "apache",
"source_name": "datanommer",
"i": 1,
"timestamp": 1405126329.0,
"msg_id": "2014-adbc33f6-51b0-4fce-aa0d-3c699a9920e4",
"crypto": "x509",
"topic": "org.fedoraproject.prod.buildsys.repo.done",
"headers": {},
"source_version": "0.6.4",
"msg": {
"instance": "primary",
"repo_id": 400859,
"tag": "f22-build",
"tag_id": 278,
},
},
}
consumer.consume(msg)
assert process_message.call_count == 1
event_info = process_message.call_args[0][0]
assert event_info["event"] == events.KOJI_REPO_CHANGE
assert event_info["msg_id"] == msg["body"]["msg_id"]
assert event_info["tag_name"] == msg["body"]["msg"]["tag"]
@patch.object(MBSConsumer, "process_message")
def test_ingore_koji_build_change_event_without_task_id(self, process_message):
"""
Test koji_build_change event without task_id should be ignored.
"""
hub = MagicMock(config={})
consumer = MBSConsumer(hub)
event = {
'build_new_state': 1,
'task_id': None,
'msg_id': u'f66a43be-e510-44fc-a318-e422cfda65d3',
'module_build_id': None,
'state_reason': None,
'build_name': 'foobar',
'build_version': '201912130626',
'event': 'koji_build_change',
'build_release': u'070752'
}
consumer.get_abstracted_event_info = MagicMock()
consumer.get_abstracted_event_info.return_value = event
consumer.consume({})
assert process_message.call_count == 0
def test_validate_event_none_msg(self):
hub = MagicMock(config={})
consumer = MBSConsumer(hub)
with pytest.raises(IgnoreMessage):
consumer.validate_event(None)
| [
"mock.patch",
"module_build_service.scheduler.consumer.MBSConsumer",
"mock.patch.object",
"pytest.raises",
"mock.MagicMock"
] | [((1560, 1615), 'mock.patch', 'patch', (['"""module_build_service.scheduler.consumer.models"""'], {}), "('module_build_service.scheduler.consumer.models')\n", (1565, 1615), False, 'from mock import patch, MagicMock\n'), ((1621, 1665), 'mock.patch.object', 'patch.object', (['MBSConsumer', '"""process_message"""'], {}), "(MBSConsumer, 'process_message')\n", (1633, 1665), False, 'from mock import patch, MagicMock\n'), ((2988, 3032), 'mock.patch.object', 'patch.object', (['MBSConsumer', '"""process_message"""'], {}), "(MBSConsumer, 'process_message')\n", (3000, 3032), False, 'from mock import patch, MagicMock\n'), ((512, 532), 'mock.MagicMock', 'MagicMock', ([], {'config': '{}'}), '(config={})\n', (521, 532), False, 'from mock import patch, MagicMock\n'), ((552, 568), 'module_build_service.scheduler.consumer.MBSConsumer', 'MBSConsumer', (['hub'], {}), '(hub)\n', (563, 568), False, 'from module_build_service.scheduler.consumer import MBSConsumer\n'), ((1849, 1869), 'mock.MagicMock', 'MagicMock', ([], {'config': '{}'}), '(config={})\n', (1858, 1869), False, 'from mock import patch, MagicMock\n'), ((1889, 1905), 'module_build_service.scheduler.consumer.MBSConsumer', 'MBSConsumer', (['hub'], {}), '(hub)\n', (1900, 1905), False, 'from module_build_service.scheduler.consumer import MBSConsumer\n'), ((3227, 3247), 'mock.MagicMock', 'MagicMock', ([], {'config': '{}'}), '(config={})\n', (3236, 3247), False, 'from mock import patch, MagicMock\n'), ((3267, 3283), 'module_build_service.scheduler.consumer.MBSConsumer', 'MBSConsumer', (['hub'], {}), '(hub)\n', (3278, 3283), False, 'from module_build_service.scheduler.consumer import MBSConsumer\n'), ((3716, 3727), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3725, 3727), False, 'from mock import patch, MagicMock\n'), ((3927, 3947), 'mock.MagicMock', 'MagicMock', ([], {'config': '{}'}), '(config={})\n', (3936, 3947), False, 'from mock import patch, MagicMock\n'), ((3967, 3983), 'module_build_service.scheduler.consumer.MBSConsumer', 'MBSConsumer', (['hub'], {}), '(hub)\n', (3978, 3983), False, 'from module_build_service.scheduler.consumer import MBSConsumer\n'), ((3997, 4025), 'pytest.raises', 'pytest.raises', (['IgnoreMessage'], {}), '(IgnoreMessage)\n', (4010, 4025), False, 'import pytest\n')] |
from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
from sqlalchemy.schema import PrimaryKeyConstraint
Base = declarative_base()
user_projects = Table(
'user_projects', Base.metadata,
Column('user_id', Integer, ForeignKey('users.id')),
Column('project_id', Integer, ForeignKey('projects.id')),
PrimaryKeyConstraint('user_id', 'project_id')
)
issue_labels = Table(
'issue_labels', Base.metadata,
Column('issue_id', Integer, ForeignKey('issues.id')),
Column('label_id', Integer, ForeignKey('labels.id')),
PrimaryKeyConstraint('issue_id', 'label_id')
)
project_collaborators = Table(
'project_collaborators', Base.metadata,
Column('project_id', Integer, ForeignKey('projects.id')),
Column('user_id', Integer, ForeignKey('users.id')),
PrimaryKeyConstraint('project_id', 'user_id')
)
class Users(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
username = Column(String)
email_id = Column(String)
github_access_token = Column(String)
projects = relationship('Projects', cascade='all, delete-orphan', single_parent=True,
secondary=user_projects, backref='users')
class Projects(Base):
__tablename__ = 'projects'
id = Column(Integer, primary_key=True)
name = Column(String)
author_id = Column(Integer, ForeignKey('users.id'))
repositories = relationship("Repositories", cascade='all, delete-orphan', backref='projects')
labels = relationship("Labels", cascade='all, delete-orphan', backref='projects')
collaborators = relationship("Users", secondary=project_collaborators, backref='projects_collaborators')
class Repositories(Base):
__tablename__ = 'repositories'
id = Column(Integer, primary_key=True)
name = Column(String(100))
github_repo_id = Column(Integer)
project_id = Column(Integer, ForeignKey('projects.id'), nullable=False)
issues = relationship("Issues", cascade='all, delete-orphan', backref='repositories')
class Issues(Base):
__tablename__ = 'issues'
id = Column(Integer, primary_key=True)
title = Column(String(100))
body = Column(String)
number = Column(Integer)
repository_id = Column(Integer, ForeignKey('repositories.id'), nullable=False)
labels = relationship('Labels', secondary=issue_labels, backref='issues')
UniqueConstraint(number, repository_id)
class Labels(Base):
__tablename__ = 'labels'
id = Column(Integer, primary_key=True)
name = Column(String)
color = Column(String)
order = Column(Integer)
project_id = Column(Integer, ForeignKey('projects.id'), nullable=False)
| [
"sqlalchemy.orm.relationship",
"sqlalchemy.ForeignKey",
"sqlalchemy.schema.PrimaryKeyConstraint",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.String",
"sqlalchemy.ext.declarative.declarative_base",
"sqlalchemy.Column"
] | [((248, 266), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (264, 266), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((450, 495), 'sqlalchemy.schema.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""user_id"""', '"""project_id"""'], {}), "('user_id', 'project_id')\n", (470, 495), False, 'from sqlalchemy.schema import PrimaryKeyConstraint\n'), ((676, 720), 'sqlalchemy.schema.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""issue_id"""', '"""label_id"""'], {}), "('issue_id', 'label_id')\n", (696, 720), False, 'from sqlalchemy.schema import PrimaryKeyConstraint\n'), ((921, 966), 'sqlalchemy.schema.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""project_id"""', '"""user_id"""'], {}), "('project_id', 'user_id')\n", (941, 966), False, 'from sqlalchemy.schema import PrimaryKeyConstraint\n'), ((1028, 1061), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1034, 1061), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1077, 1091), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (1083, 1091), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1107, 1121), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (1113, 1121), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1148, 1162), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (1154, 1162), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1179, 1299), 'sqlalchemy.orm.relationship', 'relationship', (['"""Projects"""'], {'cascade': '"""all, delete-orphan"""', 'single_parent': '(True)', 'secondary': 'user_projects', 'backref': '"""users"""'}), "('Projects', cascade='all, delete-orphan', single_parent=True,\n secondary=user_projects, backref='users')\n", (1191, 1299), False, 'from sqlalchemy.orm import relationship, backref\n'), ((1389, 1422), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1395, 1422), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1434, 1448), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (1440, 1448), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1525, 1603), 'sqlalchemy.orm.relationship', 'relationship', (['"""Repositories"""'], {'cascade': '"""all, delete-orphan"""', 'backref': '"""projects"""'}), "('Repositories', cascade='all, delete-orphan', backref='projects')\n", (1537, 1603), False, 'from sqlalchemy.orm import relationship, backref\n'), ((1617, 1689), 'sqlalchemy.orm.relationship', 'relationship', (['"""Labels"""'], {'cascade': '"""all, delete-orphan"""', 'backref': '"""projects"""'}), "('Labels', cascade='all, delete-orphan', backref='projects')\n", (1629, 1689), False, 'from sqlalchemy.orm import relationship, backref\n'), ((1710, 1803), 'sqlalchemy.orm.relationship', 'relationship', (['"""Users"""'], {'secondary': 'project_collaborators', 'backref': '"""projects_collaborators"""'}), "('Users', secondary=project_collaborators, backref=\n 'projects_collaborators')\n", (1722, 1803), False, 'from sqlalchemy.orm import relationship, backref\n'), ((1872, 1905), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (1878, 1905), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1958, 1973), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (1964, 1973), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2064, 2140), 'sqlalchemy.orm.relationship', 'relationship', (['"""Issues"""'], {'cascade': '"""all, delete-orphan"""', 'backref': '"""repositories"""'}), "('Issues', cascade='all, delete-orphan', backref='repositories')\n", (2076, 2140), False, 'from sqlalchemy.orm import relationship, backref\n'), ((2202, 2235), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (2208, 2235), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2279, 2293), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (2285, 2293), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2307, 2322), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (2313, 2322), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2420, 2484), 'sqlalchemy.orm.relationship', 'relationship', (['"""Labels"""'], {'secondary': 'issue_labels', 'backref': '"""issues"""'}), "('Labels', secondary=issue_labels, backref='issues')\n", (2432, 2484), False, 'from sqlalchemy.orm import relationship, backref\n'), ((2490, 2529), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['number', 'repository_id'], {}), '(number, repository_id)\n', (2506, 2529), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2591, 2624), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)'}), '(Integer, primary_key=True)\n', (2597, 2624), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2636, 2650), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (2642, 2650), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2663, 2677), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (2669, 2677), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2690, 2705), 'sqlalchemy.Column', 'Column', (['Integer'], {}), '(Integer)\n', (2696, 2705), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((359, 381), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (369, 381), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((418, 443), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""projects.id"""'], {}), "('projects.id')\n", (428, 443), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((588, 611), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""issues.id"""'], {}), "('issues.id')\n", (598, 611), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((646, 669), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""labels.id"""'], {}), "('labels.id')\n", (656, 669), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((833, 858), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""projects.id"""'], {}), "('projects.id')\n", (843, 858), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((892, 914), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (902, 914), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1481, 1503), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""users.id"""'], {}), "('users.id')\n", (1491, 1503), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((1924, 1935), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (1930, 1935), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2007, 2032), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""projects.id"""'], {}), "('projects.id')\n", (2017, 2032), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2255, 2266), 'sqlalchemy.String', 'String', (['(100)'], {}), '(100)\n', (2261, 2266), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2359, 2388), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""repositories.id"""'], {}), "('repositories.id')\n", (2369, 2388), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n'), ((2739, 2764), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""projects.id"""'], {}), "('projects.id')\n", (2749, 2764), False, 'from sqlalchemy import Table, Column, Integer, String, ForeignKey, UniqueConstraint\n')] |
from flask.views import MethodView
from flask import request
from helpers.build_response import build_response
from helpers.html_error_handler import html_error_handler
from python_cowbull_consrv import app
import json
import requests
class Modes(MethodView):
@property
def response(self):
return dict(server=None, status=None, ads=[], highscores=[])
@property
def modes_url(self):
return "{}:{}/{}/modes".format(
app.config.get("cowbull_server", "localhost"),
app.config.get("cowbull_port", 5000),
app.config.get("cowbull_game_version", "v1")
)
@property
def headers(self):
return {
"Content-type": "application/json"
}
def get(self):
try:
r = requests.get(
url="{}".format(self.modes_url),
timeout=app.config.get("cowbull_timeout"),
headers=self.headers
)
except Exception as e:
return html_error_handler(
html_status=500,
html_exception=repr(e),
html_message="An exception occurred while accessing the game service modes.",
html_module="Modes.py",
html_method="get"
)
_response = self.response
if r.status_code in [200, 400, 500]:
_response["server"] = r.json()
_response["status"] = r.status_code
else:
# TODO Circuit breaker
return html_error_handler(
html_status=r.status_code,
html_message=r.text,
html_exception="Error occurred while fetching a new game",
html_module="Game.py",
html_method="get"
)
return build_response(response_data=_response, html_status=r.status_code)
| [
"helpers.html_error_handler.html_error_handler",
"python_cowbull_consrv.app.config.get",
"helpers.build_response.build_response"
] | [((1801, 1867), 'helpers.build_response.build_response', 'build_response', ([], {'response_data': '_response', 'html_status': 'r.status_code'}), '(response_data=_response, html_status=r.status_code)\n', (1815, 1867), False, 'from helpers.build_response import build_response\n'), ((461, 506), 'python_cowbull_consrv.app.config.get', 'app.config.get', (['"""cowbull_server"""', '"""localhost"""'], {}), "('cowbull_server', 'localhost')\n", (475, 506), False, 'from python_cowbull_consrv import app\n'), ((520, 556), 'python_cowbull_consrv.app.config.get', 'app.config.get', (['"""cowbull_port"""', '(5000)'], {}), "('cowbull_port', 5000)\n", (534, 556), False, 'from python_cowbull_consrv import app\n'), ((570, 614), 'python_cowbull_consrv.app.config.get', 'app.config.get', (['"""cowbull_game_version"""', '"""v1"""'], {}), "('cowbull_game_version', 'v1')\n", (584, 614), False, 'from python_cowbull_consrv import app\n'), ((1523, 1699), 'helpers.html_error_handler.html_error_handler', 'html_error_handler', ([], {'html_status': 'r.status_code', 'html_message': 'r.text', 'html_exception': '"""Error occurred while fetching a new game"""', 'html_module': '"""Game.py"""', 'html_method': '"""get"""'}), "(html_status=r.status_code, html_message=r.text,\n html_exception='Error occurred while fetching a new game', html_module=\n 'Game.py', html_method='get')\n", (1541, 1699), False, 'from helpers.html_error_handler import html_error_handler\n'), ((873, 906), 'python_cowbull_consrv.app.config.get', 'app.config.get', (['"""cowbull_timeout"""'], {}), "('cowbull_timeout')\n", (887, 906), False, 'from python_cowbull_consrv import app\n')] |
import cv2
NUM_FRAMES = 3001
IN_DIR = "./"
STITCHED_PATH = "{}test_out".format(IN_DIR)
IMG_NAME = ""
IMG_FORMAT = ".png"
pathOf = lambda path, name, index, fmt: "{}/{}{}{}".format(path, name, index, fmt)
imgPathOf = lambda path, index: pathOf(path, IMG_NAME, index, IMG_FORMAT)
stitchedPathOf = lambda index: imgPathOf(STITCHED_PATH, index)
frameCount = 1
while frameCount != NUM_FRAMES:
print(stitchedPathOf(frameCount))
stitched = cv2.imread(stitchedPathOf(frameCount))
cv2.imshow("stitched", stitched)
frameCount += 1
key = cv2.waitKey(1) & 0xFF
# if the `q` key was pressed, break from the loop
if key == ord("q"):
break
cv2.destroyAllWindows()
| [
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.imshow"
] | [((668, 691), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (689, 691), False, 'import cv2\n'), ((489, 521), 'cv2.imshow', 'cv2.imshow', (['"""stitched"""', 'stitched'], {}), "('stitched', stitched)\n", (499, 521), False, 'import cv2\n'), ((553, 567), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (564, 567), False, 'import cv2\n')] |
from andes.linsolvers.cupy import CuPySolver
from andes.linsolvers.scipy import SpSolve
from andes.linsolvers.suitesparse import UMFPACKSolver, KLUSolver
class Solver:
"""
Sparse matrix solver class.
This class wraps UMFPACK, KLU, SciPy and CuPy solvers to provide an unified
interface for solving sparse linear equations ``Ax = b``.
Provides methods ``solve``, ``linsolve`` and ``clear``.
"""
def __init__(self, sparselib='umfpack'):
# solvers
self.umfpack = UMFPACKSolver()
self.klu = KLUSolver()
self.spsolve = SpSolve()
self.cupy = CuPySolver()
# KLU as failsafe
if sparselib not in self.__dict__:
self.sparselib = 'klu'
else:
self.sparselib = sparselib
self.worker = self.__dict__[self.sparselib]
def solve(self, A, b):
"""
Solve linear equations and cache factorizations if possible.
Parameters
----------
A : kvxopt.spmatrix
Sparse N-by-N matrix
b : kvxopt.matrix or numpy.ndarray
Dense N-by-1 matrix
Returns
-------
numpy.ndarray
Dense N-by-1 array
"""
return self.worker.solve(A, b)
def linsolve(self, A, b):
"""
Solve linear equations without caching facorization. Performs full factorization each call.
Parameters
----------
A : kvxopt.spmatrix
Sparse N-by-N matrix
b : kvxopt.matrix or numpy.ndarray
Dense N-by-1 matrix
Returns
-------
numpy.ndarray
Dense N-by-1 array
"""
return self.worker.linsolve(A, b)
def clear(self):
"""
Remove all cached objects.
"""
self.worker.clear()
| [
"andes.linsolvers.scipy.SpSolve",
"andes.linsolvers.suitesparse.KLUSolver",
"andes.linsolvers.cupy.CuPySolver",
"andes.linsolvers.suitesparse.UMFPACKSolver"
] | [((510, 525), 'andes.linsolvers.suitesparse.UMFPACKSolver', 'UMFPACKSolver', ([], {}), '()\n', (523, 525), False, 'from andes.linsolvers.suitesparse import UMFPACKSolver, KLUSolver\n'), ((545, 556), 'andes.linsolvers.suitesparse.KLUSolver', 'KLUSolver', ([], {}), '()\n', (554, 556), False, 'from andes.linsolvers.suitesparse import UMFPACKSolver, KLUSolver\n'), ((580, 589), 'andes.linsolvers.scipy.SpSolve', 'SpSolve', ([], {}), '()\n', (587, 589), False, 'from andes.linsolvers.scipy import SpSolve\n'), ((610, 622), 'andes.linsolvers.cupy.CuPySolver', 'CuPySolver', ([], {}), '()\n', (620, 622), False, 'from andes.linsolvers.cupy import CuPySolver\n')] |
from django.shortcuts import redirect
from main.models import Dataset, Agent, Column, Smell
from django.http import HttpResponseBadRequest, JsonResponse
from django.core.cache import cache
from main.utility.DatabaseUtility import safe_get
from main.utility import DedupeUtility
from main.utility import StringUtility
import json
from django.views.decorators.csrf import csrf_exempt
from django.core.files.base import File
from django.contrib import messages
# Views returned by Ajax calls
# Will generally return a JsonResponse containing data or an HttpResponse containing an error code and text.
# I recommend not building the return object inside the JsonResponse() call as this has lead to errors for me before.
# view which returns all columns from a dataset if the user is authorized to view it.
# id is the id of the dataset to get the columns of
@csrf_exempt
def get_columns(request, id):
if not request.user.is_authenticated:
return HttpResponseBadRequest("Must be logged in to get column names.")
dataset = safe_get(Dataset, id=id)
if not dataset:
return HttpResponseBadRequest(StringUtility.ERR_UNAUTHORIZED.format("Dataset"))
if not (dataset.user == request.user):
return HttpResponseBadRequest(StringUtility.ERR_UNAUTHORIZED.format("Dataset"))
columns = []
for col in Column.objects.filter(dataset=dataset):
columns.append({'id': col.id, 'name': col.name, 'dtype': col.dtype})
res = {'columns': columns}
return JsonResponse(res)
# view which returns the type of agent (integer) for a given agent if the user is authorized to view it.
# id is the id of the agent to get the type of
@csrf_exempt
def get_agent_type(request, id):
if not request.user.is_authenticated:
return HttpResponseBadRequest("Must be logged in to get agent type.")
agent = safe_get(Agent, id=id)
if not agent:
return HttpResponseBadRequest(StringUtility.ERR_UNAUTHORIZED.format("Agent"))
if not (agent.user == request.user):
return HttpResponseBadRequest(StringUtility.ERR_UNAUTHORIZED.format("Agent"))
res = {'type': agent.agent_type.id}
return JsonResponse(res)
# view which returns data as json for a given smell ID
# id is the id of the smell to get the data of
@csrf_exempt
def get_smell_data(request, id):
smell = safe_get(Smell, id=id)
if not smell:
return HttpResponseBadRequest(StringUtility.ERR_INVALID_KEY.format("Smell"))
res = {'name': smell.name, 'description': smell.description, 'agent_type_id': smell.agent_type.id,
'agent_type_name': smell.agent_type.name, 'dataset_description': smell.dataset_description}
return JsonResponse(res)
# view which returns the id and name of all gensim agents the user has access to, that do not have the id given in the request
# this is used to provide a list of options for using two agents to build the summed rcs of
# id is the id of the gensim agent to exclude
@csrf_exempt
def get_other_gensim_agents(request, id):
if not request.user.is_authenticated:
return HttpResponseBadRequest("Must be logged in to get agents.")
agents = Agent.objects.filter(agent_type__id = 2, user = request.user).exclude(id = id)
res = {'agents': [{'id': a.id, 'name': a.name} for a in agents]}
return JsonResponse(res)
# view which returns a dedupe pair for the user to label
# the corresponding agent needs to be stored in cache for this to work
@csrf_exempt
def get_dedupe_pair(request):
if not request.user.is_authenticated:
return HttpResponseBadRequest("Must be logged in to start the training process")
try:
# load model from cache
model_dedupe = cache.get(request.user.username + "_agent")
# post request means the user has also submitted a duplicate pair, so we get this from the post request and mark it
if request.method == "POST":
# convert arrays to tuples
pairs = json.loads(request.body)
distinct = list(map(lambda x: tuple(x), pairs["distinct"]))
match = list(map(lambda x: tuple(x), pairs["match"]))
#mark pairs
model_dedupe.mark_pairs({"match": match, "distinct": distinct})
# get a new set of uncertain pairs
uncertain = model_dedupe.uncertain_pairs()
# store agent back to the cache
cache.set(request.user.username + "_agent", model_dedupe)
# return new uncertain pairs
return JsonResponse({"pairs": uncertain})
# This probably means that the model loaded does not have a get_uncertain_pairs() method, i.e. no dedupe agent is stored properly
except AttributeError:
return HttpResponseBadRequest("Agent cached is not of proper type. (Dedupe)")
# view which stores the current model saved in cache, to the agent id stored in the session
# on success, redirects the user to the agents page.
@csrf_exempt
def store_dedupe_training(request):
if not request.user.is_authenticated:
return HttpResponseBadRequest("Must be logged in to store the training process.")
agent = safe_get(Agent, id=request.session.get("agent_id", None))
if not agent:
return HttpResponseBadRequest(StringUtility.ERR_UNAUTHORIZED.format("Agent"))
if agent.agent_type.id != 1:
return HttpResponseBadRequest("Agent in session is not of proper type. (Dedupe)")
if not (agent.user == request.user):
return HttpResponseBadRequest(StringUtility.ERR_UNAUTHORIZED.format("Agent"))
try:
# load model from cache
model = cache.get(request.user.username + "_agent")
# train and get files
model_file, settings_file = DedupeUtility.train_model(model)
#store files
readfile = open(model_file, 'r')
agent.model.save("", File(readfile), save=True)
readfile.close()
readfile = open(settings_file, 'rb')
agent.settings.save("", File(readfile), save=True)
readfile.close()
# store increased agent iteration and save
agent.iterations = agent.iterations + 1
agent.save()
messages.add_message(
request, messages.SUCCESS, "Your agent has been trained successfully.")
# redirect to main page
return redirect('/agents')
# This probably means that the model loaded does not have a get_uncertain_pairs() method, i.e. no dedupe agent is stored properly
except AttributeError:
return HttpResponseBadRequest("Model cached is not of proper type. (Dedupe)")
| [
"json.loads",
"django.http.HttpResponseBadRequest",
"main.utility.StringUtility.ERR_UNAUTHORIZED.format",
"django.http.JsonResponse",
"main.utility.DatabaseUtility.safe_get",
"main.utility.DedupeUtility.train_model",
"main.models.Column.objects.filter",
"django.shortcuts.redirect",
"main.models.Agent.objects.filter",
"django.core.files.base.File",
"django.contrib.messages.add_message",
"django.core.cache.cache.set",
"main.utility.StringUtility.ERR_INVALID_KEY.format",
"django.core.cache.cache.get"
] | [((1036, 1060), 'main.utility.DatabaseUtility.safe_get', 'safe_get', (['Dataset'], {'id': 'id'}), '(Dataset, id=id)\n', (1044, 1060), False, 'from main.utility.DatabaseUtility import safe_get\n'), ((1333, 1371), 'main.models.Column.objects.filter', 'Column.objects.filter', ([], {'dataset': 'dataset'}), '(dataset=dataset)\n', (1354, 1371), False, 'from main.models import Dataset, Agent, Column, Smell\n'), ((1493, 1510), 'django.http.JsonResponse', 'JsonResponse', (['res'], {}), '(res)\n', (1505, 1510), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((1844, 1866), 'main.utility.DatabaseUtility.safe_get', 'safe_get', (['Agent'], {'id': 'id'}), '(Agent, id=id)\n', (1852, 1866), False, 'from main.utility.DatabaseUtility import safe_get\n'), ((2150, 2167), 'django.http.JsonResponse', 'JsonResponse', (['res'], {}), '(res)\n', (2162, 2167), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((2330, 2352), 'main.utility.DatabaseUtility.safe_get', 'safe_get', (['Smell'], {'id': 'id'}), '(Smell, id=id)\n', (2338, 2352), False, 'from main.utility.DatabaseUtility import safe_get\n'), ((2676, 2693), 'django.http.JsonResponse', 'JsonResponse', (['res'], {}), '(res)\n', (2688, 2693), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((3305, 3322), 'django.http.JsonResponse', 'JsonResponse', (['res'], {}), '(res)\n', (3317, 3322), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((956, 1020), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Must be logged in to get column names."""'], {}), "('Must be logged in to get column names.')\n", (978, 1020), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((1768, 1830), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Must be logged in to get agent type."""'], {}), "('Must be logged in to get agent type.')\n", (1790, 1830), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((3072, 3130), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Must be logged in to get agents."""'], {}), "('Must be logged in to get agents.')\n", (3094, 3130), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((3552, 3625), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Must be logged in to start the training process"""'], {}), "('Must be logged in to start the training process')\n", (3574, 3625), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((3690, 3733), 'django.core.cache.cache.get', 'cache.get', (["(request.user.username + '_agent')"], {}), "(request.user.username + '_agent')\n", (3699, 3733), False, 'from django.core.cache import cache\n'), ((4360, 4417), 'django.core.cache.cache.set', 'cache.set', (["(request.user.username + '_agent')", 'model_dedupe'], {}), "(request.user.username + '_agent', model_dedupe)\n", (4369, 4417), False, 'from django.core.cache import cache\n'), ((4470, 4504), 'django.http.JsonResponse', 'JsonResponse', (["{'pairs': uncertain}"], {}), "({'pairs': uncertain})\n", (4482, 4504), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((5005, 5079), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Must be logged in to store the training process."""'], {}), "('Must be logged in to store the training process.')\n", (5027, 5079), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((5303, 5377), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Agent in session is not of proper type. (Dedupe)"""'], {}), "('Agent in session is not of proper type. (Dedupe)')\n", (5325, 5377), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((5571, 5614), 'django.core.cache.cache.get', 'cache.get', (["(request.user.username + '_agent')"], {}), "(request.user.username + '_agent')\n", (5580, 5614), False, 'from django.core.cache import cache\n'), ((5681, 5713), 'main.utility.DedupeUtility.train_model', 'DedupeUtility.train_model', (['model'], {}), '(model)\n', (5706, 5713), False, 'from main.utility import DedupeUtility\n'), ((6125, 6221), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.SUCCESS', '"""Your agent has been trained successfully."""'], {}), "(request, messages.SUCCESS,\n 'Your agent has been trained successfully.')\n", (6145, 6221), False, 'from django.contrib import messages\n'), ((6279, 6298), 'django.shortcuts.redirect', 'redirect', (['"""/agents"""'], {}), "('/agents')\n", (6287, 6298), False, 'from django.shortcuts import redirect\n'), ((1119, 1167), 'main.utility.StringUtility.ERR_UNAUTHORIZED.format', 'StringUtility.ERR_UNAUTHORIZED.format', (['"""Dataset"""'], {}), "('Dataset')\n", (1156, 1167), False, 'from main.utility import StringUtility\n'), ((1250, 1298), 'main.utility.StringUtility.ERR_UNAUTHORIZED.format', 'StringUtility.ERR_UNAUTHORIZED.format', (['"""Dataset"""'], {}), "('Dataset')\n", (1287, 1298), False, 'from main.utility import StringUtility\n'), ((1923, 1969), 'main.utility.StringUtility.ERR_UNAUTHORIZED.format', 'StringUtility.ERR_UNAUTHORIZED.format', (['"""Agent"""'], {}), "('Agent')\n", (1960, 1969), False, 'from main.utility import StringUtility\n'), ((2050, 2096), 'main.utility.StringUtility.ERR_UNAUTHORIZED.format', 'StringUtility.ERR_UNAUTHORIZED.format', (['"""Agent"""'], {}), "('Agent')\n", (2087, 2096), False, 'from main.utility import StringUtility\n'), ((2409, 2454), 'main.utility.StringUtility.ERR_INVALID_KEY.format', 'StringUtility.ERR_INVALID_KEY.format', (['"""Smell"""'], {}), "('Smell')\n", (2445, 2454), False, 'from main.utility import StringUtility\n'), ((3145, 3202), 'main.models.Agent.objects.filter', 'Agent.objects.filter', ([], {'agent_type__id': '(2)', 'user': 'request.user'}), '(agent_type__id=2, user=request.user)\n', (3165, 3202), False, 'from main.models import Dataset, Agent, Column, Smell\n'), ((3955, 3979), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (3965, 3979), False, 'import json\n'), ((4681, 4751), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Agent cached is not of proper type. (Dedupe)"""'], {}), "('Agent cached is not of proper type. (Dedupe)')\n", (4703, 4751), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n'), ((5207, 5253), 'main.utility.StringUtility.ERR_UNAUTHORIZED.format', 'StringUtility.ERR_UNAUTHORIZED.format', (['"""Agent"""'], {}), "('Agent')\n", (5244, 5253), False, 'from main.utility import StringUtility\n'), ((5457, 5503), 'main.utility.StringUtility.ERR_UNAUTHORIZED.format', 'StringUtility.ERR_UNAUTHORIZED.format', (['"""Agent"""'], {}), "('Agent')\n", (5494, 5503), False, 'from main.utility import StringUtility\n'), ((5814, 5828), 'django.core.files.base.File', 'File', (['readfile'], {}), '(readfile)\n', (5818, 5828), False, 'from django.core.files.base import File\n'), ((5943, 5957), 'django.core.files.base.File', 'File', (['readfile'], {}), '(readfile)\n', (5947, 5957), False, 'from django.core.files.base import File\n'), ((6475, 6545), 'django.http.HttpResponseBadRequest', 'HttpResponseBadRequest', (['"""Model cached is not of proper type. (Dedupe)"""'], {}), "('Model cached is not of proper type. (Dedupe)')\n", (6497, 6545), False, 'from django.http import HttpResponseBadRequest, JsonResponse\n')] |
import os
import sys
from functools import reduce
from basic import *
from mutation import *
from read import *
from sequence import *
random.seed(100)
if __name__ == '__main__':
time_start = time.time()
help = '''
-ref : get whole genome from filex , generate file3
-reg : get aimed and sorted annotation from filey, generate file2
-qph : get qphred frequencies from file,generate file3
-read : get aimed regions pe fastq from file1234 (deafault flitrate 'N')
-seq : get aimed sequence from file1,file2, generate filea
-view : view sequence or depth
-dep <file> <depth> : repositon regions from depth file
-clear : clear temp files
optional:
-wes/wgs : set the way to generate DNA segment
-se/pe : set the way to generate reads
-cd : set path to store
-R : set output fastq files extra name
-ini : set peofile
if occur something wrong, try delete bed_info and fasta_info
'''
info = sys.argv
print(info)
if '-ini' in info:
profile = info[info.index('-ini')+1]
else:
profile = 'profile1.ini'
conf = configparser.ConfigParser()
conf.read(profile)
CHIP_LEN, E_LEN, FLANK_LEN, JOIN_GAP, SLIDE_STEP = get_value(
conf, 'chip', int, "CHIP_LEN", "E_LEN", "FLANK_LEN", 'JOIN_GAP', 'SLIDE_STEP')
ERROR_E, ERROR_D, SUBSTITUTION, INSERTION, DELETION = get_value(
conf, "error", float, "ERROR_E", "ERROR_D", "SUBSTITUTION", "INSERTION", "DELETION")
ERROR = conf.getboolean("error", "ERROR")
DEPTH, MODE, PAIR = get_value(conf, 'read', int, "DEPTH", 'MODE', 'PAIR')
MISMATCH = conf.getboolean("read", 'MISMATCH')
FLI_N, INNER_N = get_value(conf, "sequence", bool, 'FLI_N', 'INNER_N')
QPH, QROW = get_value(conf, "quality", int, "QPH", "QROW")
CD, QUALITY, BED_INFO, FASTA_INFO, REFERENCES, REGIONS, MUTATIONS,LABEL = get_value(
conf, 'file', str, "CD", "QUALITY", 'BED_INFO', "FASTA_INFO", 'REFERENCES', 'REGIONS', 'MUTATIONS','LABEL')
COLUMN, MEMORY, = get_value(conf, 'file', int, "COLUMN", "MEMORY")
SEGMENT_E = conf.getint("segment", "SEGMENT_E")
SEGMENT_D = conf.getfloat("segment", "SEGMENT_D")
SEG = conf.get("segment", "SEG")
REFERENCES = 'REFERENCES='+REFERENCES
REGIONS = 'REGIONS='+REGIONS
MUTATIONS = 'MUTATIONS='+MUTATIONS
# if FLANK_LEN<SEGMENT_E-E_LEN:
# FLANK_LEN=SEGMENT_E
exec(REFERENCES)
exec(REGIONS)
exec(MUTATIONS)
BED_INFO = CD+BED_INFO
FASTA_INFO = CD+FASTA_INFO
FNA = get_dict(conf, "fna")
BED = get_dict(conf, "bed")
t = time.strftime('%Y%m%d_%H_%M_%S', time.localtime(time.time()))
if LABEL:
t=LABEL
R1 = CD+"R1_%s.fastq" % (t)
R2 = CD+"R2_%s.fastq" % (t)
R0 = CD+"R_%s.fastq" % (t)
conf = None
if '-wes' in info:
MODE = modes.WES.value
if '-wgs' in info:
MODE = modes.WGS.value
if '-pe' in info:
PAIR = pairs.PE.value
if '-se' in info:
PAIR = pairs.SE.value
if '-cd' in info:
CD = info[info.index('-cd')+1]
if '-R' in info:
string = info[info.index('-R')+1]
R1 = CD+"R1_%s.fastq" % (string)
R2 = CD+"R2_%s.fastq" % (string)
R0 = CD+"R_%s.fastq" % (string)
DEFAULT = {
'chip_len': CHIP_LEN,
'effect_len': E_LEN,
"flank_len": FLANK_LEN,
"ERROR": ERROR,
"error_e": ERROR_E,
"error_d": ERROR_D,
"substitution": SUBSTITUTION,
"insertion": INSERTION,
"deletion": DELETION,
"DEPTH": DEPTH,
"MODE": MODE,
"PAIR": PAIR,
"MISMATCH": MISMATCH,
"FLI_N": FLI_N,
"INNER_N": INNER_N,
"QPH": QPH,
"QROW": QROW,
"CD": CD,
"COLUMN": COLUMN,
"MEMORY": MEMORY,
"REFERENCES": REFERENCES,
"REGIONS": REGIONS,
"QUALITY": QUALITY,
"BED_INFO": BED_INFO,
"FASTA_INFO": FASTA_INFO,
'SEG': SEG,
'SEGMENT_D': SEGMENT_D,
'SEGMENT_E': SEGMENT_E,
'join_gap': JOIN_GAP,
'MUTATIONS': MUTATIONS,
'FNA': FNA,
"BED": BED,
'R1': R1,
'R2': R2,
"R0": R0,
}
references = set(REFERENCES)
regions = set(REGIONS) # filtrate the same region file
inireferences = list(
map(lambda x: CD+'ini'+x[0].split('/')[-1].strip(), references))
iniregions = list(
map(lambda x: CD+'ini'+x[0].split('/')[-1].strip(), regions))
iniexomes = list(
map(lambda x: CD+'exome'+x[0].split('/')[-1].strip()+'.fna', REGIONS))
inimutations = list(
map(lambda x: CD+'ini'+x[2].split('/')[-1].strip(), MUTATIONS))
stanfiles = list(
map(lambda x: CD+x[0].split('/')[-1].strip()+'.temp', regions))
iniquality = CD+'ini'+QUALITY.split('/')[-1].strip()
if not os.path.exists(CD):
os.makedirs(CD)
if not os.path.exists(BED_INFO):
open(BED_INFO, 'a').close()
if not os.path.exists(FASTA_INFO):
open(FASTA_INFO, 'a').close()
if '-help' in info:
print(help)
elif '-ref' in info: # initialize reference genomes(single haploid)
n = 1
for x, y in zip(REFERENCES, inireferences): # mark number
Fasta.ini_ref(x, y, n, FNA, COLUMN, MEMORY)
n += 1
elif '-reg'in info: # merge targed regions
for x, y in zip(regions, iniregions):
keys = Bed.ini_reg(x, y, BED, BED_INFO, E_LEN,
CHIP_LEN, 0) # 0:join_gap
elif '-qph' in info:
Quality.ini_qph(QUALITY, iniquality, QROW)
elif '-seq' in info: # initailize exome sequence from initialized regions
for x, y, z in zip(inireferences, iniregions, iniexomes):
Fasta.ini_exome(x, y, z, E_LEN, 0, FLI_N, INNER_N,
COLUMN, MEMORY, FASTA_INFO) # 0:flank_len
elif '-mut' in info: # initialize mutations
for mut, y, in zip(MUTATIONS, inimutations):
ini_muta(inireferences, iniregions, mut, y, len(REFERENCES),
COLUMN, MEMORY, BED_INFO, FASTA_INFO, E_LEN, CHIP_LEN)
elif '-read' in info:
if pairs.PE.value == PAIR:
open(R1, 'w', newline='\n').close()
open(R2, 'w', newline='\n').close()
elif PAIR == pairs.SE.value:
open(R0, 'w', newline='\n').close()
for x, mut in zip(inimutations, MUTATIONS):
# polyoid 's id,content,mutationseq,inireferences,mutationsbed
# mutationsbed can get from iniregions+polys
readout(inireferences, iniregions, iniquality, x, mut,LABEL, **DEFAULT)
elif '-dep' in info:
depfile = info[info.index('-dep')+1]
depth = int(info[info.index('-dep')+2])
segment_e = SEGMENT_E
if os.path.exists(SEG):
print('get segment length from file :', SEG)
segment_e = segfile(SEG)[2]
Depth.dep2bed(depfile, depth, segment_e, CHIP_LEN,
E_LEN, CD, JOIN_GAP, BED_INFO, SLIDE_STEP)
elif '-clear' in info:
def clear(listt):
for x in listt:
if os.path.exists(x):
os.remove(x)
clear(iniexomes)
clear(inimutations)
clear(iniregions)
clear(iniexomes)
clear(stanfiles)
clear([iniquality, BED_INFO, FASTA_INFO])
for x in os.listdir(CD):
if 'ini' in x[:4]:
os .remove(CD+x)
elif '.temp' in x[-4:]:
os.remove(CD+x)
elif '-view' in info:
view(FASTA_INFO, MEMORY)
time_end = time.time()
t = time_end-time_start
print('totally cost: %dh : %dm : %ds' % (t//3600, (t % 3600)//60, t % 60))
| [
"os.path.exists",
"os.listdir",
"os.makedirs",
"os.remove"
] | [((4946, 4964), 'os.path.exists', 'os.path.exists', (['CD'], {}), '(CD)\n', (4960, 4964), False, 'import os\n'), ((4974, 4989), 'os.makedirs', 'os.makedirs', (['CD'], {}), '(CD)\n', (4985, 4989), False, 'import os\n'), ((5002, 5026), 'os.path.exists', 'os.path.exists', (['BED_INFO'], {}), '(BED_INFO)\n', (5016, 5026), False, 'import os\n'), ((5075, 5101), 'os.path.exists', 'os.path.exists', (['FASTA_INFO'], {}), '(FASTA_INFO)\n', (5089, 5101), False, 'import os\n'), ((6909, 6928), 'os.path.exists', 'os.path.exists', (['SEG'], {}), '(SEG)\n', (6923, 6928), False, 'import os\n'), ((7500, 7514), 'os.listdir', 'os.listdir', (['CD'], {}), '(CD)\n', (7510, 7514), False, 'import os\n'), ((7252, 7269), 'os.path.exists', 'os.path.exists', (['x'], {}), '(x)\n', (7266, 7269), False, 'import os\n'), ((7563, 7580), 'os.remove', 'os.remove', (['(CD + x)'], {}), '(CD + x)\n', (7572, 7580), False, 'import os\n'), ((7291, 7303), 'os.remove', 'os.remove', (['x'], {}), '(x)\n', (7300, 7303), False, 'import os\n'), ((7632, 7649), 'os.remove', 'os.remove', (['(CD + x)'], {}), '(CD + x)\n', (7641, 7649), False, 'import os\n')] |
#!/usr/bin/env python
from flask_migrate import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
api.downgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('Current database version: ' + str(v))
| [
"flask_migrate.api.downgrade",
"flask_migrate.api.db_version"
] | [((144, 208), 'flask_migrate.api.db_version', 'api.db_version', (['SQLALCHEMY_DATABASE_URI', 'SQLALCHEMY_MIGRATE_REPO'], {}), '(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)\n', (158, 208), False, 'from flask_migrate import api\n'), ((209, 279), 'flask_migrate.api.downgrade', 'api.downgrade', (['SQLALCHEMY_DATABASE_URI', 'SQLALCHEMY_MIGRATE_REPO', '(v - 1)'], {}), '(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)\n', (222, 279), False, 'from flask_migrate import api\n'), ((284, 348), 'flask_migrate.api.db_version', 'api.db_version', (['SQLALCHEMY_DATABASE_URI', 'SQLALCHEMY_MIGRATE_REPO'], {}), '(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)\n', (298, 348), False, 'from flask_migrate import api\n')] |
from wrapper import primitive, notrace_primitive
import numpy as np
notrace_functions = [
np.ndim, np.shape, np.iscomplexobj, np.result_type
]
def wrap_intdtype(cls):
class IntdtypeSubclass(cls):
__new__ = notrace_primitive(cls.__new__)
return IntdtypeSubclass
def wrap_namespace(old, new):
unchanged_types = {float, int, type(None), type}
int_types = {np.int, np.int8, np.int16, np.int32, np.int64, np.integer}
for name, obj in old.items():
if obj in notrace_functions:
new[name] = notrace_primitive(obj)
elif callable(obj) and type(obj) is not type:
# wrap all legeable functions with primitive decorator
new[name] = primitive(obj)
elif type(obj) is type and obj in int_types:
new[name] = wrap_intdtype(obj)
elif type(obj) in unchanged_types:
new[name] = obj
wrap_namespace(np.__dict__, globals()) # wrap numpy namespace in globals dict
| [
"wrapper.notrace_primitive",
"wrapper.primitive"
] | [((225, 255), 'wrapper.notrace_primitive', 'notrace_primitive', (['cls.__new__'], {}), '(cls.__new__)\n', (242, 255), False, 'from wrapper import primitive, notrace_primitive\n'), ((540, 562), 'wrapper.notrace_primitive', 'notrace_primitive', (['obj'], {}), '(obj)\n', (557, 562), False, 'from wrapper import primitive, notrace_primitive\n'), ((708, 722), 'wrapper.primitive', 'primitive', (['obj'], {}), '(obj)\n', (717, 722), False, 'from wrapper import primitive, notrace_primitive\n')] |
# -*- coding: utf-8 -*-
# <NAME>
# nexus-root
from setuptools import setup
version = '0.1'
setup(version=version,
name='nexus-root',
description="Root your Nexus.",
packages=[
],
scripts=[
],
long_description="""Root your Nexus.""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
include_package_data=True,
keywords='',
author='<NAME>',
author_email='<EMAIL>',
url='http://iandennismiller.com',
dependency_links=[
],
install_requires=[
"requests",
"Fabric",
],
license='MIT',
zip_safe=False,
)
| [
"setuptools.setup"
] | [((94, 474), 'setuptools.setup', 'setup', ([], {'version': 'version', 'name': '"""nexus-root"""', 'description': '"""Root your Nexus."""', 'packages': '[]', 'scripts': '[]', 'long_description': '"""Root your Nexus."""', 'classifiers': '[]', 'include_package_data': '(True)', 'keywords': '""""""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""http://iandennismiller.com"""', 'dependency_links': '[]', 'install_requires': "['requests', 'Fabric']", 'license': '"""MIT"""', 'zip_safe': '(False)'}), "(version=version, name='nexus-root', description='Root your Nexus.',\n packages=[], scripts=[], long_description='Root your Nexus.',\n classifiers=[], include_package_data=True, keywords='', author='<NAME>',\n author_email='<EMAIL>', url='http://iandennismiller.com',\n dependency_links=[], install_requires=['requests', 'Fabric'], license=\n 'MIT', zip_safe=False)\n", (99, 474), False, 'from setuptools import setup\n')] |
import time
import os
import logging
from json import load
from kubernetes import client, config
from common.logging import LoggingUtil
class JobCreate:
"""
Class that uses the k8s API to create, run and delete a job
"""
def __init__(self):
"""
inits the class
"""
# load the run configuration params
self.k8s_config: dict = self.get_config()
# get the log level and directory from the environment
log_level: int = int(os.getenv('LOG_LEVEL', logging.INFO))
log_path: str = os.getenv('LOG_PATH', os.path.join(os.path.dirname(__file__), 'logs'))
# create the dir if it does not exist
if not os.path.exists(log_path):
os.mkdir(log_path)
# create a logger
self.logger = LoggingUtil.init_logging("APSVIZ.JobCreate", level=log_level, line_format='medium', log_file_path=log_path)
@staticmethod
def create_job_object(run, job_details):
"""
creates a k8s job description object
:return: client.V1Job, the job description object
"""
# configure the data volume mount for the container
data_volume_mount = client.V1VolumeMount(
name=run[run['job-type']]['run-config']['DATA_VOLUME_NAME'],
mount_path=run[run['job-type']]['run-config']['DATA_MOUNT_PATH'])
# configure the ssh key volume mount for the container
ssh_volume_mount = client.V1VolumeMount(
name=run[run['job-type']]['run-config']['SSH_VOLUME_NAME'],
read_only=True,
mount_path=run[run['job-type']]['run-config']['SSH_MOUNT_PATH'])
# configure a persistent claim for the data
persistent_volume_claim = client.V1PersistentVolumeClaimVolumeSource(
claim_name=f'{job_details["PVC_CLAIM"]}')
# configure a secret claim for the secret keys
ssh_secret_claim = client.V1SecretVolumeSource(
secret_name=f'{job_details["SECRETS_CLAIM"]}',
default_mode=0o600)
# configure the data volume claim
data_volume = client.V1Volume(
name=run[run['job-type']]['run-config']['DATA_VOLUME_NAME'],
persistent_volume_claim=persistent_volume_claim)
# configure the ssh secret claim
ssh_volume = client.V1Volume(
name=run[run['job-type']]['run-config']['SSH_VOLUME_NAME'],
secret=ssh_secret_claim)
log_dir = client.V1EnvVar(
name='LOG_PATH',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='log-path')))
ssh_username_env = client.V1EnvVar(
name='SSH_USERNAME',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='ssh-username')))
ssh_host = client.V1EnvVar(
name='SSH_HOST',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='ssh-host')))
asgs_db_username = client.V1EnvVar(
name='ASGS_DB_USERNAME',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='asgs-username')))
asgs_db_password = client.V1EnvVar(
name='ASGS_DB_PASSWORD',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='asgs-password')))
asgs_db_host = client.V1EnvVar(
name='ASGS_DB_HOST',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='asgs-host')))
asgs_db_port = client.V1EnvVar(
name='ASGS_DB_PORT',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='asgs-port')))
asgs_db_database = client.V1EnvVar(
name='ASGS_DB_DATABASE',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='asgs-database')))
geo_username = client.V1EnvVar(
name='GEOSERVER_USER',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='geo-username')))
geo_password = client.V1EnvVar(
name='GEOSERVER_PASSWORD',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='geo-password')))
geo_url = client.V1EnvVar(
name='GEOSERVER_URL',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='geo-url')))
geo_host = client.V1EnvVar(
name='GEOSERVER_HOST',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='geo-host')))
geo_proj_path = client.V1EnvVar(
name='GEOSERVER_PROJ_PATH',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='geo-proj-path')))
geo_workspace = client.V1EnvVar(
name='GEOSERVER_WORKSPACE',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='geo-workspace')))
slack_client = client.V1EnvVar(
name='SLACK_ACCESS_TOKEN',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='slack-access-token')))
slack_channel = client.V1EnvVar(
name='SLACK_CHANNEL',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='slack-channel')))
aws_access_key_id = client.V1EnvVar(
name='AWS_ACCESS_KEY_ID',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='aws-access-key-id')))
aws_secret_access_key = client.V1EnvVar(
name='AWS_SECRET_ACCESS_KEY',
value_from=client.V1EnvVarSource(secret_key_ref=client.V1SecretKeySelector(
name='eds-keys', key='aws-secret-access-key')))
# init a list for all the containers in this job
containers: list = []
# add on the resources
for idx, item in enumerate(run[run['job-type']]['run-config']['COMMAND_MATRIX']):
# get the base command line
new_cmd_list: list = run[run['job-type']]['run-config']['COMMAND_LINE'].copy()
# add the command matrix value
new_cmd_list.extend(item)
# set the default number of CPUs
cpus: str = '1'
# find the number of CPUs needed if it is there
if len(item) > 1:
for i, arg in enumerate(item):
if arg.startswith('--cpu'):
cpus = str(item[i+1])
break
# get the baseline set of container resources
resources = {'limits': {'cpu': cpus}, 'requests': {'cpu': cpus, 'memory': run[run['job-type']]['run-config']['MEMORY']}}
# configure the pod template container
container = client.V1Container(
name=run[run['job-type']]['run-config']['JOB_NAME'] + '-' + str(idx),
image=run[run['job-type']]['run-config']['IMAGE'],
command=new_cmd_list,
volume_mounts=[data_volume_mount, ssh_volume_mount],
image_pull_policy='IfNotPresent',
env=[log_dir, ssh_username_env, ssh_host, asgs_db_username, asgs_db_password, asgs_db_host, asgs_db_port, asgs_db_database,
geo_username, geo_password, geo_url, geo_host, geo_proj_path, geo_workspace, slack_client, slack_channel, aws_access_key_id, aws_secret_access_key],
resources=resources,
)
# if idx == 2 or run[run['job-type']]['run-config']['JOB_NAME'].startswith('staging'):
# add the container to the list
containers.append(container)
# create and configure a spec section for the container
template = client.V1PodTemplateSpec(
metadata=client.V1ObjectMeta(labels={"app": run[run['job-type']]['run-config']['JOB_NAME']}),
spec=client.V1PodSpec(restart_policy="Never", containers=containers, volumes=[data_volume, ssh_volume]) # , node_selector={'apsviz-ng': run[run['job-type']]['run-config']['NODE_TYPE']}
)
# create the specification of job deployment
spec = client.V1JobSpec(
template=template,
backoff_limit=1,
ttl_seconds_after_finished=120
)
# instantiate the job object
job = client.V1Job(
api_version="batch/v1",
kind="Job",
metadata=client.V1ObjectMeta(name=run[run['job-type']]['run-config']['JOB_NAME']),
spec=spec)
# save these params onto the run info
run[run['job-type']]['job-config'] = {'job': job, 'job-details': job_details, 'job_id': '?'}
def create_job(self, run) -> str:
"""
creates the k8s job
:param run: the run details
:return: str the job id
"""
# create the API hooks
api_instance = client.BatchV1Api()
job_data = run[run['job-type']]['job-config']
job_details = job_data['job-details']
run_details = run[run['job-type']]['run-config']
# create the job
api_instance.create_namespaced_job(
body=job_data['job'],
namespace=job_details['NAMESPACE'])
# init the return storage
job_id: str = ''
# wait a period of time for the next check
time.sleep(job_data['job-details']['CREATE_SLEEP'])
# get the job run information
jobs = api_instance.list_namespaced_job(namespace=job_details['NAMESPACE'])
# for each item returned
for job in jobs.items:
# is this the one that was launched
if job.metadata.labels['app'] == run_details['JOB_NAME']:
self.logger.debug(f"Found new job: {run_details['JOB_NAME']}, controller-uid: {job.metadata.labels['controller-uid']}, status: {job.status.active}")
# save job id
job_id = str(job.metadata.labels["controller-uid"])
# no need to continue looking
break
# return the job controller uid
return job_id
@staticmethod
def delete_job(run) -> str:
"""
deletes the k8s job
:param run: the run configuration details
:return:
"""
job_data = run[run['job-type']]['job-config']
job_details = job_data['job-details']
run_details = run[run['job-type']]['run-config']
# create an API hook
api_instance = client.BatchV1Api()
# remove the job
api_response = api_instance.delete_namespaced_job(
name=run_details['JOB_NAME'],
namespace=job_details['NAMESPACE'],
body=client.V1DeleteOptions(
propagation_policy='Foreground',
grace_period_seconds=5))
# return the final status of the job
return str(api_response.status)
@staticmethod
def get_config() -> dict:
"""
gets the run configuration
:return: Dict, baseline run params
"""
# get the config file path/name
config_name = os.path.join(os.path.dirname(__file__), '..', 'base_config.json')
# open the config file
with open(config_name, 'r') as json_file:
# load the config items into a dict
data: dict = load(json_file)
# return the config data
return data
def execute(self, run):
"""
Executes the k8s job run
:return: the job ID
"""
# load the baseline config params
job_details = self.k8s_config
# load the k8s configuration
try:
# first try to get the config if this is running on the cluster
config.load_incluster_config()
except config.ConfigException:
try:
# else get the local config
config.load_kube_config(context=job_details['CLUSTER'])
except config.ConfigException:
raise Exception("Could not configure kubernetes python client")
# create the job object
self.create_job_object(run, job_details)
# create and launch the job
job_id = self.create_job(run)
# save these params onto the run info
run[run['job-type']]['job-config']['job_id'] = job_id
| [
"kubernetes.client.V1ObjectMeta",
"kubernetes.client.V1SecretKeySelector",
"time.sleep",
"kubernetes.client.V1VolumeMount",
"kubernetes.client.V1JobSpec",
"os.path.exists",
"kubernetes.client.V1DeleteOptions",
"kubernetes.config.load_incluster_config",
"os.mkdir",
"kubernetes.client.BatchV1Api",
"os.path.dirname",
"kubernetes.client.V1SecretVolumeSource",
"kubernetes.client.V1PersistentVolumeClaimVolumeSource",
"kubernetes.client.V1Volume",
"os.getenv",
"kubernetes.client.V1PodSpec",
"kubernetes.config.load_kube_config",
"common.logging.LoggingUtil.init_logging",
"json.load"
] | [((796, 908), 'common.logging.LoggingUtil.init_logging', 'LoggingUtil.init_logging', (['"""APSVIZ.JobCreate"""'], {'level': 'log_level', 'line_format': '"""medium"""', 'log_file_path': 'log_path'}), "('APSVIZ.JobCreate', level=log_level, line_format=\n 'medium', log_file_path=log_path)\n", (820, 908), False, 'from common.logging import LoggingUtil\n'), ((1185, 1342), 'kubernetes.client.V1VolumeMount', 'client.V1VolumeMount', ([], {'name': "run[run['job-type']]['run-config']['DATA_VOLUME_NAME']", 'mount_path': "run[run['job-type']]['run-config']['DATA_MOUNT_PATH']"}), "(name=run[run['job-type']]['run-config'][\n 'DATA_VOLUME_NAME'], mount_path=run[run['job-type']]['run-config'][\n 'DATA_MOUNT_PATH'])\n", (1205, 1342), False, 'from kubernetes import client, config\n'), ((1449, 1620), 'kubernetes.client.V1VolumeMount', 'client.V1VolumeMount', ([], {'name': "run[run['job-type']]['run-config']['SSH_VOLUME_NAME']", 'read_only': '(True)', 'mount_path': "run[run['job-type']]['run-config']['SSH_MOUNT_PATH']"}), "(name=run[run['job-type']]['run-config'][\n 'SSH_VOLUME_NAME'], read_only=True, mount_path=run[run['job-type']][\n 'run-config']['SSH_MOUNT_PATH'])\n", (1469, 1620), False, 'from kubernetes import client, config\n'), ((1735, 1824), 'kubernetes.client.V1PersistentVolumeClaimVolumeSource', 'client.V1PersistentVolumeClaimVolumeSource', ([], {'claim_name': 'f"""{job_details[\'PVC_CLAIM\']}"""'}), '(claim_name=\n f"{job_details[\'PVC_CLAIM\']}")\n', (1777, 1824), False, 'from kubernetes import client, config\n'), ((1916, 2012), 'kubernetes.client.V1SecretVolumeSource', 'client.V1SecretVolumeSource', ([], {'secret_name': 'f"""{job_details[\'SECRETS_CLAIM\']}"""', 'default_mode': '(384)'}), '(secret_name=f"{job_details[\'SECRETS_CLAIM\']}",\n default_mode=384)\n', (1943, 2012), False, 'from kubernetes import client, config\n'), ((2101, 2230), 'kubernetes.client.V1Volume', 'client.V1Volume', ([], {'name': "run[run['job-type']]['run-config']['DATA_VOLUME_NAME']", 'persistent_volume_claim': 'persistent_volume_claim'}), "(name=run[run['job-type']]['run-config']['DATA_VOLUME_NAME'],\n persistent_volume_claim=persistent_volume_claim)\n", (2116, 2230), False, 'from kubernetes import client, config\n'), ((2315, 2419), 'kubernetes.client.V1Volume', 'client.V1Volume', ([], {'name': "run[run['job-type']]['run-config']['SSH_VOLUME_NAME']", 'secret': 'ssh_secret_claim'}), "(name=run[run['job-type']]['run-config']['SSH_VOLUME_NAME'],\n secret=ssh_secret_claim)\n", (2330, 2419), False, 'from kubernetes import client, config\n'), ((8815, 8903), 'kubernetes.client.V1JobSpec', 'client.V1JobSpec', ([], {'template': 'template', 'backoff_limit': '(1)', 'ttl_seconds_after_finished': '(120)'}), '(template=template, backoff_limit=1,\n ttl_seconds_after_finished=120)\n', (8831, 8903), False, 'from kubernetes import client, config\n'), ((9556, 9575), 'kubernetes.client.BatchV1Api', 'client.BatchV1Api', ([], {}), '()\n', (9573, 9575), False, 'from kubernetes import client, config\n'), ((10006, 10057), 'time.sleep', 'time.sleep', (["job_data['job-details']['CREATE_SLEEP']"], {}), "(job_data['job-details']['CREATE_SLEEP'])\n", (10016, 10057), False, 'import time\n'), ((11142, 11161), 'kubernetes.client.BatchV1Api', 'client.BatchV1Api', ([], {}), '()\n', (11159, 11161), False, 'from kubernetes import client, config\n'), ((495, 531), 'os.getenv', 'os.getenv', (['"""LOG_LEVEL"""', 'logging.INFO'], {}), "('LOG_LEVEL', logging.INFO)\n", (504, 531), False, 'import os\n'), ((690, 714), 'os.path.exists', 'os.path.exists', (['log_path'], {}), '(log_path)\n', (704, 714), False, 'import os\n'), ((728, 746), 'os.mkdir', 'os.mkdir', (['log_path'], {}), '(log_path)\n', (736, 746), False, 'import os\n'), ((11782, 11807), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (11797, 11807), False, 'import os\n'), ((11990, 12005), 'json.load', 'load', (['json_file'], {}), '(json_file)\n', (11994, 12005), False, 'from json import load\n'), ((12395, 12425), 'kubernetes.config.load_incluster_config', 'config.load_incluster_config', ([], {}), '()\n', (12423, 12425), False, 'from kubernetes import client, config\n'), ((592, 617), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (607, 617), False, 'import os\n'), ((8453, 8541), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'labels': "{'app': run[run['job-type']]['run-config']['JOB_NAME']}"}), "(labels={'app': run[run['job-type']]['run-config'][\n 'JOB_NAME']})\n", (8472, 8541), False, 'from kubernetes import client, config\n'), ((8555, 8658), 'kubernetes.client.V1PodSpec', 'client.V1PodSpec', ([], {'restart_policy': '"""Never"""', 'containers': 'containers', 'volumes': '[data_volume, ssh_volume]'}), "(restart_policy='Never', containers=containers, volumes=[\n data_volume, ssh_volume])\n", (8571, 8658), False, 'from kubernetes import client, config\n'), ((9097, 9169), 'kubernetes.client.V1ObjectMeta', 'client.V1ObjectMeta', ([], {'name': "run[run['job-type']]['run-config']['JOB_NAME']"}), "(name=run[run['job-type']]['run-config']['JOB_NAME'])\n", (9116, 9169), False, 'from kubernetes import client, config\n'), ((11354, 11433), 'kubernetes.client.V1DeleteOptions', 'client.V1DeleteOptions', ([], {'propagation_policy': '"""Foreground"""', 'grace_period_seconds': '(5)'}), "(propagation_policy='Foreground', grace_period_seconds=5)\n", (11376, 11433), False, 'from kubernetes import client, config\n'), ((12542, 12597), 'kubernetes.config.load_kube_config', 'config.load_kube_config', ([], {'context': "job_details['CLUSTER']"}), "(context=job_details['CLUSTER'])\n", (12565, 12597), False, 'from kubernetes import client, config\n'), ((2566, 2625), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""log-path"""'}), "(name='eds-keys', key='log-path')\n", (2592, 2625), False, 'from kubernetes import client, config\n'), ((2783, 2846), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""ssh-username"""'}), "(name='eds-keys', key='ssh-username')\n", (2809, 2846), False, 'from kubernetes import client, config\n'), ((2992, 3051), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""ssh-host"""'}), "(name='eds-keys', key='ssh-host')\n", (3018, 3051), False, 'from kubernetes import client, config\n'), ((3213, 3277), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""asgs-username"""'}), "(name='eds-keys', key='asgs-username')\n", (3239, 3277), False, 'from kubernetes import client, config\n'), ((3439, 3503), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""asgs-password"""'}), "(name='eds-keys', key='asgs-password')\n", (3465, 3503), False, 'from kubernetes import client, config\n'), ((3657, 3717), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""asgs-host"""'}), "(name='eds-keys', key='asgs-host')\n", (3683, 3717), False, 'from kubernetes import client, config\n'), ((3871, 3931), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""asgs-port"""'}), "(name='eds-keys', key='asgs-port')\n", (3897, 3931), False, 'from kubernetes import client, config\n'), ((4093, 4157), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""asgs-database"""'}), "(name='eds-keys', key='asgs-database')\n", (4119, 4157), False, 'from kubernetes import client, config\n'), ((4313, 4376), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""geo-username"""'}), "(name='eds-keys', key='geo-username')\n", (4339, 4376), False, 'from kubernetes import client, config\n'), ((4536, 4599), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""geo-password"""'}), "(name='eds-keys', key='geo-password')\n", (4562, 4599), False, 'from kubernetes import client, config\n'), ((4749, 4807), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""geo-url"""'}), "(name='eds-keys', key='geo-url')\n", (4775, 4807), False, 'from kubernetes import client, config\n'), ((4959, 5018), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""geo-host"""'}), "(name='eds-keys', key='geo-host')\n", (4985, 5018), False, 'from kubernetes import client, config\n'), ((5180, 5244), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""geo-proj-path"""'}), "(name='eds-keys', key='geo-proj-path')\n", (5206, 5244), False, 'from kubernetes import client, config\n'), ((5406, 5470), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""geo-workspace"""'}), "(name='eds-keys', key='geo-workspace')\n", (5432, 5470), False, 'from kubernetes import client, config\n'), ((5630, 5699), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""slack-access-token"""'}), "(name='eds-keys', key='slack-access-token')\n", (5656, 5699), False, 'from kubernetes import client, config\n'), ((5855, 5919), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""slack-channel"""'}), "(name='eds-keys', key='slack-channel')\n", (5881, 5919), False, 'from kubernetes import client, config\n'), ((6083, 6151), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""aws-access-key-id"""'}), "(name='eds-keys', key='aws-access-key-id')\n", (6109, 6151), False, 'from kubernetes import client, config\n'), ((6323, 6395), 'kubernetes.client.V1SecretKeySelector', 'client.V1SecretKeySelector', ([], {'name': '"""eds-keys"""', 'key': '"""aws-secret-access-key"""'}), "(name='eds-keys', key='aws-secret-access-key')\n", (6349, 6395), False, 'from kubernetes import client, config\n')] |
#!python3.7
import sys
import os
import subprocess
import serial
# need to find the correct location for bundled packages
script_path = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(script_path, 'pkgs'))
sys.path.insert(0, os.path.join(script_path, 'a-series-programmer', 'python'))
sys.path.insert(0, os.path.join(script_path, 'q-series', 'python'))
try:
from tinyfpgaq import TinyFPGAQ
except:
print('To run this program, copy the source code {} to '.format(__file__),
'\nTinyFPGA-Programmer-Application folder and run from that folder')
sys.exit(0)
# Host command set
# flashfpga <filename> Load contents of <filename> to APPFPGA flash region
# runfpga Reset the FPGA, read APPFPGA flash region and load/configure FPGA
# change-boot-mode {1, 2} 1 ==> boot-loader, 2 ==> old-behavior
comport = None
# Command parsing loop
def help_cmd(*args):
print('help command ', args)
print('Available commands are')
[ print('{:20s} {}'.format(cmd, cmdhelptxt[cmd])) for cmd in cmdacts ]
def run_application(*args):
print('Running the command: ', ' '.join(args))
p = subprocess.Popen(args)
try:
out, err = p.communicate(timeout=10*60)
#print(out)
#print(err)
except subprocess.TimeoutExpired:
print('[S3-CLI] timeout exiting ', ' '.join(args))
p.kill()
out, err = p.communicate()
def flash_fpga_cmd(*args):
if (comport == None):
print('Please specify COM port using the comport command')
return
print('flash fpga command ', args, 'type(args) = ', type(args), 'len(args) = ', len(args))
runcmd = [ 'python', 'tinyfpga-programmer-gui.py', '--mode', 'fpga', '--appfpga', args[0], '--port', comport ]
run_application(*runcmd)
def run_fpga_cmd(*args):
if (comport == None):
print('Please specify COM port using the comport command')
return
print('run fpga command ', args)
runcmd = [ 'python', 'tinyfpga-programmer-gui.py', '--mode', 'fpga', '--reset', '--port', comport ]
run_application(*runcmd)
def change_boot_mode_cmd(*args):
if (comport == None):
print('Please specify COM port using the comport command')
return
new_mode = int(args[0])
print('change boot mode command', args, 'new mode=', new_mode)
with serial.Serial(comport, 115200, timeout=60, writeTimeout=60) as ser:
addr = 0x1F000 + 8
if new_mode == 1:
bitstream = b'\x55' * 4
else:
bitstream = b'\xAA' * 4
print('Setting boot mode to ', new_mode, ':', bitstream.hex())
fpga = TinyFPGAQ(ser)
print(fpga)
ret = fpga.program_bitstream(addr, bitstream, 'boot-mode')
def quit_cmd(*args):
sys.exit(0)
def set_comport_cmd(*args):
global comport
print('set comport command ', args, args[0])
comport = args[0]
def flash_m4app_cmd(*args):
if (comport == None):
print('Please specify COM port using the comport command')
return
print('flash m4app command ', args, 'type(args) = ', type(args), 'len(args) = ', len(args))
runcmd = [ 'python', 'tinyfpga-programmer-gui.py', '--mode', 'm4', '--m4app', args[0], '--port', comport ]
run_application(*runcmd)
def run_m4app_cmd(*args):
if (comport == None):
print('Please specify COM port using the comport command')
return
print('run m4app command ', args)
runcmd = [ 'python', 'tinyfpga-programmer-gui.py', '--mode', 'm4', '--reset', '--port', comport ]
run_application(*runcmd)
cmdacts = { 'help': help_cmd,
'flashfpga': flash_fpga_cmd,
'runfpga': run_fpga_cmd,
'change-boot-mode': change_boot_mode_cmd,
'quit': quit_cmd,
'comport': set_comport_cmd
}
cmdhelptxt = { 'help': 'List available commands and basic usage',
'flashfpga': 'Usage: flashfpga <filename> Write file bitstream contents to the APPFPGA flash region',
'runfpga': 'Configure FPGA and run the program',
'change-boot-mode': 'Change boot mode, 1 => change to boot-loader-mode, 2 => change to application-mode ',
'quit': 'Quit this program',
'comport': 'Usage: comport <comport>, set comport'
}
def readAndExecuteCommandLineInput():
try:
s = input('[S3]> ')
cmdargs = s.split()
if len(cmdargs) == 0:
return False
if (cmdargs[0] == 'quit'):
return True
if (cmdargs[0] in cmdacts):
cmdacts[cmdargs[0]](*cmdargs[1:])
return False
else:
print('Unknown command:', cmdargs[0])
except:
print('Invalid input:', s, '==')
[ print(cmd, end=' ') for cmd in cmdargs ]
print('======')
pass
if __name__ == "__main__":
quitprog = False
try:
while not quitprog:
quitprog = readAndExecuteCommandLineInput()
except SystemExit:
print('Exiting...')
os._exit(1)
except KeyboardInterrupt:
sys.exit(1)
| [
"subprocess.Popen",
"tinyfpgaq.TinyFPGAQ",
"os.path.join",
"os.path.realpath",
"serial.Serial",
"os._exit",
"sys.exit"
] | [((153, 179), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (169, 179), False, 'import os\n'), ((200, 233), 'os.path.join', 'os.path.join', (['script_path', '"""pkgs"""'], {}), "(script_path, 'pkgs')\n", (212, 233), False, 'import os\n'), ((254, 312), 'os.path.join', 'os.path.join', (['script_path', '"""a-series-programmer"""', '"""python"""'], {}), "(script_path, 'a-series-programmer', 'python')\n", (266, 312), False, 'import os\n'), ((333, 380), 'os.path.join', 'os.path.join', (['script_path', '"""q-series"""', '"""python"""'], {}), "(script_path, 'q-series', 'python')\n", (345, 380), False, 'import os\n'), ((1153, 1175), 'subprocess.Popen', 'subprocess.Popen', (['args'], {}), '(args)\n', (1169, 1175), False, 'import subprocess\n'), ((2740, 2751), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2748, 2751), False, 'import sys\n'), ((591, 602), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (599, 602), False, 'import sys\n'), ((2329, 2388), 'serial.Serial', 'serial.Serial', (['comport', '(115200)'], {'timeout': '(60)', 'writeTimeout': '(60)'}), '(comport, 115200, timeout=60, writeTimeout=60)\n', (2342, 2388), False, 'import serial\n'), ((2614, 2628), 'tinyfpgaq.TinyFPGAQ', 'TinyFPGAQ', (['ser'], {}), '(ser)\n', (2623, 2628), False, 'from tinyfpgaq import TinyFPGAQ\n'), ((4981, 4992), 'os._exit', 'os._exit', (['(1)'], {}), '(1)\n', (4989, 4992), False, 'import os\n'), ((5028, 5039), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (5036, 5039), False, 'import sys\n')] |
# Copyright 2014 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from collections import defaultdict
class StubACLStore(object):
"""
Stub version of the ACL Store class.
The methods prefixed test_ are for unit test script use.
"""
def __init__(self, test_case):
self.test_case = test_case # Used to call assertEqual etc.
self.test_endpoint_acls = {}
self.test_endpoint_acl_changes = defaultdict(int)
def update_endpoint_rules(self, endpoint_uuid, rules):
self.test_endpoint_acls[endpoint_uuid] = rules
self.test_endpoint_acl_changes[endpoint_uuid] += 1
def test_assert_endpoints(self, endpoint_list):
"""
Assert that the ACL Store contains the right endpoints.
- endpoint_list: The list of endpoint UUIDs to check for
"""
self.test_case.assertItemsEqual(self.test_endpoint_acls.keys(),
endpoint_list)
def test_assert_endpoint_acls(self, endpoint_uuid, update_count, acls):
"""
Assert that the ACL Store contains the right ACLs for an endpoint.
- endpoint_uuid: The endpoint to check the ACLs for
- update_count: The number of updates made to those ACLs, or None to
skip this check
- acls: The ACLs to check for (the ACL collection object on the Calico
ACL API Proposal)
"""
# Check that the ACLs for the endpoint have been updated the right
# number of times.
if update_count:
self.test_case.assertEqual(
self.test_endpoint_acl_changes[endpoint_uuid],
update_count
)
# Check that the ACLs themselves are correct
ep_acls = self.test_endpoint_acls[endpoint_uuid]
self.test_case.assertDictEqual(acls, ep_acls)
| [
"collections.defaultdict"
] | [((967, 983), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (978, 983), False, 'from collections import defaultdict\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 20 18:16:36 2018
@author: Nakyilkim
"""
### chapter 6 Recursion
## pg 149
## 크기 순서대로 숫자 뽑기
## 재귀적구조
### 파라미터: 전체 원소들 번호, 뽑힌 숫자들, 뽑아야될 숫자 갯수
### 함수가 한번 돌때 기존에 있던 숫자들 보다 큰 숫자들 선택한다
### 그리고 재귀 (다시 그 숫자보다 큰 수 선택) + pop(숫자를 빼면서 for문 돈다)
### sort해서 넣어야
######## 완전 탐색에 유용한 도구다!
def pick(totalNum, picked, toPick):
## Base, 더 이상 고를 원소가 업을 때
if(toPick == 0):
print(picked) # 목적이 프린트에 있으므로 따로 리턴값이 없다
return
if len(picked) == 0:
#index
smallest = 0
else:
smallest = picked[-1] + 1
for index in range(smallest, len(totalNum)):
picked.append(index)
pick(totalNum, picked, toPick -1)
picked.pop()
### PG 150
### 보글게임
## 보드판 y,x에서 시작할 때
## word가 존재하는 지 여부를 찾는 문제
## 1. 첫글자 확인
## 2. 인접 글자 중 둘째 글자 확인
## 3. 반복
## 기저사례
# 1. 첫글자가 해당 위치 X
# 2. 글자수 자체가 한 글자 짜리 일떼
#### 탐색을 위한 도구
def hasword(y,x, word, board):
## 책에서는 board 인자를 안 넣고
## 아래 값을 밖에서 선언해서 일단 안에 넣었음 (전역변수 처리해야 될 듯)
dx = [-1,-1,-1, 1, 1, 1, 0, 0]
dy = [-1, 0, 1,-1, 0, 1, -1, -1]
if len(board[0]) <= x + 1 or len(board) < y +1:
return False
if board[x][y] != word[0]:
return False
if len(word) == 1:
return True
###### 인접한 8칸 탐색하는 법 중요
for direction in range(8):
nextY = y + dy[direction]
nextX = x + dx[direction]
if(hasword(nextY,nextX, word[1:], board)):
return True
return
### 159 pg
### 6.4 풀이 : 소풍
### 첫 학생에게 친구인 짝을 만든다
### 남은 학생들 대해 친구인 짝을 만든다
### 반복
import random
random.seed(1234)
num_student = 4
areFriends = [[1 for i in range(num_student)] for j in range(num_student)]
taken = [False for i in range(10)]
def countPairs(num_student,taken):
firstFree = -1
for i in range(num_student):
if taken[i] != 1:
firstFree = i
break
## 기저 사례
if firstFree == -1 :
return 1 # 한 가지 방법이 완성되는 것이므로
ret = 0
## firstfree의 짝을 찾아주자
print("FirstFree",firstFree)
for pairs in range(firstFree+1, num_student):
if taken[pairs] != True and areFriends[firstFree][pairs] == 1:
taken[pairs] = True
taken[firstFree] = True
print("FirstFree, pairs",firstFree,pairs)
ret += countPairs(num_student,taken)
taken[pairs] = False
taken[firstFree] = False
return ret
## 6.5 게임판 덮기 159
# 점 x,y,가 주어질 때 덮는 경우의 수
## my_code 미완성
board = [[random.choice(["#","."]) for i in range(7)] for j in range(3)]
cover_type = [[(0, 0) , (1, 0) , (0,1)],
[(0, 0), (-1, 0) , (0, 1)],
[(0, 0), (-1, 0), (0, -1)],
[(0, 0), (1, 0), (0, -1)]]
def checkAvaliable(board):
avaliable_list = []
for x in range(len(board)):
for y in range(len(board[0])):
print("x,y",x,y)
if board[x][y] == ".":
possible = 0
for _, block1, block2 in cover_type:
try:
x1 = x + block1[0]
y1 = y + block1[1]
x2 = x + block2[0]
y2 = y + block2[1]
## 인덱스가 뒤로 넘어가는거 주의해야!
## 인덱스가 양수라는 걸 조건으로 주어야..
if x1>=0 and y1>=0 and x2 >=0 and y2 >=0 and board[x1][y1] == '.' and board[x2][y2] =='.':
print("Possible", x,y)
print("Possible Block Type", block1,block2)
possible = True
except IndexError:
pass
if possible :
avaliable_list.append((x,y))
return avaliable_list
## Avaliable_list 재귀적으로 이용?
### 책 코드
cover_type = [[(0, 0) , (1, 0) , (0,1)],
[(0, 0), (0, 1) , (1, 1)],
[(0, 0), (1, 0), (1, 1)],
[(0, 0), (1, 0), (1, -1)]]
### 덮었던 블록을 없애는 알고리즘
| [
"random.choice",
"random.seed"
] | [((1646, 1663), 'random.seed', 'random.seed', (['(1234)'], {}), '(1234)\n', (1657, 1663), False, 'import random\n'), ((2583, 2608), 'random.choice', 'random.choice', (["['#', '.']"], {}), "(['#', '.'])\n", (2596, 2608), False, 'import random\n')] |
# * Copyright (c) 2020-2021. Authors: see NOTICE file.
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import logging
from functools import cached_property
from typing import Optional
from pint import Quantity
from pims.formats import AbstractFormat
from pims.formats.utils.abstract import CachedDataPath
from pims.formats.utils.checker import SignatureChecker
from pims.formats.utils.engines.vips import (
VipsParser, VipsReader,
VipsSpatialConvertor
)
from pims.formats.utils.histogram import DefaultHistogramReader
from pims.formats.utils.structures.metadata import ImageMetadata
from pims.utils import UNIT_REGISTRY
from pims.utils.types import parse_datetime, parse_float
log = logging.getLogger("pims.formats")
class WebPChecker(SignatureChecker):
@classmethod
def match(cls, pathlike: CachedDataPath) -> bool:
buf = cls.get_signature(pathlike)
return (len(buf) > 13 and
buf[0] == 0x52 and
buf[1] == 0x49 and
buf[2] == 0x46 and
buf[3] == 0x46 and
buf[8] == 0x57 and
buf[9] == 0x45 and
buf[10] == 0x42 and
buf[11] == 0x50 and
buf[12] == 0x56 and
buf[13] == 0x50)
class WebPParser(VipsParser):
def parse_main_metadata(self) -> ImageMetadata:
imd = super().parse_main_metadata()
# Do not count alpha channel if any
if imd.n_channels in (2, 4):
imd.n_channels = imd.n_channels - 1
imd.n_channels_per_read = imd.n_channels
return imd
def parse_known_metadata(self) -> ImageMetadata:
imd = super().parse_known_metadata()
raw = self.format.raw_metadata
# Tags reference: https://exiftool.org/TagNames/RIFF.html
desc_fields = ("RIFF.Comment", "EXIF.ImageDescription", "EXIF.UserComment")
imd.description = raw.get_first_value(desc_fields)
date_fields = (
"RIFF.DateTimeOriginal", "EXIF.CreationDate", "EXIF.DateTimeOriginal",
"EXIF.ModifyDate"
)
imd.acquisition_datetime = parse_datetime(raw.get_first_value(date_fields))
imd.physical_size_x = self.parse_physical_size(
raw.get_value("EXIF.XResolution"),
raw.get_value("EXIF.ResolutionUnit")
)
imd.physical_size_y = self.parse_physical_size(
raw.get_value("EXIF.YResolution"),
raw.get_value("EXIF.ResolutionUnit")
)
if imd.duration > 1:
total_time = raw.get_value(
"RIFF.Duration"
) # String such as "0.84 s" -> all sequence duration
if total_time:
frame_rate = imd.duration / UNIT_REGISTRY(total_time)
imd.frame_rate = frame_rate.to("Hz")
imd.is_complete = True
return imd
@staticmethod
def parse_physical_size(
physical_size: Optional[str], unit: Optional[str]
) -> Optional[Quantity]:
supported_units = ("meters", "inches", "cm")
if physical_size is not None and parse_float(
physical_size
) is not None and unit in supported_units:
return parse_float(physical_size) * UNIT_REGISTRY(unit)
return None
class WebPFormat(AbstractFormat):
"""WebP Format. Do not support (yet) WebP sequences.
References
https://libvips.github.io/libvips/API/current/VipsForeignSave.html#vips-webpload
https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html#webp
https://exiftool.org/TagNames/RIFF.html
"""
checker_class = WebPChecker
parser_class = WebPParser
reader_class = VipsReader
histogram_reader_class = DefaultHistogramReader
convertor_class = VipsSpatialConvertor
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._enabled = True
@classmethod
def is_spatial(cls):
return True
@cached_property
def need_conversion(self):
imd = self.main_imd
return imd.width > 1024 or imd.height > 1024
@property
def media_type(self):
return "image/webp"
| [
"logging.getLogger",
"pims.utils.UNIT_REGISTRY",
"pims.utils.types.parse_float"
] | [((1230, 1263), 'logging.getLogger', 'logging.getLogger', (['"""pims.formats"""'], {}), "('pims.formats')\n", (1247, 1263), False, 'import logging\n'), ((3633, 3659), 'pims.utils.types.parse_float', 'parse_float', (['physical_size'], {}), '(physical_size)\n', (3644, 3659), False, 'from pims.utils.types import parse_datetime, parse_float\n'), ((3746, 3772), 'pims.utils.types.parse_float', 'parse_float', (['physical_size'], {}), '(physical_size)\n', (3757, 3772), False, 'from pims.utils.types import parse_datetime, parse_float\n'), ((3775, 3794), 'pims.utils.UNIT_REGISTRY', 'UNIT_REGISTRY', (['unit'], {}), '(unit)\n', (3788, 3794), False, 'from pims.utils import UNIT_REGISTRY\n'), ((3274, 3299), 'pims.utils.UNIT_REGISTRY', 'UNIT_REGISTRY', (['total_time'], {}), '(total_time)\n', (3287, 3299), False, 'from pims.utils import UNIT_REGISTRY\n')] |
r"""
Base class for polyhedra, part 6
Define methods related to plotting including affine hull projection.
"""
# ****************************************************************************
# Copyright (C) 2008-2012 <NAME> <<EMAIL>>
# Copyright (C) 2011-2015 <NAME> <<EMAIL>>
# Copyright (C) 2012-2018 <NAME>
# Copyright (C) 2013 <NAME>
# Copyright (C) 2014-2017 <NAME>
# Copyright (C) 2014-2019 <NAME>
# Copyright (C) 2015 <NAME>
# Copyright (C) 2015-2017 <NAME>
# Copyright (C) 2015-2017 <NAME>
# Copyright (C) 2015-2018 <NAME>
# Copyright (C) 2015-2020 <NAME> <labbe at math.huji.ac.il>
# Copyright (C) 2015-2021 <NAME>
# Copyright (C) 2016-2019 <NAME>
# Copyright (C) 2017 <NAME>
# Copyright (C) 2017-2018 <NAME>
# Copyright (C) 2019 <NAME>
# Copyright (C) 2019-2020 <NAME>
# Copyright (C) 2019-2020 <NAME>
# Copyright (C) 2019-2021 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# https://www.gnu.org/licenses/
# ****************************************************************************
from sage.misc.cachefunc import cached_method
from sage.modules.vector_space_morphism import linear_transformation
from sage.matrix.constructor import matrix
from sage.modules.free_module_element import vector
from sage.rings.qqbar import AA
from sage.geometry.convex_set import AffineHullProjectionData
from .base5 import Polyhedron_base5
class Polyhedron_base6(Polyhedron_base5):
r"""
Methods related to plotting including affine hull projection.
TESTS::
sage: from sage.geometry.polyhedron.base6 import Polyhedron_base6
sage: P = polytopes.cube()
sage: Polyhedron_base6.plot(P)
Graphics3d Object
sage: Polyhedron_base6.tikz(P)
\begin{tikzpicture}%
[x={(1.000000cm, 0.000000cm)},
y={(-0.000000cm, 1.000000cm)},
z={(0.000000cm, -0.000000cm)},
scale=1.000000,
back/.style={loosely dotted, thin},
edge/.style={color=blue!95!black, thick},
facet/.style={fill=blue!95!black,fill opacity=0.800000},
vertex/.style={inner sep=1pt,circle,draw=green!25!black,fill=green!75!black,thick}]
%
%
%% This TikZ-picture was produced with Sagemath version ...
%% with the command: ._tikz_3d_in_3d and parameters:
%% view = [0, 0, 1]
%% angle = 0
%% scale = 1
%% edge_color = blue!95!black
%% facet_color = blue!95!black
%% opacity = 0.8
%% vertex_color = green
%% axis = False
<BLANKLINE>
%% Coordinate of the vertices:
%%
\coordinate (1.00000, -1.00000, -1.00000) at (1.00000, -1.00000, -1.00000);
\coordinate (1.00000, 1.00000, -1.00000) at (1.00000, 1.00000, -1.00000);
\coordinate (1.00000, 1.00000, 1.00000) at (1.00000, 1.00000, 1.00000);
\coordinate (1.00000, -1.00000, 1.00000) at (1.00000, -1.00000, 1.00000);
\coordinate (-1.00000, -1.00000, 1.00000) at (-1.00000, -1.00000, 1.00000);
\coordinate (-1.00000, -1.00000, -1.00000) at (-1.00000, -1.00000, -1.00000);
\coordinate (-1.00000, 1.00000, -1.00000) at (-1.00000, 1.00000, -1.00000);
\coordinate (-1.00000, 1.00000, 1.00000) at (-1.00000, 1.00000, 1.00000);
%%
%%
%% Drawing edges in the back
%%
\draw[edge,back] (1.00000, -1.00000, -1.00000) -- (1.00000, 1.00000, -1.00000);
\draw[edge,back] (1.00000, -1.00000, -1.00000) -- (1.00000, -1.00000, 1.00000);
\draw[edge,back] (1.00000, -1.00000, -1.00000) -- (-1.00000, -1.00000, -1.00000);
\draw[edge,back] (1.00000, 1.00000, -1.00000) -- (1.00000, 1.00000, 1.00000);
\draw[edge,back] (1.00000, 1.00000, -1.00000) -- (-1.00000, 1.00000, -1.00000);
\draw[edge,back] (-1.00000, -1.00000, 1.00000) -- (-1.00000, -1.00000, -1.00000);
\draw[edge,back] (-1.00000, -1.00000, -1.00000) -- (-1.00000, 1.00000, -1.00000);
\draw[edge,back] (-1.00000, 1.00000, -1.00000) -- (-1.00000, 1.00000, 1.00000);
%%
%%
%% Drawing vertices in the back
%%
\node[vertex] at (1.00000, -1.00000, -1.00000) {};
\node[vertex] at (1.00000, 1.00000, -1.00000) {};
\node[vertex] at (-1.00000, 1.00000, -1.00000) {};
\node[vertex] at (-1.00000, -1.00000, -1.00000) {};
%%
%%
%% Drawing the facets
%%
\fill[facet] (-1.00000, 1.00000, 1.00000) -- (1.00000, 1.00000, 1.00000) -- (1.00000, -1.00000, 1.00000) -- (-1.00000, -1.00000, 1.00000) -- cycle {};
%%
%%
%% Drawing edges in the front
%%
\draw[edge] (1.00000, 1.00000, 1.00000) -- (1.00000, -1.00000, 1.00000);
\draw[edge] (1.00000, 1.00000, 1.00000) -- (-1.00000, 1.00000, 1.00000);
\draw[edge] (1.00000, -1.00000, 1.00000) -- (-1.00000, -1.00000, 1.00000);
\draw[edge] (-1.00000, -1.00000, 1.00000) -- (-1.00000, 1.00000, 1.00000);
%%
%%
%% Drawing the vertices in the front
%%
\node[vertex] at (1.00000, 1.00000, 1.00000) {};
\node[vertex] at (1.00000, -1.00000, 1.00000) {};
\node[vertex] at (-1.00000, -1.00000, 1.00000) {};
\node[vertex] at (-1.00000, 1.00000, 1.00000) {};
%%
%%
\end{tikzpicture}
sage: Q = polytopes.hypercube(4)
sage: Polyhedron_base6.show(Q)
sage: Polyhedron_base6.schlegel_projection(Q)
The projection of a polyhedron into 3 dimensions
sage: R = polytopes.simplex(5)
sage: Polyhedron_base6.affine_hull(R)
A 5-dimensional polyhedron in ZZ^6 defined as the convex hull of 1 vertex and 5 lines
sage: Polyhedron_base6.affine_hull_projection(R)
A 5-dimensional polyhedron in ZZ^5 defined as the convex hull of 6 vertices
"""
def plot(self,
point=None, line=None, polygon=None, # None means unspecified by the user
wireframe='blue', fill='green',
position=None,
orthonormal=True, # whether to use orthonormal projections
**kwds):
r"""
Return a graphical representation.
INPUT:
- ``point``, ``line``, ``polygon`` -- Parameters to pass to
point (0d), line (1d), and polygon (2d) plot commands.
Allowed values are:
* A Python dictionary to be passed as keywords to the plot
commands.
* A string or triple of numbers: The color. This is
equivalent to passing the dictionary ``{'color':...}``.
* ``False``: Switches off the drawing of the corresponding
graphics object
- ``wireframe``, ``fill`` -- Similar to ``point``, ``line``,
and ``polygon``, but ``fill`` is used for the graphics
objects in the dimension of the polytope (or of dimension 2
for higher dimensional polytopes) and ``wireframe`` is used
for all lower-dimensional graphics objects
(default: 'green' for ``fill`` and 'blue' for ``wireframe``)
- ``position`` -- positive number; the position to take the projection
point in Schlegel diagrams.
- ``orthonormal`` -- Boolean (default: True); whether to use
orthonormal projections.
- ``**kwds`` -- optional keyword parameters that are passed to
all graphics objects.
OUTPUT:
A (multipart) graphics object.
EXAMPLES::
sage: square = polytopes.hypercube(2)
sage: point = Polyhedron([[1,1]])
sage: line = Polyhedron([[1,1],[2,1]])
sage: cube = polytopes.hypercube(3)
sage: hypercube = polytopes.hypercube(4)
By default, the wireframe is rendered in blue and the fill in green::
sage: square.plot() # optional - sage.plot
Graphics object consisting of 6 graphics primitives
sage: point.plot() # optional - sage.plot
Graphics object consisting of 1 graphics primitive
sage: line.plot() # optional - sage.plot
Graphics object consisting of 2 graphics primitives
sage: cube.plot() # optional - sage.plot
Graphics3d Object
sage: hypercube.plot() # optional - sage.plot
Graphics3d Object
Draw the lines in red and nothing else::
sage: square.plot(point=False, line='red', polygon=False) # optional - sage.plot
Graphics object consisting of 4 graphics primitives
sage: point.plot(point=False, line='red', polygon=False) # optional - sage.plot
Graphics object consisting of 0 graphics primitives
sage: line.plot(point=False, line='red', polygon=False) # optional - sage.plot
Graphics object consisting of 1 graphics primitive
sage: cube.plot(point=False, line='red', polygon=False) # optional - sage.plot
Graphics3d Object
sage: hypercube.plot(point=False, line='red', polygon=False) # optional - sage.plot
Graphics3d Object
Draw points in red, no lines, and a blue polygon::
sage: square.plot(point={'color':'red'}, line=False, polygon=(0,0,1)) # optional - sage.plot
Graphics object consisting of 2 graphics primitives
sage: point.plot(point={'color':'red'}, line=False, polygon=(0,0,1)) # optional - sage.plot
Graphics object consisting of 1 graphics primitive
sage: line.plot(point={'color':'red'}, line=False, polygon=(0,0,1)) # optional - sage.plot
Graphics object consisting of 1 graphics primitive
sage: cube.plot(point={'color':'red'}, line=False, polygon=(0,0,1)) # optional - sage.plot
Graphics3d Object
sage: hypercube.plot(point={'color':'red'}, line=False, polygon=(0,0,1)) # optional - sage.plot
Graphics3d Object
If we instead use the ``fill`` and ``wireframe`` options, the
coloring depends on the dimension of the object::
sage: square.plot(fill='green', wireframe='red') # optional - sage.plot
Graphics object consisting of 6 graphics primitives
sage: point.plot(fill='green', wireframe='red') # optional - sage.plot
Graphics object consisting of 1 graphics primitive
sage: line.plot(fill='green', wireframe='red') # optional - sage.plot
Graphics object consisting of 2 graphics primitives
sage: cube.plot(fill='green', wireframe='red') # optional - sage.plot
Graphics3d Object
sage: hypercube.plot(fill='green', wireframe='red') # optional - sage.plot
Graphics3d Object
It is possible to draw polyhedra up to dimension 4, no matter what the
ambient dimension is::
sage: hcube = polytopes.hypercube(5)
sage: facet = hcube.facets()[0].as_polyhedron();facet
A 4-dimensional polyhedron in ZZ^5 defined as the convex hull of 16 vertices
sage: facet.plot() # optional - sage.plot
Graphics3d Object
TESTS::
sage: for p in square.plot(): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
blue Point set defined by 4 point(s)
blue Line defined by 2 points
blue Line defined by 2 points
blue Line defined by 2 points
blue Line defined by 2 points
green Polygon defined by 4 points
sage: for p in line.plot(): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
blue Point set defined by 2 point(s)
green Line defined by 2 points
sage: for p in point.plot(): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
green Point set defined by 1 point(s)
Draw the lines in red and nothing else::
sage: for p in square.plot(point=False, line='red', polygon=False): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
red Line defined by 2 points
red Line defined by 2 points
red Line defined by 2 points
red Line defined by 2 points
Draw vertices in red, no lines, and a blue polygon::
sage: for p in square.plot(point={'color':'red'}, line=False, polygon=(0,0,1)): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
red Point set defined by 4 point(s)
(0, 0, 1) Polygon defined by 4 points
sage: for p in line.plot(point={'color':'red'}, line=False, polygon=(0,0,1)): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
red Point set defined by 2 point(s)
sage: for p in point.plot(point={'color':'red'}, line=False, polygon=(0,0,1)): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
red Point set defined by 1 point(s)
Draw in red without wireframe::
sage: for p in square.plot(wireframe=False, fill="red"): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
red Polygon defined by 4 points
sage: for p in line.plot(wireframe=False, fill="red"): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
red Line defined by 2 points
sage: for p in point.plot(wireframe=False, fill="red"): # optional - sage.plot
....: print("{} {}".format(p.options()['rgbcolor'], p))
red Point set defined by 1 point(s)
We try to draw the polytope in 2 or 3 dimensions::
sage: type(Polyhedron(ieqs=[(1,)]).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
sage: type(polytopes.hypercube(1).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
sage: type(polytopes.hypercube(2).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
sage: type(polytopes.hypercube(3).plot()) # optional - sage.plot
<class 'sage.plot.plot3d.base.Graphics3dGroup'>
In 4d a projection to 3d is used::
sage: type(polytopes.hypercube(4).plot()) # optional - sage.plot
<class 'sage.plot.plot3d.base.Graphics3dGroup'>
sage: type(polytopes.hypercube(5).plot()) # optional - sage.plot
Traceback (most recent call last):
...
NotImplementedError: plotting of 5-dimensional polyhedra not implemented
If the polyhedron is not full-dimensional, the :meth:`affine_hull_projection` is used if necessary::
sage: type(Polyhedron([(0,), (1,)]).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
sage: type(Polyhedron([(0,0), (1,1)]).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
sage: type(Polyhedron([(0,0,0), (1,1,1)]).plot()) # optional - sage.plot
<class 'sage.plot.plot3d.base.Graphics3dGroup'>
sage: type(Polyhedron([(0,0,0,0), (1,1,1,1)]).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
sage: type(Polyhedron([(0,0,0,0,0), (1,1,1,1,1)]).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
sage: type(Polyhedron([(0,0,0,0), (1,1,1,1), (1,0,0,0)]).plot()) # optional - sage.plot
<class 'sage.plot.graphics.Graphics'>
TESTS:
Check that :trac:`30015` is fixed::
sage: fcube = polytopes.hypercube(4)
sage: tfcube = fcube.face_truncation(fcube.faces(0)[0])
sage: sp = tfcube.schlegel_projection()
sage: for face in tfcube.faces(2):
....: vertices = face.ambient_Vrepresentation()
....: indices = [sp.coord_index_of(vector(x)) for x in vertices]
....: projected_vertices = [sp.transformed_coords[i] for i in indices]
....: assert Polyhedron(projected_vertices).dim() == 2
"""
def merge_options(*opts):
merged = dict()
for i in range(len(opts)):
opt = opts[i]
if opt is None:
continue
elif opt is False:
return False
elif isinstance(opt, (str, list, tuple)):
merged['color'] = opt
else:
merged.update(opt)
return merged
d = min(self.dim(), 2)
opts = [wireframe] * d + [fill] + [False] * (2-d)
# The point/line/polygon options take precedence over wireframe/fill
opts = [merge_options(opt1, opt2, kwds)
for opt1, opt2 in zip(opts, [point, line, polygon])]
def project(polyhedron, ortho):
if polyhedron.ambient_dim() <= 3:
return polyhedron.projection()
elif polyhedron.dim() <= 3:
if ortho:
return polyhedron.affine_hull_projection(orthonormal=True, extend=True).projection()
else:
return polyhedron.affine_hull_projection().projection()
elif polyhedron.dimension() == 4:
# For 4d-polyhedron, we can use schlegel projections:
return polyhedron.schlegel_projection(position=position)
else:
return polyhedron.projection()
projection = project(self, orthonormal)
try:
plot_method = projection.plot
except AttributeError:
raise NotImplementedError('plotting of {0}-dimensional polyhedra not implemented'
.format(self.ambient_dim()))
return plot_method(*opts)
def show(self, **kwds):
r"""
Display graphics immediately
This method attempts to display the graphics immediately,
without waiting for the currently running code (if any) to
return to the command line. Be careful, calling it from within
a loop will potentially launch a large number of external
viewer programs.
INPUT:
- ``kwds`` -- optional keyword arguments. See :meth:`plot` for
the description of available options.
OUTPUT:
This method does not return anything. Use :meth:`plot` if you
want to generate a graphics object that can be saved or
further transformed.
EXAMPLES::
sage: square = polytopes.hypercube(2)
sage: square.show(point='red') # optional - sage.plot
"""
self.plot(**kwds).show()
def tikz(self, view=[0, 0, 1], angle=0, scale=1,
edge_color='blue!95!black', facet_color='blue!95!black',
opacity=0.8, vertex_color='green', axis=False):
r"""
Return a string ``tikz_pic`` consisting of a tikz picture of ``self``
according to a projection ``view`` and an angle ``angle``
obtained via the threejs viewer.
INPUT:
- ``view`` - list (default: [0,0,1]) representing the rotation axis (see note below).
- ``angle`` - integer (default: 0) angle of rotation in degree from 0 to 360 (see note
below).
- ``scale`` - integer (default: 1) specifying the scaling of the tikz picture.
- ``edge_color`` - string (default: 'blue!95!black') representing colors which tikz
recognize.
- ``facet_color`` - string (default: 'blue!95!black') representing colors which tikz
recognize.
- ``vertex_color`` - string (default: 'green') representing colors which tikz
recognize.
- ``opacity`` - real number (default: 0.8) between 0 and 1 giving the opacity of
the front facets.
- ``axis`` - Boolean (default: False) draw the axes at the origin or not.
OUTPUT:
- LatexExpr -- containing the TikZ picture.
.. NOTE::
This is a wrapper of a method of the projection object
`self.projection()`. See :meth:`~sage.geometry.polyhedron.plot.Projection.tikz`
for more detail.
The inputs ``view`` and ``angle`` can be obtained by visualizing it
using ``.show(aspect_ratio=1)``. This will open an interactive view
in your default browser, where you can rotate the polytope. Once
the desired view angle is found, click on the information icon in
the lower right-hand corner and select *Get Viewpoint*. This will
copy a string of the form '[x,y,z],angle' to your local clipboard.
Go back to Sage and type ``Img = P.tikz([x,y,z],angle)``.
The inputs ``view`` and ``angle`` can also be obtained from the
viewer Jmol::
1) Right click on the image
2) Select ``Console``
3) Select the tab ``State``
4) Scroll to the line ``moveto``
It reads something like::
moveto 0.0 {x y z angle} Scale
The ``view`` is then [x,y,z] and ``angle`` is angle.
The following number is the scale.
Jmol performs a rotation of ``angle`` degrees along the
vector [x,y,z] and show the result from the z-axis.
EXAMPLES::
sage: co = polytopes.cuboctahedron()
sage: Img = co.tikz([0,0,1], 0)
sage: print('\n'.join(Img.splitlines()[:9]))
\begin{tikzpicture}%
[x={(1.000000cm, 0.000000cm)},
y={(0.000000cm, 1.000000cm)},
z={(0.000000cm, 0.000000cm)},
scale=1.000000,
back/.style={loosely dotted, thin},
edge/.style={color=blue!95!black, thick},
facet/.style={fill=blue!95!black,fill opacity=0.800000},
vertex/.style={inner sep=1pt,circle,draw=green!25!black,fill=green!75!black,thick}]
sage: print('\n'.join(Img.splitlines()[12:21]))
%% with the command: ._tikz_3d_in_3d and parameters:
%% view = [0, 0, 1]
%% angle = 0
%% scale = 1
%% edge_color = blue!95!black
%% facet_color = blue!95!black
%% opacity = 0.8
%% vertex_color = green
%% axis = False
sage: print('\n'.join(Img.splitlines()[22:26]))
%% Coordinate of the vertices:
%%
\coordinate (-1.00000, -1.00000, 0.00000) at (-1.00000, -1.00000, 0.00000);
\coordinate (-1.00000, 0.00000, -1.00000) at (-1.00000, 0.00000, -1.00000);
"""
return self.projection().tikz(view, angle, scale,
edge_color, facet_color,
opacity, vertex_color, axis)
def _rich_repr_(self, display_manager, **kwds):
r"""
Rich Output Magic Method
See :mod:`sage.repl.rich_output` for details.
EXAMPLES::
sage: from sage.repl.rich_output import get_display_manager
sage: dm = get_display_manager()
sage: polytopes.hypercube(2)._rich_repr_(dm)
OutputPlainText container
The ``supplemental_plot`` preference lets us control whether
this object is shown as text or picture+text::
sage: dm.preferences.supplemental_plot
'never'
sage: del dm.preferences.supplemental_plot
sage: polytopes.hypercube(3)
A 3-dimensional polyhedron in ZZ^3 defined as the convex hull of 8 vertices (use the .plot() method to plot)
sage: dm.preferences.supplemental_plot = 'never'
"""
prefs = display_manager.preferences
is_small = (self.ambient_dim() <= 2)
can_plot = (prefs.supplemental_plot != 'never')
plot_graph = can_plot and (prefs.supplemental_plot == 'always' or is_small)
# Under certain circumstances we display the plot as graphics
if plot_graph:
plot_kwds = dict(kwds)
plot_kwds.setdefault('title', repr(self))
output = self.plot(**plot_kwds)._rich_repr_(display_manager)
if output is not None:
return output
# create text for non-graphical output
if can_plot:
text = '{0} (use the .plot() method to plot)'.format(repr(self))
else:
text = repr(self)
# latex() produces huge tikz environment, override
tp = display_manager.types
if (prefs.text == 'latex' and tp.OutputLatex in display_manager.supported_output()):
return tp.OutputLatex(r'\text{{{0}}}'.format(text))
return tp.OutputPlainText(text)
@cached_method
def gale_transform(self):
r"""
Return the Gale transform of a polytope as described in the
reference below.
OUTPUT:
A list of vectors, the Gale transform. The dimension is the
dimension of the affine dependencies of the vertices of the
polytope.
EXAMPLES:
This is from the reference, for a triangular prism::
sage: p = Polyhedron(vertices = [[0,0],[0,1],[1,0]])
sage: p2 = p.prism()
sage: p2.gale_transform()
((-1, 0), (0, -1), (1, 1), (-1, -1), (1, 0), (0, 1))
REFERENCES:
Lectures in Geometric Combinatorics, R.R.Thomas, 2006, AMS Press.
.. SEEALSO::
:func`~sage.geometry.polyhedron.library.gale_transform_to_polyhedron`.
TESTS::
sage: P = Polyhedron(rays=[[1,0,0]])
sage: P.gale_transform()
Traceback (most recent call last):
...
ValueError: not a polytope
Check that :trac:`29073` is fixed::
sage: P = polytopes.icosahedron(exact=False)
sage: sum(P.gale_transform()).norm() < 1e-15
True
"""
if not self.is_compact():
raise ValueError('not a polytope')
A = matrix(self.n_vertices(),
[[1]+x for x in self.vertex_generator()])
A = A.transpose()
A_ker = A.right_kernel_matrix(basis='computed')
return tuple(A_ker.columns())
def _test_gale_transform(self, tester=None, **options):
r"""
Run tests on the method :meth:`.gale_transform` and its inverse
:meth:`~sage.geometry.polyhedron.library.gale_transform_to_polytope`.
TESTS::
sage: polytopes.cross_polytope(3)._test_gale_transform()
"""
if tester is None:
tester = self._tester(**options)
if not self.is_compact():
with tester.assertRaises(ValueError):
self.gale_transform()
return
# Check :trac:`29073`.
if not self.base_ring().is_exact() and self.ambient_dim() > 0:
g = self.gale_transform()
tester.assertTrue(sum(g).norm() < 1e-10 or sum(g).norm()/matrix(g).norm() < 1e-13)
return
# Prevent very long doctests.
if self.n_vertices() + self.n_rays() > 50 or self.n_facets() > 50:
return
if not self.is_empty():
# ``gale_transform_to_polytope`` needs at least one vertex to work.
from sage.geometry.polyhedron.library import gale_transform_to_polytope
g = self.gale_transform()
P = gale_transform_to_polytope(g, base_ring=self.base_ring(), backend=self.backend())
try:
import sage.graphs.graph
except ImportError:
pass
else:
tester.assertTrue(self.is_combinatorially_isomorphic(P))
def projection(self, projection=None):
r"""
Return a projection object.
INPUT:
- ``proj`` -- a projection function
OUTPUT:
The identity projection. This is useful for plotting
polyhedra.
.. SEEALSO::
:meth:`~sage.geometry.polyhedron.base.Polyhedron_base.schlegel_projection` for a more interesting projection.
EXAMPLES::
sage: p = polytopes.hypercube(3)
sage: proj = p.projection()
sage: proj
The projection of a polyhedron into 3 dimensions
"""
from .plot import Projection
if projection is not None:
self.projection = Projection(self, projection)
else:
self.projection = Projection(self)
return self.projection
def render_solid(self, **kwds):
r"""
Return a solid rendering of a 2- or 3-d polytope.
EXAMPLES::
sage: p = polytopes.hypercube(3)
sage: p_solid = p.render_solid(opacity = .7)
sage: type(p_solid)
<class 'sage.plot.plot3d.index_face_set.IndexFaceSet'>
"""
proj = self.projection()
if self.ambient_dim() == 3:
return proj.render_solid_3d(**kwds)
if self.ambient_dim() == 2:
return proj.render_fill_2d(**kwds)
raise ValueError("render_solid is only defined for 2 and 3 dimensional polyhedra")
def render_wireframe(self, **kwds):
r"""
For polytopes in 2 or 3 dimensions, return the edges
as a list of lines.
EXAMPLES::
sage: p = Polyhedron([[1,2,],[1,1],[0,0]])
sage: p_wireframe = p.render_wireframe()
sage: p_wireframe._objects
[Line defined by 2 points, Line defined by 2 points, Line defined by 2 points]
"""
proj = self.projection()
if self.ambient_dim() == 3:
return proj.render_wireframe_3d(**kwds)
if self.ambient_dim() == 2:
return proj.render_outline_2d(**kwds)
raise ValueError("render_wireframe is only defined for 2 and 3 dimensional polyhedra")
def schlegel_projection(self, facet=None, position=None):
r"""
Return the Schlegel projection.
* The facet is orthonormally transformed into its affine hull.
* The position specifies a point coming out of the barycenter of the
facet from which the other vertices will be projected into the facet.
INPUT:
- ``facet`` -- a PolyhedronFace. The facet into which the Schlegel
diagram is created. The default is the first facet.
- ``position`` -- a positive number. Determines a relative distance
from the barycenter of ``facet``. A value close to 0 will place the
projection point close to the facet and a large value further away.
Default is `1`. If the given value is too large, an error is returned.
OUTPUT:
A :class:`~sage.geometry.polyhedron.plot.Projection` object.
EXAMPLES::
sage: p = polytopes.hypercube(3)
sage: sch_proj = p.schlegel_projection()
sage: schlegel_edge_indices = sch_proj.lines
sage: schlegel_edges = [sch_proj.coordinates_of(x) for x in schlegel_edge_indices]
sage: len([x for x in schlegel_edges if x[0][0] > 0])
8
The Schlegel projection preserves the convexity of facets, see :trac:`30015`::
sage: fcube = polytopes.hypercube(4)
sage: tfcube = fcube.face_truncation(fcube.faces(0)[0])
sage: tfcube.facets()[-1]
A 3-dimensional face of a Polyhedron in QQ^4 defined as the convex hull of 8 vertices
sage: sp = tfcube.schlegel_projection(tfcube.facets()[-1])
sage: sp.plot() # optional - sage.plot
Graphics3d Object
The same truncated cube but see inside the tetrahedral facet::
sage: tfcube.facets()[4]
A 3-dimensional face of a Polyhedron in QQ^4 defined as the convex hull of 4 vertices
sage: sp = tfcube.schlegel_projection(tfcube.facets()[4])
sage: sp.plot() # optional - sage.plot
Graphics3d Object
A different values of ``position`` changes the projection::
sage: sp = tfcube.schlegel_projection(tfcube.facets()[4],1/2)
sage: sp.plot() # optional - sage.plot
Graphics3d Object
sage: sp = tfcube.schlegel_projection(tfcube.facets()[4],4)
sage: sp.plot() # optional - sage.plot
Graphics3d Object
A value which is too large give a projection point that sees more than
one facet resulting in a error::
sage: sp = tfcube.schlegel_projection(tfcube.facets()[4],5)
Traceback (most recent call last):
...
ValueError: the chosen position is too large
"""
proj = self.projection()
return proj.schlegel(facet, position)
def affine_hull(self, *args, **kwds):
r"""
Return the affine hull of ``self`` as a polyhedron.
EXAMPLES::
sage: half_plane_in_space = Polyhedron(ieqs=[(0,1,0,0)], eqns=[(0,0,0,1)])
sage: half_plane_in_space.affine_hull().Hrepresentation()
(An equation (0, 0, 1) x + 0 == 0,)
sage: polytopes.cube().affine_hull().is_universe()
True
"""
if args or kwds:
raise TypeError("the method 'affine_hull' does not take any parameters; perhaps you meant 'affine_hull_projection'")
if not self.inequalities():
return self
self_as_face = self.faces(self.dimension())[0]
return self_as_face.affine_tangent_cone()
@cached_method
def _affine_hull_projection(self, *,
as_convex_set=True, as_affine_map=True, as_section_map=True,
orthogonal=False, orthonormal=False,
extend=False, minimal=False):
r"""
Return ``self`` projected into its affine hull.
INPUT:
See :meth:`affine_hull_projection`.
OUTPUT:
An instance of :class:`~sage.geometry.convex_set.AffineHullProjectionData`.
See :meth:`affine_hull_projection` for details.
TESTS:
Check that :trac:`23355` is fixed::
sage: P = Polyhedron([[7]]); P
A 0-dimensional polyhedron in ZZ^1 defined as the convex hull of 1 vertex
sage: P.affine_hull_projection()
A 0-dimensional polyhedron in ZZ^0 defined as the convex hull of 1 vertex
sage: P.affine_hull_projection(orthonormal='True')
A 0-dimensional polyhedron in QQ^0 defined as the convex hull of 1 vertex
sage: P.affine_hull_projection(orthogonal='True')
A 0-dimensional polyhedron in QQ^0 defined as the convex hull of 1 vertex
Check that :trac:`24047` is fixed::
sage: P1 = Polyhedron(vertices=([[-1, 1], [0, -1], [0, 0], [-1, -1]]))
sage: P2 = Polyhedron(vertices=[[1, 1], [1, -1], [0, -1], [0, 0]])
sage: P = P1.intersection(P2)
sage: A, b = P.affine_hull_projection(as_affine_map=True, orthonormal=True, extend=True) # optional - sage.rings.number_field
sage: Polyhedron([(2,3,4)]).affine_hull_projection()
A 0-dimensional polyhedron in ZZ^0 defined as the convex hull of 1 vertex
Check that backend is preserved::
sage: polytopes.simplex(backend='field').affine_hull_projection().backend()
'field'
sage: P = Polyhedron(vertices=[[0,0], [1,0]], backend='field')
sage: P.affine_hull_projection(orthogonal=True, orthonormal=True, extend=True).backend() # optional - sage.rings.number_field
'field'
Check that :trac:`29116` is fixed::
sage: V =[
....: [1, 0, -1, 0, 0],
....: [1, 0, 0, -1, 0],
....: [1, 0, 0, 0, -1],
....: [1, 0, 0, +1, 0],
....: [1, 0, 0, 0, +1],
....: [1, +1, 0, 0, 0]
....: ]
sage: P = Polyhedron(V)
sage: P.affine_hull_projection()
A 4-dimensional polyhedron in ZZ^4 defined as the convex hull of 6 vertices
sage: P.affine_hull_projection(orthonormal=True)
Traceback (most recent call last):
...
ValueError: the base ring needs to be extended; try with "extend=True"
sage: P.affine_hull_projection(orthonormal=True, extend=True) # optional - sage.rings.number_field
A 4-dimensional polyhedron in AA^4 defined as the convex hull of 6 vertices
"""
result = AffineHullProjectionData()
if self.is_empty():
raise ValueError('affine hull projection of an empty polyhedron is undefined')
# handle trivial full-dimensional case
if self.ambient_dim() == self.dim():
if as_convex_set:
result.image = self
if as_affine_map:
identity = linear_transformation(matrix(self.base_ring(),
self.dim(),
self.dim(),
self.base_ring().one()))
result.projection_linear_map = result.section_linear_map = identity
result.projection_translation = result.section_translation = self.ambient_space().zero()
elif orthogonal or orthonormal:
# see TODO
if not self.is_compact():
raise NotImplementedError('"orthogonal=True" and "orthonormal=True" work only for compact polyhedra')
affine_basis = self.an_affine_basis()
v0 = affine_basis[0].vector()
# We implicitly translate the first vertex of the affine basis to zero.
vi = tuple(v.vector() - v0 for v in affine_basis[1:])
M = matrix(self.base_ring(), self.dim(), self.ambient_dim(), vi)
# Switch base_ring to AA if necessary,
# since gram_schmidt needs to be able to take square roots.
# Pick orthonormal basis and transform all vertices accordingly
# if the orthonormal transform makes it necessary, change base ring.
try:
A, G = M.gram_schmidt(orthonormal=orthonormal)
except TypeError:
if not extend:
raise ValueError('the base ring needs to be extended; try with "extend=True"')
M = matrix(AA, M)
A = M.gram_schmidt(orthonormal=orthonormal)[0]
if minimal:
from sage.rings.qqbar import number_field_elements_from_algebraics
new_ring = number_field_elements_from_algebraics(A.list(), embedded=True, minimal=True)[0]
A = A.change_ring(new_ring)
L = linear_transformation(A, side='right')
ambient_translation = -vector(A.base_ring(), affine_basis[0])
image_translation = A * ambient_translation
# Note the order. We compute ``A*self`` and then translate the image.
# ``A*self`` uses the incidence matrix and we avoid recomputation.
# Also, if the new base ring is ``AA``, we want to avoid computing the incidence matrix in that ring.
# ``convert=True`` takes care of the case, where there might be no coercion (``AA`` and quadratic field).
if as_convex_set:
result.image = self.linear_transformation(A, new_base_ring=A.base_ring()) + image_translation
if as_affine_map:
result.projection_linear_map = L
result.projection_translation = image_translation
if as_section_map:
L_dagger = linear_transformation(A.transpose() * (A * A.transpose()).inverse(), side='right')
result.section_linear_map = L_dagger
result.section_translation = v0.change_ring(A.base_ring())
else:
# translate one vertex to the origin
v0 = self.vertices()[0].vector()
gens = []
for v in self.vertices()[1:]:
gens.append(v.vector() - v0)
for r in self.rays():
gens.append(r.vector())
for l in self.lines():
gens.append(l.vector())
# Pick subset of coordinates to coordinatize the affine span
M = matrix(gens)
pivots = M.pivots()
A = matrix(self.base_ring(), len(pivots), self.ambient_dim(),
[[1 if j == i else 0 for j in range(self.ambient_dim())] for i in pivots])
if as_affine_map:
image_translation = vector(self.base_ring(), self.dim())
L = linear_transformation(A, side='right')
result.projection_linear_map = L
result.projection_translation = image_translation
if as_convex_set:
result.image = A*self
if as_section_map:
if self.dim():
B = M.transpose()/(A*M.transpose())
else:
B = matrix(self.ambient_dim(), 0)
L_section = linear_transformation(B, side='right')
result.section_linear_map = L_section
result.section_translation = v0 - L_section(L(v0) + image_translation)
return result
def affine_hull_projection(self,
as_polyhedron=None, as_affine_map=False,
orthogonal=False, orthonormal=False,
extend=False, minimal=False,
return_all_data=False,
*, as_convex_set=None):
r"""
Return the polyhedron projected into its affine hull.
Each polyhedron is contained in some smallest affine subspace
(possibly the entire ambient space) -- its affine hull. We
provide an affine linear map that projects the ambient space of
the polyhedron to the standard Euclidean space of dimension of
the polyhedron, which restricts to a bijection from the affine
hull.
The projection map is not unique; some parameters control the
choice of the map. Other parameters control the output of the
function.
INPUT:
- ``as_polyhedron`` (or ``as_convex_set``) -- (boolean or the default
``None``) and
- ``as_affine_map`` -- (boolean, default ``False``) control the output
The default ``as_polyhedron=None`` translates to
``as_polyhedron=not as_affine_map``,
therefore to ``as_polyhedron=True`` if nothing is specified.
If exactly one of either ``as_polyhedron`` or ``as_affine_map`` is
set, then either a polyhedron or the affine transformation
is returned. The affine transformation
sends the embedded polytope to a fulldimensional one.
It is given as a pair ``(A, b)``, where A is a linear transformation
and `b` is a vector, and the affine transformation sends ``v`` to
``A(v)+b``.
If both ``as_polyhedron`` and ``as_affine_map`` are set, then
both are returned, encapsulated in an instance of
:class:`~sage.geometry.convex_set.AffineHullProjectionData`.
- ``return_all_data`` -- (boolean, default ``False``)
If set, then ``as_polyhedron`` and ``as_affine_map`` will set
(possibly overridden) and additional (internal) data concerning
the transformation is returned. Everything is encapsulated
in an instance of
:class:`~sage.geometry.convex_set.AffineHullProjectionData` in
this case.
- ``orthogonal`` -- boolean (default: ``False``); if ``True``,
provide an orthogonal transformation.
- ``orthonormal`` -- boolean (default: ``False``); if ``True``,
provide an orthonormal transformation. If the base ring does not
provide the necessary square roots, the extend parameter
needs to be set to ``True``.
- ``extend`` -- boolean (default: ``False``); if ``True``,
allow base ring to be extended if necessary. This becomes
relevant when requiring an orthonormal transformation.
- ``minimal`` -- boolean (default: ``False``); if ``True``,
when doing an extension, it computes the minimal base ring of the
extension, otherwise the base ring is ``AA``.
OUTPUT:
A full-dimensional polyhedron or an affine transformation,
depending on the parameters ``as_polyhedron`` and ``as_affine_map``,
or an instance of :class:`~sage.geometry.convex_set.AffineHullProjectionData`
containing all data (parameter ``return_all_data``).
If the output is an instance of
:class:`~sage.geometry.convex_set.AffineHullProjectionData`, the
following fields may be set:
- ``image`` -- the projection of the original polyhedron
- ``projection_map`` -- the affine map as a pair whose first component
is a linear transformation and its second component a shift;
see above.
- ``section_map`` -- an affine map as a pair whose first component
is a linear transformation and its second component a shift.
It maps the codomain of ``affine_map`` to the affine hull of
``self``. It is a right inverse of ``projection_map``.
Note that all of these data are compatible.
.. TODO::
- make the parameters ``orthogonal`` and ``orthonormal`` work
with unbounded polyhedra.
EXAMPLES::
sage: triangle = Polyhedron([(1,0,0), (0,1,0), (0,0,1)]); triangle
A 2-dimensional polyhedron in ZZ^3 defined as the convex hull of 3 vertices
sage: triangle.affine_hull_projection()
A 2-dimensional polyhedron in ZZ^2 defined as the convex hull of 3 vertices
sage: half3d = Polyhedron(vertices=[(3,2,1)], rays=[(1,0,0)])
sage: half3d.affine_hull_projection().Vrepresentation()
(A ray in the direction (1), A vertex at (3))
The resulting affine hulls depend on the parameter ``orthogonal`` and ``orthonormal``::
sage: L = Polyhedron([[1,0],[0,1]]); L
A 1-dimensional polyhedron in ZZ^2 defined as the convex hull of 2 vertices
sage: A = L.affine_hull_projection(); A
A 1-dimensional polyhedron in ZZ^1 defined as the convex hull of 2 vertices
sage: A.vertices()
(A vertex at (0), A vertex at (1))
sage: A = L.affine_hull_projection(orthogonal=True); A
A 1-dimensional polyhedron in QQ^1 defined as the convex hull of 2 vertices
sage: A.vertices()
(A vertex at (0), A vertex at (2))
sage: A = L.affine_hull_projection(orthonormal=True) # optional - sage.rings.number_field
Traceback (most recent call last):
...
ValueError: the base ring needs to be extended; try with "extend=True"
sage: A = L.affine_hull_projection(orthonormal=True, extend=True); A # optional - sage.rings.number_field
A 1-dimensional polyhedron in AA^1 defined as the convex hull of 2 vertices
sage: A.vertices() # optional - sage.rings.number_field
(A vertex at (1.414213562373095?), A vertex at (0.?e-18))
More generally::
sage: S = polytopes.simplex(); S
A 3-dimensional polyhedron in ZZ^4 defined as the convex hull of 4 vertices
sage: S.vertices()
(A vertex at (0, 0, 0, 1),
A vertex at (0, 0, 1, 0),
A vertex at (0, 1, 0, 0),
A vertex at (1, 0, 0, 0))
sage: A = S.affine_hull_projection(); A
A 3-dimensional polyhedron in ZZ^3 defined as the convex hull of 4 vertices
sage: A.vertices()
(A vertex at (0, 0, 0),
A vertex at (0, 0, 1),
A vertex at (0, 1, 0),
A vertex at (1, 0, 0))
sage: A = S.affine_hull_projection(orthogonal=True); A
A 3-dimensional polyhedron in QQ^3 defined as the convex hull of 4 vertices
sage: A.vertices()
(A vertex at (0, 0, 0),
A vertex at (2, 0, 0),
A vertex at (1, 3/2, 0),
A vertex at (1, 1/2, 4/3))
sage: A = S.affine_hull_projection(orthonormal=True, extend=True); A
A 3-dimensional polyhedron in AA^3 defined as the convex hull of 4 vertices
sage: A.vertices()
(A vertex at (0.7071067811865475?, 0.4082482904638630?, 1.154700538379252?),
A vertex at (0.7071067811865475?, 1.224744871391589?, 0.?e-18),
A vertex at (1.414213562373095?, 0.?e-18, 0.?e-18),
A vertex at (0.?e-18, 0.?e-18, 0.?e-18))
With the parameter ``minimal`` one can get a minimal base ring::
sage: s = polytopes.simplex(3)
sage: s_AA = s.affine_hull_projection(orthonormal=True, extend=True)
sage: s_AA.base_ring()
Algebraic Real Field
sage: s_full = s.affine_hull_projection(orthonormal=True, extend=True, minimal=True)
sage: s_full.base_ring()
Number Field in a with defining polynomial y^4 - 4*y^2 + 1 with a = 0.5176380902050415?
More examples with the ``orthonormal`` parameter::
sage: P = polytopes.permutahedron(3); P # optional - sage.combinat # optional - sage.rings.number_field
A 2-dimensional polyhedron in ZZ^3 defined as the convex hull of 6 vertices
sage: set([F.as_polyhedron().affine_hull_projection(orthonormal=True, extend=True).volume() for F in P.affine_hull_projection().faces(1)]) == {1, sqrt(AA(2))} # optional - sage.combinat # optional - sage.rings.number_field
True
sage: set([F.as_polyhedron().affine_hull_projection(orthonormal=True, extend=True).volume() for F in P.affine_hull_projection(orthonormal=True, extend=True).faces(1)]) == {sqrt(AA(2))} # optional - sage.combinat # optional - sage.rings.number_field
True
sage: D = polytopes.dodecahedron() # optional - sage.rings.number_field
sage: F = D.faces(2)[0].as_polyhedron() # optional - sage.rings.number_field
sage: F.affine_hull_projection(orthogonal=True) # optional - sage.rings.number_field
A 2-dimensional polyhedron in (Number Field in sqrt5 with defining polynomial x^2 - 5 with sqrt5 = 2.236067977499790?)^2 defined as the convex hull of 5 vertices
sage: F.affine_hull_projection(orthonormal=True, extend=True) # optional - sage.rings.number_field
A 2-dimensional polyhedron in AA^2 defined as the convex hull of 5 vertices
sage: K.<sqrt2> = QuadraticField(2) # optional - sage.rings.number_field
sage: P = Polyhedron([2*[K.zero()],2*[sqrt2]]); P # optional - sage.rings.number_field
A 1-dimensional polyhedron in (Number Field in sqrt2 with defining polynomial x^2 - 2 with sqrt2 = 1.414213562373095?)^2 defined as the convex hull of 2 vertices
sage: P.vertices() # optional - sage.rings.number_field
(A vertex at (0, 0), A vertex at (sqrt2, sqrt2))
sage: A = P.affine_hull_projection(orthonormal=True); A # optional - sage.rings.number_field
A 1-dimensional polyhedron in (Number Field in sqrt2 with defining polynomial x^2 - 2 with sqrt2 = 1.414213562373095?)^1 defined as the convex hull of 2 vertices
sage: A.vertices() # optional - sage.rings.number_field
(A vertex at (0), A vertex at (2))
sage: K.<sqrt3> = QuadraticField(3) # optional - sage.rings.number_field
sage: P = Polyhedron([2*[K.zero()],2*[sqrt3]]); P # optional - sage.rings.number_field
A 1-dimensional polyhedron in (Number Field in sqrt3 with defining polynomial x^2 - 3 with sqrt3 = 1.732050807568878?)^2 defined as the convex hull of 2 vertices
sage: P.vertices() # optional - sage.rings.number_field
(A vertex at (0, 0), A vertex at (sqrt3, sqrt3))
sage: A = P.affine_hull_projection(orthonormal=True) # optional - sage.rings.number_field
Traceback (most recent call last):
...
ValueError: the base ring needs to be extended; try with "extend=True"
sage: A = P.affine_hull_projection(orthonormal=True, extend=True); A # optional - sage.rings.number_field
A 1-dimensional polyhedron in AA^1 defined as the convex hull of 2 vertices
sage: A.vertices() # optional - sage.rings.number_field
(A vertex at (0), A vertex at (2.449489742783178?))
sage: sqrt(6).n() # optional - sage.rings.number_field
2.44948974278318
The affine hull is combinatorially equivalent to the input::
sage: P.is_combinatorially_isomorphic(P.affine_hull_projection()) # optional - sage.rings.number_field
True
sage: P.is_combinatorially_isomorphic(P.affine_hull_projection(orthogonal=True)) # optional - sage.rings.number_field
True
sage: P.is_combinatorially_isomorphic(P.affine_hull_projection(orthonormal=True, extend=True)) # optional - sage.rings.number_field
True
The ``orthonormal=True`` parameter preserves volumes;
it provides an isometric copy of the polyhedron::
sage: Pentagon = polytopes.dodecahedron().faces(2)[0].as_polyhedron() # optional - sage.rings.number_field
sage: P = Pentagon.affine_hull_projection(orthonormal=True, extend=True) # optional - sage.rings.number_field
sage: _, c= P.is_inscribed(certificate=True) # optional - sage.rings.number_field
sage: c # optional - sage.rings.number_field
(0.4721359549995794?, 0.6498393924658126?)
sage: circumradius = (c-vector(P.vertices()[0])).norm() # optional - sage.rings.number_field
sage: p = polytopes.regular_polygon(5) # optional - sage.rings.number_field
sage: p.volume() # optional - sage.rings.number_field
2.377641290737884?
sage: P.volume() # optional - sage.rings.number_field
1.53406271079097?
sage: p.volume()*circumradius^2 # optional - sage.rings.number_field
1.534062710790965?
sage: P.volume() == p.volume()*circumradius^2 # optional - sage.rings.number_field
True
One can also use ``orthogonal`` parameter to calculate volumes;
in this case we don't need to switch base rings. One has to divide
by the square root of the determinant of the linear part of the
affine transformation times its transpose::
sage: Pentagon = polytopes.dodecahedron().faces(2)[0].as_polyhedron() # optional - sage.rings.number_field
sage: Pnormal = Pentagon.affine_hull_projection(orthonormal=True, extend=True) # optional - sage.rings.number_field
sage: Pgonal = Pentagon.affine_hull_projection(orthogonal=True) # optional - sage.rings.number_field
sage: A, b = Pentagon.affine_hull_projection(orthogonal=True, as_affine_map=True) # optional - sage.rings.number_field
sage: Adet = (A.matrix().transpose()*A.matrix()).det() # optional - sage.rings.number_field
sage: Pnormal.volume() # optional - sage.rings.number_field
1.53406271079097?
sage: Pgonal.volume()/Adet.sqrt(extend=True) # optional - sage.rings.number_field
-80*(55*sqrt(5) - 123)/sqrt(-6368*sqrt(5) + 14240)
sage: Pgonal.volume()/AA(Adet).sqrt().n(digits=20) # optional - sage.rings.number_field
1.5340627107909646813
sage: AA(Pgonal.volume()^2) == (Pnormal.volume()^2)*AA(Adet) # optional - sage.rings.number_field
True
Another example with ``as_affine_map=True``::
sage: P = polytopes.permutahedron(4) # optional - sage.combinat # optional - sage.rings.number_field
sage: A, b = P.affine_hull_projection(orthonormal=True, as_affine_map=True, extend=True) # optional - sage.combinat # optional - sage.rings.number_field
sage: Q = P.affine_hull_projection(orthonormal=True, extend=True) # optional - sage.combinat # optional - sage.rings.number_field
sage: Q.center() # optional - sage.combinat # optional - sage.rings.number_field
(0.7071067811865475?, 1.224744871391589?, 1.732050807568878?)
sage: A(P.center()) + b == Q.center() # optional - sage.combinat # optional - sage.rings.number_field
True
For unbounded, non full-dimensional polyhedra, the ``orthogonal=True`` and ``orthonormal=True``
is not implemented::
sage: P = Polyhedron(ieqs=[[0, 1, 0], [0, 0, 1], [0, 0, -1]]); P
A 1-dimensional polyhedron in QQ^2 defined as the convex hull of 1 vertex and 1 ray
sage: P.is_compact()
False
sage: P.is_full_dimensional()
False
sage: P.affine_hull_projection(orthogonal=True)
Traceback (most recent call last):
...
NotImplementedError: "orthogonal=True" and "orthonormal=True" work only for compact polyhedra
sage: P.affine_hull_projection(orthonormal=True)
Traceback (most recent call last):
...
NotImplementedError: "orthogonal=True" and "orthonormal=True" work only for compact polyhedra
Setting ``as_affine_map`` to ``True``
without ``orthogonal`` or ``orthonormal`` set to ``True``::
sage: S = polytopes.simplex()
sage: S.affine_hull_projection(as_affine_map=True)
(Vector space morphism represented by the matrix:
[1 0 0]
[0 1 0]
[0 0 1]
[0 0 0]
Domain: Vector space of dimension 4 over Rational Field
Codomain: Vector space of dimension 3 over Rational Field,
(0, 0, 0))
If the polyhedron is full-dimensional, it is returned::
sage: polytopes.cube().affine_hull_projection()
A 3-dimensional polyhedron in ZZ^3 defined as the convex hull of 8 vertices
sage: polytopes.cube().affine_hull_projection(as_affine_map=True)
(Vector space morphism represented by the matrix:
[1 0 0]
[0 1 0]
[0 0 1]
Domain: Vector space of dimension 3 over Rational Field
Codomain: Vector space of dimension 3 over Rational Field,
(0, 0, 0))
Return polyhedron and affine map::
sage: S = polytopes.simplex(2)
sage: data = S.affine_hull_projection(orthogonal=True,
....: as_polyhedron=True,
....: as_affine_map=True); data
AffineHullProjectionData(image=A 2-dimensional polyhedron in QQ^2
defined as the convex hull of 3 vertices,
projection_linear_map=Vector space morphism represented by the matrix:
[ -1 -1/2]
[ 1 -1/2]
[ 0 1]
Domain: Vector space of dimension 3 over Rational Field
Codomain: Vector space of dimension 2 over Rational Field,
projection_translation=(1, 1/2),
section_linear_map=None,
section_translation=None)
Return all data::
sage: data = S.affine_hull_projection(orthogonal=True, return_all_data=True); data
AffineHullProjectionData(image=A 2-dimensional polyhedron in QQ^2
defined as the convex hull of 3 vertices,
projection_linear_map=Vector space morphism represented by the matrix:
[ -1 -1/2]
[ 1 -1/2]
[ 0 1]
Domain: Vector space of dimension 3 over Rational Field
Codomain: Vector space of dimension 2 over Rational Field,
projection_translation=(1, 1/2),
section_linear_map=Vector space morphism represented by the matrix:
[-1/2 1/2 0]
[-1/3 -1/3 2/3]
Domain: Vector space of dimension 2 over Rational Field
Codomain: Vector space of dimension 3 over Rational Field, section_translation=(1, 0, 0))
The section map is a right inverse of the projection map::
sage: data.image.linear_transformation(data.section_linear_map.matrix().transpose()) + data.section_translation == S
True
Same without ``orthogonal=True``::
sage: data = S.affine_hull_projection(return_all_data=True); data
AffineHullProjectionData(image=A 2-dimensional polyhedron in ZZ^2
defined as the convex hull of 3 vertices,
projection_linear_map=Vector space morphism represented by the matrix:
[1 0]
[0 1]
[0 0]
Domain: Vector space of dimension 3 over Rational Field
Codomain: Vector space of dimension 2 over Rational Field, projection_translation=(0, 0),
section_linear_map=Vector space morphism represented by the matrix:
[ 1 0 -1]
[ 0 1 -1]
Domain: Vector space of dimension 2 over Rational Field
Codomain: Vector space of dimension 3 over Rational Field, section_translation=(0, 0, 1))
sage: data.image.linear_transformation(data.section_linear_map.matrix().transpose()) + data.section_translation == S
True
::
sage: P0 = Polyhedron(
....: ieqs=[(0, -1, 0, 1, 1, 1), (0, 1, 1, 0, -1, -1), (0, -1, 1, 1, 0, 0),
....: (0, 1, 0, 0, 0, 0), (0, 0, 1, 1, -1, -1), (0, 0, 0, 0, 0, 1),
....: (0, 0, 0, 0, 1, 0), (0, 0, 0, 1, 0, -1), (0, 0, 1, 0, 0, 0)])
sage: P = P0.intersection(Polyhedron(eqns=[(-1, 1, 1, 1, 1, 1)]))
sage: P.dim()
4
sage: P.affine_hull_projection(orthogonal=True, as_affine_map=True)[0]
Vector space morphism represented by the matrix:
[ 0 0 0 1/3]
[ -2/3 -1/6 0 -1/12]
[ 1/3 -1/6 1/2 -1/12]
[ 0 1/2 0 -1/12]
[ 1/3 -1/6 -1/2 -1/12]
Domain: Vector space of dimension 5 over Rational Field
Codomain: Vector space of dimension 4 over Rational Field
"""
if as_polyhedron is not None:
as_convex_set = as_polyhedron
return super().affine_hull_projection(
as_convex_set=as_convex_set, as_affine_map=as_affine_map,
orthogonal=orthogonal, orthonormal=orthonormal,
extend=extend, minimal=minimal,
return_all_data=return_all_data)
def _test_affine_hull_projection(self, tester=None, verbose=False, **options):
r"""
Run tests on the method :meth:`.affine_hull_projection`.
TESTS::
sage: D = polytopes.dodecahedron() # optional - sage.rings.number_field
sage: D.facets()[0].as_polyhedron()._test_affine_hull_projection() # optional - sage.rings.number_field
"""
if tester is None:
tester = self._tester(**options)
if self.is_empty():
# Undefined, nothing to test
return
if self.n_vertices() > 30 or self.n_facets() > 30 or self.dim() > 6:
# Avoid very long doctests.
return
data_sets = [None]*4
data_sets[0] = self.affine_hull_projection(return_all_data=True)
if self.is_compact():
data_sets[1] = self.affine_hull_projection(return_all_data=True,
orthogonal=True,
extend=True)
data_sets[2] = self.affine_hull_projection(return_all_data=True,
orthonormal=True,
extend=True)
data_sets[3] = self.affine_hull_projection(return_all_data=True,
orthonormal=True,
extend=True,
minimal=True)
else:
data_sets = data_sets[:1]
for i, data in enumerate(data_sets):
if verbose:
print("Running test number {}".format(i))
M = data.projection_linear_map.matrix().transpose()
tester.assertEqual(self.linear_transformation(M, new_base_ring=M.base_ring())
+ data.projection_translation,
data.image)
M = data.section_linear_map.matrix().transpose()
if M.base_ring() is AA:
self_extend = self.change_ring(AA)
else:
self_extend = self
tester.assertEqual(data.image.linear_transformation(M)
+ data.section_translation,
self_extend)
if i == 0:
tester.assertEqual(data.image.base_ring(), self.base_ring())
else:
# Test whether the map is orthogonal.
M = data.projection_linear_map.matrix()
tester.assertTrue((M.transpose() * M).is_diagonal())
if i > 1:
# Test whether the map is orthonormal.
tester.assertTrue((M.transpose() * M).is_one())
if i == 3:
# Test that the extension is indeed minimal.
if self.base_ring() is not AA:
tester.assertIsNot(data.image.base_ring(), AA)
def affine_hull_manifold(self, name=None, latex_name=None, start_index=0, ambient_space=None,
ambient_chart=None, names=None, **kwds):
r"""
Return the affine hull of ``self`` as a manifold.
If ``self`` is full-dimensional, it is just the ambient Euclidean space.
Otherwise, it is a Riemannian submanifold of the ambient Euclidean space.
INPUT:
- ``ambient_space`` -- a :class:`~sage.manifolds.differentiable.examples.euclidean.EuclideanSpace`
of the ambient dimension (default: the manifold of ``ambient_chart``, if provided;
otherwise, a new instance of ``EuclideanSpace``).
- ``ambient_chart`` -- a chart on ``ambient_space``.
- ``names`` -- names for the coordinates on the affine hull.
- optional arguments accepted by :meth:`affine_hull_projection`.
The default chart is determined by the optional arguments of
:meth:`affine_hull_projection`.
EXAMPLES::
sage: triangle = Polyhedron([(1,0,0), (0,1,0), (0,0,1)]); triangle
A 2-dimensional polyhedron in ZZ^3 defined as the convex hull of 3 vertices
sage: A = triangle.affine_hull_manifold(name='A'); A
2-dimensional Riemannian submanifold A embedded in the Euclidean space E^3
sage: A.embedding().display()
A → E^3
(x0, x1) ↦ (x, y, z) = (t0 + x0, t0 + x1, t0 - x0 - x1 + 1)
sage: A.embedding().inverse().display()
E^3 → A
(x, y, z) ↦ (x0, x1) = (x, y)
sage: A.adapted_chart()
[Chart (E^3, (x0_E3, x1_E3, t0_E3))]
sage: A.normal().display()
n = 1/3*sqrt(3) e_x + 1/3*sqrt(3) e_y + 1/3*sqrt(3) e_z
sage: A.induced_metric() # Need to call this before volume_form
Riemannian metric gamma on the 2-dimensional Riemannian submanifold A embedded in the Euclidean space E^3
sage: A.volume_form()
2-form eps_gamma on the 2-dimensional Riemannian submanifold A embedded in the Euclidean space E^3
Orthogonal version::
sage: A = triangle.affine_hull_manifold(name='A', orthogonal=True); A
2-dimensional Riemannian submanifold A embedded in the Euclidean space E^3
sage: A.embedding().display()
A → E^3
(x0, x1) ↦ (x, y, z) = (t0 - 1/2*x0 - 1/3*x1 + 1, t0 + 1/2*x0 - 1/3*x1, t0 + 2/3*x1)
sage: A.embedding().inverse().display()
E^3 → A
(x, y, z) ↦ (x0, x1) = (-x + y + 1, -1/2*x - 1/2*y + z + 1/2)
Arrangement of affine hull of facets::
sage: D = polytopes.dodecahedron() # optional - sage.rings.number_field
sage: E3 = EuclideanSpace(3) # optional - sage.rings.number_field
sage: submanifolds = [ # optional - sage.rings.number_field
....: F.as_polyhedron().affine_hull_manifold(name=f'F{i}', orthogonal=True, ambient_space=E3)
....: for i, F in enumerate(D.facets())]
sage: sum(FM.plot({}, srange(-2, 2, 0.1), srange(-2, 2, 0.1), opacity=0.2) # not tested # optional - sage.plot # optional - sage.rings.number_field
....: for FM in submanifolds) + D.plot()
Graphics3d Object
Full-dimensional case::
sage: cube = polytopes.cube(); cube
A 3-dimensional polyhedron in ZZ^3 defined as the convex hull of 8 vertices
sage: cube.affine_hull_manifold()
Euclidean space E^3
"""
if ambient_space is None:
if ambient_chart is not None:
ambient_space = ambient_chart.manifold()
else:
from sage.manifolds.differentiable.examples.euclidean import EuclideanSpace
ambient_space = EuclideanSpace(self.ambient_dim(), start_index=start_index)
if ambient_space.dimension() != self.ambient_dim():
raise ValueError('ambient_space and ambient_chart must match the ambient dimension')
if self.is_full_dimensional():
return ambient_space
if ambient_chart is None:
ambient_chart = ambient_space.default_chart()
CE = ambient_chart
from sage.manifolds.manifold import Manifold
if name is None:
name, latex_name = self._affine_hull_name_latex_name()
H = Manifold(self.dim(), name, ambient=ambient_space, structure="Riemannian",
latex_name=latex_name, start_index=start_index)
if names is None:
names = tuple(f'x{i}' for i in range(self.dim()))
CH = H.chart(names=names)
data = self.affine_hull_projection(return_all_data=True, **kwds)
projection_matrix = data.projection_linear_map.matrix().transpose()
projection_translation_vector = data.projection_translation
section_matrix = data.section_linear_map.matrix().transpose()
section_translation_vector = data.section_translation
from sage.symbolic.ring import SR
# We use the slacks of the (linear independent) equations as the foliation parameters
foliation_parameters = vector(SR.var(f't{i}') for i in range(self.ambient_dim() - self.dim()))
normal_matrix = matrix(equation.A() for equation in self.equation_generator()).transpose()
slack_matrix = normal_matrix.pseudoinverse()
phi = H.diff_map(ambient_space, {(CH, CE):
(section_matrix * vector(CH._xx) + section_translation_vector
+ normal_matrix * foliation_parameters).list()})
phi_inv = ambient_space.diff_map(H, {(CE, CH):
(projection_matrix * vector(CE._xx) + projection_translation_vector).list()})
foliation_scalar_fields = {parameter:
ambient_space.scalar_field({CE: slack_matrix.row(i) * (vector(CE._xx) - section_translation_vector)})
for i, parameter in enumerate(foliation_parameters)}
H.set_embedding(phi, inverse=phi_inv,
var=list(foliation_parameters), t_inverse=foliation_scalar_fields)
return H
def _affine_hull_name_latex_name(self, name=None, latex_name=None):
r"""
Return the default name of the affine hull.
EXAMPLES::
sage: polytopes.cube()._affine_hull_name_latex_name('C', r'\square')
('aff_C', '\\mathop{\\mathrm{aff}}(\\square)')
sage: Polyhedron(vertices=[[0, 1], [1, 0]])._affine_hull_name_latex_name()
('aff_P', '\\mathop{\\mathrm{aff}}(P)')
"""
if name is None:
name = 'P'
if latex_name is None:
latex_name = name
operator = 'aff'
aff_name = f'{operator}_{name}'
aff_latex_name = r'\mathop{\mathrm{' + operator + '}}(' + latex_name + ')'
return aff_name, aff_latex_name
| [
"sage.matrix.constructor.matrix",
"sage.modules.vector_space_morphism.linear_transformation",
"sage.modules.free_module_element.vector",
"sage.geometry.convex_set.AffineHullProjectionData",
"sage.symbolic.ring.SR.var"
] | [((37375, 37401), 'sage.geometry.convex_set.AffineHullProjectionData', 'AffineHullProjectionData', ([], {}), '()\n', (37399, 37401), False, 'from sage.geometry.convex_set import AffineHullProjectionData\n'), ((39637, 39675), 'sage.modules.vector_space_morphism.linear_transformation', 'linear_transformation', (['A'], {'side': '"""right"""'}), "(A, side='right')\n", (39658, 39675), False, 'from sage.modules.vector_space_morphism import linear_transformation\n'), ((41209, 41221), 'sage.matrix.constructor.matrix', 'matrix', (['gens'], {}), '(gens)\n', (41215, 41221), False, 'from sage.matrix.constructor import matrix\n'), ((74332, 74347), 'sage.symbolic.ring.SR.var', 'SR.var', (['f"""t{i}"""'], {}), "(f't{i}')\n", (74338, 74347), False, 'from sage.symbolic.ring import SR\n'), ((41550, 41588), 'sage.modules.vector_space_morphism.linear_transformation', 'linear_transformation', (['A'], {'side': '"""right"""'}), "(A, side='right')\n", (41571, 41588), False, 'from sage.modules.vector_space_morphism import linear_transformation\n'), ((41994, 42032), 'sage.modules.vector_space_morphism.linear_transformation', 'linear_transformation', (['B'], {'side': '"""right"""'}), "(B, side='right')\n", (42015, 42032), False, 'from sage.modules.vector_space_morphism import linear_transformation\n'), ((39270, 39283), 'sage.matrix.constructor.matrix', 'matrix', (['AA', 'M'], {}), '(AA, M)\n', (39276, 39283), False, 'from sage.matrix.constructor import matrix\n'), ((75110, 75124), 'sage.modules.free_module_element.vector', 'vector', (['CE._xx'], {}), '(CE._xx)\n', (75116, 75124), False, 'from sage.modules.free_module_element import vector\n'), ((74916, 74930), 'sage.modules.free_module_element.vector', 'vector', (['CE._xx'], {}), '(CE._xx)\n', (74922, 74930), False, 'from sage.modules.free_module_element import vector\n'), ((27766, 27775), 'sage.matrix.constructor.matrix', 'matrix', (['g'], {}), '(g)\n', (27772, 27775), False, 'from sage.matrix.constructor import matrix\n'), ((74660, 74674), 'sage.modules.free_module_element.vector', 'vector', (['CH._xx'], {}), '(CH._xx)\n', (74666, 74674), False, 'from sage.modules.free_module_element import vector\n')] |
import RPi.GPIO as GPIO
import time
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM) # Use BCM GPIO numbers
class Lcd:
def __init__(self, rs, e, *dataPins):
self.RS = rs
self.E = e
self.dataPins = dataPins
# Define some device constants
self.LCD_WIDTH = 16 # Maximum characters per line
self.LCD_CHR = True
self.LCD_CMD = False
self.LCD_LINES = [0x80, 0xC0] # LCD RAM address for the lines
# Timing constants
self.E_PULSE = 0.0005
self.E_DELAY = 0.0005
for pin in [self.E, self.RS, *self.dataPins]:
GPIO.setup(pin, GPIO.OUT)
# Initialise display
self.lcd_byte(0x33,self.LCD_CMD) # 110011 Initialise
self.lcd_byte(0x32,self.LCD_CMD) # 110010 Initialise
self.lcd_byte(0x06,self.LCD_CMD) # 000110 Cursor move direction
self.lcd_byte(0x0C,self.LCD_CMD) # 001100 Display On,Cursor Off, Blink Off
self.lcd_byte(0x28,self.LCD_CMD) # 101000 Data length, number of lines, font size
self.lcd_byte(0x01,self.LCD_CMD) # 000001 Clear display
time.sleep(self.E_DELAY)
def lcd_byte(self, bits, mode):
# Send byte to data pins
# bits = data
# mode = True for character
# False for command
GPIO.output(self.RS, mode) # RS
# High bits
for pin in self.dataPins:
GPIO.output(pin, False)
if bits&0x10==0x10:
GPIO.output(self.dataPins[0], True)
if bits&0x20==0x20:
GPIO.output(self.dataPins[1], True)
if bits&0x40==0x40:
GPIO.output(self.dataPins[2], True)
if bits&0x80==0x80:
GPIO.output(self.dataPins[3], True)
# Toggle 'Enable' pin
self.lcd_toggle_enable()
# Low bits
for pin in self.dataPins:
GPIO.output(pin, False)
if bits&0x01==0x01:
GPIO.output(self.dataPins[0], True)
if bits&0x02==0x02:
GPIO.output(self.dataPins[1], True)
if bits&0x04==0x04:
GPIO.output(self.dataPins[2], True)
if bits&0x08==0x08:
GPIO.output(self.dataPins[3], True)
# Toggle 'Enable' pin
self.lcd_toggle_enable()
def lcd_toggle_enable(self):
# Toggle enable
time.sleep(self.E_DELAY)
GPIO.output(self.E, True)
time.sleep(self.E_PULSE)
GPIO.output(self.E, False)
time.sleep(self.E_DELAY)
def print(self, message,line):
# Send string to display
message = message.ljust(self.LCD_WIDTH," ")
self.lcd_byte(self.LCD_LINES[line - 1], self.LCD_CMD)
for ch in message:
self.lcd_byte(ord(ch), self.LCD_CHR)
def clear(self):
self.lcd_byte(0x01,self.LCD_CMD) # 000001 Clear display
| [
"RPi.GPIO.output",
"RPi.GPIO.setup",
"RPi.GPIO.setwarnings",
"time.sleep",
"RPi.GPIO.setmode"
] | [((37, 60), 'RPi.GPIO.setwarnings', 'GPIO.setwarnings', (['(False)'], {}), '(False)\n', (53, 60), True, 'import RPi.GPIO as GPIO\n'), ((61, 83), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (73, 83), True, 'import RPi.GPIO as GPIO\n'), ((983, 1007), 'time.sleep', 'time.sleep', (['self.E_DELAY'], {}), '(self.E_DELAY)\n', (993, 1007), False, 'import time\n'), ((1148, 1174), 'RPi.GPIO.output', 'GPIO.output', (['self.RS', 'mode'], {}), '(self.RS, mode)\n', (1159, 1174), True, 'import RPi.GPIO as GPIO\n'), ((1967, 1991), 'time.sleep', 'time.sleep', (['self.E_DELAY'], {}), '(self.E_DELAY)\n', (1977, 1991), False, 'import time\n'), ((1994, 2019), 'RPi.GPIO.output', 'GPIO.output', (['self.E', '(True)'], {}), '(self.E, True)\n', (2005, 2019), True, 'import RPi.GPIO as GPIO\n'), ((2022, 2046), 'time.sleep', 'time.sleep', (['self.E_PULSE'], {}), '(self.E_PULSE)\n', (2032, 2046), False, 'import time\n'), ((2049, 2075), 'RPi.GPIO.output', 'GPIO.output', (['self.E', '(False)'], {}), '(self.E, False)\n', (2060, 2075), True, 'import RPi.GPIO as GPIO\n'), ((2078, 2102), 'time.sleep', 'time.sleep', (['self.E_DELAY'], {}), '(self.E_DELAY)\n', (2088, 2102), False, 'import time\n'), ((537, 562), 'RPi.GPIO.setup', 'GPIO.setup', (['pin', 'GPIO.OUT'], {}), '(pin, GPIO.OUT)\n', (547, 562), True, 'import RPi.GPIO as GPIO\n'), ((1226, 1249), 'RPi.GPIO.output', 'GPIO.output', (['pin', '(False)'], {}), '(pin, False)\n', (1237, 1249), True, 'import RPi.GPIO as GPIO\n'), ((1276, 1311), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[0]', '(True)'], {}), '(self.dataPins[0], True)\n', (1287, 1311), True, 'import RPi.GPIO as GPIO\n'), ((1337, 1372), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[1]', '(True)'], {}), '(self.dataPins[1], True)\n', (1348, 1372), True, 'import RPi.GPIO as GPIO\n'), ((1398, 1433), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[2]', '(True)'], {}), '(self.dataPins[2], True)\n', (1409, 1433), True, 'import RPi.GPIO as GPIO\n'), ((1459, 1494), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[3]', '(True)'], {}), '(self.dataPins[3], True)\n', (1470, 1494), True, 'import RPi.GPIO as GPIO\n'), ((1592, 1615), 'RPi.GPIO.output', 'GPIO.output', (['pin', '(False)'], {}), '(pin, False)\n', (1603, 1615), True, 'import RPi.GPIO as GPIO\n'), ((1645, 1680), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[0]', '(True)'], {}), '(self.dataPins[0], True)\n', (1656, 1680), True, 'import RPi.GPIO as GPIO\n'), ((1706, 1741), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[1]', '(True)'], {}), '(self.dataPins[1], True)\n', (1717, 1741), True, 'import RPi.GPIO as GPIO\n'), ((1767, 1802), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[2]', '(True)'], {}), '(self.dataPins[2], True)\n', (1778, 1802), True, 'import RPi.GPIO as GPIO\n'), ((1828, 1863), 'RPi.GPIO.output', 'GPIO.output', (['self.dataPins[3]', '(True)'], {}), '(self.dataPins[3], True)\n', (1839, 1863), True, 'import RPi.GPIO as GPIO\n')] |
import random
import numpy as np
from oscar.constants import *
from oscar.util.point import Point
def get_micro_management_location(obs, shared):
_MICRO_DISTANCE = 6
player_relative = obs.observation[MINIMAP][MINI_PLAYER_RELATIVE]
locations = []
for y in range(shared['minimap'].height(obs)):
for x in range(shared['minimap'].width(obs)):
if player_relative[y, x] == PLAYER_SELF:
p = Point(x, y)
if _is_close_to_enemy(obs, shared, p, _MICRO_DISTANCE):
locations.append(p)
return locations
def get_safe_screen_location(obs, shared, unit_point, influence_map):
safe_x, safe_y = [], []
safe_coeff = 0
while len(safe_x) == 0:
safe_x, safe_y = (influence_map == safe_coeff).nonzero()
safe_coeff += 1
best_loc, min_dist = None, None
for loc in zip(safe_x, safe_y):
dist = unit_point.distance(Point(loc[0], loc[1]))
if not best_loc or dist < min_dist:
best_loc = Point(loc[0], loc[1])
min_dist = dist
return best_loc
def get_enemy_influence_map(obs, shared):
_ENEMY_DISTANCE_DANGER = 25
influence_map = np.zeros((shared['screen'].width(obs), shared['screen'].height(obs)))
enemies = shared['screen'].scan_units(obs, shared, list(TERRAN_UNITS), PLAYER_HOSTILE)
for y in range(shared['screen'].height(obs)):
for x in range(shared['screen'].width(obs)):
for e in enemies:
if Point(x, y).distance(e.location.screen) <= _ENEMY_DISTANCE_DANGER:
influence_map[x, y] += 1
return influence_map
def get_closest_enemy(obs, shared, loc):
player_relative = obs.observation[MINIMAP][MINI_PLAYER_RELATIVE]
hostile_y, hostile_x = (player_relative == PLAYER_HOSTILE).nonzero()
closest, min_dist = None, None
for h in zip(hostile_x, hostile_y):
dist = loc.distance(Point(h[0], h[1]))
if not closest or dist < min_dist:
closest = Point(h[0], h[1])
min_dist = dist
return closest
def _is_close_to_enemy(obs, shared, point, max_dist):
player_relative = obs.observation[MINIMAP][MINI_PLAYER_RELATIVE]
for y in range(point.y-max_dist, point.y+max_dist+1):
if y < 0 or y >= shared['minimap'].height(obs):
continue
for x in range(point.x-max_dist, point.x+max_dist+1):
if x < 0 or x >= shared['minimap'].width(obs):
continue
if player_relative[y, x] == PLAYER_HOSTILE:
return True
return False
| [
"oscar.util.point.Point"
] | [((931, 952), 'oscar.util.point.Point', 'Point', (['loc[0]', 'loc[1]'], {}), '(loc[0], loc[1])\n', (936, 952), False, 'from oscar.util.point import Point\n'), ((1021, 1042), 'oscar.util.point.Point', 'Point', (['loc[0]', 'loc[1]'], {}), '(loc[0], loc[1])\n', (1026, 1042), False, 'from oscar.util.point import Point\n'), ((1930, 1947), 'oscar.util.point.Point', 'Point', (['h[0]', 'h[1]'], {}), '(h[0], h[1])\n', (1935, 1947), False, 'from oscar.util.point import Point\n'), ((2014, 2031), 'oscar.util.point.Point', 'Point', (['h[0]', 'h[1]'], {}), '(h[0], h[1])\n', (2019, 2031), False, 'from oscar.util.point import Point\n'), ((440, 451), 'oscar.util.point.Point', 'Point', (['x', 'y'], {}), '(x, y)\n', (445, 451), False, 'from oscar.util.point import Point\n'), ((1503, 1514), 'oscar.util.point.Point', 'Point', (['x', 'y'], {}), '(x, y)\n', (1508, 1514), False, 'from oscar.util.point import Point\n')] |
################################################################################
# code and images by <NAME>
# https://github.com/aaronpenne
################################################################################
################################################################################
# Imports
################################################################################
# Processing mode uses Python 2.7 but I prefer Python 3.x, pull in future tools
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import with_statement
# Normal Python imports
import os
import sys
import shutil
import logging
from datetime import datetime
from collections import OrderedDict
from random import seed, shuffle, sample
################################################################################
# Globals
################################################################################
# Knobs to turn
w = 500
h = 500
max_frames = 10000
attractor = None
particles = []
use_seed = False
rand_seed = 578919
# Utility variables
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
script_path = os.path.abspath(__file__)
script_name = os.path.basename(script_path)
script_ext = os.path.splitext(script_name)[1]
sketch_name = os.path.splitext(script_name)[0]
# Initialize random number generators with seed
if not use_seed:
rand_seed = int(random(99999,9999999))
randomSeed(rand_seed)
noiseSeed(rand_seed)
seed(rand_seed)
################################################################################
# Helper methods
#
# These exist here in the script instead of a separate centralized file to
# preserve portability and ability to recreate image with a single script
################################################################################
# Standardizes log formats
# ex. 2020-06-31 12:30:55 - INFO - log is better than print
logging.basicConfig(level=logging.INFO,
stream=sys.stdout,
format='%(asctime)s - %(levelname)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
log = logging.getLogger(__name__)
def make_dir(path):
"""Creates dir if it does not exist"""
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
def get_filename(counter):
"""Standardizes filename string format
ex. comet_12345_20200631_123055_001.png
"""
return '{}_{}_{}_{:03d}.png'.format(sketch_name, rand_seed, timestamp, counter)
def save_graphic(pg=None, path='output', counter=0):
"""Saves image and creates copy of this script"""
make_dir(path)
output_file = get_filename(counter)
output_path = os.path.join(path, output_file)
if pg:
pg.save(output_path)
else:
save(output_path)
log.info('Saved to {}'.format(output_path))
def save_code(pg=None, path='output', counter=0):
"""Saves image and creates copy of this script"""
make_dir(path)
output_file = get_filename(counter)
output_path = os.path.join(path, output_file)
make_dir('archive_code')
src = script_path
dst = os.path.join('archive_code', output_file + script_ext)
shutil.copy(src, dst)
def mousePressed():
save_graphic(None, 'output', frameCount)
################################################################################
# Artwork methods
#
# where the fun actually starts
################################################################################
class Particle:
def __init__(self, x, y, r=5):
self.pos = PVector(x, y)
self.vel = PVector(random(-5,5), random(-5,5))
self.acc = PVector()
self.vel_limit = 3000
self.r = r
self.c = color(0, 0, 100, 10)
def move(self):
self.pos.add(self.vel)
# limits
if self.vel.mag() <= self.vel_limit:
self.vel.add(self.acc)
"""
# handle x edges
if self.pos.x > w+self.r:
self.pos.x = -self.r
elif self.pos.x < -self.r:
self.pos.x = w+self.r
# handle y edges
if self.pos.y > h+self.r:
self.pos.y = -self.r
elif self.pos.y < -self.r:
self.pos.y = h+self.r
"""
def render_points(self):
pushStyle()
stroke(self.c)
strokeWeight(self.r)
point(self.pos.x, self.pos.y)
popStyle()
def render_lines(self, target):
pushStyle()
stroke(self.c)
strokeWeight(self.r)
line(self.pos.x, self.pos.y, target.x, target.y)
popStyle()
def attracted(self, target):
force = PVector.sub(target, self.pos)
dsquared = force.magSq()
dsquared = constrain(dsquared, 25, 100)
G = 100
strength = G / dsquared
force.setMag(strength)
self.acc = force
################################################################################
# Setup
################################################################################
def setup():
size(w, h)
colorMode(HSB, 360, 100, 100, 100)
background(0)
frameRate(10)
global attractor
attractor = PVector(w/2 + w*0.2*cos(0), h/2 + h*0.2*sin(0))
global particles
for n in range(10):
#particles.append(Particle(random(w), random(h)))
particles.append(Particle(w/2+random(20,-20), h/2+random(-20,20)))
save_code(None, 'output', frameCount)
################################################################################
# Draw
################################################################################
def draw():
#background(0)
pushStyle()
stroke(231, 76, 60, 100)
strokeWeight(10)
attractor = PVector(w/2 + w*0.2*cos(frameCount*TAU/360),
h/2 + h*0.2*sin(frameCount*TAU/360))
#point(attractor.x, attractor.y)
popStyle()
for idx,p in enumerate(particles):
p.attracted(attractor)
p.move()
#p.render_points()
p.render_lines(attractor)
if frameCount % 20 == 0:
print(frameCount)
if frameCount % max_frames == 0:
exit()
| [
"logging.basicConfig",
"logging.getLogger",
"os.makedirs",
"os.path.splitext",
"os.path.join",
"random.seed",
"datetime.datetime.now",
"os.path.isdir",
"os.path.basename",
"shutil.copy",
"os.path.abspath"
] | [((1189, 1214), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1204, 1214), False, 'import os\n'), ((1229, 1258), 'os.path.basename', 'os.path.basename', (['script_path'], {}), '(script_path)\n', (1245, 1258), False, 'import os\n'), ((1504, 1519), 'random.seed', 'seed', (['rand_seed'], {}), '(rand_seed)\n', (1508, 1519), False, 'from random import seed, shuffle, sample\n'), ((1941, 2085), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'stream': 'sys.stdout', 'format': '"""%(asctime)s - %(levelname)s - %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(level=logging.INFO, stream=sys.stdout, format=\n '%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')\n", (1960, 2085), False, 'import logging\n'), ((2147, 2174), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2164, 2174), False, 'import logging\n'), ((1272, 1301), 'os.path.splitext', 'os.path.splitext', (['script_name'], {}), '(script_name)\n', (1288, 1301), False, 'import os\n'), ((1319, 1348), 'os.path.splitext', 'os.path.splitext', (['script_name'], {}), '(script_name)\n', (1335, 1348), False, 'import os\n'), ((2705, 2736), 'os.path.join', 'os.path.join', (['path', 'output_file'], {}), '(path, output_file)\n', (2717, 2736), False, 'import os\n'), ((3022, 3053), 'os.path.join', 'os.path.join', (['path', 'output_file'], {}), '(path, output_file)\n', (3034, 3053), False, 'import os\n'), ((3109, 3163), 'os.path.join', 'os.path.join', (['"""archive_code"""', '(output_file + script_ext)'], {}), "('archive_code', output_file + script_ext)\n", (3121, 3163), False, 'import os\n'), ((3166, 3187), 'shutil.copy', 'shutil.copy', (['src', 'dst'], {}), '(src, dst)\n', (3177, 3187), False, 'import shutil\n'), ((1134, 1148), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1146, 1148), False, 'from datetime import datetime\n'), ((2248, 2265), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2259, 2265), False, 'import os\n'), ((2295, 2314), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (2308, 2314), False, 'import os\n')] |
from vplot import GetOutput
import subprocess as sub
import numpy as np
import os
cwd = os.path.dirname(os.path.realpath(__file__))
def test_vspace_log():
dir = cwd+'/Log_Test'
# Removes the files created when vspace is ran
sub.run(['rm', '-rf', dir],cwd=cwd)
# Runs vspace
sub.run(['python','../../vspace/vspace/vspace.py','vspace.in'],cwd=cwd)
# Grab the output
folders = sorted([f.path for f in os.scandir(dir) if f.is_dir()])
semi = []
for i in range(len(folders)):
os.chdir(folders[i])
with open('earth.in', 'r') as f:
for newline in f:
if newline.startswith("dSemi"):
newline = newline.strip().split()
semi.append(newline[1])
os.chdir('../')
for i in range(len(semi)):
semi[i] = float(semi[i])
assert np.isclose(semi[0], 1.0)
assert np.isclose(semi[1], 2.15443469)
assert np.isclose(semi[2], 4.64158883)
assert np.isclose(semi[3], 10.0)
assert np.isclose(semi[4], 21.5443469)
assert np.isclose(semi[5], 46.41588834)
assert np.isclose(semi[6], 100.0)
assert np.isclose(semi[7], 215.443469)
assert np.isclose(semi[8], 464.15888336)
assert np.isclose(semi[9], 1000.0)
if __name__ == "__main__":
test_vspace_log()
| [
"numpy.isclose",
"subprocess.run",
"os.scandir",
"os.path.realpath",
"os.chdir"
] | [((104, 130), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (120, 130), False, 'import os\n'), ((238, 274), 'subprocess.run', 'sub.run', (["['rm', '-rf', dir]"], {'cwd': 'cwd'}), "(['rm', '-rf', dir], cwd=cwd)\n", (245, 274), True, 'import subprocess as sub\n'), ((296, 370), 'subprocess.run', 'sub.run', (["['python', '../../vspace/vspace/vspace.py', 'vspace.in']"], {'cwd': 'cwd'}), "(['python', '../../vspace/vspace/vspace.py', 'vspace.in'], cwd=cwd)\n", (303, 370), True, 'import subprocess as sub\n'), ((854, 878), 'numpy.isclose', 'np.isclose', (['semi[0]', '(1.0)'], {}), '(semi[0], 1.0)\n', (864, 878), True, 'import numpy as np\n'), ((890, 921), 'numpy.isclose', 'np.isclose', (['semi[1]', '(2.15443469)'], {}), '(semi[1], 2.15443469)\n', (900, 921), True, 'import numpy as np\n'), ((933, 964), 'numpy.isclose', 'np.isclose', (['semi[2]', '(4.64158883)'], {}), '(semi[2], 4.64158883)\n', (943, 964), True, 'import numpy as np\n'), ((976, 1001), 'numpy.isclose', 'np.isclose', (['semi[3]', '(10.0)'], {}), '(semi[3], 10.0)\n', (986, 1001), True, 'import numpy as np\n'), ((1013, 1044), 'numpy.isclose', 'np.isclose', (['semi[4]', '(21.5443469)'], {}), '(semi[4], 21.5443469)\n', (1023, 1044), True, 'import numpy as np\n'), ((1056, 1088), 'numpy.isclose', 'np.isclose', (['semi[5]', '(46.41588834)'], {}), '(semi[5], 46.41588834)\n', (1066, 1088), True, 'import numpy as np\n'), ((1100, 1126), 'numpy.isclose', 'np.isclose', (['semi[6]', '(100.0)'], {}), '(semi[6], 100.0)\n', (1110, 1126), True, 'import numpy as np\n'), ((1138, 1169), 'numpy.isclose', 'np.isclose', (['semi[7]', '(215.443469)'], {}), '(semi[7], 215.443469)\n', (1148, 1169), True, 'import numpy as np\n'), ((1181, 1214), 'numpy.isclose', 'np.isclose', (['semi[8]', '(464.15888336)'], {}), '(semi[8], 464.15888336)\n', (1191, 1214), True, 'import numpy as np\n'), ((1226, 1253), 'numpy.isclose', 'np.isclose', (['semi[9]', '(1000.0)'], {}), '(semi[9], 1000.0)\n', (1236, 1253), True, 'import numpy as np\n'), ((516, 536), 'os.chdir', 'os.chdir', (['folders[i]'], {}), '(folders[i])\n', (524, 536), False, 'import os\n'), ((762, 777), 'os.chdir', 'os.chdir', (['"""../"""'], {}), "('../')\n", (770, 777), False, 'import os\n'), ((428, 443), 'os.scandir', 'os.scandir', (['dir'], {}), '(dir)\n', (438, 443), False, 'import os\n')] |
# Copyright 2017-2020 typed_python Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typed_python.compiler.type_wrappers.wrapper import Wrapper
from typed_python.compiler.type_wrappers.refcounted_wrapper import RefcountedWrapper
import typed_python.compiler.type_wrappers.runtime_functions as runtime_functions
from typed_python.compiler.conversion_level import ConversionLevel
from typed_python.compiler.type_wrappers.bound_method_wrapper import BoundMethodWrapper
from typed_python.compiler.type_wrappers.util import min
from typed_python.compiler.typed_expression import TypedExpression
from typed_python import Tuple
import typed_python.compiler.native_ast as native_ast
import typed_python.compiler
typeWrapper = lambda t: typed_python.compiler.python_object_representation.typedPythonTypeToTypeWrapper(t)
def const_dict_eq(l, r):
if len(l) != len(r):
return False
for i in range(len(l)):
if l.get_key_by_index_unsafe(i) != r.get_key_by_index_unsafe(i):
return False
if l.get_value_by_index_unsafe(i) != r.get_value_by_index_unsafe(i):
return False
return True
def const_dict_neq(l, r):
return not const_dict_eq(l, r)
def const_dict_lt(left, right):
"""Compare two 'ConstDict' instances by comparing their individual elements."""
for i in range(min(len(left), len(right))):
if left.get_key_by_index_unsafe(i) > right.get_key_by_index_unsafe(i):
return False
if left.get_key_by_index_unsafe(i) < right.get_key_by_index_unsafe(i):
return True
if left.get_value_by_index_unsafe(i) > right.get_value_by_index_unsafe(i):
return False
if left.get_value_by_index_unsafe(i) < right.get_value_by_index_unsafe(i):
return True
return len(left) < len(right)
def const_dict_lte(left, right):
"""Compare two 'ConstDict' instances by comparing their individual elements."""
for i in range(min(len(left), len(right))):
if left.get_key_by_index_unsafe(i) > right.get_key_by_index_unsafe(i):
return False
if left.get_key_by_index_unsafe(i) < right.get_key_by_index_unsafe(i):
return True
if left.get_value_by_index_unsafe(i) > right.get_value_by_index_unsafe(i):
return False
if left.get_value_by_index_unsafe(i) < right.get_value_by_index_unsafe(i):
return True
return len(left) <= len(right)
def const_dict_gt(left, right):
return not const_dict_lte(left, right)
def const_dict_gte(left, right):
return not const_dict_lt(left, right)
def const_dict_getitem(constDict, key):
# perform a binary search
lowIx = 0
highIx = len(constDict)
while lowIx < highIx:
mid = (lowIx + highIx) >> 1
keyAtVal = constDict.get_key_by_index_unsafe(mid)
if keyAtVal < key:
lowIx = mid + 1
elif key < keyAtVal:
highIx = mid
else:
return constDict.get_value_by_index_unsafe(mid)
raise KeyError(key)
def const_dict_get(constDict, key, default):
# perform a binary search
lowIx = 0
highIx = len(constDict)
while lowIx < highIx:
mid = (lowIx + highIx) >> 1
keyAtVal = constDict.get_key_by_index_unsafe(mid)
if keyAtVal < key:
lowIx = mid + 1
elif key < keyAtVal:
highIx = mid
else:
return constDict.get_value_by_index_unsafe(mid)
return default
def const_dict_contains(constDict, key):
# perform a binary search
lowIx = 0
highIx = len(constDict)
while lowIx < highIx:
mid = (lowIx + highIx) >> 1
keyAtVal = constDict.get_key_by_index_unsafe(mid)
if keyAtVal < key:
lowIx = mid + 1
elif key < keyAtVal:
highIx = mid
else:
return True
return False
class ConstDictWrapperBase(RefcountedWrapper):
"""Common method wrappers for all ConstDicts.
We subclass this for things like 'keys', 'values', and 'items' since
they all basically look like a const-dict with different methods
"""
is_pod = False
is_empty = False
is_pass_by_ref = True
def __init__(self, constDictType, behavior):
assert hasattr(constDictType, '__typed_python_category__')
super().__init__(constDictType if behavior is None else (constDictType, behavior))
self.constDictType = constDictType
self.keyType = typeWrapper(constDictType.KeyType)
self.valueType = typeWrapper(constDictType.ValueType)
self.itemType = typeWrapper(Tuple(constDictType.KeyType, constDictType.ValueType))
self.kvBytecount = self.keyType.getBytecount() + self.valueType.getBytecount()
self.keyBytecount = self.keyType.getBytecount()
self.layoutType = native_ast.Type.Struct(element_types=(
('refcount', native_ast.Int64),
('hash_cache', native_ast.Int32),
('count', native_ast.Int32),
('subpointers', native_ast.Int32),
('data', native_ast.UInt8)
), name='ConstDictLayout').pointer()
def getNativeLayoutType(self):
return self.layoutType
def on_refcount_zero(self, context, instance):
assert instance.isReference
if self.keyType.is_pod and self.valueType.is_pod:
return runtime_functions.free.call(instance.nonref_expr.cast(native_ast.UInt8Ptr))
else:
return (
context.converter.defineNativeFunction(
"destructor_" + str(self.constDictType),
('destructor', self),
[self],
typeWrapper(type(None)),
self.generateNativeDestructorFunction
)
.call(instance)
)
def generateNativeDestructorFunction(self, context, out, inst):
with context.loop(inst.convert_len()) as i:
self.convert_getkey_by_index_unsafe(context, inst, i).convert_destroy()
self.convert_getvalue_by_index_unsafe(context, inst, i).convert_destroy()
context.pushEffect(
runtime_functions.free.call(inst.nonref_expr.cast(native_ast.UInt8Ptr))
)
class ConstDictWrapper(ConstDictWrapperBase):
def __init__(self, constDictType):
super().__init__(constDictType, None)
def convert_attribute(self, context, instance, attr):
if attr in ("get_key_by_index_unsafe", "get_value_by_index_unsafe", "keys", "values", "items", "get"):
return instance.changeType(BoundMethodWrapper.Make(self, attr))
return super().convert_attribute(context, instance, attr)
def convert_default_initialize(self, context, instance):
context.pushEffect(
instance.expr.store(self.layoutType.zero())
)
def convert_method_call(self, context, instance, methodname, args, kwargs):
if methodname == "__iter__" and not args and not kwargs:
res = context.push(
ConstDictKeysIteratorWrapper(self.constDictType),
lambda instance:
instance.expr.ElementPtrIntegers(0, 0).store(-1)
# we initialize the dict pointer below, so technically
# if that were to throw, this would leak a bad value.
)
context.pushReference(
self,
res.expr.ElementPtrIntegers(0, 1)
).convert_copy_initialize(instance)
return res
if methodname == "get" and not kwargs:
if len(args) == 1:
return self.convert_get(context, instance, args[0], context.constant(None))
elif len(args) == 2:
return self.convert_get(context, instance, args[0], args[1])
if methodname == "keys" and not args and not kwargs:
return instance.changeType(ConstDictKeysWrapper(self.constDictType))
if methodname == "values" and not args and not kwargs:
return instance.changeType(ConstDictValuesWrapper(self.constDictType))
if methodname == "items" and not args and not kwargs:
return instance.changeType(ConstDictItemsWrapper(self.constDictType))
if kwargs:
return super().convert_method_call(context, instance, methodname, args, kwargs)
if methodname == "get_key_by_index_unsafe":
if len(args) == 1:
ix = args[0].toInt64()
if ix is None:
return
return self.convert_getkey_by_index_unsafe(context, instance, ix)
if methodname == "get_value_by_index_unsafe":
if len(args) == 1:
ix = args[0].toInt64()
if ix is None:
return
return self.convert_getvalue_by_index_unsafe(context, instance, ix)
return super().convert_method_call(context, instance, methodname, args, kwargs)
def convert_getkey_by_index_unsafe(self, context, expr, item):
return context.pushReference(
self.keyType,
expr.nonref_expr.ElementPtrIntegers(0, 4).elemPtr(
item.nonref_expr.mul(native_ast.const_int_expr(self.kvBytecount))
).cast(self.keyType.getNativeLayoutType().pointer())
)
def convert_getitem_by_index_unsafe(self, context, expr, item):
return context.pushReference(
self.itemType,
expr.nonref_expr.ElementPtrIntegers(0, 4).elemPtr(
item.nonref_expr.mul(native_ast.const_int_expr(self.kvBytecount))
).cast(self.itemType.getNativeLayoutType().pointer())
)
def convert_getvalue_by_index_unsafe(self, context, expr, item):
return context.pushReference(
self.valueType,
expr.nonref_expr.ElementPtrIntegers(0, 4).elemPtr(
item.nonref_expr.mul(native_ast.const_int_expr(self.kvBytecount))
.add(native_ast.const_int_expr(self.keyBytecount))
).cast(self.valueType.getNativeLayoutType().pointer())
)
def convert_bin_op(self, context, left, op, right, inplace):
if right.expr_type == left.expr_type:
if op.matches.Eq:
return context.call_py_function(const_dict_eq, (left, right), {})
if op.matches.NotEq:
return context.call_py_function(const_dict_neq, (left, right), {})
if op.matches.Lt:
return context.call_py_function(const_dict_lt, (left, right), {})
if op.matches.LtE:
return context.call_py_function(const_dict_lte, (left, right), {})
if op.matches.Gt:
return context.call_py_function(const_dict_gt, (left, right), {})
if op.matches.GtE:
return context.call_py_function(const_dict_gte, (left, right), {})
return super().convert_bin_op(context, left, op, right, inplace)
def convert_bin_op_reverse(self, context, left, op, right, inplace):
if op.matches.In:
right = right.convert_to_type(self.keyType, ConversionLevel.UpcastContainers)
if right is None:
return None
return context.call_py_function(const_dict_contains, (left, right), {})
return super().convert_bin_op_reverse(context, left, op, right, inplace)
def convert_getitem(self, context, instance, item):
item = item.convert_to_type(self.keyType, ConversionLevel.UpcastContainers)
if item is None:
return None
return context.call_py_function(const_dict_getitem, (instance, item), {})
def convert_get(self, context, expr, item, default):
if item is None or expr is None or default is None:
return None
item = item.convert_to_type(self.keyType, ConversionLevel.UpcastContainers)
if item is None:
return None
return context.call_py_function(const_dict_get, (expr, item, default), {})
def convert_len_native(self, expr):
if isinstance(expr, TypedExpression):
expr = expr.nonref_expr
return native_ast.Expression.Branch(
cond=expr,
false=native_ast.const_int_expr(0),
true=expr.ElementPtrIntegers(0, 2).load().cast(native_ast.Int64)
)
def convert_len(self, context, expr):
return context.pushPod(int, self.convert_len_native(expr.nonref_expr))
def _can_convert_to_type(self, targetType, conversionLevel):
if not conversionLevel.isNewOrHigher():
return False
if targetType.typeRepresentation is bool:
return True
if targetType.typeRepresentation is str:
return "Maybe"
return False
def convert_to_type_with_target(self, context, instance, targetVal, conversionLevel, mayThrowOnFailure=False):
if targetVal.expr_type.typeRepresentation is bool:
res = context.pushPod(bool, self.convert_len_native(instance.nonref_expr).neq(0))
context.pushEffect(
targetVal.expr.store(res.nonref_expr)
)
return context.constant(True)
return super().convert_to_type_with_target(context, instance, targetVal, conversionLevel, mayThrowOnFailure)
class ConstDictMakeIteratorWrapper(ConstDictWrapperBase):
def convert_method_call(self, context, expr, methodname, args, kwargs):
if methodname == "__iter__" and not args and not kwargs:
res = context.push(
# self.iteratorType is inherited from our specialized children
# who pick whether we're an interator over keys, values, items, etc.
self.iteratorType,
lambda instance:
instance.expr.ElementPtrIntegers(0, 0).store(-1)
)
context.pushReference(
self,
res.expr.ElementPtrIntegers(0, 1)
).convert_copy_initialize(expr)
return res
return super().convert_method_call(context, expr, methodname, args, kwargs)
class ConstDictKeysWrapper(ConstDictMakeIteratorWrapper):
def __init__(self, constDictType):
super().__init__(constDictType, "keys")
self.iteratorType = ConstDictKeysIteratorWrapper(constDictType)
class ConstDictValuesWrapper(ConstDictMakeIteratorWrapper):
def __init__(self, constDictType):
super().__init__(constDictType, "values")
self.iteratorType = ConstDictValuesIteratorWrapper(constDictType)
class ConstDictItemsWrapper(ConstDictMakeIteratorWrapper):
def __init__(self, constDictType):
super().__init__(constDictType, "items")
self.iteratorType = ConstDictItemsIteratorWrapper(constDictType)
class ConstDictIteratorWrapper(Wrapper):
is_pod = False
is_empty = False
is_pass_by_ref = True
def __init__(self, constDictType, iteratorType):
self.constDictType = constDictType
self.iteratorType = iteratorType
super().__init__((constDictType, "iterator", iteratorType))
def getNativeLayoutType(self):
return native_ast.Type.Struct(
element_types=(("pos", native_ast.Int64), ("dict", ConstDictWrapper(self.constDictType).getNativeLayoutType())),
name="const_dict_iterator"
)
def convert_fastnext(self, context, expr):
context.pushEffect(
expr.expr.ElementPtrIntegers(0, 0).store(
expr.expr.ElementPtrIntegers(0, 0).load().add(1)
)
)
self_len = self.refAs(context, expr, 1).convert_len()
canContinue = context.pushPod(
bool,
expr.expr.ElementPtrIntegers(0, 0).load().lt(self_len.nonref_expr)
)
nextIx = context.pushReference(int, expr.expr.ElementPtrIntegers(0, 0))
return self.iteratedItemForReference(context, expr, nextIx).asPointerIf(canContinue)
def refAs(self, context, expr, which):
assert expr.expr_type == self
if which == 0:
return context.pushReference(int, expr.expr.ElementPtrIntegers(0, 0))
if which == 1:
return context.pushReference(
self.constDictType,
expr.expr
.ElementPtrIntegers(0, 1)
.cast(ConstDictWrapper(self.constDictType).getNativeLayoutType().pointer())
)
def convert_assign(self, context, expr, other):
assert expr.isReference
for i in range(2):
self.refAs(context, expr, i).convert_assign(self.refAs(context, other, i))
def convert_copy_initialize(self, context, expr, other):
for i in range(2):
self.refAs(context, expr, i).convert_copy_initialize(self.refAs(context, other, i))
def convert_destroy(self, context, expr):
self.refAs(context, expr, 1).convert_destroy()
class ConstDictKeysIteratorWrapper(ConstDictIteratorWrapper):
def __init__(self, constDictType):
super().__init__(constDictType, "keys")
def iteratedItemForReference(self, context, expr, ixExpr):
return ConstDictWrapper(self.constDictType).convert_getkey_by_index_unsafe(
context,
self.refAs(context, expr, 1),
ixExpr
)
class ConstDictItemsIteratorWrapper(ConstDictIteratorWrapper):
def __init__(self, constDictType):
super().__init__(constDictType, "items")
def iteratedItemForReference(self, context, expr, ixExpr):
return ConstDictWrapper(self.constDictType).convert_getitem_by_index_unsafe(
context,
self.refAs(context, expr, 1),
ixExpr
)
class ConstDictValuesIteratorWrapper(ConstDictIteratorWrapper):
def __init__(self, constDictType):
super().__init__(constDictType, "values")
def iteratedItemForReference(self, context, expr, ixExpr):
return ConstDictWrapper(self.constDictType).convert_getvalue_by_index_unsafe(
context,
self.refAs(context, expr, 1),
ixExpr
)
| [
"typed_python.compiler.native_ast.const_int_expr",
"typed_python.compiler.native_ast.Type.Struct",
"typed_python.compiler.type_wrappers.bound_method_wrapper.BoundMethodWrapper.Make",
"typed_python.Tuple"
] | [((5152, 5205), 'typed_python.Tuple', 'Tuple', (['constDictType.KeyType', 'constDictType.ValueType'], {}), '(constDictType.KeyType, constDictType.ValueType)\n', (5157, 5205), False, 'from typed_python import Tuple\n'), ((5378, 5613), 'typed_python.compiler.native_ast.Type.Struct', 'native_ast.Type.Struct', ([], {'element_types': "(('refcount', native_ast.Int64), ('hash_cache', native_ast.Int32), ('count',\n native_ast.Int32), ('subpointers', native_ast.Int32), ('data',\n native_ast.UInt8))", 'name': '"""ConstDictLayout"""'}), "(element_types=(('refcount', native_ast.Int64), (\n 'hash_cache', native_ast.Int32), ('count', native_ast.Int32), (\n 'subpointers', native_ast.Int32), ('data', native_ast.UInt8)), name=\n 'ConstDictLayout')\n", (5400, 5613), True, 'import typed_python.compiler.native_ast as native_ast\n'), ((7133, 7168), 'typed_python.compiler.type_wrappers.bound_method_wrapper.BoundMethodWrapper.Make', 'BoundMethodWrapper.Make', (['self', 'attr'], {}), '(self, attr)\n', (7156, 7168), False, 'from typed_python.compiler.type_wrappers.bound_method_wrapper import BoundMethodWrapper\n'), ((12786, 12814), 'typed_python.compiler.native_ast.const_int_expr', 'native_ast.const_int_expr', (['(0)'], {}), '(0)\n', (12811, 12814), True, 'import typed_python.compiler.native_ast as native_ast\n'), ((9763, 9806), 'typed_python.compiler.native_ast.const_int_expr', 'native_ast.const_int_expr', (['self.kvBytecount'], {}), '(self.kvBytecount)\n', (9788, 9806), True, 'import typed_python.compiler.native_ast as native_ast\n'), ((10117, 10160), 'typed_python.compiler.native_ast.const_int_expr', 'native_ast.const_int_expr', (['self.kvBytecount'], {}), '(self.kvBytecount)\n', (10142, 10160), True, 'import typed_python.compiler.native_ast as native_ast\n'), ((10540, 10584), 'typed_python.compiler.native_ast.const_int_expr', 'native_ast.const_int_expr', (['self.keyBytecount'], {}), '(self.keyBytecount)\n', (10565, 10584), True, 'import typed_python.compiler.native_ast as native_ast\n'), ((10474, 10517), 'typed_python.compiler.native_ast.const_int_expr', 'native_ast.const_int_expr', (['self.kvBytecount'], {}), '(self.kvBytecount)\n', (10499, 10517), True, 'import typed_python.compiler.native_ast as native_ast\n')] |
import numpy as np
import random
# Select training and test data set randomly
def create_trainingset(X,y,N,N_test):
idx_train = set(range(N))
idx_test = set(random.sample(range(N),N_test))
idx_train.difference_update(idx_test)
X_test = np.array([ X[i,:] for i in idx_test])
y_test = np.array([ y[i] for i in idx_test])
X_train = np.array([ X[i,:] for i in idx_train])
y_train = np.array([ y[i] for i in idx_train])
return X_test,y_test,X_train,y_train
| [
"numpy.array"
] | [((266, 303), 'numpy.array', 'np.array', (['[X[i, :] for i in idx_test]'], {}), '([X[i, :] for i in idx_test])\n', (274, 303), True, 'import numpy as np\n'), ((318, 352), 'numpy.array', 'np.array', (['[y[i] for i in idx_test]'], {}), '([y[i] for i in idx_test])\n', (326, 352), True, 'import numpy as np\n'), ((368, 406), 'numpy.array', 'np.array', (['[X[i, :] for i in idx_train]'], {}), '([X[i, :] for i in idx_train])\n', (376, 406), True, 'import numpy as np\n'), ((421, 456), 'numpy.array', 'np.array', (['[y[i] for i in idx_train]'], {}), '([y[i] for i in idx_train])\n', (429, 456), True, 'import numpy as np\n')] |
from flask import Flask, request, redirect, render_template, json, session, jsonify,url_for
from app import app, database
@app.route('/classlist')
def classlist():
sess = json.loads(session['user_auth'])
first = sess.get('_FirstName')
last = sess.get('_LastName')
teachID = sess.get('_TeacherID')
profileID = sess.get('_StudentID')
projectID = sess.get('_ProjectID')
students = database.getClassList(teachID)
projects = [project for project in database.getProjects() if project.getTeacherID() == teachID]
return render_template("classlist.html",
students=students, projects=projects,
name=first+' '+last, profileID=profileID,
studentID=profileID, teacherID=teachID, projectID=projectID)
| [
"flask.render_template",
"app.database.getProjects",
"app.database.getClassList",
"flask.json.loads",
"app.app.route"
] | [((124, 147), 'app.app.route', 'app.route', (['"""/classlist"""'], {}), "('/classlist')\n", (133, 147), False, 'from app import app, database\n'), ((176, 208), 'flask.json.loads', 'json.loads', (["session['user_auth']"], {}), "(session['user_auth'])\n", (186, 208), False, 'from flask import Flask, request, redirect, render_template, json, session, jsonify, url_for\n'), ((408, 438), 'app.database.getClassList', 'database.getClassList', (['teachID'], {}), '(teachID)\n', (429, 438), False, 'from app import app, database\n'), ((551, 737), 'flask.render_template', 'render_template', (['"""classlist.html"""'], {'students': 'students', 'projects': 'projects', 'name': "(first + ' ' + last)", 'profileID': 'profileID', 'studentID': 'profileID', 'teacherID': 'teachID', 'projectID': 'projectID'}), "('classlist.html', students=students, projects=projects,\n name=first + ' ' + last, profileID=profileID, studentID=profileID,\n teacherID=teachID, projectID=projectID)\n", (566, 737), False, 'from flask import Flask, request, redirect, render_template, json, session, jsonify, url_for\n'), ((478, 500), 'app.database.getProjects', 'database.getProjects', ([], {}), '()\n', (498, 500), False, 'from app import app, database\n')] |
import os
import sys
import pytest
from mock import MagicMock, Mock, patch
app_path = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, app_path + '/../')
import photo_rename
from photo_rename.rename import *
from .stubs import *
from . import (TEST_HARVESTER_FILEMAPS_FOR_METADATA_COPY)
class TestFilemapsForMetadataCopy():
"""
Tests for function filemaps_for_metadata_copy() are in this class.
"""
skiptests = not TEST_HARVESTER_FILEMAPS_FOR_METADATA_COPY
@pytest.mark.skipif(skiptests, reason="Work in progress")
@patch('photo_rename.harvester.FilemapList')
@patch('photo_rename.harvester.Filemap')
@patch('photo_rename.harvester.os.listdir')
def test_filemaps_for_metadata_copy_one_match(
self, m_listdir, m_filemap, m_filemaplist):
"""
Test filemaps_for_metadata_copy(). One file with matching prefix.
Confirm that filemaps.add() called once.
"""
src_files = ['12.tiff', '34.tiff']
dst_files = ['12.jpg', '45.jpg']
m_listdir.return_value = dst_files
harvey = Harvester('/src/dir', metadata_dst_directory="/dst/dir")
harvey.files = src_files
actual_filemaps = harvey.filemaps_for_metadata_copy()
actual_filemaps.add.assert_called_once()
@pytest.mark.skipif(skiptests, reason="Work in progress")
@patch('photo_rename.harvester.FilemapList')
@patch('photo_rename.harvester.Filemap')
@patch('photo_rename.harvester.os.listdir')
def test_filemaps_for_metadata_copy_no_match(
self, m_listdir, m_filemap, m_filemaplist):
"""
Test filemaps_for_metadata_copy(). No files with matching prefix.
Confirm that filemaps.add() not called.
"""
src_files = ['12.tiff', '34.tiff']
dst_files = ['0.jpg', '45.jpg']
m_listdir.return_value = dst_files
harvey = Harvester('/src/dir', metadata_dst_directory="/dst/dir")
harvey.files = src_files
actual_filemaps = harvey.filemaps_for_metadata_copy()
actual_filemaps.add.assert_not_called()
| [
"os.path.abspath",
"sys.path.insert",
"mock.patch",
"pytest.mark.skipif"
] | [((129, 166), 'sys.path.insert', 'sys.path.insert', (['(0)', "(app_path + '/../')"], {}), "(0, app_path + '/../')\n", (144, 166), False, 'import sys\n'), ((102, 127), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (117, 127), False, 'import os\n'), ((495, 551), 'pytest.mark.skipif', 'pytest.mark.skipif', (['skiptests'], {'reason': '"""Work in progress"""'}), "(skiptests, reason='Work in progress')\n", (513, 551), False, 'import pytest\n'), ((557, 600), 'mock.patch', 'patch', (['"""photo_rename.harvester.FilemapList"""'], {}), "('photo_rename.harvester.FilemapList')\n", (562, 600), False, 'from mock import MagicMock, Mock, patch\n'), ((606, 645), 'mock.patch', 'patch', (['"""photo_rename.harvester.Filemap"""'], {}), "('photo_rename.harvester.Filemap')\n", (611, 645), False, 'from mock import MagicMock, Mock, patch\n'), ((651, 693), 'mock.patch', 'patch', (['"""photo_rename.harvester.os.listdir"""'], {}), "('photo_rename.harvester.os.listdir')\n", (656, 693), False, 'from mock import MagicMock, Mock, patch\n'), ((1299, 1355), 'pytest.mark.skipif', 'pytest.mark.skipif', (['skiptests'], {'reason': '"""Work in progress"""'}), "(skiptests, reason='Work in progress')\n", (1317, 1355), False, 'import pytest\n'), ((1361, 1404), 'mock.patch', 'patch', (['"""photo_rename.harvester.FilemapList"""'], {}), "('photo_rename.harvester.FilemapList')\n", (1366, 1404), False, 'from mock import MagicMock, Mock, patch\n'), ((1410, 1449), 'mock.patch', 'patch', (['"""photo_rename.harvester.Filemap"""'], {}), "('photo_rename.harvester.Filemap')\n", (1415, 1449), False, 'from mock import MagicMock, Mock, patch\n'), ((1455, 1497), 'mock.patch', 'patch', (['"""photo_rename.harvester.os.listdir"""'], {}), "('photo_rename.harvester.os.listdir')\n", (1460, 1497), False, 'from mock import MagicMock, Mock, patch\n')] |
##########################################################################################
# Base class for creating Tensorflow Records
##########################################################################################
import tensorflow as tf
import numpy as np
import os
from PIL import Image, ImageOps
import logging
import pudb
import cv2
import utils.utils as utils
import utils.utils_image as utils_image
class BaseTFRecords():
def __init__(self, config):
self.config = config
## Setup
utils.remove_dirs([config.tfrecords_path_train, config.tfrecords_path_test])
utils.create_dirs([config.tfrecords_path_train, config.tfrecords_path_test])
def read_dataset(self, dataset_path):
raise NotImplementedError
def wrap_bytes(self, value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def wrap_int64(self, value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def rotate_image(image):
image_path = Image.open(image)
image_array = np.array(image_path)
rotated_image1 = np.rot90(image_array)
rotated_image2 = np.rot90(rotated_image1)
rotated_image3 = np.rot90(rotated_image2)
return [image_array, rotated_image1, rotated_image2, rotated_image3]
| [
"PIL.Image.open",
"utils.utils.remove_dirs",
"tensorflow.train.Int64List",
"tensorflow.train.BytesList",
"numpy.array",
"numpy.rot90",
"utils.utils.create_dirs"
] | [((532, 608), 'utils.utils.remove_dirs', 'utils.remove_dirs', (['[config.tfrecords_path_train, config.tfrecords_path_test]'], {}), '([config.tfrecords_path_train, config.tfrecords_path_test])\n', (549, 608), True, 'import utils.utils as utils\n'), ((617, 693), 'utils.utils.create_dirs', 'utils.create_dirs', (['[config.tfrecords_path_train, config.tfrecords_path_test]'], {}), '([config.tfrecords_path_train, config.tfrecords_path_test])\n', (634, 693), True, 'import utils.utils as utils\n'), ((1050, 1067), 'PIL.Image.open', 'Image.open', (['image'], {}), '(image)\n', (1060, 1067), False, 'from PIL import Image, ImageOps\n'), ((1090, 1110), 'numpy.array', 'np.array', (['image_path'], {}), '(image_path)\n', (1098, 1110), True, 'import numpy as np\n'), ((1136, 1157), 'numpy.rot90', 'np.rot90', (['image_array'], {}), '(image_array)\n', (1144, 1157), True, 'import numpy as np\n'), ((1183, 1207), 'numpy.rot90', 'np.rot90', (['rotated_image1'], {}), '(rotated_image1)\n', (1191, 1207), True, 'import numpy as np\n'), ((1233, 1257), 'numpy.rot90', 'np.rot90', (['rotated_image2'], {}), '(rotated_image2)\n', (1241, 1257), True, 'import numpy as np\n'), ((850, 883), 'tensorflow.train.BytesList', 'tf.train.BytesList', ([], {'value': '[value]'}), '(value=[value])\n', (868, 883), True, 'import tensorflow as tf\n'), ((963, 996), 'tensorflow.train.Int64List', 'tf.train.Int64List', ([], {'value': '[value]'}), '(value=[value])\n', (981, 996), True, 'import tensorflow as tf\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.