| code (string, length 22–1.05M) | apis (list, length 1–3.31k) | extract_api (string, length 75–3.25M) |
---|---|---|
#!/usr/bin/python
import os.path
import cppcodebase
import random


def CreateLibJamfile(lib_number, classes):
    os.chdir(cppcodebase.lib_name(lib_number))
    handle = file("Jamfile.jam", "w")
    handle.write("SubDir TOP lib_" + str(lib_number) + " ;\n\n")
    handle.write("SubDirHdrs $(INCLUDES) ;\n\n")
    handle.write("Library lib_" + str(lib_number) + " :\n")
    for i in xrange(classes):
        handle.write(' class_' + str(i) + '.cpp\n')
    handle.write(' ;\n')
    os.chdir('..')


def CreateFullJamfile(libs):
    handle = file("Jamfile.jam", "w")
    handle.write("SubDir TOP ;\n\n")
    for i in xrange(libs):
        handle.write('SubInclude TOP ' + cppcodebase.lib_name(i) + ' ;\n')
    handle.write('\nWorkspace GeneratedLibs :\n')
    for i in xrange(libs):
        handle.write('\t\t' + cppcodebase.lib_name(i) + '\n')
    handle.write(';\n')
    handle = file("Jamrules.jam", "w")
    handle.write('INCLUDES = $(TOP) ;\n')


def CreateCodebase(libs, classes, internal_includes, external_includes):
    cppcodebase.SetDir('jamplus')
    cppcodebase.CreateSetOfLibraries(libs, classes, internal_includes, external_includes, CreateLibJamfile)
    CreateFullJamfile(libs)
    os.chdir('..')
| [
"cppcodebase.SetDir",
"cppcodebase.CreateSetOfLibraries",
"cppcodebase.lib_name"
]
| [((1043, 1072), 'cppcodebase.SetDir', 'cppcodebase.SetDir', (['"""jamplus"""'], {}), "('jamplus')\n", (1061, 1072), False, 'import cppcodebase\n'), ((1077, 1184), 'cppcodebase.CreateSetOfLibraries', 'cppcodebase.CreateSetOfLibraries', (['libs', 'classes', 'internal_includes', 'external_includes', 'CreateLibJamfile'], {}), '(libs, classes, internal_includes,\n external_includes, CreateLibJamfile)\n', (1109, 1184), False, 'import cppcodebase\n'), ((125, 157), 'cppcodebase.lib_name', 'cppcodebase.lib_name', (['lib_number'], {}), '(lib_number)\n', (145, 157), False, 'import cppcodebase\n'), ((683, 706), 'cppcodebase.lib_name', 'cppcodebase.lib_name', (['i'], {}), '(i)\n', (703, 706), False, 'import cppcodebase\n'), ((825, 848), 'cppcodebase.lib_name', 'cppcodebase.lib_name', (['i'], {}), '(i)\n', (845, 848), False, 'import cppcodebase\n')] |
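For orientation, a minimal invocation sketch for the generator in the sample above; all argument values are invented for illustration and it assumes the script's functions (and the cppcodebase helper it imports) are in scope:

# Illustrative only; every argument value here is made up.
CreateCodebase(libs=5, classes=10, internal_includes=3, external_includes=2)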
"""
FOTA update tool which is called from the dispatcher during installation
Copyright (C) 2017-2022 Intel Corporation
SPDX-License-Identifier: Apache-2.0
"""
import logging
import os
import platform
from threading import Timer
from typing import Any, Optional, Mapping
from future.moves.urllib.parse import urlparse
from inbm_common_lib.exceptions import UrlSecurityException
from inbm_common_lib.utility import canonicalize_uri
from inbm_common_lib.constants import REMOTE_SOURCE
from .constants import *
from .fota_error import FotaError
from .manifest import parse_tool_options, parse_guid, parse_hold_reboot_flag
from .os_factory import OsFactory, OsType
from ..common import dispatcher_state
from ..common.result_constants import *
from ..constants import UMASK_OTA
from ..dispatcher_callbacks import DispatcherCallbacks
from ..dispatcher_exception import DispatcherException
from ..downloader import download
from ..packagemanager.local_repo import DirectoryRepo
logger = logging.getLogger(__name__)
class FOTA:
"""AKA FOTA Tool
An instance of this class will be called from the
dispatcher if the requested type of update is FOTA
"""
def __init__(self,
parsed_manifest: Mapping[str, Optional[Any]],
repo_type: str,
dispatcher_callbacks: DispatcherCallbacks) -> None:
"""Base class constructor for variable assignment, to send telemetry info and create a new
directory if no repo is present
@param parsed_manifest: Parsed parameters from manifest
@param repo_type: OTA source location -> local or remote
@param dispatcher_callbacks: DispatcherCallbacks instance
"""
logger.debug(f"parsed_manifest: {parsed_manifest}")
self._ota_element = parsed_manifest.get('resource')
logger.debug(f"ota_element: {self._ota_element}")
self._dispatcher_callbacks = dispatcher_callbacks
self._uri: Optional[str] = parsed_manifest['uri']
self._repo_type = repo_type
repo_path: Optional[str]
"""If repo_type=local, then use path and not URI"""
if self._repo_type == REMOTE_SOURCE:
if not self._uri:
raise FotaError("missing URI.")
else:
self._pkg_filename = os.path.basename(urlparse(self._uri).path)
repo_path = None
else:
if self._ota_element is None or 'path' not in self._ota_element:
raise FotaError('attempting to use local repo for FOTA but no path specified')
self._pkg_filename = os.path.basename(self._ota_element['path'])
path = self._ota_element.get('path', None)
logger.debug(f"path: {path}")
if path is None:
repo_path = None
else:
repo_path = os.path.dirname(path)
logger.debug(f"repo_path: {repo_path}")
self.__signature = parsed_manifest['signature']
self._hash_algorithm = parsed_manifest['hash_algorithm']
self._username = parsed_manifest['username']
self._password = parsed_manifest['password']
if self._dispatcher_callbacks is None:
raise FotaError("dispatcher_callbacks not specified in FOTA constructor")
self._dispatcher_callbacks.broker_core.telemetry("Firmware Update Tool launched")
if repo_path:
logger.debug("Using manifest specified repo path")
self._repo = DirectoryRepo(repo_path)
else:
logger.debug("Using default repo path")
self._repo = DirectoryRepo(CACHE)
def install(self) -> Result:
"""checks current platform versions and then issues download
and install. Performs clean() in failure conditions
@return: (Result) containing status code and message
"""
logger.debug("")
return_message: Result = Result()
hold_reboot = False
try:
factory = OsFactory.get_factory(
self._verify_os_supported(), self._ota_element, self._dispatcher_callbacks)
bios_vendor, platform_product = factory.create_upgrade_checker().check()
if self._repo_type.lower() == REMOTE_SOURCE:
# need to perform this check here because some FOTA commands don't have a URI -- see constructor
# (instead they have a path)
if self._uri is None:
raise FotaError(
"internal error: _uri uninitialized in Fota.install with download requested in manifest")
uri = canonicalize_uri(self._uri)
download(dispatcher_callbacks=self._dispatcher_callbacks,
uri=uri,
repo=self._repo,
umask=UMASK_OTA,
username=self._username,
password=self._password)
else:
logger.debug("Skipping FOTA upgradable check for local repo")
if self._ota_element is None:
raise FotaError("missing ota_element")
tool_options = parse_tool_options(self._ota_element)
logger.debug(f"tool_options: {tool_options}")
guid = parse_guid(self._ota_element)
logger.debug(f"guid: {guid}")
hold_reboot = parse_hold_reboot_flag(self._ota_element)
logger.debug(f"holdReboot: {hold_reboot}; pkg_filename: {self._pkg_filename}")
factory.create_installer(self._repo, FOTA_CONF_PATH, FOTA_CONF_SCHEMA_LOC).\
install(guid=guid,
tool_options=tool_options,
pkg_filename=self._pkg_filename,
signature=self.__signature,
hash_algorithm=self._hash_algorithm,
bios_vendor=bios_vendor,
platform_product=platform_product)
def trigger_reboot() -> None:
"""This method triggers a reboot."""
factory.create_rebooter().reboot()
if not hold_reboot:
logger.debug("")
state = {'restart_reason': "fota"}
dispatcher_state.write_dispatcher_state_to_state_file(state)
time_to_trigger_reboot = Timer(0.1, trigger_reboot)
time_to_trigger_reboot.start()
return_message = COMMAND_SUCCESS
else:
status = 'Reboot on hold after Firmware update...'
state = {'restart_reason': "pota"}
dispatcher_state.write_dispatcher_state_to_state_file(state)
logger.debug(status)
self._dispatcher_callbacks.broker_core.telemetry(status)
except (DispatcherException, FotaError, UrlSecurityException, ValueError, FileNotFoundError) as e:
error = 'Firmware Update Aborted: ' + str(e)
logger.error(error)
self._dispatcher_callbacks.broker_core.telemetry(error)
return_message = INSTALL_FAILURE
self._repo.delete(self._pkg_filename)
# In POTA, mender file needs to be deleted also.
if hold_reboot:
self._repo.delete_all()
finally:
if return_message == COMMAND_SUCCESS:
status = 'Firmware update in process...'
else:
status = 'Firmware Update Aborted'
dispatcher_state.clear_dispatcher_state()
logger.debug('Firmware update status: ' + status)
self._dispatcher_callbacks.broker_core.telemetry(status)
return return_message
@staticmethod
def _verify_os_supported():
"""checks if the current OS is supported.
@return True if OS is supported; otherwise, false.
@raise ValueError Unsupported OS
"""
logger.debug("")
os_type = platform.system()
logger.debug(f"os_type: {os_type}")
if os_type in OsType.__members__:
return os_type
else:
logger.error("Unsupported OS type.")
raise ValueError('Unsupported OS type.')
def check(self) -> None:
"""validate the manifest before FOTA"""
logger.debug("")
factory = OsFactory.get_factory(
self._verify_os_supported(), self._ota_element, self._dispatcher_callbacks)
factory.create_upgrade_checker().check()
| [
"logging.getLogger",
"threading.Timer",
"os.path.dirname",
"platform.system",
"os.path.basename",
"future.moves.urllib.parse.urlparse",
"inbm_common_lib.utility.canonicalize_uri"
]
| [((996, 1023), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1013, 1023), False, 'import logging\n'), ((7954, 7971), 'platform.system', 'platform.system', ([], {}), '()\n', (7969, 7971), False, 'import platform\n'), ((2609, 2652), 'os.path.basename', 'os.path.basename', (["self._ota_element['path']"], {}), "(self._ota_element['path'])\n", (2625, 2652), False, 'import os\n'), ((2858, 2879), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (2873, 2879), False, 'import os\n'), ((4627, 4654), 'inbm_common_lib.utility.canonicalize_uri', 'canonicalize_uri', (['self._uri'], {}), '(self._uri)\n', (4643, 4654), False, 'from inbm_common_lib.utility import canonicalize_uri\n'), ((6347, 6373), 'threading.Timer', 'Timer', (['(0.1)', 'trigger_reboot'], {}), '(0.1, trigger_reboot)\n', (6352, 6373), False, 'from threading import Timer\n'), ((2331, 2350), 'future.moves.urllib.parse.urlparse', 'urlparse', (['self._uri'], {}), '(self._uri)\n', (2339, 2350), False, 'from future.moves.urllib.parse import urlparse\n')] |
from ipso_phen.ipapi.base.ipt_abstract import IptBase
from ipso_phen.ipapi.tools import regions
import numpy as np
import cv2
import logging
logger = logging.getLogger(__name__)
from ipso_phen.ipapi.base import ip_common as ipc
class IptFilterContourBySize(IptBase):
def build_params(self):
self.add_enabled_checkbox()
self.add_spin_box(
name="min_threshold",
desc="Lower bound limit",
default_value=0,
minimum=0,
maximum=100000000,
hint="Only contours bigger than lower limit bound will be kept",
)
self.add_spin_box(
name="max_threshold",
desc="Upper bound limit",
default_value=100000000,
minimum=0,
maximum=100000000,
hint="Only contours smaller than the upper bound limit will be kept",
)
self.add_roi_selector()
def process_wrapper(self, **kwargs):
"""
Filter contour by size:
Keep or discard contours according to their size
Real time: False
Keyword Arguments (in parentheses, argument name):
* Activate tool (enabled): Toggle whether or not tool is active
* Lower bound limit (min_threshold): Only contours bigger than lower limit bound will be kept
* Upper bound limit (max_threshold): Only contours smaller than the upper bound limit will be kept
* Name of ROI to be used (roi_names): Operation will only be applied inside of ROI
* ROI selection mode (roi_selection_mode):
"""
wrapper = self.init_wrapper(**kwargs)
if wrapper is None:
return False
res = False
try:
if self.get_value_of("enabled") == 1:
mask = self.get_mask()
if mask is None:
logger.error(f"FAIL {self.name}: mask must be initialized")
return
lt, ut = self.get_value_of("min_threshold"), self.get_value_of(
"max_threshold"
)
# Get source contours
contours = [
c
for c in ipc.get_contours(
mask=mask,
retrieve_mode=cv2.RETR_LIST,
method=cv2.CHAIN_APPROX_SIMPLE,
)
if cv2.contourArea(c, True) < 0
]
contours.sort(key=lambda x: cv2.contourArea(x), reverse=True)
colors = ipc.build_color_steps(step_count=len(contours))
dbg_img = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
for clr, cnt in zip(colors, contours):
cv2.drawContours(dbg_img, [cnt], 0, clr, -1)
dbg_img = np.dstack(
(
cv2.bitwise_and(dbg_img[:, :, 0], mask),
cv2.bitwise_and(dbg_img[:, :, 1], mask),
cv2.bitwise_and(dbg_img[:, :, 2], mask),
)
)
wrapper.store_image(
image=dbg_img,
text="all_contours",
)
fnt = (cv2.FONT_HERSHEY_SIMPLEX, 0.6)
for cnt in contours:
area_ = cv2.contourArea(cnt)
x, y, w, h = cv2.boundingRect(cnt)
x += w // 2 - 10
y += h // 2
if area_ > 0:
cv2.putText(
dbg_img,
f"{area_}",
(x, y),
fnt[0],
fnt[1],
(255, 255, 255),
2,
)
wrapper.store_image(
image=dbg_img,
text="all_contours_with_sizes",
)
dbg_img = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
out_mask = np.zeros_like(mask)
# Discarded contours
size_cnts = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
for cnt in contours:
area_ = cv2.contourArea(cnt)
if area_ < lt:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_RED, -1)
elif area_ > ut:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_BLUE, -1)
else:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_WHITE, -1)
wrapper.store_image(image=size_cnts, text="cnts_by_size")
# Discarded contours
size_cnts = np.dstack(
(np.zeros_like(mask), np.zeros_like(mask), np.zeros_like(mask))
)
for cnt in sorted(
contours, key=lambda x: cv2.contourArea(x), reverse=True
):
area_ = cv2.contourArea(cnt)
if area_ < lt:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_RED, -1)
elif area_ > ut:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_BLUE, -1)
else:
cv2.drawContours(size_cnts, [cnt], 0, ipc.C_WHITE, -1)
wrapper.store_image(image=size_cnts, text="cnts_by_size_reversed")
for cnt in contours:
area_ = cv2.contourArea(cnt)
if not (lt < area_ < ut):
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_RED, -1)
# Discarded contours borders
for cnt in contours:
area_ = cv2.contourArea(cnt)
if not (lt < area_ < ut):
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_MAROON, 4)
# Kept contours
for cnt in contours:
area_ = cv2.contourArea(cnt)
if lt < area_ < ut:
cv2.drawContours(out_mask, [cnt], 0, 255, -1)
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_GREEN, -1)
else:
cv2.drawContours(out_mask, [cnt], 0, 0, -1)
cv2.drawContours(dbg_img, [cnt], 0, ipc.C_RED, -1)
dbg_img = np.dstack(
(
cv2.bitwise_and(dbg_img[:, :, 0], mask),
cv2.bitwise_and(dbg_img[:, :, 1], mask),
cv2.bitwise_and(dbg_img[:, :, 2], mask),
)
)
# Discarded sizes
for cnt in contours:
area_ = cv2.contourArea(cnt)
if not (lt < area_ < ut):
x, y, w, h = cv2.boundingRect(cnt)
x += w // 2 - 10
y += h // 2
cv2.putText(
dbg_img,
f"{area_}",
(x, y),
fnt[0],
fnt[1],
ipc.C_RED,
thickness=2,
)
# Kept sizes
for cnt in contours:
area_ = cv2.contourArea(cnt)
if lt < area_ < ut:
x, y, w, h = cv2.boundingRect(cnt)
x += w // 2 - 10
y += h // 2
cv2.putText(
dbg_img,
f"{area_}",
(x, y),
fnt[0],
fnt[1],
ipc.C_LIME,
thickness=2,
)
out_mask = cv2.bitwise_and(
out_mask,
mask,
)
# Apply ROIs if needed
rois = self.get_ipt_roi(
wrapper=wrapper,
roi_names=self.get_value_of("roi_names").replace(" ", "").split(","),
selection_mode=self.get_value_of("roi_selection_mode"),
)
if rois:
untouched_mask = regions.delete_rois(rois=rois, image=self.get_mask())
self.result = cv2.bitwise_or(
untouched_mask, regions.keep_rois(rois=rois, image=out_mask)
)
self.demo_image = cv2.bitwise_or(
dbg_img,
np.dstack((untouched_mask, untouched_mask, untouched_mask)),
)
else:
self.result = out_mask
self.demo_image = dbg_img
wrapper.store_image(image=self.result, text="filtered_contours")
wrapper.store_image(image=self.demo_image, text="tagged_contours")
res = True
else:
wrapper.store_image(wrapper.current_image, "current_image")
res = True
except Exception as e:
res = False
logger.exception(f"Filter contour by size FAILED, exception: {repr(e)}")
else:
pass
finally:
return res
@property
def name(self):
return "Filter contour by size"
@property
def package(self):
return "TPMP"
@property
def real_time(self):
return False
@property
def result_name(self):
return "mask"
@property
def output_kind(self):
return "mask"
@property
def use_case(self):
return [ipc.ToolFamily.MASK_CLEANUP]
@property
def description(self):
return """Keep or discard contours according to their size"""
| [
"logging.getLogger",
"numpy.dstack",
"ipso_phen.ipapi.tools.regions.keep_rois",
"cv2.drawContours",
"cv2.bitwise_and",
"cv2.contourArea",
"cv2.putText",
"ipso_phen.ipapi.base.ip_common.get_contours",
"numpy.zeros_like",
"cv2.boundingRect"
]
| [((159, 186), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (176, 186), False, 'import logging\n'), ((4349, 4368), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (4362, 4368), True, 'import numpy as np\n'), ((8412, 8443), 'cv2.bitwise_and', 'cv2.bitwise_and', (['out_mask', 'mask'], {}), '(out_mask, mask)\n', (8427, 8443), False, 'import cv2\n'), ((2910, 2954), 'cv2.drawContours', 'cv2.drawContours', (['dbg_img', '[cnt]', '(0)', 'clr', '(-1)'], {}), '(dbg_img, [cnt], 0, clr, -1)\n', (2926, 2954), False, 'import cv2\n'), ((3515, 3535), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (3530, 3535), False, 'import cv2\n'), ((3570, 3591), 'cv2.boundingRect', 'cv2.boundingRect', (['cnt'], {}), '(cnt)\n', (3586, 3591), False, 'import cv2\n'), ((4620, 4640), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (4635, 4640), False, 'import cv2\n'), ((5401, 5421), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (5416, 5421), False, 'import cv2\n'), ((5913, 5933), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (5928, 5933), False, 'import cv2\n'), ((6170, 6190), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (6185, 6190), False, 'import cv2\n'), ((6416, 6436), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (6431, 6436), False, 'import cv2\n'), ((7202, 7222), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (7217, 7222), False, 'import cv2\n'), ((7843, 7863), 'cv2.contourArea', 'cv2.contourArea', (['cnt'], {}), '(cnt)\n', (7858, 7863), False, 'import cv2\n'), ((2276, 2369), 'ipso_phen.ipapi.base.ip_common.get_contours', 'ipc.get_contours', ([], {'mask': 'mask', 'retrieve_mode': 'cv2.RETR_LIST', 'method': 'cv2.CHAIN_APPROX_SIMPLE'}), '(mask=mask, retrieve_mode=cv2.RETR_LIST, method=cv2.\n CHAIN_APPROX_SIMPLE)\n', (2292, 2369), True, 'from ipso_phen.ipapi.base import ip_common as ipc\n'), ((2751, 2770), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (2764, 2770), True, 'import numpy as np\n'), ((2772, 2791), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (2785, 2791), True, 'import numpy as np\n'), ((2793, 2812), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (2806, 2812), True, 'import numpy as np\n'), ((3041, 3080), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dbg_img[:, :, 0]', 'mask'], {}), '(dbg_img[:, :, 0], mask)\n', (3056, 3080), False, 'import cv2\n'), ((3107, 3146), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dbg_img[:, :, 1]', 'mask'], {}), '(dbg_img[:, :, 1], mask)\n', (3122, 3146), False, 'import cv2\n'), ((3173, 3212), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dbg_img[:, :, 2]', 'mask'], {}), '(dbg_img[:, :, 2], mask)\n', (3188, 3212), False, 'import cv2\n'), ((3723, 3799), 'cv2.putText', 'cv2.putText', (['dbg_img', 'f"""{area_}"""', '(x, y)', 'fnt[0]', 'fnt[1]', '(255, 255, 255)', '(2)'], {}), "(dbg_img, f'{area_}', (x, y), fnt[0], fnt[1], (255, 255, 255), 2)\n", (3734, 3799), False, 'import cv2\n'), ((4239, 4258), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (4252, 4258), True, 'import numpy as np\n'), ((4260, 4279), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (4273, 4279), True, 'import numpy as np\n'), ((4281, 4300), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (4294, 4300), True, 'import numpy as np\n'), ((4471, 4490), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (4484, 4490), True, 'import numpy as 
np\n'), ((4492, 4511), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (4505, 4511), True, 'import numpy as np\n'), ((4513, 4532), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (4526, 4532), True, 'import numpy as np\n'), ((4702, 4754), 'cv2.drawContours', 'cv2.drawContours', (['size_cnts', '[cnt]', '(0)', 'ipc.C_RED', '(-1)'], {}), '(size_cnts, [cnt], 0, ipc.C_RED, -1)\n', (4718, 4754), False, 'import cv2\n'), ((5156, 5175), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (5169, 5175), True, 'import numpy as np\n'), ((5177, 5196), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (5190, 5196), True, 'import numpy as np\n'), ((5198, 5217), 'numpy.zeros_like', 'np.zeros_like', (['mask'], {}), '(mask)\n', (5211, 5217), True, 'import numpy as np\n'), ((5483, 5535), 'cv2.drawContours', 'cv2.drawContours', (['size_cnts', '[cnt]', '(0)', 'ipc.C_RED', '(-1)'], {}), '(size_cnts, [cnt], 0, ipc.C_RED, -1)\n', (5499, 5535), False, 'import cv2\n'), ((6006, 6056), 'cv2.drawContours', 'cv2.drawContours', (['dbg_img', '[cnt]', '(0)', 'ipc.C_RED', '(-1)'], {}), '(dbg_img, [cnt], 0, ipc.C_RED, -1)\n', (6022, 6056), False, 'import cv2\n'), ((6263, 6315), 'cv2.drawContours', 'cv2.drawContours', (['dbg_img', '[cnt]', '(0)', 'ipc.C_MAROON', '(4)'], {}), '(dbg_img, [cnt], 0, ipc.C_MAROON, 4)\n', (6279, 6315), False, 'import cv2\n'), ((6503, 6548), 'cv2.drawContours', 'cv2.drawContours', (['out_mask', '[cnt]', '(0)', '(255)', '(-1)'], {}), '(out_mask, [cnt], 0, 255, -1)\n', (6519, 6548), False, 'import cv2\n'), ((6574, 6626), 'cv2.drawContours', 'cv2.drawContours', (['dbg_img', '[cnt]', '(0)', 'ipc.C_GREEN', '(-1)'], {}), '(dbg_img, [cnt], 0, ipc.C_GREEN, -1)\n', (6590, 6626), False, 'import cv2\n'), ((6679, 6722), 'cv2.drawContours', 'cv2.drawContours', (['out_mask', '[cnt]', '(0)', '(0)', '(-1)'], {}), '(out_mask, [cnt], 0, 0, -1)\n', (6695, 6722), False, 'import cv2\n'), ((6748, 6798), 'cv2.drawContours', 'cv2.drawContours', (['dbg_img', '[cnt]', '(0)', 'ipc.C_RED', '(-1)'], {}), '(dbg_img, [cnt], 0, ipc.C_RED, -1)\n', (6764, 6798), False, 'import cv2\n'), ((6885, 6924), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dbg_img[:, :, 0]', 'mask'], {}), '(dbg_img[:, :, 0], mask)\n', (6900, 6924), False, 'import cv2\n'), ((6951, 6990), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dbg_img[:, :, 1]', 'mask'], {}), '(dbg_img[:, :, 1], mask)\n', (6966, 6990), False, 'import cv2\n'), ((7017, 7056), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dbg_img[:, :, 2]', 'mask'], {}), '(dbg_img[:, :, 2], mask)\n', (7032, 7056), False, 'import cv2\n'), ((7308, 7329), 'cv2.boundingRect', 'cv2.boundingRect', (['cnt'], {}), '(cnt)\n', (7324, 7329), False, 'import cv2\n'), ((7434, 7519), 'cv2.putText', 'cv2.putText', (['dbg_img', 'f"""{area_}"""', '(x, y)', 'fnt[0]', 'fnt[1]', 'ipc.C_RED'], {'thickness': '(2)'}), "(dbg_img, f'{area_}', (x, y), fnt[0], fnt[1], ipc.C_RED, thickness=2\n )\n", (7445, 7519), False, 'import cv2\n'), ((7943, 7964), 'cv2.boundingRect', 'cv2.boundingRect', (['cnt'], {}), '(cnt)\n', (7959, 7964), False, 'import cv2\n'), ((8069, 8154), 'cv2.putText', 'cv2.putText', (['dbg_img', 'f"""{area_}"""', '(x, y)', 'fnt[0]', 'fnt[1]', 'ipc.C_LIME'], {'thickness': '(2)'}), "(dbg_img, f'{area_}', (x, y), fnt[0], fnt[1], ipc.C_LIME,\n thickness=2)\n", (8080, 8154), False, 'import cv2\n'), ((9025, 9069), 'ipso_phen.ipapi.tools.regions.keep_rois', 'regions.keep_rois', ([], {'rois': 'rois', 'image': 'out_mask'}), '(rois=rois, image=out_mask)\n', (9042, 9069), False, 'from 
ipso_phen.ipapi.tools import regions\n'), ((9207, 9266), 'numpy.dstack', 'np.dstack', (['(untouched_mask, untouched_mask, untouched_mask)'], {}), '((untouched_mask, untouched_mask, untouched_mask))\n', (9216, 9266), True, 'import numpy as np\n'), ((2488, 2512), 'cv2.contourArea', 'cv2.contourArea', (['c', '(True)'], {}), '(c, True)\n', (2503, 2512), False, 'import cv2\n'), ((2581, 2599), 'cv2.contourArea', 'cv2.contourArea', (['x'], {}), '(x)\n', (2596, 2599), False, 'import cv2\n'), ((4818, 4871), 'cv2.drawContours', 'cv2.drawContours', (['size_cnts', '[cnt]', '(0)', 'ipc.C_BLUE', '(-1)'], {}), '(size_cnts, [cnt], 0, ipc.C_BLUE, -1)\n', (4834, 4871), False, 'import cv2\n'), ((4924, 4978), 'cv2.drawContours', 'cv2.drawContours', (['size_cnts', '[cnt]', '(0)', 'ipc.C_WHITE', '(-1)'], {}), '(size_cnts, [cnt], 0, ipc.C_WHITE, -1)\n', (4940, 4978), False, 'import cv2\n'), ((5319, 5337), 'cv2.contourArea', 'cv2.contourArea', (['x'], {}), '(x)\n', (5334, 5337), False, 'import cv2\n'), ((5599, 5652), 'cv2.drawContours', 'cv2.drawContours', (['size_cnts', '[cnt]', '(0)', 'ipc.C_BLUE', '(-1)'], {}), '(size_cnts, [cnt], 0, ipc.C_BLUE, -1)\n', (5615, 5652), False, 'import cv2\n'), ((5705, 5759), 'cv2.drawContours', 'cv2.drawContours', (['size_cnts', '[cnt]', '(0)', 'ipc.C_WHITE', '(-1)'], {}), '(size_cnts, [cnt], 0, ipc.C_WHITE, -1)\n', (5721, 5759), False, 'import cv2\n')] |
from wrapper_tests.upsert_test import *
from wrapper_tests.upsertvaluedict_test import *
import os
import logging
import sys
import argparse
import signal
import unittest
logging.getLogger().setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s - %(name)s] %(message)s')
ch.setFormatter(formatter)
logging.getLogger().addHandler(ch)
parser = argparse.ArgumentParser(
    description='Unit testing for fiery snap.')
parser.add_argument('-config', type=str, default=None,
                    help='toml config for keys and such, see key.toml')

if __name__ == '__main__':
    unittest.main()
    os.kill(os.getpid(), signal.SIGKILL)
| [
"logging.getLogger",
"logging.StreamHandler",
"argparse.ArgumentParser",
"logging.Formatter",
"os.getpid"
]
| [((205, 238), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (226, 238), False, 'import logging\n'), ((278, 335), 'logging.Formatter', 'logging.Formatter', (['"""[%(asctime)s - %(name)s] %(message)s"""'], {}), "('[%(asctime)s - %(name)s] %(message)s')\n", (295, 335), False, 'import logging\n'), ((409, 476), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Unit testing for fiery snap."""'}), "(description='Unit testing for fiery snap.')\n", (432, 476), False, 'import argparse\n'), ((156, 175), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (173, 175), False, 'import logging\n'), ((363, 382), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (380, 382), False, 'import logging\n'), ((689, 700), 'os.getpid', 'os.getpid', ([], {}), '()\n', (698, 700), False, 'import os\n')] |
#!/usr/bin/env python
##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""
macrodescriptionviewer.py:
"""
import taurus.core
from taurus.external.qt import Qt
from taurus.qt.qtgui.base import TaurusBaseWidget
class TaurusMacroDescriptionViewer(Qt.QTextEdit, TaurusBaseWidget):

    __pyqtSignals__ = ("modelChanged(const QString &)",)

    def __init__(self, parent=None, designMode=False):
        name = "TaurusMacroDescriptionView"
        self.call__init__wo_kw(Qt.QTextEdit, parent)
        self.call__init__(TaurusBaseWidget, name)
        self.setReadOnly(True)
        self.setFont(Qt.QFont("Courier", 9))

    def defineStyle(self):
        """ Defines the initial style for the widget """
        self.updateStyle()

    def getModelClass(self):
        return taurus.core.taurusdevice.TaurusDevice

    def updateStyle(self):
        self.update()

    def onMacroNameChanged(self, macroName):
        """Can be connected to an event emitted after the macro name has changed.
        Receives macroName as an argument and asks the BaseMacroServer object
        for the macro description already prepared and stored in its MacroInfoObj."""
        macroServer = self.getModelObj()
        if macroServer is None or macroName is None or macroName == "":
            self.setText("")
            return
        self.setText(str(macroServer.getMacroInfoObj(macroName).doc))

    def getFormatedToolTip(self, cache=True):
        """This method was overridden to get rid of the default tooltip of TaurusWidget"""
        return ""

    model = Qt.pyqtProperty("QString",
                            TaurusBaseWidget.getModel,
                            TaurusBaseWidget.setModel,
                            TaurusBaseWidget.resetModel)

    useParentModel = Qt.pyqtProperty("bool",
                                     TaurusBaseWidget.getUseParentModel,
                                     TaurusBaseWidget.setUseParentModel,
                                     TaurusBaseWidget.resetUseParentModel)


def test():
    import sys
    from sardana.taurus.core.tango.sardana.macroserver import registerExtensions
    registerExtensions()
    app = Qt.QApplication(sys.argv)
    taurusMacroDescriptionView = TaurusMacroDescriptionViewer(designMode=1)
    if len(sys.argv) != 2:
        taurusMacroDescriptionView.setModel("macroserver/zreszela/1")
    else:
        taurusMacroDescriptionView.setModel(sys.argv[1])
    taurusMacroDescriptionView.onMacroNameChanged("mv")
    taurusMacroDescriptionView.show()
    sys.exit(app.exec_())


if __name__ == "__main__":
    test()
| [
"sardana.taurus.core.tango.sardana.macroserver.registerExtensions",
"taurus.external.qt.Qt.QApplication",
"taurus.external.qt.Qt.pyqtProperty",
"taurus.external.qt.Qt.QFont"
]
| [((2447, 2561), 'taurus.external.qt.Qt.pyqtProperty', 'Qt.pyqtProperty', (['"""QString"""', 'TaurusBaseWidget.getModel', 'TaurusBaseWidget.setModel', 'TaurusBaseWidget.resetModel'], {}), "('QString', TaurusBaseWidget.getModel, TaurusBaseWidget.\n setModel, TaurusBaseWidget.resetModel)\n", (2462, 2561), False, 'from taurus.external.qt import Qt\n'), ((2663, 2800), 'taurus.external.qt.Qt.pyqtProperty', 'Qt.pyqtProperty', (['"""bool"""', 'TaurusBaseWidget.getUseParentModel', 'TaurusBaseWidget.setUseParentModel', 'TaurusBaseWidget.resetUseParentModel'], {}), "('bool', TaurusBaseWidget.getUseParentModel,\n TaurusBaseWidget.setUseParentModel, TaurusBaseWidget.resetUseParentModel)\n", (2678, 2800), False, 'from taurus.external.qt import Qt\n'), ((3022, 3042), 'sardana.taurus.core.tango.sardana.macroserver.registerExtensions', 'registerExtensions', ([], {}), '()\n', (3040, 3042), False, 'from sardana.taurus.core.tango.sardana.macroserver import registerExtensions\n'), ((3053, 3078), 'taurus.external.qt.Qt.QApplication', 'Qt.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (3068, 3078), False, 'from taurus.external.qt import Qt\n'), ((1487, 1509), 'taurus.external.qt.Qt.QFont', 'Qt.QFont', (['"""Courier"""', '(9)'], {}), "('Courier', 9)\n", (1495, 1509), False, 'from taurus.external.qt import Qt\n')] |
from functools import partial
from uuid import UUID
from dateutil.parser import parse as dateutil_parse
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework.fields import ReadOnlyField, UUIDField
from datahub.core.constants import Country as CountryEnum
from datahub.core.validate_utils import DataCombiner
from datahub.core.validators import InRule, OperatorRule, RulesBasedValidator, ValidationRule
from datahub.metadata.models import AdministrativeArea, Country
MAX_LENGTH = settings.CHAR_FIELD_MAX_LENGTH
class ConstantModelSerializer(serializers.Serializer):
"""Constant models serializer."""
id = serializers.ReadOnlyField()
name = serializers.ReadOnlyField()
disabled_on = serializers.ReadOnlyField()
class PermittedFieldsModelSerializer(serializers.ModelSerializer):
"""Lets you get permitted fields only.
Needs 'permissions' attribute on Meta class in following format:
permissions = {
'app_name.permission': 'field'
}
If user doesn't have required permission, corresponding field will be filtered out.
Note: The current implementation does not allow access to the field if request.user is None.
"""
def get_fields(self):
"""Gets filtered dictionary of fields based on permissions."""
assert hasattr(self.Meta, 'permissions'), (
'Class {serializer_class} missing "Meta.permissions" attribute'.format(
serializer_class=self.__class__.__name__,
)
)
fields = super().get_fields()
request = self.context.get('request', None)
if request:
permissions = self.Meta.permissions
for permission, field in permissions.items():
if not request.user or not request.user.has_perm(permission):
del fields[field]
return fields
class NestedRelatedField(serializers.RelatedField):
"""DRF serialiser field for foreign keys and many-to-many fields.
Serialises as a dict with 'id' plus other specified keys.
"""
default_error_messages = {
'required': 'This field is required.',
'missing_pk': 'pk not provided.',
'does_not_exist': 'Invalid pk "{pk_value}" - object does not exist.',
'incorrect_type': 'Incorrect type. Expected object, received {'
'data_type}.',
}
def __init__(self, model, extra_fields=('name',), **kwargs):
"""Initialises the related field.
:param model: Model of the related field.
:param extra_fields: List of extra fields to include in the representation.
Can contain field names as strings or as tuples of
(field name, DRF field).
E.g. ['field1', ('field2', CharField())]
:param kwargs: Keyword arguments to pass to
RelatedField.__init__()
"""
super().__init__(**kwargs)
model_class = (apps.get_model(model) if isinstance(model, str) else
model)
self.pk_field = UUIDField()
self._fields = [
field if isinstance(field, tuple) else (field, ReadOnlyField())
for field in extra_fields
]
self._model = model_class
def get_queryset(self):
"""Returns the queryset corresponding to the model."""
return self._model.objects.all()
def to_internal_value(self, data):
"""Converts a user-provided value to a model instance."""
try:
if isinstance(data, (str, UUID)):
id_repr = data
else:
id_repr = data['id']
data = self.pk_field.to_internal_value(id_repr)
return self.get_queryset().get(pk=data)
except ObjectDoesNotExist:
self.fail('does_not_exist', pk_value=data)
except KeyError:
self.fail('missing_pk')
except (TypeError, ValueError):
self.fail('incorrect_type', data_type=type(data).__name__)
def to_representation(self, value):
"""Converts a model instance to a dict representation."""
if not value:
return value
extra = {
field_name: field.to_representation(getattr(value, field_name))
for field_name, field in self._fields
}
return {
**extra,
'id': self.pk_field.to_representation(value.pk),
}
def get_choices(self, cutoff=None):
"""Returns choices for DRF UI.
Standard implementation uses a dict, but that doesn't work as our
representation isn't hashable.
"""
queryset = self.get_queryset()
if queryset is None:
return ()
if cutoff is not None:
queryset = queryset[:cutoff]
return _Choices(
(
self.pk_field.to_representation(item.pk),
self.display_value(item),
)
for item in queryset
)
RelaxedDateField = partial(serializers.DateField, input_formats=('iso-8601', '%Y/%m/%d'))
class RelaxedDateTimeField(serializers.Field):
"""
Relaxed DateTime field.
Front end uses free text field for data filters, that's why
we need to accept date/datetime in various different formats.
DRF DateTimeField doesn't offer that flexibility.
"""
default_error_messages = {
'invalid': 'Date is in incorrect format.',
}
def to_internal_value(self, data):
"""Parses data into datetime."""
try:
data = dateutil_parse(data)
except ValueError:
self.fail('invalid', value=data)
return data
def to_representation(self, value):
"""Formats the datetime using a normal DateTimeField."""
repr_field = serializers.DateTimeField()
return repr_field.to_representation(value)
class RelaxedURLField(serializers.URLField):
"""URLField subclass that prepends http:// to input and output when a scheme is not present."""
def to_internal_value(self, data):
"""Converts a user-provided value to an internal value."""
return super().to_internal_value(self._fix_missing_url_scheme(data))
def to_representation(self, value):
"""Converts a stored value to the external representation."""
return super().to_representation(self._fix_missing_url_scheme(value))
@staticmethod
def _fix_missing_url_scheme(value):
if value and '://' not in value:
return f'http://{value}'
return value
class _Choices:
"""Wrapper for choices to make them compatible with DRF."""
def __init__(self, choices):
self._choices = choices
def items(self):
"""Returns the choices."""
return self._choices
class AddressSerializer(serializers.ModelSerializer):
"""
ModelSerializer that can be used to simulate nested address objects.
E.g.
Model:
class MultiAddressModel(models.Model):
primary_address_1 = models.CharField(max_length=MAX_LENGTH)
primary_address_2 = models.CharField(max_length=MAX_LENGTH, blank=True)
primary_address_town = models.CharField(max_length=MAX_LENGTH)
primary_address_county = models.CharField(max_length=MAX_LENGTH, blank=True)
primary_address_country = models.ForeignKey(
Country, on_delete=models.PROTECT, related_name='+',
)
primary_address_postcode = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_1 = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_2 = models.CharField(max_length=MAX_LENGTH, blank=True, null=True)
secondary_address_town = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_county = models.CharField(max_length=MAX_LENGTH, blank=True)
secondary_address_country = models.ForeignKey(
Country, null=True, on_delete=models.SET_NULL, related_name='+',
)
secondary_address_postcode = models.CharField(max_length=MAX_LENGTH, blank=True)
Serializer:
class MultiAddressModelSerializer(serializers.ModelSerializer):
primary_address = AddressSerializer(
source_model=MultiAddressModel,
address_source_prefix='primary_address',
)
secondary_address = AddressSerializer(
source_model=MultiAddressModel,
address_source_prefix='secondary_address',
required=False,
allow_null=True,
)
class Meta:
model = MultiAddressModel
fields = ['primary_address', 'secondary_address']
Will produce the following API response:
{
'primary_address': {
'line_1': '2',
'line_2': '',
'town': 'London',
'county': '',
'postcode': '',
'country': {
'id': '80756b9a-5d95-e211-a939-e4115bead28a',
'name': 'United Kingdom',
},
},
'secondary_address': {
'line_1': '1',
'line_2': '',
'town': 'Muckamore',
'county': '',
'postcode': '',
'country': {
'id': '736a9ab2-5d95-e211-a939-e4115bead28a',
'name': 'Ireland',
},
},
},
Please note:
1. None values for CharFields will be converted to ''
2. If all address field values are blank the nested object in the response will return None
E.g. Given the following fields' values:
secondary_address_1=''
secondary_address_2=''
secondary_address_town=''
secondary_address_county=''
secondary_address_postcode=''
secondary_address_country_id=None
The equivalent API response body will be:
'secondary_address': None
The same applies for changing the data.
3. If AddressSerializer has required=False, the validation is triggered only if at least
one of the fields is passed in.
"""
line_1 = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_1',
)
line_2 = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_2',
)
town = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_town',
)
county = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_county',
)
postcode = serializers.CharField(
max_length=MAX_LENGTH,
allow_blank=True,
required=False,
default='',
source='{source_prefix}_postcode',
)
area = NestedRelatedField(
AdministrativeArea,
allow_null=True,
required=False,
source='{source_prefix}_area',
)
country = NestedRelatedField(
Country,
allow_null=True,
required=False,
source='{source_prefix}_country',
)
REQUIRED_FIELDS = (
'line_1',
'town',
'country',
)
def __init__(
self, source_model, *args,
address_source_prefix='address', area_can_be_required=False,
postcode_can_be_required=False, **kwargs,
):
"""
Initialises the serializer.
It populates all necessary parts (e.g. Meta model, source, fields' source).
"""
# Define a custom Meta so that the Meta model can be specified as an argument
class MultiAddressMeta(self.Meta):
model = source_model
self.Meta = MultiAddressMeta
kwargs.setdefault('source', '*')
super().__init__(*args, **kwargs)
# populate fields' source
for field in self.fields.values():
field.source = field.source.format(source_prefix=address_source_prefix)
field.source_attrs = field.source.split('.')
self.area_can_be_required = area_can_be_required
self.postcode_can_be_required = postcode_can_be_required
self.address_source_prefix = address_source_prefix
def add_area_validator(self, validators):
"""
Mark area as required for US and Canadian companies.
"""
validators.append(
RulesBasedValidator(
ValidationRule(
'required',
OperatorRule(f'{self.address_source_prefix}_area', bool),
when=InRule(
f'{self.address_source_prefix}_country',
(
CountryEnum.united_states.value.id,
CountryEnum.canada.value.id,
),
),
),
),
)
def add_postcode_validator(self, validators):
"""
Mark postcode as required for US and Canadian companies.
"""
validators.append(
RulesBasedValidator(
ValidationRule(
'required',
OperatorRule(f'{self.address_source_prefix}_postcode', bool),
when=InRule(
f'{self.address_source_prefix}_country',
(
CountryEnum.united_states.value.id,
CountryEnum.canada.value.id,
),
),
),
),
)
def get_validators(self):
"""
Append ValidationRule for area/postcode depending on feature flag/context
Only mark area/postcode required if country is US/Canada & called from context where area
is safe to require, and if feature flag enabled. Currently the only context where area is
safe to require is CompanySerializer
"""
validators = super().get_validators()
if self.area_can_be_required:
self.add_area_validator(validators)
if self.postcode_can_be_required:
self.add_postcode_validator(validators)
return validators
def run_validation(self, data=serializers.empty):
"""
Converts None to dict with default values so that those values can be used to
reset the fields on the model.
"""
if data or not self.allow_null:
normalised_data = data
else:
normalised_data = {
field_name: None if (field.default == serializers.empty) else field.default
for field_name, field in self.fields.items()
}
return super().run_validation(data=normalised_data)
def to_representation(self, value):
"""
It returns None if none of the address values is set.
E.g.
{
'address': None
}
instead of
{
'address': {
'line_1': '',
'line_2': '',
'town': '',
'county': '',
'postcode': '',
'country': None
}
}
"""
address_dict = super().to_representation(value)
if not any(address_dict.values()):
return None
# for each address field, replace None with default if possible
for field_name, value in address_dict.items():
field_default = self.fields[field_name].default
if value is None and field_default is not serializers.empty:
address_dict[field_name] = field_default
return address_dict
def should_validate(self, data_combiner):
"""
Returns true if the data should be validated.
"""
if self.required:
return True
return any(
data_combiner.get_value(field.source)
for field in self.fields.values()
)
def validate(self, attrs):
"""
Validates the data if necessary.
This is needed because some addresses only need to be validated
if they are passed in.
"""
validated_data = super().validate(attrs)
data_combiner = DataCombiner(self.parent.instance, validated_data)
if self.should_validate(data_combiner):
errors = {}
for field_name in self.REQUIRED_FIELDS:
field = self.fields[field_name]
value = data_combiner.get_value(field.source)
if not value:
errors[field_name] = self.error_messages['required']
if errors:
raise ValidationError(errors)
return validated_data
class Meta:
"""Meta options."""
model = None
fields = (
'line_1',
'line_2',
'town',
'county',
'postcode',
'area',
'country',
)
| [
"rest_framework.serializers.DateTimeField",
"dateutil.parser.parse",
"datahub.core.validators.OperatorRule",
"datahub.core.validate_utils.DataCombiner",
"rest_framework.serializers.ReadOnlyField",
"rest_framework.fields.UUIDField",
"rest_framework.fields.ReadOnlyField",
"rest_framework.serializers.CharField",
"functools.partial",
"datahub.core.validators.InRule",
"rest_framework.exceptions.ValidationError",
"django.apps.apps.get_model"
]
| [((5268, 5338), 'functools.partial', 'partial', (['serializers.DateField'], {'input_formats': "('iso-8601', '%Y/%m/%d')"}), "(serializers.DateField, input_formats=('iso-8601', '%Y/%m/%d'))\n", (5275, 5338), False, 'from functools import partial\n'), ((793, 820), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (818, 820), False, 'from rest_framework import serializers\n'), ((832, 859), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (857, 859), False, 'from rest_framework import serializers\n'), ((878, 905), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (903, 905), False, 'from rest_framework import serializers\n'), ((10591, 10714), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': 'MAX_LENGTH', 'allow_blank': '(True)', 'required': '(False)', 'default': '""""""', 'source': '"""{source_prefix}_1"""'}), "(max_length=MAX_LENGTH, allow_blank=True, required=\n False, default='', source='{source_prefix}_1')\n", (10612, 10714), False, 'from rest_framework import serializers\n'), ((10770, 10893), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': 'MAX_LENGTH', 'allow_blank': '(True)', 'required': '(False)', 'default': '""""""', 'source': '"""{source_prefix}_2"""'}), "(max_length=MAX_LENGTH, allow_blank=True, required=\n False, default='', source='{source_prefix}_2')\n", (10791, 10893), False, 'from rest_framework import serializers\n'), ((10947, 11073), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': 'MAX_LENGTH', 'allow_blank': '(True)', 'required': '(False)', 'default': '""""""', 'source': '"""{source_prefix}_town"""'}), "(max_length=MAX_LENGTH, allow_blank=True, required=\n False, default='', source='{source_prefix}_town')\n", (10968, 11073), False, 'from rest_framework import serializers\n'), ((11129, 11257), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': 'MAX_LENGTH', 'allow_blank': '(True)', 'required': '(False)', 'default': '""""""', 'source': '"""{source_prefix}_county"""'}), "(max_length=MAX_LENGTH, allow_blank=True, required=\n False, default='', source='{source_prefix}_county')\n", (11150, 11257), False, 'from rest_framework import serializers\n'), ((11315, 11445), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': 'MAX_LENGTH', 'allow_blank': '(True)', 'required': '(False)', 'default': '""""""', 'source': '"""{source_prefix}_postcode"""'}), "(max_length=MAX_LENGTH, allow_blank=True, required=\n False, default='', source='{source_prefix}_postcode')\n", (11336, 11445), False, 'from rest_framework import serializers\n'), ((3310, 3321), 'rest_framework.fields.UUIDField', 'UUIDField', ([], {}), '()\n', (3319, 3321), False, 'from rest_framework.fields import ReadOnlyField, UUIDField\n'), ((6059, 6086), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {}), '()\n', (6084, 6086), False, 'from rest_framework import serializers\n'), ((16934, 16984), 'datahub.core.validate_utils.DataCombiner', 'DataCombiner', (['self.parent.instance', 'validated_data'], {}), '(self.parent.instance, validated_data)\n', (16946, 16984), False, 'from datahub.core.validate_utils import DataCombiner\n'), ((3202, 3223), 'django.apps.apps.get_model', 'apps.get_model', (['model'], {}), '(model)\n', (3216, 3223), False, 'from django.apps import apps\n'), ((5819, 5839), 
'dateutil.parser.parse', 'dateutil_parse', (['data'], {}), '(data)\n', (5833, 5839), True, 'from dateutil.parser import parse as dateutil_parse\n'), ((17368, 17391), 'rest_framework.exceptions.ValidationError', 'ValidationError', (['errors'], {}), '(errors)\n', (17383, 17391), False, 'from rest_framework.exceptions import ValidationError\n'), ((3406, 3421), 'rest_framework.fields.ReadOnlyField', 'ReadOnlyField', ([], {}), '()\n', (3419, 3421), False, 'from rest_framework.fields import ReadOnlyField, UUIDField\n'), ((13171, 13227), 'datahub.core.validators.OperatorRule', 'OperatorRule', (['f"""{self.address_source_prefix}_area"""', 'bool'], {}), "(f'{self.address_source_prefix}_area', bool)\n", (13183, 13227), False, 'from datahub.core.validators import InRule, OperatorRule, RulesBasedValidator, ValidationRule\n'), ((13852, 13912), 'datahub.core.validators.OperatorRule', 'OperatorRule', (['f"""{self.address_source_prefix}_postcode"""', 'bool'], {}), "(f'{self.address_source_prefix}_postcode', bool)\n", (13864, 13912), False, 'from datahub.core.validators import InRule, OperatorRule, RulesBasedValidator, ValidationRule\n'), ((13254, 13373), 'datahub.core.validators.InRule', 'InRule', (['f"""{self.address_source_prefix}_country"""', '(CountryEnum.united_states.value.id, CountryEnum.canada.value.id)'], {}), "(f'{self.address_source_prefix}_country', (CountryEnum.united_states.\n value.id, CountryEnum.canada.value.id))\n", (13260, 13373), False, 'from datahub.core.validators import InRule, OperatorRule, RulesBasedValidator, ValidationRule\n'), ((13939, 14058), 'datahub.core.validators.InRule', 'InRule', (['f"""{self.address_source_prefix}_country"""', '(CountryEnum.united_states.value.id, CountryEnum.canada.value.id)'], {}), "(f'{self.address_source_prefix}_country', (CountryEnum.united_states.\n value.id, CountryEnum.canada.value.id))\n", (13945, 14058), False, 'from datahub.core.validators import InRule, OperatorRule, RulesBasedValidator, ValidationRule\n')] |
import pytest
from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO
from stable_baselines.ddpg import AdaptiveParamNoiseSpec
from stable_baselines.common.identity_env import IdentityEnv, IdentityEnvBox
from stable_baselines.common.vec_env import DummyVecEnv
PARAM_NOISE_DDPG = AdaptiveParamNoiseSpec(initial_stddev=float(0.2), desired_action_stddev=float(0.2))
# Hyperparameters for learning identity for each RL model
LEARN_FUNC_DICT = {
    'a2c': lambda e: A2C(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
    'acer': lambda e: ACER(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
    'acktr': lambda e: ACKTR(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
    'deepq': lambda e: DeepQ(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
    'ddpg': lambda e: DDPG(policy="MlpPolicy", env=e, param_noise=PARAM_NOISE_DDPG).learn(total_timesteps=1000),
    'ppo1': lambda e: PPO1(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
    'ppo2': lambda e: PPO2(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
    'trpo': lambda e: TRPO(policy="MlpPolicy", env=e).learn(total_timesteps=1000),
}
@pytest.mark.slow
@pytest.mark.parametrize("model_name", ['a2c', 'acer', 'acktr', 'deepq', 'ppo1', 'ppo2', 'trpo'])
def test_identity(model_name):
    """
    Test if the algorithm (with a given policy)
    can learn an identity transformation (i.e. return observation as an action)

    :param model_name: (str) Name of the RL model
    """
    env = DummyVecEnv([lambda: IdentityEnv(10)])
    model = LEARN_FUNC_DICT[model_name](env)

    n_trials = 1000
    obs = env.reset()
    action_shape = model.predict(obs, deterministic=False)[0].shape
    action, _ = model.predict(obs, deterministic=True)
    assert action.shape == action_shape
    for _ in range(n_trials):
        new_action = model.predict(obs, deterministic=True)[0]
        assert action == model.predict(obs, deterministic=True)[0]
        assert new_action.shape == action_shape

    # Free memory
    del model, env
@pytest.mark.slow
@pytest.mark.parametrize("model_name", ['a2c', 'ddpg', 'ppo1', 'ppo2', 'trpo'])
def test_identity_continuous(model_name):
    """
    Test if the algorithm (with a given policy)
    can learn an identity transformation (i.e. return observation as an action)

    :param model_name: (str) Name of the RL model
    """
    env = DummyVecEnv([lambda: IdentityEnvBox(eps=0.5)])
    model = LEARN_FUNC_DICT[model_name](env)

    n_trials = 1000
    obs = env.reset()
    action_shape = model.predict(obs, deterministic=False)[0].shape
    action, _ = model.predict(obs, deterministic=True)
    assert action.shape == action_shape
    for _ in range(n_trials):
        new_action = model.predict(obs, deterministic=True)[0]
        assert action == model.predict(obs, deterministic=True)[0]
        assert new_action.shape == action_shape
| [
"stable_baselines.TRPO",
"stable_baselines.DDPG",
"stable_baselines.PPO1",
"stable_baselines.common.identity_env.IdentityEnvBox",
"stable_baselines.common.identity_env.IdentityEnv",
"stable_baselines.ACKTR",
"pytest.mark.parametrize",
"stable_baselines.PPO2",
"stable_baselines.DeepQ",
"stable_baselines.ACER",
"stable_baselines.A2C"
]
| [((1184, 1284), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""model_name"""', "['a2c', 'acer', 'acktr', 'deepq', 'ppo1', 'ppo2', 'trpo']"], {}), "('model_name', ['a2c', 'acer', 'acktr', 'deepq',\n 'ppo1', 'ppo2', 'trpo'])\n", (1207, 1284), False, 'import pytest\n'), ((2074, 2152), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""model_name"""', "['a2c', 'ddpg', 'ppo1', 'ppo2', 'trpo']"], {}), "('model_name', ['a2c', 'ddpg', 'ppo1', 'ppo2', 'trpo'])\n", (2097, 2152), False, 'import pytest\n'), ((486, 516), 'stable_baselines.A2C', 'A2C', ([], {'policy': '"""MlpPolicy"""', 'env': 'e'}), "(policy='MlpPolicy', env=e)\n", (489, 516), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((568, 599), 'stable_baselines.ACER', 'ACER', ([], {'policy': '"""MlpPolicy"""', 'env': 'e'}), "(policy='MlpPolicy', env=e)\n", (572, 599), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((652, 684), 'stable_baselines.ACKTR', 'ACKTR', ([], {'policy': '"""MlpPolicy"""', 'env': 'e'}), "(policy='MlpPolicy', env=e)\n", (657, 684), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((737, 769), 'stable_baselines.DeepQ', 'DeepQ', ([], {'policy': '"""MlpPolicy"""', 'env': 'e'}), "(policy='MlpPolicy', env=e)\n", (742, 769), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((821, 882), 'stable_baselines.DDPG', 'DDPG', ([], {'policy': '"""MlpPolicy"""', 'env': 'e', 'param_noise': 'PARAM_NOISE_DDPG'}), "(policy='MlpPolicy', env=e, param_noise=PARAM_NOISE_DDPG)\n", (825, 882), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((934, 965), 'stable_baselines.PPO1', 'PPO1', ([], {'policy': '"""MlpPolicy"""', 'env': 'e'}), "(policy='MlpPolicy', env=e)\n", (938, 965), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((1017, 1048), 'stable_baselines.PPO2', 'PPO2', ([], {'policy': '"""MlpPolicy"""', 'env': 'e'}), "(policy='MlpPolicy', env=e)\n", (1021, 1048), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((1100, 1131), 'stable_baselines.TRPO', 'TRPO', ([], {'policy': '"""MlpPolicy"""', 'env': 'e'}), "(policy='MlpPolicy', env=e)\n", (1104, 1131), False, 'from stable_baselines import A2C, ACER, ACKTR, DeepQ, DDPG, PPO1, PPO2, TRPO\n'), ((1538, 1553), 'stable_baselines.common.identity_env.IdentityEnv', 'IdentityEnv', (['(10)'], {}), '(10)\n', (1549, 1553), False, 'from stable_baselines.common.identity_env import IdentityEnv, IdentityEnvBox\n'), ((2421, 2444), 'stable_baselines.common.identity_env.IdentityEnvBox', 'IdentityEnvBox', ([], {'eps': '(0.5)'}), '(eps=0.5)\n', (2435, 2444), False, 'from stable_baselines.common.identity_env import IdentityEnv, IdentityEnvBox\n')] |
import tensorflow as tf
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
export_dir = './reference/00000002'
graph_pb = './creditcardfraud.pb'
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
with tf.gfile.GFile(graph_pb, "rb") as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
sigs = {}
with tf.Session(graph=tf.Graph()) as sess:
# name="" is important to ensure we don't get spurious prefixing
tf.import_graph_def(graph_def, name="")
g = tf.get_default_graph()
inp1 = g.get_tensor_by_name("transaction:0")
inp2 = g.get_tensor_by_name("reference:0")
out = g.get_tensor_by_name("output:0")
sigs[signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = \
tf.saved_model.signature_def_utils.predict_signature_def(
{"transaction": inp1, "reference": inp2}, {"output": out})
builder.add_meta_graph_and_variables(sess,
[tag_constants.SERVING],
signature_def_map=sigs)
builder.save()
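# --- Hedged follow-up sketch (added for illustration, not part of the original script) ---
# The exported SavedModel can be reloaded into a fresh session for inference.
# Tensor names mirror the ones used above; the feed values are caller-supplied placeholders.
def _example_reload_for_inference(transaction_value, reference_value):
    with tf.Session(graph=tf.Graph()) as new_sess:
        tf.saved_model.loader.load(new_sess, [tag_constants.SERVING], export_dir)
        graph = tf.get_default_graph()
        output = graph.get_tensor_by_name("output:0")
        return new_sess.run(output, feed_dict={"transaction:0": transaction_value,
                                               "reference:0": reference_value})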
| [
"tensorflow.saved_model.signature_def_utils.predict_signature_def",
"tensorflow.Graph",
"tensorflow.gfile.GFile",
"tensorflow.GraphDef",
"tensorflow.import_graph_def",
"tensorflow.saved_model.builder.SavedModelBuilder",
"tensorflow.get_default_graph"
]
| [((224, 276), 'tensorflow.saved_model.builder.SavedModelBuilder', 'tf.saved_model.builder.SavedModelBuilder', (['export_dir'], {}), '(export_dir)\n', (264, 276), True, 'import tensorflow as tf\n'), ((283, 313), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['graph_pb', '"""rb"""'], {}), "(graph_pb, 'rb')\n", (297, 313), True, 'import tensorflow as tf\n'), ((336, 349), 'tensorflow.GraphDef', 'tf.GraphDef', ([], {}), '()\n', (347, 349), True, 'import tensorflow as tf\n'), ((518, 557), 'tensorflow.import_graph_def', 'tf.import_graph_def', (['graph_def'], {'name': '""""""'}), "(graph_def, name='')\n", (537, 557), True, 'import tensorflow as tf\n'), ((566, 588), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (586, 588), True, 'import tensorflow as tf\n'), ((805, 924), 'tensorflow.saved_model.signature_def_utils.predict_signature_def', 'tf.saved_model.signature_def_utils.predict_signature_def', (["{'transaction': inp1, 'reference': inp2}", "{'output': out}"], {}), "({'transaction':\n inp1, 'reference': inp2}, {'output': out})\n", (861, 924), True, 'import tensorflow as tf\n'), ((424, 434), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (432, 434), True, 'import tensorflow as tf\n')] |
import torch
from scipy.stats import median_absolute_deviation
class Transform_Base(object):
"""
Base class for transformations based on some data.
"""
def __init__(self, Ytr):
self.Ytr = Ytr
# Transform the mean
def scale_mean(self, mu):
return mu
# Reverse the transformation to the mean
def unscale_mean(self, mu):
return mu
# Reverse the transformation to the variance
def unscale_var(self, var):
return var
class Transform_Standardize(Transform_Base):
"""
Standardize the data
"""
def __init__(self, Ytr):
super().__init__(Ytr)
self.Ytr_mean = Ytr.mean()
self.Ytr_std = Ytr.std()
self.Ytr_var = Ytr.var()
def scale_mean(self, mu):
return (mu - self.Ytr_mean) / self.Ytr_std
def unscale_mean(self, mu):
return mu * self.Ytr_std + self.Ytr_mean
def unscale_var(self, var):
return var * self.Ytr_var
class Transform_StandardizeRobustly(Transform_Base):
"""
Robustly standardize the data by estimating its scale
"""
def __init__(self, Ytr):
super().__init__(Ytr)
self.Ytr_median = Ytr.median()
Ytr_numpy = Ytr.numpy().ravel()
self.Ytr_scale = torch.tensor(median_absolute_deviation(Ytr_numpy))
self.Ytr_scaleSQR = self.Ytr_scale**2
def scale_mean(self, mu):
return (mu - self.Ytr_median) / self.Ytr_scale
def unscale_mean(self, mu):
return mu * self.Ytr_scale + self.Ytr_median
def unscale_var(self, var):
return var * self.Ytr_scaleSQR
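# Hedged usage sketch (added for illustration, not part of the original module):
# scale training targets with the robust transform, then verify it round-trips.
def _example_transform_roundtrip():
    Ytr = torch.randn(100, 1, dtype=torch.float64)
    tr = Transform_StandardizeRobustly(Ytr)
    scaled = tr.scale_mean(Ytr)
    assert torch.allclose(tr.unscale_mean(scaled), Ytr, atol=1e-6)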
| [
"scipy.stats.median_absolute_deviation"
]
| [((1327, 1363), 'scipy.stats.median_absolute_deviation', 'median_absolute_deviation', (['Ytr_numpy'], {}), '(Ytr_numpy)\n', (1352, 1363), False, 'from scipy.stats import median_absolute_deviation\n')] |
import pytest
from prefect.core import Edge, Flow, Parameter, Task
from prefect.tasks.core import collections
from prefect.tasks.core.constants import Constant
from prefect.tasks.core.function import FunctionTask
class IdentityTask(Task):
def run(self, x):
return x
class TestConstant:
def test_constant_task_returns_its_value(self):
x = Constant("x")
assert x.run() == "x"
y = Constant(100)
assert y.run() == 100
def test_automatic_create_constant_task(self):
with Flow(name="test") as flow:
t = Task()
t.set_dependencies(upstream_tasks=[4])
assert len(flow.tasks) == 2
assert any(isinstance(t, Constant) for t in flow.tasks)
class TestFunctionTask:
def test_function_task_requires_callable(self):
with pytest.raises(TypeError):
FunctionTask(fn=1)
def test_function_task_takes_name_from_callable(self):
def my_fn():
pass
f = FunctionTask(fn=my_fn)
assert f.name == "my_fn"
def test_function_task_takes_name_from_arg_if_provided(self):
def my_fn():
pass
f = FunctionTask(fn=my_fn, name="test")
assert f.name == "test"
def test_function_task_docstring(self):
def my_fn():
"""An example docstring."""
pass
# Original docstring available on class
assert "FunctionTask" in FunctionTask.__doc__
# Wrapped function is docstring on instance
f = FunctionTask(fn=my_fn)
assert f.__doc__ == my_fn.__doc__
# Except when no docstring on wrapped function
f = FunctionTask(fn=lambda x: x + 1)
assert "FunctionTask" in f.__doc__
def test_function_task_sets__wrapped__(self):
def my_fn():
"""An example function"""
pass
t = FunctionTask(fn=my_fn)
assert t.__wrapped__ == my_fn
assert not hasattr(FunctionTask, "__wrapped__")
class TestCollections:
def test_list_returns_a_list(self):
l = collections.List()
with Flow(name="test") as f:
l.bind(1, 2)
assert f.run().result[l].result == [1, 2]
def test_list_binds_varargs(self):
t1 = Task()
t2 = Task()
l = collections.List()
with Flow(name="test") as f:
l.bind(t1, t2)
assert set([t1, t2, l]) == f.tasks
assert Edge(t1, l, key="arg_1") in f.edges
assert Edge(t2, l, key="arg_2") in f.edges
def test_tuple_returns_a_tuple(self):
l = collections.Tuple()
with Flow(name="test") as f:
l.bind(1, 2)
assert f.run().result[l].result == (1, 2)
def test_tuple_binds_varargs(self):
t1 = Task()
t2 = Task()
l = collections.Tuple()
with Flow(name="test") as f:
l.bind(t1, t2)
assert set([t1, t2, l]) == f.tasks
assert Edge(t1, l, key="arg_1") in f.edges
assert Edge(t2, l, key="arg_2") in f.edges
def test_set_returns_a_set(self):
l = collections.Set()
with Flow(name="test") as f:
l.bind(1, 2)
assert f.run().result[l].result == set([1, 2])
def test_set_binds_varargs(self):
t1 = Task()
t2 = Task()
l = collections.Set()
with Flow(name="test") as f:
l.bind(t1, t2)
assert set([t1, t2, l]) == f.tasks
assert Edge(t1, l, key="arg_1") in f.edges
assert Edge(t2, l, key="arg_2") in f.edges
def test_dict_returns_a_dict(self):
l = collections.Dict()
with Flow(name="test") as f:
l.bind(keys=["a", "b"], values=[1, 2])
assert f.run().result[l].result == dict(a=1, b=2)
def test_dict_handles_non_string_keys(self):
l = collections.Dict()
with Flow(name="test") as f:
l.bind(keys=[None, 55], values=[1, 2])
assert f.run().result[l].result == {None: 1, 55: 2}
def test_dict_raises_for_differing_length_key_value_pairs(self):
l = collections.Dict()
with Flow(name="test") as f:
l.bind(keys=["a"], values=[1, 2])
state = f.run()
assert state.result[l].is_failed()
assert isinstance(state.result[l].result, ValueError)
def test_list_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=[x, y])
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.List) for t in f.tasks) == 1
assert state.result[identity].result == [1, 2]
def test_list_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=[x, y], flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.List) for t in f.tasks) == 1
assert state.result[identity].result == [1, 2]
def test_tuple_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=(x, y))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Tuple) for t in f.tasks) == 1
assert state.result[identity].result == (1, 2)
def test_tuple_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=(x, y), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Tuple) for t in f.tasks) == 1
assert state.result[identity].result == (1, 2)
def test_set_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=set([x, y]))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Set) for t in f.tasks) == 1
assert state.result[identity].result == set([1, 2])
def test_set_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=set([x, y]), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 4
assert sum(isinstance(t, collections.Set) for t in f.tasks) == 1
assert state.result[identity].result == set([1, 2])
def test_dict_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=dict(a=x, b=y))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 5 # 2 params, identity, Dict, List of dict values
assert sum(isinstance(t, collections.Dict) for t in f.tasks) == 1
assert state.result[identity].result == dict(a=1, b=2)
def test_dict_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=dict(a=x, b=y), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 5 # 2 params, identity, Dict, List of dict values
assert sum(isinstance(t, collections.Dict) for t in f.tasks) == 1
assert state.result[identity].result == dict(a=1, b=2)
def test_nested_collection_automatically_applied_to_callargs(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
with Flow(name="test") as f:
identity.bind(x=dict(a=[x, dict(y=y)], b=(y, set([x]))))
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 10
assert state.result[identity].result == dict(a=[1, dict(y=2)], b=(2, set([1])))
def test_nested_collection_automatically_applied_to_callargs_imperative(self):
x = Parameter("x")
y = Parameter("y")
identity = IdentityTask()
f = Flow(name="test")
f.add_task(identity)
identity.bind(x=dict(a=[x, dict(y=y)], b=(y, set([x]))), flow=f)
state = f.run(parameters=dict(x=1, y=2))
assert len(f.tasks) == 10
assert state.result[identity].result == dict(a=[1, dict(y=2)], b=(2, set([1])))
def test_list_maintains_sort_order_for_more_than_10_items(self):
# https://github.com/PrefectHQ/prefect/issues/2451
l = collections.List()
with Flow(name="test") as f:
l.bind(*list(range(15)))
assert f.run().result[l].result == list(range(15))
def test_tuple_maintains_sort_order_for_more_than_10_items(self):
# https://github.com/PrefectHQ/prefect/issues/2451
t = collections.Tuple()
with Flow(name="test") as f:
t.bind(*list(range(15)))
assert f.run().result[t].result == tuple(range(15))
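    def test_dict_with_constant_keys_and_values(self):
        # Hedged addition mirroring the tests above: plain lists are auto-wrapped
        # as constants and the Dict task output matches a dict literal.
        d = collections.Dict()
        with Flow(name="test") as f:
            d.bind(keys=["a", "b", "c"], values=[1, 2, 3])
        assert f.run().result[d].result == {"a": 1, "b": 2, "c": 3}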
| [
"prefect.tasks.core.collections.Tuple",
"prefect.core.Task",
"prefect.tasks.core.collections.Dict",
"prefect.core.Edge",
"prefect.tasks.core.constants.Constant",
"prefect.tasks.core.collections.List",
"prefect.tasks.core.collections.Set",
"prefect.tasks.core.function.FunctionTask",
"pytest.raises",
"prefect.core.Parameter",
"prefect.core.Flow"
]
| [((367, 380), 'prefect.tasks.core.constants.Constant', 'Constant', (['"""x"""'], {}), "('x')\n", (375, 380), False, 'from prefect.tasks.core.constants import Constant\n'), ((424, 437), 'prefect.tasks.core.constants.Constant', 'Constant', (['(100)'], {}), '(100)\n', (432, 437), False, 'from prefect.tasks.core.constants import Constant\n'), ((993, 1015), 'prefect.tasks.core.function.FunctionTask', 'FunctionTask', ([], {'fn': 'my_fn'}), '(fn=my_fn)\n', (1005, 1015), False, 'from prefect.tasks.core.function import FunctionTask\n'), ((1167, 1202), 'prefect.tasks.core.function.FunctionTask', 'FunctionTask', ([], {'fn': 'my_fn', 'name': '"""test"""'}), "(fn=my_fn, name='test')\n", (1179, 1202), False, 'from prefect.tasks.core.function import FunctionTask\n'), ((1526, 1548), 'prefect.tasks.core.function.FunctionTask', 'FunctionTask', ([], {'fn': 'my_fn'}), '(fn=my_fn)\n', (1538, 1548), False, 'from prefect.tasks.core.function import FunctionTask\n'), ((1659, 1691), 'prefect.tasks.core.function.FunctionTask', 'FunctionTask', ([], {'fn': '(lambda x: x + 1)'}), '(fn=lambda x: x + 1)\n', (1671, 1691), False, 'from prefect.tasks.core.function import FunctionTask\n'), ((1875, 1897), 'prefect.tasks.core.function.FunctionTask', 'FunctionTask', ([], {'fn': 'my_fn'}), '(fn=my_fn)\n', (1887, 1897), False, 'from prefect.tasks.core.function import FunctionTask\n'), ((2069, 2087), 'prefect.tasks.core.collections.List', 'collections.List', ([], {}), '()\n', (2085, 2087), False, 'from prefect.tasks.core import collections\n'), ((2253, 2259), 'prefect.core.Task', 'Task', ([], {}), '()\n', (2257, 2259), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2273, 2279), 'prefect.core.Task', 'Task', ([], {}), '()\n', (2277, 2279), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2292, 2310), 'prefect.tasks.core.collections.List', 'collections.List', ([], {}), '()\n', (2308, 2310), False, 'from prefect.tasks.core import collections\n'), ((2576, 2595), 'prefect.tasks.core.collections.Tuple', 'collections.Tuple', ([], {}), '()\n', (2593, 2595), False, 'from prefect.tasks.core import collections\n'), ((2762, 2768), 'prefect.core.Task', 'Task', ([], {}), '()\n', (2766, 2768), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2782, 2788), 'prefect.core.Task', 'Task', ([], {}), '()\n', (2786, 2788), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2801, 2820), 'prefect.tasks.core.collections.Tuple', 'collections.Tuple', ([], {}), '()\n', (2818, 2820), False, 'from prefect.tasks.core import collections\n'), ((3082, 3099), 'prefect.tasks.core.collections.Set', 'collections.Set', ([], {}), '()\n', (3097, 3099), False, 'from prefect.tasks.core import collections\n'), ((3269, 3275), 'prefect.core.Task', 'Task', ([], {}), '()\n', (3273, 3275), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3289, 3295), 'prefect.core.Task', 'Task', ([], {}), '()\n', (3293, 3295), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3308, 3325), 'prefect.tasks.core.collections.Set', 'collections.Set', ([], {}), '()\n', (3323, 3325), False, 'from prefect.tasks.core import collections\n'), ((3589, 3607), 'prefect.tasks.core.collections.Dict', 'collections.Dict', ([], {}), '()\n', (3605, 3607), False, 'from prefect.tasks.core import collections\n'), ((3816, 3834), 'prefect.tasks.core.collections.Dict', 'collections.Dict', ([], {}), '()\n', (3832, 3834), False, 'from prefect.tasks.core import collections\n'), ((4065, 4083), 
'prefect.tasks.core.collections.Dict', 'collections.Dict', ([], {}), '()\n', (4081, 4083), False, 'from prefect.tasks.core import collections\n'), ((4368, 4382), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (4377, 4382), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((4395, 4409), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (4404, 4409), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((4812, 4826), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (4821, 4826), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((4839, 4853), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (4848, 4853), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((4900, 4917), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (4904, 4917), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((5272, 5286), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (5281, 5286), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((5299, 5313), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (5308, 5313), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((5718, 5732), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (5727, 5732), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((5745, 5759), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (5754, 5759), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((5806, 5823), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (5810, 5823), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((6177, 6191), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (6186, 6191), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((6204, 6218), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (6213, 6218), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((6629, 6643), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (6638, 6643), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((6656, 6670), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (6665, 6670), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((6717, 6734), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (6721, 6734), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((7097, 7111), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (7106, 7111), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((7124, 7138), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (7133, 7138), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((7606, 7620), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (7615, 7620), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((7633, 7647), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (7642, 7647), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((7694, 7711), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (7698, 7711), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((8143, 8157), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (8152, 
8157), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((8170, 8184), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (8179, 8184), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((8593, 8607), 'prefect.core.Parameter', 'Parameter', (['"""x"""'], {}), "('x')\n", (8602, 8607), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((8620, 8634), 'prefect.core.Parameter', 'Parameter', (['"""y"""'], {}), "('y')\n", (8629, 8634), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((8681, 8698), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (8685, 8698), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((9114, 9132), 'prefect.tasks.core.collections.List', 'collections.List', ([], {}), '()\n', (9130, 9132), False, 'from prefect.tasks.core import collections\n'), ((9408, 9427), 'prefect.tasks.core.collections.Tuple', 'collections.Tuple', ([], {}), '()\n', (9425, 9427), False, 'from prefect.tasks.core import collections\n'), ((533, 550), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (537, 550), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((576, 582), 'prefect.core.Task', 'Task', ([], {}), '()\n', (580, 582), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((825, 849), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (838, 849), False, 'import pytest\n'), ((863, 881), 'prefect.tasks.core.function.FunctionTask', 'FunctionTask', ([], {'fn': '(1)'}), '(fn=1)\n', (875, 881), False, 'from prefect.tasks.core.function import FunctionTask\n'), ((2101, 2118), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (2105, 2118), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2324, 2341), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (2328, 2341), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2434, 2458), 'prefect.core.Edge', 'Edge', (['t1', 'l'], {'key': '"""arg_1"""'}), "(t1, l, key='arg_1')\n", (2438, 2458), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2485, 2509), 'prefect.core.Edge', 'Edge', (['t2', 'l'], {'key': '"""arg_2"""'}), "(t2, l, key='arg_2')\n", (2489, 2509), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2609, 2626), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (2613, 2626), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2834, 2851), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (2838, 2851), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2944, 2968), 'prefect.core.Edge', 'Edge', (['t1', 'l'], {'key': '"""arg_1"""'}), "(t1, l, key='arg_1')\n", (2948, 2968), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((2995, 3019), 'prefect.core.Edge', 'Edge', (['t2', 'l'], {'key': '"""arg_2"""'}), "(t2, l, key='arg_2')\n", (2999, 3019), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3113, 3130), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (3117, 3130), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3339, 3356), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (3343, 3356), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3449, 3473), 'prefect.core.Edge', 'Edge', (['t1', 'l'], {'key': 
'"""arg_1"""'}), "(t1, l, key='arg_1')\n", (3453, 3473), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3500, 3524), 'prefect.core.Edge', 'Edge', (['t2', 'l'], {'key': '"""arg_2"""'}), "(t2, l, key='arg_2')\n", (3504, 3524), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3621, 3638), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (3625, 3638), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((3848, 3865), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (3852, 3865), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((4097, 4114), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (4101, 4114), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((4457, 4474), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (4461, 4474), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((5361, 5378), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (5365, 5378), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((6266, 6283), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (6270, 6283), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((7186, 7203), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (7190, 7203), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((8232, 8249), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (8236, 8249), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((9146, 9163), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (9150, 9163), False, 'from prefect.core import Edge, Flow, Parameter, Task\n'), ((9441, 9458), 'prefect.core.Flow', 'Flow', ([], {'name': '"""test"""'}), "(name='test')\n", (9445, 9458), False, 'from prefect.core import Edge, Flow, Parameter, Task\n')] |
# This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, <NAME> and <NAME>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
"""
This module contains functions for generating Qobj representation of a variety
of commonly occurring quantum operators.
"""
__all__ = ['jmat', 'spin_Jx', 'spin_Jy', 'spin_Jz', 'spin_Jm', 'spin_Jp',
'spin_J_set', 'sigmap', 'sigmam', 'sigmax', 'sigmay', 'sigmaz',
'destroy', 'create', 'qeye', 'identity', 'position', 'momentum',
'num', 'squeeze', 'squeezing', 'displace', 'commutator',
'qutrit_ops', 'qdiags', 'phase', 'qzero', 'enr_destroy',
'enr_identity', 'charge', 'tunneling']
import numbers
import numpy as np
import scipy
import scipy.sparse as sp
from qutip.qobj import Qobj
from qutip.fastsparse import fast_csr_matrix, fast_identity
from qutip.dimensions import flatten
#
# Spin operators
#
def jmat(j, *args):
"""Higher-order spin operators:
Parameters
----------
j : float
Spin of operator
args : str
Which operator to return 'x','y','z','+','-'.
If no args given, then output is ['x','y','z']
Returns
-------
jmat : qobj / ndarray
``qobj`` for requested spin operator(s).
Examples
--------
>>> jmat(1) # doctest: +SKIP
[ Quantum object: dims = [[3], [3]], \
shape = [3, 3], type = oper, isHerm = True
Qobj data =
[[ 0. 0.70710678 0. ]
[ 0.70710678 0. 0.70710678]
[ 0. 0.70710678 0. ]]
Quantum object: dims = [[3], [3]], \
shape = [3, 3], type = oper, isHerm = True
Qobj data =
[[ 0.+0.j 0.-0.70710678j 0.+0.j ]
[ 0.+0.70710678j 0.+0.j 0.-0.70710678j]
[ 0.+0.j 0.+0.70710678j 0.+0.j ]]
Quantum object: dims = [[3], [3]], \
shape = [3, 3], type = oper, isHerm = True
Qobj data =
[[ 1. 0. 0.]
[ 0. 0. 0.]
[ 0. 0. -1.]]]
Notes
-----
If no 'args' input, then returns array of ['x','y','z'] operators.
"""
if (np.fix(2 * j) != 2 * j) or (j < 0):
raise TypeError('j must be a non-negative integer or half-integer')
if not args:
return jmat(j, 'x'), jmat(j, 'y'), jmat(j, 'z')
if args[0] == '+':
A = _jplus(j)
elif args[0] == '-':
A = _jplus(j).getH()
elif args[0] == 'x':
A = 0.5 * (_jplus(j) + _jplus(j).getH())
elif args[0] == 'y':
A = -0.5 * 1j * (_jplus(j) - _jplus(j).getH())
elif args[0] == 'z':
A = _jz(j)
else:
raise TypeError('Invalid type')
return Qobj(A)
def _jplus(j):
"""
Internal functions for generating the data representing the J-plus
operator.
"""
m = np.arange(j, -j - 1, -1, dtype=complex)
data = (np.sqrt(j * (j + 1.0) - (m + 1.0) * m))[1:]
N = m.shape[0]
ind = np.arange(1, N, dtype=np.int32)
ptr = np.array(list(range(N-1))+[N-1]*2, dtype=np.int32)
ptr[-1] = N-1
return fast_csr_matrix((data,ind,ptr), shape=(N,N))
def _jz(j):
"""
Internal functions for generating the data representing the J-z operator.
"""
N = int(2*j+1)
data = np.array([j-k for k in range(N) if (j-k)!=0], dtype=complex)
# Even shaped matrix
if (N % 2 == 0):
ind = np.arange(N, dtype=np.int32)
ptr = np.arange(N+1,dtype=np.int32)
ptr[-1] = N
# Odd shaped matrix
else:
j = int(j)
ind = np.array(list(range(j))+list(range(j+1,N)), dtype=np.int32)
ptr = np.array(list(range(j+1))+list(range(j,N)), dtype=np.int32)
ptr[-1] = N-1
return fast_csr_matrix((data,ind,ptr), shape=(N,N))
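# Hedged illustration (added for clarity, not part of the original module): the
# matrices returned by jmat obey the angular-momentum algebra [Jx, Jy] = i*Jz.
def _example_check_spin_commutator(j=0.5):
    jx, jy, jz = jmat(j)
    assert (jx * jy - jy * jx - 1j * jz).norm() < 1e-12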
#
# Spin j operators:
#
def spin_Jx(j):
"""Spin-j x operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, 'x')
def spin_Jy(j):
"""Spin-j y operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, 'y')
def spin_Jz(j):
"""Spin-j z operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, 'z')
def spin_Jm(j):
"""Spin-j annihilation operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, '-')
def spin_Jp(j):
"""Spin-j creation operator
Parameters
----------
j : float
Spin of operator
Returns
-------
op : Qobj
``qobj`` representation of the operator.
"""
return jmat(j, '+')
def spin_J_set(j):
"""Set of spin-j operators (x, y, z)
Parameters
----------
j : float
Spin of operators
Returns
-------
list : list of Qobj
        list of ``qobj`` representing the spin operators.
"""
return jmat(j)
#
# Pauli spin 1/2 operators:
#
def sigmap():
"""Creation operator for Pauli spins.
Examples
--------
>>> sigmap() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = False
Qobj data =
[[ 0. 1.]
[ 0. 0.]]
"""
return jmat(1 / 2., '+')
def sigmam():
"""Annihilation operator for Pauli spins.
Examples
--------
>>> sigmam() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = False
Qobj data =
[[ 0. 0.]
[ 1. 0.]]
"""
return jmat(1 / 2., '-')
def sigmax():
"""Pauli spin 1/2 sigma-x operator
Examples
--------
>>> sigmax() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = False
Qobj data =
[[ 0. 1.]
[ 1. 0.]]
"""
return 2.0 * jmat(1.0 / 2, 'x')
def sigmay():
"""Pauli spin 1/2 sigma-y operator.
Examples
--------
>>> sigmay() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = True
Qobj data =
[[ 0.+0.j 0.-1.j]
[ 0.+1.j 0.+0.j]]
"""
return 2.0 * jmat(1.0 / 2, 'y')
def sigmaz():
"""Pauli spin 1/2 sigma-z operator.
Examples
--------
>>> sigmaz() # doctest: +SKIP
Quantum object: dims = [[2], [2]], \
shape = [2, 2], type = oper, isHerm = True
Qobj data =
[[ 1. 0.]
[ 0. -1.]]
"""
return 2.0 * jmat(1.0 / 2, 'z')
#
# DESTROY returns annihilation operator for N dimensional Hilbert space
# out = destroy(N), N is integer value & N>0
#
def destroy(N, offset=0):
'''Destruction (lowering) operator.
Parameters
----------
N : int
Dimension of Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Qobj for lowering operator.
Examples
--------
>>> destroy(4) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.00000000+0.j 1.00000000+0.j 0.00000000+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.00000000+0.j 1.41421356+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j 1.73205081+0.j]
[ 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j]]
'''
if not isinstance(N, (int, np.integer)): # raise error if N not integer
raise ValueError("Hilbert space dimension must be integer value")
data = np.sqrt(np.arange(offset+1, N+offset, dtype=complex))
ind = np.arange(1,N, dtype=np.int32)
ptr = np.arange(N+1, dtype=np.int32)
ptr[-1] = N-1
return Qobj(fast_csr_matrix((data,ind,ptr),shape=(N,N)), isherm=False)
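# Hedged illustration (added for clarity, not part of the original module): the
# lowering operator maps |n> to sqrt(n)|n-1>; qutip.states.basis is assumed here.
def _example_check_destroy(N=4, n=2):
    from qutip.states import basis
    a = destroy(N)
    assert (a * basis(N, n) - np.sqrt(n) * basis(N, n - 1)).norm() < 1e-12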
#
# create returns creation operator for N dimensional Hilbert space
# out = create(N), N is integer value & N>0
#
def create(N, offset=0):
'''Creation (raising) operator.
Parameters
----------
    N : int
        Dimension of Hilbert space.
    offset : int (default 0)
        The lowest number state that is included in the finite number state
        representation of the operator.
    Returns
    -------
    oper : qobj
        Qobj for raising operator.
Examples
--------
>>> create(4) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j]
[ 1.00000000+0.j 0.00000000+0.j 0.00000000+0.j 0.00000000+0.j]
[ 0.00000000+0.j 1.41421356+0.j 0.00000000+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.00000000+0.j 1.73205081+0.j 0.00000000+0.j]]
'''
if not isinstance(N, (int, np.integer)): # raise error if N not integer
raise ValueError("Hilbert space dimension must be integer value")
qo = destroy(N, offset=offset) # create operator using destroy function
return qo.dag()
def _implicit_tensor_dimensions(dimensions):
"""
Total flattened size and operator dimensions for operator creation routines
that automatically perform tensor products.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
First dimension of an operator which can create an implicit tensor
product. If the type is `int`, it is promoted first to `[dimensions]`.
From there, it should be one of the two-elements `dims` parameter of a
`qutip.Qobj` representing an `oper` or `super`, with possible tensor
products.
Returns
-------
size : int
Dimension of backing matrix required to represent operator.
dimensions : list
Dimension list in the form required by ``Qobj`` creation.
"""
if not isinstance(dimensions, list):
dimensions = [dimensions]
flat = flatten(dimensions)
if not all(isinstance(x, numbers.Integral) and x >= 0 for x in flat):
raise ValueError("All dimensions must be integers >= 0")
return np.prod(flat), [dimensions, dimensions]
def qzero(dimensions):
"""
Zero operator.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
Dimension of Hilbert space. If provided as a list of ints, then the
dimension is the product over this list, but the ``dims`` property of
the new Qobj are set to this list. This can produce either `oper` or
`super` depending on the passed `dimensions`.
Returns
-------
qzero : qobj
Zero operator Qobj.
"""
size, dimensions = _implicit_tensor_dimensions(dimensions)
# A sparse matrix with no data is equal to a zero matrix.
return Qobj(fast_csr_matrix(shape=(size, size), dtype=complex),
dims=dimensions, isherm=True)
#
# QEYE returns identity operator for a Hilbert space with dimensions dims.
# a = qeye(N), N is integer or list of integers & all elements >= 0
#
def qeye(dimensions):
"""
Identity operator.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
Dimension of Hilbert space. If provided as a list of ints, then the
dimension is the product over this list, but the ``dims`` property of
the new Qobj are set to this list. This can produce either `oper` or
`super` depending on the passed `dimensions`.
Returns
-------
oper : qobj
Identity operator Qobj.
Examples
--------
>>> qeye(3) # doctest: +SKIP
Quantum object: dims = [[3], [3]], shape = (3, 3), type = oper, \
isherm = True
Qobj data =
[[ 1. 0. 0.]
[ 0. 1. 0.]
[ 0. 0. 1.]]
>>> qeye([2,2]) # doctest: +SKIP
Quantum object: dims = [[2, 2], [2, 2]], shape = (4, 4), type = oper, \
isherm = True
Qobj data =
[[1. 0. 0. 0.]
[0. 1. 0. 0.]
[0. 0. 1. 0.]
[0. 0. 0. 1.]]
"""
size, dimensions = _implicit_tensor_dimensions(dimensions)
return Qobj(fast_identity(size),
dims=dimensions, isherm=True, isunitary=True)
def identity(dims):
"""Identity operator. Alternative name to :func:`qeye`.
Parameters
----------
dimensions : (int) or (list of int) or (list of list of int)
Dimension of Hilbert space. If provided as a list of ints, then the
dimension is the product over this list, but the ``dims`` property of
the new Qobj are set to this list. This can produce either `oper` or
`super` depending on the passed `dimensions`.
Returns
-------
oper : qobj
Identity operator Qobj.
"""
return qeye(dims)
def position(N, offset=0):
"""
Position operator x=1/sqrt(2)*(a+a.dag())
Parameters
----------
N : int
Number of Fock states in Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Position operator as Qobj.
"""
a = destroy(N, offset=offset)
return 1.0 / np.sqrt(2.0) * (a + a.dag())
def momentum(N, offset=0):
"""
Momentum operator p=-1j/sqrt(2)*(a-a.dag())
Parameters
----------
N : int
Number of Fock states in Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Momentum operator as Qobj.
"""
a = destroy(N, offset=offset)
return -1j / np.sqrt(2.0) * (a - a.dag())
def num(N, offset=0):
"""Quantum object for number operator.
Parameters
----------
N : int
The dimension of the Hilbert space.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper: qobj
Qobj for number operator.
Examples
--------
>>> num(4) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = True
Qobj data =
[[0 0 0 0]
[0 1 0 0]
[0 0 2 0]
[0 0 0 3]]
"""
if offset == 0:
data = np.arange(1,N, dtype=complex)
ind = np.arange(1,N, dtype=np.int32)
ptr = np.array([0]+list(range(0,N)), dtype=np.int32)
ptr[-1] = N-1
else:
data = np.arange(offset, offset + N, dtype=complex)
ind = np.arange(N, dtype=np.int32)
ptr = np.arange(N+1,dtype=np.int32)
ptr[-1] = N
return Qobj(fast_csr_matrix((data,ind,ptr), shape=(N,N)), isherm=True)
def squeeze(N, z, offset=0):
"""Single-mode Squeezing operator.
Parameters
----------
N : int
Dimension of hilbert space.
z : float/complex
Squeezing parameter.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : :class:`qutip.qobj.Qobj`
Squeezing operator.
Examples
--------
>>> squeeze(4, 0.25) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.98441565+0.j 0.00000000+0.j 0.17585742+0.j 0.00000000+0.j]
[ 0.00000000+0.j 0.95349007+0.j 0.00000000+0.j 0.30142443+0.j]
[-0.17585742+0.j 0.00000000+0.j 0.98441565+0.j 0.00000000+0.j]
[ 0.00000000+0.j -0.30142443+0.j 0.00000000+0.j 0.95349007+0.j]]
"""
a = destroy(N, offset=offset)
op = (1 / 2.0) * np.conj(z) * (a ** 2) - (1 / 2.0) * z * (a.dag()) ** 2
return op.expm()
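# Hedged illustration (added for clarity, not part of the original module): the
# generator above is anti-Hermitian, so the squeezing operator stays unitary
# even on the truncated Hilbert space.
def _example_check_squeeze_unitary(N=10, z=0.25):
    S = squeeze(N, z)
    assert (S.dag() * S - qeye(N)).norm() < 1e-8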
def squeezing(a1, a2, z):
"""Generalized squeezing operator.
.. math::
S(z) = \\exp\\left(\\frac{1}{2}\\left(z^*a_1a_2
- za_1^\\dagger a_2^\\dagger\\right)\\right)
Parameters
----------
a1 : :class:`qutip.qobj.Qobj`
Operator 1.
a2 : :class:`qutip.qobj.Qobj`
Operator 2.
z : float/complex
Squeezing parameter.
Returns
-------
oper : :class:`qutip.qobj.Qobj`
Squeezing operator.
"""
b = 0.5 * (np.conj(z) * (a1 * a2) - z * (a1.dag() * a2.dag()))
return b.expm()
def displace(N, alpha, offset=0):
"""Single-mode displacement operator.
Parameters
----------
N : int
Dimension of Hilbert space.
alpha : float/complex
Displacement amplitude.
offset : int (default 0)
The lowest number state that is included in the finite number state
representation of the operator.
Returns
-------
oper : qobj
Displacement operator.
Examples
---------
>>> displace(4,0.25) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isHerm = False
Qobj data =
[[ 0.96923323+0.j -0.24230859+0.j 0.04282883+0.j -0.00626025+0.j]
[ 0.24230859+0.j 0.90866411+0.j -0.33183303+0.j 0.07418172+0.j]
[ 0.04282883+0.j 0.33183303+0.j 0.84809499+0.j -0.41083747+0.j]
[ 0.00626025+0.j 0.07418172+0.j 0.41083747+0.j 0.90866411+0.j]]
"""
a = destroy(N, offset=offset)
D = (alpha * a.dag() - np.conj(alpha) * a).expm()
return D
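# Hedged illustration (added for clarity, not part of the original module):
# displacing the vacuum yields a coherent state; qutip.states.basis and
# coherent are assumed importable, and N is large enough that truncation
# error is negligible for this alpha.
def _example_check_displace(N=20, alpha=0.5):
    from qutip.states import basis, coherent
    ket = displace(N, alpha) * basis(N, 0)
    assert (ket - coherent(N, alpha)).norm() < 1e-6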
def commutator(A, B, kind="normal"):
"""
Return the commutator of kind `kind` (normal, anti) of the
two operators A and B.
"""
if kind == 'normal':
return A * B - B * A
elif kind == 'anti':
return A * B + B * A
else:
raise TypeError("Unknown commutator kind '%s'" % kind)
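# Hedged illustration (added for clarity, not part of the original module):
# the Pauli matrices satisfy [sigma_x, sigma_y] = 2i * sigma_z.
def _example_check_pauli_commutator():
    assert (commutator(sigmax(), sigmay()) - 2j * sigmaz()).norm() < 1e-12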
def qutrit_ops():
"""
Operators for a three level system (qutrit).
Returns
-------
opers: array
`array` of qutrit operators.
"""
from qutip.states import qutrit_basis
one, two, three = qutrit_basis()
sig11 = one * one.dag()
sig22 = two * two.dag()
sig33 = three * three.dag()
sig12 = one * two.dag()
sig23 = two * three.dag()
sig31 = three * one.dag()
return np.array([sig11, sig22, sig33, sig12, sig23, sig31],
dtype=object)
def qdiags(diagonals, offsets, dims=None, shape=None):
"""
Constructs an operator from an array of diagonals.
Parameters
----------
diagonals : sequence of array_like
Array of elements to place along the selected diagonals.
offsets : sequence of ints
Sequence for diagonals to be set:
- k=0 main diagonal
- k>0 kth upper diagonal
- k<0 kth lower diagonal
dims : list, optional
Dimensions for operator
shape : list, tuple, optional
Shape of operator. If omitted, a square operator large enough
to contain the diagonals is generated.
See Also
--------
scipy.sparse.diags : for usage information.
Notes
-----
This function requires SciPy 0.11+.
Examples
--------
>>> qdiags(sqrt(range(1, 4)), 1) # doctest: +SKIP
Quantum object: dims = [[4], [4]], \
shape = [4, 4], type = oper, isherm = False
Qobj data =
[[ 0. 1. 0. 0. ]
[ 0. 0. 1.41421356 0. ]
[ 0. 0. 0. 1.73205081]
[ 0. 0. 0. 0. ]]
"""
data = sp.diags(diagonals, offsets, shape, format='csr', dtype=complex)
if not dims:
dims = [[], []]
if not shape:
shape = []
return Qobj(data, dims, list(shape))
def phase(N, phi0=0):
"""
Single-mode Pegg-Barnett phase operator.
Parameters
----------
N : int
Number of basis states in Hilbert space.
phi0 : float
Reference phase.
Returns
-------
oper : qobj
Phase operator with respect to reference phase.
Notes
-----
The Pegg-Barnett phase operator is Hermitian on a truncated Hilbert space.
"""
phim = phi0 + (2.0 * np.pi * np.arange(N)) / N # discrete phase angles
n = np.arange(N).reshape((N, 1))
states = np.array([np.sqrt(kk) / np.sqrt(N) * np.exp(1.0j * n * kk)
for kk in phim])
ops = np.array([np.outer(st, st.conj()) for st in states])
return Qobj(np.sum(ops, axis=0))
def enr_destroy(dims, excitations):
"""
    Generate annihilation operators for modes in an excitation-number-restricted
    state space. For example, consider a system consisting of 4 modes, each
    with 5 states. The total Hilbert space size is 5**4 = 625. If we are
only interested in states that contain up to 2 excitations, we only need
to include states such as
(0, 0, 0, 0)
(0, 0, 0, 1)
(0, 0, 0, 2)
(0, 0, 1, 0)
(0, 0, 1, 1)
(0, 0, 2, 0)
...
This function creates annihilation operators for the 4 modes that act
within this state space:
a1, a2, a3, a4 = enr_destroy([5, 5, 5, 5], excitations=2)
    From this point onwards, the annihilation operators a1, ..., a4 can be
    used to set up a Hamiltonian, collapse operators and expectation-value
    operators, etc., following the usual pattern.
Parameters
----------
dims : list
A list of the dimensions of each subsystem of a composite quantum
system.
excitations : integer
The maximum number of excitations that are to be included in the
state space.
Returns
-------
a_ops : list of qobj
A list of annihilation operators for each mode in the composite
quantum system described by dims.
"""
from qutip.states import enr_state_dictionaries
nstates, state2idx, idx2state = enr_state_dictionaries(dims, excitations)
a_ops = [sp.lil_matrix((nstates, nstates), dtype=np.complex)
for _ in range(len(dims))]
for n1, state1 in idx2state.items():
for n2, state2 in idx2state.items():
for idx, a in enumerate(a_ops):
s1 = [s for idx2, s in enumerate(state1) if idx != idx2]
s2 = [s for idx2, s in enumerate(state2) if idx != idx2]
if (state1[idx] == state2[idx] - 1) and (s1 == s2):
a_ops[idx][n1, n2] = np.sqrt(state2[idx])
return [Qobj(a, dims=[dims, dims]) for a in a_ops]
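# Hedged illustration (added for clarity, not part of the original module): with
# 4 modes of 5 levels and at most 2 excitations, the restricted space has
# 1 + 4 + 10 = 15 states instead of 5**4 = 625, so each operator is 15x15.
def _example_enr_destroy_size():
    a_ops = enr_destroy([5, 5, 5, 5], excitations=2)
    assert len(a_ops) == 4 and a_ops[0].shape[0] == 15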
def enr_identity(dims, excitations):
"""
Generate the identity operator for the excitation-number restricted
    state space defined by the `dims` and `excitations` arguments. See the
docstring for enr_fock for a more detailed description of these arguments.
Parameters
----------
dims : list
A list of the dimensions of each subsystem of a composite quantum
system.
excitations : integer
The maximum number of excitations that are to be included in the
state space.
Returns
-------
op : Qobj
        A Qobj instance that represents the identity operator in the
        excitation-number-restricted state space defined by `dims` and
        `excitations`.
"""
from qutip.states import enr_state_dictionaries
nstates, _, _ = enr_state_dictionaries(dims, excitations)
data = sp.eye(nstates, nstates, dtype=np.complex)
return Qobj(data, dims=[dims, dims])
def charge(Nmax, Nmin=None, frac = 1):
"""
Generate the diagonal charge operator over charge states
from Nmin to Nmax.
Parameters
----------
Nmax : int
Maximum charge state to consider.
Nmin : int (default = -Nmax)
Lowest charge state to consider.
frac : float (default = 1)
Specify fractional charge if needed.
Returns
-------
C : Qobj
Charge operator over [Nmin,Nmax].
Notes
-----
.. versionadded:: 3.2
"""
if Nmin is None:
Nmin = -Nmax
diag = np.arange(Nmin, Nmax+1, dtype=float)
if frac != 1:
diag *= frac
C = sp.diags(diag, 0, format='csr', dtype=complex)
return Qobj(C, isherm=True)
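# Hedged illustration (added for clarity, not part of the original module):
# charge(1) is diagonal over the charge states -1, 0, +1.
def _example_check_charge():
    C = charge(1)
    assert np.allclose(np.diag(C.full()), [-1, 0, 1])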
def tunneling(N, m=1):
"""
Tunneling operator with elements of the form
:math:`\\sum |N><N+m| + |N+m><N|`.
Parameters
----------
N : int
Number of basis states in Hilbert space.
m : int (default = 1)
Number of excitations in tunneling event.
Returns
-------
T : Qobj
Tunneling operator.
Notes
-----
.. versionadded:: 3.2
"""
diags = [np.ones(N-m,dtype=int),np.ones(N-m,dtype=int)]
T = sp.diags(diags,[m,-m],format='csr', dtype=complex)
return Qobj(T, isherm=True)
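# Hedged illustration (added for clarity, not part of the original module):
# tunneling(N, m) couples |n> and |n+m> with unit amplitude.
def _example_check_tunneling(N=4, m=1):
    T = tunneling(N, m)
    expected = np.diag(np.ones(N - m), m) + np.diag(np.ones(N - m), -m)
    assert np.allclose(T.full(), expected)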
# Break circular dependencies by a trailing import.
# Note that we use a relative import here to deal with that
# qutip.tensor is the *function* tensor, not the module.
from qutip.tensor import tensor
| [
"numpy.prod",
"scipy.sparse.lil_matrix",
"numpy.sqrt",
"numpy.ones",
"qutip.states.enr_state_dictionaries",
"qutip.dimensions.flatten",
"qutip.qobj.Qobj",
"scipy.sparse.eye",
"numpy.fix",
"numpy.conj",
"qutip.fastsparse.fast_csr_matrix",
"qutip.states.qutrit_basis",
"qutip.fastsparse.fast_identity",
"numpy.exp",
"numpy.array",
"numpy.sum",
"scipy.sparse.diags",
"numpy.arange"
]
| [((4281, 4288), 'qutip.qobj.Qobj', 'Qobj', (['A'], {}), '(A)\n', (4285, 4288), False, 'from qutip.qobj import Qobj\n'), ((4415, 4454), 'numpy.arange', 'np.arange', (['j', '(-j - 1)', '(-1)'], {'dtype': 'complex'}), '(j, -j - 1, -1, dtype=complex)\n', (4424, 4454), True, 'import numpy as np\n'), ((4540, 4571), 'numpy.arange', 'np.arange', (['(1)', 'N'], {'dtype': 'np.int32'}), '(1, N, dtype=np.int32)\n', (4549, 4571), True, 'import numpy as np\n'), ((4662, 4709), 'qutip.fastsparse.fast_csr_matrix', 'fast_csr_matrix', (['(data, ind, ptr)'], {'shape': '(N, N)'}), '((data, ind, ptr), shape=(N, N))\n', (4677, 4709), False, 'from qutip.fastsparse import fast_csr_matrix, fast_identity\n'), ((5293, 5340), 'qutip.fastsparse.fast_csr_matrix', 'fast_csr_matrix', (['(data, ind, ptr)'], {'shape': '(N, N)'}), '((data, ind, ptr), shape=(N, N))\n', (5308, 5340), False, 'from qutip.fastsparse import fast_csr_matrix, fast_identity\n'), ((9507, 9538), 'numpy.arange', 'np.arange', (['(1)', 'N'], {'dtype': 'np.int32'}), '(1, N, dtype=np.int32)\n', (9516, 9538), True, 'import numpy as np\n'), ((9548, 9580), 'numpy.arange', 'np.arange', (['(N + 1)'], {'dtype': 'np.int32'}), '(N + 1, dtype=np.int32)\n', (9557, 9580), True, 'import numpy as np\n'), ((11755, 11774), 'qutip.dimensions.flatten', 'flatten', (['dimensions'], {}), '(dimensions)\n', (11762, 11774), False, 'from qutip.dimensions import flatten\n'), ((19729, 19743), 'qutip.states.qutrit_basis', 'qutrit_basis', ([], {}), '()\n', (19741, 19743), False, 'from qutip.states import qutrit_basis\n'), ((19931, 19997), 'numpy.array', 'np.array', (['[sig11, sig22, sig33, sig12, sig23, sig31]'], {'dtype': 'object'}), '([sig11, sig22, sig33, sig12, sig23, sig31], dtype=object)\n', (19939, 19997), True, 'import numpy as np\n'), ((21223, 21287), 'scipy.sparse.diags', 'sp.diags', (['diagonals', 'offsets', 'shape'], {'format': '"""csr"""', 'dtype': 'complex'}), "(diagonals, offsets, shape, format='csr', dtype=complex)\n", (21231, 21287), True, 'import scipy.sparse as sp\n'), ((23557, 23598), 'qutip.states.enr_state_dictionaries', 'enr_state_dictionaries', (['dims', 'excitations'], {}), '(dims, excitations)\n', (23579, 23598), False, 'from qutip.states import enr_state_dictionaries\n'), ((25062, 25103), 'qutip.states.enr_state_dictionaries', 'enr_state_dictionaries', (['dims', 'excitations'], {}), '(dims, excitations)\n', (25084, 25103), False, 'from qutip.states import enr_state_dictionaries\n'), ((25115, 25157), 'scipy.sparse.eye', 'sp.eye', (['nstates', 'nstates'], {'dtype': 'np.complex'}), '(nstates, nstates, dtype=np.complex)\n', (25121, 25157), True, 'import scipy.sparse as sp\n'), ((25169, 25198), 'qutip.qobj.Qobj', 'Qobj', (['data'], {'dims': '[dims, dims]'}), '(data, dims=[dims, dims])\n', (25173, 25198), False, 'from qutip.qobj import Qobj\n'), ((25762, 25800), 'numpy.arange', 'np.arange', (['Nmin', '(Nmax + 1)'], {'dtype': 'float'}), '(Nmin, Nmax + 1, dtype=float)\n', (25771, 25800), True, 'import numpy as np\n'), ((25846, 25892), 'scipy.sparse.diags', 'sp.diags', (['diag', '(0)'], {'format': '"""csr"""', 'dtype': 'complex'}), "(diag, 0, format='csr', dtype=complex)\n", (25854, 25892), True, 'import scipy.sparse as sp\n'), ((25904, 25924), 'qutip.qobj.Qobj', 'Qobj', (['C'], {'isherm': '(True)'}), '(C, isherm=True)\n', (25908, 25924), False, 'from qutip.qobj import Qobj\n'), ((26405, 26458), 'scipy.sparse.diags', 'sp.diags', (['diags', '[m, -m]'], {'format': '"""csr"""', 'dtype': 'complex'}), "(diags, [m, -m], format='csr', dtype=complex)\n", (26413, 26458), 
True, 'import scipy.sparse as sp\n'), ((26467, 26487), 'qutip.qobj.Qobj', 'Qobj', (['T'], {'isherm': '(True)'}), '(T, isherm=True)\n', (26471, 26487), False, 'from qutip.qobj import Qobj\n'), ((4467, 4505), 'numpy.sqrt', 'np.sqrt', (['(j * (j + 1.0) - (m + 1.0) * m)'], {}), '(j * (j + 1.0) - (m + 1.0) * m)\n', (4474, 4505), True, 'import numpy as np\n'), ((4966, 4994), 'numpy.arange', 'np.arange', (['N'], {'dtype': 'np.int32'}), '(N, dtype=np.int32)\n', (4975, 4994), True, 'import numpy as np\n'), ((5009, 5041), 'numpy.arange', 'np.arange', (['(N + 1)'], {'dtype': 'np.int32'}), '(N + 1, dtype=np.int32)\n', (5018, 5041), True, 'import numpy as np\n'), ((9451, 9499), 'numpy.arange', 'np.arange', (['(offset + 1)', '(N + offset)'], {'dtype': 'complex'}), '(offset + 1, N + offset, dtype=complex)\n', (9460, 9499), True, 'import numpy as np\n'), ((9613, 9660), 'qutip.fastsparse.fast_csr_matrix', 'fast_csr_matrix', (['(data, ind, ptr)'], {'shape': '(N, N)'}), '((data, ind, ptr), shape=(N, N))\n', (9628, 9660), False, 'from qutip.fastsparse import fast_csr_matrix, fast_identity\n'), ((11925, 11938), 'numpy.prod', 'np.prod', (['flat'], {}), '(flat)\n', (11932, 11938), True, 'import numpy as np\n'), ((12619, 12669), 'qutip.fastsparse.fast_csr_matrix', 'fast_csr_matrix', ([], {'shape': '(size, size)', 'dtype': 'complex'}), '(shape=(size, size), dtype=complex)\n', (12634, 12669), False, 'from qutip.fastsparse import fast_csr_matrix, fast_identity\n'), ((13900, 13919), 'qutip.fastsparse.fast_identity', 'fast_identity', (['size'], {}), '(size)\n', (13913, 13919), False, 'from qutip.fastsparse import fast_csr_matrix, fast_identity\n'), ((16158, 16188), 'numpy.arange', 'np.arange', (['(1)', 'N'], {'dtype': 'complex'}), '(1, N, dtype=complex)\n', (16167, 16188), True, 'import numpy as np\n'), ((16202, 16233), 'numpy.arange', 'np.arange', (['(1)', 'N'], {'dtype': 'np.int32'}), '(1, N, dtype=np.int32)\n', (16211, 16233), True, 'import numpy as np\n'), ((16341, 16385), 'numpy.arange', 'np.arange', (['offset', '(offset + N)'], {'dtype': 'complex'}), '(offset, offset + N, dtype=complex)\n', (16350, 16385), True, 'import numpy as np\n'), ((16400, 16428), 'numpy.arange', 'np.arange', (['N'], {'dtype': 'np.int32'}), '(N, dtype=np.int32)\n', (16409, 16428), True, 'import numpy as np\n'), ((16443, 16475), 'numpy.arange', 'np.arange', (['(N + 1)'], {'dtype': 'np.int32'}), '(N + 1, dtype=np.int32)\n', (16452, 16475), True, 'import numpy as np\n'), ((16510, 16557), 'qutip.fastsparse.fast_csr_matrix', 'fast_csr_matrix', (['(data, ind, ptr)'], {'shape': '(N, N)'}), '((data, ind, ptr), shape=(N, N))\n', (16525, 16557), False, 'from qutip.fastsparse import fast_csr_matrix, fast_identity\n'), ((22128, 22147), 'numpy.sum', 'np.sum', (['ops'], {'axis': '(0)'}), '(ops, axis=0)\n', (22134, 22147), True, 'import numpy as np\n'), ((23613, 23664), 'scipy.sparse.lil_matrix', 'sp.lil_matrix', (['(nstates, nstates)'], {'dtype': 'np.complex'}), '((nstates, nstates), dtype=np.complex)\n', (23626, 23664), True, 'import scipy.sparse as sp\n'), ((24125, 24151), 'qutip.qobj.Qobj', 'Qobj', (['a'], {'dims': '[dims, dims]'}), '(a, dims=[dims, dims])\n', (24129, 24151), False, 'from qutip.qobj import Qobj\n'), ((26350, 26375), 'numpy.ones', 'np.ones', (['(N - m)'], {'dtype': 'int'}), '(N - m, dtype=int)\n', (26357, 26375), True, 'import numpy as np\n'), ((26373, 26398), 'numpy.ones', 'np.ones', (['(N - m)'], {'dtype': 'int'}), '(N - m, dtype=int)\n', (26380, 26398), True, 'import numpy as np\n'), ((3735, 3748), 'numpy.fix', 'np.fix', (['(2 * 
j)'], {}), '(2 * j)\n', (3741, 3748), True, 'import numpy as np\n'), ((15005, 15017), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (15012, 15017), True, 'import numpy as np\n'), ((15491, 15503), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (15498, 15503), True, 'import numpy as np\n'), ((21908, 21920), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (21917, 21920), True, 'import numpy as np\n'), ((17526, 17536), 'numpy.conj', 'np.conj', (['z'], {}), '(z)\n', (17533, 17536), True, 'import numpy as np\n'), ((18099, 18109), 'numpy.conj', 'np.conj', (['z'], {}), '(z)\n', (18106, 18109), True, 'import numpy as np\n'), ((21857, 21869), 'numpy.arange', 'np.arange', (['N'], {}), '(N)\n', (21866, 21869), True, 'import numpy as np\n'), ((21987, 22008), 'numpy.exp', 'np.exp', (['(1.0j * n * kk)'], {}), '(1.0j * n * kk)\n', (21993, 22008), True, 'import numpy as np\n'), ((19131, 19145), 'numpy.conj', 'np.conj', (['alpha'], {}), '(alpha)\n', (19138, 19145), True, 'import numpy as np\n'), ((21960, 21971), 'numpy.sqrt', 'np.sqrt', (['kk'], {}), '(kk)\n', (21967, 21971), True, 'import numpy as np\n'), ((21974, 21984), 'numpy.sqrt', 'np.sqrt', (['N'], {}), '(N)\n', (21981, 21984), True, 'import numpy as np\n'), ((24091, 24111), 'numpy.sqrt', 'np.sqrt', (['state2[idx]'], {}), '(state2[idx])\n', (24098, 24111), True, 'import numpy as np\n')] |
from abc import ABCMeta, abstractmethod
import random
import json
import pickle
import numpy as np
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import nltk
from nltk.stem import WordNetLemmatizer
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.models import load_model
nltk.download('punkt', quiet=True)
nltk.download('wordnet', quiet=True)
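# Hedged usage sketch (added for illustration, not part of the original module).
# The intents path is a placeholder; GenericAssistant is defined further down,
# which is fine because the name is only resolved when this helper is called.
def _example_train_and_save(intents_path="intents.json"):
    assistant = GenericAssistant(intents_path, model_name="demo_model")
    assistant.train_model()
    assistant.save_model()
    return assistant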
class IAssistant(metaclass=ABCMeta):
@abstractmethod
def train_model(self):
""" Implemented in child class """
@abstractmethod
def request_tag(self, message):
""" Implemented in child class """
@abstractmethod
def get_tag_by_id(self, id):
""" Implemented in child class """
@abstractmethod
def request_method(self, message):
""" Implemented in child class """
@abstractmethod
def request(self, message):
""" Implemented in child class """
class GenericAssistant(IAssistant):
def __init__(self, intents, intent_methods={}, model_name="assistant_model", *, json_encoding='utf-8'):
self.intents = intents
self.intent_methods = intent_methods
self.model_name = model_name
self.json_encoding = json_encoding
if intents.endswith(".json"):
self.load_json_intents(intents)
self.lemmatizer = WordNetLemmatizer()
def load_json_intents(self, intents):
with open(intents, encoding=self.json_encoding) as f:
self.intents = json.load(f)
def train_model(self):
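        # Build the vocabulary and tag list from the intent patterns, then train a
        # bag-of-words classifier (Dense 128 -> 64 -> n_classes, softmax) with SGD.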
self.words = []
self.classes = []
documents = []
ignore_letters = ['!', '?', ',', '.']
for intent in self.intents['intents']:
for pattern in intent['patterns']:
word = nltk.word_tokenize(pattern)
self.words.extend(word)
documents.append((word, intent['tag']))
if intent['tag'] not in self.classes:
self.classes.append(intent['tag'])
self.words = [self.lemmatizer.lemmatize(w.lower()) for w in self.words if w not in ignore_letters]
self.words = sorted(list(set(self.words)))
self.classes = sorted(list(set(self.classes)))
training = []
output_empty = [0] * len(self.classes)
for doc in documents:
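            # One training row per (pattern, tag): binary bag-of-words input, one-hot tag output.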
bag = []
word_patterns = doc[0]
word_patterns = [self.lemmatizer.lemmatize(word.lower()) for word in word_patterns]
for word in self.words:
bag.append(1) if word in word_patterns else bag.append(0)
output_row = list(output_empty)
output_row[self.classes.index(doc[1])] = 1
training.append([bag, output_row])
random.shuffle(training)
        training = np.array(training, dtype=object)  # bag and one-hot rows differ in length, so keep an object array
train_x = list(training[:, 0])
train_y = list(training[:, 1])
self.model = Sequential()
self.model.add(Dense(128, input_shape=(len(train_x[0]),), activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(64, activation='relu'))
self.model.add(Dropout(0.5))
self.model.add(Dense(len(train_y[0]), activation='softmax'))
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
self.model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
self.hist = self.model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=1)
def save_model(self, model_name=None):
if model_name is None:
self.model.save(f"{self.model_name}.h5", self.hist)
with open(f'{self.model_name}_words.pkl', 'wb') as f:
pickle.dump(self.words, f)
with open(f'{self.model_name}_classes.pkl', 'wb') as f:
pickle.dump(self.classes, f)
else:
self.model.save(f"{model_name}.h5", self.hist)
with open(f'{model_name}_words.pkl', 'wb') as f:
pickle.dump(self.words, f)
with open(f'{model_name}_classes.pkl', 'wb') as f:
pickle.dump(self.classes, f)
def load_model(self, model_name=None):
if model_name is None:
with open(f'{self.model_name}_words.pkl', 'rb') as f:
self.words = pickle.load(f)
with open(f'{self.model_name}_classes.pkl', 'rb') as f:
self.classes = pickle.load(f)
self.model = load_model(f'{self.model_name}.h5')
else:
with open(f'{model_name}_words.pkl', 'rb') as f:
self.words = pickle.load(f)
with open(f'{model_name}_classes.pkl', 'rb') as f:
self.classes = pickle.load(f)
self.model = load_model(f'{model_name}.h5')
def _clean_up_sentence(self, sentence):
sentence_words = nltk.word_tokenize(sentence)
sentence_words = [self.lemmatizer.lemmatize(word.lower()) for word in sentence_words]
return sentence_words
def _bag_of_words(self, sentence, words):
sentence_words = self._clean_up_sentence(sentence)
bag = [0] * len(words)
for s in sentence_words:
for i, word in enumerate(words):
if word == s:
bag[i] = 1
return np.array(bag)
def _predict_class(self, sentence):
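        # Return every intent whose predicted probability exceeds ERROR_THRESHOLD,
        # sorted from most to least likely.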
p = self._bag_of_words(sentence, self.words)
res = self.model.predict(np.array([p]))[0]
ERROR_THRESHOLD = 0.1
results = [[i, r] for i, r in enumerate(res) if r > ERROR_THRESHOLD]
results.sort(key=lambda x: x[1], reverse=True)
return_list = []
for r in results:
return_list.append({'intent': self.classes[r[0]], 'probability': str(r[1])})
return return_list
def _get_response(self, ints, intents_json):
try:
tag = ints[0]['intent']
list_of_intents = intents_json['intents']
for i in list_of_intents:
if i['tag'] == tag:
result = random.choice(i['responses'])
break
except IndexError:
result = "I don't understand!"
return result
def request_tag(self, message):
pass
def get_tag_by_id(self, id):
pass
def request_method(self, message):
pass
def request(self, message):
ints = self._predict_class(message)
if ints[0]['intent'] in self.intent_methods.keys():
self.intent_methods[ints[0]['intent']]()
else:
return self._get_response(ints, self.intents) | [
"random.choice",
"pickle.dump",
"random.shuffle",
"nltk.download",
"nltk.word_tokenize",
"tensorflow.keras.layers.Dropout",
"nltk.stem.WordNetLemmatizer",
"pickle.load",
"tensorflow.keras.optimizers.SGD",
"numpy.array",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.models.load_model",
"json.load",
"tensorflow.keras.models.Sequential"
]
| [((396, 430), 'nltk.download', 'nltk.download', (['"""punkt"""'], {'quiet': '(True)'}), "('punkt', quiet=True)\n", (409, 430), False, 'import nltk\n'), ((431, 467), 'nltk.download', 'nltk.download', (['"""wordnet"""'], {'quiet': '(True)'}), "('wordnet', quiet=True)\n", (444, 467), False, 'import nltk\n'), ((1406, 1425), 'nltk.stem.WordNetLemmatizer', 'WordNetLemmatizer', ([], {}), '()\n', (1423, 1425), False, 'from nltk.stem import WordNetLemmatizer\n'), ((2806, 2830), 'random.shuffle', 'random.shuffle', (['training'], {}), '(training)\n', (2820, 2830), False, 'import random\n'), ((2850, 2868), 'numpy.array', 'np.array', (['training'], {}), '(training)\n', (2858, 2868), True, 'import numpy as np\n'), ((2970, 2982), 'tensorflow.keras.models.Sequential', 'Sequential', ([], {}), '()\n', (2980, 2982), False, 'from tensorflow.keras.models import Sequential\n'), ((3280, 3334), 'tensorflow.keras.optimizers.SGD', 'SGD', ([], {'lr': '(0.01)', 'decay': '(1e-06)', 'momentum': '(0.9)', 'nesterov': '(True)'}), '(lr=0.01, decay=1e-06, momentum=0.9, nesterov=True)\n', (3283, 3334), False, 'from tensorflow.keras.optimizers import SGD\n'), ((4902, 4930), 'nltk.word_tokenize', 'nltk.word_tokenize', (['sentence'], {}), '(sentence)\n', (4920, 4930), False, 'import nltk\n'), ((5346, 5359), 'numpy.array', 'np.array', (['bag'], {}), '(bag)\n', (5354, 5359), True, 'import numpy as np\n'), ((1558, 1570), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1567, 1570), False, 'import json\n'), ((3092, 3104), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (3099, 3104), False, 'from tensorflow.keras.layers import Dense, Dropout\n'), ((3129, 3157), 'tensorflow.keras.layers.Dense', 'Dense', (['(64)'], {'activation': '"""relu"""'}), "(64, activation='relu')\n", (3134, 3157), False, 'from tensorflow.keras.layers import Dense, Dropout\n'), ((3182, 3194), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (3189, 3194), False, 'from tensorflow.keras.layers import Dense, Dropout\n'), ((3467, 3484), 'numpy.array', 'np.array', (['train_x'], {}), '(train_x)\n', (3475, 3484), True, 'import numpy as np\n'), ((3486, 3503), 'numpy.array', 'np.array', (['train_y'], {}), '(train_y)\n', (3494, 3503), True, 'import numpy as np\n'), ((4512, 4547), 'tensorflow.keras.models.load_model', 'load_model', (['f"""{self.model_name}.h5"""'], {}), "(f'{self.model_name}.h5')\n", (4522, 4547), False, 'from tensorflow.keras.models import load_model\n'), ((4801, 4831), 'tensorflow.keras.models.load_model', 'load_model', (['f"""{model_name}.h5"""'], {}), "(f'{model_name}.h5')\n", (4811, 4831), False, 'from tensorflow.keras.models import load_model\n'), ((1837, 1864), 'nltk.word_tokenize', 'nltk.word_tokenize', (['pattern'], {}), '(pattern)\n', (1855, 1864), False, 'import nltk\n'), ((3763, 3789), 'pickle.dump', 'pickle.dump', (['self.words', 'f'], {}), '(self.words, f)\n', (3774, 3789), False, 'import pickle\n'), ((3874, 3902), 'pickle.dump', 'pickle.dump', (['self.classes', 'f'], {}), '(self.classes, f)\n', (3885, 3902), False, 'import pickle\n'), ((4053, 4079), 'pickle.dump', 'pickle.dump', (['self.words', 'f'], {}), '(self.words, f)\n', (4064, 4079), False, 'import pickle\n'), ((4159, 4187), 'pickle.dump', 'pickle.dump', (['self.classes', 'f'], {}), '(self.classes, f)\n', (4170, 4187), False, 'import pickle\n'), ((4358, 4372), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4369, 4372), False, 'import pickle\n'), ((4472, 4486), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4483, 4486), 
False, 'import pickle\n'), ((4652, 4666), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4663, 4666), False, 'import pickle\n'), ((4761, 4775), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4772, 4775), False, 'import pickle\n'), ((5487, 5500), 'numpy.array', 'np.array', (['[p]'], {}), '([p])\n', (5495, 5500), True, 'import numpy as np\n'), ((6092, 6121), 'random.choice', 'random.choice', (["i['responses']"], {}), "(i['responses'])\n", (6105, 6121), False, 'import random\n')] |
"""
@brief Generate Fe55 images and associated darks and bias images
according to section 5.4 of the E/O document (Dec 19, 2012 version).
@author <NAME> <<EMAIL>>
"""
import os
import numpy as np
from sim_inputs import *
from sim_tools import *
def generate_Fe55_images(exptimes, nxrays, outdir, sensorid, gain=gain,
bias_level=bias_level, sys_noise=sys_noise,
dark_current=dark_current):
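    # For each exposure time, write a matched set of bias, dark and Fe55 exposure FITS
    # files, recording the gain, bias level and noise settings in the primary headers.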
nexp = len(exptimes)
for i, exptime, nxray in zip(list(range(nexp)), exptimes, nxrays):
#
# Bias images
#
outfile = "Fe55_bias_%s_%02i.fits" % (sensorid, i)
bias_file = os.path.join(outdir, outfile)
bias_segs = []
for hdu in range(nhdu):
seg = SegmentExposure(exptime=0, gain=gain)
seg.add_bias(level=bias_level, sigma=sys_noise) # electronics
seg.add_bias(level=0, sigma=read_noise) # read noise
bias_segs.append(seg)
bias_output = fitsFile(bias_segs)
bias_output[0].header['GAIN'] = gain
bias_output[0].header['BIASLVL'] = bias_level
bias_output[0].header['SYSNOISE'] = sys_noise
bias_output[0].header['RDNOISE'] = read_noise
bias_output.writeto(bias_file, overwrite=True)
#
# Dark images
#
outfile = "Fe55_dark_%s_%02i.fits" % (sensorid, i)
dark_file = os.path.join(outdir, outfile)
dark_segs = []
for hdu in range(nhdu):
seg = SegmentExposure(exptime=exptime, gain=gain)
seg.add_bias(level=bias_level, sigma=sys_noise) # electronics
seg.add_bias(level=0, sigma=read_noise) # read noise
seg.add_dark_current(level=dark_current) # dark current
dark_segs.append(seg)
dark_output = fitsFile(dark_segs)
dark_output[0].header['GAIN'] = gain
dark_output[0].header['BIASLVL'] = bias_level
dark_output[0].header['SYSNOISE'] = sys_noise
dark_output[0].header['RDNOISE'] = read_noise
dark_output[0].header['DARKCURR'] = dark_current
dark_output.writeto(dark_file, overwrite=True)
#
# Fe55 exposures
#
outfile = "Fe55_exp_%s_%02i.fits" % (sensorid, i)
Fe55_file = os.path.join(outdir, outfile)
fe55_segs = []
for hdu in range(nhdu):
seg = SegmentExposure(exptime=exptime, gain=gain)
seg.add_bias(level=bias_level, sigma=sys_noise) # electronics
seg.add_bias(level=0, sigma=read_noise) # read noise
seg.add_dark_current(level=dark_current) # dark current
seg.add_Fe55_hits(nxrays=nxray)
fe55_segs.append(seg)
fe55_output = fitsFile(fe55_segs)
fe55_output[0].header['GAIN'] = gain
fe55_output[0].header['BIASLVL'] = bias_level
fe55_output[0].header['SYSNOISE'] = sys_noise
fe55_output[0].header['RDNOISE'] = read_noise
fe55_output[0].header['DARKCURR'] = dark_current
fe55_output[0].header['FE55HITS'] = nxray
fe55_output.writeto(Fe55_file, overwrite=True)
if __name__ == '__main__':
nexp = 10
exptimes = np.linspace(1, 5, nexp)
nxrays = [int(x*1000) for x in exptimes]
generate_Fe55_images(exptimes, nxrays, '.', 'xxx-xx')
| [
"numpy.linspace",
"os.path.join"
]
| [((3210, 3233), 'numpy.linspace', 'np.linspace', (['(1)', '(5)', 'nexp'], {}), '(1, 5, nexp)\n', (3221, 3233), True, 'import numpy as np\n'), ((659, 688), 'os.path.join', 'os.path.join', (['outdir', 'outfile'], {}), '(outdir, outfile)\n', (671, 688), False, 'import os\n'), ((1406, 1435), 'os.path.join', 'os.path.join', (['outdir', 'outfile'], {}), '(outdir, outfile)\n', (1418, 1435), False, 'import os\n'), ((2293, 2322), 'os.path.join', 'os.path.join', (['outdir', 'outfile'], {}), '(outdir, outfile)\n', (2305, 2322), False, 'import os\n')] |
import math
import numpy as np
import torch
import torch.nn as nn
from ....ops.pointnet2.pointnet2_stack import pointnet2_modules as pointnet2_stack_modules
from ....ops.pointnet2.pointnet2_stack import pointnet2_utils as pointnet2_stack_utils
from ....utils import common_utils
from ...backbones_2d.transformer import TransformerEncoderLayer3D, TransformerEncoder
from ...roi_heads.target_assigner.proposal_target_layer import ProposalTargetLayer
from ...model_utils.model_nms_utils import class_agnostic_nms
def bilinear_interpolate_torch(im, x, y):
"""
Args:
im: (H, W, C) [y, x]
x: (N)
y: (N)
    Returns:
        ans: (N, C) values of im bilinearly interpolated at the given (x, y) points
    """
x0 = torch.floor(x).long()
x1 = x0 + 1
y0 = torch.floor(y).long()
y1 = y0 + 1
x0 = torch.clamp(x0, 0, im.shape[1] - 1)
x1 = torch.clamp(x1, 0, im.shape[1] - 1)
y0 = torch.clamp(y0, 0, im.shape[0] - 1)
y1 = torch.clamp(y1, 0, im.shape[0] - 1)
Ia = im[y0, x0]
Ib = im[y1, x0]
Ic = im[y0, x1]
Id = im[y1, x1]
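    # Standard bilinear weights: each corner value is weighted by the area of the
    # sub-rectangle opposite to it.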
wa = (x1.type_as(x) - x) * (y1.type_as(y) - y)
wb = (x1.type_as(x) - x) * (y - y0.type_as(y))
wc = (x - x0.type_as(x)) * (y1.type_as(y) - y)
wd = (x - x0.type_as(x)) * (y - y0.type_as(y))
ans = torch.t((torch.t(Ia) * wa)) + torch.t(torch.t(Ib) * wb) + torch.t(torch.t(Ic) * wc) + torch.t(torch.t(Id) * wd)
return ans
def sample_points_with_roi(rois, points, sample_radius_with_roi, num_max_points_of_part=200000):
"""
Args:
rois: (M, 7 + C)
points: (N, 3)
sample_radius_with_roi:
num_max_points_of_part:
Returns:
sampled_points: (N_out, 3)
"""
if points.shape[0] < num_max_points_of_part:
distance = (points[:, None, :] - rois[None, :, 0:3]).norm(dim=-1)
min_dis, min_dis_roi_idx = distance.min(dim=-1)
roi_max_dim = (rois[min_dis_roi_idx, 3:6] / 2).norm(dim=-1)
point_mask = min_dis < roi_max_dim + sample_radius_with_roi
else:
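        # Process the points in chunks so the (N, M) distance matrix stays bounded in memory.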
start_idx = 0
point_mask_list = []
while start_idx < points.shape[0]:
distance = (points[start_idx:start_idx + num_max_points_of_part, None, :] - rois[None, :, 0:3]).norm(dim=-1)
min_dis, min_dis_roi_idx = distance.min(dim=-1)
roi_max_dim = (rois[min_dis_roi_idx, 3:6] / 2).norm(dim=-1)
cur_point_mask = min_dis < roi_max_dim + sample_radius_with_roi
point_mask_list.append(cur_point_mask)
start_idx += num_max_points_of_part
point_mask = torch.cat(point_mask_list, dim=0)
sampled_points = points[:1] if point_mask.sum() == 0 else points[point_mask, :]
return sampled_points, point_mask
def sector_fps(points, num_sampled_points, num_sectors):
"""
Args:
points: (N, 3)
num_sampled_points: int
num_sectors: int
Returns:
sampled_points: (N_out, 3)
"""
sector_size = np.pi * 2 / num_sectors
point_angles = torch.atan2(points[:, 1], points[:, 0]) + np.pi
sector_idx = (point_angles / sector_size).floor().clamp(min=0, max=num_sectors)
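    # Bin points into angular sectors; each sector is FPS-sampled in proportion
    # to its share of the total points.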
xyz_points_list = []
xyz_batch_cnt = []
num_sampled_points_list = []
for k in range(num_sectors):
mask = (sector_idx == k)
cur_num_points = mask.sum().item()
if cur_num_points > 0:
xyz_points_list.append(points[mask])
xyz_batch_cnt.append(cur_num_points)
ratio = cur_num_points / points.shape[0]
num_sampled_points_list.append(
min(cur_num_points, math.ceil(ratio * num_sampled_points))
)
if len(xyz_batch_cnt) == 0:
xyz_points_list.append(points)
xyz_batch_cnt.append(len(points))
num_sampled_points_list.append(num_sampled_points)
print(f'Warning: empty sector points detected in SectorFPS: points.shape={points.shape}')
xyz = torch.cat(xyz_points_list, dim=0)
xyz_batch_cnt = torch.tensor(xyz_batch_cnt, device=points.device).int()
sampled_points_batch_cnt = torch.tensor(num_sampled_points_list, device=points.device).int()
sampled_pt_idxs = pointnet2_stack_utils.stack_farthest_point_sample(
xyz.contiguous(), xyz_batch_cnt, sampled_points_batch_cnt
).long()
sampled_points = xyz[sampled_pt_idxs]
return sampled_points
class VoxelSetAbstractionTransFusionv5(nn.Module):
def __init__(self, model_cfg, voxel_size, point_cloud_range, num_bev_features=None,
num_rawpoint_features=None, **kwargs):
super().__init__()
self.model_cfg = model_cfg
self.voxel_size = voxel_size
self.point_cloud_range = point_cloud_range
SA_cfg = self.model_cfg.SA_LAYER
self.SA_layers = nn.ModuleList()
self.linears_in = nn.ModuleList()
self.linears_out = nn.ModuleList()
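        # linears_in / linears_out project each feature source to / from the common
        # fusion width used by the transformer fusion layer (Identity if it already matches).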
self.fusion_channel = sum([x[-1] for x in SA_cfg[self.model_cfg.FEATURES_SOURCE[-2]].MLPS])
# self.fusion_channel = 16
self.SA_layer_names = []
self.downsample_times_map = {}
c_in = 0
if 'bev' in self.model_cfg.FEATURES_SOURCE:
c_bev = num_bev_features
c_in += c_bev
if c_bev == self.fusion_channel:
self.linears_in.append(nn.Identity())
self.linears_out.append(nn.Identity())
else:
self.linears_in.append(nn.Sequential(
nn.Linear(c_bev, self.fusion_channel, bias=False),
nn.BatchNorm1d(self.fusion_channel)))
self.linears_out.append(nn.Sequential(
nn.Linear(self.fusion_channel, c_bev, bias=False),
nn.BatchNorm1d(c_bev)))
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
mlps = SA_cfg['raw_points'].MLPS
for k in range(len(mlps)):
mlps[k] = [num_rawpoint_features - 3] + mlps[k]
self.SA_rawpoints = pointnet2_stack_modules.StackSAModuleMSG(
radii=SA_cfg['raw_points'].POOL_RADIUS,
nsamples=SA_cfg['raw_points'].NSAMPLE,
mlps=mlps,
use_xyz=True,
pool_method='max_pool'
)
cur = sum([x[-1] for x in mlps])
if cur == self.fusion_channel:
self.linears_in.append(nn.Identity())
self.linears_out.append(nn.Identity())
else:
self.linears_in.append(nn.Sequential(
nn.Linear(cur, self.fusion_channel, bias=False),
nn.BatchNorm1d(self.fusion_channel)))
self.linears_out.append(nn.Sequential(
nn.Linear(self.fusion_channel, cur, bias=False),
nn.BatchNorm1d(cur)))
c_in += cur
for src_name in self.model_cfg.FEATURES_SOURCE:
if src_name in ['bev', 'raw_points']:
continue
self.downsample_times_map[src_name] = SA_cfg[src_name].DOWNSAMPLE_FACTOR
mlps = SA_cfg[src_name].MLPS
for k in range(len(mlps)):
mlps[k] = [mlps[k][0]] + mlps[k]
cur_layer = pointnet2_stack_modules.StackSAModuleMSG(
radii=SA_cfg[src_name].POOL_RADIUS,
nsamples=SA_cfg[src_name].NSAMPLE,
mlps=mlps,
use_xyz=True,
pool_method='max_pool',
)
self.SA_layers.append(cur_layer)
cur = sum([x[-1] for x in mlps])
if cur == self.fusion_channel:
self.linears_in.append(nn.Identity())
self.linears_out.append(nn.Identity())
else:
self.linears_in.append(nn.Sequential(
nn.Linear(cur, self.fusion_channel, bias=False),
nn.BatchNorm1d(self.fusion_channel)))
self.linears_out.append(nn.Sequential(
nn.Linear(self.fusion_channel, cur, bias=False),
nn.BatchNorm1d(cur)))
self.SA_layer_names.append(src_name)
c_in += cur
self.vsa_point_feature_fusion = nn.Sequential(
nn.Linear(c_in, self.model_cfg.NUM_OUTPUT_FEATURES, bias=False),
nn.BatchNorm1d(self.model_cfg.NUM_OUTPUT_FEATURES),
nn.ReLU(),
)
self.num_point_features = self.model_cfg.NUM_OUTPUT_FEATURES
self.num_point_features_before_fusion = c_in
if self.model_cfg.NORM:
self.transnorm = nn.LayerNorm(c_in)
else:
self.transnorm = None
if self.model_cfg.NORM2:
self.transnorm2 = nn.LayerNorm(self.fusion_channel)
else:
self.transnorm2 = None
# multi_location
self.trans_layer = TransformerEncoder(TransformerEncoderLayer3D(c_in, self.model_cfg.FUSION_HEAD), self.model_cfg.NUM_LAYERS, self.transnorm)
# have multi-modality + multi-scale
self.trans_fusion_layer = TransformerEncoder(TransformerEncoderLayer3D(self.fusion_channel, self.model_cfg.FUSION2_HEAD), self.model_cfg.NUM_LAYERS2, self.transnorm2)
self.reduce_radius = self.model_cfg.REDUCE_RADIUS**2
self.topks = self.model_cfg.NMS_CONFIG.TOPK
self.max_keypoints = self.model_cfg.NMS_CONFIG.MAX_POINTS
self.res1_actn_1 = nn.Sequential(
nn.LayerNorm(c_in),
nn.ReLU())
self.res1_actn_2 = nn.Sequential(
nn.LayerNorm(c_in),
nn.ReLU())
def interpolate_from_bev_features(self, keypoints, bev_features, batch_size, bev_stride):
x_idxs = (keypoints[:, :, 0] - self.point_cloud_range[0]) / self.voxel_size[0]
y_idxs = (keypoints[:, :, 1] - self.point_cloud_range[1]) / self.voxel_size[1]
x_idxs = x_idxs / bev_stride
y_idxs = y_idxs / bev_stride
point_bev_features_list = []
for k in range(batch_size):
cur_x_idxs = x_idxs[k]
cur_y_idxs = y_idxs[k]
cur_bev_features = bev_features[k].permute(1, 2, 0) # (H, W, C)
point_bev_features = bilinear_interpolate_torch(cur_bev_features, cur_x_idxs, cur_y_idxs)
point_bev_features_list.append(point_bev_features.unsqueeze(dim=0))
point_bev_features = torch.cat(point_bev_features_list, dim=0) # (B, N, C0)
return point_bev_features
def get_sampled_points(self, batch_dict):
batch_size = batch_dict['batch_size']
if self.model_cfg.POINT_SOURCE == 'raw_points':
src_points = batch_dict['points'][:, 1:4]
batch_indices = batch_dict['points'][:, 0].long()
elif self.model_cfg.POINT_SOURCE == 'voxel_centers':
src_points = common_utils.get_voxel_centers(
batch_dict['voxel_coords'][:, 1:4],
downsample_times=1,
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
batch_indices = batch_dict['voxel_coords'][:, 0].long()
else:
raise NotImplementedError
keypoints_list = []
for bs_idx in range(batch_size):
bs_mask = (batch_indices == bs_idx)
sampled_points = src_points[bs_mask].unsqueeze(dim=0) # (1, N, 3)
if self.model_cfg.SAMPLE_METHOD == 'FPS':
cur_pt_idxs = pointnet2_stack_utils.furthest_point_sample(
sampled_points[:, :, 0:3].contiguous(), self.model_cfg.NUM_KEYPOINTS
).long()
if sampled_points.shape[1] < self.model_cfg.NUM_KEYPOINTS:
empty_num = self.model_cfg.NUM_KEYPOINTS - sampled_points.shape[1]
cur_pt_idxs[0, -empty_num:] = cur_pt_idxs[0, :empty_num]
keypoints = sampled_points[0][cur_pt_idxs[0]].unsqueeze(dim=0)
elif self.model_cfg.SAMPLE_METHOD == 'FastFPS':
raise NotImplementedError
else:
raise NotImplementedError
keypoints_list.append(keypoints)
keypoints = torch.cat(keypoints_list, dim=0) # (B, M, 3)
return keypoints
def get_sampled_points_post(self, batch_dict, keypoints):
batch_size = batch_dict['batch_size']
src_points = keypoints
keypoints_list = []
for bs_idx in range(batch_size):
sampled_points = src_points[bs_idx].unsqueeze(dim=0) # (1, N, 3)
if sampled_points.shape[1] < self.max_keypoints:
cur_count = sampled_points.shape[1]
cur_pt_idxs = torch.arange(0, self.max_keypoints)
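                # Fewer points than requested: tile the available indices until
                # all max_keypoints slots are filled.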
empty_num = self.max_keypoints - cur_count
while empty_num >= cur_count:
cur_pt_idxs[cur_count:cur_count * 2] = cur_pt_idxs[:cur_count]
empty_num -= cur_count
cur_count *= 2
if cur_count < self.max_keypoints:
assert empty_num == self.max_keypoints - cur_count
cur_pt_idxs[-empty_num:] = cur_pt_idxs[:empty_num]
keypoint = sampled_points[0][cur_pt_idxs].unsqueeze(dim=0)
else:
cur_pt_idxs = pointnet2_stack_utils.furthest_point_sample(
sampled_points[:, :, 0:3].contiguous(), self.max_keypoints
).long()
if sampled_points.shape[1] < self.max_keypoints:
empty_num = self.max_keypoints - sampled_points.shape[1]
cur_pt_idxs[0, -empty_num:] = cur_pt_idxs[0, :empty_num]
keypoint = sampled_points[0][cur_pt_idxs[0]].unsqueeze(dim=0)
keypoints_list.append(keypoint)
keypoint = torch.cat(keypoints_list, dim=0) # (B, M, 3)
return keypoint
def reduce_points(self, batch_dict):
batch_indices = batch_dict['points'][:, 0].long()
masks = []
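        # Keep only raw points whose squared distance to the nearest top-k predicted
        # box center is within the (squared) reduce radius.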
for bs_idx, roi in enumerate(batch_dict['batch_cls_preds']):
bs_mask = (batch_indices == bs_idx)
pts = batch_dict['points'][bs_mask].unsqueeze(dim=1)[:, :, 1: 4] # (N, 1, 3)
s, _ = torch.max(batch_dict['batch_cls_preds'][bs_idx], dim=1)
top, idx = torch.topk(s, self.topks)
c = batch_dict['batch_box_preds'][bs_idx][idx][:, :3].unsqueeze(dim=0)
dist = (pts - c)**2
dist, _ = dist.sum(dim=-1).min(dim=1)
mask = (dist <= self.reduce_radius)
masks.extend(mask)
batch_dict['points'] = batch_dict['points'][masks]
return batch_dict
def reduce_points_post(self, keypoints, batch_dict):
keypoints_list = []
for bs_idx, roi in enumerate(batch_dict['batch_cls_preds']):
pts = keypoints[bs_idx].unsqueeze(dim=1)
s, _ = torch.max(batch_dict['batch_cls_preds'][bs_idx], dim=1)
top, idx = torch.topk(s, self.topks)
c = batch_dict['batch_box_preds'][bs_idx][idx][:, :3].unsqueeze(dim=0)
dist = (pts - c)**2
dist, _ = dist.sum(dim=-1).min(dim=1)
mask = (dist <= self.reduce_radius)
keypoints_list.append(keypoints[bs_idx][mask])
return keypoints_list
def forward(self, batch_dict):
"""
Args:
batch_dict:
batch_size:
keypoints: (B, num_keypoints, 3)
multi_scale_3d_features: {
'x_conv4': ...
}
points: optional (N, 1 + 3 + C) [bs_idx, x, y, z, ...]
spatial_features: optional
spatial_features_stride: optional
Returns:
point_features: (N, C)
point_coords: (N, 4)
"""
if self.model_cfg.POINT_SOURCE == 'raw_points' and self.reduce_radius > 0:
# batch_dict = self.reduce_points(batch_dict)
keypoints = self.get_sampled_points(batch_dict)
keypoint_lst = self.reduce_points_post(keypoints, batch_dict)
keypoints = self.get_sampled_points_post(batch_dict, keypoint_lst)
else:
keypoints = self.get_sampled_points(batch_dict)
point_features_list = []
if 'bev' in self.model_cfg.FEATURES_SOURCE:
point_bev_features = self.interpolate_from_bev_features(
keypoints, batch_dict['spatial_features'], batch_dict['batch_size'],
bev_stride=batch_dict['spatial_features_stride']
)
point_features_list.append(point_bev_features)
batch_size, num_keypoints, _ = keypoints.shape
new_xyz = keypoints.view(-1, 3)
new_xyz_batch_cnt = new_xyz.new_zeros(batch_size).int().fill_(num_keypoints)
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
raw_points = batch_dict['points']
xyz = raw_points[:, 1:4]
xyz_batch_cnt = xyz.new_zeros(batch_size).int()
for bs_idx in range(batch_size):
xyz_batch_cnt[bs_idx] = (raw_points[:, 0] == bs_idx).sum()
point_features = raw_points[:, 4:].contiguous() if raw_points.shape[1] > 4 else None
pooled_points, pooled_features = self.SA_rawpoints(
xyz=xyz.contiguous(),
xyz_batch_cnt=xyz_batch_cnt,
new_xyz=new_xyz,
new_xyz_batch_cnt=new_xyz_batch_cnt,
features=point_features,
)
point_features_list.append(pooled_features.view(batch_size, num_keypoints, -1))
for k, src_name in enumerate(self.SA_layer_names):
cur_coords = batch_dict['multi_scale_3d_features'][src_name].indices
xyz = common_utils.get_voxel_centers(
cur_coords[:, 1:4],
downsample_times=self.downsample_times_map[src_name],
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
xyz_batch_cnt = xyz.new_zeros(batch_size).int()
for bs_idx in range(batch_size):
xyz_batch_cnt[bs_idx] = (cur_coords[:, 0] == bs_idx).sum()
pooled_points, pooled_features = self.SA_layers[k](
xyz=xyz.contiguous(),
xyz_batch_cnt=xyz_batch_cnt,
new_xyz=new_xyz,
new_xyz_batch_cnt=new_xyz_batch_cnt,
features=batch_dict['multi_scale_3d_features'][src_name].features.contiguous(),
)
point_features_list.append(pooled_features.view(batch_size, num_keypoints, -1))
point_features_list_new = []
for i, x in enumerate(point_features_list):
feat = self.linears_in[i](x.view(batch_size * num_keypoints, -1))
point_features_list_new.append(feat.view(1, batch_size * num_keypoints, -1))
fusion_feat = torch.cat(point_features_list_new, dim=0)
# have multi-modality + multi-scale
trans1_feat_list = self.trans_fusion_layer(fusion_feat).view(len(fusion_feat), batch_size, num_keypoints, -1)
trans1_feat_projected_list = []
for i, x in enumerate(trans1_feat_list):
feat = self.linears_out[i](x.view(batch_size * num_keypoints, -1))
trans1_feat_projected_list.append(feat.view(batch_size, num_keypoints, -1))
# multi_location
point_features_main1 = torch.cat(point_features_list, dim=2)
point_features_res1 = self.res1_actn_1(torch.cat(trans1_feat_projected_list, dim=2))
point_features_main2 = point_features_res1 + point_features_main1
point_features_res2 = self.res1_actn_2(self.trans_layer(point_features_main2.permute(1, 0, 2)).permute(1, 0, 2))
point_features = point_features_main2 + point_features_res2
batch_idx = torch.arange(batch_size, device=keypoints.device).view(-1, 1).repeat(1, keypoints.shape[1]).view(-1)
point_coords = torch.cat((batch_idx.view(-1, 1).float(), keypoints.view(-1, 3)), dim=1)
batch_dict['point_features_before_fusion'] = point_features.reshape(-1, point_features.shape[-1])
point_features = self.vsa_point_feature_fusion(point_features.reshape(-1, point_features.shape[-1]))
batch_dict['point_features'] = point_features # (BxN, C)
batch_dict['point_coords'] = point_coords # (BxN, 4)
return batch_dict
class VoxelSetAbstraction(nn.Module):
def __init__(self, model_cfg, voxel_size, point_cloud_range, num_bev_features=None,
num_rawpoint_features=None, **kwargs):
super().__init__()
self.model_cfg = model_cfg
self.voxel_size = voxel_size
self.point_cloud_range = point_cloud_range
SA_cfg = self.model_cfg.SA_LAYER
self.SA_layers = nn.ModuleList()
self.SA_layer_names = []
self.downsample_times_map = {}
c_in = 0
for src_name in self.model_cfg.FEATURES_SOURCE:
if src_name in ['bev', 'raw_points']:
continue
self.downsample_times_map[src_name] = SA_cfg[src_name].DOWNSAMPLE_FACTOR
if SA_cfg[src_name].get('INPUT_CHANNELS', None) is None:
input_channels = SA_cfg[src_name].MLPS[0][0] \
if isinstance(SA_cfg[src_name].MLPS[0], list) else SA_cfg[src_name].MLPS[0]
else:
input_channels = SA_cfg[src_name]['INPUT_CHANNELS']
cur_layer, cur_num_c_out = pointnet2_stack_modules.build_local_aggregation_module(
input_channels=input_channels, config=SA_cfg[src_name]
)
self.SA_layers.append(cur_layer)
self.SA_layer_names.append(src_name)
c_in += cur_num_c_out
if 'bev' in self.model_cfg.FEATURES_SOURCE:
c_bev = num_bev_features
c_in += c_bev
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
self.SA_rawpoints, cur_num_c_out = pointnet2_stack_modules.build_local_aggregation_module(
input_channels=num_rawpoint_features - 3, config=SA_cfg['raw_points']
)
c_in += cur_num_c_out
self.vsa_point_feature_fusion = nn.Sequential(
nn.Linear(c_in, self.model_cfg.NUM_OUTPUT_FEATURES, bias=False),
nn.BatchNorm1d(self.model_cfg.NUM_OUTPUT_FEATURES),
nn.ReLU(),
)
self.num_point_features = self.model_cfg.NUM_OUTPUT_FEATURES
self.num_point_features_before_fusion = c_in
def interpolate_from_bev_features(self, keypoints, bev_features, batch_size, bev_stride):
"""
Args:
keypoints: (N1 + N2 + ..., 4)
bev_features: (B, C, H, W)
batch_size:
bev_stride:
Returns:
point_bev_features: (N1 + N2 + ..., C)
"""
x_idxs = (keypoints[:, 1] - self.point_cloud_range[0]) / self.voxel_size[0]
y_idxs = (keypoints[:, 2] - self.point_cloud_range[1]) / self.voxel_size[1]
x_idxs = x_idxs / bev_stride
y_idxs = y_idxs / bev_stride
point_bev_features_list = []
for k in range(batch_size):
bs_mask = (keypoints[:, 0] == k)
cur_x_idxs = x_idxs[bs_mask]
cur_y_idxs = y_idxs[bs_mask]
cur_bev_features = bev_features[k].permute(1, 2, 0) # (H, W, C)
point_bev_features = bilinear_interpolate_torch(cur_bev_features, cur_x_idxs, cur_y_idxs)
point_bev_features_list.append(point_bev_features)
point_bev_features = torch.cat(point_bev_features_list, dim=0) # (N1 + N2 + ..., C)
return point_bev_features
def sectorized_proposal_centric_sampling(self, roi_boxes, points):
"""
Args:
roi_boxes: (M, 7 + C)
points: (N, 3)
Returns:
sampled_points: (N_out, 3)
"""
sampled_points, _ = sample_points_with_roi(
rois=roi_boxes, points=points,
sample_radius_with_roi=self.model_cfg.SPC_SAMPLING.SAMPLE_RADIUS_WITH_ROI,
num_max_points_of_part=self.model_cfg.SPC_SAMPLING.get('NUM_POINTS_OF_EACH_SAMPLE_PART', 200000)
)
sampled_points = sector_fps(
points=sampled_points, num_sampled_points=self.model_cfg.NUM_KEYPOINTS,
num_sectors=self.model_cfg.SPC_SAMPLING.NUM_SECTORS
)
return sampled_points
def get_sampled_points(self, batch_dict):
"""
Args:
batch_dict:
Returns:
keypoints: (N1 + N2 + ..., 4), where 4 indicates [bs_idx, x, y, z]
"""
batch_size = batch_dict['batch_size']
if self.model_cfg.POINT_SOURCE == 'raw_points':
src_points = batch_dict['points'][:, 1:4]
batch_indices = batch_dict['points'][:, 0].long()
elif self.model_cfg.POINT_SOURCE == 'voxel_centers':
src_points = common_utils.get_voxel_centers(
batch_dict['voxel_coords'][:, 1:4],
downsample_times=1,
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
batch_indices = batch_dict['voxel_coords'][:, 0].long()
else:
raise NotImplementedError
keypoints_list = []
for bs_idx in range(batch_size):
bs_mask = (batch_indices == bs_idx)
sampled_points = src_points[bs_mask].unsqueeze(dim=0) # (1, N, 3)
if self.model_cfg.SAMPLE_METHOD == 'FPS':
cur_pt_idxs = pointnet2_stack_utils.farthest_point_sample(
sampled_points[:, :, 0:3].contiguous(), self.model_cfg.NUM_KEYPOINTS
).long()
if sampled_points.shape[1] < self.model_cfg.NUM_KEYPOINTS:
times = int(self.model_cfg.NUM_KEYPOINTS / sampled_points.shape[1]) + 1
non_empty = cur_pt_idxs[0, :sampled_points.shape[1]]
cur_pt_idxs[0] = non_empty.repeat(times)[:self.model_cfg.NUM_KEYPOINTS]
keypoints = sampled_points[0][cur_pt_idxs[0]].unsqueeze(dim=0)
elif self.model_cfg.SAMPLE_METHOD == 'SPC':
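                # Sectorized Proposal-Centric sampling: keep points near the ROIs,
                # then run sector-based farthest point sampling.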
cur_keypoints = self.sectorized_proposal_centric_sampling(
roi_boxes=batch_dict['rois'][bs_idx], points=sampled_points[0]
)
bs_idxs = cur_keypoints.new_ones(cur_keypoints.shape[0]) * bs_idx
keypoints = torch.cat((bs_idxs[:, None], cur_keypoints), dim=1)
else:
raise NotImplementedError
keypoints_list.append(keypoints)
keypoints = torch.cat(keypoints_list, dim=0) # (B, M, 3) or (N1 + N2 + ..., 4)
if len(keypoints.shape) == 3:
batch_idx = torch.arange(batch_size, device=keypoints.device).view(-1, 1).repeat(1, keypoints.shape[1]).view(-1, 1)
keypoints = torch.cat((batch_idx.float(), keypoints.view(-1, 3)), dim=1)
return keypoints
@staticmethod
def aggregate_keypoint_features_from_one_source(
batch_size, aggregate_func, xyz, xyz_features, xyz_bs_idxs, new_xyz, new_xyz_batch_cnt,
filter_neighbors_with_roi=False, radius_of_neighbor=None, num_max_points_of_part=200000, rois=None
):
"""
Args:
aggregate_func:
xyz: (N, 3)
xyz_features: (N, C)
xyz_bs_idxs: (N)
new_xyz: (M, 3)
new_xyz_batch_cnt: (batch_size), [N1, N2, ...]
filter_neighbors_with_roi: True/False
radius_of_neighbor: float
num_max_points_of_part: int
rois: (batch_size, num_rois, 7 + C)
        Returns:
            pooled_features: (M, C_out) features pooled around each new_xyz keypoint
        """
xyz_batch_cnt = xyz.new_zeros(batch_size).int()
if filter_neighbors_with_roi:
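            # Keep only points within radius_of_neighbor of an ROI of the same sample
            # before aggregation.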
point_features = torch.cat((xyz, xyz_features), dim=-1) if xyz_features is not None else xyz
point_features_list = []
for bs_idx in range(batch_size):
bs_mask = (xyz_bs_idxs == bs_idx)
_, valid_mask = sample_points_with_roi(
rois=rois[bs_idx], points=xyz[bs_mask],
sample_radius_with_roi=radius_of_neighbor, num_max_points_of_part=num_max_points_of_part,
)
point_features_list.append(point_features[bs_mask][valid_mask])
xyz_batch_cnt[bs_idx] = valid_mask.sum()
valid_point_features = torch.cat(point_features_list, dim=0)
xyz = valid_point_features[:, 0:3]
xyz_features = valid_point_features[:, 3:] if xyz_features is not None else None
else:
for bs_idx in range(batch_size):
xyz_batch_cnt[bs_idx] = (xyz_bs_idxs == bs_idx).sum()
pooled_points, pooled_features = aggregate_func(
xyz=xyz.contiguous(),
xyz_batch_cnt=xyz_batch_cnt,
new_xyz=new_xyz,
new_xyz_batch_cnt=new_xyz_batch_cnt,
features=xyz_features.contiguous(),
)
return pooled_features
def forward(self, batch_dict):
"""
Args:
batch_dict:
batch_size:
keypoints: (B, num_keypoints, 3)
multi_scale_3d_features: {
'x_conv4': ...
}
points: optional (N, 1 + 3 + C) [bs_idx, x, y, z, ...]
spatial_features: optional
spatial_features_stride: optional
Returns:
point_features: (N, C)
point_coords: (N, 4)
"""
keypoints = self.get_sampled_points(batch_dict)
point_features_list = []
if 'bev' in self.model_cfg.FEATURES_SOURCE:
point_bev_features = self.interpolate_from_bev_features(
keypoints, batch_dict['spatial_features'], batch_dict['batch_size'],
bev_stride=batch_dict['spatial_features_stride']
)
point_features_list.append(point_bev_features)
batch_size = batch_dict['batch_size']
new_xyz = keypoints[:, 1:4].contiguous()
new_xyz_batch_cnt = new_xyz.new_zeros(batch_size).int()
for k in range(batch_size):
new_xyz_batch_cnt[k] = (keypoints[:, 0] == k).sum()
if 'raw_points' in self.model_cfg.FEATURES_SOURCE:
raw_points = batch_dict['points']
pooled_features = self.aggregate_keypoint_features_from_one_source(
batch_size=batch_size, aggregate_func=self.SA_rawpoints,
xyz=raw_points[:, 1:4],
xyz_features=raw_points[:, 4:].contiguous() if raw_points.shape[1] > 4 else None,
xyz_bs_idxs=raw_points[:, 0],
new_xyz=new_xyz, new_xyz_batch_cnt=new_xyz_batch_cnt,
filter_neighbors_with_roi=self.model_cfg.SA_LAYER['raw_points'].get('FILTER_NEIGHBOR_WITH_ROI', False),
radius_of_neighbor=self.model_cfg.SA_LAYER['raw_points'].get('RADIUS_OF_NEIGHBOR_WITH_ROI', None),
rois=batch_dict.get('rois', None)
)
point_features_list.append(pooled_features)
for k, src_name in enumerate(self.SA_layer_names):
cur_coords = batch_dict['multi_scale_3d_features'][src_name].indices
cur_features = batch_dict['multi_scale_3d_features'][src_name].features.contiguous()
xyz = common_utils.get_voxel_centers(
cur_coords[:, 1:4], downsample_times=self.downsample_times_map[src_name],
voxel_size=self.voxel_size, point_cloud_range=self.point_cloud_range
)
pooled_features = self.aggregate_keypoint_features_from_one_source(
batch_size=batch_size, aggregate_func=self.SA_layers[k],
xyz=xyz.contiguous(), xyz_features=cur_features, xyz_bs_idxs=cur_coords[:, 0],
new_xyz=new_xyz, new_xyz_batch_cnt=new_xyz_batch_cnt,
filter_neighbors_with_roi=self.model_cfg.SA_LAYER[src_name].get('FILTER_NEIGHBOR_WITH_ROI', False),
radius_of_neighbor=self.model_cfg.SA_LAYER[src_name].get('RADIUS_OF_NEIGHBOR_WITH_ROI', None),
rois=batch_dict.get('rois', None)
)
point_features_list.append(pooled_features)
point_features = torch.cat(point_features_list, dim=-1)
batch_dict['point_features_before_fusion'] = point_features.view(-1, point_features.shape[-1])
point_features = self.vsa_point_feature_fusion(point_features.view(-1, point_features.shape[-1]))
batch_dict['point_features'] = point_features # (BxN, C)
batch_dict['point_coords'] = keypoints # (BxN, 4)
return batch_dict
| [
"torch.nn.ReLU",
"math.ceil",
"torch.nn.ModuleList",
"torch.topk",
"torch.atan2",
"torch.floor",
"torch.nn.LayerNorm",
"torch.max",
"torch.tensor",
"torch.nn.BatchNorm1d",
"torch.arange",
"torch.nn.Linear",
"torch.nn.Identity",
"torch.t",
"torch.clamp",
"torch.cat"
]
| [((756, 791), 'torch.clamp', 'torch.clamp', (['x0', '(0)', '(im.shape[1] - 1)'], {}), '(x0, 0, im.shape[1] - 1)\n', (767, 791), False, 'import torch\n'), ((801, 836), 'torch.clamp', 'torch.clamp', (['x1', '(0)', '(im.shape[1] - 1)'], {}), '(x1, 0, im.shape[1] - 1)\n', (812, 836), False, 'import torch\n'), ((846, 881), 'torch.clamp', 'torch.clamp', (['y0', '(0)', '(im.shape[0] - 1)'], {}), '(y0, 0, im.shape[0] - 1)\n', (857, 881), False, 'import torch\n'), ((891, 926), 'torch.clamp', 'torch.clamp', (['y1', '(0)', '(im.shape[0] - 1)'], {}), '(y1, 0, im.shape[0] - 1)\n', (902, 926), False, 'import torch\n'), ((3854, 3887), 'torch.cat', 'torch.cat', (['xyz_points_list'], {'dim': '(0)'}), '(xyz_points_list, dim=0)\n', (3863, 3887), False, 'import torch\n'), ((2503, 2536), 'torch.cat', 'torch.cat', (['point_mask_list'], {'dim': '(0)'}), '(point_mask_list, dim=0)\n', (2512, 2536), False, 'import torch\n'), ((2935, 2974), 'torch.atan2', 'torch.atan2', (['points[:, 1]', 'points[:, 0]'], {}), '(points[:, 1], points[:, 0])\n', (2946, 2974), False, 'import torch\n'), ((4699, 4714), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (4712, 4714), True, 'import torch.nn as nn\n'), ((4741, 4756), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (4754, 4756), True, 'import torch.nn as nn\n'), ((4784, 4799), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (4797, 4799), True, 'import torch.nn as nn\n'), ((10246, 10287), 'torch.cat', 'torch.cat', (['point_bev_features_list'], {'dim': '(0)'}), '(point_bev_features_list, dim=0)\n', (10255, 10287), False, 'import torch\n'), ((12031, 12063), 'torch.cat', 'torch.cat', (['keypoints_list'], {'dim': '(0)'}), '(keypoints_list, dim=0)\n', (12040, 12063), False, 'import torch\n'), ((13670, 13702), 'torch.cat', 'torch.cat', (['keypoints_list'], {'dim': '(0)'}), '(keypoints_list, dim=0)\n', (13679, 13702), False, 'import torch\n'), ((18822, 18863), 'torch.cat', 'torch.cat', (['point_features_list_new'], {'dim': '(0)'}), '(point_features_list_new, dim=0)\n', (18831, 18863), False, 'import torch\n'), ((19340, 19377), 'torch.cat', 'torch.cat', (['point_features_list'], {'dim': '(2)'}), '(point_features_list, dim=2)\n', (19349, 19377), False, 'import torch\n'), ((20728, 20743), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (20741, 20743), True, 'import torch.nn as nn\n'), ((23487, 23528), 'torch.cat', 'torch.cat', (['point_bev_features_list'], {'dim': '(0)'}), '(point_bev_features_list, dim=0)\n', (23496, 23528), False, 'import torch\n'), ((26577, 26609), 'torch.cat', 'torch.cat', (['keypoints_list'], {'dim': '(0)'}), '(keypoints_list, dim=0)\n', (26586, 26609), False, 'import torch\n'), ((32262, 32300), 'torch.cat', 'torch.cat', (['point_features_list'], {'dim': '(-1)'}), '(point_features_list, dim=-1)\n', (32271, 32300), False, 'import torch\n'), ((660, 674), 'torch.floor', 'torch.floor', (['x'], {}), '(x)\n', (671, 674), False, 'import torch\n'), ((708, 722), 'torch.floor', 'torch.floor', (['y'], {}), '(y)\n', (719, 722), False, 'import torch\n'), ((3908, 3957), 'torch.tensor', 'torch.tensor', (['xyz_batch_cnt'], {'device': 'points.device'}), '(xyz_batch_cnt, device=points.device)\n', (3920, 3957), False, 'import torch\n'), ((3995, 4054), 'torch.tensor', 'torch.tensor', (['num_sampled_points_list'], {'device': 'points.device'}), '(num_sampled_points_list, device=points.device)\n', (4007, 4054), False, 'import torch\n'), ((8143, 8206), 'torch.nn.Linear', 'nn.Linear', (['c_in', 'self.model_cfg.NUM_OUTPUT_FEATURES'], {'bias': 
'(False)'}), '(c_in, self.model_cfg.NUM_OUTPUT_FEATURES, bias=False)\n', (8152, 8206), True, 'import torch.nn as nn\n'), ((8220, 8270), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.model_cfg.NUM_OUTPUT_FEATURES'], {}), '(self.model_cfg.NUM_OUTPUT_FEATURES)\n', (8234, 8270), True, 'import torch.nn as nn\n'), ((8284, 8293), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (8291, 8293), True, 'import torch.nn as nn\n'), ((8488, 8506), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['c_in'], {}), '(c_in)\n', (8500, 8506), True, 'import torch.nn as nn\n'), ((8618, 8651), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['self.fusion_channel'], {}), '(self.fusion_channel)\n', (8630, 8651), True, 'import torch.nn as nn\n'), ((9329, 9347), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['c_in'], {}), '(c_in)\n', (9341, 9347), True, 'import torch.nn as nn\n'), ((9361, 9370), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9368, 9370), True, 'import torch.nn as nn\n'), ((9427, 9445), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['c_in'], {}), '(c_in)\n', (9439, 9445), True, 'import torch.nn as nn\n'), ((9459, 9468), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9466, 9468), True, 'import torch.nn as nn\n'), ((14087, 14142), 'torch.max', 'torch.max', (["batch_dict['batch_cls_preds'][bs_idx]"], {'dim': '(1)'}), "(batch_dict['batch_cls_preds'][bs_idx], dim=1)\n", (14096, 14142), False, 'import torch\n'), ((14166, 14191), 'torch.topk', 'torch.topk', (['s', 'self.topks'], {}), '(s, self.topks)\n', (14176, 14191), False, 'import torch\n'), ((14758, 14813), 'torch.max', 'torch.max', (["batch_dict['batch_cls_preds'][bs_idx]"], {'dim': '(1)'}), "(batch_dict['batch_cls_preds'][bs_idx], dim=1)\n", (14767, 14813), False, 'import torch\n'), ((14837, 14862), 'torch.topk', 'torch.topk', (['s', 'self.topks'], {}), '(s, self.topks)\n', (14847, 14862), False, 'import torch\n'), ((19425, 19469), 'torch.cat', 'torch.cat', (['trans1_feat_projected_list'], {'dim': '(2)'}), '(trans1_feat_projected_list, dim=2)\n', (19434, 19469), False, 'import torch\n'), ((22156, 22219), 'torch.nn.Linear', 'nn.Linear', (['c_in', 'self.model_cfg.NUM_OUTPUT_FEATURES'], {'bias': '(False)'}), '(c_in, self.model_cfg.NUM_OUTPUT_FEATURES, bias=False)\n', (22165, 22219), True, 'import torch.nn as nn\n'), ((22233, 22283), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.model_cfg.NUM_OUTPUT_FEATURES'], {}), '(self.model_cfg.NUM_OUTPUT_FEATURES)\n', (22247, 22283), True, 'import torch.nn as nn\n'), ((22297, 22306), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (22304, 22306), True, 'import torch.nn as nn\n'), ((28392, 28429), 'torch.cat', 'torch.cat', (['point_features_list'], {'dim': '(0)'}), '(point_features_list, dim=0)\n', (28401, 28429), False, 'import torch\n'), ((1317, 1328), 'torch.t', 'torch.t', (['Id'], {}), '(Id)\n', (1324, 1328), False, 'import torch\n'), ((12536, 12571), 'torch.arange', 'torch.arange', (['(0)', 'self.max_keypoints'], {}), '(0, self.max_keypoints)\n', (12548, 12571), False, 'import torch\n'), ((27767, 27805), 'torch.cat', 'torch.cat', (['(xyz, xyz_features)'], {'dim': '(-1)'}), '((xyz, xyz_features), dim=-1)\n', (27776, 27805), False, 'import torch\n'), ((1289, 1300), 'torch.t', 'torch.t', (['Ic'], {}), '(Ic)\n', (1296, 1300), False, 'import torch\n'), ((3519, 3556), 'math.ceil', 'math.ceil', (['(ratio * num_sampled_points)'], {}), '(ratio * num_sampled_points)\n', (3528, 3556), False, 'import math\n'), ((5224, 5237), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (5235, 5237), True, 'import torch.nn as nn\n'), ((5279, 5292), 
'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (5290, 5292), True, 'import torch.nn as nn\n'), ((6296, 6309), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (6307, 6309), True, 'import torch.nn as nn\n'), ((6351, 6364), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (6362, 6364), True, 'import torch.nn as nn\n'), ((7554, 7567), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (7565, 7567), True, 'import torch.nn as nn\n'), ((7609, 7622), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (7620, 7622), True, 'import torch.nn as nn\n'), ((26398, 26449), 'torch.cat', 'torch.cat', (['(bs_idxs[:, None], cur_keypoints)'], {'dim': '(1)'}), '((bs_idxs[:, None], cur_keypoints), dim=1)\n', (26407, 26449), False, 'import torch\n'), ((1232, 1243), 'torch.t', 'torch.t', (['Ia'], {}), '(Ia)\n', (1239, 1243), False, 'import torch\n'), ((1261, 1272), 'torch.t', 'torch.t', (['Ib'], {}), '(Ib)\n', (1268, 1272), False, 'import torch\n'), ((5386, 5435), 'torch.nn.Linear', 'nn.Linear', (['c_bev', 'self.fusion_channel'], {'bias': '(False)'}), '(c_bev, self.fusion_channel, bias=False)\n', (5395, 5435), True, 'import torch.nn as nn\n'), ((5457, 5492), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.fusion_channel'], {}), '(self.fusion_channel)\n', (5471, 5492), True, 'import torch.nn as nn\n'), ((5570, 5619), 'torch.nn.Linear', 'nn.Linear', (['self.fusion_channel', 'c_bev'], {'bias': '(False)'}), '(self.fusion_channel, c_bev, bias=False)\n', (5579, 5619), True, 'import torch.nn as nn\n'), ((5641, 5662), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['c_bev'], {}), '(c_bev)\n', (5655, 5662), True, 'import torch.nn as nn\n'), ((6458, 6505), 'torch.nn.Linear', 'nn.Linear', (['cur', 'self.fusion_channel'], {'bias': '(False)'}), '(cur, self.fusion_channel, bias=False)\n', (6467, 6505), True, 'import torch.nn as nn\n'), ((6527, 6562), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.fusion_channel'], {}), '(self.fusion_channel)\n', (6541, 6562), True, 'import torch.nn as nn\n'), ((6640, 6687), 'torch.nn.Linear', 'nn.Linear', (['self.fusion_channel', 'cur'], {'bias': '(False)'}), '(self.fusion_channel, cur, bias=False)\n', (6649, 6687), True, 'import torch.nn as nn\n'), ((6709, 6728), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['cur'], {}), '(cur)\n', (6723, 6728), True, 'import torch.nn as nn\n'), ((7716, 7763), 'torch.nn.Linear', 'nn.Linear', (['cur', 'self.fusion_channel'], {'bias': '(False)'}), '(cur, self.fusion_channel, bias=False)\n', (7725, 7763), True, 'import torch.nn as nn\n'), ((7785, 7820), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['self.fusion_channel'], {}), '(self.fusion_channel)\n', (7799, 7820), True, 'import torch.nn as nn\n'), ((7898, 7945), 'torch.nn.Linear', 'nn.Linear', (['self.fusion_channel', 'cur'], {'bias': '(False)'}), '(self.fusion_channel, cur, bias=False)\n', (7907, 7945), True, 'import torch.nn as nn\n'), ((7967, 7986), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['cur'], {}), '(cur)\n', (7981, 7986), True, 'import torch.nn as nn\n'), ((19759, 19808), 'torch.arange', 'torch.arange', (['batch_size'], {'device': 'keypoints.device'}), '(batch_size, device=keypoints.device)\n', (19771, 19808), False, 'import torch\n'), ((26707, 26756), 'torch.arange', 'torch.arange', (['batch_size'], {'device': 'keypoints.device'}), '(batch_size, device=keypoints.device)\n', (26719, 26756), False, 'import torch\n')] |
#PRIMARY IMPORTS
import discord, os, datetime, sys, json, traceback, logging
#SECONDARY IMPORTS
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from discord.ext import commands
from data import config
#LOGGING
logger = logging.getLogger("ivry")
logger.debug("errors.py Started")
class Errors(commands.Cog):
def __init__(self, client):
self.client = client
#ERROR MESSAGES
@commands.Cog.listener()
async def on_command_error(self, ctx, error):
if hasattr(ctx.command, 'on_error'):
return
cog = ctx.cog
if cog:
if cog._get_overridden_method(cog.cog_command_error) is not None:
return
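        # CommandNotFound is ignored silently; other errors are unwrapped to the
        # original exception and reported below with an embed.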
ignored = (commands.CommandNotFound)
error = getattr(error, 'original', error)
if isinstance(error, ignored):
return
#COMMAND ERROR
elif isinstance(error, commands.CommandError):
embed = discord.Embed(title=f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Command Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CONVERSION ERROR
elif isinstance(error, commands.ConversionError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Conversion Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Conversion Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#USER INPUT ERROR
elif isinstance(error, commands.UserInputError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal User Input Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal User Input Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING REQUIRED ARGUMENT
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Conversion Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Missing Required Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#TOO MANY ARGUMENTS
elif isinstance(error, commands.TooManyArguments):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Conversion Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Too Many Arguments Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD ARGUMENT
elif isinstance(error, commands.BadArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Bad Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MESSAGE NOT FOUND
elif isinstance(error, commands.MessageNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Message Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Message Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MEMBER NOT FOUND
elif isinstance(error, commands.MemberNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Member Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Member Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#USER NOT FOUND
elif isinstance(error, commands.UserNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal User Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal User Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHANNEL NOT FOUND
elif isinstance(error, commands.ChannelNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Channel Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Channel Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHANNEL NOT READABLE
elif isinstance(error, commands.ChannelNotReadable):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Channel Not Readable Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Channel Not Readable Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD COLOR ARGUMENT
elif isinstance(error, commands.BadColourArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Colour Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Bad Colour Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#ROLE NOT FOUND
elif isinstance(error, commands.RoleNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Role Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Role Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD INVITE ARGUMENT
elif isinstance(error, commands.BadInviteArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Conversion Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Conversion Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EMOJI NOT FOUND
elif isinstance(error, commands.EmojiNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Emoji Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Emoji Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#PARTIAL EMOJI CONVERSION FAILURE
elif isinstance(error, commands.PartialEmojiConversionFailure):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Partial Emoji Conversion Failure Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Partial Emoji Conversion Failure Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD BOOL ARGUMENT
elif isinstance(error, commands.BadBoolArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Bool Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Bad Bool Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BAD UNION ARGUMENT
elif isinstance(error, commands.BadUnionArgument):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bad Union Argument Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Bad Union Argument Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#ARGUMENT PARSING ERROR
elif isinstance(error, commands.ArgumentParsingError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Argument Parsing Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Argument Parsing Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#UNEXPECTED QUOTE ERROR
elif isinstance(error, commands.UnexpectedQuoteError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Unexpected Quote Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Unexpected Quote Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#INVALID END OF QUOTED STRING
elif isinstance(error, commands.InvalidEndOfQuotedStringError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Invalid End Of Quoted String Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Invalid End Of Quoted String Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXPECTED CLOSING QUOTE ERROR
elif isinstance(error, commands.ExpectedClosingQuoteError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Expected Closing Quote Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Expected Closing Quote Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#COMMAND NOT FOUND
elif isinstance(error, commands.CommandNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Command Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHECK FAILURE
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Check Failure Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Check Failure Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CHECK ANY FAILURE
elif isinstance(error, commands.CheckAnyFailure):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Check Any Failure Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Check Any Failure Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#PRIVATE MESSAGE ONLY
elif isinstance(error, commands.PrivateMessageOnly):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Private Message Only Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Private Message Only Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NO PRIVATE MESSAGE
elif isinstance(error, commands.NoPrivateMessage):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal No Private Message Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal No Private Message Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NOT OWNER
elif isinstance(error, commands.NotOwner):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Not Owner Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Not Owner Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING PERMISSIONS
elif isinstance(error, commands.MissingPermissions):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Missing Permissions Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Missing Permissions Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING ROLE
elif isinstance(error, commands.MissingRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Missing Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Missing Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BOT MISSING ROLE
elif isinstance(error, commands.BotMissingRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bot Missing Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Bot Missing Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MISSING ANY ROLE
elif isinstance(error, commands.MissingAnyRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Missing Any Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Missing Any Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#BOT MISSING ANY ROLE
elif isinstance(error, commands.BotMissingAnyRole):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Bot Missing Any Role Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Bot Missing Any Role Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NSFW CHANNEL REQUIRED
elif isinstance(error, commands.NSFWChannelRequired):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal NSFW Channel Required Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal NSFW Channel Required Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#DISABLED COMMAND
elif isinstance(error, commands.DisabledCommand):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Disabled Command Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Disabled Command Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#COMMAND INVOKE ERROR
elif isinstance(error, commands.CommandInvokeError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Invoke Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Command Invoke Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#COMMAND ON COOLDOWN
elif isinstance(error, commands.CommandOnCooldown):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command On Cooldown Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Command On Cooldown Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#MAX CONCURRENCY REACHED
elif isinstance(error, commands.MaxConcurrencyReached):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Max Concurrency Reached Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Max Concurrency Reached Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION ERROR
elif isinstance(error, commands.ExtensionError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Extension Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION ALREADY LOADED
elif isinstance(error, commands.ExtensionAlreadyLoaded):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Already Loaded Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Extension Already Loaded Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION NOT LOADED
elif isinstance(error, commands.ExtensionNotLoaded):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Not Loaded Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Extension Not Loaded Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#NO ENTRY POINT ERROR
elif isinstance(error, commands.NoEntryPointError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal No Entry Point Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal No Entry Point Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION FAILED
elif isinstance(error, commands.ExtensionFailed):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Failed Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Extension Failed Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#EXTENSION NOT FOUND
elif isinstance(error, commands.ExtensionNotFound):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal EXT Not Found Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Extension Not Found Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
#CLIENT EXCEPTION
#COMMAND REGISTRATION ERROR
elif isinstance(error, commands.CommandRegistrationError):
embed = discord.Embed(title = f"Type = `Fatal`",color=0x9B59B6, timestamp=ctx.message.created_at)
embed.set_author(name="IVRY Error", icon_url=self.client.user.avatar_url)
embed.add_field(name = "Error", value="`Internal Command Registration Error`", inline=True)
embed.add_field(name = "Error Point", value=f"`{ctx.command}`", inline=True)
embed.add_field(name = "Trace Back", value=f"```CSS\n{error}```", inline=False)
embed.set_footer(text=f"{config.version} | {config.shards}")
await ctx.send(embed=embed)
            print(f'[WARNING] A Fatal internal Command Registration Error occurred in execution of {ctx.command}')
logger.debug(f"[ERROR] {ctx.command} | {error}")
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)
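# discord.py calls setup() when this cog file is loaded as an extension (bot.load_extension)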
def setup(client):
client.add_cog(Errors(client)) | [
"logging.getLogger",
"discord.Embed",
"discord.ext.commands.Cog.listener"
]
| [((248, 273), 'logging.getLogger', 'logging.getLogger', (['"""ivry"""'], {}), "('ivry')\n", (265, 273), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((472, 495), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (493, 495), False, 'from discord.ext import commands\n'), ((1153, 1246), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (1166, 1246), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((2137, 2230), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (2150, 2230), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((3144, 3237), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (3157, 3237), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((4169, 4262), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (4182, 4262), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((5170, 5263), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (5183, 5263), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((6162, 6255), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (6175, 6255), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((7188, 7281), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (7201, 7281), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((8222, 8315), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (8235, 8315), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((9247, 9340), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (9260, 9340), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((10274, 10367), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (10287, 10367), False, 'import discord, os, datetime, sys, json, 
traceback, logging\n'), ((11313, 11406), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (11326, 11406), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((12355, 12448), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (12368, 12448), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((13385, 13478), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (13398, 13478), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((14414, 14507), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (14427, 14507), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((15419, 15512), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (15432, 15512), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((16459, 16552), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (16472, 16552), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((17504, 17597), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (17517, 17597), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((18523, 18616), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (18536, 18616), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((19552, 19645), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (19565, 19645), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((20577, 20670), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (20590, 20670), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((21618, 21711), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (21631, 
21711), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((22676, 22769), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (22689, 22769), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((23709, 23802), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (23722, 23802), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((24711, 24804), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (24724, 24804), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((25726, 25819), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (25739, 25819), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((26761, 26854), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (26774, 26854), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((27790, 27883), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (27803, 27883), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((28798, 28891), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (28811, 28891), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((29808, 29901), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (29821, 29901), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((30840, 30933), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (30853, 30933), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((31849, 31942), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (31862, 31942), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((32866, 32959), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', 
color=10181046, timestamp=ctx.\n message.created_at)\n", (32879, 32959), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((33890, 33983), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (33903, 33983), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((34922, 35015), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (34935, 35015), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((35953, 36046), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (35966, 36046), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((36974, 37067), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (36987, 37067), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((37989, 38082), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (38002, 38082), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((39026, 39119), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (39039, 39119), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((40063, 40156), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (40076, 40156), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((41072, 41165), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (41085, 41165), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((42101, 42194), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (42114, 42194), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((43121, 43214), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (43134, 43214), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((44128, 44221), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 
'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (44141, 44221), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((45139, 45232), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (45152, 45232), False, 'import discord, os, datetime, sys, json, traceback, logging\n'), ((46199, 46292), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Type = `Fatal`"""', 'color': '(10181046)', 'timestamp': 'ctx.message.created_at'}), "(title=f'Type = `Fatal`', color=10181046, timestamp=ctx.\n message.created_at)\n", (46212, 46292), False, 'import discord, os, datetime, sys, json, traceback, logging\n')] |
from ariadne import make_executable_schema, load_schema_from_path
from ariadne.asgi import GraphQL
from resolvers import query, skill, person, eye_color, mutation
# import schema from GraphQL file
type_defs = load_schema_from_path("./schema.gql")
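# bind the resolver objects imported from resolvers.py to the matching types in the SDL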
schema = make_executable_schema(
type_defs, query, skill, person, eye_color, mutation
)
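# expose the schema as an ASGI application; debug=True adds detailed error information to responses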
app = GraphQL(schema, debug=True)
| [
"ariadne.asgi.GraphQL",
"ariadne.make_executable_schema",
"ariadne.load_schema_from_path"
]
| [((211, 248), 'ariadne.load_schema_from_path', 'load_schema_from_path', (['"""./schema.gql"""'], {}), "('./schema.gql')\n", (232, 248), False, 'from ariadne import make_executable_schema, load_schema_from_path\n'), ((259, 335), 'ariadne.make_executable_schema', 'make_executable_schema', (['type_defs', 'query', 'skill', 'person', 'eye_color', 'mutation'], {}), '(type_defs, query, skill, person, eye_color, mutation)\n', (281, 335), False, 'from ariadne import make_executable_schema, load_schema_from_path\n'), ((348, 375), 'ariadne.asgi.GraphQL', 'GraphQL', (['schema'], {'debug': '(True)'}), '(schema, debug=True)\n', (355, 375), False, 'from ariadne.asgi import GraphQL\n')] |
import importlib
import time
from pathlib import Path
import os
import sys
def import_plugins():
#find actual path
realpath = os.path.realpath(__file__)
dirname = os.path.dirname(realpath)
#add modules & plugins
plugin_path = os.path.join(dirname, "plugins")
for dir_path in Path(plugin_path).rglob('*.py'):
dp = str(dir_path)
if dp.lower().endswith("__init__.py"):
continue
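        # convert the file path into a dotted module path relative to this directory (dropping ".py")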
path = dp[len(dirname)+1:-3].replace(os.sep,".")
if len(path.split('.')) < 4:
'''only import the top level plugin directory, so that potential submodules are
only imported if they are imported by the plugins.'''
print(" > " + path)
importlib.import_module(path)
print("Import plugins ..")
import_plugins()
print("Import app ..")
import modules.app.App as piTomation
app: piTomation.App
print("Start app ..")
app = piTomation.App()
#try:
# app = piTomation.App()
#except Exception as ex:
# print(ex)
# exit()
try:
while not app.is_disposed:
time.sleep(1)
except Exception as ex:
print(ex)
| [
"modules.app.App.App",
"importlib.import_module",
"pathlib.Path",
"os.path.join",
"time.sleep",
"os.path.realpath",
"os.path.dirname"
]
| [((914, 930), 'modules.app.App.App', 'piTomation.App', ([], {}), '()\n', (928, 930), True, 'import modules.app.App as piTomation\n'), ((135, 161), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (151, 161), False, 'import os\n'), ((176, 201), 'os.path.dirname', 'os.path.dirname', (['realpath'], {}), '(realpath)\n', (191, 201), False, 'import os\n'), ((248, 280), 'os.path.join', 'os.path.join', (['dirname', '"""plugins"""'], {}), "(dirname, 'plugins')\n", (260, 280), False, 'import os\n'), ((1063, 1076), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1073, 1076), False, 'import time\n'), ((302, 319), 'pathlib.Path', 'Path', (['plugin_path'], {}), '(plugin_path)\n', (306, 319), False, 'from pathlib import Path\n'), ((728, 757), 'importlib.import_module', 'importlib.import_module', (['path'], {}), '(path)\n', (751, 757), False, 'import importlib\n')] |
"""
Functions support other modules.
"""
import uuid
def check_response(response, key=None):
"""CHeck the api response.
Make sure the status call is successful and the response have specific key.
Return:
class: `Response <Response>`
"""
code = response.status_code
if not 200 <= code < 300:
raise Exception('[Decanter Core response Error] Request Error')
if key is not None and key not in response.json():
raise KeyError('[Decanter Core response Error] No key value')
return response
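# Illustrative usage (names are hypothetical): checked = check_response(resp, key='some_key')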
def gen_id(type_, name):
"""Generate a random UUID if name isn't given.
Returns:
string
"""
if name is None:
rand_id = uuid.uuid4()
rand_id = str(rand_id)[:8]
name = type_ + '_' + rand_id
return name
def isnotebook():
"""Return True if SDK is running on Jupyter Notebook."""
try:
shell = get_ipython().__class__.__name__
if shell == 'ZMQInteractiveShell':
return True # Jupyter notebook or qtconsole
if shell == 'TerminalInteractiveShell':
return False # Terminal running IPython
return False
except NameError:
return False
| [
"uuid.uuid4"
]
| [((699, 711), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (709, 711), False, 'import uuid\n')] |
import scipy.stats
import numpy as np
def f_test(sample_x, sample_y, larger_varx_alt):
"""
Computes the F-value and corresponding p-value for a pair of samples and alternative hypothesis.
Parameters
----------
sample_x : list
A random sample x1,...,xnx. Let its (underlying) variance be ox^2 and its sample variance Sx^2.
sample_y : list
A random sample y1,...,yny. Let its (underlying) variance be oy^2 and its sample variance Sy^2.
larger_varx_alt : bool
True if alternative hypothesis is ox^2 > oy^2. False if ox^2 < oy^2.
Returns
-------
f_value : float
Sx^2 / Sy^2 as defined in 'A Quick, Compact, Two-Sample Dispersion Test: Count Five'.
p_value : float
Let F be the F-distribution with nx, ny df. 1 - P(F < f_value) if larger_varx_alt = True, P(F < f_value) otherwise. More extreme F = Sx^2 / Sy^2 values for alternative ox^2 > oy^2 are to the right. More extreme F values for ox^2 < oy^2 are to the left.
"""
# calculate unbiased sample variances (n-1 in the denominator)
sample_var_x = np.var(sample_x, ddof=1)
sample_var_y = np.var(sample_y, ddof=1)
f_value = sample_var_x/sample_var_y
nx = len(sample_x)
ny = len(sample_y)
# compute P(F < f_value) with nx-1, ny-1 df
cdf = scipy.stats.f.cdf(f_value, nx-1, ny-1)
# More extreme f_value = Sx^2 / Sy^2 values for alternative ox^2 > oy^2. ox^2 being even bigger would be represented by larger quotient Sx^2 / Sy^2.
# More extreme f_value for ox^2 < oy^2 are to the left. ox^2 being even smaller would be represented by smaller quotient.
p_value = 1 - cdf if larger_varx_alt else cdf
return f_value, p_value
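# Illustrative usage (made-up samples), testing H1: var(x) > var(y):
#   f_val, p_val = f_test([3.1, 2.9, 3.4, 3.8], [2.0, 2.5, 1.8, 2.2], larger_varx_alt=True)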
def f1_test(sample_x, sample_y, larger_varx_alt):
"""
Computes the F1-value as defined in 'Fixing the F Test for Equal Variances' and corresponding p-value for a pair of samples and alternative hypothesis.
Parameters
----------
sample_x : list
A random sample x1,...,xnx. Let its (underlying) variance be ox^2 and its sample variance Sx^2.
sample_y : list
A random sample y1,...,yny. Let its (underlying) variance be oy^2 and its sample variance Sy^2.
larger_varx_alt : bool
True if alternative hypothesis is ox^2 > oy^2. False if ox^2 < oy^2.
Returns
-------
p_value : float
Let F be the F-distribution with rx, ry df as specified in equation (1) of 'Fixing the F Test for Equal Variances'. 1 - P(F < f_value) if larger_varx_alt = True, P(F < f_value) otherwise.
"""
# calculate unbiased sample variances (n-1 in the denominator)
sample_var_x = np.var(sample_x, ddof=1)
sample_var_y = np.var(sample_y, ddof=1)
f_value = sample_var_x/sample_var_y
nx = len(sample_x)
ny = len(sample_y)
xmean = np.mean(sample_x)
ymean = np.mean(sample_y)
    # compute the fourth moment and pooled variance defined below equation (1) of the Shoemaker paper
fourth_moment = (np.sum((sample_x - xmean)**4) +
np.sum((sample_y - ymean)**4))/(nx + ny)
pooled_var = ((nx-1)*sample_var_x + (ny-1)*sample_var_y)/(nx + ny)
# see equation (1) of Shoemaker paper
rx = 2*nx / ((fourth_moment/pooled_var**2) - ((nx - 3)/(nx - 1)))
ry = 2*ny / ((fourth_moment/pooled_var**2) - ((ny - 3)/(ny - 1)))
# compute P(F < f_value) with rx-1, ry-1 df
cdf = scipy.stats.f.cdf(f_value, rx-1, ry-1)
# More extreme f_value = Sx^2 / Sy^2 values for alternative ox^2 > oy^2. ox^2 being even bigger would be represented by larger quotient Sx^2 / Sy^2.
# More extreme f_value for ox^2 < oy^2 are to the left. ox^2 being even smaller would be represented by smaller quotient.
p_value = 1 - cdf if larger_varx_alt else cdf
return p_value
def count_five(sample_x, sample_y, center):
"""
Computes the extreme counts for samples x and y as defined in 'A Quick, Compact, Two-Sample Dispersion Test: Count Five'.
Parameters
----------
sample_x : list
A random sample x1,...,xn.
sample_y : list
A random sample y1,...,ym.
center : str
Whether to use 'mean' or 'median' for centering.
Returns
-------
extreme_count_x : int
C_x computed with centering mu being sample mean if center = 'mean' and sample median if center = 'median' as defined in equation (1) of 'A Quick, Compact, Two-Sample Dispersion Test: Count Five'.
extreme_count_y : int
C_y defined analogously to C_x above.
Raises
------
ValueError
If center is neither 'mean' or 'median'.
"""
if center not in {'mean', 'median'}:
raise ValueError('Invalid center %s' % (center))
if center == 'mean':
centering_x = np.mean(sample_x)
centering_y = np.mean(sample_y)
else:
centering_x = np.median(sample_x)
centering_y = np.median(sample_y)
# compute absolute deviations from centering for x, y samples
abs_dev_x = np.abs(np.array(sample_x) - centering_x)
abs_dev_y = np.abs(np.array(sample_y) - centering_y)
# count number of X deviations greater than max Y deviation and vice versa
# see equation (1) of Count Five paper
extreme_count_x = np.sum(np.where(abs_dev_x > np.max(abs_dev_y), 1, 0))
extreme_count_y = np.sum(np.where(abs_dev_y > np.max(abs_dev_x), 1, 0))
return extreme_count_x, extreme_count_y
| [
"numpy.mean",
"numpy.median",
"numpy.max",
"numpy.array",
"numpy.sum",
"numpy.var"
]
| [((1099, 1123), 'numpy.var', 'np.var', (['sample_x'], {'ddof': '(1)'}), '(sample_x, ddof=1)\n', (1105, 1123), True, 'import numpy as np\n'), ((1143, 1167), 'numpy.var', 'np.var', (['sample_y'], {'ddof': '(1)'}), '(sample_y, ddof=1)\n', (1149, 1167), True, 'import numpy as np\n'), ((2649, 2673), 'numpy.var', 'np.var', (['sample_x'], {'ddof': '(1)'}), '(sample_x, ddof=1)\n', (2655, 2673), True, 'import numpy as np\n'), ((2693, 2717), 'numpy.var', 'np.var', (['sample_y'], {'ddof': '(1)'}), '(sample_y, ddof=1)\n', (2699, 2717), True, 'import numpy as np\n'), ((2816, 2833), 'numpy.mean', 'np.mean', (['sample_x'], {}), '(sample_x)\n', (2823, 2833), True, 'import numpy as np\n'), ((2846, 2863), 'numpy.mean', 'np.mean', (['sample_y'], {}), '(sample_y)\n', (2853, 2863), True, 'import numpy as np\n'), ((4713, 4730), 'numpy.mean', 'np.mean', (['sample_x'], {}), '(sample_x)\n', (4720, 4730), True, 'import numpy as np\n'), ((4753, 4770), 'numpy.mean', 'np.mean', (['sample_y'], {}), '(sample_y)\n', (4760, 4770), True, 'import numpy as np\n'), ((4803, 4822), 'numpy.median', 'np.median', (['sample_x'], {}), '(sample_x)\n', (4812, 4822), True, 'import numpy as np\n'), ((4845, 4864), 'numpy.median', 'np.median', (['sample_y'], {}), '(sample_y)\n', (4854, 4864), True, 'import numpy as np\n'), ((2954, 2985), 'numpy.sum', 'np.sum', (['((sample_x - xmean) ** 4)'], {}), '((sample_x - xmean) ** 4)\n', (2960, 2985), True, 'import numpy as np\n'), ((3007, 3038), 'numpy.sum', 'np.sum', (['((sample_y - ymean) ** 4)'], {}), '((sample_y - ymean) ** 4)\n', (3013, 3038), True, 'import numpy as np\n'), ((4955, 4973), 'numpy.array', 'np.array', (['sample_x'], {}), '(sample_x)\n', (4963, 4973), True, 'import numpy as np\n'), ((5012, 5030), 'numpy.array', 'np.array', (['sample_y'], {}), '(sample_y)\n', (5020, 5030), True, 'import numpy as np\n'), ((5219, 5236), 'numpy.max', 'np.max', (['abs_dev_y'], {}), '(abs_dev_y)\n', (5225, 5236), True, 'import numpy as np\n'), ((5295, 5312), 'numpy.max', 'np.max', (['abs_dev_x'], {}), '(abs_dev_x)\n', (5301, 5312), True, 'import numpy as np\n')] |
"""
Pycovjson - Command line interface
Author: rileywilliams
Version: 0.1.0
"""
import argparse
from pycovjson.write import Writer
from pycovjson.read_netcdf import NetCDFReader as Reader
def main():
"""
Command line interface for pycovjson - Converts Scientific Data Formats into CovJSON and saves to disk.
:argument -i: Input file path.
:argument -o: Output file name.
:argument -t: Use Tiling.
:argument -v: Which variable to populate coverage with.
    :argument -s: Tile shape, given as a list of integers.
:argument -n: Use interactive mode.
:argument -u: MongoDB URL
"""
parser = argparse.ArgumentParser(
description='Convert Scientific Data Formats into CovJSON.')
parser.add_argument('-i', '--input', dest='inputfile',
help='Name of input file', required=True)
parser.add_argument('-o', '--output', dest='outputfile',
help='Name and location of output file', default='coverage.covjson')
parser.add_argument('-t', '--tiled', action='store_true', help='Apply tiling')
parser.add_argument('-s', '--shape', nargs='+',
help='Tile shape, list', type=int)
parser.add_argument('-v', dest='variable',
help='Variable to populate coverage with', required=True)
parser.add_argument('-n', '--interactive', action='store_true', help='Enter interactive mode')
parser.add_argument('-u', '--endpoint_url', dest='endpoint_url', nargs=1,
help='MongoDB endpoint for CovJSON persistence')
args = parser.parse_args()
inputfile = args.inputfile
outputfile = args.outputfile
variable = args.variable
tiled = args.tiled
tile_shape = args.shape
interactive = args.interactive
endpoint_url = args.endpoint_url
if interactive:
axis = input('Which Axis?', Reader.get_axis(variable))
if tiled and len(tile_shape) == 0:
reader = Reader(inputfile)
shape_list = reader.get_shape(variable)
dims = reader.get_dimensions(variable)
print(list(zip(dims, shape_list)))
tile_shape = input(
'Enter the shape tile shape as a list of comma separated integers')
tile_shape = tile_shape.split(',')
tile_shape = list(map(int, tile_shape))
print(tile_shape)
    if outputfile is None:
        outputfile = 'coverage.covjson'  # fall back to the argparse default
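    # Writer performs the conversion; endpoint_url (if given) persists the CovJSON to MongoDB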
Writer(outputfile, inputfile, [variable],
tiled=tiled, tile_shape=tile_shape, endpoint_url=endpoint_url).write()
if __name__ == '__main__':
main()
| [
"pycovjson.read_netcdf.NetCDFReader.get_axis",
"pycovjson.read_netcdf.NetCDFReader",
"argparse.ArgumentParser",
"pycovjson.write.Writer"
]
| [((618, 707), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Convert Scientific Data Formats into CovJSON."""'}), "(description=\n 'Convert Scientific Data Formats into CovJSON.')\n", (641, 707), False, 'import argparse\n'), ((1953, 1970), 'pycovjson.read_netcdf.NetCDFReader', 'Reader', (['inputfile'], {}), '(inputfile)\n', (1959, 1970), True, 'from pycovjson.read_netcdf import NetCDFReader as Reader\n'), ((1869, 1894), 'pycovjson.read_netcdf.NetCDFReader.get_axis', 'Reader.get_axis', (['variable'], {}), '(variable)\n', (1884, 1894), True, 'from pycovjson.read_netcdf import NetCDFReader as Reader\n'), ((2407, 2516), 'pycovjson.write.Writer', 'Writer', (['outputfile', 'inputfile', '[variable]'], {'tiled': 'tiled', 'tile_shape': 'tile_shape', 'endpoint_url': 'endpoint_url'}), '(outputfile, inputfile, [variable], tiled=tiled, tile_shape=\n tile_shape, endpoint_url=endpoint_url)\n', (2413, 2516), False, 'from pycovjson.write import Writer\n')] |
import numpy as np
from kivygames.games import Game
import kivygames.games.noughtsandcrosses.c as c
class CellOccupiedError(Exception):
pass
class NoughtsAndCrosses(Game):
minPlayers = 2
maxPlayers = 2
hasAI = True
gridShape = (3, 3)
def __init__(self):
Game.__init__(self)
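        # 3x3 board of cell states: 0 = empty, 1 = player one, 2 = player two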
self.grid = np.zeros(self.gridShape, dtype="u1")
self.player = 1
def isEmpty(self, position):
return self.grid[position] == 0
async def turn(self):
await self.sendOutput("Player", self.player)
while True:
position = await self.getInput("Position", tuple, self.player)
if self.isEmpty(position):
break
await self.sendOutput("Error", "That space is already full.")
await self.sendOutput("Error", "")
self.grid[position] = self.player
await self.sendOutput("Grid", self.grid)
if c.hasPlayerWon(self.grid, self.player):
await self.sendOutput("End", f"Player {self.player} wins.")
return True
if np.count_nonzero(self.grid) == 9:
await self.sendOutput("End", f"It's a draw!")
return True
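        # hand the turn to the other player (1 <-> 2)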
self.player = 3 - self.player
return False
def getAIInput(self, name):
if name == "Position":
return c.minimax(self.player, self.player, True, self.grid)[1]
async def game(self):
while True:
ended = await self.turn()
if ended:
break
await self.end()
| [
"numpy.count_nonzero",
"numpy.zeros",
"kivygames.games.noughtsandcrosses.c.hasPlayerWon",
"kivygames.games.noughtsandcrosses.c.minimax",
"kivygames.games.Game.__init__"
]
| [((294, 313), 'kivygames.games.Game.__init__', 'Game.__init__', (['self'], {}), '(self)\n', (307, 313), False, 'from kivygames.games import Game\n'), ((335, 371), 'numpy.zeros', 'np.zeros', (['self.gridShape'], {'dtype': '"""u1"""'}), "(self.gridShape, dtype='u1')\n", (343, 371), True, 'import numpy as np\n'), ((926, 964), 'kivygames.games.noughtsandcrosses.c.hasPlayerWon', 'c.hasPlayerWon', (['self.grid', 'self.player'], {}), '(self.grid, self.player)\n', (940, 964), True, 'import kivygames.games.noughtsandcrosses.c as c\n'), ((1073, 1100), 'numpy.count_nonzero', 'np.count_nonzero', (['self.grid'], {}), '(self.grid)\n', (1089, 1100), True, 'import numpy as np\n'), ((1332, 1384), 'kivygames.games.noughtsandcrosses.c.minimax', 'c.minimax', (['self.player', 'self.player', '(True)', 'self.grid'], {}), '(self.player, self.player, True, self.grid)\n', (1341, 1384), True, 'import kivygames.games.noughtsandcrosses.c as c\n')] |
import awkward as ak
from coffea.nanoevents.methods import vector
import pytest
ATOL = 1e-8
def record_arrays_equal(a, b):
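    # record arrays match when they share field names and agree element-wise in every field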
return (ak.fields(a) == ak.fields(b)) and all(ak.all(a[f] == b[f]) for f in ak.fields(a))
def test_two_vector():
a = ak.zip(
{
"x": [[1, 2], [], [3], [4]],
"y": [[5, 6], [], [7], [8]]
},
with_name="TwoVector",
highlevel=False
)
a = ak.Array(a, behavior=vector.behavior)
b = ak.zip(
{
"x": [[11, 12], [], [13], [14]],
"y": [[15, 16], [], [17], [18]]
},
with_name="TwoVector",
highlevel=False
)
b = ak.Array(b, behavior=vector.behavior)
assert record_arrays_equal(- a, ak.zip(
{
"x": [[-1, -2], [], [-3], [-4]],
"y": [[-5, -6], [], [-7], [-8]]
}
))
assert record_arrays_equal(a + b, ak.zip(
{
"x": [[12, 14], [], [16], [18]],
"y": [[20, 22], [], [24], [26]]
}
))
assert record_arrays_equal(a - b, ak.zip(
{
"x": [[-10, -10], [], [-10], [-10]],
"y": [[-10, -10], [], [-10], [-10]]
}
))
assert record_arrays_equal(a * 2, ak.zip(
{
"x": [[2, 4], [], [6], [8]],
"y": [[10, 12], [], [14], [16]]
}
))
assert record_arrays_equal(a / 2, ak.zip(
{
"x": [[0.5, 1], [], [1.5], [2]],
"y": [[2.5, 3], [], [3.5], [4]]
}
))
    assert ak.all(a.dot(b) == ak.Array([[86, 120], [], [158], [200]]))
    assert ak.all(b.dot(a) == ak.Array([[86, 120], [], [158], [200]]))
assert ak.all(abs(a.unit.r - 1) < ATOL)
assert ak.all(abs(a.unit.phi - a.phi) < ATOL)
def test_polar_two_vector():
a = ak.zip(
{
"r": [[1, 2], [], [3], [4]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]],
},
with_name="PolarTwoVector",
highlevel=False
)
a = ak.Array(a, behavior=vector.behavior)
assert record_arrays_equal(a * 2, ak.zip(
{
"r": [[2, 4], [], [6], [8]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]]
}
))
assert ak.all((a * (-2)).r == [[2, 4], [], [6], [8]])
assert ak.all((a * (-2)).phi - ak.Array([
[-2.8415926535, -2.7415926535],
[],
[-2.6415926535],
[-2.5415926535]
]) < ATOL)
assert record_arrays_equal(a / 2, ak.zip(
{
"r": [[0.5, 1], [], [1.5], [2]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]]
}
))
assert ak.all(abs((-a).x + a.x) < ATOL)
assert ak.all(abs((-a).y + a.y) < ATOL)
assert record_arrays_equal(a * (-1), -a)
assert ak.all(a.unit.phi == a.phi)
def test_three_vector():
a = ak.zip(
{
"x": [[1, 2], [], [3], [4]],
"y": [[5, 6], [], [7], [8]],
"z": [[9, 10], [], [11], [12]]
},
with_name="ThreeVector",
highlevel=False
)
a = ak.Array(a, behavior=vector.behavior)
b = ak.zip(
{
"x": [[4, 1], [], [10], [11]],
"y": [[17, 7], [], [11], [6]],
"z": [[9, 11], [], [5], [16]]
},
with_name="ThreeVector",
highlevel=False
)
b = ak.Array(b, behavior=vector.behavior)
assert record_arrays_equal(- a, ak.zip(
{
"x": [[-1, -2], [], [-3], [-4]],
"y": [[-5, -6], [], [-7], [-8]],
"z": [[-9, -10], [], [-11], [-12]]
}
))
assert record_arrays_equal(a + b, ak.zip(
{
"x": [[5, 3], [], [13], [15]],
"y": [[22, 13], [], [18], [14]],
"z": [[18, 21], [], [16], [28]]
}
))
assert record_arrays_equal(a - b, ak.zip(
{
"x": [[-3, 1], [], [-7], [-7]],
"y": [[-12, -1], [], [-4], [2]],
"z": [[0, -1], [], [6], [-4]]
}
))
assert record_arrays_equal(a * 2, ak.zip(
{
"x": [[2, 4], [], [6], [8]],
"y": [[10, 12], [], [14], [16]],
"z": [[18, 20], [], [22], [24]]
}
))
assert record_arrays_equal(a / 2, ak.zip(
{
"x": [[0.5, 1], [], [1.5], [2]],
"y": [[2.5, 3], [], [3.5], [4]],
"z": [[4.5, 5], [], [5.5], [6]]
}
))
assert ak.all(a.dot(b) == ak.Array([[170, 154], [], [162], [284]]))
assert ak.all(b.dot(a) == ak.Array([[170, 154], [], [162], [284]]))
assert record_arrays_equal(a.cross(b), ak.zip(
{
"x": [[-108, -4], [], [-86], [56]],
"y": [[27, -12], [], [95], [68]],
"z": [[-3, 8], [], [-37], [-64]]
}
))
assert record_arrays_equal(b.cross(a), ak.zip(
{
"x": [[108, 4], [], [86], [-56]],
"y": [[-27, 12], [], [-95], [-68]],
"z": [[3, -8], [], [37], [64]]
}
))
assert ak.all(abs(a.unit.rho - 1) < ATOL)
assert ak.all(abs(a.unit.phi - a.phi) < ATOL)
def test_spherical_three_vector():
a = ak.zip(
{
"rho": [[1.0, 2.0], [], [3.0], [4.0]],
"theta": [[1.2, 0.7], [], [1.8], [1.9]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]],
},
with_name="SphericalThreeVector",
highlevel=False
)
a = ak.Array(a, behavior=vector.behavior)
assert ak.all(abs((-a).x + a.x) < ATOL)
assert ak.all(abs((-a).y + a.y) < ATOL)
assert ak.all(abs((-a).z + a.z) < ATOL)
assert record_arrays_equal(a * (-1), -a)
def test_lorentz_vector():
a = ak.zip(
{
"x": [[1, 2], [], [3], [4]],
"y": [[5, 6], [], [7], [8]],
"z": [[9, 10], [], [11], [12]],
"t": [[50, 51], [], [52], [53]]
},
with_name="LorentzVector",
highlevel=False
)
a = ak.Array(a, behavior=vector.behavior)
b = ak.zip(
{
"x": [[4, 1], [], [10], [11]],
"y": [[17, 7], [], [11], [6]],
"z": [[9, 11], [], [5], [16]],
"t": [[60, 61], [], [62], [63]]
},
with_name="LorentzVector",
highlevel=False
)
b = ak.Array(b, behavior=vector.behavior)
assert record_arrays_equal(- a, ak.zip(
{
"x": [[-1, -2], [], [-3], [-4]],
"y": [[-5, -6], [], [-7], [-8]],
"z": [[-9, -10], [], [-11], [-12]],
"t": [[-50, -51], [], [-52], [-53]]
}
))
assert record_arrays_equal(a + b, ak.zip(
{
"x": [[5, 3], [], [13], [15]],
"y": [[22, 13], [], [18], [14]],
"z": [[18, 21], [], [16], [28]],
"t": [[110, 112], [], [114], [116]]
}
))
assert record_arrays_equal(a - b, ak.zip(
{
"x": [[-3, 1], [], [-7], [-7]],
"y": [[-12, -1], [], [-4], [2]],
"z": [[0, -1], [], [6], [-4]],
"t": [[-10, -10], [], [-10], [-10]]
}
))
assert record_arrays_equal(a * 2, ak.zip(
{
"x": [[2, 4], [], [6], [8]],
"y": [[10, 12], [], [14], [16]],
"z": [[18, 20], [], [22], [24]],
"t": [[100, 102], [], [104], [106]]
}
))
assert record_arrays_equal(a / 2, ak.zip(
{
"x": [[0.5, 1], [], [1.5], [2]],
"y": [[2.5, 3], [], [3.5], [4]],
"z": [[4.5, 5], [], [5.5], [6]],
"t": [[25, 25.5], [], [26], [26.5]]
}
))
assert record_arrays_equal(a.pvec, ak.zip(
{
"x": [[1, 2], [], [3], [4]],
"y": [[5, 6], [], [7], [8]],
"z": [[9, 10], [], [11], [12]],
}
))
boosted = a.boost(-a.boostvec)
assert ak.all(abs(boosted.x) < ATOL)
assert ak.all(abs(boosted.y) < ATOL)
assert ak.all(abs(boosted.z) < ATOL)
def test_pt_eta_phi_m_lorentz_vector():
a = ak.zip(
{
"pt": [[1, 2], [], [3], [4]],
"eta": [[1.2, 1.4], [], [1.6], [3.4]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]],
"mass": [[0.5, 0.9], [], [1.3], [4.5]]
},
with_name="PtEtaPhiMLorentzVector",
highlevel=False
)
a = ak.Array(a, behavior=vector.behavior)
assert ak.all((a * (-2)).pt == ak.Array([[2, 4], [], [6], [8]]))
assert ak.all((a * (-2)).theta - ak.Array([
[2.556488570968, 2.65804615357],
[],
[2.74315571762],
[3.07487087733]
]) < ATOL)
assert ak.all((a * (-2)).phi - ak.Array([
[-2.8415926535, -2.7415926535],
[],
[-2.6415926535],
[-2.5415926535]
]) < ATOL)
assert record_arrays_equal(a / 2, ak.zip(
{
"pt": [[0.5, 1], [], [1.5], [2]],
"eta": [[1.2, 1.4], [], [1.6], [3.4]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]],
"mass": [[0.25, 0.45], [], [0.65], [2.25]]
}
))
assert record_arrays_equal(a * (-1), -a)
boosted = a.boost(-a.boostvec)
assert ak.all(abs(boosted.x) < ATOL)
assert ak.all(abs(boosted.y) < ATOL)
assert ak.all(abs(boosted.z) < ATOL)
def test_pt_eta_phi_e_lorentz_vector():
a = ak.zip(
{
"pt": [[1, 2], [], [3], [4]],
"eta": [[1.2, 1.4], [], [1.6], [3.4]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]],
"energy": [[50, 51], [], [52], [60]]
},
with_name="PtEtaPhiELorentzVector",
highlevel=False
)
a = ak.Array(a, behavior=vector.behavior)
assert ak.all((a * (-2)).pt == ak.Array([[2, 4], [], [6], [8]]))
assert ak.all((a * (-2)).theta - ak.Array([
[2.556488570968, 2.65804615357],
[],
[2.74315571762],
[3.07487087733]
]) < ATOL)
assert ak.all((a * (-2)).phi - ak.Array([
[-2.8415926535, -2.7415926535],
[],
[-2.6415926535],
[-2.5415926535]
]) < ATOL)
assert record_arrays_equal(a / 2, ak.zip(
{
"pt": [[0.5, 1], [], [1.5], [2]],
"eta": [[1.2, 1.4], [], [1.6], [3.4]],
"phi": [[0.3, 0.4], [], [0.5], [0.6]],
"energy": [[25, 25.5], [], [26], [30]]
}
))
assert record_arrays_equal(a * (-1), -a)
boosted = a.boost(-a.boostvec)
assert ak.all(abs(boosted.x) < ATOL)
assert ak.all(abs(boosted.y) < ATOL)
assert ak.all(abs(boosted.z) < ATOL)
| [
"awkward.all",
"awkward.Array",
"awkward.zip",
"awkward.fields"
]
| [((254, 364), 'awkward.zip', 'ak.zip', (["{'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]]}"], {'with_name': '"""TwoVector"""', 'highlevel': '(False)'}), "({'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]]},\n with_name='TwoVector', highlevel=False)\n", (260, 364), True, 'import awkward as ak\n'), ((433, 470), 'awkward.Array', 'ak.Array', (['a'], {'behavior': 'vector.behavior'}), '(a, behavior=vector.behavior)\n', (441, 470), True, 'import awkward as ak\n'), ((479, 597), 'awkward.zip', 'ak.zip', (["{'x': [[11, 12], [], [13], [14]], 'y': [[15, 16], [], [17], [18]]}"], {'with_name': '"""TwoVector"""', 'highlevel': '(False)'}), "({'x': [[11, 12], [], [13], [14]], 'y': [[15, 16], [], [17], [18]]},\n with_name='TwoVector', highlevel=False)\n", (485, 597), True, 'import awkward as ak\n'), ((666, 703), 'awkward.Array', 'ak.Array', (['b'], {'behavior': 'vector.behavior'}), '(b, behavior=vector.behavior)\n', (674, 703), True, 'import awkward as ak\n'), ((1818, 1943), 'awkward.zip', 'ak.zip', (["{'r': [[1, 2], [], [3], [4]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]}"], {'with_name': '"""PolarTwoVector"""', 'highlevel': '(False)'}), "({'r': [[1, 2], [], [3], [4]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]},\n with_name='PolarTwoVector', highlevel=False)\n", (1824, 1943), True, 'import awkward as ak\n'), ((2013, 2050), 'awkward.Array', 'ak.Array', (['a'], {'behavior': 'vector.behavior'}), '(a, behavior=vector.behavior)\n', (2021, 2050), True, 'import awkward as ak\n'), ((2227, 2271), 'awkward.all', 'ak.all', (['((a * -2).r == [[2, 4], [], [6], [8]])'], {}), '((a * -2).r == [[2, 4], [], [6], [8]])\n', (2233, 2271), True, 'import awkward as ak\n'), ((2750, 2777), 'awkward.all', 'ak.all', (['(a.unit.phi == a.phi)'], {}), '(a.unit.phi == a.phi)\n', (2756, 2777), True, 'import awkward as ak\n'), ((2813, 2957), 'awkward.zip', 'ak.zip', (["{'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]], 'z': [[9, 10], [\n ], [11], [12]]}"], {'with_name': '"""ThreeVector"""', 'highlevel': '(False)'}), "({'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]], 'z': [[9,\n 10], [], [11], [12]]}, with_name='ThreeVector', highlevel=False)\n", (2819, 2957), True, 'import awkward as ak\n'), ((3038, 3075), 'awkward.Array', 'ak.Array', (['a'], {'behavior': 'vector.behavior'}), '(a, behavior=vector.behavior)\n', (3046, 3075), True, 'import awkward as ak\n'), ((3084, 3231), 'awkward.zip', 'ak.zip', (["{'x': [[4, 1], [], [10], [11]], 'y': [[17, 7], [], [11], [6]], 'z': [[9, 11\n ], [], [5], [16]]}"], {'with_name': '"""ThreeVector"""', 'highlevel': '(False)'}), "({'x': [[4, 1], [], [10], [11]], 'y': [[17, 7], [], [11], [6]], 'z':\n [[9, 11], [], [5], [16]]}, with_name='ThreeVector', highlevel=False)\n", (3090, 3231), True, 'import awkward as ak\n'), ((3312, 3349), 'awkward.Array', 'ak.Array', (['b'], {'behavior': 'vector.behavior'}), '(b, behavior=vector.behavior)\n', (3320, 3349), True, 'import awkward as ak\n'), ((5100, 5288), 'awkward.zip', 'ak.zip', (["{'rho': [[1.0, 2.0], [], [3.0], [4.0]], 'theta': [[1.2, 0.7], [], [1.8], [\n 1.9]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]}"], {'with_name': '"""SphericalThreeVector"""', 'highlevel': '(False)'}), "({'rho': [[1.0, 2.0], [], [3.0], [4.0]], 'theta': [[1.2, 0.7], [], [\n 1.8], [1.9]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]}, with_name=\n 'SphericalThreeVector', highlevel=False)\n", (5106, 5288), True, 'import awkward as ak\n'), ((5364, 5401), 'awkward.Array', 'ak.Array', (['a'], {'behavior': 'vector.behavior'}), '(a, behavior=vector.behavior)\n', (5372, 5401), True, 
'import awkward as ak\n'), ((5616, 5800), 'awkward.zip', 'ak.zip', (["{'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]], 'z': [[9, 10], [\n ], [11], [12]], 't': [[50, 51], [], [52], [53]]}"], {'with_name': '"""LorentzVector"""', 'highlevel': '(False)'}), "({'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]], 'z': [[9,\n 10], [], [11], [12]], 't': [[50, 51], [], [52], [53]]}, with_name=\n 'LorentzVector', highlevel=False)\n", (5622, 5800), True, 'import awkward as ak\n'), ((5888, 5925), 'awkward.Array', 'ak.Array', (['a'], {'behavior': 'vector.behavior'}), '(a, behavior=vector.behavior)\n', (5896, 5925), True, 'import awkward as ak\n'), ((5934, 6121), 'awkward.zip', 'ak.zip', (["{'x': [[4, 1], [], [10], [11]], 'y': [[17, 7], [], [11], [6]], 'z': [[9, 11\n ], [], [5], [16]], 't': [[60, 61], [], [62], [63]]}"], {'with_name': '"""LorentzVector"""', 'highlevel': '(False)'}), "({'x': [[4, 1], [], [10], [11]], 'y': [[17, 7], [], [11], [6]], 'z':\n [[9, 11], [], [5], [16]], 't': [[60, 61], [], [62], [63]]}, with_name=\n 'LorentzVector', highlevel=False)\n", (5940, 6121), True, 'import awkward as ak\n'), ((6209, 6246), 'awkward.Array', 'ak.Array', (['b'], {'behavior': 'vector.behavior'}), '(b, behavior=vector.behavior)\n', (6217, 6246), True, 'import awkward as ak\n'), ((7931, 8148), 'awkward.zip', 'ak.zip', (["{'pt': [[1, 2], [], [3], [4]], 'eta': [[1.2, 1.4], [], [1.6], [3.4]], 'phi':\n [[0.3, 0.4], [], [0.5], [0.6]], 'mass': [[0.5, 0.9], [], [1.3], [4.5]]}"], {'with_name': '"""PtEtaPhiMLorentzVector"""', 'highlevel': '(False)'}), "({'pt': [[1, 2], [], [3], [4]], 'eta': [[1.2, 1.4], [], [1.6], [3.4]],\n 'phi': [[0.3, 0.4], [], [0.5], [0.6]], 'mass': [[0.5, 0.9], [], [1.3],\n [4.5]]}, with_name='PtEtaPhiMLorentzVector', highlevel=False)\n", (7937, 8148), True, 'import awkward as ak\n'), ((8237, 8274), 'awkward.Array', 'ak.Array', (['a'], {'behavior': 'vector.behavior'}), '(a, behavior=vector.behavior)\n', (8245, 8274), True, 'import awkward as ak\n'), ((9201, 9417), 'awkward.zip', 'ak.zip', (["{'pt': [[1, 2], [], [3], [4]], 'eta': [[1.2, 1.4], [], [1.6], [3.4]], 'phi':\n [[0.3, 0.4], [], [0.5], [0.6]], 'energy': [[50, 51], [], [52], [60]]}"], {'with_name': '"""PtEtaPhiELorentzVector"""', 'highlevel': '(False)'}), "({'pt': [[1, 2], [], [3], [4]], 'eta': [[1.2, 1.4], [], [1.6], [3.4]],\n 'phi': [[0.3, 0.4], [], [0.5], [0.6]], 'energy': [[50, 51], [], [52], [\n 60]]}, with_name='PtEtaPhiELorentzVector', highlevel=False)\n", (9207, 9417), True, 'import awkward as ak\n'), ((9505, 9542), 'awkward.Array', 'ak.Array', (['a'], {'behavior': 'vector.behavior'}), '(a, behavior=vector.behavior)\n', (9513, 9542), True, 'import awkward as ak\n'), ((741, 815), 'awkward.zip', 'ak.zip', (["{'x': [[-1, -2], [], [-3], [-4]], 'y': [[-5, -6], [], [-7], [-8]]}"], {}), "({'x': [[-1, -2], [], [-3], [-4]], 'y': [[-5, -6], [], [-7], [-8]]})\n", (747, 815), True, 'import awkward as ak\n'), ((904, 978), 'awkward.zip', 'ak.zip', (["{'x': [[12, 14], [], [16], [18]], 'y': [[20, 22], [], [24], [26]]}"], {}), "({'x': [[12, 14], [], [16], [18]], 'y': [[20, 22], [], [24], [26]]})\n", (910, 978), True, 'import awkward as ak\n'), ((1066, 1153), 'awkward.zip', 'ak.zip', (["{'x': [[-10, -10], [], [-10], [-10]], 'y': [[-10, -10], [], [-10], [-10]]}"], {}), "({'x': [[-10, -10], [], [-10], [-10]], 'y': [[-10, -10], [], [-10], [\n -10]]})\n", (1072, 1153), True, 'import awkward as ak\n'), ((1237, 1307), 'awkward.zip', 'ak.zip', (["{'x': [[2, 4], [], [6], [8]], 'y': [[10, 12], [], [14], [16]]}"], {}), "({'x': [[2, 4], [], [6], 
[8]], 'y': [[10, 12], [], [14], [16]]})\n", (1243, 1307), True, 'import awkward as ak\n'), ((1395, 1469), 'awkward.zip', 'ak.zip', (["{'x': [[0.5, 1], [], [1.5], [2]], 'y': [[2.5, 3], [], [3.5], [4]]}"], {}), "({'x': [[0.5, 1], [], [1.5], [2]], 'y': [[2.5, 3], [], [3.5], [4]]})\n", (1401, 1469), True, 'import awkward as ak\n'), ((1561, 1600), 'awkward.Array', 'ak.Array', (['[[86, 120], [], [158], [200]]'], {}), '([[86, 120], [], [158], [200]])\n', (1569, 1600), True, 'import awkward as ak\n'), ((1643, 1682), 'awkward.Array', 'ak.Array', (['[[86, 120], [], [158], [200]]'], {}), '([[86, 120], [], [158], [200]])\n', (1651, 1682), True, 'import awkward as ak\n'), ((2090, 2166), 'awkward.zip', 'ak.zip', (["{'r': [[2, 4], [], [6], [8]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]}"], {}), "({'r': [[2, 4], [], [6], [8]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]})\n", (2096, 2166), True, 'import awkward as ak\n'), ((2474, 2559), 'awkward.zip', 'ak.zip', (["{'r': [[0.5, 1], [], [1.5], [2]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]}"], {}), "({'r': [[0.5, 1], [], [1.5], [2]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]]}\n )\n", (2480, 2559), True, 'import awkward as ak\n'), ((3387, 3501), 'awkward.zip', 'ak.zip', (["{'x': [[-1, -2], [], [-3], [-4]], 'y': [[-5, -6], [], [-7], [-8]], 'z': [[-\n 9, -10], [], [-11], [-12]]}"], {}), "({'x': [[-1, -2], [], [-3], [-4]], 'y': [[-5, -6], [], [-7], [-8]],\n 'z': [[-9, -10], [], [-11], [-12]]})\n", (3393, 3501), True, 'import awkward as ak\n'), ((3598, 3707), 'awkward.zip', 'ak.zip', (["{'x': [[5, 3], [], [13], [15]], 'y': [[22, 13], [], [18], [14]], 'z': [[18,\n 21], [], [16], [28]]}"], {}), "({'x': [[5, 3], [], [13], [15]], 'y': [[22, 13], [], [18], [14]], 'z':\n [[18, 21], [], [16], [28]]})\n", (3604, 3707), True, 'import awkward as ak\n'), ((3803, 3911), 'awkward.zip', 'ak.zip', (["{'x': [[-3, 1], [], [-7], [-7]], 'y': [[-12, -1], [], [-4], [2]], 'z': [[0,\n -1], [], [6], [-4]]}"], {}), "({'x': [[-3, 1], [], [-7], [-7]], 'y': [[-12, -1], [], [-4], [2]],\n 'z': [[0, -1], [], [6], [-4]]})\n", (3809, 3911), True, 'import awkward as ak\n'), ((4008, 4115), 'awkward.zip', 'ak.zip', (["{'x': [[2, 4], [], [6], [8]], 'y': [[10, 12], [], [14], [16]], 'z': [[18, \n 20], [], [22], [24]]}"], {}), "({'x': [[2, 4], [], [6], [8]], 'y': [[10, 12], [], [14], [16]], 'z':\n [[18, 20], [], [22], [24]]})\n", (4014, 4115), True, 'import awkward as ak\n'), ((4211, 4322), 'awkward.zip', 'ak.zip', (["{'x': [[0.5, 1], [], [1.5], [2]], 'y': [[2.5, 3], [], [3.5], [4]], 'z': [[\n 4.5, 5], [], [5.5], [6]]}"], {}), "({'x': [[0.5, 1], [], [1.5], [2]], 'y': [[2.5, 3], [], [3.5], [4]],\n 'z': [[4.5, 5], [], [5.5], [6]]})\n", (4217, 4322), True, 'import awkward as ak\n'), ((4569, 4686), 'awkward.zip', 'ak.zip', (["{'x': [[-108, -4], [], [-86], [56]], 'y': [[27, -12], [], [95], [68]], 'z':\n [[-3, 8], [], [-37], [-64]]}"], {}), "({'x': [[-108, -4], [], [-86], [56]], 'y': [[27, -12], [], [95], [68]\n ], 'z': [[-3, 8], [], [-37], [-64]]})\n", (4575, 4686), True, 'import awkward as ak\n'), ((4786, 4901), 'awkward.zip', 'ak.zip', (["{'x': [[108, 4], [], [86], [-56]], 'y': [[-27, 12], [], [-95], [-68]], 'z':\n [[3, -8], [], [37], [64]]}"], {}), "({'x': [[108, 4], [], [86], [-56]], 'y': [[-27, 12], [], [-95], [-68]\n ], 'z': [[3, -8], [], [37], [64]]})\n", (4792, 4901), True, 'import awkward as ak\n'), ((6284, 6435), 'awkward.zip', 'ak.zip', (["{'x': [[-1, -2], [], [-3], [-4]], 'y': [[-5, -6], [], [-7], [-8]], 'z': [[-\n 9, -10], [], [-11], [-12]], 't': [[-50, -51], [], [-52], [-53]]}"], {}), "({'x': [[-1, -2], [], 
[-3], [-4]], 'y': [[-5, -6], [], [-7], [-8]],\n 'z': [[-9, -10], [], [-11], [-12]], 't': [[-50, -51], [], [-52], [-53]]})\n", (6290, 6435), True, 'import awkward as ak\n'), ((6544, 6690), 'awkward.zip', 'ak.zip', (["{'x': [[5, 3], [], [13], [15]], 'y': [[22, 13], [], [18], [14]], 'z': [[18,\n 21], [], [16], [28]], 't': [[110, 112], [], [114], [116]]}"], {}), "({'x': [[5, 3], [], [13], [15]], 'y': [[22, 13], [], [18], [14]], 'z':\n [[18, 21], [], [16], [28]], 't': [[110, 112], [], [114], [116]]})\n", (6550, 6690), True, 'import awkward as ak\n'), ((6798, 6943), 'awkward.zip', 'ak.zip', (["{'x': [[-3, 1], [], [-7], [-7]], 'y': [[-12, -1], [], [-4], [2]], 'z': [[0,\n -1], [], [6], [-4]], 't': [[-10, -10], [], [-10], [-10]]}"], {}), "({'x': [[-3, 1], [], [-7], [-7]], 'y': [[-12, -1], [], [-4], [2]],\n 'z': [[0, -1], [], [6], [-4]], 't': [[-10, -10], [], [-10], [-10]]})\n", (6804, 6943), True, 'import awkward as ak\n'), ((7052, 7196), 'awkward.zip', 'ak.zip', (["{'x': [[2, 4], [], [6], [8]], 'y': [[10, 12], [], [14], [16]], 'z': [[18, \n 20], [], [22], [24]], 't': [[100, 102], [], [104], [106]]}"], {}), "({'x': [[2, 4], [], [6], [8]], 'y': [[10, 12], [], [14], [16]], 'z':\n [[18, 20], [], [22], [24]], 't': [[100, 102], [], [104], [106]]})\n", (7058, 7196), True, 'import awkward as ak\n'), ((7304, 7452), 'awkward.zip', 'ak.zip', (["{'x': [[0.5, 1], [], [1.5], [2]], 'y': [[2.5, 3], [], [3.5], [4]], 'z': [[\n 4.5, 5], [], [5.5], [6]], 't': [[25, 25.5], [], [26], [26.5]]}"], {}), "({'x': [[0.5, 1], [], [1.5], [2]], 'y': [[2.5, 3], [], [3.5], [4]],\n 'z': [[4.5, 5], [], [5.5], [6]], 't': [[25, 25.5], [], [26], [26.5]]})\n", (7310, 7452), True, 'import awkward as ak\n'), ((7562, 7664), 'awkward.zip', 'ak.zip', (["{'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]], 'z': [[9, 10], [\n ], [11], [12]]}"], {}), "({'x': [[1, 2], [], [3], [4]], 'y': [[5, 6], [], [7], [8]], 'z': [[9,\n 10], [], [11], [12]]})\n", (7568, 7664), True, 'import awkward as ak\n'), ((8710, 8883), 'awkward.zip', 'ak.zip', (["{'pt': [[0.5, 1], [], [1.5], [2]], 'eta': [[1.2, 1.4], [], [1.6], [3.4]],\n 'phi': [[0.3, 0.4], [], [0.5], [0.6]], 'mass': [[0.25, 0.45], [], [0.65\n ], [2.25]]}"], {}), "({'pt': [[0.5, 1], [], [1.5], [2]], 'eta': [[1.2, 1.4], [], [1.6], [\n 3.4]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]], 'mass': [[0.25, 0.45], [],\n [0.65], [2.25]]})\n", (8716, 8883), True, 'import awkward as ak\n'), ((9978, 10147), 'awkward.zip', 'ak.zip', (["{'pt': [[0.5, 1], [], [1.5], [2]], 'eta': [[1.2, 1.4], [], [1.6], [3.4]],\n 'phi': [[0.3, 0.4], [], [0.5], [0.6]], 'energy': [[25, 25.5], [], [26],\n [30]]}"], {}), "({'pt': [[0.5, 1], [], [1.5], [2]], 'eta': [[1.2, 1.4], [], [1.6], [\n 3.4]], 'phi': [[0.3, 0.4], [], [0.5], [0.6]], 'energy': [[25, 25.5], [],\n [26], [30]]})\n", (9984, 10147), True, 'import awkward as ak\n'), ((139, 151), 'awkward.fields', 'ak.fields', (['a'], {}), '(a)\n', (148, 151), True, 'import awkward as ak\n'), ((155, 167), 'awkward.fields', 'ak.fields', (['b'], {}), '(b)\n', (164, 167), True, 'import awkward as ak\n'), ((4411, 4451), 'awkward.Array', 'ak.Array', (['[[170, 154], [], [162], [284]]'], {}), '([[170, 154], [], [162], [284]])\n', (4419, 4451), True, 'import awkward as ak\n'), ((4483, 4523), 'awkward.Array', 'ak.Array', (['[[170, 154], [], [162], [284]]'], {}), '([[170, 154], [], [162], [284]])\n', (4491, 4523), True, 'import awkward as ak\n'), ((8311, 8343), 'awkward.Array', 'ak.Array', (['[[2, 4], [], [6], [8]]'], {}), '([[2, 4], [], [6], [8]])\n', (8319, 8343), True, 'import awkward as ak\n'), 
((9579, 9611), 'awkward.Array', 'ak.Array', (['[[2, 4], [], [6], [8]]'], {}), '([[2, 4], [], [6], [8]])\n', (9587, 9611), True, 'import awkward as ak\n'), ((177, 197), 'awkward.all', 'ak.all', (['(a[f] == b[f])'], {}), '(a[f] == b[f])\n', (183, 197), True, 'import awkward as ak\n'), ((2309, 2394), 'awkward.Array', 'ak.Array', (['[[-2.8415926535, -2.7415926535], [], [-2.6415926535], [-2.5415926535]]'], {}), '([[-2.8415926535, -2.7415926535], [], [-2.6415926535], [-2.5415926535]]\n )\n', (2317, 2394), True, 'import awkward as ak\n'), ((8382, 8468), 'awkward.Array', 'ak.Array', (['[[2.556488570968, 2.65804615357], [], [2.74315571762], [3.07487087733]]'], {}), '([[2.556488570968, 2.65804615357], [], [2.74315571762], [\n 3.07487087733]])\n', (8390, 8468), True, 'import awkward as ak\n'), ((8545, 8630), 'awkward.Array', 'ak.Array', (['[[-2.8415926535, -2.7415926535], [], [-2.6415926535], [-2.5415926535]]'], {}), '([[-2.8415926535, -2.7415926535], [], [-2.6415926535], [-2.5415926535]]\n )\n', (8553, 8630), True, 'import awkward as ak\n'), ((9650, 9736), 'awkward.Array', 'ak.Array', (['[[2.556488570968, 2.65804615357], [], [2.74315571762], [3.07487087733]]'], {}), '([[2.556488570968, 2.65804615357], [], [2.74315571762], [\n 3.07487087733]])\n', (9658, 9736), True, 'import awkward as ak\n'), ((9813, 9898), 'awkward.Array', 'ak.Array', (['[[-2.8415926535, -2.7415926535], [], [-2.6415926535], [-2.5415926535]]'], {}), '([[-2.8415926535, -2.7415926535], [], [-2.6415926535], [-2.5415926535]]\n )\n', (9821, 9898), True, 'import awkward as ak\n'), ((207, 219), 'awkward.fields', 'ak.fields', (['a'], {}), '(a)\n', (216, 219), True, 'import awkward as ak\n')] |
from collections import defaultdict
import graphene
import pytest
from django.core.exceptions import ValidationError
from ....shipping.error_codes import ShippingErrorCode
from ..mutations import BaseChannelListingMutation
def test_validate_duplicated_channel_ids(channel_PLN, channel_USD):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
second_channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
errors = defaultdict(list)
# when
result = BaseChannelListingMutation.validate_duplicated_channel_ids(
[channel_id],
[second_channel_id],
errors,
ShippingErrorCode.DUPLICATED_INPUT_ITEM.value,
)
# then
assert result is None
assert errors["input"] == []
def test_validate_duplicated_channel_ids_with_duplicates(channel_PLN):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
second_channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
error_code = ShippingErrorCode.DUPLICATED_INPUT_ITEM.value
errors = defaultdict(list)
# when
result = BaseChannelListingMutation.validate_duplicated_channel_ids(
[channel_id], [second_channel_id], errors, error_code
)
# then
assert result is None
assert errors["input"][0].code == error_code
def test_validate_duplicated_channel_values(channel_PLN, channel_USD):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
second_channel_id = graphene.Node.to_global_id("Channel", channel_USD.id)
error_code = ShippingErrorCode.DUPLICATED_INPUT_ITEM.value
errors = defaultdict(list)
field = "add_channels"
# when
result = BaseChannelListingMutation.validate_duplicated_channel_values(
[channel_id, second_channel_id], field, errors, error_code
)
# then
assert result is None
assert errors[field] == []
def test_validate_duplicated_channel_values_with_duplicates(channel_PLN):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
second_channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
error_code = ShippingErrorCode.DUPLICATED_INPUT_ITEM.value
errors = defaultdict(list)
field = "add_channels"
# when
result = BaseChannelListingMutation.validate_duplicated_channel_values(
[channel_id, second_channel_id], field, errors, error_code
)
# then
assert result is None
assert errors[field][0].code == error_code
def test_clean_channels_add_channels(channel_PLN):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
error_code = ShippingErrorCode.DUPLICATED_INPUT_ITEM.value
errors = defaultdict(list)
# when
result = BaseChannelListingMutation.clean_channels(
None, {"add_channels": [{"channel_id": channel_id}]}, errors, error_code
)
# then
assert result == {
"add_channels": [{"channel_id": channel_id, "channel": channel_PLN}],
"remove_channels": [],
}
assert errors["input"] == []
def test_clean_channels_remove_channels(channel_PLN):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
error_code = ShippingErrorCode.DUPLICATED_INPUT_ITEM.value
errors = defaultdict(list)
# when
result = BaseChannelListingMutation.clean_channels(
None, {"remove_channels": [channel_id]}, errors, error_code
)
# then
assert result == {"add_channels": [], "remove_channels": [str(channel_PLN.id)]}
assert errors["input"] == []
def test_clean_channels_with_errors(channel_PLN):
# given
channel_id = graphene.Node.to_global_id("Channel", channel_PLN.id)
error_code = ShippingErrorCode.DUPLICATED_INPUT_ITEM.value
errors = defaultdict(list)
# when
result = BaseChannelListingMutation.clean_channels(
None, {"remove_channels": [channel_id, channel_id]}, errors, error_code
)
# then
assert result == {}
assert errors["remove_channels"][0].code == error_code
def test_clean_channels_invalid_object_type(channel_PLN):
# given
channel_id = graphene.Node.to_global_id("Product", channel_PLN.id)
error_code = ShippingErrorCode.GRAPHQL_ERROR.value
errors = defaultdict(list)
# when
with pytest.raises(ValidationError) as error:
BaseChannelListingMutation.clean_channels(
None, {"remove_channels": [channel_id]}, errors, error_code
)
# then
assert (
error.value.error_dict["remove_channels"][0].message
== f"Must receive Channel id: {channel_id}."
)
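# As the assertions above show, clean_channels() resolves "add_channels"
# entries to their Channel instances, reduces "remove_channels" to plain string
# ids, and collects problems into the `errors` defaultdict rather than raising,
# except for ids of the wrong object type, which raise a ValidationError.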
| [
"graphene.Node.to_global_id",
"collections.defaultdict",
"pytest.raises"
]
| [((324, 377), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_USD.id'], {}), "('Channel', channel_USD.id)\n", (350, 377), False, 'import graphene\n'), ((402, 455), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (428, 455), False, 'import graphene\n'), ((469, 486), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (480, 486), False, 'from collections import defaultdict\n'), ((873, 926), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (899, 926), False, 'import graphene\n'), ((951, 1004), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (977, 1004), False, 'import graphene\n'), ((1081, 1098), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1092, 1098), False, 'from collections import defaultdict\n'), ((1441, 1494), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (1467, 1494), False, 'import graphene\n'), ((1519, 1572), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_USD.id'], {}), "('Channel', channel_USD.id)\n", (1545, 1572), False, 'import graphene\n'), ((1649, 1666), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1660, 1666), False, 'from collections import defaultdict\n'), ((2029, 2082), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (2055, 2082), False, 'import graphene\n'), ((2107, 2160), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (2133, 2160), False, 'import graphene\n'), ((2237, 2254), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2248, 2254), False, 'from collections import defaultdict\n'), ((2610, 2663), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (2636, 2663), False, 'import graphene\n'), ((2740, 2757), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2751, 2757), False, 'from collections import defaultdict\n'), ((3181, 3234), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (3207, 3234), False, 'import graphene\n'), ((3311, 3328), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3322, 3328), False, 'from collections import defaultdict\n'), ((3686, 3739), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Channel"""', 'channel_PLN.id'], {}), "('Channel', channel_PLN.id)\n", (3712, 3739), False, 'import graphene\n'), ((3816, 3833), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (3827, 3833), False, 'from collections import defaultdict\n'), ((4177, 4230), 'graphene.Node.to_global_id', 'graphene.Node.to_global_id', (['"""Product"""', 'channel_PLN.id'], {}), "('Product', channel_PLN.id)\n", (4203, 4230), False, 'import graphene\n'), ((4299, 4316), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (4310, 4316), False, 'from collections import defaultdict\n'), ((4338, 4368), 'pytest.raises', 'pytest.raises', 
(['ValidationError'], {}), '(ValidationError)\n', (4351, 4368), False, 'import pytest\n')] |
# -*- coding: utf-8 -*-
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors: <NAME> / Coopérative ARTEFACTS <<EMAIL>>
import requests
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect, Http404
from django.utils.translation import gettext as _
from requests.exceptions import RequestException
from rest_framework import status
from francoralite.apps.francoralite_front.errors import APPLICATION_ERRORS
from .views.related import (
write_fond_related,
write_mission_related,
write_collection_related,
write_item_related)
HTTP_ERRORS = {
status.HTTP_400_BAD_REQUEST: APPLICATION_ERRORS['HTTP_API_400'],
status.HTTP_401_UNAUTHORIZED: APPLICATION_ERRORS['HTTP_API_401'],
status.HTTP_403_FORBIDDEN: APPLICATION_ERRORS['HTTP_API_403'],
status.HTTP_404_NOT_FOUND: APPLICATION_ERRORS['HTTP_API_404'],
status.HTTP_409_CONFLICT: APPLICATION_ERRORS['HTTP_API_409'],
}
PROBLEM_NAMES = [
"legal_rights",
"recording_context",
"location_gis",
]
class UserMessageError(RequestException): pass
def get_token_header(request):
"""
TODO: À renseigner
"""
auth_token = request.session.get('oidc_access_token')
if auth_token:
return {'Authorization': 'Bearer ' + auth_token}
else:
return {}
def check_status_code(status_code, allowed_codes=(status.HTTP_200_OK,)):
"""
TODO: À renseigner
"""
if status_code == status.HTTP_403_FORBIDDEN:
raise PermissionDenied(_('Accès interdit.'))
if status_code == status.HTTP_404_NOT_FOUND:
raise Http404(_('Cette fiche n’existe pas.'))
if status_code == status.HTTP_409_CONFLICT:
raise UserMessageError(_('Une fiche avec ce code existe déjà.'))
if status.HTTP_400_BAD_REQUEST <= status_code < status.HTTP_500_INTERNAL_SERVER_ERROR:
raise RequestException()
if status_code not in allowed_codes:
raise Exception(HTTP_ERRORS[status_code])
def handle_message_from_exception(request, exception):
"""
TODO: À renseigner
"""
if isinstance(exception, UserMessageError):
messages.add_message(request, messages.ERROR, exception)
elif exception is not None:
messages.add_message(request, messages.ERROR,
_('Une erreur indéterminée est survenue.'))
def request_api(endpoint):
"""
TODO: À renseigner
"""
response = requests.get(settings.FRONT_HOST_URL + endpoint)
check_status_code(response.status_code)
return response.json()
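# Illustrative only (the endpoint and id below are hypothetical, not taken from
# this module): a read goes through request_api(), which raises via
# check_status_code() on anything but HTTP 200 instead of returning an error
# payload.
#
#     institution = request_api('/api/institution/1')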
def post(entity, form_entity, request, *args, **kwargs):
"""
TODO: À renseigner
"""
form = form_entity(request.POST, request.FILES)
entity_api = entity
entity_url = entity
# Processing the problem names entities
if entity in PROBLEM_NAMES:
entity_api = entity.replace('_', '')
# Processing URL for Mission entity
if entity == 'fond':
entity_url = 'institution/' + kwargs['id_institution'] \
+ '/' + entity
# Processing URL for Mission entity
if entity == 'mission':
entity_url = 'institution/' + kwargs['id_institution'] \
+ '/fond/' + kwargs['id_fond']\
+ '/' + entity
# Processing URL for Collection entity
if entity == 'collection':
entity_url = 'institution/' + kwargs['id_institution'] \
+ '/fond/' + kwargs['id_fond']\
+ '/mission/' + kwargs['id_mission'] \
+ '/' + entity
# Processing URL for Item entity
if entity == 'item':
entity_url = 'institution/' + kwargs['id_institution'] \
+ '/fond/' + kwargs['id_fond']\
+ '/mission/' + kwargs['id_mission'] \
+ '/collection/' + kwargs['id_collection'] \
+ '/' + entity
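    # For an item, for example, the later redirect target therefore has the
    # shape /institution/<id>/fond/<id>/mission/<id>/collection/<id>/item/add
    # (ids are illustrative).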
# Problem with old Telemeta fields/entities
if form.is_valid():
if entity == 'item':
# Concatenate domains
form.cleaned_data['domain'] = ''.join(form.cleaned_data['domain'])
# Remove the 'file' entry : if not, there some bugs
del form.cleaned_data['file']
try:
post_api(settings.FRONT_HOST_URL + '/api/' + entity_api,
data=form.cleaned_data,
request=request,
entity=entity)
if entity == 'fond':
return HttpResponseRedirect(
'/institution/' +
str(form.cleaned_data['institution']))
# Previous page ( not an edit page ... )
if len(request.session["referers"]) > 1:
try:
for referer in request.session["referers"]:
if 'add' not in referer.split('/'):
return HttpResponseRedirect(referer)
except Exception:
return HttpResponseRedirect('/' + entity)
return HttpResponseRedirect('/' + entity)
except RequestException as e:
handle_message_from_exception(request, e)
return HttpResponseRedirect('/' + entity_url + '/add')
return HttpResponseRedirect('/' + entity_url + '/add')
def post_api(endpoint, data, request, entity):
"""
TODO: À renseigner
"""
headers = get_token_header(request=request)
response = requests.post(
endpoint,
data=data,
files=request.FILES,
headers=headers,
)
check_status_code(response.status_code,
allowed_codes=(status.HTTP_200_OK, status.HTTP_201_CREATED))
entity_json = response.json()
if entity == "fond":
write_fond_related(entity_json, request, headers)
if entity == "mission":
write_mission_related(entity_json, request, headers)
if entity == "collection":
write_collection_related(entity_json, request, headers)
if entity == "item":
write_item_related(entity_json, request, headers)
return entity_json
def patch(entity, form_entity, request, *args, **kwargs):
"""
TODO: À renseigner
"""
form = form_entity(request.POST)
if entity == 'item':
form.fields['file'].required = False
id = kwargs.get('id')
entity_api = entity
if entity in PROBLEM_NAMES:
entity_api = entity.replace('_', '')
if form.is_valid():
if entity == "collection":
form.cleaned_data['recorded_from_year'] = \
form.data['recorded_from_year']
form.cleaned_data['recorded_to_year'] = \
form.data['recorded_to_year']
if form.cleaned_data['year_published'] is None:
form.cleaned_data['year_published'] = ''
if entity == "item":
# Concatenate domains
form.cleaned_data['domain'] = ''.join(form.cleaned_data['domain'])
try:
response = patch_api(
settings.FRONT_HOST_URL + '/api/' + entity_api + '/' + str(id),
data=form.cleaned_data,
request=request,
entity=entity
)
            if response.status_code != status.HTTP_200_OK:
return HttpResponseRedirect('/' + entity + '/edit/' +
str(id))
# Previous page ( not an edit page ... )
if len(request.session["referers"]) > 1:
for referer in request.session["referers"]:
if 'edit' not in referer.split('/'):
return HttpResponseRedirect(referer)
return HttpResponseRedirect('/' + entity)
except RequestException as e:
handle_message_from_exception(request, e)
return HttpResponseRedirect('/' + entity + '/edit/' + str(id))
return HttpResponseRedirect('/' + entity + '/edit/' + str(id))
def patch_api(endpoint, data, request, entity):
"""
TODO: À renseigner
"""
response = requests.patch(
endpoint,
data=data,
headers=get_token_header(request=request),
)
check_status_code(response.status_code)
entity_json = response.json()
if entity == "fond":
write_fond_related(
entity_json,
request,
headers=get_token_header(request=request),
)
if entity == "mission":
write_mission_related(
entity_json,
request,
headers=get_token_header(request=request),
)
if entity == "collection":
write_collection_related(
entity_json,
request,
headers=get_token_header(request=request),
)
if entity == "item":
write_item_related(
entity_json,
request,
headers=get_token_header(request=request),
)
return response
def delete(entity, request, *args, **kwargs):
"""
TODO: À renseigner
"""
id = kwargs.get('id')
entity_api = entity
if entity in PROBLEM_NAMES:
entity_api = entity.replace('_', '')
try:
delete_api(
settings.FRONT_HOST_URL + '/api/' + entity_api + '/' + str(id),
request=request,
)
return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
except RequestException as e:
handle_message_from_exception(request, e)
return HttpResponseRedirect('/' + entity)
def delete_api(endpoint, request):
"""
TODO: À renseigner
"""
response = requests.delete(
endpoint,
headers=get_token_header(request=request),
)
check_status_code(response.status_code)
return response
| [
"django.http.HttpResponseRedirect",
"requests.post",
"django.utils.translation.gettext",
"requests.get",
"django.contrib.messages.add_message",
"requests.exceptions.RequestException"
]
| [((2605, 2653), 'requests.get', 'requests.get', (['(settings.FRONT_HOST_URL + endpoint)'], {}), '(settings.FRONT_HOST_URL + endpoint)\n', (2617, 2653), False, 'import requests\n'), ((5312, 5359), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/' + entity_url + '/add')"], {}), "('/' + entity_url + '/add')\n", (5332, 5359), False, 'from django.http import HttpResponseRedirect, Http404\n'), ((5512, 5584), 'requests.post', 'requests.post', (['endpoint'], {'data': 'data', 'files': 'request.FILES', 'headers': 'headers'}), '(endpoint, data=data, files=request.FILES, headers=headers)\n', (5525, 5584), False, 'import requests\n'), ((2040, 2058), 'requests.exceptions.RequestException', 'RequestException', ([], {}), '()\n', (2056, 2058), False, 'from requests.exceptions import RequestException\n'), ((2304, 2360), 'django.contrib.messages.add_message', 'messages.add_message', (['request', 'messages.ERROR', 'exception'], {}), '(request, messages.ERROR, exception)\n', (2324, 2360), False, 'from django.contrib import messages\n'), ((1686, 1706), 'django.utils.translation.gettext', '_', (['"""Accès interdit."""'], {}), "('Accès interdit.')\n", (1687, 1706), True, 'from django.utils.translation import gettext as _\n'), ((1780, 1810), 'django.utils.translation.gettext', '_', (['"""Cette fiche n’existe pas."""'], {}), "('Cette fiche n’existe pas.')\n", (1781, 1810), True, 'from django.utils.translation import gettext as _\n'), ((1892, 1932), 'django.utils.translation.gettext', '_', (['"""Une fiche avec ce code existe déjà."""'], {}), "('Une fiche avec ce code existe déjà.')\n", (1893, 1932), True, 'from django.utils.translation import gettext as _\n'), ((5105, 5139), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/' + entity)"], {}), "('/' + entity)\n", (5125, 5139), False, 'from django.http import HttpResponseRedirect, Http404\n'), ((7748, 7782), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/' + entity)"], {}), "('/' + entity)\n", (7768, 7782), False, 'from django.http import HttpResponseRedirect, Http404\n'), ((9539, 9573), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/' + entity)"], {}), "('/' + entity)\n", (9559, 9573), False, 'from django.http import HttpResponseRedirect, Http404\n'), ((2477, 2519), 'django.utils.translation.gettext', '_', (['"""Une erreur indéterminée est survenue."""'], {}), "('Une erreur indéterminée est survenue.')\n", (2478, 2519), True, 'from django.utils.translation import gettext as _\n'), ((5252, 5299), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/' + entity_url + '/add')"], {}), "('/' + entity_url + '/add')\n", (5272, 5299), False, 'from django.http import HttpResponseRedirect, Http404\n'), ((5051, 5085), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["('/' + entity)"], {}), "('/' + entity)\n", (5071, 5085), False, 'from django.http import HttpResponseRedirect, Http404\n'), ((7699, 7728), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['referer'], {}), '(referer)\n', (7719, 7728), False, 'from django.http import HttpResponseRedirect, Http404\n'), ((4960, 4989), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['referer'], {}), '(referer)\n', (4980, 4989), False, 'from django.http import HttpResponseRedirect, Http404\n')] |
"""
:mod:`meshes` -- Discretization
===============================
Everything related to meshes appropriate for the multigrid solver.
"""
# Copyright 2018-2020 The emg3d Developers.
#
# This file is part of emg3d.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import numpy as np
from copy import deepcopy
from scipy import optimize
__all__ = ['TensorMesh', 'get_hx_h0', 'get_cell_numbers', 'get_stretched_h',
'get_domain', 'get_hx']
class TensorMesh:
"""Rudimentary mesh for multigrid calculation.
The tensor-mesh :class:`discretize.TensorMesh` is a powerful tool,
including sophisticated mesh-generation possibilities in 1D, 2D, and 3D,
plotting routines, and much more. However, in the multigrid solver we have
to generate a mesh at each level, many times over and over again, and we
only need a very limited set of attributes. This tensor-mesh class provides
all required attributes. All attributes here are the same as their
counterparts in :class:`discretize.TensorMesh` (both in name and value).
.. warning::
This is a slimmed-down version of :class:`discretize.TensorMesh`, meant
principally for internal use by the multigrid modeller. It is highly
recommended to use :class:`discretize.TensorMesh` to create the input
meshes instead of this class. There are no input-checks carried out
here, and there is only one accepted input format for `h` and `x0`.
Parameters
----------
h : list of three ndarrays
Cell widths in [x, y, z] directions.
x0 : ndarray of dimension (3, )
Origin (x, y, z).
"""
def __init__(self, h, x0):
"""Initialize the mesh."""
self.x0 = x0
# Width of cells.
self.hx = h[0]
self.hy = h[1]
self.hz = h[2]
# Cell related properties.
self.nCx = int(self.hx.size)
self.nCy = int(self.hy.size)
self.nCz = int(self.hz.size)
self.vnC = np.array([self.hx.size, self.hy.size, self.hz.size])
self.nC = int(self.vnC.prod())
self.vectorCCx = np.r_[0, self.hx[:-1].cumsum()]+self.hx*0.5+self.x0[0]
self.vectorCCy = np.r_[0, self.hy[:-1].cumsum()]+self.hy*0.5+self.x0[1]
self.vectorCCz = np.r_[0, self.hz[:-1].cumsum()]+self.hz*0.5+self.x0[2]
# Node related properties.
self.nNx = self.nCx + 1
self.nNy = self.nCy + 1
self.nNz = self.nCz + 1
self.vnN = np.array([self.nNx, self.nNy, self.nNz], dtype=int)
self.nN = int(self.vnN.prod())
self.vectorNx = np.r_[0., self.hx.cumsum()] + self.x0[0]
self.vectorNy = np.r_[0., self.hy.cumsum()] + self.x0[1]
self.vectorNz = np.r_[0., self.hz.cumsum()] + self.x0[2]
# Edge related properties.
self.vnEx = np.array([self.nCx, self.nNy, self.nNz], dtype=int)
self.vnEy = np.array([self.nNx, self.nCy, self.nNz], dtype=int)
self.vnEz = np.array([self.nNx, self.nNy, self.nCz], dtype=int)
self.nEx = int(self.vnEx.prod())
self.nEy = int(self.vnEy.prod())
self.nEz = int(self.vnEz.prod())
self.vnE = np.array([self.nEx, self.nEy, self.nEz], dtype=int)
self.nE = int(self.vnE.sum())
def __repr__(self):
"""Simple representation."""
return (f"TensorMesh: {self.nCx} x {self.nCy} x {self.nCz} "
f"({self.nC:,})")
def copy(self):
"""Return a copy of the TensorMesh."""
return TensorMesh.from_dict(self.to_dict(True))
def to_dict(self, copy=False):
"""Store the necessary information of the TensorMesh in a dict."""
out = {'hx': self.hx, 'hy': self.hy, 'hz': self.hz, 'x0': self.x0,
'__class__': self.__class__.__name__}
if copy:
return deepcopy(out)
else:
return out
@classmethod
def from_dict(cls, inp):
"""Convert dictionary into :class:`TensorMesh` instance.
Parameters
----------
inp : dict
Dictionary as obtained from :func:`TensorMesh.to_dict`.
The dictionary needs the keys `hx`, `hy`, `hz`, and `x0`.
Returns
-------
obj : :class:`TensorMesh` instance
"""
try:
return cls(h=[inp['hx'], inp['hy'], inp['hz']], x0=inp['x0'])
except KeyError as e:
print(f"* ERROR :: Variable {e} missing in `inp`.")
raise
@property
def vol(self):
"""Construct cell volumes of the 3D model as 1D array."""
if getattr(self, '_vol', None) is None:
self._vol = (self.hx[None, None, :]*self.hy[None, :, None] *
self.hz[:, None, None]).ravel()
return self._vol
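# Minimal usage sketch for TensorMesh (the values are illustrative and not part
# of the module):
#
#     hx = np.ones(4)*10.
#     mesh = TensorMesh([hx, np.ones(2)*10., np.ones(2)*10.],
#                       x0=np.array([0., 0., 0.]))
#     mesh.nC         # 16 cells
#     mesh.vol.sum()  # 16,000 m^3 of total volume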
def get_hx_h0(freq, res, domain, fixed=0., possible_nx=None, min_width=None,
pps=3, alpha=None, max_domain=100000., raise_error=True, verb=1,
return_info=False):
r"""Return cell widths and origin for given parameters.
Returns cell widths for the provided frequency, resistivity, domain extent,
and other parameters using a flexible amount of cells. See input parameters
for more details. A maximum of three hard/fixed boundaries can be provided
(one of which is the grid center).
The minimum cell width is calculated through :math:`\delta/\rm{pps}`, where
the skin depth is given by :math:`\delta = 503.3 \sqrt{\rho/f}`, and the
parameter `pps` stands for 'points-per-skindepth'. The minimum cell width
can be restricted with the parameter `min_width`.
The actual calculation domain adds a buffer zone around the (survey)
domain. The thickness of the buffer is six times the skin depth. The field
is basically zero after two wavelengths. A wavelength is
:math:`2\pi\delta`, hence roughly 6 times the skin depth. Taking a factor 6
gives therefore almost two wavelengths, as the field travels to the
boundary and back. The actual buffer thickness can be steered with the
`res` parameter.
One has to take into account that the air is very resistive, which has to
be considered not just in the vertical direction, but also in the
horizontal directions, as the airwave will bounce back from the sides
otherwise. In the marine case this issue reduces with increasing water
depth.
See Also
--------
get_stretched_h : Get `hx` for a fixed number `nx` and within a fixed
domain.
Parameters
----------
freq : float
Frequency (Hz) to calculate the skin depth. The skin depth is a concept
defined in the frequency domain. If a negative frequency is provided,
it is assumed that the calculation is carried out in the Laplace
domain. To calculate the skin depth, the value of `freq` is then
multiplied by :math:`-2\pi`, to simulate the closest
frequency-equivalent.
res : float or list
Resistivity (Ohm m) to calculate the skin depth. The skin depth is
used to calculate the minimum cell width and the boundary thicknesses.
Up to three resistivities can be provided:
- float: Same resistivity for everything;
- [min_width, boundaries];
- [min_width, left boundary, right boundary].
domain : list
Contains the survey-domain limits [min, max]. The actual calculation
domain consists of this domain plus a buffer zone around it, which
depends on frequency and resistivity.
fixed : list, optional
Fixed boundaries, one, two, or maximum three values. The grid is
centered around the first value. Hence it is the center location with
the smallest cell. Two more fixed boundaries can be added, at most one
on each side of the first one.
Default is 0.
possible_nx : list, optional
List of possible numbers of cells. See :func:`get_cell_numbers`.
Default is ``get_cell_numbers(500, 5, 3)``, which corresponds to
[16, 24, 32, 40, 48, 64, 80, 96, 128, 160, 192, 256, 320, 384].
min_width : float, list or None, optional
Minimum cell width restriction:
- None : No restriction;
- float : Fixed to this value, ignoring skin depth and `pps`.
- list [min, max] : Lower and upper bounds.
Default is None.
pps : int, optional
Points per skindepth; minimum cell width is calculated via
`dmin = skindepth/pps`.
Default = 3.
alpha : list, optional
Maximum alpha and step size to find a good alpha. The first value is
the maximum alpha of the survey domain, the second value is the maximum
alpha for the buffer zone, and the third value is the step size.
Default = [1, 1.5, .01], hence no stretching within the survey domain
and a maximum stretching of 1.5 in the buffer zone; step size is 0.01.
max_domain : float, optional
Maximum calculation domain from fixed[0] (usually source position).
Default is 100,000.
raise_error : bool, optional
If True, an error is raised if no suitable grid is found. Otherwise it
just prints a message and returns None's.
Default is True.
verb : int, optional
Verbosity, 0 or 1.
Default = 1.
return_info : bool
If True, a dictionary is returned with some grid info (min and max
cell width and alpha).
Returns
-------
hx : ndarray
Cell widths of mesh.
x0 : float
Origin of the mesh.
info : dict
Dictionary with mesh info; only if ``return_info=True``.
Keys:
- `dmin`: Minimum cell width;
- `dmax`: Maximum cell width;
- `amin`: Minimum alpha;
- `amax`: Maximum alpha.
"""
# Get variables with default lists:
if alpha is None:
alpha = [1, 1.5, 0.01]
if possible_nx is None:
possible_nx = get_cell_numbers(500, 5, 3)
# Cast resistivity value(s).
res = np.array(res, ndmin=1)
if res.size == 1:
res_arr = np.array([res[0], res[0], res[0]])
elif res.size == 2:
res_arr = np.array([res[0], res[1], res[1]])
else:
res_arr = np.array([res[0], res[1], res[2]])
# Cast and check fixed.
fixed = np.array(fixed, ndmin=1)
if fixed.size > 2:
# Check length.
if fixed.size > 3:
print("\n* ERROR :: Maximum three fixed boundaries permitted.\n"
f" Provided: {fixed.size}.")
raise ValueError("Wrong input for fixed")
# Sort second and third, so it doesn't matter how it was provided.
fixed = np.array([fixed[0], max(fixed[1:]), min(fixed[1:])])
# Check side.
if np.sign(np.diff(fixed[:2])) == np.sign(np.diff(fixed[::2])):
print("\n* ERROR :: 2nd and 3rd fixed boundaries have to be "
"left and right of the first one.\n "
f"Provided: [{fixed[0]}, {fixed[1]}, {fixed[2]}]")
raise ValueError("Wrong input for fixed")
# Calculate skin depth.
skind = 503.3*np.sqrt(res_arr/abs(freq))
if freq < 0: # For Laplace-domain calculations.
skind /= np.sqrt(2*np.pi)
# Minimum cell width.
dmin = skind[0]/pps
if min_width is not None: # Respect user input.
min_width = np.array(min_width, ndmin=1)
if min_width.size == 1:
dmin = min_width
else:
dmin = np.clip(dmin, *min_width)
# Survey domain; contains all sources and receivers.
domain = np.array(domain, dtype=float)
# Calculation domain; big enough to avoid boundary effects.
# To avoid boundary effects we want the signal to travel two wavelengths
# from the source to the boundary and back to the receiver.
# => 2*pi*sd ~ 6.3*sd = one wavelength => signal is ~ 0.2 %.
# Two wavelengths we can safely assume it is zero.
#
# The air does not follow the concept of skin depth, as it is a wave rather
# than diffusion. For this is the factor `max_domain`, which restricts
# the domain in each direction to this value from the center.
# (a) Source to edges of domain.
dist_in_domain = abs(domain - fixed[0])
# (b) Two wavelengths.
two_lambda = skind[1:]*4*np.pi
# (c) Required buffer, additional to domain.
dist_buff = np.max([np.zeros(2), (two_lambda - dist_in_domain)/2], axis=0)
# (d) Add buffer to domain.
calc_domain = np.array([domain[0]-dist_buff[0], domain[1]+dist_buff[1]])
# (e) Restrict total domain to max_domain.
calc_domain[0] = max(calc_domain[0], fixed[0]-max_domain)
calc_domain[1] = min(calc_domain[1], fixed[0]+max_domain)
# Initiate flag if terminated.
finished = False
# Initiate alpha variables for survey and calculation domains.
sa, ca = 1.0, 1.0
# Loop over possible cell numbers from small to big.
for nx in np.unique(possible_nx):
# Loop over possible alphas for domain.
for sa in np.arange(1.0, alpha[0]+alpha[2]/2, alpha[2]):
# Get current stretched grid cell sizes.
thxl = dmin*sa**np.arange(nx) # Left of origin.
thxr = dmin*sa**np.arange(nx) # Right of origin.
# 0. Adjust stretching for fixed boundaries.
if fixed.size > 1: # Move mesh to first fixed boundary.
t_nx = np.r_[fixed[0], fixed[0]+np.cumsum(thxr)]
ii = np.argmin(abs(t_nx-fixed[1]))
thxr *= abs(fixed[1]-fixed[0])/np.sum(thxr[:ii])
if fixed.size > 2: # Move mesh to second fixed boundary.
t_nx = np.r_[fixed[0], fixed[0]-np.cumsum(thxl)]
ii = np.argmin(abs(t_nx-fixed[2]))
thxl *= abs(fixed[2]-fixed[0])/np.sum(thxl[:ii])
# 1. Fill from center to left domain.
nl = np.sum((fixed[0]-np.cumsum(thxl)) > domain[0])+1
# 2. Fill from center to right domain.
nr = np.sum((fixed[0]+np.cumsum(thxr)) < domain[1])+1
# 3. Get remaining number of cells and check termination criteria.
nsdc = nl+nr # Number of domain cells.
nx_remain = nx-nsdc
# Not good, try next.
if nx_remain <= 0:
continue
# Create the current hx-array.
hx = np.r_[thxl[:nl][::-1], thxr[:nr]]
hxo = np.r_[thxl[:nl][::-1], thxr[:nr]]
# Get actual domain:
asurv_domain = [fixed[0]-np.sum(thxl[:nl]),
fixed[0]+np.sum(thxr[:nr])]
x0 = float(fixed[0]-np.sum(thxl[:nl]))
# Get actual stretching (differs in case of fixed layers).
sa_adj = np.max([hx[1:]/hx[:-1], hx[:-1]/hx[1:]])
# Loop over possible alphas for calc_domain.
for ca in np.arange(sa, alpha[1]+alpha[2]/2, alpha[2]):
# 4. Fill to left calc_domain.
thxl = hx[0]*ca**np.arange(1, nx_remain+1)
nl = np.sum((asurv_domain[0]-np.cumsum(thxl)) >
calc_domain[0])+1
# 5. Fill to right calc_domain.
thxr = hx[-1]*ca**np.arange(1, nx_remain+1)
nr = np.sum((asurv_domain[1]+np.cumsum(thxr)) <
calc_domain[1])+1
# 6. Get remaining number of cells and check termination
# criteria.
ncdc = nl+nr # Number of calc_domain cells.
nx_remain2 = nx-nsdc-ncdc
if nx_remain2 < 0: # Not good, try next.
continue
# Create hx-array.
nl += int(np.floor(nx_remain2/2)) # If uneven, add one cell
nr += int(np.ceil(nx_remain2/2)) # more on the right.
hx = np.r_[thxl[:nl][::-1], hx, thxr[:nr]]
# Calculate origin.
x0 = float(asurv_domain[0]-np.sum(thxl[:nl]))
# Mark it as finished and break out of the loop.
finished = True
break
if finished:
break
if finished:
break
# Check finished and print info about found grid.
if not finished:
# Throw message if no solution was found.
print("\n* ERROR :: No suitable grid found; relax your criteria.\n")
if raise_error:
raise ArithmeticError("No grid found!")
else:
hx, x0 = None, None
elif verb > 0:
print(f" Skin depth ", end="")
if res.size == 1:
print(f" [m] : {skind[0]:.0f}")
elif res.size == 2:
print(f"(m/l-r) [m] : {skind[0]:.0f} / {skind[1]:.0f}")
else:
print(f"(m/l/r) [m] : {skind[0]:.0f} / {skind[1]:.0f} / "
f"{skind[2]:.0f}")
print(f" Survey domain [m] : {domain[0]:.0f} - "
f"{domain[1]:.0f}")
print(f" Calculation domain [m] : {calc_domain[0]:.0f} - "
f"{calc_domain[1]:.0f}")
print(f" Final extent [m] : {x0:.0f} - "
f"{x0+np.sum(hx):.0f}")
extstr = f" Min/max cell width [m] : {min(hx):.0f} / "
alstr = f" Alpha survey"
nrstr = " Number of cells "
if not np.isclose(sa, sa_adj):
sastr = f"{sa:.3f} ({sa_adj:.3f})"
else:
sastr = f"{sa:.3f}"
print(extstr+f"{max(hxo):.0f} / {max(hx):.0f}")
print(alstr+f"/calc : {sastr} / {ca:.3f}")
print(nrstr+f"(s/c/r) : {nx} ({nsdc}/{ncdc}/{nx_remain2})")
print()
if return_info:
if not fixed.size > 1:
sa_adj = sa
info = {'dmin': dmin,
'dmax': np.nanmax(hx),
'amin': np.nanmin([ca, sa, sa_adj]),
'amax': np.nanmax([ca, sa, sa_adj])}
return hx, x0, info
else:
return hx, x0
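# Illustrative call (a sketch only; the keyword names below are assumed from
# the function body above and may not match the actual signature):
#
#     hx, x0 = get_hx_x0(freq=1.0, res=0.3, domain=[-2000, 2000],
#                        possible_nx=get_cell_numbers(1024))
#
# which returns the cell widths `hx` and origin `x0` of the resulting 1D grid.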
def get_cell_numbers(max_nr, max_prime=5, min_div=3):
r"""Returns 'good' cell numbers for the multigrid method.
'Good' cell numbers are numbers which can be divided by 2 as many times as
possible. At the end there will be a low prime number.
The function adds all numbers :math:`p 2^n \leq M` for :math:`p={2, 3, ...,
p_\text{max}}` and :math:`n={n_\text{min}, n_\text{min}+1, ..., \infty}`;
:math:`M, p_\text{max}, n_\text{min}` correspond to `max_nr`, `max_prime`,
and `min_div`, respectively.
Parameters
----------
max_nr : int
Maximum number of cells.
max_prime : int
        Highest permitted prime number p for p*2^n. {2, 3, 5, 7} are good upper
        limits, as they avoid an overly large coarsest grid in the multigrid
        method. Default is 5.
min_div : int
Minimum times the number can be divided by two.
Default is 3.
Returns
-------
numbers : array
Array containing all possible cell numbers from lowest to highest.
"""
# Primes till 20.
primes = np.array([2, 3, 5, 7, 11, 13, 17, 19])
# Sanity check; 19 is already ridiculously high.
if max_prime > primes[-1]:
print(f"* ERROR :: Highest prime is {max_prime}, "
"please use a value < 20.")
raise ValueError("Highest prime too high")
# Restrict to max_prime.
primes = primes[primes <= max_prime]
# Get possible values.
# Currently restricted to prime*2**30 (for prime=2 => 1,073,741,824 cells).
numbers = primes[:, None]*2**np.arange(min_div, 30)
# Get unique values.
numbers = np.unique(numbers)
# Restrict to max_nr and return.
return numbers[numbers <= max_nr]
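# Worked example (illustrative, derived from the code above):
#
#     get_cell_numbers(128)
#     # -> array([ 16,  24,  32,  40,  48,  64,  80,  96, 128])
#     # i.e. all numbers p*2**n <= 128 with p in {2, 3, 5} and n >= 3.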
def get_stretched_h(min_width, domain, nx, x0=0, x1=None, resp_domain=False):
"""Return cell widths for a stretched grid within the domain.
Returns `nx` cell widths within `domain`, where the minimum cell width is
    `min_width`. The cells are not stretched between `x0` and `x1`, while a
    power-law stretching is used outside. The actual stretching factor and the
    number of cells left and right of `x0` and `x1` are found in a
    minimization process.
The domain is not completely respected. The starting point of the domain
is, but the endpoint of the domain might slightly shift (this is more
likely the case for small `nx`, for big `nx` the shift should be small).
The new endpoint can be obtained with ``domain[0]+np.sum(hx)``. If you want
the domain to be respected absolutely, set ``resp_domain=True``. However,
be aware that this will introduce one stretch-factor which is different
from the other stretch factors, to accommodate the restriction. This
one-off factor is between the left- and right-side of `x0`, or, if `x1` is
provided, just after `x1`.
See Also
--------
get_hx_x0 : Get `hx` and `x0` for a flexible number of `nx` with
given bounds.
Parameters
----------
min_width : float
Minimum cell width. If x1 is provided, the actual minimum cell width
might be smaller than min_width.
domain : list
[start, end] of model domain.
nx : int
Number of cells.
x0 : float
Center of the grid. `x0` is restricted to `domain`.
Default is 0.
x1 : float
If provided, then no stretching is applied between `x0` and `x1`. The
non-stretched part starts at `x0` and stops at the first possible
        location at or after `x1`. `x1` is restricted to `domain`. This will
        adjust `min_width` so that an integer number of cells fit between `x0`
        and `x1`.
resp_domain : bool
If False (default), then the domain-end might shift slightly to assure
that the same stretching factor is applied throughout. If set to True,
however, the domain is respected absolutely. This will introduce one
stretch-factor which is different from the other stretch factors, to
accommodate the restriction. This one-off factor is between the left-
and right-side of `x0`, or, if `x1` is provided, just after `x1`.
Returns
-------
hx : ndarray
Cell widths of mesh.
"""
# Cast to arrays
domain = np.array(domain, dtype=float)
x0 = np.array(x0, dtype=float)
x0 = np.clip(x0, *domain) # Restrict to model domain
min_width = np.array(min_width, dtype=float)
if x1 is not None:
x1 = np.array(x1, dtype=float)
x1 = np.clip(x1, *domain) # Restrict to model domain
# If x1 is provided (a part is not stretched)
if x1 is not None:
# Store original values
xlim_orig = domain.copy()
nx_orig = int(nx)
x0_orig = x0.copy()
h_min_orig = min_width.copy()
# Get number of non-stretched cells
n_nos = int(np.ceil((x1-x0)/min_width))
# Re-calculate min_width to fit with x0-x1-limits:
min_width = (x1-x0)/n_nos
# Subtract one cell, because the standard scheme provides one
# min_width-cell.
n_nos -= 1
# Reset x0, because the first min_width comes from normal scheme
x0 += min_width
# Reset xmax for normal scheme
domain[1] -= n_nos*min_width
# Reset nx for normal scheme
nx -= n_nos
# If there are not enough points reset to standard procedure. The limit
# of five is arbitrary. However, nx should be much bigger than five
# anyways, otherwise stretched grid doesn't make sense.
if nx <= 5:
print("Warning :: Not enough points for non-stretched part,"
"ignoring therefore `x1`.")
domain = xlim_orig
nx = nx_orig
x0 = x0_orig
x1 = None
min_width = h_min_orig
# Get stretching factor (a = 1+alpha).
if min_width == 0 or min_width > np.diff(domain)/nx:
# If min_width is bigger than the domain-extent divided by nx, no
# stretching is required at all.
alpha = 0
else:
# Wrap _get_dx into a minimization function to call with fsolve.
def find_alpha(alpha, min_width, args):
"""Find alpha such that min(hx) = min_width."""
return min(get_hx(alpha, *args))/min_width-1
# Search for best alpha, must be at least 0
args = (domain, nx, x0)
alpha = max(0, optimize.fsolve(find_alpha, 0.02, (min_width, args)))
# With alpha get actual cell spacing with `resp_domain` to respect the
# users decision.
hx = get_hx(alpha, domain, nx, x0, resp_domain)
# Add the non-stretched center if x1 is provided
if x1 is not None:
hx = np.r_[hx[: np.argmin(hx)], np.ones(n_nos)*min_width,
hx[np.argmin(hx):]]
# Print warning min_width could not be respected.
if abs(hx.min() - min_width) > 0.1:
print(f"Warning :: Minimum cell width ({np.round(hx.min(), 2)} m) is "
"below `min_width`, because `nx` is too big for `domain`.")
return hx
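# Minimal usage sketch (illustrative only):
#
#     hx = get_stretched_h(min_width=50, domain=[-2000, 2000], nx=64, x0=0)
#
# `hx.min()` is then close to 50, and `domain[0] + np.sum(hx)` gives the
# (possibly slightly shifted) end of the domain; pass `resp_domain=True` to
# respect the domain exactly.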
def get_domain(x0=0, freq=1, res=0.3, limits=None, min_width=None,
fact_min=0.2, fact_neg=5, fact_pos=None):
r"""Get domain extent and minimum cell width as a function of skin depth.
Returns the extent of the calculation domain and the minimum cell width as
a multiple of the skin depth, with possible user restrictions on minimum
calculation domain and range of possible minimum cell widths.
.. math::
\delta &= 503.3 \sqrt{\frac{\rho}{f}} , \\
x_\text{start} &= x_0-k_\text{neg}\delta , \\
x_\text{end} &= x_0+k_\text{pos}\delta , \\
h_\text{min} &= k_\text{min} \delta .
Parameters
----------
x0 : float
Center of the calculation domain. Normally the source location.
Default is 0.
freq : float
Frequency (Hz) to calculate the skin depth. The skin depth is a concept
defined in the frequency domain. If a negative frequency is provided,
it is assumed that the calculation is carried out in the Laplace
domain. To calculate the skin depth, the value of `freq` is then
multiplied by :math:`-2\pi`, to simulate the closest
frequency-equivalent.
Default is 1 Hz.
res : float, optional
Resistivity (Ohm m) to calculate skin depth.
Default is 0.3 Ohm m (sea water).
limits : None or list
[start, end] of model domain. This extent represents the minimum extent
of the domain. The domain is therefore only adjusted if it has to reach
outside of [start, end].
Default is None.
min_width : None, float, or list of two floats
Minimum cell width is calculated as a function of skin depth:
        fact_min*sd. If `min_width` is a float, this value is used. If a list
        of two values [min, max] is provided, it is used to restrict
        min_width. Default is None.
fact_min, fact_neg, fact_pos : floats
The skin depth is multiplied with these factors to estimate:
- Minimum cell width (`fact_min`, default 0.2)
- Domain-start (`fact_neg`, default 5), and
- Domain-end (`fact_pos`, defaults to `fact_neg`).
Returns
-------
h_min : float
Minimum cell width.
domain : list
Start- and end-points of calculation domain.
"""
# Set fact_pos to fact_neg if not provided.
if fact_pos is None:
fact_pos = fact_neg
# Calculate the skin depth.
skind = 503.3*np.sqrt(res/abs(freq))
if freq < 0: # For Laplace-domain calculations.
skind /= np.sqrt(2*np.pi)
# Estimate minimum cell width.
h_min = fact_min*skind
if min_width is not None: # Respect user input.
if np.array(min_width).size == 1:
h_min = min_width
else:
h_min = np.clip(h_min, *min_width)
# Estimate calculation domain.
domain = [x0-fact_neg*skind, x0+fact_pos*skind]
if limits is not None: # Respect user input.
domain = [min(limits[0], domain[0]), max(limits[1], domain[1])]
return h_min, domain
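# Worked example with the defaults (freq=1 Hz, res=0.3 Ohm m; illustrative):
# skin depth = 503.3*sqrt(0.3/1) ~ 275.7 m, so
#
#     h_min, domain = get_domain()
#     # h_min ~ 0.2*275.7 ~ 55 m, domain ~ [-1378, 1378] (5 skin depths).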
def get_hx(alpha, domain, nx, x0, resp_domain=True):
r"""Return cell widths for given input.
Find the number of cells left and right of `x0`, `nl` and `nr`
respectively, for the provided alpha. For this, we solve
.. math:: \frac{x_\text{max}-x_0}{x_0-x_\text{min}} =
\frac{a^{nr}-1}{a^{nl}-1}
where :math:`a = 1+\alpha`.
Parameters
----------
alpha : float
Stretching factor `a` is given by ``a=1+alpha``.
domain : list
[start, end] of model domain.
nx : int
Number of cells.
x0 : float
Center of the grid. `x0` is restricted to `domain`.
resp_domain : bool
        If False, then the domain-end might shift slightly to assure that the
        same stretching factor is applied throughout. If set to True (the default),
however, the domain is respected absolutely. This will introduce one
stretch-factor which is different from the other stretch factors, to
accommodate the restriction. This one-off factor is between the left-
and right-side of `x0`, or, if `x1` is provided, just after `x1`.
Returns
-------
hx : ndarray
Cell widths of mesh.
"""
if alpha <= 0.: # If alpha <= 0: equal spacing (no stretching at all)
hx = np.ones(nx)*np.diff(np.squeeze(domain))/nx
else: # Get stretched hx
a = alpha+1
# Get hx depending if x0 is on the domain boundary or not.
if np.isclose(x0, domain[0]) or np.isclose(x0, domain[1]):
# Get al a's
alr = np.diff(domain)*alpha/(a**nx-1)*a**np.arange(nx)
if x0 == domain[1]:
alr = alr[::-1]
# Calculate differences
hx = alr*np.diff(domain)/sum(alr)
else:
# Find number of elements left and right by solving:
# (xmax-x0)/(x0-xmin) = a**nr-1/(a**nl-1)
nr = np.arange(2, nx+1)
er = (domain[1]-x0)/(x0-domain[0]) - (a**nr[::-1]-1)/(a**nr-1)
nl = np.argmin(abs(np.floor(er)))+1
nr = nx-nl
# Get all a's
al = a**np.arange(nl-1, -1, -1)
ar = a**np.arange(1, nr+1)
# Calculate differences
if resp_domain:
# This version honours domain[0] and domain[1], but to achieve
# this it introduces one stretch-factor which is different from
# all the others between al to ar.
hx = np.r_[al*(x0-domain[0])/sum(al),
ar*(domain[1]-x0)/sum(ar)]
else:
# This version moves domain[1], but each stretch-factor is
# exactly the same.
fact = (x0-domain[0])/sum(al) # Take distance from al.
hx = np.r_[al, ar]*fact
# Note: this hx is equivalent as providing the following h
# to TensorMesh:
# h = [(min_width, nl-1, -a), (min_width, n_nos+1),
# (min_width, nr, a)]
return hx
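# Quick check of the equal-spacing branch (illustrative): with alpha <= 0 the
# grid is uniform, e.g.
#
#     get_hx(0, [0, 100], 10, 50)
#     # -> array([10., 10., 10., 10., 10., 10., 10., 10., 10., 10.])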
| [
"numpy.clip",
"numpy.sqrt",
"numpy.array",
"copy.deepcopy",
"numpy.nanmin",
"numpy.arange",
"numpy.diff",
"numpy.max",
"numpy.nanmax",
"numpy.argmin",
"numpy.ceil",
"numpy.ones",
"numpy.floor",
"numpy.squeeze",
"scipy.optimize.fsolve",
"numpy.isclose",
"numpy.unique",
"numpy.sum",
"numpy.zeros",
"numpy.cumsum"
]
| [((10528, 10550), 'numpy.array', 'np.array', (['res'], {'ndmin': '(1)'}), '(res, ndmin=1)\n', (10536, 10550), True, 'import numpy as np\n'), ((10807, 10831), 'numpy.array', 'np.array', (['fixed'], {'ndmin': '(1)'}), '(fixed, ndmin=1)\n', (10815, 10831), True, 'import numpy as np\n'), ((12111, 12140), 'numpy.array', 'np.array', (['domain'], {'dtype': 'float'}), '(domain, dtype=float)\n', (12119, 12140), True, 'import numpy as np\n'), ((13019, 13081), 'numpy.array', 'np.array', (['[domain[0] - dist_buff[0], domain[1] + dist_buff[1]]'], {}), '([domain[0] - dist_buff[0], domain[1] + dist_buff[1]])\n', (13027, 13081), True, 'import numpy as np\n'), ((13469, 13491), 'numpy.unique', 'np.unique', (['possible_nx'], {}), '(possible_nx)\n', (13478, 13491), True, 'import numpy as np\n'), ((19562, 19600), 'numpy.array', 'np.array', (['[2, 3, 5, 7, 11, 13, 17, 19]'], {}), '([2, 3, 5, 7, 11, 13, 17, 19])\n', (19570, 19600), True, 'import numpy as np\n'), ((20115, 20133), 'numpy.unique', 'np.unique', (['numbers'], {}), '(numbers)\n', (20124, 20133), True, 'import numpy as np\n'), ((22736, 22765), 'numpy.array', 'np.array', (['domain'], {'dtype': 'float'}), '(domain, dtype=float)\n', (22744, 22765), True, 'import numpy as np\n'), ((22775, 22800), 'numpy.array', 'np.array', (['x0'], {'dtype': 'float'}), '(x0, dtype=float)\n', (22783, 22800), True, 'import numpy as np\n'), ((22810, 22830), 'numpy.clip', 'np.clip', (['x0', '*domain'], {}), '(x0, *domain)\n', (22817, 22830), True, 'import numpy as np\n'), ((22875, 22907), 'numpy.array', 'np.array', (['min_width'], {'dtype': 'float'}), '(min_width, dtype=float)\n', (22883, 22907), True, 'import numpy as np\n'), ((2493, 2545), 'numpy.array', 'np.array', (['[self.hx.size, self.hy.size, self.hz.size]'], {}), '([self.hx.size, self.hy.size, self.hz.size])\n', (2501, 2545), True, 'import numpy as np\n'), ((2976, 3027), 'numpy.array', 'np.array', (['[self.nNx, self.nNy, self.nNz]'], {'dtype': 'int'}), '([self.nNx, self.nNy, self.nNz], dtype=int)\n', (2984, 3027), True, 'import numpy as np\n'), ((3318, 3369), 'numpy.array', 'np.array', (['[self.nCx, self.nNy, self.nNz]'], {'dtype': 'int'}), '([self.nCx, self.nNy, self.nNz], dtype=int)\n', (3326, 3369), True, 'import numpy as np\n'), ((3390, 3441), 'numpy.array', 'np.array', (['[self.nNx, self.nCy, self.nNz]'], {'dtype': 'int'}), '([self.nNx, self.nCy, self.nNz], dtype=int)\n', (3398, 3441), True, 'import numpy as np\n'), ((3462, 3513), 'numpy.array', 'np.array', (['[self.nNx, self.nNy, self.nCz]'], {'dtype': 'int'}), '([self.nNx, self.nNy, self.nCz], dtype=int)\n', (3470, 3513), True, 'import numpy as np\n'), ((3656, 3707), 'numpy.array', 'np.array', (['[self.nEx, self.nEy, self.nEz]'], {'dtype': 'int'}), '([self.nEx, self.nEy, self.nEz], dtype=int)\n', (3664, 3707), True, 'import numpy as np\n'), ((10591, 10625), 'numpy.array', 'np.array', (['[res[0], res[0], res[0]]'], {}), '([res[0], res[0], res[0]])\n', (10599, 10625), True, 'import numpy as np\n'), ((11750, 11768), 'numpy.sqrt', 'np.sqrt', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (11757, 11768), True, 'import numpy as np\n'), ((11891, 11919), 'numpy.array', 'np.array', (['min_width'], {'ndmin': '(1)'}), '(min_width, ndmin=1)\n', (11899, 11919), True, 'import numpy as np\n'), ((13560, 13609), 'numpy.arange', 'np.arange', (['(1.0)', '(alpha[0] + alpha[2] / 2)', 'alpha[2]'], {}), '(1.0, alpha[0] + alpha[2] / 2, alpha[2])\n', (13569, 13609), True, 'import numpy as np\n'), ((22944, 22969), 'numpy.array', 'np.array', (['x1'], {'dtype': 'float'}), '(x1, dtype=float)\n', 
(22952, 22969), True, 'import numpy as np\n'), ((22983, 23003), 'numpy.clip', 'np.clip', (['x1', '*domain'], {}), '(x1, *domain)\n', (22990, 23003), True, 'import numpy as np\n'), ((28143, 28161), 'numpy.sqrt', 'np.sqrt', (['(2 * np.pi)'], {}), '(2 * np.pi)\n', (28150, 28161), True, 'import numpy as np\n'), ((4310, 4323), 'copy.deepcopy', 'deepcopy', (['out'], {}), '(out)\n', (4318, 4323), False, 'from copy import deepcopy\n'), ((10668, 10702), 'numpy.array', 'np.array', (['[res[0], res[1], res[1]]'], {}), '([res[0], res[1], res[1]])\n', (10676, 10702), True, 'import numpy as np\n'), ((10731, 10765), 'numpy.array', 'np.array', (['[res[0], res[1], res[2]]'], {}), '([res[0], res[1], res[2]])\n', (10739, 10765), True, 'import numpy as np\n'), ((12014, 12039), 'numpy.clip', 'np.clip', (['dmin', '*min_width'], {}), '(dmin, *min_width)\n', (12021, 12039), True, 'import numpy as np\n'), ((12913, 12924), 'numpy.zeros', 'np.zeros', (['(2)'], {}), '(2)\n', (12921, 12924), True, 'import numpy as np\n'), ((15271, 15315), 'numpy.max', 'np.max', (['[hx[1:] / hx[:-1], hx[:-1] / hx[1:]]'], {}), '([hx[1:] / hx[:-1], hx[:-1] / hx[1:]])\n', (15277, 15315), True, 'import numpy as np\n'), ((15392, 15440), 'numpy.arange', 'np.arange', (['sa', '(alpha[1] + alpha[2] / 2)', 'alpha[2]'], {}), '(sa, alpha[1] + alpha[2] / 2, alpha[2])\n', (15401, 15440), True, 'import numpy as np\n'), ((18306, 18319), 'numpy.nanmax', 'np.nanmax', (['hx'], {}), '(hx)\n', (18315, 18319), True, 'import numpy as np\n'), ((18345, 18372), 'numpy.nanmin', 'np.nanmin', (['[ca, sa, sa_adj]'], {}), '([ca, sa, sa_adj])\n', (18354, 18372), True, 'import numpy as np\n'), ((18398, 18425), 'numpy.nanmax', 'np.nanmax', (['[ca, sa, sa_adj]'], {}), '([ca, sa, sa_adj])\n', (18407, 18425), True, 'import numpy as np\n'), ((20052, 20074), 'numpy.arange', 'np.arange', (['min_div', '(30)'], {}), '(min_div, 30)\n', (20061, 20074), True, 'import numpy as np\n'), ((23330, 23360), 'numpy.ceil', 'np.ceil', (['((x1 - x0) / min_width)'], {}), '((x1 - x0) / min_width)\n', (23337, 23360), True, 'import numpy as np\n'), ((24890, 24942), 'scipy.optimize.fsolve', 'optimize.fsolve', (['find_alpha', '(0.02)', '(min_width, args)'], {}), '(find_alpha, 0.02, (min_width, args))\n', (24905, 24942), False, 'from scipy import optimize\n'), ((28382, 28408), 'numpy.clip', 'np.clip', (['h_min', '*min_width'], {}), '(h_min, *min_width)\n', (28389, 28408), True, 'import numpy as np\n'), ((30129, 30154), 'numpy.isclose', 'np.isclose', (['x0', 'domain[0]'], {}), '(x0, domain[0])\n', (30139, 30154), True, 'import numpy as np\n'), ((30158, 30183), 'numpy.isclose', 'np.isclose', (['x0', 'domain[1]'], {}), '(x0, domain[1])\n', (30168, 30183), True, 'import numpy as np\n'), ((30579, 30599), 'numpy.arange', 'np.arange', (['(2)', '(nx + 1)'], {}), '(2, nx + 1)\n', (30588, 30599), True, 'import numpy as np\n'), ((11286, 11304), 'numpy.diff', 'np.diff', (['fixed[:2]'], {}), '(fixed[:2])\n', (11293, 11304), True, 'import numpy as np\n'), ((11317, 11336), 'numpy.diff', 'np.diff', (['fixed[::2]'], {}), '(fixed[::2])\n', (11324, 11336), True, 'import numpy as np\n'), ((17861, 17883), 'numpy.isclose', 'np.isclose', (['sa', 'sa_adj'], {}), '(sa, sa_adj)\n', (17871, 17883), True, 'import numpy as np\n'), ((24380, 24395), 'numpy.diff', 'np.diff', (['domain'], {}), '(domain)\n', (24387, 24395), True, 'import numpy as np\n'), ((28287, 28306), 'numpy.array', 'np.array', (['min_width'], {}), '(min_width)\n', (28295, 28306), True, 'import numpy as np\n'), ((29946, 29957), 'numpy.ones', 'np.ones', (['nx'], 
{}), '(nx)\n', (29953, 29957), True, 'import numpy as np\n'), ((30791, 30816), 'numpy.arange', 'np.arange', (['(nl - 1)', '(-1)', '(-1)'], {}), '(nl - 1, -1, -1)\n', (30800, 30816), True, 'import numpy as np\n'), ((30835, 30855), 'numpy.arange', 'np.arange', (['(1)', '(nr + 1)'], {}), '(1, nr + 1)\n', (30844, 30855), True, 'import numpy as np\n'), ((13689, 13702), 'numpy.arange', 'np.arange', (['nx'], {}), '(nx)\n', (13698, 13702), True, 'import numpy as np\n'), ((13750, 13763), 'numpy.arange', 'np.arange', (['nx'], {}), '(nx)\n', (13759, 13763), True, 'import numpy as np\n'), ((14074, 14091), 'numpy.sum', 'np.sum', (['thxr[:ii]'], {}), '(thxr[:ii])\n', (14080, 14091), True, 'import numpy as np\n'), ((14326, 14343), 'numpy.sum', 'np.sum', (['thxl[:ii]'], {}), '(thxl[:ii])\n', (14332, 14343), True, 'import numpy as np\n'), ((15052, 15069), 'numpy.sum', 'np.sum', (['thxl[:nl]'], {}), '(thxl[:nl])\n', (15058, 15069), True, 'import numpy as np\n'), ((15108, 15125), 'numpy.sum', 'np.sum', (['thxr[:nr]'], {}), '(thxr[:nr])\n', (15114, 15125), True, 'import numpy as np\n'), ((15159, 15176), 'numpy.sum', 'np.sum', (['thxl[:nl]'], {}), '(thxl[:nl])\n', (15165, 15176), True, 'import numpy as np\n'), ((16229, 16253), 'numpy.floor', 'np.floor', (['(nx_remain2 / 2)'], {}), '(nx_remain2 / 2)\n', (16237, 16253), True, 'import numpy as np\n'), ((16306, 16329), 'numpy.ceil', 'np.ceil', (['(nx_remain2 / 2)'], {}), '(nx_remain2 / 2)\n', (16313, 16329), True, 'import numpy as np\n'), ((25211, 25225), 'numpy.ones', 'np.ones', (['n_nos'], {}), '(n_nos)\n', (25218, 25225), True, 'import numpy as np\n'), ((29966, 29984), 'numpy.squeeze', 'np.squeeze', (['domain'], {}), '(domain)\n', (29976, 29984), True, 'import numpy as np\n'), ((30263, 30276), 'numpy.arange', 'np.arange', (['nx'], {}), '(nx)\n', (30272, 30276), True, 'import numpy as np\n'), ((30399, 30414), 'numpy.diff', 'np.diff', (['domain'], {}), '(domain)\n', (30406, 30414), True, 'import numpy as np\n'), ((15519, 15546), 'numpy.arange', 'np.arange', (['(1)', '(nx_remain + 1)'], {}), '(1, nx_remain + 1)\n', (15528, 15546), True, 'import numpy as np\n'), ((15738, 15765), 'numpy.arange', 'np.arange', (['(1)', '(nx_remain + 1)'], {}), '(1, nx_remain + 1)\n', (15747, 15765), True, 'import numpy as np\n'), ((16491, 16508), 'numpy.sum', 'np.sum', (['thxl[:nl]'], {}), '(thxl[:nl])\n', (16497, 16508), True, 'import numpy as np\n'), ((25195, 25208), 'numpy.argmin', 'np.argmin', (['hx'], {}), '(hx)\n', (25204, 25208), True, 'import numpy as np\n'), ((25259, 25272), 'numpy.argmin', 'np.argmin', (['hx'], {}), '(hx)\n', (25268, 25272), True, 'import numpy as np\n'), ((30228, 30243), 'numpy.diff', 'np.diff', (['domain'], {}), '(domain)\n', (30235, 30243), True, 'import numpy as np\n'), ((30704, 30716), 'numpy.floor', 'np.floor', (['er'], {}), '(er)\n', (30712, 30716), True, 'import numpy as np\n'), ((13959, 13974), 'numpy.cumsum', 'np.cumsum', (['thxr'], {}), '(thxr)\n', (13968, 13974), True, 'import numpy as np\n'), ((14211, 14226), 'numpy.cumsum', 'np.cumsum', (['thxl'], {}), '(thxl)\n', (14220, 14226), True, 'import numpy as np\n'), ((14429, 14444), 'numpy.cumsum', 'np.cumsum', (['thxl'], {}), '(thxl)\n', (14438, 14444), True, 'import numpy as np\n'), ((14547, 14562), 'numpy.cumsum', 'np.cumsum', (['thxr'], {}), '(thxr)\n', (14556, 14562), True, 'import numpy as np\n'), ((17689, 17699), 'numpy.sum', 'np.sum', (['hx'], {}), '(hx)\n', (17695, 17699), True, 'import numpy as np\n'), ((15590, 15605), 'numpy.cumsum', 'np.cumsum', (['thxl'], {}), '(thxl)\n', (15599, 15605), 
True, 'import numpy as np\n'), ((15809, 15824), 'numpy.cumsum', 'np.cumsum', (['thxr'], {}), '(thxr)\n', (15818, 15824), True, 'import numpy as np\n')] |
from common_src.lib.model.post import Post
from common_src.lib.model.source import Source
from common_src.scrapers.abstract_scraper import make_soup, remove_dups, now
SOURCE_CODE = "second_extinction"
WEBSITE = "https://www.secondextinctiongame.com/news"
ALT_IMAGE = 'https://www.secondextinctiongame.com/static/242486b363d867dc483deb6d7038dde1/d8255/se_screenshot_5.jpg'
FILENAME = "../resources/data/second_extinction.txt"
def get_source():
name = "Second Extinction"
description = 'Second Extinction is a first person shooter game where earth has been invaded by mutated dinosaurs.'
profile_image = 'https://www.secondextinctiongame.com/static/logo-0d52f8575a251eff8ebd6e2d6bd6c51b.png'
return Source(SOURCE_CODE, name, description, profile_image, ALT_IMAGE, None)
def scrape():
soup = make_soup(WEBSITE)
base_site = "https://www.secondextinctiongame.com"
data = []
for post in soup.findAll("article", {"class": "cgYILD"}):
date = post.find("time").text.replace("-", "") + "0000"
title = post.find("h3").text.strip()
link = base_site + post.find("a").get("href")
alt_image = ALT_IMAGE
image = base_site + post.find("picture").find("img").get("src").replace(" ", "%20")
data.append(Post(None, date, title, link, image, alt_image, SOURCE_CODE, None))
if len(data) % 25 == 0:
print(now() + f"Processed {len(data)} posts")
return remove_dups(data)
| [
"common_src.scrapers.abstract_scraper.make_soup",
"common_src.lib.model.post.Post",
"common_src.lib.model.source.Source",
"common_src.scrapers.abstract_scraper.remove_dups",
"common_src.scrapers.abstract_scraper.now"
]
| [((716, 786), 'common_src.lib.model.source.Source', 'Source', (['SOURCE_CODE', 'name', 'description', 'profile_image', 'ALT_IMAGE', 'None'], {}), '(SOURCE_CODE, name, description, profile_image, ALT_IMAGE, None)\n', (722, 786), False, 'from common_src.lib.model.source import Source\n'), ((814, 832), 'common_src.scrapers.abstract_scraper.make_soup', 'make_soup', (['WEBSITE'], {}), '(WEBSITE)\n', (823, 832), False, 'from common_src.scrapers.abstract_scraper import make_soup, remove_dups, now\n'), ((1443, 1460), 'common_src.scrapers.abstract_scraper.remove_dups', 'remove_dups', (['data'], {}), '(data)\n', (1454, 1460), False, 'from common_src.scrapers.abstract_scraper import make_soup, remove_dups, now\n'), ((1272, 1338), 'common_src.lib.model.post.Post', 'Post', (['None', 'date', 'title', 'link', 'image', 'alt_image', 'SOURCE_CODE', 'None'], {}), '(None, date, title, link, image, alt_image, SOURCE_CODE, None)\n', (1276, 1338), False, 'from common_src.lib.model.post import Post\n'), ((1391, 1396), 'common_src.scrapers.abstract_scraper.now', 'now', ([], {}), '()\n', (1394, 1396), False, 'from common_src.scrapers.abstract_scraper import make_soup, remove_dups, now\n')] |
#
# alexnet.py
#
# Author(s):
# <NAME> <<EMAIL>>
#
# Copyright (c) 2020-2021 ETH Zurich.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import torch
import torch.nn as nn
class AlexNet(nn.Module):
def __init__(self, use_bn: bool, num_classes: int = 1000, seed : int = -1) -> None:
super(AlexNet, self).__init__()
self.features = self._make_features(use_bn)
self.avgpool = nn.AdaptiveAvgPool2d((6, 6))
self.classifier = self._make_classifier(num_classes)
self._initialize_weights(seed)
def _make_features(self, use_bn: bool) -> nn.Sequential:
modules = []
# conv 1
modules += [nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2, bias=not use_bn)]
modules += [nn.BatchNorm2d(64)] if use_bn else []
modules += [nn.ReLU(inplace=True)]
# max pool
modules += [nn.MaxPool2d(kernel_size=3, stride=2)]
# conv 2
modules += [nn.Conv2d(64, 192, kernel_size=5, padding=2, bias=not use_bn)]
modules += [nn.BatchNorm2d(192)] if use_bn else []
modules += [nn.ReLU(inplace=True)]
# max pool
modules += [nn.MaxPool2d(kernel_size=3, stride=2)]
# conv 3
modules += [nn.Conv2d(192, 384, kernel_size=3, padding=1, bias=not use_bn)]
modules += [nn.BatchNorm2d(384)] if use_bn else []
modules += [nn.ReLU(inplace=True)]
# conv 4
modules += [nn.Conv2d(384, 256, kernel_size=3, padding=1, bias=not use_bn)]
modules += [nn.BatchNorm2d(256)] if use_bn else []
modules += [nn.ReLU(inplace=True)]
# conv 5
modules += [nn.Conv2d(256, 256, kernel_size=3, padding=1, bias=not use_bn)]
modules += [nn.BatchNorm2d(256)] if use_bn else []
modules += [nn.ReLU(inplace=True)]
# max pool
modules += [nn.MaxPool2d(kernel_size=3, stride=2)]
return nn.Sequential(*modules)
def _make_classifier(self, num_classes: int) -> nn.Sequential:
modules = []
# dropout
modules += [nn.Dropout()]
# linear 1
modules += [nn.Linear(256 * 6 * 6, 4096)]
modules += [nn.ReLU(inplace=True)]
# dropout
modules += [nn.Dropout()]
# linear 2
modules += [nn.Linear(4096, 4096)]
modules += [nn.ReLU(inplace=True)]
# linear 3
modules += [nn.Linear(4096, num_classes)]
return nn.Sequential(*modules)
def forward(self, x: torch.Tensor) -> torch.Tensor:
x = self.features(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
def _initialize_weights(self, seed: int = -1):
if seed >= 0:
torch.manual_seed(seed)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.normal_(m.weight, 0, 0.01)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
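# Illustrative usage (a sketch, not part of the original file):
#
#     model = AlexNet(use_bn=True, num_classes=10, seed=0)
#     logits = model(torch.randn(2, 3, 224, 224))  # -> shape (2, 10)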
| [
"torch.manual_seed",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.nn.BatchNorm2d",
"torch.nn.init.constant_",
"torch.nn.Sequential",
"torch.nn.init.kaiming_normal_",
"torch.nn.Conv2d",
"torch.nn.MaxPool2d",
"torch.nn.AdaptiveAvgPool2d",
"torch.nn.Linear",
"torch.nn.init.normal_"
]
| [((915, 943), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(6, 6)'], {}), '((6, 6))\n', (935, 943), True, 'import torch.nn as nn\n'), ((2400, 2423), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (2413, 2423), True, 'import torch.nn as nn\n'), ((2921, 2944), 'torch.nn.Sequential', 'nn.Sequential', (['*modules'], {}), '(*modules)\n', (2934, 2944), True, 'import torch.nn as nn\n'), ((1166, 1236), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(64)'], {'kernel_size': '(11)', 'stride': '(4)', 'padding': '(2)', 'bias': '(not use_bn)'}), '(3, 64, kernel_size=11, stride=4, padding=2, bias=not use_bn)\n', (1175, 1236), True, 'import torch.nn as nn\n'), ((1316, 1337), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1323, 1337), True, 'import torch.nn as nn\n'), ((1378, 1415), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (1390, 1415), True, 'import torch.nn as nn\n'), ((1454, 1515), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(192)'], {'kernel_size': '(5)', 'padding': '(2)', 'bias': '(not use_bn)'}), '(64, 192, kernel_size=5, padding=2, bias=not use_bn)\n', (1463, 1515), True, 'import torch.nn as nn\n'), ((1596, 1617), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1603, 1617), True, 'import torch.nn as nn\n'), ((1658, 1695), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (1670, 1695), True, 'import torch.nn as nn\n'), ((1734, 1796), 'torch.nn.Conv2d', 'nn.Conv2d', (['(192)', '(384)'], {'kernel_size': '(3)', 'padding': '(1)', 'bias': '(not use_bn)'}), '(192, 384, kernel_size=3, padding=1, bias=not use_bn)\n', (1743, 1796), True, 'import torch.nn as nn\n'), ((1877, 1898), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1884, 1898), True, 'import torch.nn as nn\n'), ((1937, 1999), 'torch.nn.Conv2d', 'nn.Conv2d', (['(384)', '(256)'], {'kernel_size': '(3)', 'padding': '(1)', 'bias': '(not use_bn)'}), '(384, 256, kernel_size=3, padding=1, bias=not use_bn)\n', (1946, 1999), True, 'import torch.nn as nn\n'), ((2080, 2101), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2087, 2101), True, 'import torch.nn as nn\n'), ((2140, 2202), 'torch.nn.Conv2d', 'nn.Conv2d', (['(256)', '(256)'], {'kernel_size': '(3)', 'padding': '(1)', 'bias': '(not use_bn)'}), '(256, 256, kernel_size=3, padding=1, bias=not use_bn)\n', (2149, 2202), True, 'import torch.nn as nn\n'), ((2283, 2304), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2290, 2304), True, 'import torch.nn as nn\n'), ((2345, 2382), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(2)'}), '(kernel_size=3, stride=2)\n', (2357, 2382), True, 'import torch.nn as nn\n'), ((2553, 2565), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (2563, 2565), True, 'import torch.nn as nn\n'), ((2606, 2634), 'torch.nn.Linear', 'nn.Linear', (['(256 * 6 * 6)', '(4096)'], {}), '(256 * 6 * 6, 4096)\n', (2615, 2634), True, 'import torch.nn as nn\n'), ((2656, 2677), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2663, 2677), True, 'import torch.nn as nn\n'), ((2717, 2729), 'torch.nn.Dropout', 'nn.Dropout', ([], {}), '()\n', (2727, 2729), True, 'import torch.nn as nn\n'), ((2770, 2791), 'torch.nn.Linear', 'nn.Linear', (['(4096)', '(4096)'], {}), '(4096, 4096)\n', (2779, 2791), True, 'import torch.nn as 
nn\n'), ((2813, 2834), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2820, 2834), True, 'import torch.nn as nn\n'), ((2875, 2903), 'torch.nn.Linear', 'nn.Linear', (['(4096)', 'num_classes'], {}), '(4096, num_classes)\n', (2884, 2903), True, 'import torch.nn as nn\n'), ((3232, 3255), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (3249, 3255), False, 'import torch\n'), ((1258, 1276), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(64)'], {}), '(64)\n', (1272, 1276), True, 'import torch.nn as nn\n'), ((1537, 1556), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(192)'], {}), '(192)\n', (1551, 1556), True, 'import torch.nn as nn\n'), ((1818, 1837), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(384)'], {}), '(384)\n', (1832, 1837), True, 'import torch.nn as nn\n'), ((2021, 2040), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (2035, 2040), True, 'import torch.nn as nn\n'), ((2224, 2243), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(256)'], {}), '(256)\n', (2238, 2243), True, 'import torch.nn as nn\n'), ((3348, 3418), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {'mode': '"""fan_out"""', 'nonlinearity': '"""relu"""'}), "(m.weight, mode='fan_out', nonlinearity='relu')\n", (3371, 3418), True, 'import torch.nn as nn\n'), ((3478, 3506), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (3495, 3506), True, 'import torch.nn as nn\n'), ((3572, 3602), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.weight', '(1)'], {}), '(m.weight, 1)\n', (3589, 3602), True, 'import torch.nn as nn\n'), ((3619, 3647), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (3636, 3647), True, 'import torch.nn as nn\n'), ((3708, 3742), 'torch.nn.init.normal_', 'nn.init.normal_', (['m.weight', '(0)', '(0.01)'], {}), '(m.weight, 0, 0.01)\n', (3723, 3742), True, 'import torch.nn as nn\n'), ((3802, 3830), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (3819, 3830), True, 'import torch.nn as nn\n')] |
import pytest
from django.utils.timezone import now
from pretix.base.models import Device, Event, Organizer, Team, User
from pretix.base.models.devices import generate_api_token
@pytest.fixture
def organizer():
return Organizer.objects.create(name='Dummy', slug='dummy')
@pytest.fixture
def event(organizer):
event = Event.objects.create(
organizer=organizer, name='Dummy', slug='dummy',
date_from=now()
)
return event
@pytest.fixture
def device(organizer):
return organizer.devices.create(name='Cashdesk')
@pytest.fixture
def admin_user(admin_team):
u = User.objects.create_user('<EMAIL>', 'dummy')
admin_team.members.add(u)
return u
@pytest.fixture
def admin_team(organizer):
return Team.objects.create(organizer=organizer, can_change_organizer_settings=True, name='Admin team')
@pytest.mark.django_db
def test_list_of_devices(event, admin_user, client, device):
client.login(email='<EMAIL>', password='<PASSWORD>')
resp = client.get('/control/organizer/dummy/devices')
assert 'Cashdesk' in resp.rendered_content
@pytest.mark.django_db
def test_create_device(event, admin_user, admin_team, client):
client.login(email='<EMAIL>', password='<PASSWORD>')
resp = client.post('/control/organizer/dummy/device/add', {
'name': 'Foo',
'limit_events': str(event.pk),
}, follow=True)
d = Device.objects.last()
assert d.name == 'Foo'
assert not d.all_events
assert list(d.limit_events.all()) == [event]
assert d.initialization_token in resp.content.decode()
@pytest.mark.django_db
def test_update_device(event, admin_user, admin_team, device, client):
client.login(email='<EMAIL>', password='<PASSWORD>')
client.post('/control/organizer/dummy/device/{}/edit'.format(device.pk), {
'name': 'Cashdesk 2',
'limit_events': str(event.pk),
}, follow=True)
device.refresh_from_db()
assert device.name == 'Cashdesk 2'
assert not device.all_events
assert list(device.limit_events.all()) == [event]
@pytest.mark.django_db
def test_revoke_device(event, admin_user, admin_team, device, client):
client.login(email='<EMAIL>', password='<PASSWORD>')
device.api_token = generate_api_token()
device.initialized = now()
device.save()
client.get('/control/organizer/dummy/device/{}/revoke'.format(device.pk))
client.post('/control/organizer/dummy/device/{}/revoke'.format(device.pk), {}, follow=True)
device.refresh_from_db()
assert device.revoked
| [
"pretix.base.models.Team.objects.create",
"pretix.base.models.Organizer.objects.create",
"pretix.base.models.devices.generate_api_token",
"django.utils.timezone.now",
"pretix.base.models.Device.objects.last",
"pretix.base.models.User.objects.create_user"
]
| [((225, 277), 'pretix.base.models.Organizer.objects.create', 'Organizer.objects.create', ([], {'name': '"""Dummy"""', 'slug': '"""dummy"""'}), "(name='Dummy', slug='dummy')\n", (249, 277), False, 'from pretix.base.models import Device, Event, Organizer, Team, User\n'), ((604, 648), 'pretix.base.models.User.objects.create_user', 'User.objects.create_user', (['"""<EMAIL>"""', '"""dummy"""'], {}), "('<EMAIL>', 'dummy')\n", (628, 648), False, 'from pretix.base.models import Device, Event, Organizer, Team, User\n'), ((748, 847), 'pretix.base.models.Team.objects.create', 'Team.objects.create', ([], {'organizer': 'organizer', 'can_change_organizer_settings': '(True)', 'name': '"""Admin team"""'}), "(organizer=organizer, can_change_organizer_settings=True,\n name='Admin team')\n", (767, 847), False, 'from pretix.base.models import Device, Event, Organizer, Team, User\n'), ((1391, 1412), 'pretix.base.models.Device.objects.last', 'Device.objects.last', ([], {}), '()\n', (1410, 1412), False, 'from pretix.base.models import Device, Event, Organizer, Team, User\n'), ((2228, 2248), 'pretix.base.models.devices.generate_api_token', 'generate_api_token', ([], {}), '()\n', (2246, 2248), False, 'from pretix.base.models.devices import generate_api_token\n'), ((2274, 2279), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (2277, 2279), False, 'from django.utils.timezone import now\n'), ((427, 432), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (430, 432), False, 'from django.utils.timezone import now\n')] |
# -*- coding: utf-8 -*-
import bleach
import json
def strip_html(unclean):
"""Sanitize a string, removing (as opposed to escaping) HTML tags
:param unclean: A string to be stripped of HTML tags
:return: stripped string
:rtype: str
"""
return bleach.clean(unclean, strip=True, tags=[], attributes=[], styles=[])
def clean_tag(data):
"""Format as a valid Tag
:param data: A string to be cleaned
:return: cleaned string
:rtype: str
"""
# TODO: make this a method of Tag?
return escape_html(data).replace('"', '"').replace("'", ''')
def is_iterable_but_not_string(obj):
"""Return True if ``obj`` is an iterable object that isn't a string."""
return (hasattr(obj, '__iter__') and not hasattr(obj, 'strip'))
def escape_html(data):
"""Escape HTML characters in data.
:param data: A string, dict, or list to clean of HTML characters
:return: A cleaned object
:rtype: str or list or dict
"""
if isinstance(data, dict):
return {
key: escape_html(value)
for (key, value) in data.iteritems()
}
if is_iterable_but_not_string(data):
return [
escape_html(value)
for value in data
]
if isinstance(data, basestring):
return bleach.clean(data)
return data
def assert_clean(data):
"""Ensure that data is cleaned
:raise: AssertionError
"""
def _ensure_clean(value):
if value != bleach.clean(value):
raise ValueError
return escape_html(data)
# TODO: Remove safe_unescape_html when mako html safe comes in
def safe_unescape_html(value):
"""
Return data without html escape characters.
:param value: A string, dict, or list
:return: A string or list or dict without html escape characters
"""
safe_characters = {
'&': '&',
'<': '<',
'>': '>',
}
if isinstance(value, dict):
return {
key: safe_unescape_html(value)
for (key, value) in value.iteritems()
}
if is_iterable_but_not_string(value):
return [
safe_unescape_html(each)
for each in value
]
if isinstance(value, basestring):
for escape_sequence, character in safe_characters.items():
value = value.replace(escape_sequence, character)
return value
return value
def safe_json(value):
"""
Dump a string to JSON in a manner that can be used for JS strings in mako templates.
Providing additional forward-slash escaping to prevent injection of closing markup in strings. See:
http://benalpert.com/2012/08/03/preventing-xss-json.html
:param value: A string to be converted
:return: A JSON-formatted string that explicitly escapes forward slashes when needed
"""
return json.dumps(value).replace('</', '<\\/') # Fix injection of closing markup in strings
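# Worked example (illustrative): safe_json('</script>') returns the string
# "<\/script>" (quotes included), so the payload cannot terminate an inline
# <script> block when embedded in a mako template.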
| [
"bleach.clean",
"json.dumps"
]
| [((270, 338), 'bleach.clean', 'bleach.clean', (['unclean'], {'strip': '(True)', 'tags': '[]', 'attributes': '[]', 'styles': '[]'}), '(unclean, strip=True, tags=[], attributes=[], styles=[])\n', (282, 338), False, 'import bleach\n'), ((1309, 1327), 'bleach.clean', 'bleach.clean', (['data'], {}), '(data)\n', (1321, 1327), False, 'import bleach\n'), ((1491, 1510), 'bleach.clean', 'bleach.clean', (['value'], {}), '(value)\n', (1503, 1510), False, 'import bleach\n'), ((2873, 2890), 'json.dumps', 'json.dumps', (['value'], {}), '(value)\n', (2883, 2890), False, 'import json\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
from torch.utils import data
from torch import optim
import torchvision.models as models
from torch.autograd import Variable
import torchvision as tv
import random
import math
import time
from datetime import datetime
import os
import argparse
import subprocess
from util.LFUtil import *
import numpy as np
from networks.LFMNet import LFMNet
def main(args=None):
# # Arguments
# parser = argparse.ArgumentParser()
# # Number of epochs
# parser.add_argument('--epochs', type=int, default=1000)
# # Validate every n percentage of the data
# parser.add_argument('--valEvery', type=float, default=0.25)
# # Image indices to use for training and validation
# parser.add_argument('--imagesToUse', nargs='+', type=int, default=list(range(0,5,1)))
# # List of GPUs to use: 0 1 2 for example
# parser.add_argument('--GPUs', nargs='+', type=int, default=None)
# # Batch size
# parser.add_argument('--batchSize', type=int, default=128)
# # Perentage of the data to use for validation, from 0 to 1
# parser.add_argument('--validationSplit', type=float, default=0.1)
# # Bias initialization value
# parser.add_argument('--biasVal', type=float, default=0.1)
# # Learning rate
# parser.add_argument('--learningRate', type=float, default=0.001)
# # Use bias flag
# parser.add_argument('--useBias', type=str2bool, default=True)
# # Use skip connections flag
# parser.add_argument('--useSkipCon', type=str2bool, default=False)
# # User selected random seed
# parser.add_argument('--randomSeed', type=int, default=None)
# # fov of input or neighboarhood around lenslet to reconstruct
# parser.add_argument('--fovInput', type=int, default=9)
# # nT number of lenslets to reconstruct simultaneously use at training time
# parser.add_argument('--neighShape', type=int, default=3)
# # Flag to use shallow or large U-net
# parser.add_argument('--useShallowUnet', type=str2bool, default=True)
# # Lower threshold of GT stacks, to get rid of autofluorescence
# parser.add_argument('--ths', type=float, default=0.03)
# # Path to dataset
# parser.add_argument('--datasetPath', nargs='?', default="BrainLFMConfocalDataset/Brain_40x_64Depths_362imgs.h5")
# # Path to directory where models and tensorboard logs are stored
# parser.add_argument('--outputPath', nargs='?', default="runs/")
# # Prefix for current output folder
# parser.add_argument('--outputPrefix', nargs='?', default="")
# # Path to model in case of continuing a training
# parser.add_argument('--checkpointPath', nargs='?', default=None)
# args = parser.parse_args()
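    # `args` must already be populated when main() is called directly; e.g.
    # (illustrative only, mirroring the commented defaults above):
    #     import argparse
    #     args = argparse.Namespace(epochs=1000, valEvery=0.25, GPUs=None,
    #                                batchSize=128, ...)  # plus the remaining fields
    #     main(args)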
nImgs = len(args.imagesToUse)
# Setup multithreading
num_workers = getThreads()
if num_workers!=0:
torch.set_num_threads(num_workers)
if not torch.cuda.is_available():
print("GPU initialization error")
exit(-1)
if torch.cuda.is_available():
print ("Cuda is available")
device_id = torch.cuda.current_device()
gpu_properties = torch.cuda.get_device_properties(device_id)
print("Found %d GPUs available. Using GPU %d (%s) of compute capability %d.%d with "
"%.1fGb total memory.\n" %
(torch.cuda.device_count(),
device_id,
gpu_properties.name,
gpu_properties.major,
gpu_properties.minor,
gpu_properties.total_memory / 1e9))
# Select GPUs to use
args.GPUs = list(range(torch.cuda.device_count())) if args.GPUs is None else args.GPUs
print('Using GPUs: ' + str(args.GPUs))
device_ids = args.GPUs
# Set common random seed
if args.randomSeed is not None:
np.random.seed(args.randomSeed)
torch.manual_seed(args.randomSeed)
# Load checkpoint if provided
if args.checkpointPath is not None:
checkpointPath = args.checkpointPath
checkpoint = torch.load(checkpointPath)
# overwrite args
args = checkpoint['args']
args.checkpointPath = checkpointPath
# set Device to use
device = torch.device("cuda:"+str(device_ids[0]) if torch.cuda.is_available() else "cpu")
# Create unique label
today = datetime.now()
# Get commit number
# label = subprocess.check_output(["git", "describe", "--always"]).strip()
#specific to MBL lab workstation
label = subprocess.check_output(["C:/Program Files/git/bin/git", "describe", "--always"]).strip()
comment = today.strftime('%Y_%m_%d__%H%M%S') + "_"+ str(args.useBias) +"B_"+str(args.biasVal)+"bias_" + str(nImgs) + \
"I_"+ str(args.batchSize)+"BS_"+str(args.useSkipCon)+"Sk_" + str(args.fovInput) + "FOV_" + str(args.neighShape) + "nT_" \
+ str(args.ths) + "ths_" + str(label.decode("utf-8") ) + "_commit__" + args.outputPrefix
# Create output folder
save_folder = args.outputPath + "/" + comment
# If asked to continue a training, save in the same folder
if args.checkpointPath is not None:
save_folder = os.path.split(args.checkpointPath)[0]
print(save_folder)
# Create summary writer to log stuff
writer = SummaryWriter(log_dir=save_folder)
writer.add_text('Description',comment,0)
writer.flush()
# Load dataset
all_data = Dataset(args.datasetPath, args.randomSeed, \
fov=args.fovInput, neighShape=args.neighShape, img_indices=args.imagesToUse, get_full_imgs=False, center_region=None)
# Split validation and testing
train_size = int((1 - args.validationSplit) * len(all_data))
test_size = len(all_data) - train_size
train_dataset, test_dataset = torch.utils.data.random_split(all_data, [train_size, test_size])
# Create data loaders
train_dataset = data.DataLoader(train_dataset, batch_size=args.batchSize,
shuffle=True, num_workers=num_workers, pin_memory=True)
test_dataset = data.DataLoader(test_dataset, batch_size=args.batchSize,
shuffle=True, num_workers=num_workers, pin_memory=True)
validate_every = np.round(len(train_dataset)*args.valEvery)
# Get Dataset information
nDepths = all_data.get_n_depths()
volShape, LFshape = all_data.__shape__()
LFshape = LFshape[0:4]
lateralTile = int(math.sqrt(nDepths))
# Find normalization values
maxInputTrain, maxVolumeTrain = all_data.get_max()
maxInputTest, maxVolumeTest = all_data.get_max()
# Create network
net = LFMNet(nDepths, args.useBias, args.useSkipCon, LFshape, LFfov=args.fovInput, use_small_unet=args.useShallowUnet).to(device)
optimizer = optim.Adam(net.parameters(), lr=args.learningRate)
lossFunction = nn.L1Loss()
# Create SSIM criteria
ssim = SSIM()
ssim.eval()
# Init bias and weights if needed
if args.useBias:
def bias_init(m):
if isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv3d):
if m.bias is not None:
nn.init.constant_(m.bias.data, args.biasVal)
nn.init.kaiming_normal_(m.weight)
if isinstance(m, nn.ConvTranspose2d):
nn.init.constant_(m.bias.data, args.biasVal)
nn.init.kaiming_normal_(m.weight)
net.apply(bias_init)
# Load network from checkpoint
if args.checkpointPath is not None:
net.load_state_dict(checkpoint['model_state_dict'])
optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
epochStart = checkpoint['epoch']
epochs = args.epochs + epochStart
train_loss = checkpoint['loss']
# Start distributed data parallel, as it's faster than DataParallel
if torch.cuda.device_count() > 1:
print("Let's use", torch.cuda.device_count(), "GPUs!")
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '1234'+str(device_ids[0])
torch.distributed.init_process_group(backend="nccl", rank=0, world_size=1)
# Move network to distributed data parallel
net = nn.parallel.DistributedDataParallel(net, device_ids=args.GPUs, output_device=args.GPUs[0]).to(device)
# timers
start = torch.cuda.Event(enable_timing=True)
end = torch.cuda.Event(enable_timing=True)
global_it_counter = 0
# define indices to grab for tensorboard visualization
indices_to_show = torch.randperm(test_size)[0:8]
# Init arrays to store losses
train_losses, test_losses = [], []
test_loss = 0
epochStart = 0
# Start training
for epoch in range(epochStart, args.epochs):
net.train()
torch.set_grad_enabled(True)
torch.cuda.empty_cache()
train_loss = 0
print('Training')
global_it_counter = 0
for nBatch,(inputs,labels) in enumerate(train_dataset):
# compute current iteration
curr_it = epoch*len(train_dataset) + nBatch
# start timer
start.record()
print('ep: ' + str(epoch) + ' ' + str(nBatch+1) + '/' + str(len(train_dataset)) + ' currIt: ' + str(curr_it))
optimizer.zero_grad()
# load data to gpu and normalize from 0 to 1
inputGPU = inputs.float().to(device) / maxInputTest
outputsGT = labels.float().to(device) / maxVolumeTrain
# Threshold GT to get rid of autofluorescence
if args.ths!=0:
outputsGT = imadjust(outputsGT, args.ths,outputsGT.max(), outputsGT.min(), outputsGT.max())
# Predict
outputsVol = net(inputGPU)
loss = lossFunction(outputsGT,outputsVol)
loss.backward()
train_loss += loss.item() / nDepths
optimizer.step()
global_it_counter += inputs.shape[0]
# Record training time
end.record()
torch.cuda.synchronize()
end_time = start.elapsed_time(end)
# Compute time per sample
elapsed_time = end_time/inputs.shape[0]
# Check if validation is required
if nBatch%validate_every==0:
print(comment)
# Write training images to tensorboard
lastBatchSize = min(outputsGT.shape[0],4)
gridOut2 = torch.cat((outputsGT[0:lastBatchSize, :, :, :, :].sum(2).cpu().data.detach(), outputsVol[0:lastBatchSize, :, :, :, :].sum(2).cpu().data.detach()), dim=0)
gridOut2 = tv.utils.make_grid(gridOut2, normalize=True, scale_each=False)
# Select some images in the batch for showing
indices_to_display = torch.randperm(inputGPU.shape[0])[0:4]
outputsGT = F.interpolate(outputsGT[indices_to_display, :, :, :, :],[LFshape[0]*2,LFshape[1]*2,volShape[2]])
outputsVol = F.interpolate(outputsVol[indices_to_display, :, :, :, :],[LFshape[0]*2,LFshape[1]*2,volShape[2]])
inputGPU = inputGPU[indices_to_display,:,:,:,:,:]
currPred = convert3Dto2DTiles(outputsVol, [lateralTile, lateralTile])
currGT = convert3Dto2DTiles(outputsGT, [lateralTile, lateralTile])
inputGrid = LF2Spatial(inputGPU, inputGPU.shape[2:])
gridPred = tv.utils.make_grid(currPred,normalize=True, scale_each=False)
gridGT = tv.utils.make_grid(currGT,normalize=True, scale_each=False)
gridInput = tv.utils.make_grid(inputGrid,normalize=True, scale_each=False)
gt = outputsGT[0,:,:,:,:].sum(3).repeat(3,1,1)
gt /= gt.max()
# Write to tensorboard
writer.add_image('z_proj_train',gt,curr_it)
writer.add_image('images_train_YZ_projection', gridOut2, curr_it)
writer.add_image('outputRGB_train', gridPred, curr_it)
writer.add_image('outputRGB_train_GT', gridGT, curr_it)
writer.add_image('input_train', gridInput, curr_it)
writer.add_scalar('Loss/train', train_loss/global_it_counter, curr_it)
writer.add_scalar('times/train', elapsed_time, curr_it)
# Restart
train_loss = 0.0
global_it_counter = 0
print('Validating')
net.eval()
with torch.no_grad():
avg_psnr = 0
avg_ssim = 0
test_loss = 0
start.record()
for nBatch,(inputs,labels) in enumerate(test_dataset):
inputGPU = inputs.float().to(device) / maxInputTest
outputsGT = labels.float().to(device) / maxVolumeTrain
# Threshold GT to get rid of autofluorescence
outputsGT = imadjust(outputsGT,args.ths,outputsGT.max(), outputsGT.min(), outputsGT.max())
outputsVol = net(inputGPU)
loss = lossFunction(outputsGT,outputsVol)
test_loss += loss.item() / nDepths
# Compute PSNR
lossMSE = nn.functional.mse_loss(outputsVol.to(device).detach(), outputsGT.to(device).detach())
avg_psnr += 10 * math.log10(1 / lossMSE.item())
# Compute ssim
avg_ssim += ssim(outputsVol[:,0,:,:,:].permute(0,3,1,2).contiguous().detach().to(device), outputsGT[:,0,:,:,:].permute(0,3,1,2).contiguous().detach().to(device)).sum()
end.record()
torch.cuda.synchronize()
lastBatchSize = min(outputsGT.shape[0],4)
gridOut2 = torch.cat((outputsGT[0:lastBatchSize, :, :, :, :].sum(2).cpu().data.detach(), outputsVol[0:lastBatchSize, :, :, :, :].sum(2).cpu().data.detach()), dim=0)
gridOut2 = tv.utils.make_grid(gridOut2, normalize=True, scale_each=False)
# process some for showing
indices_to_display = torch.randperm(inputGPU.shape[0])[0:lastBatchSize]
outputsGT = F.interpolate(outputsGT[indices_to_display, :, :, :, :],[LFshape[0]*2,LFshape[1]*2,volShape[2]])
outputsVol = F.interpolate(outputsVol[indices_to_display, :, :, :, :],[LFshape[0]*2,LFshape[1]*2,volShape[2]])
inputGPU = inputGPU[indices_to_display,:,:,:,:,:]
currPred = convert3Dto2DTiles(outputsVol, [lateralTile, lateralTile])
currGT = convert3Dto2DTiles(outputsGT, [lateralTile, lateralTile])
inputGrid = LF2Spatial(inputGPU, inputGPU.shape[2:])
gridPred = tv.utils.make_grid(currPred,normalize=True, scale_each=False)
gridGT = tv.utils.make_grid(currGT,normalize=True, scale_each=False)
gridInput = tv.utils.make_grid(inputGrid,normalize=True, scale_each=False)
# Write to tensorboard
writer.add_image('images_val_YZ_projection', gridOut2, curr_it)
writer.add_image('outputRGB_test', gridPred, curr_it)
writer.add_image('outputRGB_test_GT', gridGT, curr_it)
writer.add_image('input_test', gridInput, curr_it)
writer.add_scalar('Loss/test', test_loss/len(test_dataset), curr_it)
writer.add_scalar('Loss/psnr_val', avg_psnr/len(test_dataset), curr_it)
writer.add_scalar('Loss/ssim_val', avg_ssim/len(test_dataset), curr_it)
writer.add_scalar('LearningRate', args.learningRate, curr_it)
writer.add_scalar('times/val', start.elapsed_time(end)/test_size, curr_it)
net.train()
if epoch%2==0:
torch.save({
'epoch': epoch,
'args' : args,
'model_state_dict': net.state_dict(),
'optimizer_state_dict': optimizer.state_dict(),
'loss': train_loss,
'dataset_path': args.datasetPath},
save_folder + '/model_'+str(epoch))
print(f"Epoch {epoch + 1}/{args.epochs}.. "
f"Train loss: {train_loss / len(train_dataset):.7f}.. "
f"Test loss: {test_loss / len(test_dataset):.7f}.. ")
if __name__ == '__main__':
    main()
| [
"torch.randperm",
"torch.nn.init.constant_",
"torch.nn.L1Loss",
"math.sqrt",
"torch.cuda.device_count",
"torch.cuda.synchronize",
"torch.cuda.is_available",
"torch.nn.functional.interpolate",
"torchvision.utils.make_grid",
"torch.utils.tensorboard.SummaryWriter",
"torch.nn.init.kaiming_normal_",
"torch.set_num_threads",
"os.path.split",
"numpy.random.seed",
"torch.cuda.current_device",
"networks.LFMNet.LFMNet",
"torch.cuda.Event",
"subprocess.check_output",
"torch.nn.parallel.DistributedDataParallel",
"torch.utils.data.random_split",
"torch.no_grad",
"torch.cuda.empty_cache",
"torch.manual_seed",
"torch.load",
"datetime.datetime.now",
"torch.utils.data.DataLoader",
"torch.set_grad_enabled",
"torch.distributed.init_process_group",
"torch.cuda.get_device_properties"
]
| [((3063, 3088), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3086, 3088), False, 'import torch\n'), ((4383, 4397), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4395, 4397), False, 'from datetime import datetime\n'), ((5320, 5354), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', ([], {'log_dir': 'save_folder'}), '(log_dir=save_folder)\n', (5333, 5354), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((5805, 5869), 'torch.utils.data.random_split', 'torch.utils.data.random_split', (['all_data', '[train_size, test_size]'], {}), '(all_data, [train_size, test_size])\n', (5834, 5869), False, 'import torch\n'), ((5916, 6033), 'torch.utils.data.DataLoader', 'data.DataLoader', (['train_dataset'], {'batch_size': 'args.batchSize', 'shuffle': '(True)', 'num_workers': 'num_workers', 'pin_memory': '(True)'}), '(train_dataset, batch_size=args.batchSize, shuffle=True,\n num_workers=num_workers, pin_memory=True)\n', (5931, 6033), False, 'from torch.utils import data\n'), ((6085, 6201), 'torch.utils.data.DataLoader', 'data.DataLoader', (['test_dataset'], {'batch_size': 'args.batchSize', 'shuffle': '(True)', 'num_workers': 'num_workers', 'pin_memory': '(True)'}), '(test_dataset, batch_size=args.batchSize, shuffle=True,\n num_workers=num_workers, pin_memory=True)\n', (6100, 6201), False, 'from torch.utils import data\n'), ((6860, 6871), 'torch.nn.L1Loss', 'nn.L1Loss', ([], {}), '()\n', (6869, 6871), True, 'import torch.nn as nn\n'), ((8325, 8361), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (8341, 8361), False, 'import torch\n'), ((8372, 8408), 'torch.cuda.Event', 'torch.cuda.Event', ([], {'enable_timing': '(True)'}), '(enable_timing=True)\n', (8388, 8408), False, 'import torch\n'), ((2914, 2948), 'torch.set_num_threads', 'torch.set_num_threads', (['num_workers'], {}), '(num_workers)\n', (2935, 2948), False, 'import torch\n'), ((2961, 2986), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2984, 2986), False, 'import torch\n'), ((3146, 3173), 'torch.cuda.current_device', 'torch.cuda.current_device', ([], {}), '()\n', (3171, 3173), False, 'import torch\n'), ((3199, 3242), 'torch.cuda.get_device_properties', 'torch.cuda.get_device_properties', (['device_id'], {}), '(device_id)\n', (3231, 3242), False, 'import torch\n'), ((3878, 3909), 'numpy.random.seed', 'np.random.seed', (['args.randomSeed'], {}), '(args.randomSeed)\n', (3892, 3909), True, 'import numpy as np\n'), ((3918, 3952), 'torch.manual_seed', 'torch.manual_seed', (['args.randomSeed'], {}), '(args.randomSeed)\n', (3935, 3952), False, 'import torch\n'), ((4094, 4120), 'torch.load', 'torch.load', (['checkpointPath'], {}), '(checkpointPath)\n', (4104, 4120), False, 'import torch\n'), ((6458, 6476), 'math.sqrt', 'math.sqrt', (['nDepths'], {}), '(nDepths)\n', (6467, 6476), False, 'import math\n'), ((7842, 7867), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (7865, 7867), False, 'import torch\n'), ((8054, 8128), 'torch.distributed.init_process_group', 'torch.distributed.init_process_group', ([], {'backend': '"""nccl"""', 'rank': '(0)', 'world_size': '(1)'}), "(backend='nccl', rank=0, world_size=1)\n", (8090, 8128), False, 'import torch\n'), ((8516, 8541), 'torch.randperm', 'torch.randperm', (['test_size'], {}), '(test_size)\n', (8530, 8541), False, 'import torch\n'), ((8756, 8784), 'torch.set_grad_enabled', 'torch.set_grad_enabled', (['(True)'], {}), '(True)\n', (8778, 8784), False, 
'import torch\n'), ((8793, 8817), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (8815, 8817), False, 'import torch\n'), ((4306, 4331), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4329, 4331), False, 'import torch\n'), ((4551, 4636), 'subprocess.check_output', 'subprocess.check_output', (["['C:/Program Files/git/bin/git', 'describe', '--always']"], {}), "(['C:/Program Files/git/bin/git', 'describe',\n '--always'])\n", (4574, 4636), False, 'import subprocess\n'), ((5204, 5238), 'os.path.split', 'os.path.split', (['args.checkpointPath'], {}), '(args.checkpointPath)\n', (5217, 5238), False, 'import os\n'), ((6650, 6766), 'networks.LFMNet.LFMNet', 'LFMNet', (['nDepths', 'args.useBias', 'args.useSkipCon', 'LFshape'], {'LFfov': 'args.fovInput', 'use_small_unet': 'args.useShallowUnet'}), '(nDepths, args.useBias, args.useSkipCon, LFshape, LFfov=args.fovInput,\n use_small_unet=args.useShallowUnet)\n', (6656, 6766), False, 'from networks.LFMNet import LFMNet\n'), ((7900, 7925), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (7923, 7925), False, 'import torch\n'), ((9994, 10018), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (10016, 10018), False, 'import torch\n'), ((3670, 3695), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (3693, 3695), False, 'import torch\n'), ((7208, 7241), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {}), '(m.weight)\n', (7231, 7241), True, 'import torch.nn as nn\n'), ((7308, 7352), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias.data', 'args.biasVal'], {}), '(m.bias.data, args.biasVal)\n', (7325, 7352), True, 'import torch.nn as nn\n'), ((7369, 7402), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['m.weight'], {}), '(m.weight)\n', (7392, 7402), True, 'import torch.nn as nn\n'), ((8196, 8290), 'torch.nn.parallel.DistributedDataParallel', 'nn.parallel.DistributedDataParallel', (['net'], {'device_ids': 'args.GPUs', 'output_device': 'args.GPUs[0]'}), '(net, device_ids=args.GPUs,\n output_device=args.GPUs[0])\n', (8231, 8290), True, 'import torch.nn as nn\n'), ((10596, 10658), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['gridOut2'], {'normalize': '(True)', 'scale_each': '(False)'}), '(gridOut2, normalize=True, scale_each=False)\n', (10614, 10658), True, 'import torchvision as tv\n'), ((10825, 10933), 'torch.nn.functional.interpolate', 'F.interpolate', (['outputsGT[indices_to_display, :, :, :, :]', '[LFshape[0] * 2, LFshape[1] * 2, volShape[2]]'], {}), '(outputsGT[indices_to_display, :, :, :, :], [LFshape[0] * 2, \n LFshape[1] * 2, volShape[2]])\n', (10838, 10933), True, 'import torch.nn.functional as F\n'), ((10951, 11060), 'torch.nn.functional.interpolate', 'F.interpolate', (['outputsVol[indices_to_display, :, :, :, :]', '[LFshape[0] * 2, LFshape[1] * 2, volShape[2]]'], {}), '(outputsVol[indices_to_display, :, :, :, :], [LFshape[0] * 2, \n LFshape[1] * 2, volShape[2]])\n', (10964, 11060), True, 'import torch.nn.functional as F\n'), ((11380, 11442), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['currPred'], {'normalize': '(True)', 'scale_each': '(False)'}), '(currPred, normalize=True, scale_each=False)\n', (11398, 11442), True, 'import torchvision as tv\n'), ((11467, 11527), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['currGT'], {'normalize': '(True)', 'scale_each': '(False)'}), '(currGT, normalize=True, scale_each=False)\n', (11485, 11527), True, 'import torchvision as 
tv\n'), ((11555, 11618), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['inputGrid'], {'normalize': '(True)', 'scale_each': '(False)'}), '(inputGrid, normalize=True, scale_each=False)\n', (11573, 11618), True, 'import torchvision as tv\n'), ((3397, 3422), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (3420, 3422), False, 'import torch\n'), ((7147, 7191), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias.data', 'args.biasVal'], {}), '(m.bias.data, args.biasVal)\n', (7164, 7191), True, 'import torch.nn as nn\n'), ((10758, 10791), 'torch.randperm', 'torch.randperm', (['inputGPU.shape[0]'], {}), '(inputGPU.shape[0])\n', (10772, 10791), False, 'import torch\n'), ((12471, 12486), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (12484, 12486), False, 'import torch\n'), ((13730, 13754), 'torch.cuda.synchronize', 'torch.cuda.synchronize', ([], {}), '()\n', (13752, 13754), False, 'import torch\n'), ((14051, 14113), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['gridOut2'], {'normalize': '(True)', 'scale_each': '(False)'}), '(gridOut2, normalize=True, scale_each=False)\n', (14069, 14113), True, 'import torchvision as tv\n'), ((14285, 14393), 'torch.nn.functional.interpolate', 'F.interpolate', (['outputsGT[indices_to_display, :, :, :, :]', '[LFshape[0] * 2, LFshape[1] * 2, volShape[2]]'], {}), '(outputsGT[indices_to_display, :, :, :, :], [LFshape[0] * 2, \n LFshape[1] * 2, volShape[2]])\n', (14298, 14393), True, 'import torch.nn.functional as F\n'), ((14415, 14524), 'torch.nn.functional.interpolate', 'F.interpolate', (['outputsVol[indices_to_display, :, :, :, :]', '[LFshape[0] * 2, LFshape[1] * 2, volShape[2]]'], {}), '(outputsVol[indices_to_display, :, :, :, :], [LFshape[0] * 2, \n LFshape[1] * 2, volShape[2]])\n', (14428, 14524), True, 'import torch.nn.functional as F\n'), ((14865, 14927), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['currPred'], {'normalize': '(True)', 'scale_each': '(False)'}), '(currPred, normalize=True, scale_each=False)\n', (14883, 14927), True, 'import torchvision as tv\n'), ((14956, 15016), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['currGT'], {'normalize': '(True)', 'scale_each': '(False)'}), '(currGT, normalize=True, scale_each=False)\n', (14974, 15016), True, 'import torchvision as tv\n'), ((15048, 15111), 'torchvision.utils.make_grid', 'tv.utils.make_grid', (['inputGrid'], {'normalize': '(True)', 'scale_each': '(False)'}), '(inputGrid, normalize=True, scale_each=False)\n', (15066, 15111), True, 'import torchvision as tv\n'), ((14202, 14235), 'torch.randperm', 'torch.randperm', (['inputGPU.shape[0]'], {}), '(inputGPU.shape[0])\n', (14216, 14235), False, 'import torch\n')] |
from stix_shifter_utils.utils.entry_point_base import EntryPointBase
from stix_shifter_utils.modules.cim.stix_translation.cim_data_mapper import CimDataMapper
from stix_shifter_utils.modules.car.stix_translation.car_data_mapper import CarDataMapper
from .stix_translation.stix_to_elastic import StixToElastic
class EntryPoint(EntryPointBase):
def __init__(self, connection={}, configuration={}, options={}):
super().__init__(options)
self.add_dialect('default', query_translator=StixToElastic(), data_mapper=CarDataMapper(options), default=True)
self.add_dialect('cim', query_translator=StixToElastic(), data_mapper=CimDataMapper(options), default_include=False)
self.add_dialect('car', query_translator=StixToElastic(), data_mapper=CarDataMapper(options), default_include=False) | [
"stix_shifter_utils.modules.car.stix_translation.car_data_mapper.CarDataMapper",
"stix_shifter_utils.modules.cim.stix_translation.cim_data_mapper.CimDataMapper"
]
| [((530, 552), 'stix_shifter_utils.modules.car.stix_translation.car_data_mapper.CarDataMapper', 'CarDataMapper', (['options'], {}), '(options)\n', (543, 552), False, 'from stix_shifter_utils.modules.car.stix_translation.car_data_mapper import CarDataMapper\n'), ((646, 668), 'stix_shifter_utils.modules.cim.stix_translation.cim_data_mapper.CimDataMapper', 'CimDataMapper', (['options'], {}), '(options)\n', (659, 668), False, 'from stix_shifter_utils.modules.cim.stix_translation.cim_data_mapper import CimDataMapper\n'), ((771, 793), 'stix_shifter_utils.modules.car.stix_translation.car_data_mapper.CarDataMapper', 'CarDataMapper', (['options'], {}), '(options)\n', (784, 793), False, 'from stix_shifter_utils.modules.car.stix_translation.car_data_mapper import CarDataMapper\n')] |
"""
Crack a password using a genetic algorithm!
"""
import random as rnd
def main():
"""
This file implements a genetic algorithm to solve the problem of
    cracking a given password, by creating 'generations' of different
    words, selecting the best ones, breeding them, applying a simple
    randomized crossover and a mutation chance.
"""
#variables dict: Define the problem constants
genetic_variables = {
'password' : "<PASSWORD>",
'size_population' : 100,
'best_sample' : 20,
'lucky_few' : 20,
'number_of_child' : 5,
'number_of_generations' : 10000, #Overkill >:D
'chance_of_mutation' : .5
}
prob = genetic_variables
#program
if (prob['best_sample'] + prob['lucky_few'])/2*prob['number_of_child'] != prob['size_population']:
print ("population size not stable")
return
last_gen, _ = genetic_algorithm(**genetic_variables)
print("Last generation: \n\n")
print(last_gen)
def genetic_algorithm(**kwargs):
"""
Execute the genetic algorithm.
    The problem constants are passed in as keyword arguments.
    It iterates for 'number_of_generations' generations and returns
    the last generation together with the history.
"""
# Unpack the values from the dict
password = kwargs['password']
size_population = kwargs['size_population']
best_sample = kwargs['best_sample']
lucky_few = kwargs['lucky_few']
number_of_child = kwargs['number_of_child']
number_of_generations = kwargs['number_of_generations']
chance_of_mutation = kwargs['chance_of_mutation']
hist = []
# The genetic algorithm
curr_pop = initial_pop(size_population, password)
hist = curr_pop
last_found = -1
for _ in range (number_of_generations):
curr_pop = next_gen(curr_pop, password, best_sample, lucky_few, number_of_child, chance_of_mutation)
hist.append(curr_pop)
if check_solution(curr_pop, password):
last_found = _
break
if last_found != -1:
print(f"Found a solution in the {last_found} generation!!")
else:
print("No solution found! D':")
return curr_pop, hist
def next_gen(curr_pop, password, best_sample, lucky_few, number_of_child, chance_of_mutation):
"""
-> This is the main task of the Genetic Algorithm <-
Given the current population, apply the following steps:
- Compute the fitness of each individual in the population
- Select the best ones (and some lucky guys)
- Make them reproduce
- Mutate the children
- Return this new population
"""
pop_sorted = compute_perf_pop(curr_pop, password)
next_breeders = select_from_population(pop_sorted, best_sample, lucky_few)
next_pop = create_children(next_breeders, number_of_child)
next_gen = mutate_pop(next_pop, chance_of_mutation)
return next_gen
def initial_pop(size, password):
"""
    Generate a population of 'size' random words, each with the same
    length as the password.
"""
return [word_generate(len(password)) for _ in range(size)]
def fitness (password, test_word):
"""
The fitness function:
    fitness(test_word) = (# of correct chars) / (total number of chars) * 100
fitness(test_word) = 0 if # of correct chars = 0
fitness(test_word) = 100 if # of correct chars = total number of chars
"""
if (len(test_word) != len(password)):
print("Incompatible password...")
return
else:
score = (1 if password[i] == test_word[i] else 0 for i in range(len(password)))
return sum(score)*100/len(password)
def compute_perf_pop(population, password):
"""
    Return the population, sorted by the fitness of each individual (best first)
"""
populationPerf = {ind:fitness(password, ind) for ind in population}
# Sort by fitness, reversed (best ones in the beginning of the list)
return sorted(populationPerf.items(), key= lambda it: it[1], reverse=True)
def select_from_population(pop_sorted, best_sample, lucky_few):
"""
Create the next breeders, with 'best_sample' individuals which have the
top fitness value from the population, and 'lucky_few' individuals which
are randomly selected.
"""
next_gen = []
for i in range(best_sample):
next_gen.append(pop_sorted[i][0])
# Simple lucky few: randomly select some elements from the population
for i in range(lucky_few):
next_gen.append(rnd.choice(pop_sorted)[0])
rnd.shuffle(next_gen)
return next_gen
def create_children(breeders, nof_childs):
"""
Create the next population of individuals, by breeding two by two
"""
next_pop = []
mid_pos = len(breeders)//2 # len(breeders) must be an even number
for ind_1, ind_2 in zip(breeders[:mid_pos], breeders[mid_pos:]):
for _ in range(nof_childs):
next_pop.append(create_child(ind_1, ind_2))
return next_pop
def mutate_pop(population, chance):
"""
    Given a mutation chance, apply the mutation layer of the genetic
    algorithm: each individual is mutated with the specified probability.
"""
for i in range(len(population)):
if rnd.random() < chance:
population[i] = mutate_word(population[i])
return population
def mutate_word(word):
"""
    Mutate one letter (gene) of the word, then return it
"""
pos = int(rnd.random()*len(word))
word = word[:pos] + chr(97 + int(26*rnd.random())) + word[pos + 1:]
return word
def create_child(ind_1, ind_2):
"""
For each letter of the child, get a random gene from ind_1 or ind_2
in the i-th position.
"""
temp = [ind_1[i] if rnd.random() < 0.5 else ind_2[i] for i in range(len(ind_1))]
return "".join(temp)
def word_generate(length):
"""
    Generate a string of random lowercase letters with the given length.
"""
# Generate a random letter from alphabet, lowercase, and add to result
return "".join((chr(97 + rnd.randint(0, 26)) for _ in range(length)))
def check_solution(population, password):
"""
Check if the population found a solution to the problem
"""
return any(ind == password for ind in population)
if __name__ == '__main__':
main()
| [
"random.random",
"random.choice",
"random.randint",
"random.shuffle"
]
| [((4567, 4588), 'random.shuffle', 'rnd.shuffle', (['next_gen'], {}), '(next_gen)\n', (4578, 4588), True, 'import random as rnd\n'), ((5259, 5271), 'random.random', 'rnd.random', ([], {}), '()\n', (5269, 5271), True, 'import random as rnd\n'), ((5469, 5481), 'random.random', 'rnd.random', ([], {}), '()\n', (5479, 5481), True, 'import random as rnd\n'), ((4535, 4557), 'random.choice', 'rnd.choice', (['pop_sorted'], {}), '(pop_sorted)\n', (4545, 4557), True, 'import random as rnd\n'), ((5752, 5764), 'random.random', 'rnd.random', ([], {}), '()\n', (5762, 5764), True, 'import random as rnd\n'), ((6061, 6079), 'random.randint', 'rnd.randint', (['(0)', '(26)'], {}), '(0, 26)\n', (6072, 6079), True, 'import random as rnd\n'), ((5533, 5545), 'random.random', 'rnd.random', ([], {}), '()\n', (5543, 5545), True, 'import random as rnd\n')] |
import pygame
import time
import numpy as np
import sys
gray = (150, 150, 150)
white = (255, 255, 255)
black = (0, 0, 0, )
red_block = (255, 0, 0)
red_border = (76, 0, 19)
block_color = (255, 128, 0)
border_color = (165,42,42)
screen = None
SIDE = 50
BORDER = 5
MARGIN = 5
LINE = 1
h_switch = True
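# h_switch alternates which side of the cell the inner (block-colored) rectangle
# is inset from, so the two cells of a horizontal block appear to join into one
# continuous piece with a border only at its outer ends.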
def __draw_horizontal_block(x,y):
global screen, h_switch
pygame.draw.rect(screen, border_color, pygame.Rect(MARGIN + y*SIDE,MARGIN + x*SIDE, SIDE, SIDE))
pygame.draw.rect(screen, block_color, pygame.Rect(MARGIN + y*SIDE + h_switch*BORDER, MARGIN + x*SIDE + BORDER,
SIDE - BORDER, SIDE - 2*BORDER))
h_switch = not h_switch
def __draw_red_block(x,y):
global screen, h_switch
pygame.draw.rect(screen, red_border, pygame.Rect(MARGIN + y*SIDE,MARGIN + x*SIDE, SIDE, SIDE))
pygame.draw.rect(screen, red_block, pygame.Rect(MARGIN + y*SIDE + h_switch*BORDER, MARGIN + x*SIDE + BORDER,
SIDE - BORDER, SIDE - 2*BORDER))
h_switch = not h_switch
def __draw_vertical_block(x,y):
global screen
pygame.draw.rect(screen, border_color, pygame.Rect(MARGIN + y*SIDE, MARGIN + x*SIDE, SIDE, 2*SIDE))
pygame.draw.rect(screen, block_color, pygame.Rect(MARGIN + y*SIDE + BORDER, MARGIN + x*SIDE + BORDER,
SIDE - 2*BORDER, 2*SIDE - 2*BORDER))
## Render function for the unblockme_class
def render_unblockme(game_object):
matrix = game_object.internal_state
k, h, _ = game_object.shape
global screen
if screen is None:
pygame.init()
screen = pygame.display.set_mode((2*MARGIN+k*SIDE, 2*MARGIN+h*SIDE))
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.display.quit()
pygame.quit()
sys.exit(0)
screen.fill(black)
# first we draw the background
for x in range(0,k):
for y in range(0,h):
cell = matrix[x,y,:]
selected_block = np.where(cell == 1)[0]
if len(selected_block) != 0:
#draw the exit on the outer border
if selected_block[0] == 0:
if y == 0:
pygame.draw.rect(screen, white, pygame.Rect(y*SIDE,x*SIDE+MARGIN, SIDE+MARGIN, SIDE))
else:
pygame.draw.rect(screen, white, pygame.Rect(y*SIDE+MARGIN,x*SIDE+MARGIN, SIDE+MARGIN, SIDE))
# Draw the background with the grid pattern
pygame.draw.rect(screen, gray , pygame.Rect(MARGIN + y*SIDE,MARGIN + x*SIDE, SIDE, SIDE))
pygame.draw.rect(screen, white, pygame.Rect(MARGIN + y*SIDE + LINE,MARGIN + x*SIDE + LINE,
SIDE - 2*LINE, SIDE - 2*LINE))
# then we draw the blocks in the grid
for x in range(0,k):
for y in range(0,h):
cell = matrix[x,y,1:]
selected_block = np.where(cell == 1)[0]
if len(selected_block) != 0:
if selected_block[-1] == 1:
__draw_horizontal_block(x,y)
elif selected_block[-1] == 2:
if (x == 0 or not (matrix[x-1,y,1:] == cell).all() ) and \
(x != k-1 and (matrix[x+1,y,1:] == cell).all() ):
__draw_vertical_block(x,y)
elif selected_block[-1] == 0:
__draw_red_block(x,y)
pygame.display.update()
time.sleep(0.1)
if __name__ == "__main__":
from unblockme_class import *
matrix, goal = get_example()
game = unblock_me(matrix, goal)
render_unblockme(game) | [
"pygame.init",
"pygame.quit",
"pygame.event.get",
"numpy.where",
"pygame.display.set_mode",
"time.sleep",
"pygame.display.quit",
"sys.exit",
"pygame.display.update",
"pygame.Rect"
]
| [((1755, 1773), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (1771, 1773), False, 'import pygame\n'), ((3536, 3559), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (3557, 3559), False, 'import pygame\n'), ((3564, 3579), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (3574, 3579), False, 'import time\n'), ((412, 473), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE)', '(MARGIN + x * SIDE)', 'SIDE', 'SIDE'], {}), '(MARGIN + y * SIDE, MARGIN + x * SIDE, SIDE, SIDE)\n', (423, 473), False, 'import pygame\n'), ((513, 629), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE + h_switch * BORDER)', '(MARGIN + x * SIDE + BORDER)', '(SIDE - BORDER)', '(SIDE - 2 * BORDER)'], {}), '(MARGIN + y * SIDE + h_switch * BORDER, MARGIN + x * SIDE +\n BORDER, SIDE - BORDER, SIDE - 2 * BORDER)\n', (524, 629), False, 'import pygame\n'), ((800, 861), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE)', '(MARGIN + x * SIDE)', 'SIDE', 'SIDE'], {}), '(MARGIN + y * SIDE, MARGIN + x * SIDE, SIDE, SIDE)\n', (811, 861), False, 'import pygame\n'), ((899, 1015), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE + h_switch * BORDER)', '(MARGIN + x * SIDE + BORDER)', '(SIDE - BORDER)', '(SIDE - 2 * BORDER)'], {}), '(MARGIN + y * SIDE + h_switch * BORDER, MARGIN + x * SIDE +\n BORDER, SIDE - BORDER, SIDE - 2 * BORDER)\n', (910, 1015), False, 'import pygame\n'), ((1183, 1248), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE)', '(MARGIN + x * SIDE)', 'SIDE', '(2 * SIDE)'], {}), '(MARGIN + y * SIDE, MARGIN + x * SIDE, SIDE, 2 * SIDE)\n', (1194, 1248), False, 'import pygame\n'), ((1287, 1401), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE + BORDER)', '(MARGIN + x * SIDE + BORDER)', '(SIDE - 2 * BORDER)', '(2 * SIDE - 2 * BORDER)'], {}), '(MARGIN + y * SIDE + BORDER, MARGIN + x * SIDE + BORDER, SIDE - \n 2 * BORDER, 2 * SIDE - 2 * BORDER)\n', (1298, 1401), False, 'import pygame\n'), ((1646, 1659), 'pygame.init', 'pygame.init', ([], {}), '()\n', (1657, 1659), False, 'import pygame\n'), ((1677, 1748), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(2 * MARGIN + k * SIDE, 2 * MARGIN + h * SIDE)'], {}), '((2 * MARGIN + k * SIDE, 2 * MARGIN + h * SIDE))\n', (1700, 1748), False, 'import pygame\n'), ((1825, 1846), 'pygame.display.quit', 'pygame.display.quit', ([], {}), '()\n', (1844, 1846), False, 'import pygame\n'), ((1859, 1872), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (1870, 1872), False, 'import pygame\n'), ((1885, 1896), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1893, 1896), False, 'import sys\n'), ((2073, 2092), 'numpy.where', 'np.where', (['(cell == 1)'], {}), '(cell == 1)\n', (2081, 2092), True, 'import numpy as np\n'), ((2615, 2676), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE)', '(MARGIN + x * SIDE)', 'SIDE', 'SIDE'], {}), '(MARGIN + y * SIDE, MARGIN + x * SIDE, SIDE, SIDE)\n', (2626, 2676), False, 'import pygame\n'), ((2717, 2818), 'pygame.Rect', 'pygame.Rect', (['(MARGIN + y * SIDE + LINE)', '(MARGIN + x * SIDE + LINE)', '(SIDE - 2 * LINE)', '(SIDE - 2 * LINE)'], {}), '(MARGIN + y * SIDE + LINE, MARGIN + x * SIDE + LINE, SIDE - 2 *\n LINE, SIDE - 2 * LINE)\n', (2728, 2818), False, 'import pygame\n'), ((3027, 3046), 'numpy.where', 'np.where', (['(cell == 1)'], {}), '(cell == 1)\n', (3035, 3046), True, 'import numpy as np\n'), ((2318, 2379), 'pygame.Rect', 'pygame.Rect', (['(y * SIDE)', '(x * SIDE + MARGIN)', '(SIDE + MARGIN)', 'SIDE'], {}), '(y * SIDE, x * SIDE + MARGIN, SIDE + MARGIN, SIDE)\n', (2329, 2379), False, 'import 
pygame\n'), ((2454, 2524), 'pygame.Rect', 'pygame.Rect', (['(y * SIDE + MARGIN)', '(x * SIDE + MARGIN)', '(SIDE + MARGIN)', 'SIDE'], {}), '(y * SIDE + MARGIN, x * SIDE + MARGIN, SIDE + MARGIN, SIDE)\n', (2465, 2524), False, 'import pygame\n')] |
# API
from pyramid.scaffolds import PyramidTemplate
import os
import re
import logging
def _camelcase_to_upper_camel_case(the_str):
if not the_str:
return ''
return the_str[0].upper() + the_str[1:]
def _upper_camelcase_to_camelcase(the_str):
if not the_str:
return ''
return the_str[0].lower() + the_str[1:]
def _camelcase_to_constant(the_str):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', the_str)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).upper()
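# Illustrative conversions for the helpers above (not part of the scaffold logic):
#   _camelcase_to_upper_camel_case("myModule") -> "MyModule"
#   _upper_camelcase_to_camelcase("MyModule")  -> "myModule"
#   _camelcase_to_constant("myModuleName")     -> "MY_MODULE_NAME"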
class MyTemplate(PyramidTemplate):
def pre(self, command, output_dir, vars):
the_args = command.args
module_name = '' if not isinstance(the_args, list) or len(the_args) < 2 else the_args[1]
logging.warning('command: %s output_dir: %s vars: %s args: %s module_name: %s', command, output_dir, vars, command.args, module_name)
self._setup_module(vars, module_name)
return PyramidTemplate.pre(self, command, output_dir, vars)
def _setup_module(self, vars, full_module_name):
full_module_path = full_module_name.replace('.', os.path.sep)
module_name = os.path.basename(full_module_path)
class_name = _camelcase_to_upper_camel_case(module_name)
constant_name = _camelcase_to_constant(module_name)
sub_pkg_dir = os.path.dirname(full_module_path)
sub_pkg_name = sub_pkg_dir.replace(os.path.sep, '.')
test_name = '' if not module_name else 'test' + class_name
sub_pkg_dir_list = [] if not sub_pkg_dir else sub_pkg_dir.split(os.path.sep)
test_dir_list = ['test_' + each_pkg for each_pkg in sub_pkg_dir_list]
test_dir = os.path.sep.join(test_dir_list)
pkg_name = vars['package']
if sub_pkg_name:
pkg_name += '.' + sub_pkg_name
project_name = vars['project']
vars['module_name'] = module_name
vars['class_name'] = class_name
vars['sub_pkg_name'] = sub_pkg_name
vars['sub_pkg_dir'] = sub_pkg_dir
vars['constant_name'] = constant_name
vars['test_name'] = test_name
vars['test_dir'] = test_dir
vars['pkg_name'] = pkg_name
vars['project_name'] = project_name
class ComponentProjectTemplate(MyTemplate):
_template_dir = 'component'
summary = 'component'
class ContainerProjectTemplate(MyTemplate):
_template_dir = 'container'
summary = 'container'
class SubContainerProjectTemplate(MyTemplate):
_template_dir = 'subcontainer'
summary = 'subcontainer'
class ModuleProjectTemplate(MyTemplate):
_template_dir = 'module'
summary = 'module'
class InitStarterProjectTemplate(MyTemplate):
_template_dir = 'init_starter'
summary = 'including store / middleware / utils'
class InitDevProjectTemplate(MyTemplate):
_template_dir = 'init_dev'
summary = 'starting project'
| [
"logging.warning",
"os.path.dirname",
"os.path.sep.join",
"pyramid.scaffolds.PyramidTemplate.pre",
"os.path.basename",
"re.sub"
]
| [((393, 439), 're.sub', 're.sub', (['"""(.)([A-Z][a-z]+)"""', '"""\\\\1_\\\\2"""', 'the_str'], {}), "('(.)([A-Z][a-z]+)', '\\\\1_\\\\2', the_str)\n", (399, 439), False, 'import re\n'), ((722, 859), 'logging.warning', 'logging.warning', (['"""command: %s output_dir: %s vars: %s args: %s module_name: %s"""', 'command', 'output_dir', 'vars', 'command.args', 'module_name'], {}), "('command: %s output_dir: %s vars: %s args: %s module_name: %s',\n command, output_dir, vars, command.args, module_name)\n", (737, 859), False, 'import logging\n'), ((919, 971), 'pyramid.scaffolds.PyramidTemplate.pre', 'PyramidTemplate.pre', (['self', 'command', 'output_dir', 'vars'], {}), '(self, command, output_dir, vars)\n', (938, 971), False, 'from pyramid.scaffolds import PyramidTemplate\n'), ((1119, 1153), 'os.path.basename', 'os.path.basename', (['full_module_path'], {}), '(full_module_path)\n', (1135, 1153), False, 'import os\n'), ((1303, 1336), 'os.path.dirname', 'os.path.dirname', (['full_module_path'], {}), '(full_module_path)\n', (1318, 1336), False, 'import os\n'), ((1648, 1679), 'os.path.sep.join', 'os.path.sep.join', (['test_dir_list'], {}), '(test_dir_list)\n', (1664, 1679), False, 'import os\n'), ((450, 492), 're.sub', 're.sub', (['"""([a-z0-9])([A-Z])"""', '"""\\\\1_\\\\2"""', 's1'], {}), "('([a-z0-9])([A-Z])', '\\\\1_\\\\2', s1)\n", (456, 492), False, 'import re\n')] |
# -*- coding: utf-8 -*-
"""Test GUI component."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
#from contextlib import contextmanager
from pytest import yield_fixture, fixture, raises
import numpy as np
from numpy.testing import assert_array_equal as ae
from .. import supervisor as _supervisor
from ..supervisor import (Supervisor,
TaskLogger,
ClusterView,
SimilarityView,
ActionCreator,
)
from phy.gui import GUI
from phy.gui.widgets import Barrier
from phy.gui.qt import qInstallMessageHandler
from phy.gui.tests.test_widgets import _assert, _wait_until_table_ready
from phy.utils.context import Context
from phylib.utils import connect, Bunch, emit
def handler(msg_type, msg_log_context, msg_string):
pass
qInstallMessageHandler(handler)
#------------------------------------------------------------------------------
# Fixtures
#------------------------------------------------------------------------------
@yield_fixture
def gui(tempdir, qtbot):
# NOTE: mock patch show box exec_
_supervisor._show_box = lambda _: _
gui = GUI(position=(200, 100), size=(500, 500), config_dir=tempdir)
gui.set_default_actions()
gui.show()
qtbot.waitForWindowShown(gui)
yield gui
qtbot.wait(5)
gui.close()
del gui
qtbot.wait(5)
@fixture
def supervisor(qtbot, gui, cluster_ids, cluster_groups, cluster_labels,
similarity, tempdir):
spike_clusters = np.repeat(cluster_ids, 2)
s = Supervisor(
spike_clusters,
cluster_groups=cluster_groups,
cluster_labels=cluster_labels,
similarity=similarity,
context=Context(tempdir),
sort=('id', 'desc'),
)
s.attach(gui)
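    # Block (via a Barrier keyed on the 'ready' events) until both the cluster view
    # and the similarity view have finished loading before returning the fixture.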
b = Barrier()
connect(b('cluster_view'), event='ready', sender=s.cluster_view)
connect(b('similarity_view'), event='ready', sender=s.similarity_view)
b.wait()
return s
#------------------------------------------------------------------------------
# Test tasks
#------------------------------------------------------------------------------
@fixture
def tl():
class MockClusterView(object):
_selected = [0]
def select(self, cl, callback=None, **kwargs):
self._selected = cl
callback({'selected': cl, 'next': cl[-1] + 1})
def next(self, callback=None):
callback({'selected': [self._selected[-1] + 1], 'next': self._selected[-1] + 2})
def previous(self, callback=None): # pragma: no cover
callback({'selected': [self._selected[-1] - 1], 'next': self._selected[-1]})
class MockSimilarityView(MockClusterView):
pass
class MockSupervisor(object):
def merge(self, cluster_ids, to, callback=None):
callback(Bunch(deleted=cluster_ids, added=[to]))
def split(self, old_cluster_ids, new_cluster_ids, callback=None):
callback(Bunch(deleted=old_cluster_ids, added=new_cluster_ids))
def move(self, which, group, callback=None):
callback(Bunch(metadata_changed=which, metadata_value=group))
def undo(self, callback=None):
callback(Bunch())
def redo(self, callback=None):
callback(Bunch())
out = TaskLogger(MockClusterView(), MockSimilarityView(), MockSupervisor())
return out
def test_task_1(tl):
assert tl.last_state(None) is None
def test_task_2(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.process()
assert tl.last_state() == ([0], 1, None, None)
def test_task_3(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.enqueue(tl.similarity_view, 'select', [100])
tl.process()
assert tl.last_state() == ([0], 1, [100], 101)
def test_task_merge(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.enqueue(tl.similarity_view, 'select', [100])
tl.enqueue(tl.supervisor, 'merge', [0, 100], 1000)
tl.process()
assert tl.last_state() == ([1000], 1001, None, None)
tl.enqueue(tl.supervisor, 'undo')
tl.process()
assert tl.last_state() == ([0], 1, [100], 101)
tl.enqueue(tl.supervisor, 'redo')
tl.process()
assert tl.last_state() == ([1000], 1001, None, None)
def test_task_split(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.enqueue(tl.similarity_view, 'select', [100])
tl.enqueue(tl.supervisor, 'split', [0, 100], [1000, 1001])
tl.process()
assert tl.last_state() == ([1000, 1001], 1002, None, None)
def test_task_move_1(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.enqueue(tl.supervisor, 'move', [0], 'good')
tl.process()
assert tl.last_state() == ([1], 2, None, None)
def test_task_move_best(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.enqueue(tl.similarity_view, 'select', [100])
tl.enqueue(tl.supervisor, 'move', 'best', 'good')
tl.process()
assert tl.last_state() == ([1], 2, None, None)
def test_task_move_similar(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.enqueue(tl.similarity_view, 'select', [100])
tl.enqueue(tl.supervisor, 'move', 'similar', 'good')
tl.process()
assert tl.last_state() == ([0], 1, [101], 102)
def test_task_move_all(tl):
tl.enqueue(tl.cluster_view, 'select', [0])
tl.enqueue(tl.similarity_view, 'select', [100])
tl.enqueue(tl.supervisor, 'move', 'all', 'good')
tl.process()
assert tl.last_state() == ([1], 2, [101], 102)
#------------------------------------------------------------------------------
# Test cluster and similarity views
#------------------------------------------------------------------------------
@fixture
def data():
_data = [{"id": i,
"n_spikes": 100 - 10 * i,
"group": {2: 'noise', 3: 'noise', 5: 'mua', 8: 'good'}.get(i, None),
"is_masked": i in (2, 3, 5),
} for i in range(10)]
return _data
def test_cluster_view_1(qtbot, gui, data):
cv = ClusterView(gui, data=data)
_wait_until_table_ready(qtbot, cv)
cv.sort_by('n_spikes', 'asc')
cv.select([1])
qtbot.wait(10)
assert cv.state == {'current_sort': ('n_spikes', 'asc'), 'selected': [1]}
cv.set_state({'current_sort': ('id', 'desc'), 'selected': [2]})
assert cv.state == {'current_sort': ('id', 'desc'), 'selected': [2]}
def test_similarity_view_1(qtbot, gui, data):
sv = SimilarityView(gui, data=data)
_wait_until_table_ready(qtbot, sv)
@connect(sender=sv)
def on_request_similar_clusters(sender, cluster_id):
return [{'id': id} for id in (100 + cluster_id, 110 + cluster_id, 102 + cluster_id)]
sv.reset([5])
_assert(sv.get_ids, [105, 115, 107])
def test_cluster_view_extra_columns(qtbot, gui, data):
for cl in data:
cl['my_metrics'] = cl['id'] * 1000
cv = ClusterView(gui, data=data, columns=['id', 'n_spikes', 'my_metrics'])
_wait_until_table_ready(qtbot, cv)
#------------------------------------------------------------------------------
# Test ActionCreator
#------------------------------------------------------------------------------
def test_action_creator_1(qtbot, gui):
ac = ActionCreator()
ac.attach(gui)
gui.show()
#------------------------------------------------------------------------------
# Test GUI component
#------------------------------------------------------------------------------
def _select(supervisor, cluster_ids, similar=None):
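    # Helper: e.g. _select(supervisor, [30], [20]) selects cluster 30 in the cluster
    # view and cluster 20 in the similarity view, then blocks until the tasks settle.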
supervisor.task_logger.enqueue(supervisor.cluster_view, 'select', cluster_ids)
if similar is not None:
supervisor.task_logger.enqueue(supervisor.similarity_view, 'select', similar)
supervisor.task_logger.process()
supervisor.block()
supervisor.task_logger.show_history()
assert supervisor.task_logger.last_state()[0] == cluster_ids
assert supervisor.task_logger.last_state()[2] == similar
def _assert_selected(supervisor, sel):
assert supervisor.selected == sel
def test_select(qtbot, supervisor):
_select(supervisor, [30], [20])
_assert_selected(supervisor, [30, 20])
def test_supervisor_busy(qtbot, supervisor):
_select(supervisor, [30], [20])
o = object()
emit('is_busy', o, True)
assert supervisor._is_busy
# The action fails while the supervisor is busy.
with raises(RuntimeError):
emit('action', supervisor.action_creator, 'merge')
emit('is_busy', o, False)
assert not supervisor._is_busy
# The action succeeds because the supervisor is no longer busy.
emit('action', supervisor.action_creator, 'merge')
supervisor.block()
assert not supervisor._is_busy
def test_supervisor_cluster_metrics(
qtbot, gui, cluster_ids, cluster_groups, similarity, tempdir):
spike_clusters = np.repeat(cluster_ids, 2)
def my_metrics(cluster_id):
return cluster_id ** 2
cluster_metrics = {'my_metrics': my_metrics}
mc = Supervisor(spike_clusters,
cluster_groups=cluster_groups,
cluster_metrics=cluster_metrics,
similarity=similarity,
context=Context(tempdir),
)
mc.attach(gui)
b = Barrier()
connect(b('cluster_view'), event='ready', sender=mc.cluster_view)
connect(b('similarity_view'), event='ready', sender=mc.similarity_view)
b.wait()
assert 'my_metrics' in mc.columns
def test_supervisor_select_1(qtbot, supervisor):
# WARNING: always use actions in tests, because this doesn't call
    # the supervisor method directly, but raises an event, enqueues the task,
    # and calls TaskLogger.process() which handles the cascade of callbacks.
supervisor.select_actions.select([0])
supervisor.block()
_assert_selected(supervisor, [0])
supervisor.task_logger.show_history()
def test_supervisor_color(qtbot, supervisor):
supervisor.view_actions.colormap_linear()
supervisor.view_actions.color_field_n_spikes()
supervisor.view_actions.toggle_categorical_colormap(False)
supervisor.view_actions.toggle_logarithmic_colormap(True)
def test_supervisor_select_2(qtbot, supervisor):
supervisor.select_actions.next_best()
supervisor.block()
_assert_selected(supervisor, [30])
def test_supervisor_select_order(qtbot, supervisor):
_select(supervisor, [1, 0])
_assert_selected(supervisor, [1, 0])
_select(supervisor, [0, 1])
_assert_selected(supervisor, [0, 1])
def test_supervisor_edge_cases(supervisor):
# Empty selection at first.
ae(supervisor.clustering.cluster_ids, [0, 1, 2, 10, 11, 20, 30])
_select(supervisor, [0])
supervisor.undo()
supervisor.block()
supervisor.redo()
supervisor.block()
# Merge.
supervisor.merge()
supervisor.block()
_assert_selected(supervisor, [0])
supervisor.merge([])
supervisor.block()
_assert_selected(supervisor, [0])
supervisor.merge([10])
supervisor.block()
_assert_selected(supervisor, [0])
# Split.
supervisor.split([])
supervisor.block()
_assert_selected(supervisor, [0])
# Move.
supervisor.move('ignored', [])
supervisor.block()
supervisor.save()
def test_supervisor_save(qtbot, gui, supervisor):
emit('request_save', gui)
def test_supervisor_skip(qtbot, gui, supervisor):
# yield [0, 1, 2, 10, 11, 20, 30]
# # i, g, N, i, g, N, N
expected = [30, 20, 11, 2, 1]
for clu in expected:
supervisor.select_actions.next_best()
supervisor.block()
_assert_selected(supervisor, [clu])
def test_supervisor_sort(qtbot, supervisor):
supervisor.sort('id', 'desc')
qtbot.wait(50)
assert supervisor.state.cluster_view.current_sort == ('id', 'desc')
supervisor.select_actions.sort_by_n_spikes()
qtbot.wait(50)
assert supervisor.state.cluster_view.current_sort == ('n_spikes', 'desc')
def test_supervisor_filter(qtbot, supervisor):
supervisor.filter('5 <= id && id <= 20')
qtbot.wait(50)
_cl = []
supervisor.cluster_view.get_ids(lambda cluster_ids: _cl.extend(cluster_ids))
qtbot.wait(50)
assert _cl == [20, 11, 10]
def test_supervisor_merge_1(qtbot, supervisor):
_select(supervisor, [30], [20])
_assert_selected(supervisor, [30, 20])
supervisor.actions.merge()
supervisor.block()
_assert_selected(supervisor, [31])
supervisor.actions.undo()
supervisor.block()
_assert_selected(supervisor, [30, 20])
supervisor.actions.redo()
supervisor.block()
supervisor.task_logger.show_history()
_assert_selected(supervisor, [31])
assert supervisor.is_dirty()
def test_supervisor_merge_event(qtbot, supervisor):
_select(supervisor, [30], [20])
_l = []
@connect(sender=supervisor)
def on_select(sender, cluster_ids):
_l.append(cluster_ids)
supervisor.actions.merge()
supervisor.block()
# After a merge, there should be only one select event.
assert len(_l) == 1
def test_supervisor_merge_move(qtbot, supervisor):
"""Check that merge then move selects the next cluster in the original
cluster view, not the updated cluster view."""
_select(supervisor, [20, 11], [])
_assert_selected(supervisor, [20, 11])
supervisor.actions.merge()
supervisor.block()
_assert_selected(supervisor, [31])
supervisor.actions.move('good', 'all')
supervisor.block()
_assert_selected(supervisor, [30])
supervisor.actions.move('good', 'all')
supervisor.block()
_assert_selected(supervisor, [2])
def test_supervisor_split_0(qtbot, supervisor):
_select(supervisor, [1, 2])
_assert_selected(supervisor, [1, 2])
supervisor.actions.split([1, 2])
supervisor.block()
_assert_selected(supervisor, [31, 32, 33])
supervisor.actions.undo()
supervisor.block()
_assert_selected(supervisor, [1, 2])
supervisor.actions.redo()
supervisor.block()
_assert_selected(supervisor, [31, 32, 33])
def test_supervisor_split_1(supervisor):
supervisor.select_actions.select([1, 2])
supervisor.block()
@connect(sender=supervisor)
def on_request_split(sender):
return [1, 2]
supervisor.actions.split()
supervisor.block()
_assert_selected(supervisor, [31, 32, 33])
def test_supervisor_split_2(gui, similarity):
spike_clusters = np.array([0, 0, 1])
supervisor = Supervisor(spike_clusters,
similarity=similarity,
)
supervisor.attach(gui)
b = Barrier()
connect(b('cluster_view'), event='ready', sender=supervisor.cluster_view)
connect(b('similarity_view'), event='ready', sender=supervisor.similarity_view)
b.wait()
supervisor.actions.split([0])
supervisor.block()
_assert_selected(supervisor, [2, 3])
def test_supervisor_state(tempdir, qtbot, gui, supervisor):
supervisor.select(1)
cv = supervisor.cluster_view
assert supervisor.state.cluster_view.current_sort == ('id', 'desc')
assert supervisor.state.cluster_view.selected == [1]
cv.sort_by('id')
assert supervisor.state.cluster_view.current_sort == ('id', 'asc')
cv.set_state({'current_sort': ('n_spikes', 'desc')})
assert supervisor.state.cluster_view.current_sort == ('n_spikes', 'desc')
cv.sort_by('id', 'desc')
assert supervisor.all_cluster_ids == [30, 20, 11, 10, 2, 1, 0]
def test_supervisor_label(supervisor):
_select(supervisor, [20])
supervisor.label("my_field", 3.14)
supervisor.block()
supervisor.label("my_field", 1.23, cluster_ids=30)
supervisor.block()
assert 'my_field' in supervisor.fields
assert supervisor.get_labels('my_field')[20] == 3.14
assert supervisor.get_labels('my_field')[30] == 1.23
def test_supervisor_label_cluster_1(supervisor):
_select(supervisor, [20, 30])
supervisor.label("my_field", 3.14)
supervisor.block()
# Same value for the old clusters.
l = supervisor.get_labels('my_field')
assert l[20] == l[30] == 3.14
up = supervisor.merge()
supervisor.block()
assert supervisor.get_labels('my_field')[up.added[0]] == 3.14
def test_supervisor_label_cluster_2(supervisor):
_select(supervisor, [20])
supervisor.label("my_field", 3.14)
supervisor.block()
# One of the parents.
l = supervisor.get_labels('my_field')
assert l[20] == 3.14
assert l[30] is None
up = supervisor.merge([20, 30])
supervisor.block()
assert supervisor.get_labels('my_field')[up.added[0]] == 3.14
def test_supervisor_label_cluster_3(supervisor):
# Conflict: largest cluster wins.
_select(supervisor, [20, 30])
supervisor.label("my_field", 3.14)
supervisor.block()
# Create merged cluster from 20 and 30.
up = supervisor.merge()
new = up.added[0]
supervisor.block()
    # It got the label of its parents.
assert supervisor.get_labels('my_field')[new] == 3.14
# Now, we label a smaller cluster.
supervisor.label("my_field", 2.718, cluster_ids=[10])
# We merge the large and small cluster together.
up = supervisor.merge(up.added + [10])
supervisor.block()
# The new cluster should have the value of the first, merged big cluster, i.e. 3.14.
assert supervisor.get_labels('my_field')[up.added[0]] == 3.14
def test_supervisor_move_1(supervisor):
_select(supervisor, [20])
_assert_selected(supervisor, [20])
assert not supervisor.move('', '')
supervisor.actions.move('noise', 'all')
supervisor.block()
_assert_selected(supervisor, [11])
supervisor.actions.undo()
supervisor.block()
_assert_selected(supervisor, [20])
supervisor.actions.redo()
supervisor.block()
_assert_selected(supervisor, [11])
def test_supervisor_move_2(supervisor):
_select(supervisor, [20], [10])
_assert_selected(supervisor, [20, 10])
supervisor.actions.move('noise', 10)
supervisor.block()
_assert_selected(supervisor, [20, 2])
supervisor.actions.undo()
supervisor.block()
_assert_selected(supervisor, [20, 10])
supervisor.actions.redo()
supervisor.block()
_assert_selected(supervisor, [20, 2])
def test_supervisor_move_3(qtbot, supervisor):
supervisor.select_actions.next()
supervisor.block()
_assert_selected(supervisor, [30])
supervisor.actions.move_best_to_noise()
supervisor.block()
_assert_selected(supervisor, [20])
supervisor.actions.move_best_to_mua()
supervisor.block()
_assert_selected(supervisor, [11])
supervisor.actions.move_best_to_good()
supervisor.block()
_assert_selected(supervisor, [2])
supervisor.cluster_meta.get('group', 30) == 'noise'
supervisor.cluster_meta.get('group', 20) == 'mua'
supervisor.cluster_meta.get('group', 11) == 'good'
def test_supervisor_move_4(supervisor):
_select(supervisor, [30], [20])
_assert_selected(supervisor, [30, 20])
supervisor.actions.move_similar_to_noise()
supervisor.block()
_assert_selected(supervisor, [30, 11])
supervisor.actions.move_similar_to_mua()
supervisor.block()
_assert_selected(supervisor, [30, 2])
supervisor.actions.move_similar_to_good()
supervisor.block()
_assert_selected(supervisor, [30, 1])
supervisor.cluster_meta.get('group', 20) == 'noise'
supervisor.cluster_meta.get('group', 11) == 'mua'
supervisor.cluster_meta.get('group', 2) == 'good'
def test_supervisor_move_5(supervisor):
_select(supervisor, [30], [20])
_assert_selected(supervisor, [30, 20])
supervisor.actions.move_all_to_noise()
supervisor.block()
_assert_selected(supervisor, [11, 2])
supervisor.select_actions.next()
supervisor.block()
_assert_selected(supervisor, [11, 1])
supervisor.actions.move_all_to_mua()
supervisor.block()
_assert_selected(supervisor, [2])
supervisor.actions.move_all_to_good()
supervisor.block()
_assert_selected(supervisor, [])
supervisor.cluster_meta.get('group', 30) == 'noise'
supervisor.cluster_meta.get('group', 20) == 'noise'
supervisor.cluster_meta.get('group', 11) == 'mua'
supervisor.cluster_meta.get('group', 10) == 'mua'
supervisor.cluster_meta.get('group', 2) == 'good'
supervisor.cluster_meta.get('group', 1) == 'good'
def test_supervisor_reset(qtbot, supervisor):
supervisor.select_actions.select([10, 11])
supervisor.select_actions.reset_wizard()
supervisor.block()
_assert_selected(supervisor, [30])
supervisor.select_actions.next()
supervisor.block()
_assert_selected(supervisor, [30, 20])
supervisor.select_actions.next()
supervisor.block()
_assert_selected(supervisor, [30, 11])
supervisor.select_actions.previous()
supervisor.block()
_assert_selected(supervisor, [30, 20])
def test_supervisor_nav(qtbot, supervisor):
supervisor.select_actions.reset_wizard()
supervisor.block()
_assert_selected(supervisor, [30])
supervisor.select_actions.next_best()
supervisor.block()
_assert_selected(supervisor, [20])
supervisor.select_actions.previous_best()
supervisor.block()
_assert_selected(supervisor, [30])
| [
"phylib.utils.emit",
"phy.gui.tests.test_widgets._assert",
"phylib.utils.Bunch",
"phy.gui.qt.qInstallMessageHandler",
"numpy.repeat",
"phy.utils.context.Context",
"phy.gui.tests.test_widgets._wait_until_table_ready",
"phy.gui.GUI",
"numpy.array",
"pytest.raises",
"phylib.utils.connect",
"phy.gui.widgets.Barrier",
"numpy.testing.assert_array_equal"
]
| [((978, 1009), 'phy.gui.qt.qInstallMessageHandler', 'qInstallMessageHandler', (['handler'], {}), '(handler)\n', (1000, 1009), False, 'from phy.gui.qt import qInstallMessageHandler\n'), ((1313, 1374), 'phy.gui.GUI', 'GUI', ([], {'position': '(200, 100)', 'size': '(500, 500)', 'config_dir': 'tempdir'}), '(position=(200, 100), size=(500, 500), config_dir=tempdir)\n', (1316, 1374), False, 'from phy.gui import GUI\n'), ((1673, 1698), 'numpy.repeat', 'np.repeat', (['cluster_ids', '(2)'], {}), '(cluster_ids, 2)\n', (1682, 1698), True, 'import numpy as np\n'), ((1948, 1957), 'phy.gui.widgets.Barrier', 'Barrier', ([], {}), '()\n', (1955, 1957), False, 'from phy.gui.widgets import Barrier\n'), ((6187, 6221), 'phy.gui.tests.test_widgets._wait_until_table_ready', '_wait_until_table_ready', (['qtbot', 'cv'], {}), '(qtbot, cv)\n', (6210, 6221), False, 'from phy.gui.tests.test_widgets import _assert, _wait_until_table_ready\n'), ((6607, 6641), 'phy.gui.tests.test_widgets._wait_until_table_ready', '_wait_until_table_ready', (['qtbot', 'sv'], {}), '(qtbot, sv)\n', (6630, 6641), False, 'from phy.gui.tests.test_widgets import _assert, _wait_until_table_ready\n'), ((6648, 6666), 'phylib.utils.connect', 'connect', ([], {'sender': 'sv'}), '(sender=sv)\n', (6655, 6666), False, 'from phylib.utils import connect, Bunch, emit\n'), ((6840, 6876), 'phy.gui.tests.test_widgets._assert', '_assert', (['sv.get_ids', '[105, 115, 107]'], {}), '(sv.get_ids, [105, 115, 107])\n', (6847, 6876), False, 'from phy.gui.tests.test_widgets import _assert, _wait_until_table_ready\n'), ((7082, 7116), 'phy.gui.tests.test_widgets._wait_until_table_ready', '_wait_until_table_ready', (['qtbot', 'cv'], {}), '(qtbot, cv)\n', (7105, 7116), False, 'from phy.gui.tests.test_widgets import _assert, _wait_until_table_ready\n'), ((8363, 8387), 'phylib.utils.emit', 'emit', (['"""is_busy"""', 'o', '(True)'], {}), "('is_busy', o, True)\n", (8367, 8387), False, 'from phylib.utils import connect, Bunch, emit\n'), ((8568, 8593), 'phylib.utils.emit', 'emit', (['"""is_busy"""', 'o', '(False)'], {}), "('is_busy', o, False)\n", (8572, 8593), False, 'from phylib.utils import connect, Bunch, emit\n'), ((8702, 8752), 'phylib.utils.emit', 'emit', (['"""action"""', 'supervisor.action_creator', '"""merge"""'], {}), "('action', supervisor.action_creator, 'merge')\n", (8706, 8752), False, 'from phylib.utils import connect, Bunch, emit\n'), ((8942, 8967), 'numpy.repeat', 'np.repeat', (['cluster_ids', '(2)'], {}), '(cluster_ids, 2)\n', (8951, 8967), True, 'import numpy as np\n'), ((9361, 9370), 'phy.gui.widgets.Barrier', 'Barrier', ([], {}), '()\n', (9368, 9370), False, 'from phy.gui.widgets import Barrier\n'), ((10697, 10761), 'numpy.testing.assert_array_equal', 'ae', (['supervisor.clustering.cluster_ids', '[0, 1, 2, 10, 11, 20, 30]'], {}), '(supervisor.clustering.cluster_ids, [0, 1, 2, 10, 11, 20, 30])\n', (10699, 10761), True, 'from numpy.testing import assert_array_equal as ae\n'), ((11408, 11433), 'phylib.utils.emit', 'emit', (['"""request_save"""', 'gui'], {}), "('request_save', gui)\n", (11412, 11433), False, 'from phylib.utils import connect, Bunch, emit\n'), ((12914, 12940), 'phylib.utils.connect', 'connect', ([], {'sender': 'supervisor'}), '(sender=supervisor)\n', (12921, 12940), False, 'from phylib.utils import connect, Bunch, emit\n'), ((14265, 14291), 'phylib.utils.connect', 'connect', ([], {'sender': 'supervisor'}), '(sender=supervisor)\n', (14272, 14291), False, 'from phylib.utils import connect, Bunch, emit\n'), ((14519, 14538), 'numpy.array', 
'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (14527, 14538), True, 'import numpy as np\n'), ((14701, 14710), 'phy.gui.widgets.Barrier', 'Barrier', ([], {}), '()\n', (14708, 14710), False, 'from phy.gui.widgets import Barrier\n'), ((8482, 8502), 'pytest.raises', 'raises', (['RuntimeError'], {}), '(RuntimeError)\n', (8488, 8502), False, 'from pytest import yield_fixture, fixture, raises\n'), ((8512, 8562), 'phylib.utils.emit', 'emit', (['"""action"""', 'supervisor.action_creator', '"""merge"""'], {}), "('action', supervisor.action_creator, 'merge')\n", (8516, 8562), False, 'from phylib.utils import connect, Bunch, emit\n'), ((1869, 1885), 'phy.utils.context.Context', 'Context', (['tempdir'], {}), '(tempdir)\n', (1876, 1885), False, 'from phy.utils.context import Context\n'), ((9294, 9310), 'phy.utils.context.Context', 'Context', (['tempdir'], {}), '(tempdir)\n', (9301, 9310), False, 'from phy.utils.context import Context\n'), ((2989, 3027), 'phylib.utils.Bunch', 'Bunch', ([], {'deleted': 'cluster_ids', 'added': '[to]'}), '(deleted=cluster_ids, added=[to])\n', (2994, 3027), False, 'from phylib.utils import connect, Bunch, emit\n'), ((3125, 3178), 'phylib.utils.Bunch', 'Bunch', ([], {'deleted': 'old_cluster_ids', 'added': 'new_cluster_ids'}), '(deleted=old_cluster_ids, added=new_cluster_ids)\n', (3130, 3178), False, 'from phylib.utils import connect, Bunch, emit\n'), ((3255, 3306), 'phylib.utils.Bunch', 'Bunch', ([], {'metadata_changed': 'which', 'metadata_value': 'group'}), '(metadata_changed=which, metadata_value=group)\n', (3260, 3306), False, 'from phylib.utils import connect, Bunch, emit\n'), ((3369, 3376), 'phylib.utils.Bunch', 'Bunch', ([], {}), '()\n', (3374, 3376), False, 'from phylib.utils import connect, Bunch, emit\n'), ((3439, 3446), 'phylib.utils.Bunch', 'Bunch', ([], {}), '()\n', (3444, 3446), False, 'from phylib.utils import connect, Bunch, emit\n')] |
#
# Copyright (C) 2018 <NAME>.
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
#
#
# Portions of this module are copied or lightly modified from the
# Tensor2Tensor registry_test module, so here is their license:
#
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for utils.registry
References:
Slight modification of `Tensor2Tensor registry_test`_.
.. _Tensor2Tensor registry_test: https://github.com/tensorflow/
tensor2tensor/blob/master/tensor2tensor/utils/registry_test.py
"""
import unittest
from carpedm.util import registry
from carpedm.models.generic import Model
from carpedm.models.baseline import SingleCharBaseline
class ModelRegistryTest(unittest.TestCase):
def setUp(self):
registry._reset()
def test_model_registration(self):
@registry.register_model
class MyModel1(Model):
pass
model = registry.model("my_model1")
self.assertTrue(model is MyModel1)
def test_named_registration(self):
@registry.register_model("model2")
class MyModel1(Model):
pass
model = registry.model("model2")
self.assertTrue(model is MyModel1)
def test_request_unprovided_model(self):
with self.assertRaisesRegex(LookupError, "never registered"):
_ = registry.model("not_provided")
def test_duplicate_registration(self):
@registry.register_model
def m1():
pass
with self.assertRaisesRegex(LookupError, "already registered"):
@registry.register_model("m1")
def m2():
pass
def test_list_models(self):
@registry.register_model
def m1():
pass
@registry.register_model
def m2():
pass
self.assertSetEqual({"m1", "m2"}, set(registry.list_models()))
def test_snake_case(self):
convert = registry._convert_camel_to_snake
self.assertEqual("typical_camel_case", convert("TypicalCamelCase"))
self.assertEqual("numbers_fuse2gether", convert("NumbersFuse2gether"))
self.assertEqual("numbers_fuse2_gether", convert("NumbersFuse2Gether"))
self.assertEqual("lstm_seq2_seq", convert("LSTMSeq2Seq"))
self.assertEqual("starts_lower", convert("startsLower"))
self.assertEqual("starts_lower_caps", convert("startsLowerCAPS"))
self.assertEqual("caps_fuse_together", convert("CapsFUSETogether"))
self.assertEqual("startscap", convert("Startscap"))
self.assertEqual("s_tartscap", convert("STartscap"))
class ModelProvidedTest(unittest.TestCase):
def setUp(self):
from carpedm import models
def test_access_provided_model(self):
model = registry.model("single_char_baseline")
self.assertTrue(model is SingleCharBaseline)
if __name__ == '__main__':
unittest.main()
| [
"carpedm.util.registry.register_model",
"carpedm.util.registry.model",
"carpedm.util.registry._reset",
"unittest.main",
"carpedm.util.registry.list_models"
]
| [((3474, 3489), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3487, 3489), False, 'import unittest\n'), ((1342, 1359), 'carpedm.util.registry._reset', 'registry._reset', ([], {}), '()\n', (1357, 1359), False, 'from carpedm.util import registry\n'), ((1499, 1526), 'carpedm.util.registry.model', 'registry.model', (['"""my_model1"""'], {}), "('my_model1')\n", (1513, 1526), False, 'from carpedm.util import registry\n'), ((1620, 1653), 'carpedm.util.registry.register_model', 'registry.register_model', (['"""model2"""'], {}), "('model2')\n", (1643, 1653), False, 'from carpedm.util import registry\n'), ((1719, 1743), 'carpedm.util.registry.model', 'registry.model', (['"""model2"""'], {}), "('model2')\n", (1733, 1743), False, 'from carpedm.util import registry\n'), ((3349, 3387), 'carpedm.util.registry.model', 'registry.model', (['"""single_char_baseline"""'], {}), "('single_char_baseline')\n", (3363, 3387), False, 'from carpedm.util import registry\n'), ((1919, 1949), 'carpedm.util.registry.model', 'registry.model', (['"""not_provided"""'], {}), "('not_provided')\n", (1933, 1949), False, 'from carpedm.util import registry\n'), ((2150, 2179), 'carpedm.util.registry.register_model', 'registry.register_model', (['"""m1"""'], {}), "('m1')\n", (2173, 2179), False, 'from carpedm.util import registry\n'), ((2441, 2463), 'carpedm.util.registry.list_models', 'registry.list_models', ([], {}), '()\n', (2461, 2463), False, 'from carpedm.util import registry\n')] |
# The MIT License (MIT)
# Copyright (c) 2021 by the xcube development team and contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import numpy as np
import xarray as xr
def is_empty_cube(cube: xr.Dataset) -> bool:
return len(cube.data_vars) == 0
def strip_cube(cube: xr.Dataset) -> xr.Dataset:
drop_vars = [k for k, v in cube.data_vars.items()
if len(v.shape) < 3
or np.product(v.shape) == 0
or v.shape[-2] < 2
or v.shape[-1] < 2]
if drop_vars:
return cube.drop_vars(drop_vars)
return cube
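# Minimal usage sketch (names are illustrative): for an in-memory xarray.Dataset
# named `cube`:
#   cube = strip_cube(cube)    # drop 0-sized, <3-D, or degenerate (<2x2) variables
#   if is_empty_cube(cube):
#       ...                    # nothing left worth processing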
| [
"numpy.product"
]
| [((1433, 1452), 'numpy.product', 'np.product', (['v.shape'], {}), '(v.shape)\n', (1443, 1452), True, 'import numpy as np\n')] |
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
import os
import pickle
from dataclasses import dataclass
from multiprocessing import Pipe, Process, Queue
import ijson
import smarts.core.scenario as scenario
@dataclass
class RequestHistoryRange:
start_index: int
batch_count: int
class Traffic_history_service:
"""responsible for dynamically fetching traffic history json to reduce
memory use of traffic history data
"""
class QueueDone:
pass
def __init__(self, history_file_path):
self._history_file_path = history_file_path
self._all_timesteps = set()
self._current_traffic_history = {}
self._prev_batch_history = {}
# return if traffic history is not used
if history_file_path is None:
return
self._log = logging.getLogger(self.__class__.__name__)
send_data_conn, receive_data_conn = Pipe()
self._receive_data_conn = receive_data_conn
self._request_queue = Queue()
self._fetch_history_proc = Process(
target=self._fetch_history,
args=(
send_data_conn,
self._request_queue,
self._history_file_path,
),
)
self._fetch_history_proc.daemon = True
self._fetch_history_proc.start()
self._range_start = 0
self._batch_size = 300
# initialize
with open(self._history_file_path, "rb") as f:
for index, (t, vehicles_state) in enumerate(
ijson.kvitems(f, "", use_float=True)
):
self._all_timesteps.add(t)
if (
self._range_start <= index
and index < self._range_start + self._batch_size
):
self._current_traffic_history[t] = vehicles_state
self._range_start += self._batch_size
# prepares the next batch
self._prepare_next_batch()
self._receive_data_conn.recv()
def teardown(self):
if self.is_in_use:
self._request_queue.put(Traffic_history_service.QueueDone())
self._request_queue.close()
self._request_queue = None
self._fetch_history_proc.join(timeout=3)
if self._fetch_history_proc.is_alive():
self._log.warning("fetch history process still alive after teardown")
self._fetch_history_proc = None
self._history_file_path = None
def __del__(self):
self.teardown()
@property
def is_in_use(self):
return self._history_file_path is not None
def _fetch_history(self, send_data_conn, request_queue, history_file_path):
"""prepare 1 batch ahead, when received request, immediately return the previously
prepared batch and prepares the next batch.
"""
return_batch = {}
while True:
historyRange = request_queue.get()
if type(historyRange) is Traffic_history_service.QueueDone:
break
assert isinstance(historyRange, RequestHistoryRange)
send_data_conn.send(return_batch)
return_batch = {}
with open(history_file_path, "rb") as f:
for index, (t, vehicles_state) in enumerate(
ijson.kvitems(f, "", use_float=True)
):
if (
historyRange.start_index <= index
and index < historyRange.start_index + historyRange.batch_count
):
return_batch[t] = vehicles_state
send_data_conn.close()
@property
def all_timesteps(self):
return self._all_timesteps
@property
def history_file_path(self):
return self._history_file_path
@property
def traffic_history(self):
return {**self._current_traffic_history, **self._prev_batch_history}
def _prepare_next_batch(self):
self._request_queue.put(
RequestHistoryRange(
start_index=self._range_start,
batch_count=self._batch_size,
)
)
self._range_start += self._batch_size
def fetch_history_at_timestep(self, timestep: str):
if timestep not in self._all_timesteps:
return {}
elif timestep in self.traffic_history:
return self.traffic_history[timestep]
# ask child process to prepare the next batch:
self._prepare_next_batch()
self._prev_batch_history = self._current_traffic_history
        # receive the previous batch the child process prepared
self._current_traffic_history = self._receive_data_conn.recv()
if timestep in self._current_traffic_history:
return self._current_traffic_history[timestep]
# no history exists at requested timestamp
return {}
@staticmethod
def apply_map_location_offset(position, map_offset):
return [pos + map_offset[i] for i, pos in enumerate(position[:2])]
@staticmethod
def fetch_agent_missions(
history_file_path: str, scenario_root_path: str, mapLocationOffset
):
assert os.path.isdir(scenario_root_path)
history_mission_filepath = os.path.join(
scenario_root_path, "history_mission.pkl"
)
if not os.path.exists(history_mission_filepath):
history_mission = {}
else:
with open(history_mission_filepath, "rb") as f:
history_mission = pickle.load(f)
if history_file_path in history_mission:
return history_mission[history_file_path]
vehicle_missions = {}
with open(history_file_path, "rb") as f:
for t, vehicles_state in ijson.kvitems(f, "", use_float=True):
for vehicle_id in vehicles_state:
if vehicle_id in vehicle_missions:
continue
vehicle_missions[vehicle_id] = scenario.Mission(
start=scenario.Start(
Traffic_history_service.apply_map_location_offset(
vehicles_state[vehicle_id]["position"],
mapLocationOffset,
),
scenario.Heading(vehicles_state[vehicle_id]["heading"]),
),
goal=scenario.EndlessGoal(),
start_time=float(t),
)
history_mission[history_file_path] = vehicle_missions
# update cached history_mission_file
with open(history_mission_filepath, "wb") as f:
pickle.dump(history_mission, f)
return vehicle_missions
| [
"logging.getLogger",
"os.path.exists",
"pickle.dump",
"smarts.core.scenario.Heading",
"multiprocessing.Process",
"os.path.join",
"pickle.load",
"smarts.core.scenario.EndlessGoal",
"os.path.isdir",
"ijson.kvitems",
"multiprocessing.Queue",
"multiprocessing.Pipe"
]
| [((1918, 1960), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (1935, 1960), False, 'import logging\n'), ((2005, 2011), 'multiprocessing.Pipe', 'Pipe', ([], {}), '()\n', (2009, 2011), False, 'from multiprocessing import Pipe, Process, Queue\n'), ((2094, 2101), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (2099, 2101), False, 'from multiprocessing import Pipe, Process, Queue\n'), ((2137, 2246), 'multiprocessing.Process', 'Process', ([], {'target': 'self._fetch_history', 'args': '(send_data_conn, self._request_queue, self._history_file_path)'}), '(target=self._fetch_history, args=(send_data_conn, self.\n _request_queue, self._history_file_path))\n', (2144, 2246), False, 'from multiprocessing import Pipe, Process, Queue\n'), ((6317, 6350), 'os.path.isdir', 'os.path.isdir', (['scenario_root_path'], {}), '(scenario_root_path)\n', (6330, 6350), False, 'import os\n'), ((6386, 6441), 'os.path.join', 'os.path.join', (['scenario_root_path', '"""history_mission.pkl"""'], {}), "(scenario_root_path, 'history_mission.pkl')\n", (6398, 6441), False, 'import os\n'), ((6480, 6520), 'os.path.exists', 'os.path.exists', (['history_mission_filepath'], {}), '(history_mission_filepath)\n', (6494, 6520), False, 'import os\n'), ((6899, 6935), 'ijson.kvitems', 'ijson.kvitems', (['f', '""""""'], {'use_float': '(True)'}), "(f, '', use_float=True)\n", (6912, 6935), False, 'import ijson\n'), ((7831, 7862), 'pickle.dump', 'pickle.dump', (['history_mission', 'f'], {}), '(history_mission, f)\n', (7842, 7862), False, 'import pickle\n'), ((2639, 2675), 'ijson.kvitems', 'ijson.kvitems', (['f', '""""""'], {'use_float': '(True)'}), "(f, '', use_float=True)\n", (2652, 2675), False, 'import ijson\n'), ((6663, 6677), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (6674, 6677), False, 'import pickle\n'), ((4434, 4470), 'ijson.kvitems', 'ijson.kvitems', (['f', '""""""'], {'use_float': '(True)'}), "(f, '', use_float=True)\n", (4447, 4470), False, 'import ijson\n'), ((7564, 7586), 'smarts.core.scenario.EndlessGoal', 'scenario.EndlessGoal', ([], {}), '()\n', (7584, 7586), True, 'import smarts.core.scenario as scenario\n'), ((7451, 7506), 'smarts.core.scenario.Heading', 'scenario.Heading', (["vehicles_state[vehicle_id]['heading']"], {}), "(vehicles_state[vehicle_id]['heading'])\n", (7467, 7506), True, 'import smarts.core.scenario as scenario\n')] |
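The class above relies on a one-batch-ahead handshake: a child process prepares the next chunk while the parent consumes the current one, so the parent never waits on JSON parsing. Below is a minimal standalone sketch of that pattern with the same Pipe/Queue/Process primitives, using fake work in place of ijson; all names here are illustrative, not from the source.

from multiprocessing import Pipe, Process, Queue

def _worker(conn, requests):
    prepared = {}                              # batch prepared ahead of time
    while True:
        req = requests.get()
        if req is None:                        # sentinel: shut down
            break
        conn.send(prepared)                    # hand over what was prepared earlier
        prepared = {i: i * i for i in range(req, req + 3)}  # fake "parsing" work
    conn.close()

if __name__ == "__main__":
    parent_conn, child_conn = Pipe()
    requests = Queue()
    proc = Process(target=_worker, args=(child_conn, requests), daemon=True)
    proc.start()
    requests.put(0)                        # ask for the batch starting at 0
    print(parent_conn.recv())              # {}  (nothing was prepared yet)
    requests.put(3)                        # ask for the next batch
    print(parent_conn.recv())              # {0: 0, 1: 1, 2: 4}  (prepared after the first request)
    requests.put(None)
    proc.join()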
import argparse
from time import sleep, time
from collections import defaultdict
from sqlalchemy import orm, text, insert, delete
from sqlalchemy.orm import selectinload
import models
from app import db
from app import logger
from scripts.queue import JsonWorks, JsonAuthors, JsonConcepts, JsonInstitutions, JsonVenues
from util import elapsed
def run(**kwargs):
entity_type = kwargs.get("entity")
method_name = kwargs.get("method")
if entity_type == "work" and method_name == "add_everything":
queue_table = "queue.work_add_everything"
elif method_name == "store":
queue_table = f"queue.{entity_type.lower()}_store"
else:
queue_table = f"queue.{method_name.lower()}"
if single_id := kwargs.get('id'):
if objects := get_objects(entity_type, [single_id]):
logger.info(f'found object {objects[0]}')
store_objects(objects)
db.session.commit()
else:
logger.warn(f'found no object with id {single_id}')
else:
objects_updated = 0
limit = kwargs.get('limit')
chunk = kwargs.get('chunk')
total_count = 0
while limit is None or objects_updated < limit:
loop_start = time()
if object_ids := fetch_queue_chunk_ids(queue_table, chunk):
objects = get_objects(entity_type, object_ids)
for obj in objects:
method_start_time = time()
total_count += 1
print(f"*** #{total_count} starting {obj}.{method_name}() method")
method_to_run = getattr(obj, method_name)
method_to_run()
print(f">>> finished {obj}.{method_name}(). took {elapsed(method_start_time, 4)} seconds")
# print(1/0)
logger.info('committing')
start_time = time()
if method_name == "store":
store_json_objects(objects)
else:
db.session.commit() # fail loudly for now
logger.info(f'commit took {elapsed(start_time, 4)}s')
finish_object_ids(queue_table, object_ids)
objects_updated += len(objects)
logger.info(f'processed chunk of {chunk} objects in {elapsed(loop_start, 2)} seconds')
else:
logger.info('nothing ready in the queue, waiting 5 seconds...')
sleep(5)
def store_json_objects(objects):
    """Group per-table delete ids and insert rows across all objects, then
    delete and re-insert them in bulk."""
delete_dict_all_objects = defaultdict(list)
insert_dict_all_objects = defaultdict(list)
for count, obj in enumerate(objects):
obj.delete_dict = defaultdict(list)
for row in obj.insert_dicts:
for table_name, insert_dict in row.items():
insert_dict_all_objects[table_name] += [insert_dict]
obj.delete_dict[table_name] += [insert_dict["id"]]
for table_name, ids in obj.delete_dict.items():
delete_dict_all_objects[table_name] += ids
start_time = time()
for table_name, delete_ids in delete_dict_all_objects.items():
my_table = globals()[table_name]
db.session.remove()
db.session.execute(delete(my_table).where(my_table.id.in_(delete_ids)))
db.session.commit()
print("delete done")
for table_name, all_insert_strings in insert_dict_all_objects.items():
my_table = globals()[table_name]
db.session.remove()
db.session.execute(insert(my_table).values(all_insert_strings))
db.session.commit()
print("insert and commit took {} seconds".format(elapsed(start_time, 2)))
def fetch_queue_chunk_ids(queue_table, chunk_size):
    """Atomically claim up to chunk_size unstarted rows from the queue table,
    mark them as started, and return their ids."""
text_query = f"""
with chunk as (
select id
from {queue_table}
where started is null
order by
finished asc nulls first,
rand
limit :chunk
for update skip locked
)
update {queue_table}
set started = now()
from chunk
where {queue_table}.id = chunk.id
returning chunk.id;
"""
logger.info(f'getting {chunk_size} ids from the queue')
start_time = time()
ids = [
row[0] for row in
db.engine.execute(text(text_query).bindparams(chunk=chunk_size).execution_options(autocommit=True)).all()
]
logger.info(f'got {len(ids)} ids from the queue in {elapsed(start_time, 4)}s')
logger.info(f'got these ids: {ids}')
return ids
def finish_object_ids(queue_table, object_ids):
# logger.info(f'finishing queue chunk')
start_time = time()
query_text = f'''
update {queue_table}
set finished = now(), started=null
where id = any(:ids)
'''
db.session.execute(text(query_text).bindparams(ids=object_ids))
db.session.commit()
# logger.info(f'finished saving finish_objects in {elapsed(start_time, 4)}s')
def get_objects(entity_type, object_ids):
logger.info(f'getting {len(object_ids)} objects')
start_time = time()
if entity_type == "work":
objects = db.session.query(models.Work).options(
selectinload(models.Work.records).selectinload(models.Record.journals).raiseload('*'),
selectinload(models.Work.records).raiseload('*'),
selectinload(models.Work.locations),
selectinload(models.Work.journal).raiseload('*'),
selectinload(models.Work.references).raiseload('*'),
selectinload(models.Work.references_unmatched).raiseload('*'),
selectinload(models.Work.mesh),
selectinload(models.Work.counts_by_year).raiseload('*'),
selectinload(models.Work.abstract),
selectinload(models.Work.extra_ids).raiseload('*'),
selectinload(models.Work.related_works).raiseload('*'),
selectinload(models.Work.affiliations).selectinload(models.Affiliation.author).selectinload(models.Author.orcids).raiseload('*'),
selectinload(models.Work.affiliations).selectinload(models.Affiliation.author).raiseload('*'),
selectinload(models.Work.affiliations).selectinload(models.Affiliation.institution).selectinload(models.Institution.ror).raiseload('*'),
selectinload(models.Work.affiliations).selectinload(models.Affiliation.institution).raiseload('*'),
selectinload(models.Work.concepts).selectinload(models.WorkConcept.concept).raiseload('*'),
selectinload(models.Work.concepts_full).raiseload('*'),
orm.Load(models.Work).raiseload('*')
).filter(models.Work.paper_id.in_(object_ids)).all()
elif entity_type == "author":
objects = db.session.query(models.Author).options(
selectinload(models.Author.counts_by_year_papers),
selectinload(models.Author.counts_by_year_citations),
selectinload(models.Author.alternative_names),
selectinload(models.Author.author_concepts),
selectinload(models.Author.orcids).selectinload(models.AuthorOrcid.orcid_data),
selectinload(models.Author.last_known_institution).selectinload(models.Institution.ror).raiseload('*'),
selectinload(models.Author.last_known_institution).raiseload('*'),
orm.Load(models.Author).raiseload('*')
).filter(models.Author.author_id.in_(object_ids)).all()
elif entity_type == "venue":
objects = db.session.query(models.Venue).options(
selectinload(models.Venue.counts_by_year_papers),
selectinload(models.Venue.counts_by_year_citations),
orm.Load(models.Venue).raiseload('*')
).filter(models.Venue.journal_id.in_(object_ids)).all()
elif entity_type == "institution":
objects = db.session.query(models.Institution).filter(models.Institution.affiliation_id.in_(object_ids)).all()
elif entity_type == "concept":
objects = db.session.query(models.Concept).filter(models.Concept.field_of_study_id.in_(object_ids)).all()
logger.info(f'got {len(objects)} objects in {elapsed(start_time, 4)}s')
return objects
# python -m scripts.fast_queue --entity=work --method=add_everything --limit=3
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run fast queue.")
parser.add_argument('--entity', type=str, help="the entity type to run")
parser.add_argument('--method', type=str, help="the method to run")
parser.add_argument('--id', nargs="?", type=str, help="id of the one thing you want to update (case sensitive)")
parser.add_argument('--limit', "-l", nargs="?", type=int, help="how many objects to work on")
parser.add_argument(
'--chunk', "-ch", nargs="?", default=100, type=int, help="how many objects to take off the queue at once"
)
parsed_args = parser.parse_args()
run(**vars(parsed_args))
| [
"app.db.session.commit",
"time.sleep",
"sqlalchemy.delete",
"app.logger.info",
"models.Venue.journal_id.in_",
"sqlalchemy.orm.selectinload",
"argparse.ArgumentParser",
"models.Author.author_id.in_",
"sqlalchemy.insert",
"sqlalchemy.orm.Load",
"models.Work.paper_id.in_",
"app.logger.warn",
"util.elapsed",
"app.db.session.query",
"models.Concept.field_of_study_id.in_",
"models.Institution.affiliation_id.in_",
"app.db.session.remove",
"time.time",
"sqlalchemy.text",
"collections.defaultdict"
]
| [((2547, 2564), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2558, 2564), False, 'from collections import defaultdict\n'), ((2595, 2612), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2606, 2612), False, 'from collections import defaultdict\n'), ((3057, 3063), 'time.time', 'time', ([], {}), '()\n', (3061, 3063), False, 'from time import sleep, time\n'), ((4189, 4244), 'app.logger.info', 'logger.info', (['f"""getting {chunk_size} ids from the queue"""'], {}), "(f'getting {chunk_size} ids from the queue')\n", (4200, 4244), False, 'from app import logger\n'), ((4262, 4268), 'time.time', 'time', ([], {}), '()\n', (4266, 4268), False, 'from time import sleep, time\n'), ((4516, 4552), 'app.logger.info', 'logger.info', (['f"""got these ids: {ids}"""'], {}), "(f'got these ids: {ids}')\n", (4527, 4552), False, 'from app import logger\n'), ((4680, 4686), 'time.time', 'time', ([], {}), '()\n', (4684, 4686), False, 'from time import sleep, time\n'), ((4892, 4911), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4909, 4911), False, 'from app import db\n'), ((5110, 5116), 'time.time', 'time', ([], {}), '()\n', (5114, 5116), False, 'from time import sleep, time\n'), ((8318, 8372), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run fast queue."""'}), "(description='Run fast queue.')\n", (8341, 8372), False, 'import argparse\n'), ((2681, 2698), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2692, 2698), False, 'from collections import defaultdict\n'), ((3180, 3199), 'app.db.session.remove', 'db.session.remove', ([], {}), '()\n', (3197, 3199), False, 'from app import db\n'), ((3288, 3307), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3305, 3307), False, 'from app import db\n'), ((3461, 3480), 'app.db.session.remove', 'db.session.remove', ([], {}), '()\n', (3478, 3480), False, 'from app import db\n'), ((3561, 3580), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3578, 3580), False, 'from app import db\n'), ((828, 869), 'app.logger.info', 'logger.info', (['f"""found object {objects[0]}"""'], {}), "(f'found object {objects[0]}')\n", (839, 869), False, 'from app import logger\n'), ((917, 936), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (934, 936), False, 'from app import db\n'), ((963, 1014), 'app.logger.warn', 'logger.warn', (['f"""found no object with id {single_id}"""'], {}), "(f'found no object with id {single_id}')\n", (974, 1014), False, 'from app import logger\n'), ((1231, 1237), 'time.time', 'time', ([], {}), '()\n', (1235, 1237), False, 'from time import sleep, time\n'), ((3634, 3656), 'util.elapsed', 'elapsed', (['start_time', '(2)'], {}), '(start_time, 2)\n', (3641, 3656), False, 'from util import elapsed\n'), ((1839, 1864), 'app.logger.info', 'logger.info', (['"""committing"""'], {}), "('committing')\n", (1850, 1864), False, 'from app import logger\n'), ((1894, 1900), 'time.time', 'time', ([], {}), '()\n', (1898, 1900), False, 'from time import sleep, time\n'), ((2393, 2456), 'app.logger.info', 'logger.info', (['"""nothing ready in the queue, waiting 5 seconds..."""'], {}), "('nothing ready in the queue, waiting 5 seconds...')\n", (2404, 2456), False, 'from app import logger\n'), ((2473, 2481), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (2478, 2481), False, 'from time import sleep, time\n'), ((4485, 4507), 'util.elapsed', 'elapsed', (['start_time', '(4)'], {}), '(start_time, 4)\n', (4492, 4507), False, 'from util 
import elapsed\n'), ((4843, 4859), 'sqlalchemy.text', 'text', (['query_text'], {}), '(query_text)\n', (4847, 4859), False, 'from sqlalchemy import orm, text, insert, delete\n'), ((8150, 8172), 'util.elapsed', 'elapsed', (['start_time', '(4)'], {}), '(start_time, 4)\n', (8157, 8172), False, 'from util import elapsed\n'), ((1450, 1456), 'time.time', 'time', ([], {}), '()\n', (1454, 1456), False, 'from time import sleep, time\n'), ((2035, 2054), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2052, 2054), False, 'from app import db\n'), ((3227, 3243), 'sqlalchemy.delete', 'delete', (['my_table'], {}), '(my_table)\n', (3233, 3243), False, 'from sqlalchemy import orm, text, insert, delete\n'), ((3508, 3524), 'sqlalchemy.insert', 'insert', (['my_table'], {}), '(my_table)\n', (3514, 3524), False, 'from sqlalchemy import orm, text, insert, delete\n'), ((6675, 6711), 'models.Work.paper_id.in_', 'models.Work.paper_id.in_', (['object_ids'], {}), '(object_ids)\n', (6699, 6711), False, 'import models\n'), ((7412, 7451), 'models.Author.author_id.in_', 'models.Author.author_id.in_', (['object_ids'], {}), '(object_ids)\n', (7439, 7451), False, 'import models\n'), ((2120, 2142), 'util.elapsed', 'elapsed', (['start_time', '(4)'], {}), '(start_time, 4)\n', (2127, 2142), False, 'from util import elapsed\n'), ((2325, 2347), 'util.elapsed', 'elapsed', (['loop_start', '(2)'], {}), '(loop_start, 2)\n', (2332, 2347), False, 'from util import elapsed\n'), ((5380, 5415), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.locations'], {}), '(models.Work.locations)\n', (5392, 5415), False, 'from sqlalchemy.orm import selectinload\n'), ((5635, 5665), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.mesh'], {}), '(models.Work.mesh)\n', (5647, 5665), False, 'from sqlalchemy.orm import selectinload\n'), ((5750, 5784), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.abstract'], {}), '(models.Work.abstract)\n', (5762, 5784), False, 'from sqlalchemy.orm import selectinload\n'), ((7747, 7786), 'models.Venue.journal_id.in_', 'models.Venue.journal_id.in_', (['object_ids'], {}), '(object_ids)\n', (7774, 7786), False, 'import models\n'), ((1752, 1781), 'util.elapsed', 'elapsed', (['method_start_time', '(4)'], {}), '(method_start_time, 4)\n', (1759, 1781), False, 'from util import elapsed\n'), ((5165, 5194), 'app.db.session.query', 'db.session.query', (['models.Work'], {}), '(models.Work)\n', (5181, 5194), False, 'from app import db\n'), ((6824, 6873), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Author.counts_by_year_papers'], {}), '(models.Author.counts_by_year_papers)\n', (6836, 6873), False, 'from sqlalchemy.orm import selectinload\n'), ((6887, 6939), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Author.counts_by_year_citations'], {}), '(models.Author.counts_by_year_citations)\n', (6899, 6939), False, 'from sqlalchemy.orm import selectinload\n'), ((6953, 6998), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Author.alternative_names'], {}), '(models.Author.alternative_names)\n', (6965, 6998), False, 'from sqlalchemy.orm import selectinload\n'), ((7012, 7055), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Author.author_concepts'], {}), '(models.Author.author_concepts)\n', (7024, 7055), False, 'from sqlalchemy.orm import selectinload\n'), ((7895, 7944), 'models.Institution.affiliation_id.in_', 'models.Institution.affiliation_id.in_', (['object_ids'], {}), '(object_ids)\n', (7932, 7944), False, 'import models\n'), ((5317, 5350), 
'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.records'], {}), '(models.Work.records)\n', (5329, 5350), False, 'from sqlalchemy.orm import selectinload\n'), ((5430, 5463), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.journal'], {}), '(models.Work.journal)\n', (5442, 5463), False, 'from sqlalchemy.orm import selectinload\n'), ((5493, 5529), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.references'], {}), '(models.Work.references)\n', (5505, 5529), False, 'from sqlalchemy.orm import selectinload\n'), ((5559, 5605), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.references_unmatched'], {}), '(models.Work.references_unmatched)\n', (5571, 5605), False, 'from sqlalchemy.orm import selectinload\n'), ((5680, 5720), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.counts_by_year'], {}), '(models.Work.counts_by_year)\n', (5692, 5720), False, 'from sqlalchemy.orm import selectinload\n'), ((5799, 5834), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.extra_ids'], {}), '(models.Work.extra_ids)\n', (5811, 5834), False, 'from sqlalchemy.orm import selectinload\n'), ((5864, 5903), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.related_works'], {}), '(models.Work.related_works)\n', (5876, 5903), False, 'from sqlalchemy.orm import selectinload\n'), ((6552, 6591), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.concepts_full'], {}), '(models.Work.concepts_full)\n', (6564, 6591), False, 'from sqlalchemy.orm import selectinload\n'), ((6621, 6642), 'sqlalchemy.orm.Load', 'orm.Load', (['models.Work'], {}), '(models.Work)\n', (6629, 6642), False, 'from sqlalchemy import orm, text, insert, delete\n'), ((6771, 6802), 'app.db.session.query', 'db.session.query', (['models.Author'], {}), '(models.Author)\n', (6787, 6802), False, 'from app import db\n'), ((7563, 7611), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Venue.counts_by_year_papers'], {}), '(models.Venue.counts_by_year_papers)\n', (7575, 7611), False, 'from sqlalchemy.orm import selectinload\n'), ((7626, 7677), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Venue.counts_by_year_citations'], {}), '(models.Venue.counts_by_year_citations)\n', (7638, 7677), False, 'from sqlalchemy.orm import selectinload\n'), ((7851, 7887), 'app.db.session.query', 'db.session.query', (['models.Institution'], {}), '(models.Institution)\n', (7867, 7887), False, 'from app import db\n'), ((8045, 8093), 'models.Concept.field_of_study_id.in_', 'models.Concept.field_of_study_id.in_', (['object_ids'], {}), '(object_ids)\n', (8081, 8093), False, 'import models\n'), ((4334, 4350), 'sqlalchemy.text', 'text', (['text_query'], {}), '(text_query)\n', (4338, 4350), False, 'from sqlalchemy import orm, text, insert, delete\n'), ((7069, 7103), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Author.orcids'], {}), '(models.Author.orcids)\n', (7081, 7103), False, 'from sqlalchemy.orm import selectinload\n'), ((7277, 7327), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Author.last_known_institution'], {}), '(models.Author.last_known_institution)\n', (7289, 7327), False, 'from sqlalchemy.orm import selectinload\n'), ((7356, 7379), 'sqlalchemy.orm.Load', 'orm.Load', (['models.Author'], {}), '(models.Author)\n', (7364, 7379), False, 'from sqlalchemy import orm, text, insert, delete\n'), ((7510, 7540), 'app.db.session.query', 'db.session.query', (['models.Venue'], {}), '(models.Venue)\n', (7526, 7540), False, 'from app import db\n'), ((8005, 8037), 
'app.db.session.query', 'db.session.query', (['models.Concept'], {}), '(models.Concept)\n', (8021, 8037), False, 'from app import db\n'), ((5217, 5250), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.records'], {}), '(models.Work.records)\n', (5229, 5250), False, 'from sqlalchemy.orm import selectinload\n'), ((6076, 6114), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.affiliations'], {}), '(models.Work.affiliations)\n', (6088, 6114), False, 'from sqlalchemy.orm import selectinload\n'), ((6334, 6372), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.affiliations'], {}), '(models.Work.affiliations)\n', (6346, 6372), False, 'from sqlalchemy.orm import selectinload\n'), ((6447, 6481), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.concepts'], {}), '(models.Work.concepts)\n', (6459, 6481), False, 'from sqlalchemy.orm import selectinload\n'), ((7692, 7714), 'sqlalchemy.orm.Load', 'orm.Load', (['models.Venue'], {}), '(models.Venue)\n', (7700, 7714), False, 'from sqlalchemy import orm, text, insert, delete\n'), ((7161, 7211), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Author.last_known_institution'], {}), '(models.Author.last_known_institution)\n', (7173, 7211), False, 'from sqlalchemy.orm import selectinload\n'), ((5933, 5971), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.affiliations'], {}), '(models.Work.affiliations)\n', (5945, 5971), False, 'from sqlalchemy.orm import selectinload\n'), ((6184, 6222), 'sqlalchemy.orm.selectinload', 'selectinload', (['models.Work.affiliations'], {}), '(models.Work.affiliations)\n', (6196, 6222), False, 'from sqlalchemy.orm import selectinload\n')] |
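The core trick in fetch_queue_chunk_ids above is the `for update skip locked` CTE, which lets many workers claim disjoint chunks of the queue without coordinating. Here is a minimal sketch of that claim query on its own, assuming a hypothetical Postgres queue table and a placeholder DSN, neither of which comes from the source.

from sqlalchemy import create_engine, text

engine = create_engine("postgresql://localhost/openalex_example")  # placeholder DSN

CLAIM_SQL = text("""
    with chunk as (
        select id
        from queue.work_store                 -- hypothetical queue table
        where started is null
        order by finished asc nulls first, rand
        limit :chunk
        for update skip locked
    )
    update queue.work_store q
    set started = now()
    from chunk
    where q.id = chunk.id
    returning chunk.id;
""")

with engine.begin() as conn:                  # one transaction per claimed chunk
    ids = [row[0] for row in conn.execute(CLAIM_SQL, {"chunk": 100})]
    print(f"claimed {len(ids)} rows")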
import json
import discord
from discord.ext import commands
from assets import internet_funcs
from assets.list_funcs import chunks
class Memes(commands.Cog, description="Memes from https://imgflip.com/"):
def __init__(self, bot):
self.bot = bot
with open("config.json") as configFile:
config = json.load(configFile)
self.username = config.get("imgflip_username")
self.password = config.get("imgflip_password")
self.memetemps = {}
@commands.Cog.listener()
async def on_ready(self):
result = json.loads(await internet_funcs.get_response("https://api.imgflip.com/get_memes"))
if result["success"] is not True:
return
result = result["data"]["memes"]
for k in result:
self.memetemps[k["id"]] = {"name": k["name"], "box_count": k["box_count"]}
@commands.command(name="memetemplates", aliases=["memetemps"])
async def meme_temps(self, ctx):
"""Fetches top 100 meme templates from imgflip.com"""
# TODO: pagination for meme templates
result = list(self.memetemps.items())
if not result:
await self.on_ready()
result = list(self.memetemps.items())
n = 0
split_entries = list(chunks(result, 25))
for entry in split_entries:
embed = discord.Embed(title="Meme Templates", color=0x00ff00)
for meme in entry:
n += 1
meme_id = meme[0]
meme_name = meme[1]["name"]
embed.add_field(name=f"{n}. {meme_name}", value=f"ID: `{meme_id}`", inline=False)
try:
await ctx.author.send(embed=embed)
except discord.Forbidden:
await ctx.send("I can't DM you! Please enable DMs and try again.")
return
@commands.command(name="memegen", aliases=["memegenerator"])
async def meme_gen(self, ctx, meme_id, *text):
"""Generates a meme from imgflip. For template IDs, see the `memetemplates` command"""
text = list(text)
if self.memetemps == {}:
await self.on_ready()
if len(text) > 20:
text = text[:20]
if not str(meme_id).isnumeric():
found = False
for k, v in self.memetemps.items():
if str(meme_id).lower() == str(v["name"]).lower():
meme_id = int(k)
found = True
break
if not found:
return await ctx.send("Meme not found. Please check the ID and try again.")
# clean up the number of boxes to send
if meme_id in self.memetemps.keys():
if len(text) > self.memetemps[meme_id]["box_count"]:
text = text[:int(self.memetemps[meme_id]["box_count"])]
if len(text) < self.memetemps[meme_id]["box_count"]:
text += [""] * int(self.memetemps[meme_id]["box_count"] - len(text))
# ready the text boxes
boxes_dict = {}
for box_count in range(len(text)):
boxes_dict[f"boxes[{box_count}][text]"] = text[box_count]
boxes_dict[f"boxes[{box_count}][color]"] = "#000000"
boxes_dict[f"boxes[{box_count}][outline_color]"] = "#FFFFFF"
# send the request
payload = {"template_id": meme_id, "username": self.username, "password": self.password}
payload.update(boxes_dict)
result = json.loads(await internet_funcs.post("https://api.imgflip.com/caption_image", data=payload))
if result["success"] is not True:
await ctx.send("An error occurred:" + " " + "**" + result["error_message"] + "**")
return
await ctx.send(result["data"]["url"])
def setup(bot):
bot.add_cog(Memes(bot))
| [
"discord.ext.commands.Cog.listener",
"assets.internet_funcs.get_response",
"assets.list_funcs.chunks",
"json.load",
"assets.internet_funcs.post",
"discord.Embed",
"discord.ext.commands.command"
]
| [((497, 520), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (518, 520), False, 'from discord.ext import commands\n'), ((871, 932), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""memetemplates"""', 'aliases': "['memetemps']"}), "(name='memetemplates', aliases=['memetemps'])\n", (887, 932), False, 'from discord.ext import commands\n'), ((1852, 1911), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""memegen"""', 'aliases': "['memegenerator']"}), "(name='memegen', aliases=['memegenerator'])\n", (1868, 1911), False, 'from discord.ext import commands\n'), ((331, 352), 'json.load', 'json.load', (['configFile'], {}), '(configFile)\n', (340, 352), False, 'import json\n'), ((1274, 1292), 'assets.list_funcs.chunks', 'chunks', (['result', '(25)'], {}), '(result, 25)\n', (1280, 1292), False, 'from assets.list_funcs import chunks\n'), ((1350, 1400), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Meme Templates"""', 'color': '(65280)'}), "(title='Meme Templates', color=65280)\n", (1363, 1400), False, 'import discord\n'), ((585, 649), 'assets.internet_funcs.get_response', 'internet_funcs.get_response', (['"""https://api.imgflip.com/get_memes"""'], {}), "('https://api.imgflip.com/get_memes')\n", (612, 649), False, 'from assets import internet_funcs\n'), ((3486, 3560), 'assets.internet_funcs.post', 'internet_funcs.post', (['"""https://api.imgflip.com/caption_image"""'], {'data': 'payload'}), "('https://api.imgflip.com/caption_image', data=payload)\n", (3505, 3560), False, 'from assets import internet_funcs\n')] |
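For reference, the imgflip caption call above can be exercised outside Discord. This is a minimal synchronous sketch using requests, with placeholder credentials and the form-field names mirrored from the cog; the helper name is made up.

import requests

def caption_meme(template_id, lines, username, password):
    # Build the boxes[N][text] form fields the imgflip API expects.
    payload = {"template_id": template_id, "username": username, "password": password}
    for i, line in enumerate(lines):
        payload[f"boxes[{i}][text]"] = line
    resp = requests.post("https://api.imgflip.com/caption_image", data=payload).json()
    if not resp["success"]:
        raise RuntimeError(resp["error_message"])
    return resp["data"]["url"]

# Example (needs a real imgflip account and template id):
# print(caption_meme("<template_id>", ["top text", "bottom text"], "<user>", "<pass>"))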
import itertools
import json
import logging
import re
from django.views.generic import TemplateView
from django.http import HttpResponse
from django.views import View
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, redirect
from django.urls import reverse
from django.utils import timezone
from django.utils.decorators import method_decorator
from django_chunk_upload_handlers.clam_av import VirusFoundInFileException
from core.base import GroupRequiredMixin
from core.utils import (
deep_index_items_by,
deep_index_items_by_exists,
get,
key_by,
index_users_by_group,
compact_list,
submission_contact,
public_login_url,
parse_notify_template,
parse_api_datetime,
pluck,
to_json,
from_json,
deep_update,
internal_redirect,
is_date,
notify_footer,
notify_contact_email,
)
from django_countries import countries
from django.conf import settings
from cases.submissions import SUBMISSION_TYPE_HELPERS, get_submission_deadline
from cases.utils import decorate_orgs
from core.constants import (
ALL_REGION_ALLOWED_TYPE_IDS,
SECURITY_GROUP_TRA_HEAD_OF_INVESTIGATION,
SECURITY_GROUP_TRA_LEAD_INVESTIGATOR,
SECURITY_GROUPS_TRA,
SECURITY_GROUP_TRA_ADMINISTRATOR,
SECURITY_GROUPS_TRA_ADMINS,
SECURITY_GROUP_ORGANISATION_OWNER,
SUBMISSION_TYPE_QUESTIONNAIRE,
SUBMISSION_TYPE_APPLICATION,
SUBMISSION_NOTICE_TYPE_INVITE,
SUBMISSION_NOTICE_TYPE_DEFICIENCY,
SUBMISSION_TYPE_THIRD_PARTY,
CASE_ROLE_AWAITING_APPROVAL,
CASE_ROLE_REJECTED,
CASE_ROLE_APPLICANT,
CASE_ROLE_PREPARING,
DIRECTION_TRA_TO_PUBLIC,
)
from trade_remedies_client.mixins import TradeRemediesAPIClientMixin
from trade_remedies_client.exceptions import APIException
logger = logging.getLogger(__name__)
org_fields = json.dumps(
{
"Organisation": {
"id": 0,
"has_non_draft_subs": 0,
"gov_body": 0,
"has_roi": 0,
}
}
)
class CasesView(LoginRequiredMixin, TemplateView, TradeRemediesAPIClientMixin):
template_name = "cases/cases.html"
def get(self, request, *args, **kwargs):
list_mode = request.GET.get("tab", "my")
panel_layout = self.client(self.request.user).get_system_boolean("PRE_RELEASE_PANELS")
fields = {
"Case": {
"id": 0,
"user_case": 0,
"name": 0,
"reference": 0,
"created_at": 0,
"type": {
"name": 0,
"acronym": 0,
"colour": 0,
"reference": 0,
"applicant": {"organisation": 0, "name": 0, "id": 0},
},
"applicant": {
"organisation": {
"name": 0,
"id": 0,
}
},
"stage": {"name": 0},
"case_status": {"next_action": 0, "next_notice_due": 0},
}
}
if list_mode == "archived":
fields = deep_update(
fields,
{
"Case": {
"workflow_state": {
"MEASURE_EXPIRY": 0,
"DETERMINATION_ACTIVE_DATE": 0,
}
}
},
)
cases = self.client(request.user).get_cases(
archived=list_mode == "archived",
all_cases=list_mode == "all",
new_cases=list_mode == "new",
fields=json.dumps(fields),
)
tabs = {
"value": list_mode,
"tabList": [
{"label": "Your cases", "value": "my", "sr_text": "Show your cases"},
{"label": "Open cases", "value": "all", "sr_text": "Show open cases"},
{
"label": "New applications",
"value": "new",
"sr_text": "Show new applications",
},
{
"label": "Archived",
"value": "archived",
"sr_text": "Show archived cases",
},
],
}
template_name = self.template_name if panel_layout else "cases/cases_old.html"
body_class = "full-width kill-footer" if panel_layout else "full-width"
return render(
request,
template_name,
{
"body_classes": body_class,
"cases": cases,
"tabs": tabs,
},
)
class CaseBaseView(
LoginRequiredMixin,
GroupRequiredMixin,
PermissionRequiredMixin,
TemplateView,
TradeRemediesAPIClientMixin,
):
permission_required = []
groups_required = SECURITY_GROUPS_TRA
supress_nav_section = False
def dispatch(self, *args, **kwargs):
if self.request.user.is_authenticated:
self._client = self.client(self.request.user)
self.case_id = kwargs.get("case_id")
return super().dispatch(*args, **kwargs)
def get(self, request, *args, **kwargs):
self.kwargs = kwargs
self.organisation_id = kwargs.get("organisation_id")
self.request = request
self.user_token = request.user.token
case_fields = json.dumps(
{
"Case": {
"id": 0,
"name": 0,
"initiated_at": 0,
"decision_to_initiate,name": 0,
"reference": 0,
"sequence": 0,
"type": 0,
"archived_at": 0,
"archive_reason": {"name": 0},
"submission_count": 0,
"participant_count": 0,
"stage": {"name": 0},
"case_status": 0,
"organisation": {"id": 0, "name": 0},
}
}
)
self.case = self._client.get_case(self.case_id, fields=case_fields)
self.document_count = self._client.get_case_document_count(self.case_id)
self.start = int(request.GET.get("start", 0))
self.limit = int(request.GET.get("limit", 20))
content_id = self.kwargs.get("nav_section_id")
context = {
"case": self.case,
"case_id": self.case_id,
"document_count": self.document_count,
"content": self._client.get_case_content(self.case_id, content_id=content_id),
"tree": self._client.get_nav_section(self.case_id, selected_content=content_id),
"body_classes": "full-width",
"panel_layout": self._client.get_system_boolean("PRE_RELEASE_PANELS"),
"organisation_id": self.organisation_id,
"submission_group_name": "submission",
"alert": request.GET.get("alert"),
"user": request.user,
}
deep_update(context, self.add_page_data())
if context.get("redirect"):
return redirect(context.get("redirect"))
return render(request, self.template_name, context)
def add_page_data(self):
return {}
def get_documents(self, submission, all_versions=None):
result = self._client.get_submission_documents(
self.case_id, submission.get("id"), all_versions=all_versions
)
all_documents = result.get("documents", [])
deficiency_docs = result.get("deficiency_documents", [])
if all_versions:
# If this submission has an immediate ancestor, get the docs from that to mark status
docs_by_submission = deep_index_items_by(all_documents, "version")
this_version = int(submission.get("version"))
this_sub = docs_by_submission.get(str(this_version))
sub_docs = this_sub[0].get("documents")
# we have a list of the submissions that make up a family - id, version and documents.
if this_version > 1:
parent_sub = docs_by_submission.get(str(this_version - 1))
parent_docs = parent_sub and parent_sub[0].get("documents")
parent_doc_idx = {}
for parent_doc in parent_docs:
doc_type = get(parent_doc, "type/name") + "|" + get(parent_doc, "name")
parent_doc_idx[doc_type] = parent_doc
for document in sub_docs:
document["parent"] = parent_doc_idx.get(
get(document, "type/name") + "|" + get(document, "name")
)
else:
sub_docs = all_documents
submission_documents = deep_index_items_by(sub_docs, "type/key")
document_conf_index = deep_index_items_by(
submission_documents.get("respondent", []), "confidential"
)
confidential = document_conf_index.get("true", [])
confidential.sort(key=lambda cf: cf.get("name"))
non_conf = document_conf_index.get("", [])
doc_index = key_by(confidential, "id")
non_conf.sort(key=lambda nc: get(get(doc_index, str(nc.get("parent_id"))), "name"))
return {
"caseworker": submission_documents.get("caseworker", []),
"respondent": submission_documents.get("respondent", []),
"loa": submission_documents.get("loa", []),
"deficiency": deficiency_docs,
"confidential": confidential,
"nonconfidential": non_conf,
}
def has_permission(self):
"""
Override this method to customize the way permissions are checked.
"""
perms = self.get_permission_required()
return not perms or self.request.user.has_perms(perms)
class CaseAdminView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
permission_required = ("case_admin",)
template_name = "cases/admin.html"
def add_page_data(self):
case_enums = self._client.get_all_case_enums()
case_users = self._client.get_case_users(self.case["id"])
context = {
"case_enums": case_enums,
"case": self.case,
"users": case_users,
"participants": self._client.get_case_participants(self.case_id),
}
return context
def post(self, request, case_id, *args, **kwargs):
action = request.POST.get("action")
case = self._client.get_case(case_id)
update_spec = {}
if action == "initiation_flag_toggle":
if case["initiated_at"]:
update_spec["initiated_at"] = ""
else:
update_spec["initiated_at"] = timezone.now()
elif action == "set_case_stage":
update_spec["ignore_flow"] = request.POST.get("ignore_flow") or "false"
update_spec["stage_id"] = request.POST.get("stage_id")
elif action == "set_name":
update_spec["name"] = request.POST.get("name")
elif action == "set_case_type":
update_spec["stage_id"] = ""
update_spec["type_id"] = request.POST.get("type_id")
elif action == "toggle_archived":
if case.get("archived_at"):
update_spec["archived_at"] = ""
else:
update_spec["archived_at"] = timezone.now()
update_spec["archive_reason_id"] = request.POST.get("archive_reason_id")
elif action == "reset_initiation_decision":
update_spec["reset_initiation_decision"] = True
if update_spec:
response = self._client.update_case(case_id, update_spec)
return redirect(f"/case/{case_id}/admin/")
class CaseMilestoneDatesView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
permission_required = ("case_admin",)
template_name = "cases/milestone_dates.html"
def add_page_data(self):
case_enums = self._client.get_all_case_enums(self.case_id)
case_milestones = self._client.case_milestones(self.case["id"])
existing_keys = [cm["key"] for cm in case_milestones]
context = {
"milestone_types": case_enums.get("milestone_types"),
"available_review_types": case_enums.get("available_review_types"),
"milestones": case_milestones,
"existing_milestones": existing_keys,
}
return context
def post(self, request, case_id, milestone_key=None):
milestone_key = milestone_key or request.POST.get("milestone_key")
date = request.POST.get("date")
response = self._client.set_case_milestone(case_id, milestone_key, date)
return redirect(f"/case/{case_id}/milestones/")
class CaseView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
permission_required = []
template_name = "cases/case.html"
extra_case_fields = json.dumps(
{
"Case": {
"applicant": {
"organisation": {
"id": 0,
"name": 0,
"primary_contact": {
"name": 0,
"email": 0,
"phone": 0,
"address": 0,
"post_code": 0,
"country": {"name": 0},
"has_user": 0,
"user": {"id": 0, "organisation": {"id": 0, "name": 0}},
},
}
},
"parent": {"id": 0, "name": 0, "reference": 0, "type": 0},
"workflow_state": {"LINKED_CASE_CONFIRM": 0},
"initiated_sequence": 0,
}
}
)
def add_page_data(self):
team = self._client.get_case_team_members(self.case_id)
team_by_group = index_users_by_group([member.get("user") for member in team])
group_order = [
SECURITY_GROUP_TRA_ADMINISTRATOR,
SECURITY_GROUP_TRA_HEAD_OF_INVESTIGATION,
SECURITY_GROUP_TRA_LEAD_INVESTIGATOR,
]
case_extras = self._client.get_case(self.case_id, fields=self.extra_case_fields)
return {
"audit": self._client.get_audit(
case_id=self.case_id, start=0, limit=20, milestone=True
),
"case_page": True,
"case": case_extras,
"team_groups": team_by_group,
"group_order": group_order,
"public_base_url": settings.PUBLIC_BASE_URL,
}
def post(self, request, case_id, *args, **kwargs):
self._client.set_case_data(case_id, {"name": request.POST.get("name")})
redirect = request.POST.get("redirect")
if redirect:
return internal_redirect(request.POST.get("redirect"), "/")
else:
return HttpResponse(json.dumps({"result": "ok"}), content_type="application/json")
class PartiesView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/parties.html"
def add_page_data(self):
parties = []
roles = self._client.get_case_roles()
all_case_invites = self._client.get_contact_case_invitations(self.case_id)
all_participants = self._client.get_case_participants(self.case_id, fields=org_fields)
case_invites = deep_index_items_by(all_case_invites, "contact/id")
invited = set([])
accepted = set([])
for invite in all_case_invites:
org_id = invite.get("organisation", {}).get("id")
if invite.get("accepted_at"):
# note: accepted and invited are mutually exclusive
accepted.add(org_id)
else:
invited.add(org_id)
for role in roles:
_base = all_participants[role["key"]]
_base["key"] = role["key"]
_base["name"] = role["plural"]
if role["allow_cw_create"]:
_base["add_link"] = f"Add {role['name']}"
parties.append(_base)
return {
"party_types": parties,
"invites": case_invites,
"accepted_orgs": list(accepted),
"invited_orgs": list(invited),
"pre_release_invitations": self._client.get_system_boolean("PRE_RELEASE_INVITATIONS"),
"alert": self.request.GET.get("alert"),
}
class CaseTeamView(CaseBaseView):
permission_required = "can_assign_team"
template_name = "cases/team.html"
def add_page_data(self):
all_users = self._client.get_all_users(group_name="caseworker")
users_by_group = index_users_by_group(all_users)
team = self._client.get_case_team_members(self.case_id)
return {
"team": [member.get("user", {}).get("id") for member in team],
"tra_users": all_users,
"grouped_users": users_by_group,
"groups": SECURITY_GROUPS_TRA[1:],
"inactive_user_count": sum(user["active"] is False for user in all_users),
"singleton_groups": [
SECURITY_GROUP_TRA_HEAD_OF_INVESTIGATION,
SECURITY_GROUP_TRA_ADMINISTRATOR,
],
}
def post(self, request, case_id, *args, **kwargs):
user_ids = request.POST.getlist("user_id")
response = self._client.assign_case_team(case_id, user_ids)
return redirect(f"/case/{case_id}/")
class SubmissionsView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/submissions.html"
show_global = False
sub_page = ""
def get_tab(self, role, party):
if not role.get("allow_cw_create"):
return role["key"]
return "sampled" if party.get("sampled") else "not_sampled"
def consolidate_submissions(
self, case, participants, submissions_by_party, counts, selected_tab
):
roles = []
single_role_return = None # for awaiting and rejected - only return that specific role
for role in self._client.get_case_roles():
role["participants"] = []
for party in participants.get(role["key"], {}).get("parties", []):
tab = self.get_tab(role, party)
submissions = submissions_by_party.get(party["id"], [])
submissions += submissions_by_party.get("", [])
if submissions:
counts[tab] = counts.get(tab, 0) + len(submissions)
if tab == selected_tab:
party["submissions"] = submissions
role["participants"].append(party)
if not party.get("gov_body"):
role["customer_parties"] = True
sort_key = (
"submissions/0/received_at"
if selected_tab == CASE_ROLE_AWAITING_APPROVAL
else "name"
)
role["participants"].sort(key=lambda pt: get(pt, sort_key) or "")
if role.get("key") == selected_tab:
single_role_return = role
if role.get("allow_cw_create"):
roles.append(role)
return [single_role_return] if single_role_return else roles
def get_name(self, participant):
return participant.get("name")
def flatten_participants(self, source):
participants = []
for role in source:
rec = source[role]
participants = participants + rec["parties"]
participants.sort(key=self.get_name)
return participants
    def divide_submissions(self, submissions):
        """Split submissions into outgoing (sent), draft, and incoming groups,
        each sorted newest first."""
incoming = []
outgoing = []
draft = []
for submission in submissions:
if get(submission, "status/sent"):
outgoing.append(submission)
elif get(submission, "status/default") and get(submission, "type/direction") != 1:
draft.append(submission)
else:
if (
not get(submission, "status/draft")
or get(submission, "type/key") == "application"
): # customer draft should not be seen by investigators
incoming.append(submission)
return {
"incoming": sorted(incoming, key=lambda su: su.get("received_at") or "", reverse=True),
"outgoing": sorted(outgoing, key=lambda su: su.get("sent_at") or "", reverse=True),
"draft": sorted(draft, key=lambda su: su.get("created_at") or "", reverse=True),
}
def add_page_data(self):
tab = self.request.GET.get("tab", "sampled").lower()
all_submissions = self._client.get_submissions(self.case_id, show_global=True)
submissions_by_type = deep_index_items_by(all_submissions, "type/name")
# Get submissions that have just been created by customer
# or are still in draft after creation
draft_submissions = deep_index_items_by(all_submissions, "status/default").get("true") or []
# Remove any that are back with the customer following deficiency
draft_first_version_submissions = (
deep_index_items_by(draft_submissions, "version").get("1") or []
)
# Exclude these drafts from our list
non_draft_submissions = [
sub for sub in all_submissions if sub not in draft_first_version_submissions
]
# draft applications are included to allow a heads up view
# to the caseworker before it's submitted
if submissions_by_type.get("application", [{}])[0].get("status", {}).get("default") is True:
submissions_by_type["application"][0]["tra_editable"] = True
non_draft_submissions += submissions_by_type["application"]
submissions_by_party = deep_index_items_by(non_draft_submissions, "organisation/id")
case_enums = self._client.get_all_case_enums()
invites = self._client.get_case_invite_submissions(self.case_id)
participants = self._client.get_case_participants(self.case_id, fields=org_fields)
flat_participants = self.flatten_participants(participants)
counts = {}
if self.sub_page:
self.template_name = f"cases/submissions_{self.sub_page}.html"
tab = self.request.GET.get("tab", "incoming").lower()
elif self._client.get_system_boolean("PRE_NEW_SUBMISSION_PAGE"):
self.template_name = "cases/submissions_new.html"
context = {
"raw_participants": participants,
"submissions": submissions_by_type,
"participants": flat_participants,
"counts": counts,
"all_roles": self.consolidate_submissions(
self.case,
participants=participants,
submissions_by_party=submissions_by_party,
counts=counts,
selected_tab=tab,
),
"submission_types": case_enums["case_worker_allowed_submission_types"],
"invites": invites,
"tab": tab,
"submission_groups": self.divide_submissions(all_submissions),
"all_submissions": all_submissions,
}
# TODO: Temp handling of application vs ex_officio ones
if not submissions_by_type.get("application") and submissions_by_type.get(
"ex officio application"
):
context["submissions"]["application"] = submissions_by_type["ex officio application"]
return context
class SubmissionView(CaseBaseView):
"""
View and modify submissions
"""
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/submission.html"
extra_case_fields = json.dumps(
{
"Case": {
"applicant": 0,
"product": 0,
"sources": 0,
}
}
)
def add_page_data_old(self):
alert = self.request.GET.get("alert") # indicates the submission has just been created
documents = []
submission = {}
submission_id = self.kwargs.get("submission_id")
third_party_invite = False
if submission_id:
submission = self._client.get_submission(self.case_id, submission_id)
submission_type = submission["type"]
third_party_invite = submission_type["name"] == "Invite 3rd party"
self.organisation_id = submission["organisation"]["id"]
created_by_id = get(submission, "created_by/id")
if created_by_id:
full_user = self._client.get_user(created_by_id)
submission["created_by"]["organisation"] = get(full_user, "organisations/0")
submission_context = {}
if SUBMISSION_TYPE_HELPERS.get(submission_type["key"]):
submission_context = SUBMISSION_TYPE_HELPERS[submission_type["key"]](
submission, self.request.user
).get_context()
self.template_name = "cases/submission.html"
case_extras = self._client.get_case(self.case_id, fields=self.extra_case_fields)
context = {
"submission": submission,
"template_name": f"{submission_type['key']}",
"documents": self.get_documents(submission=submission, all_versions=True),
"alert": alert,
"case": case_extras,
"third_party_invite": third_party_invite,
**submission_context,
}
if (
not submission
or not submission.get("status")
or submission.get("status", {}).get("default")
):
context["mode"] = "form"
else:
context["mode"] = "view"
if self.organisation_id:
self.organisation = self._client.get_organisation(self.organisation_id)
context["organisation"] = self.organisation
context["organisation_id"] = str(self.organisation["id"])
return context
def get_all_participants(self, case_participants):
all_parties = []
roles = {}
for type_name, role_parties in case_participants.items():
parties = role_parties.get("parties")
if parties:
all_parties.extend(parties)
role = parties[0].get("role")
roles[role.get("key")] = role
return deep_index_items_by(all_parties, "sampled"), roles
def add_page_data(self):
case_enums = self._client.get_all_case_enums()
submission = {}
participants = self._client.get_case_participants(self.case_id, fields=org_fields)
parties, roles = self.get_all_participants(participants)
alert = self.request.GET.get("alert") # indicates the submission has just been created
virus = self.request.GET.get("virus")
upload_error = self.request.GET.get("upload_error")
return_data = {
"virus": virus,
"upload_error": upload_error,
}
submission_id = self.kwargs.get("submission_id")
if submission_id:
submission = self._client.get_submission(self.case_id, submission_id)
json_data = from_json(submission.get("deficiency_notice_params"))
_default = submission.get("status", {}).get("default")
if not _default or (
_default and submission["type"]["id"] == SUBMISSION_TYPE_APPLICATION
):
page_data = self.add_page_data_old()
return_data.update(page_data)
return return_data
self.organisation_id = submission["organisation"]["id"]
return_data.update(
{
"roles": roles,
"submission": submission,
"status": (submission.get("status") or {}).get("id"),
"alert": alert,
"documents": self.get_documents(submission=submission),
"role": submission.get("organisation_case_role") or {"name": "Public file"},
"participants": participants,
"all_participants": parties,
"json_data": json_data,
"selected_submission_type": submission.get("type", {}).get("key")
or "questionnaire",
}
)
else:
role = self.request.GET.get("for")
sampled = self.request.GET.get("sampled") == "sampled"
full_role = (
self._client.get_case_role(role)
if (role and role != "public")
else {"name": "Public file"}
)
case_enums = self._client.get_all_case_enums(direction=DIRECTION_TRA_TO_PUBLIC)
# Get all draft submissions of this type
all_submissions = self._client.get_submissions(self.case_id, show_global=True)
draft_submissions = (
deep_index_items_by(all_submissions, "status/default").get("true") or []
)
# draft_submissions_this_role = deep_index_items_by(draft_submissions,
# 'organisation_case_role/key').get('' if role == 'public' else role)
draft_submissions_this_role = deep_index_items_by(
draft_submissions, "organisation_id"
).get("")
return_data.update(
{
"submission": submission,
"submission_type_id": self.kwargs.get("submission_type_id")
or self.request.GET.get("submission_type_id"),
"submission_statuses": case_enums["submission_statuses"],
"statuses_by_type": case_enums["statuses_by_type"],
"selected_submission_type": self.request.GET.get("submission_type")
or "questionnaire",
"organisation_id": self.kwargs.get("organisation_id"),
"draft_submissions": draft_submissions_this_role,
"role": full_role,
}
)
if role == "public":
return_data.update(
{
"submission_types": case_enums["public_submission_types"],
"public": True,
"organisation_id": self.kwargs.get("organisation_id"),
}
)
else:
role_participants = participants.get(role, {}).get("parties", [])
filtered = list(
filter(
lambda party: party
if party.get("sampled") == sampled and not party.get("gov_body")
else None,
role_participants,
)
)
return_data.update(
{
"submission_types": case_enums["case_worker_allowed_submission_types"],
"participants": participants,
"roles": roles,
}
)
self.organisation_id = self.organisation_id or self.request.GET.get("organisation_id")
if self.organisation_id:
self.organisation = self._client.get_organisation(self.organisation_id)
return_data["organisation"] = self.organisation
return_data["organisation_id"] = str(self.organisation["id"])
# add errors from the url
errors = self.request.GET.get("errors")
if errors:
try:
return_data["errors"] = json.loads(errors)
            except Exception:
                # the errors payload is optional; ignore anything that is not valid JSON
                pass
# Set up template to use
template_name = (
submission["type"]["key"]
if submission
else (role if role == "public" else "questionnaire")
)
return_data.update({"template_name": template_name, "mode": "form"})
return return_data
def post( # noqa: C901
self,
request,
case_id,
submission_id=None,
organisation_id=None,
*args,
**kwargs,
):
"""
Update an existing submission
"""
return_data = {"submission_id": str(submission_id)}
contact_id = request.POST.get("contact_id")
btn_value = request.POST.get("btn-value")
review = request.POST.get("review")
name = request.POST.get("name")
due_at = request.POST.get("due_at")
response_window_yn = request.POST.get("response_window_yn")
time_window = request.POST.get("time_window")
meta_raw = request.POST.getlist("meta")
meta = [json.loads(block) for block in meta_raw]
file_details = deep_index_items_by(meta, "name")
file_details_by_id = deep_index_items_by(meta, "file/id")
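        # Each "meta" block is a JSON description of an uploaded or selected file
        # (presumably produced by the client-side upload widget); it is indexed both
        # by file name for new uploads and by existing file id for attached case files.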
organisation_id = organisation_id or request.POST.get("organisation_id")
send_to = request.POST.get("send_to")
submission = self._client.get_submission(case_id, submission_id)
organisation_id = submission.get("organisation", {}).get("id")
status_id = request.POST.get("submission_status_id")
if submission_id and btn_value == "discard":
delete_submission_response = self._client.delete_submission(
case_id=case_id, submission_id=submission_id
)
return HttpResponse(
json.dumps({"redirect_url": f"/case/{case_id}/submissions/"}),
content_type="application/json",
)
# check if the update is for name or notify contact
if (
submission["name"] != name
or not submission["contact"]
or submission.get("contact", {}).get("id") != contact_id
):
if name is not None and not name:
return_data.update({"errors": '{"name":"You must enter a name"}'})
if due_at and not is_date(due_at):
return_data.update({"errors": '{"due_date":"Invalid date"}'})
if not return_data.get("errors"):
self._client.update_submission(
case_id=case_id,
submission_id=submission_id,
name=name,
contact_id=contact_id, # TODO:not used
due_at=due_at,
time_window=time_window,
description=request.POST.get("description"),
url=request.POST.get("url"),
)
# API `update_submission` returns an incomplete submission
# (no documents) so we re-fetch the submission here.
submission = self._client.get_submission(case_id, submission_id)
return_data.update({"submission": submission})
if submission.get("id"):
for _file in request.FILES.getlist("files"):
try:
_file.readline() # Important, will raise VirusFoundInFileException if infected
original_file_name = _file.original_name
details = file_details.get(original_file_name.lower())[0]
confidential = details.get("confidential")
document_type = details.get("submission_document_type")
document = self._client.upload_document(
case_id=str(case_id),
submission_id=submission_id,
organisation_id=str(organisation_id),
data={
"name": "Questionnaire",
"confidential": confidential,
"submission_document_type": document_type,
"document_name": original_file_name,
"file_name": _file.name,
"file_size": _file.file_size,
},
)
except (VirusFoundInFileException, APIException) as e:
redirect_url = f"/case/{case_id}/submission/{submission_id}/?"
if isinstance(e, VirusFoundInFileException):
redirect_url += "virus=true"
else:
redirect_url += f"upload_error={e}"
logger.warning(f"File upload aborted: {e}")
return HttpResponse(
json.dumps({"redirect_url": redirect_url}),
content_type="application/json",
)
if case_files := request.POST.getlist("case_files"):
for case_file_id in case_files:
details = (file_details_by_id.get(case_file_id) or [])[0]
document = self._client.attach_document(
case_id=str(case_id),
submission_id=submission_id,
organisation_id=str(organisation_id),
data={"submission_document_type": details.get("submission_document_type")},
document_id=case_file_id,
)
submission_group_name = get(submission, "type/key")
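        # "publish"/"withdraw" issue or un-issue the submission (public submissions are
        # also moved to the "sent" state); all three button values then refresh the
        # submission and redirect back with an ?alert= flag.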
if btn_value in ["send", "publish", "withdraw"]:
if btn_value in ["publish", "withdraw"]:
result = self._client.set_submission_state(
case_id,
submission_id,
"sent"
if (btn_value == "send" or submission_group_name == "public")
else "",
{"publish": "issue", "withdraw": "un-issue"}[btn_value],
)
result = self._client.update_submission(
case_id=case_id, submission_id=submission_id
)
return_data.update(
{
"redirect_url": f"/case/{case_id}/submission/{submission['id']}/?alert={btn_value}" # noqa: E301, E501
}
)
if btn_value == "sufficient":
# Set the submission to sufficient
result = self._client.set_submission_state(case_id, submission_id, btn_value)
return_data.update({"alert": "Submission approved"})
submission_type = submission["type"]
type_helpers = SUBMISSION_TYPE_HELPERS.get(submission_type["key"])
if type_helpers:
return_data.update(
type_helpers(submission, self.request.user).on_approve() or {}
)
# Update submission document approvals
self.update_submission_status(request.POST, submission)
# set any deficiency-notice parameters
updated = False
deficiency_notice_params = from_json(submission.get("deficiency_notice_params"))
send_to = request.POST.getlist("send_to")
if send_to:
deficiency_notice_params["send_to"] = send_to
updated = True
regex = r"^deficiency_notice_params_"
for param_key in request.POST:
matches = re.split(regex, param_key)
if len(matches) > 1:
value = request.POST[param_key]
updated = updated or (deficiency_notice_params.get(matches[1]) != value)
if value == "__remove":
if get(deficiency_notice_params, matches[1]):
deficiency_notice_params.pop(matches[1])
else:
deficiency_notice_params[matches[1]] = value
if updated:
update_submission_response = self._client.update_submission(
case_id=case_id,
submission_id=submission_id,
deficiency_notice_params=to_json(deficiency_notice_params),
)
if btn_value == "save-exit":
return_data.update({"redirect_url": f"/case/{case_id}/submissions"})
if deficiency_notice_params:
return_data.update(
{"redirect_url": f"/case/{case_id}/submission/{submission_id}"}
)
return HttpResponse(json.dumps(return_data), content_type="application/json")
def update_submission_status(self, request_params, submission):
"""Update submission document statuses.
        For each document in the submission review, examine the response to
        establish whether it was marked sufficient or deficient, and call the API
        to update the submission document status if it has changed.
:param (dict) request_params: request parameters
:param (dict) submission: submission
"""
        submission_docs = {doc["id"]: doc for doc in submission.get("documents") or []}
for doc_id in request_params:
if doc_id in submission_docs:
current_status = submission_docs[doc_id]["sufficient"]
new_status = request_params[doc_id] == "yes"
if current_status != new_status:
self._client.set_submission_document_state(
case_id=submission["case"]["id"],
submission_id=submission.get("id"),
document_id=doc_id,
status="sufficient" if new_status else "deficient",
                        block_from_public_file=submission_docs[doc_id].get(
                            "block_from_public_file"
                        ),
                        block_reason=submission_docs[doc_id].get("block_reason"),
)
class SubmissionCreateView(SubmissionView):
groups_required = SECURITY_GROUPS_TRA
def post(self, request, case_id, *args, **kwargs):
btn_value = request.POST.get("btn-value")
send_to = request.POST.getlist("send_to")
organisation_id = request.POST.get("organisation_id")
submission_data = {
"submission_type": int(
request.POST.get("submission_type_id", SUBMISSION_TYPE_QUESTIONNAIRE)
),
"case_id": str(case_id),
"organisation_id": str(organisation_id) if organisation_id else None,
"contact_id": request.POST.getlist("contact_id"),
"public": request.POST.get("public"),
}
if send_to:
submission_data["deficiency_notice_params"] = to_json(
{"send_to": send_to, "case_role": request.POST.get("role_key")}
)
result = self._client.create_submission(**submission_data)
submission = result.get("submission", {}) if result else {}
return HttpResponse(
json.dumps(
{
"submission_id": submission.get("id"),
"redirect_url": f"/case/{case_id}/submission/{submission['id']}/",
}
),
content_type="application/json",
)
class SubmissionDocumentView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
def post(self, request, case_id, submission_id, organisation_id=None, *args, **kwargs):
response = {}
document_list_json = request.POST.get("document_list")
if document_list_json:
document_list = json.loads(document_list_json)
for doc_id, doc_status in document_list.items():
logger.debug(f"update document state {doc_id}")
response = self._client.set_submission_document_state(
case_id=case_id,
submission_id=submission_id,
document_id=doc_id,
status=doc_status["status"],
block_from_public_file=doc_status["block_from_public_file"],
block_reason=doc_status["block_reason"],
)
return HttpResponse(json.dumps(response), content_type="application/json")
def delete(self, request, case_id, submission_id, document_id, *args, **kwargs):
response = self._client.detach_document(
case_id=case_id, submission_id=submission_id, document_id=document_id
)
return HttpResponse(json.dumps(response), content_type="application/json")
class SubmissionStatusView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
def post(self, request, case_id, submission_id, *args, **kwargs):
stage_change_if_sufficient = request.POST.get("stage_change_if_sufficient")
stage_change_if_deficient = request.POST.get("stage_change_if_deficient")
submission = self._client.get_submission(case_id, submission_id)
status_id = request.POST.get("submission_status_id")
if submission.get("status", {}).get("id") != status_id:
status_response = self._client.set_submission_status(
case_id=case_id,
submission_id=submission_id,
status_id=status_id,
stage_change_if_sufficient=stage_change_if_sufficient,
stage_change_if_deficient=stage_change_if_deficient,
deficiency_documents=request.FILES.getlist("deficiency_document"),
issue=request.POST.get("issue"),
)
if status_response.get("submission"):
submission_id = status_response["submission"]["id"]
return redirect(f"/case/{case_id}/submission/{submission_id}/")
class SubmissionApprovalView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/submission.html"
def add_page_data(self):
submission_id = self.kwargs.get("submission_id")
submission = self._client.get_submission(self.case_id, submission_id)
sub_documents = self._client.get_submission_documents(self.case_id, submission_id)
documents = sub_documents.get("documents", [])
submission.update(sub_documents)
case_enums = self._client.get_all_case_enums()
submission_type_id = submission["type"]["id"]
status_map = case_enums["submission_status_map"]
status_options = status_map.get(str(submission_type_id), {}).get("keys", [])
status_context = status_map.get(str(submission_type_id))
submission_documents = self.get_documents(submission=submission)
context = {
"template_name": submission["type"]["key"],
"mode": "approval",
"submission": submission,
"case_enums": case_enums,
"status_context": status_context,
"documents": submission_documents,
}
return context
class SubmissionDeficiencyView(CaseBaseView):
"""
Set the submission into a deficiency status and notify the party about it.
"""
groups_required = SECURITY_GROUPS_TRA
raise_exception = True
def get(self, request, case_id, submission_id, *args, **kwargs):
submission = self._client.get_submission(case_id, submission_id)
submission_type = submission["type"]
contact = submission_contact(submission)
contact_name = contact.get("name")
organisation_name = submission.get("organisation", {}).get("name") or (
contact.get("organisation") or {}
).get("name")
notification_template = self._client.get_notification_template(
"NOTIFY_SUBMISSION_DEFICIENCY"
)
template_name = f"cases/submissions/{submission_type['key']}/notify.html"
due_at = get_submission_deadline(submission, settings.FRIENDLY_DATE_FORMAT)
case_number = submission["case"]["reference"]
email = notify_contact_email(self._client, case_number)
footer = notify_footer(self._client, email)
values = {
"full_name": contact_name,
"case_name": submission["case"]["name"],
"case_number": case_number,
"company_name": organisation_name,
"deadline": due_at or "No deadline assigned",
"submission_type": submission.get("type", {}).get("name"),
"login_url": public_login_url(),
"footer": footer,
}
context = {
"form_action": f"/case/{case_id}/submission/{submission_id}/status/notify/",
"form_title": f"Deficiency Notice for {organisation_name}",
"cancel_redirect_url": f"/case/{case_id}/submission/{submission_id}/",
"editable_fields": { # leaving one as a future example
# 'full_name': {'title': 'Name'},
},
"notification_template": notification_template,
"submission": submission,
"case_id": str(case_id),
"contact": contact,
"values": values,
"parsed_template": parse_notify_template(notification_template["body"], values),
}
return render(request, template_name, context)
def post(self, request, case_id, submission_id, *args, **kwargs):
stage_change_if_sufficient = request.POST.get("stage_change_if_sufficient")
stage_change_if_deficient = request.POST.get("stage_change_if_deficient")
submission = self._client.get_submission(case_id, submission_id)
notify_keys = [
"full_name",
"case_name",
"case_number",
"company_name",
"deadline",
"submission_type",
"login_url",
]
notify_data = {key: request.POST.get(key) for key in notify_keys}
if request.POST.get("contact_id"):
notify_data["contact_id"] = request.POST["contact_id"]
case_enums = self._client.get_all_case_enums()
submission_type_id = submission["type"]["id"]
status_map = case_enums["submission_status_map"]
status_context = status_map.get(str(submission_type_id))
status_id = status_context.get("NO")
error = None
if status_id:
if submission.get("status", {}).get("id") != status_id:
status_response = self._client.set_submission_status(
case_id=case_id,
submission_id=submission_id,
status_id=status_id,
stage_change_if_sufficient=stage_change_if_sufficient,
stage_change_if_deficient=stage_change_if_deficient,
)
self._client.submission_notify(
case_id=case_id,
organisation_id=submission["organisation"]["id"],
submission_id=submission["id"],
values=notify_data,
notice_type=SUBMISSION_NOTICE_TYPE_DEFICIENCY,
)
# reset the submission id to redirect to the new clone if available
if status_response.get("submission"):
submission_id = status_response["submission"]["id"]
return HttpResponse(
json.dumps(
{
"redirect_url": f"/case/{case_id}/submission/{submission_id}/",
}
),
content_type="application/json",
)
# If there's no deficiency state for this submission type, return an error
return HttpResponse(
json.dumps(
{
"error": "No deficiency status for this submission type",
}
),
content_type="application/json",
)
class SubmissionVerifyBaseView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
def get_submission_id(self, case_id=None, organisation_id=None):
submission_id = self.kwargs.get("submission_id")
if not submission_id:
# If this is called from the party page - there is no submission id
# so find from the org/case
submissions = self._client.get_submissions_public(
organisation_id=organisation_id,
case_id=case_id,
fields=json.dumps({"id": 0, "type": {"key": 0}}),
)
for submission in submissions:
if get(submission, "type/key") in ["interest", "application"]:
submission_id = submission.get("id")
break # we only want one reg-of-interest submission
return submission_id
def update_submission_json(self, case_id, submission, params):
regex = r"^deficiency_notice_params_"
deficiency_notice_params = submission.get("deficiency_notice_params") or {}
updated = False
response = None
for param_key in params:
matches = re.split(regex, param_key)
if len(matches) > 1:
value = params[param_key]
updated = updated or (deficiency_notice_params.get(matches[1]) != value)
deficiency_notice_params[matches[1]] = value
if updated:
response = self._client.update_submission(
case_id=case_id,
submission_id=get(submission, "id"),
deficiency_notice_params=to_json(deficiency_notice_params),
)
return response
class SubmissionVerifyViewTasks(SubmissionVerifyBaseView):
"""
Used to verify user and orgs admission to a case
"""
template_name = "cases/verify/submission_verify_tasks.html"
submission_fields = json.dumps(
{
"Submission": {
"id": 0,
"deficiency_notice_params": 0,
"organisation": {
"id": 0,
"name": 0,
},
"contact": {
"name": 0,
"email": 0,
"user": {
"name": 0,
"email": 0,
"id": 0,
"organisation": {
"organisation": {
"id": 0,
"name": 0,
}
},
},
"organisation": {
"id": 0,
"name": 0,
},
},
"case": 0,
"type": 0,
"created_by": 0,
"organisation_case_role_outer": 0,
}
}
)
def get(self, request, case_id, organisation_id, **kwargs):
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
if not submission_id:
return HttpResponse(
json.dumps(
{
"error": "You cannot verify this organisation "
"as they have not yet registered interest in this case.",
}
),
content_type="application/json",
)
submission = self._client.get_submission(
self.case_id, submission_id, fields=self.submission_fields
)
json_data = submission.get("deficiency_notice_params") or {}
organisation = submission.get("organisation")
caserole = self._client.get_organisation_case_role(
case_id=case_id, organisation_id=get(submission, "organisation/id")
)
org_matches = self._client.get_organisation_matches(organisation_id, with_details="none")
return render(
request,
self.template_name,
{
"submission": submission,
"organisation": organisation,
"caserole": caserole,
"org_matches": org_matches,
"page_data": {
"submission": submission,
"organisation": organisation,
},
},
)
class SubmisisonVerifyEditLoaView(SubmissionVerifyBaseView):
def get(self, request, case_id, organisation_id):
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
submission = self._client.get_submission(case_id, submission_id)
organisation = self._client.get_organisation(
case_id=case_id, organisation_id=organisation_id
)
documents = self.get_documents(submission)
caserole = self._client.get_organisation_case_role(
case_id=self.case_id, organisation_id=organisation_id
)
org_contacts = self._client.get_organisation_contacts(
organisation_id, case_id, exclude_indirect=True
)
return render(
request,
"cases/verify/loa.html",
{
"auth_contacts": org_contacts,
"organisation": organisation,
"documents": documents,
"LOA": caserole.get("auth_contact"),
"submission": submission,
},
)
def post(self, request, case_id, organisation_id, *args, **kwargs):
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
submission = self._client.get_submission(case_id, submission_id)
self.update_submission_json(case_id, submission, request.POST)
result = self._client.set_organisation_case_role_loa(
case_id,
organisation_id,
pluck(
request.POST,
["LOA_contact_id", "name", "email", "address", "org_name", "phone"],
),
)
return HttpResponse(json.dumps(result))
class SubmisisonVerifyOrganisation(SubmissionVerifyBaseView):
enable_merge = False
def get(self, request, case_id, organisation_id):
test_org_id = request.GET.get("org_id") or organisation_id
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
submission = self._client.get_submission(case_id, submission_id)
organisation = self._client.get_organisation(case_id=case_id, organisation_id=test_org_id)
if self.enable_merge:
org_matches = self._client.get_organisation_matches(test_org_id, with_details=True)
else:
org_matches = self._client.get_organisation_matches(test_org_id, with_details=False)
org_matches.sort(
key=lambda m: 1 if m.get("id") == test_org_id else 0
) # put the actual match at the end
matches = decorate_orgs(org_matches, test_org_id, exclude_case_id=case_id)
for match in matches:
if str(match.get("id")) == str(organisation.get("id")):
organisation.update(match)
return render(
request,
"cases/verify/merge_org.html" if self.enable_merge else "cases/verify/verify_org.html",
{
"case_id": self.case_id,
"organisation": organisation,
"match_list": matches,
"representing": test_org_id != organisation_id,
"json_data": submission.get("deficiency_notice_params"),
},
)
def post(self, request, case_id, organisation_id, *args, **kwargs):
test_org_id = request.POST.get("org_id") or organisation_id
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
submission = self._client.get_submission(case_id, submission_id)
verify = request.POST.get("deficiency_notice_params_org_verify")
if verify == "verified":
self._client.verify_caserole(
case_id=case_id, organisation_id=get(submission, "organisation/id")
)
elif verify == "rejected":
result = self._client.reject_organisation(case_id, organisation_id)
result = self.update_submission_json(case_id, submission, request.POST)
return HttpResponse(json.dumps({"result": True}))
class SubmissionVerifyAccept(SubmissionVerifyBaseView):
def get(self, request, case_id, organisation_id):
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
submission = self._client.get_submission(case_id, submission_id)
organisation = self._client.get_organisation(
case_id=case_id, organisation_id=organisation_id
)
caserole = self._client.get_organisation_case_role(
case_id=self.case_id, organisation_id=organisation_id
)
roles = self._client.get_case_roles(
exclude=[
CASE_ROLE_APPLICANT,
CASE_ROLE_AWAITING_APPROVAL,
CASE_ROLE_REJECTED,
CASE_ROLE_PREPARING,
]
)
return render(
request,
"cases/verify/accept.html",
{
"submission": submission,
"organisation": organisation,
"roles": roles,
"caserole": caserole,
"role_name": get(caserole, "role/name"),
},
)
def post(self, request, case_id, organisation_id, *args, **kwargs):
role_key = request.POST.get("role_key")
result = {}
result = self._client.set_organisation_case_role(
case_id, organisation_id, role_key, pluck(request.POST, ["approve"])
)
return HttpResponse(json.dumps(result))
class SubmissionVerifyNotify(SubmissionVerifyBaseView):
def get(self, request, case_id, organisation_id):
caserole = self._client.get_organisation_case_role(
case_id=self.case_id, organisation_id=organisation_id
)
role_name = get(caserole, "role/name")
action = (
"reject" if get(caserole, "role/key") == "rejected" else "accept"
) # Todo: get this from the right place
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
submission = self._client.get_submission(case_id, submission_id)
case = self._client.get_case(case_id)
contact = submission_contact(submission)
organisation = self._client.get_organisation(
case_id=case_id, organisation_id=organisation_id
)
notify_key = (
"NOTIFY_INTERESTED_PARTY_REQUEST_PERMITTED"
if action == "accept"
else "NOTIFY_INTERESTED_PARTY_REQUEST_DENIED"
)
try:
notification_template = self._client.get_notification_template(notify_key)
values = self._client.create_notify_context(
{
"full_name": contact.get("name"),
"case_name": case.get("name"),
"case_number": case.get("reference"),
"company_name": organisation["name"],
"login_url": public_login_url(),
"role": role_name,
}
)
parsed_template = parse_notify_template(notification_template["body"], values)
        except Exception:
            # fall back to an empty preview if the notify template cannot be built
            parsed_template = ""
# contacts for the notification contact selector
contacts = organisation.get("contacts", [])
user = self._client.get_user(get(submission, "created_by/id"))
contacts.append(user.get("contact"))
return render(
request,
"cases/verify/notify.html",
{
"parsed_template": parsed_template,
},
)
def post(self, request, case_id, organisation_id, *args, **kwargs):
submission_id = self.get_submission_id(case_id=case_id, organisation_id=organisation_id)
self._client.approve_submission(submission_id=submission_id)
return HttpResponse(json.dumps({"result": True}))
class SubmissionNotifyView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
raise_exception = True
def get(self, request, case_id, submission_id, *args, **kwargs):
case = self._client.get_case(case_id)
submission = self._client.get_submission(case_id, submission_id)
json_data = from_json(submission.get("deficiency_notice_params"))
contact = None
contact_name = None
send_to = json_data.get("send_to")
if not send_to:
contact = submission_contact(submission)
contact_name = contact and contact.get("name")
submission_type = submission["type"]
notify_sys_param_name = submission_type.get("notify_template") or "NOTIFY_QUESTIONNAIRE"
notification_template = self._client.get_notification_template(notify_sys_param_name)
template_name = f"cases/submissions/{submission_type['key']}/notify.html"
due_at = get_submission_deadline(submission, settings.FRIENDLY_DATE_FORMAT)
case_number = case["reference"]
email = notify_contact_email(self._client, case_number)
footer = notify_footer(self._client, email)
values = {
"full_name": contact_name,
"case_number": case_number,
"case_name": case["name"],
"investigation_type": case["type"]["name"],
"country": case["sources"][0]["country"] if case["sources"] else "N/A",
"company_name": submission["organisation"].get("name"),
"deadline": due_at or "No deadline assigned",
"login_url": public_login_url(),
"description": submission.get("description"),
"submission_request_name": submission.get("name"),
"notice_type": submission.get("type", {}).get("name"),
"notice_url": submission["url"],
"notice_of_initiation_url": submission["url"],
"footer": footer,
}
template_list = []
if send_to:
for case_role, participant_list in (
self._client.get_case_participants(case_id) or {}
).items():
for participant in participant_list.get("parties"):
if participant.get("id") in send_to:
contact = participant.get("primary_contact")
if contact:
local_values = {
"full_name": contact.get("name"),
"email": contact.get("email"),
"company_name": participant.get("name"),
}
values.update(local_values)
template_list.append(
{
"values": local_values,
"preview": parse_notify_template(
notification_template["body"], values
),
}
)
        else:
            # single recipient: append one preview entry in the same shape as above
            template_list.append(
                {
                    "values": values,
                    "preview": parse_notify_template(notification_template["body"], values),
                }
            )
context = {
"form_action": f"/case/{case_id}/submission/{submission_id}/notify/",
"form_title": f"Invite {contact_name}",
"cancel_redirect_url": f"/case/{case_id}/submission/{submission_id}/",
"editable_fields": { # leaving one as an example
                # 'full_name': {'title': 'Full name', 'disabled': True},
},
"notification_template": notification_template,
"templates": template_list,
"submission": submission,
"case_id": str(case_id),
"contact": contact,
"values": values,
}
return render(request, template_name, context)
def post(self, request, case_id, submission_id, *args, **kwargs):
submission = self._client.get_submission(case_id, submission_id)
notify_keys = ["full_name", "product", "submission_request_name", "description"]
notify_data = {key: request.POST.get(key) for key in notify_keys if key in request.POST}
due_at = get_submission_deadline(submission, settings.FRIENDLY_DATE_FORMAT)
notify_data["deadline"] = due_at or "No deadline assigned"
if request.POST.get("multiple"):
return self.post_multiple(request, case_id, submission, context=notify_data)
self._client.submission_notify(
case_id=case_id,
organisation_id=submission["organisation"]["id"],
submission_id=submission["id"],
values=notify_data,
notice_type=SUBMISSION_NOTICE_TYPE_INVITE,
)
return HttpResponse(
json.dumps(
{
"redirect_url": f"/case/{case_id}/submission/{submission_id}/",
"error": None,
}
),
content_type="application/json",
)
def post_multiple(self, request, case_id, submission, context=None):
"""
        Called to handle a notify post to multiple recipients.
        We must clone the submission for each target and send the notification.
"""
case = self._client.get_case(case_id)
json_data = from_json(submission.get("deficiency_notice_params"))
send_to = json_data.get("send_to")
# We need to know which is the last party in the list
# so we can modify the existing sub rather than clone it.
party_counter = len(send_to)
for case_role, participant_list in (
self._client.get_case_participants(case_id) or {}
).items():
for participant in participant_list.get("parties"):
if participant.get("id") in send_to:
contact = participant.get("primary_contact")
party_counter -= 1
if contact: # don't try to send if there is no contact
data = {
"case_id": case_id,
"submission_id": submission["id"],
"organisation_id": participant.get("id"),
"contact_id": contact.get("id"),
}
if party_counter:
cloned_submission = self._client.clone_submission(**data)
else:
cloned_submission = self._client.update_submission(**data).get(
"submission"
)
context["full_name"] = contact.get("full_name")
self._client.submission_notify(
case_id=case_id,
organisation_id=participant.get("id"),
submission_id=cloned_submission["id"],
values=context or {},
notice_type=SUBMISSION_NOTICE_TYPE_INVITE,
)
return HttpResponse(
json.dumps(
{
"alert": f'Sent {len(send_to)} request{"" if len(send_to) < 2 else "s"}',
"redirect_url": f'/case/{case_id}/submission/{submission.get("id")}/'
if len(send_to) < 2
else f"/case/{case_id}/submissions/",
"error": None,
}
),
content_type="application/json",
)
class OrganisationDetailsView(LoginRequiredMixin, View, TradeRemediesAPIClientMixin):
def get(self, request, case_id, organisation_id, *args, **kwargs):
client = self.client(request.user)
item = request.GET.get("item")
template = request.GET.get("template")
result = {}
case_submissions = client.get_submissions(case_id)
idx_submissions = deep_index_items_by(case_submissions, "organisation/id")
org_id = str(organisation_id)
third_party_contacts = []
if item == "contacts":
contacts = client.get_organisation_contacts(org_id, case_id)
for contact in contacts:
case = get(contact, "cases/" + str(case_id)) or {}
contact["primary"] = case.get("primary")
all_case_invites = client.get_contact_case_invitations(case_id)
if org_id in idx_submissions:
org_submission_idx = deep_index_items_by(idx_submissions[org_id], "id")
third_party_contacts = self.get_third_party_contacts(
org_id, org_submission_idx, all_case_invites
)
# `contacts` may also contain on-boarded third-party contacts that
# have a user, so we need to prune these out.
                third_party_contact_ids = {i["id"] for i in third_party_contacts}
                contacts = list(
                    itertools.filterfalse(
                        lambda x: x["id"] in third_party_contact_ids, contacts
                    )
                )
result = {
"contacts": contacts,
"pre_release_invitations": client.get_system_boolean("PRE_RELEASE_INVITATIONS"),
"invites": deep_index_items_by(all_case_invites, "contact/id"),
"third_party_contacts": third_party_contacts,
"case_role_id": request.GET.get("caserole"),
}
elif item == "submissions":
result["submissions"] = idx_submissions.get(org_id, [])
elif item == "details":
result["party"] = client.get_organisation(organisation_id=organisation_id)
if template:
deep_update(
result,
{
"case_id": case_id,
"case": {"id": case_id},
"organisation": {"id": org_id},
},
)
return render(request, template, result)
return HttpResponse(json.dumps({"result": result}), content_type="application/json")
@staticmethod
def get_third_party_contacts(organisation_id, submissions, invites):
"""Get third party contacts.
Given an organisation, its submissions and all invitations for a case,
        build a list of third party invite contacts. Invite submissions that are not
        yet approved are included, but the contact is flagged via `submission_sufficient`.
:param (str) organisation_id: Organisation ID.
:param (dict) submissions: The organisation's submissions keyed on id.
:param (list) invites: All invites for a case.
:returns (list): Contacts arising from 3rd party invite submissions.
"""
third_party_contacts = []
for invite in invites:
if invite["submission"]:
submission_id = invite["submission"]["id"]
full_submission = submissions.get(submission_id)
if not full_submission:
# Submission not at this org
continue
if full_submission[0]["type"]["id"] != SUBMISSION_TYPE_THIRD_PARTY:
# Not a third party submission
continue
inviting_organisation = full_submission[0]["organisation"]["id"]
if inviting_organisation == organisation_id:
submission_sufficient = full_submission[0]["status"]["sufficient"]
invite["contact"]["is_third_party"] = True
invite["contact"]["submission_id"] = submission_id
invite["contact"]["submission_sufficient"] = submission_sufficient
invite["contact"]["invited"] = invite["email_sent"]
third_party_contacts.append(invite["contact"])
return third_party_contacts
class CaseOrganisationView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "organisations/organisation_in_case.html"
def add_page_data(self):
organisation = self._client.get_organisation(organisation_id=self.organisation_id)
caserole = None
case_submissions = self._client.get_submissions_public(self.case_id, self.organisation_id)
idx_submissions = deep_index_items_by(case_submissions, "organisation/id")
submissions = idx_submissions.get(str(self.organisation_id), [])
roi_app_submission = next(
filter(lambda x: get(x, "type/key") in ["interest", "application"], submissions),
None,
)
cases = self._client.organisation_cases(self.organisation_id)
user_cases = self._client.organisation_user_cases(self.organisation_id)
cases_idx = deep_index_items_by_exists(cases, "archived_at")
for case in cases:
if get(case, "id") == str(self.case_id):
caserole = case
invites = self._client.get_contact_case_invitations(
self.case_id,
)
return {
"case": self.case,
"invites": invites,
"party": organisation,
"organisation": organisation,
"cases_idx": cases_idx,
"submissions": submissions,
"user_cases": user_cases,
"roi_app_submission": roi_app_submission,
"caserole": caserole,
}
class OrganisationMatchView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/organisation_dedupe.html"
def add_page_data(self):
organisation = self._client.get_organisation(
organisation_id=self.organisation_id, case_id=self.case_id
)
org_matches = self._client.get_organisation_matches(self.organisation_id)
org_matches = decorate_orgs(org_matches, self.organisation_id)
return {
"case": self.case,
"organisation": organisation,
"org_matches": org_matches,
}
class FilesView(CaseBaseView):
"""
View all case documents
"""
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/files.html"
def add_page_data(self):
tab = self.request.GET.get("tab", "respondent")
sort = self.request.GET.get("sort")
direction = self.request.GET.get("dir", "asc")
submission_id = self.request.GET.get("submission_id")
collapse_identical = self.request.GET.get("collapse_identical", "false") in (
"true",
"1",
"Y",
)
tabs = {
"tabList": [
{"label": "Respondent", "value": "respondent"},
{"label": "Investigator", "value": "investigator"},
],
"value": tab,
}
case_enums = self._client.get_all_case_enums(direction=DIRECTION_TRA_TO_PUBLIC)
case_files = self._client.get_case_documents(
case_id=self.case_id,
source=tab,
submission_id=submission_id,
order_by=sort,
order_dir=direction,
)
submission = None
if submission_id:
submission = self._client.get_submission(self.case_id, submission_id)
return {
"tabs": tabs,
"tab": tab,
"case_enums": case_enums,
"file_list": case_files,
"sort": sort,
"dir": direction,
"collapse_identical": collapse_identical,
"submission": submission,
"pre_document_search": self._client.get_system_boolean("PRE_DOCUMENT_SEARCH"),
}
def post(self, request, case_id, *args, **kwargs):
action = request.POST.get("action")
name = request.POST.get("name")
confirm = request.POST.get("confirm") == "true"
tab = request.POST.get("tab", "respondent")
document_ids = request.POST.getlist("document_id")
if document_ids:
if action == "issue" and confirm:
submission_type_id = request.POST.get("submission_type_id")
response = self._client.issue_documents_to_case(
case_id=case_id,
name=name,
document_ids=document_ids,
submission_type_id=submission_type_id,
)
elif action == "confidential":
response = self._client.toggle_documents_confidentiality(
case_id=case_id, document_ids=document_ids
)
return redirect(f"/case/{case_id}/files/?tab={tab}")
class FileBrowseView(View, TradeRemediesAPIClientMixin):
def get(self, request, case_id, *args, **kwargs):
_client = self.client(request.user)
case_files = _client.get_case_documents(case_id=case_id, source="investigator")
# Add application bundle documents
case_files.extend(_client.get_system_documents())
return HttpResponse(json.dumps(case_files), content_type="application/json")
class WorkflowEditor(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
permission_required = ("workflow_editor",)
template_name = "cases/workflow_editor.html"
def add_page_data(self):
case_workflow = self._client.get_case_workflow(self.case_id)
return {
"workflow": case_workflow.get("workflow"),
"state": case_workflow.get("state"),
}
def post(self, request, case_id, *args, **kwargs):
workflow = request.POST.get("workflow")
self._client.save_case_workflow(case_id, workflow)
return HttpResponse(json.dumps({"saved": 1}), content_type="application/json")
class ActionsView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/actions.html"
def add_page_data(self):
permissions = {}
for permission_key in self.request.user.permissions:
permissions[permission_key] = 1
case_workflow = self._client.get_case_workflow(self.case_id)
return {
"workflow": case_workflow.get("workflow"),
"state": case_workflow.get("state"),
"permissions": permissions,
}
class StateView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/action.html"
def post(self, request, case_id, state_key=None, *args, **kwargs):
value = request.POST.get(state_key)
state_map = self._client.set_case_workflow_state(case_id, [state_key], {state_key: value})
return HttpResponse(
json.dumps({"workflow_state": state_map}), content_type="application/json"
)
class ActionView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/action.html"
def get_state_from_children(self, item):
any_mode = item.get("required") # this is a bodge and the logic is reverse
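        # Two roll-up modes: with item["required"] truthy the parent is "complete"
        # once ANY child is complete; otherwise every child that has a value must be
        # complete. Any child value at all makes the parent at least "in-progress".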
state = None
completed = False if any_mode else True
for child in item.get("children", []):
value = self.get_value(child.get("key"))
if value:
state = state or "in-progress"
if any_mode:
if value == "complete":
completed = True
else:
if value != "complete":
completed = False
return "complete" if state and completed else state
state_map = {}
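    # Judging by the [0] indexing below, workflow state values are stored as
    # single-element lists; get_value/set_value hide that wrapping.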
def get_value(self, key):
return (self.state_map.get(key) or [""])[0]
def set_value(self, key, value):
arr = self.state_map.get(key) or [""]
arr[0] = value
self.state_map[key] = arr
def post(self, request, case_id, action_id=None, *args, **kwargs): # noqa: C901
values = {}
node_keys = []
action_key = request.POST.get("action-key")
btn_action = request.POST.get("btn_action")
complete = True
error = False
state = ""
wf = self._client.get_case_workflow(case_id)
workflow = wf.get("workflow")
self.state_map = wf.get("state")
index = key_by(workflow["root"], "key", "children")
action = index.get(action_key.lower(), {})
for task in action.get("children", []):
response_type = task.get("response_type", {}).get("name", "")
if response_type.lower() not in (
"notesection",
"timer",
"label",
): # notes don't count as in-progress
task_key = task.get("key")
old_val = self.get_value(task_key)
new_val = request.POST.get(task_key)
if old_val != new_val:
values[task_key] = new_val
node_keys.append(task_key)
if not new_val:
if task.get("required"):
complete = False
else:
if new_val != "na":
state = "in-progress"
if complete:
state = "complete"
if (self.get_value(action_key) or "") != state:
values[action_key] = state
node_keys.append(action_key)
self.set_value(action_key, state)
# ripple the state down the tree
loc_action = action
while loc_action.get("parent_key"):
loc_action = index.get(loc_action.get("parent_key"))
loc_key = loc_action.get("key")
loc_state = self.get_state_from_children(loc_action)
if (self.get_value(loc_key) or "") != loc_state:
values[loc_key] = loc_state
node_keys.append(loc_key)
self.set_value(loc_key, loc_state)
if any(values):
self.state_map = self._client.set_case_workflow_state(case_id, node_keys, values)
if error:
action_id = action.get("id")
return redirect(f"/case/{case_id}/action/{action_id}")
else:
return HttpResponse(
json.dumps({"workflow_state": self.state_map}),
content_type="application/json",
)
class NavSectionView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/nav_section.html"
def post(self, request, case_id, *args, **kwargs):
content_id = kwargs.get("nav_section_id")
response = self._client.set_case_content(
case_id, content_id=content_id, content=request.POST
)
content_id = response.get("id")
return redirect(f"/case/{case_id}/section/{content_id}")
def add_page_data(self):
return {}
class AuditView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/audit.html"
def add_page_data(self):
milestone = self.request.GET.get("milestone", "true") == "true"
limit = int(self.request.GET.get("limit", self.limit))
audit_data = self._client.get_audit(
case_id=self.case_id, start=self.start, limit=limit, milestone=milestone
)
url = reverse("case_audit", kwargs={"case_id": self.case_id})
prev_url = next_url = None
prev_page = max(0, self.start - limit)
milestone_flag = f"milestone={milestone}".lower()
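        # A full page implies there may be more events, so expose a "next" link;
        # a "previous" link is added once we are beyond the first page.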
if len(audit_data) >= limit:
next_page = max(0, self.start + limit)
next_url = f"{url}?{milestone_flag}&start={next_page}"
if next_page > limit:
prev_url = f"{url}?{milestone_flag}&start={prev_page}"
self.start = next_page
else:
self.start = prev_page + len(audit_data)
if prev_page:
prev_url = f"{url}?{milestone_flag}&start={prev_page}"
return {
"milestone": milestone,
"events": audit_data,
"next_url": next_url,
"prev_url": prev_url,
}
class CaseAuditExport(LoginRequiredMixin, View, TradeRemediesAPIClientMixin):
groups_required = SECURITY_GROUPS_TRA
def get(self, request, case_id, *args, **kwargs):
file = self.client(request.user).get_audit_export(case_id)
response = HttpResponse(file, content_type="application/vnd.ms-excel")
response["Content-Disposition"] = "attachment; filename=trade_remedies_export.xlsx"
return response
class NoteView(LoginRequiredMixin, View, TradeRemediesAPIClientMixin):
groups_required = SECURITY_GROUPS_TRA
def get(
self,
request,
case_id,
content_type=None,
model_id=None,
model_key=None,
*args,
**kwargs,
):
notes = self.client(request.user).get_notes(
case_id, content_type, model_id, model_key=model_key
)
return HttpResponse(json.dumps(notes), content_type="application/json")
def post(self, request, case_id, note_id=None, *args, **kwargs): # noqa: C901
entity_id = request.POST.get("model_id")
model_key = request.POST.get("model_key")
content_type = request.POST.get("content_type")
client = self.client(request.user)
content = request.POST.get("content")
if note_id is None:
result = client.create_note(
case_id=case_id,
content_type=content_type,
model_id=entity_id,
model_key=model_key,
note_text=content,
)
note_id = result.get("id")
else:
delete_list = request.POST.getlist("delete_list")
if delete_list:
for document_id in delete_list:
deleted = client.delete_note_document(case_id, note_id, document_id)
conf_list = request.POST.getlist("set_confidential")
if conf_list:
for document_id in conf_list:
result = client.update_note_document(
case_id, note_id, document_id, "confidential"
)
nonconf_list = request.POST.getlist("set_non-confidential")
if nonconf_list:
for document_id in nonconf_list:
result = client.update_note_document(
case_id, note_id, document_id, "non-confidential"
)
result = client.update_note(case_id, note_id, content)
file_meta = request.POST.getlist("file-meta")
files = request.FILES.getlist("files")
for idx, _file in enumerate(files):
try:
_file.readline() # Important, will raise VirusFoundInFileException if infected
except VirusFoundInFileException:
# Display a fake doc in the widget until
# a poll for success clears it
msg = "File upload aborted: malware detected in file!"
document = {
"name": msg,
"safe": False,
}
result["documents"].append(document)
else:
document = {
"document_name": _file.original_name,
"name": _file.name,
"size": _file.file_size,
}
result = client.add_note_document(
case_id=case_id,
note_id=note_id,
document=json.dumps(document),
confidentiality=file_meta[idx],
)
redirect_url = request.POST.get("redirect")
if redirect_url:
return internal_redirect(redirect_url, "/")
else:
# Return note json to be rendered at the client
return HttpResponse(json.dumps(result), content_type="application/json")
class PublicFileView(CaseBaseView):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/public_file.html"
def add_page_data(self):
tab = self.request.GET.get("tab", "all")
tabs = {
"tabList": [
{"label": "All", "value": "all"},
{"label": "Notices", "value": "tra"},
{"label": "Business", "value": "business"},
{"label": "Withdrawn", "value": "withdrawn"},
],
"value": tab,
}
case_submissions = self._client.get_submissions(self.case_id, show_global=True)
by_tra = deep_index_items_by_exists(case_submissions, "is_tra")
tra_by_published = deep_index_items_by_exists(by_tra.get("true"), "issued_at")
by_published = deep_index_items_by_exists(case_submissions, "issued_at")
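        # Tabs slice the indexed submissions: "all" = everything issued, "tra" = issued
        # TRA notices, "business" = issued party submissions, "withdrawn" = TRA
        # submissions that are no longer issued.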
if tab == "all":
submissions = by_published.get("true")
if tab == "tra":
submissions = deep_index_items_by(by_published.get("true"), "is_tra").get("true")
if tab == "business":
submissions = deep_index_items_by(by_published.get("true"), "is_tra").get("")
if tab == "withdrawn":
submissions = deep_index_items_by(by_published.get("false"), "is_tra").get("true")
return {
"tabs": tabs,
"submissions": submissions,
"public_base_url": settings.PUBLIC_BASE_URL,
}
class CaseFormView(LoginRequiredMixin, TemplateView, TradeRemediesAPIClientMixin):
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/case_form.html"
def get_context(self, client, case_id=None):
if case_id:
case = client.get_case(case_id)
else:
case = {
"new": True,
"id": "",
"organisation": {"id": ""},
"type": {"id": "1"},
}
enums = client.get_all_case_enums()
gov_bodies = client.get_organisations(gov_body=True)
country_dict = {}
for country in countries:
country_dict[country[0]] = country[1]
context = {
"body_classes": "full-width",
"case": case,
"organisations": gov_bodies,
"country_dict": country_dict,
"organisation_name": case.get("organisation", {}).get("name") or "Secretary of State",
"contact_country": "GB",
"submission": {"type": {"id": 4}},
"tra_team_names": [
settings.ORGANISATION_NAME,
settings.ORGANISATION_INITIALISM + " Team 1",
settings.ORGANISATION_INITIALISM + " Team 2",
settings.ORGANISATION_INITIALISM + " Team 3",
],
}
context.update(enums)
# context['countries'] = countries[0]
return context
def get(self, request, case_id=None, *args, **kwargs):
client = self.client(request.user)
context = self.get_context(client, case_id)
return render(request, self.template_name, context)
def post(self, request, case_id=None, *args, **kwargs):
post_data = {
"id": case_id,
}
non_required_fields = [
"submission_status_id",
"case_name",
"organisation_name",
"organisation_id",
# 'organisation_address', 'organisation_post_code', 'companies_house_id',
# 'contact_name', 'contact_email', 'contact_phone', 'contact_address',
# 'contact_country',
]
error_lookup = {
"case_type_id": "Case type",
"product_name": "Product name",
"submission_type_id": "Submission type",
"sector_id": "Product sector",
"product_description": "Product description",
"export_country_code": "Export country",
"hs_code": "Product code",
}
required_fields = list(error_lookup.keys())
list_fields = ["export_country_code", "hs_code"]
case_fields = required_fields + non_required_fields
errors = {}
client = self.client(request.user)
if request.POST.get("case_type_id") in ALL_REGION_ALLOWED_TYPE_IDS:
required_fields.remove("export_country_code")
for field in case_fields:
post_data[field] = (
compact_list(request.POST.getlist(field))
if field in list_fields
else request.POST.get(field)
)
for field in required_fields:
if field in error_lookup and not post_data.get(field):
fieldname = error_lookup.get(field)
errors[field] = f"{fieldname} is required"
        for code in post_data.get("hs_code"):
if len(str(code)) not in (6, 7, 8, 9, 10): # temporary validation
errors["hs_code"] = "HS codes should be between 6 and 10 digits"
if not errors:
post_data["ex_oficio"] = True
result = client.submit_full_case_data(post_data)
return redirect("/cases/")
else:
context = self.get_context(client, case_id)
context["errors"] = errors
context.update(post_data)
return render(request, self.template_name, context)
class InviteContactView(CaseBaseView):
"""
Invite a contact to the case
"""
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/invite.html"
raise_exception = True
def get_organisation_admin_user_contact(self, organisation_id):
contact = None
organisation = self._client.get_organisation(organisation_id)
admin_user = [
user
for user in organisation.get("users", [])
if user.get("security_group") == SECURITY_GROUP_ORGANISATION_OWNER
]
if admin_user:
user = self._client.get_user(admin_user[0]["user_id"])
contact = user.get("contact")
contact["organisation"] = organisation
return contact
def add_page_data(self):
contact = None
organisation = None
if self.kwargs.get("organisation_id"):
organisation = self._client.get_organisation(self.kwargs.get("organisation_id"))
if self.kwargs.get("contact_id"):
contact = self._client.get_contact(self.kwargs["contact_id"])
form_url = f"/case/{self.case['id']}/invite/{self.kwargs['contact_id']}/as/{self.kwargs['case_role_id']}/" # noqa: E501
if organisation:
form_url = f"{form_url}for/{organisation['id']}/"
elif self.kwargs.get("organisation_id"):
contact = self.get_organisation_admin_user_contact(self.kwargs["organisation_id"])
form_url = f"/case/{self.case['id']}/invite/organisation/{self.kwargs['organisation_id']}/as/{self.kwargs['case_role_id']}/" # noqa: E501
if not organisation:
organisation = contact["organisation"]
notification_template = self._client.get_notification_template(
"NOTIFY_INFORM_INTERESTED_PARTIES"
)
deep_update(
self.case,
self._client.get_case(
self.case_id,
fields=json.dumps(
{
"Case": {
"latest_notice_of_initiation_url": 0,
"registration_deadline": 0,
"product": 0,
}
}
),
),
)
case_number = self.case["reference"]
email = notify_contact_email(self._client, case_number)
footer = notify_footer(self._client, email)
values = {
"full_name": contact["name"],
"product": get(self.case, "product/name"),
"case_number": case_number,
"case_name": self.case["name"],
"notice_of_initiation_url": self.case.get("latest_notice_of_initiation_url"),
"company_name": organisation["name"],
"deadline": parse_api_datetime(
get(self.case, "registration_deadline"), settings.FRIENDLY_DATE_FORMAT
),
"footer": footer,
"guidance_url": self._client.get_system_parameters("LINK_HELP_BOX_GUIDANCE")["value"],
"email": email,
"login_url": f"{settings.PUBLIC_BASE_URL}",
}
context = {
"form_url": form_url,
"editable_fields": ["full_name", "product"],
"case": self.case,
"contact": contact,
"case_role_id": self.kwargs["case_role_id"],
"parsed_template": parse_notify_template(notification_template["body"], values),
"values": values,
"organisation": organisation,
"organisation_id": self.kwargs.get("organisation_id"),
}
return context
def post(
self,
request,
contact_id=None,
case_id=None,
case_role_id=None,
organisation_id=None,
*args,
**kwargs,
):
notify_keys = ["full_name", "product"]
notify_data = {key: request.POST.get(key) for key in notify_keys}
if organisation_id and contact_id:
notify_data["organisation_id"] = organisation_id
elif organisation_id and not contact_id:
contact = self.get_organisation_admin_user_contact(organisation_id)
contact_id = contact["id"]
response = self._client.invite_contact(case_id, contact_id, case_role_id, notify_data)
return HttpResponse(json.dumps(response), content_type="application/json")
class IssueFilesFormView(CaseBaseView):
"""
    Issue files to the case
"""
groups_required = SECURITY_GROUPS_TRA
template_name = "widgets/issue_files_form.html"
def add_page_data(self):
case_enums = self._client.get_all_case_enums()
return {
"case_enums": case_enums,
"case": self.case,
}
class CaseBundlesView(CaseBaseView):
"""
Assign documents to the case directly (not via submissions)
"""
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/case_bundles.html"
def add_page_data(self):
list_mode = self.request.GET.get("tab", "live")
tabs = {
"value": list_mode,
"tabList": [
{"label": "Live", "value": "live", "sr_text": "Show live bundles"},
{"label": "Draft", "value": "draft", "sr_text": "Show draft bundles"},
],
}
case_bundles = self._client.get_case_submission_bundles(
case_id=self.case["id"],
status=list_mode.upper(),
)
return {
"bundles": case_bundles,
"error": self.kwargs.get("error"),
"tabs": tabs,
"status": list_mode,
}
@method_decorator(csrf_exempt, name="dispatch")
class CaseBundleView(CaseBaseView):
"""
View and edit a specific bundle full of documents
"""
groups_required = SECURITY_GROUPS_TRA
template_name = "cases/case_bundle_builder.html"
def add_page_data(self):
case_enums = self._client.get_all_case_enums()
bundle = None
bundle_id = self.kwargs.get("bundle_id")
virus = self.request.GET.get("virus")
upload_error = self.request.GET.get("upload_error")
return_data = {
"virus": virus,
"upload_error": upload_error,
}
if bundle_id:
bundle = self._client.get_case_submission_bundles(
case_id=self.case["id"], bundle_id=self.kwargs.get("bundle_id")
)
return_data.update(
{
"bundle": bundle,
"submission_types": case_enums["submission_types"],
}
)
return return_data
def post(self, request, case_id, bundle_id=None, *args, **kwargs): # noqa: C901
name = request.POST.get("name")
data = pluck(request.POST, ["name", "description"])
btn_value = request.POST.get("btn-value")
if btn_value == "send":
data["status"] = "LIVE"
# Upload documents
if bundle_id:
meta_raw = request.POST.getlist("meta")
meta = [json.loads(block) for block in meta_raw]
file_details = deep_index_items_by(meta, "name")
for _file in request.FILES.getlist("files"):
try:
_file.readline() # Important, will raise VirusFoundInFileException if infected
original_file_name = _file.original_name
details = file_details.get(original_file_name.lower())[0]
confidential = details.get("confidential")
document_type = details.get("submission_document_type")
document = self._client.upload_document(
case_id=str(case_id),
data={
"bundle_id": bundle_id,
"confidential": confidential,
"submission_document_type": document_type,
"document_name": original_file_name,
"file_name": _file.name,
"file_size": _file.file_size,
},
)
except (VirusFoundInFileException, APIException) as e:
redirect_url = f"/case/{case_id}/bundle/{bundle_id}/?"
msg = "File upload aborted: "
if isinstance(e, VirusFoundInFileException):
redirect_url += "virus=true"
else:
msg += f"{e}"
redirect_url += f"upload_error={msg}"
logger.warning(f"{msg}")
return HttpResponse(
json.dumps({"redirect_url": redirect_url}),
content_type="application/json",
)
# Attach existing documents to this bundle
if case_files := request.POST.getlist("case_files"):
file_details_by_id = deep_index_items_by(meta, "file/id")
for case_file_id in case_files:
details = (file_details_by_id.get(case_file_id) or [])[0]
document = self._client.attach_document(
case_id=str(case_id),
data={
"bundle_id": bundle_id,
"submission_document_type": details.get("submission_document_type"),
},
document_id=case_file_id,
)
else:
data = pluck(request.POST, ["name", "submission_type_id"])
data["case_id"] = case_id
# Anything else to send?
response = None
if data:
response = self._client.set_case_submission_bundle(bundle_id=bundle_id, data=data)
ret = {"result": "ok", "status": data.get("status")}
response_id = response and response.get("id")
if response_id:
ret["redirect_url"] = f"/case/{case_id}/bundle/{response_id}/"
return HttpResponse(json.dumps(ret), content_type="application/json")
def delete(self, request, case_id, document_id, *args, **kwargs):
response = self._client.delete_case_submission_bundle(case_id, document_id)
return redirect(f"/case/{case_id}/documents/")
class SubmissionInviteNotifyView(CaseBaseView):
"""
    Notify an invitee about an invitation to a case.
"""
groups_required = SECURITY_GROUPS_TRA
raise_exception = True
template_name = "cases/invite.html"
def add_page_data(self):
"""Add page data.
CaseBaseView override.
"""
case_id = self.kwargs.get("case_id")
submission_id = self.kwargs.get("submission_id")
contact_id = self.kwargs.get("contact_id")
case = self._client.get_case(case_id)
submission = self._client.get_submission(case_id, submission_id)
inviting_organisation = submission["organisation"]
invited_contact = self._client.get_contact(contact_id)
inviting_contact = submission.get("contact") or {}
notification_template = self._client.get_notification_template("NOTIFY_THIRD_PARTY_INVITE")
form_url = f"/case/{case_id}/submission/{submission_id}/invite/{contact_id}/notify/"
# Attempt to infer the invite URL
login_url = f"{settings.PUBLIC_BASE_URL}"
invites = self._client.get_invitations(case_id, submission_id)
for i in invites:
if i["contact"]["id"] == str(contact_id):
invite = self._client.get_invite_details(i["id"])
code = invite.get("code")
login_url = f"{login_url}/invitation/{code}/{case_id}/"
break
case_number = case["reference"]
email = notify_contact_email(self._client, case_number)
footer = notify_footer(self._client, email)
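        # Values substituted into the invite notification template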
values = {
"full_name": invited_contact["name"],
"case_name": case["name"],
"invited_by_organisation": inviting_organisation["name"],
"invited_by_name": inviting_contact["name"],
"notice_of_initiation_url": self.case.get("latest_notice_of_initiation_url"),
"login_url": login_url,
"deadline": parse_api_datetime(
get(self.case, "registration_deadline"), settings.FRIENDLY_DATE_FORMAT
),
"footer": footer,
"email": email,
}
context = {
"form_url": form_url,
"notification_template": notification_template,
"submission": submission,
"case": case,
"contact": invited_contact,
"parsed_template": parse_notify_template(notification_template["body"], values),
"values": values,
}
return context
def post(self, request, case_id, submission_id, contact_id, *args, **kwargs):
notify_data = {
"case_id": case_id,
"submission_id": submission_id,
"contact_id": contact_id,
}
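        # Ask the API to action the third-party invite for this contact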
response = self._client.action_third_party_invite(
case_id=case_id,
submission_id=submission_id,
contact_id=contact_id,
params=notify_data,
)
        return HttpResponse(json.dumps(response), content_type="application/json")


class UpdateParentView(CaseBaseView):
template_name = "cases/update_parent.html"
linked_case_confirm_key = "LINKED_CASE_CONFIRM"
cases_fields = json.dumps(
{
"Case": {
"name": 0,
"id": 0,
"reference": 0,
}
}
)
case_fields = json.dumps(
{"Case": {"parent": {"id": 0}, "workflow_state": {linked_case_confirm_key: 0}}}
)
def add_page_data(self):
cases = self._client.get_cases(archived=True, all_cases=False, fields=self.cases_fields)
case = self._client.get_case(self.case_id, fields=self.case_fields)
return {"case": case, "cases": cases}
def post(self, request, case_id, *args, **kwargs):
link_confirm = request.POST.get("link_confirm")
parent_id = request.POST.get("parent_id")
_client = self.client(request.user)
case = _client.get_case(case_id, fields=self.case_fields)
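        # Only update the parent case link if it has changed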
if get(case, "parent/id") != parent_id:
_client.set_case_data(case_id, {"parent_id": parent_id})
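        # Record the link confirmation flag in the case workflow state when it changes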
if (get(case, f"workflow_state/{self.linked_case_confirm_key}") or [0])[0] != link_confirm:
_client.set_case_workflow_state(
case_id, values={f"{self.linked_case_confirm_key}": link_confirm}
)
return HttpResponse(
json.dumps({"parent_id": parent_id, "link_confirm": link_confirm}),
content_type="application/json",
        )


class NoticesView(
LoginRequiredMixin, GroupRequiredMixin, TemplateView, TradeRemediesAPIClientMixin
):
groups_required = SECURITY_GROUPS_TRA_ADMINS
template_name = "cases/notices.html"
def get(self, request):
client = self.client(request.user)
notices = client.get_notices()
return render(
request,
self.template_name,
{
"body_classes": "full-width",
"notices": notices,
},
        )


class NoticeView(LoginRequiredMixin, GroupRequiredMixin, TemplateView, TradeRemediesAPIClientMixin):
groups_required = SECURITY_GROUPS_TRA_ADMINS
template_name = "cases/notice.html"
cases_fields = json.dumps({"Case": {"name": 0, "id": 0, "reference": 0}})
def get(self, request, notice_id=None):
client = self.client(request.user)
enums = client.get_all_case_enums()
case_types = enums.get("case_types", [])
cases = client.get_cases(archived=True, all_cases=False, fields=self.cases_fields)
notice = {}
if notice_id:
notice = client.get_notice(notice_id)
return render(
request,
self.template_name,
{
"body_classes": "full-width",
"notice": notice,
"cases": cases,
"case_types": case_types,
},
)
def post(self, request, notice_id=None):
client = self.client(request.user)
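        # Create a new notice, or update an existing one when notice_id is provided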
notice = client.create_update_notice(
name=request.POST.get("name"),
reference=request.POST.get("reference"),
terminated_at=request.POST.get("terminated_at"),
published_at=request.POST.get("published_at"),
case_type=request.POST.get("case_type_id"),
review_case=request.POST.get("review_case_id"),
notice_id=notice_id,
)
return redirect("/cases/notices/")
class DocumentSearchView(CaseBaseView):
template_name = "documents/documents.html"
def add_page_data(self):
query = self.request.GET.get("query")
conf_status = self.request.GET.get("confidential_status")
user_type = self.request.GET.get("user_type")
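        # Run the document search through the API with the requested filters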
response = self._client.search_documents(
case_id=self.case_id,
query=query,
confidential_status=conf_status,
user_type=user_type,
)
return {
"body_classes": "full-width",
"documents": response.pop("results", []),
"query": query,
"conf_status": conf_status,
**response,
        }


class CaseTeamJsonView(LoginRequiredMixin, View, TradeRemediesAPIClientMixin):
def get(self, request, case_id, **kwargs):
team = self.client(request.user).get_case_team_members(case_id)
return HttpResponse(json.dumps(team), content_type="application/json")
| [
"logging.getLogger",
"core.utils.pluck",
"itertools.filterfalse",
"core.utils.deep_index_items_by",
"django.urls.reverse",
"core.utils.notify_contact_email",
"django.shortcuts.render",
"core.utils.key_by",
"re.split",
"django.http.HttpResponse",
"json.dumps",
"core.utils.to_json",
"core.utils.deep_index_items_by_exists",
"cases.utils.decorate_orgs",
"django.utils.timezone.now",
"django.shortcuts.redirect",
"core.utils.parse_notify_template",
"json.loads",
"core.utils.is_date",
"core.utils.internal_redirect",
"core.utils.index_users_by_group",
"cases.submissions.SUBMISSION_TYPE_HELPERS.get",
"core.utils.public_login_url",
"cases.submissions.get_submission_deadline",
"core.utils.notify_footer",
"core.utils.deep_update",
"django.utils.decorators.method_decorator",
"core.utils.submission_contact",
"core.utils.get"
]
| [((1891, 1918), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1908, 1918), False, 'import logging\n'), ((1933, 2031), 'json.dumps', 'json.dumps', (["{'Organisation': {'id': 0, 'has_non_draft_subs': 0, 'gov_body': 0,\n 'has_roi': 0}}"], {}), "({'Organisation': {'id': 0, 'has_non_draft_subs': 0, 'gov_body': \n 0, 'has_roi': 0}})\n", (1943, 2031), False, 'import json\n'), ((102937, 102983), 'django.utils.decorators.method_decorator', 'method_decorator', (['csrf_exempt'], {'name': '"""dispatch"""'}), "(csrf_exempt, name='dispatch')\n", (102953, 102983), False, 'from django.utils.decorators import method_decorator\n'), ((13054, 13462), 'json.dumps', 'json.dumps', (["{'Case': {'applicant': {'organisation': {'id': 0, 'name': 0,\n 'primary_contact': {'name': 0, 'email': 0, 'phone': 0, 'address': 0,\n 'post_code': 0, 'country': {'name': 0}, 'has_user': 0, 'user': {'id': 0,\n 'organisation': {'id': 0, 'name': 0}}}}}, 'parent': {'id': 0, 'name': 0,\n 'reference': 0, 'type': 0}, 'workflow_state': {'LINKED_CASE_CONFIRM': 0\n }, 'initiated_sequence': 0}}"], {}), "({'Case': {'applicant': {'organisation': {'id': 0, 'name': 0,\n 'primary_contact': {'name': 0, 'email': 0, 'phone': 0, 'address': 0,\n 'post_code': 0, 'country': {'name': 0}, 'has_user': 0, 'user': {'id': 0,\n 'organisation': {'id': 0, 'name': 0}}}}}, 'parent': {'id': 0, 'name': 0,\n 'reference': 0, 'type': 0}, 'workflow_state': {'LINKED_CASE_CONFIRM': 0\n }, 'initiated_sequence': 0}})\n", (13064, 13462), False, 'import json\n'), ((23916, 23982), 'json.dumps', 'json.dumps', (["{'Case': {'applicant': 0, 'product': 0, 'sources': 0}}"], {}), "({'Case': {'applicant': 0, 'product': 0, 'sources': 0}})\n", (23926, 23982), False, 'import json\n'), ((53700, 54073), 'json.dumps', 'json.dumps', (["{'Submission': {'id': 0, 'deficiency_notice_params': 0, 'organisation': {\n 'id': 0, 'name': 0}, 'contact': {'name': 0, 'email': 0, 'user': {'name':\n 0, 'email': 0, 'id': 0, 'organisation': {'organisation': {'id': 0,\n 'name': 0}}}, 'organisation': {'id': 0, 'name': 0}}, 'case': 0, 'type':\n 0, 'created_by': 0, 'organisation_case_role_outer': 0}}"], {}), "({'Submission': {'id': 0, 'deficiency_notice_params': 0,\n 'organisation': {'id': 0, 'name': 0}, 'contact': {'name': 0, 'email': 0,\n 'user': {'name': 0, 'email': 0, 'id': 0, 'organisation': {\n 'organisation': {'id': 0, 'name': 0}}}, 'organisation': {'id': 0,\n 'name': 0}}, 'case': 0, 'type': 0, 'created_by': 0,\n 'organisation_case_role_outer': 0}})\n", (53710, 54073), False, 'import json\n'), ((110832, 110890), 'json.dumps', 'json.dumps', (["{'Case': {'name': 0, 'id': 0, 'reference': 0}}"], {}), "({'Case': {'name': 0, 'id': 0, 'reference': 0}})\n", (110842, 110890), False, 'import json\n'), ((111009, 111105), 'json.dumps', 'json.dumps', (["{'Case': {'parent': {'id': 0}, 'workflow_state': {linked_case_confirm_key: 0}}}"], {}), "({'Case': {'parent': {'id': 0}, 'workflow_state': {\n linked_case_confirm_key: 0}}})\n", (111019, 111105), False, 'import json\n'), ((112877, 112935), 'json.dumps', 'json.dumps', (["{'Case': {'name': 0, 'id': 0, 'reference': 0}}"], {}), "({'Case': {'name': 0, 'id': 0, 'reference': 0}})\n", (112887, 112935), False, 'import json\n'), ((4588, 4682), 'django.shortcuts.render', 'render', (['request', 'template_name', "{'body_classes': body_class, 'cases': cases, 'tabs': tabs}"], {}), "(request, template_name, {'body_classes': body_class, 'cases': cases,\n 'tabs': tabs})\n", (4594, 4682), False, 'from django.shortcuts import render, redirect\n'), ((5520, 
5846), 'json.dumps', 'json.dumps', (["{'Case': {'id': 0, 'name': 0, 'initiated_at': 0,\n 'decision_to_initiate,name': 0, 'reference': 0, 'sequence': 0, 'type': \n 0, 'archived_at': 0, 'archive_reason': {'name': 0}, 'submission_count':\n 0, 'participant_count': 0, 'stage': {'name': 0}, 'case_status': 0,\n 'organisation': {'id': 0, 'name': 0}}}"], {}), "({'Case': {'id': 0, 'name': 0, 'initiated_at': 0,\n 'decision_to_initiate,name': 0, 'reference': 0, 'sequence': 0, 'type': \n 0, 'archived_at': 0, 'archive_reason': {'name': 0}, 'submission_count':\n 0, 'participant_count': 0, 'stage': {'name': 0}, 'case_status': 0,\n 'organisation': {'id': 0, 'name': 0}}})\n", (5530, 5846), False, 'import json\n'), ((7301, 7345), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'context'], {}), '(request, self.template_name, context)\n', (7307, 7345), False, 'from django.shortcuts import render, redirect\n'), ((8893, 8934), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['sub_docs', '"""type/key"""'], {}), "(sub_docs, 'type/key')\n", (8912, 8934), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((9254, 9280), 'core.utils.key_by', 'key_by', (['confidential', '"""id"""'], {}), "(confidential, 'id')\n", (9260, 9280), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((11839, 11874), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/admin/"""'], {}), "(f'/case/{case_id}/admin/')\n", (11847, 11874), False, 'from django.shortcuts import render, redirect\n'), ((12847, 12887), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/milestones/"""'], {}), "(f'/case/{case_id}/milestones/')\n", (12855, 12887), False, 'from django.shortcuts import render, redirect\n'), ((15559, 15610), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['all_case_invites', '"""contact/id"""'], {}), "(all_case_invites, 'contact/id')\n", (15578, 15610), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((16843, 16874), 'core.utils.index_users_by_group', 'index_users_by_group', (['all_users'], {}), '(all_users)\n', (16863, 16874), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((17603, 17632), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/"""'], {}), "(f'/case/{case_id}/')\n", (17611, 17632), False, 'from django.shortcuts import render, redirect\n'), ((20960, 21009), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['all_submissions', '"""type/name"""'], {}), "(all_submissions, 
'type/name')\n", (20979, 21009), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((22002, 22063), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['non_draft_submissions', '"""organisation/id"""'], {}), "(non_draft_submissions, 'organisation/id')\n", (22021, 22063), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((24947, 24998), 'cases.submissions.SUBMISSION_TYPE_HELPERS.get', 'SUBMISSION_TYPE_HELPERS.get', (["submission_type['key']"], {}), "(submission_type['key'])\n", (24974, 24998), False, 'from cases.submissions import SUBMISSION_TYPE_HELPERS, get_submission_deadline\n'), ((32964, 32997), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['meta', '"""name"""'], {}), "(meta, 'name')\n", (32983, 32997), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((33027, 33063), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['meta', '"""file/id"""'], {}), "(meta, 'file/id')\n", (33046, 33063), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((45672, 45728), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/submission/{submission_id}/"""'], {}), "(f'/case/{case_id}/submission/{submission_id}/')\n", (45680, 45728), False, 'from django.shortcuts import render, redirect\n'), ((47331, 47361), 'core.utils.submission_contact', 'submission_contact', (['submission'], {}), '(submission)\n', (47349, 47361), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((47777, 47843), 'cases.submissions.get_submission_deadline', 'get_submission_deadline', (['submission', 'settings.FRIENDLY_DATE_FORMAT'], {}), '(submission, settings.FRIENDLY_DATE_FORMAT)\n', (47800, 47843), False, 'from cases.submissions import SUBMISSION_TYPE_HELPERS, get_submission_deadline\n'), ((47914, 47961), 'core.utils.notify_contact_email', 'notify_contact_email', (['self._client', 'case_number'], {}), '(self._client, case_number)\n', (47934, 47961), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((47979, 
48013), 'core.utils.notify_footer', 'notify_footer', (['self._client', 'email'], {}), '(self._client, email)\n', (47992, 48013), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((49138, 49177), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (49144, 49177), False, 'from django.shortcuts import render, redirect\n'), ((55780, 56006), 'django.shortcuts.render', 'render', (['request', 'self.template_name', "{'submission': submission, 'organisation': organisation, 'caserole':\n caserole, 'org_matches': org_matches, 'page_data': {'submission':\n submission, 'organisation': organisation}}"], {}), "(request, self.template_name, {'submission': submission,\n 'organisation': organisation, 'caserole': caserole, 'org_matches':\n org_matches, 'page_data': {'submission': submission, 'organisation':\n organisation}})\n", (55786, 56006), False, 'from django.shortcuts import render, redirect\n'), ((58782, 58846), 'cases.utils.decorate_orgs', 'decorate_orgs', (['org_matches', 'test_org_id'], {'exclude_case_id': 'case_id'}), '(org_matches, test_org_id, exclude_case_id=case_id)\n', (58795, 58846), False, 'from cases.utils import decorate_orgs\n'), ((61979, 62005), 'core.utils.get', 'get', (['caserole', '"""role/name"""'], {}), "(caserole, 'role/name')\n", (61982, 62005), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((62386, 62416), 'core.utils.submission_contact', 'submission_contact', (['submission'], {}), '(submission)\n', (62404, 62416), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((63640, 63725), 'django.shortcuts.render', 'render', (['request', '"""cases/verify/notify.html"""', "{'parsed_template': parsed_template}"], {}), "(request, 'cases/verify/notify.html', {'parsed_template':\n parsed_template})\n", (63646, 63725), False, 'from django.shortcuts import render, redirect\n'), ((65040, 65106), 'cases.submissions.get_submission_deadline', 'get_submission_deadline', (['submission', 'settings.FRIENDLY_DATE_FORMAT'], {}), '(submission, settings.FRIENDLY_DATE_FORMAT)\n', (65063, 65106), False, 'from cases.submissions import SUBMISSION_TYPE_HELPERS, get_submission_deadline\n'), ((65163, 65210), 'core.utils.notify_contact_email', 'notify_contact_email', (['self._client', 'case_number'], {}), '(self._client, case_number)\n', (65183, 65210), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((65228, 65262), 'core.utils.notify_footer', 'notify_footer', (['self._client', 
'email'], {}), '(self._client, email)\n', (65241, 65262), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((67989, 68028), 'django.shortcuts.render', 'render', (['request', 'template_name', 'context'], {}), '(request, template_name, context)\n', (67995, 68028), False, 'from django.shortcuts import render, redirect\n'), ((68377, 68443), 'cases.submissions.get_submission_deadline', 'get_submission_deadline', (['submission', 'settings.FRIENDLY_DATE_FORMAT'], {}), '(submission, settings.FRIENDLY_DATE_FORMAT)\n', (68400, 68443), False, 'from cases.submissions import SUBMISSION_TYPE_HELPERS, get_submission_deadline\n'), ((72133, 72189), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['case_submissions', '"""organisation/id"""'], {}), "(case_submissions, 'organisation/id')\n", (72152, 72189), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((76510, 76566), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['case_submissions', '"""organisation/id"""'], {}), "(case_submissions, 'organisation/id')\n", (76529, 76566), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((76970, 77018), 'core.utils.deep_index_items_by_exists', 'deep_index_items_by_exists', (['cases', '"""archived_at"""'], {}), "(cases, 'archived_at')\n", (76996, 77018), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((78008, 78056), 'cases.utils.decorate_orgs', 'decorate_orgs', (['org_matches', 'self.organisation_id'], {}), '(org_matches, self.organisation_id)\n', (78021, 78056), False, 'from cases.utils import decorate_orgs\n'), ((80738, 80783), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/files/?tab={tab}"""'], {}), "(f'/case/{case_id}/files/?tab={tab}')\n", (80746, 80783), False, 'from django.shortcuts import render, redirect\n'), ((84277, 84320), 'core.utils.key_by', 'key_by', (["workflow['root']", '"""key"""', '"""children"""'], {}), "(workflow['root'], 'key', 'children')\n", (84283, 84320), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((86729, 86778), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/section/{content_id}"""'], {}), "(f'/case/{case_id}/section/{content_id}')\n", (86737, 86778), False, 'from django.shortcuts import render, 
redirect\n'), ((87260, 87315), 'django.urls.reverse', 'reverse', (['"""case_audit"""'], {'kwargs': "{'case_id': self.case_id}"}), "('case_audit', kwargs={'case_id': self.case_id})\n", (87267, 87315), False, 'from django.urls import reverse\n'), ((88344, 88403), 'django.http.HttpResponse', 'HttpResponse', (['file'], {'content_type': '"""application/vnd.ms-excel"""'}), "(file, content_type='application/vnd.ms-excel')\n", (88356, 88403), False, 'from django.http import HttpResponse\n'), ((92576, 92630), 'core.utils.deep_index_items_by_exists', 'deep_index_items_by_exists', (['case_submissions', '"""is_tra"""'], {}), "(case_submissions, 'is_tra')\n", (92602, 92630), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((92741, 92798), 'core.utils.deep_index_items_by_exists', 'deep_index_items_by_exists', (['case_submissions', '"""issued_at"""'], {}), "(case_submissions, 'issued_at')\n", (92767, 92798), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((94985, 95029), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'context'], {}), '(request, self.template_name, context)\n', (94991, 95029), False, 'from django.shortcuts import render, redirect\n'), ((99626, 99673), 'core.utils.notify_contact_email', 'notify_contact_email', (['self._client', 'case_number'], {}), '(self._client, case_number)\n', (99646, 99673), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((99691, 99725), 'core.utils.notify_footer', 'notify_footer', (['self._client', 'email'], {}), '(self._client, email)\n', (99704, 99725), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((104068, 104112), 'core.utils.pluck', 'pluck', (['request.POST', "['name', 'description']"], {}), "(request.POST, ['name', 'description'])\n", (104073, 104112), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((107588, 107627), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/documents/"""'], {}), "(f'/case/{case_id}/documents/')\n", (107596, 107627), False, 'from django.shortcuts import render, redirect\n'), ((109103, 109150), 'core.utils.notify_contact_email', 'notify_contact_email', (['self._client', 'case_number'], {}), '(self._client, case_number)\n', (109123, 109150), False, 'from core.utils 
import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((109168, 109202), 'core.utils.notify_footer', 'notify_footer', (['self._client', 'email'], {}), '(self._client, email)\n', (109181, 109202), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((112484, 112575), 'django.shortcuts.render', 'render', (['request', 'self.template_name', "{'body_classes': 'full-width', 'notices': notices}"], {}), "(request, self.template_name, {'body_classes': 'full-width',\n 'notices': notices})\n", (112490, 112575), False, 'from django.shortcuts import render, redirect\n'), ((113315, 113446), 'django.shortcuts.render', 'render', (['request', 'self.template_name', "{'body_classes': 'full-width', 'notice': notice, 'cases': cases,\n 'case_types': case_types}"], {}), "(request, self.template_name, {'body_classes': 'full-width', 'notice':\n notice, 'cases': cases, 'case_types': case_types})\n", (113321, 113446), False, 'from django.shortcuts import render, redirect\n'), ((114094, 114121), 'django.shortcuts.redirect', 'redirect', (['"""/cases/notices/"""'], {}), "('/cases/notices/')\n", (114102, 114121), False, 'from django.shortcuts import render, redirect\n'), ((3231, 3339), 'core.utils.deep_update', 'deep_update', (['fields', "{'Case': {'workflow_state': {'MEASURE_EXPIRY': 0,\n 'DETERMINATION_ACTIVE_DATE': 0}}}"], {}), "(fields, {'Case': {'workflow_state': {'MEASURE_EXPIRY': 0,\n 'DETERMINATION_ACTIVE_DATE': 0}}})\n", (3242, 3339), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((7868, 7913), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['all_documents', '"""version"""'], {}), "(all_documents, 'version')\n", (7887, 7913), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((19940, 19970), 'core.utils.get', 'get', (['submission', '"""status/sent"""'], {}), "(submission, 'status/sent')\n", (19943, 19970), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((24683, 24715), 'core.utils.get', 'get', (['submission', '"""created_by/id"""'], {}), "(submission, 'created_by/id')\n", (24686, 24715), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, 
to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((26560, 26603), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['all_parties', '"""sampled"""'], {}), "(all_parties, 'sampled')\n", (26579, 26603), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((32900, 32917), 'json.loads', 'json.loads', (['block'], {}), '(block)\n', (32910, 32917), False, 'import json\n'), ((37412, 37439), 'core.utils.get', 'get', (['submission', '"""type/key"""'], {}), "(submission, 'type/key')\n", (37415, 37439), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((40605, 40628), 'json.dumps', 'json.dumps', (['return_data'], {}), '(return_data)\n', (40615, 40628), False, 'import json\n'), ((43594, 43624), 'json.loads', 'json.loads', (['document_list_json'], {}), '(document_list_json)\n', (43604, 43624), False, 'import json\n'), ((44185, 44205), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (44195, 44205), False, 'import json\n'), ((44495, 44515), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (44505, 44515), False, 'import json\n'), ((48366, 48384), 'core.utils.public_login_url', 'public_login_url', ([], {}), '()\n', (48382, 48384), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((49051, 49111), 'core.utils.parse_notify_template', 'parse_notify_template', (["notification_template['body']", 'values'], {}), "(notification_template['body'], values)\n", (49072, 49111), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((51584, 51654), 'json.dumps', 'json.dumps', (["{'error': 'No deficiency status for this submission type'}"], {}), "({'error': 'No deficiency status for this submission type'})\n", (51594, 51654), False, 'import json\n'), ((52954, 52980), 're.split', 're.split', (['regex', 'param_key'], {}), '(regex, param_key)\n', (52962, 52980), False, 'import re\n'), ((57716, 57808), 'core.utils.pluck', 'pluck', (['request.POST', "['LOA_contact_id', 'name', 'email', 'address', 'org_name', 'phone']"], {}), "(request.POST, ['LOA_contact_id', 'name', 'email', 'address',\n 'org_name', 'phone'])\n", (57721, 57808), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((57891, 57909), 'json.dumps', 
'json.dumps', (['result'], {}), '(result)\n', (57901, 57909), False, 'import json\n'), ((60215, 60243), 'json.dumps', 'json.dumps', (["{'result': True}"], {}), "({'result': True})\n", (60225, 60243), False, 'import json\n'), ((61620, 61652), 'core.utils.pluck', 'pluck', (['request.POST', "['approve']"], {}), "(request.POST, ['approve'])\n", (61625, 61652), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((61691, 61709), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (61701, 61709), False, 'import json\n'), ((63273, 63333), 'core.utils.parse_notify_template', 'parse_notify_template', (["notification_template['body']", 'values'], {}), "(notification_template['body'], values)\n", (63294, 63333), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((63545, 63577), 'core.utils.get', 'get', (['submission', '"""created_by/id"""'], {}), "(submission, 'created_by/id')\n", (63548, 63577), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((64068, 64096), 'json.dumps', 'json.dumps', (["{'result': True}"], {}), "({'result': True})\n", (64078, 64096), False, 'import json\n'), ((64614, 64644), 'core.utils.submission_contact', 'submission_contact', (['submission'], {}), '(submission)\n', (64632, 64644), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((65691, 65709), 'core.utils.public_login_url', 'public_login_url', ([], {}), '()\n', (65707, 65709), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((67251, 67311), 'core.utils.parse_notify_template', 'parse_notify_template', (["notification_template['body']", 'values'], {}), "(notification_template['body'], values)\n", (67272, 67311), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((68955, 69050), 'json.dumps', 'json.dumps', (["{'redirect_url': f'/case/{case_id}/submission/{submission_id}/', 'error': None}"], {}), "({'redirect_url': f'/case/{case_id}/submission/{submission_id}/',\n 'error': None})\n", (68965, 69050), False, 'import json\n'), ((73935, 
74037), 'core.utils.deep_update', 'deep_update', (['result', "{'case_id': case_id, 'case': {'id': case_id}, 'organisation': {'id': org_id}}"], {}), "(result, {'case_id': case_id, 'case': {'id': case_id},\n 'organisation': {'id': org_id}})\n", (73946, 74037), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((74179, 74212), 'django.shortcuts.render', 'render', (['request', 'template', 'result'], {}), '(request, template, result)\n', (74185, 74212), False, 'from django.shortcuts import render, redirect\n'), ((74241, 74271), 'json.dumps', 'json.dumps', (["{'result': result}"], {}), "({'result': result})\n", (74251, 74271), False, 'import json\n'), ((81158, 81180), 'json.dumps', 'json.dumps', (['case_files'], {}), '(case_files)\n', (81168, 81180), False, 'import json\n'), ((81812, 81836), 'json.dumps', 'json.dumps', (["{'saved': 1}"], {}), "({'saved': 1})\n", (81822, 81836), False, 'import json\n'), ((82760, 82801), 'json.dumps', 'json.dumps', (["{'workflow_state': state_map}"], {}), "({'workflow_state': state_map})\n", (82770, 82801), False, 'import json\n'), ((86096, 86143), 'django.shortcuts.redirect', 'redirect', (['f"""/case/{case_id}/action/{action_id}"""'], {}), "(f'/case/{case_id}/action/{action_id}')\n", (86104, 86143), False, 'from django.shortcuts import render, redirect\n'), ((88967, 88984), 'json.dumps', 'json.dumps', (['notes'], {}), '(notes)\n', (88977, 88984), False, 'import json\n'), ((91751, 91787), 'core.utils.internal_redirect', 'internal_redirect', (['redirect_url', '"""/"""'], {}), "(redirect_url, '/')\n", (91768, 91787), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((97057, 97076), 'django.shortcuts.redirect', 'redirect', (['"""/cases/"""'], {}), "('/cases/')\n", (97065, 97076), False, 'from django.shortcuts import render, redirect\n'), ((97243, 97287), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'context'], {}), '(request, self.template_name, context)\n', (97249, 97287), False, 'from django.shortcuts import render, redirect\n'), ((99810, 99840), 'core.utils.get', 'get', (['self.case', '"""product/name"""'], {}), "(self.case, 'product/name')\n", (99813, 99840), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((100697, 100757), 'core.utils.parse_notify_template', 'parse_notify_template', (["notification_template['body']", 'values'], {}), "(notification_template['body'], values)\n", (100718, 100757), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((101637, 101657), 'json.dumps', 
'json.dumps', (['response'], {}), '(response)\n', (101647, 101657), False, 'import json\n'), ((104420, 104453), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['meta', '"""name"""'], {}), "(meta, 'name')\n", (104439, 104453), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((106867, 106918), 'core.utils.pluck', 'pluck', (['request.POST', "['name', 'submission_type_id']"], {}), "(request.POST, ['name', 'submission_type_id'])\n", (106872, 106918), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((107368, 107383), 'json.dumps', 'json.dumps', (['ret'], {}), '(ret)\n', (107378, 107383), False, 'import json\n'), ((110028, 110088), 'core.utils.parse_notify_template', 'parse_notify_template', (["notification_template['body']", 'values'], {}), "(notification_template['body'], values)\n", (110049, 110088), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((110618, 110638), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (110628, 110638), False, 'import json\n'), ((111647, 111669), 'core.utils.get', 'get', (['case', '"""parent/id"""'], {}), "(case, 'parent/id')\n", (111650, 111669), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((112035, 112101), 'json.dumps', 'json.dumps', (["{'parent_id': parent_id, 'link_confirm': link_confirm}"], {}), "({'parent_id': parent_id, 'link_confirm': link_confirm})\n", (112045, 112101), False, 'import json\n'), ((115047, 115063), 'json.dumps', 'json.dumps', (['team'], {}), '(team)\n', (115057, 115063), False, 'import json\n'), ((3753, 3771), 'json.dumps', 'json.dumps', (['fields'], {}), '(fields)\n', (3763, 3771), False, 'import json\n'), ((10874, 10888), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (10886, 10888), False, 'from django.utils import timezone\n'), ((15080, 15108), 'json.dumps', 'json.dumps', (["{'result': 'ok'}"], {}), "({'result': 'ok'})\n", (15090, 15108), False, 'import json\n'), ((24870, 24903), 'core.utils.get', 'get', (['full_user', '"""organisations/0"""'], {}), "(full_user, 'organisations/0')\n", (24873, 24903), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((31808, 31826), 'json.loads', 'json.loads', (['errors'], {}), '(errors)\n', (31818, 31826), False, 'import json\n'), 
((33647, 33708), 'json.dumps', 'json.dumps', (["{'redirect_url': f'/case/{case_id}/submissions/'}"], {}), "({'redirect_url': f'/case/{case_id}/submissions/'})\n", (33657, 33708), False, 'import json\n'), ((38666, 38717), 'cases.submissions.SUBMISSION_TYPE_HELPERS.get', 'SUBMISSION_TYPE_HELPERS.get', (["submission_type['key']"], {}), "(submission_type['key'])\n", (38693, 38717), False, 'from cases.submissions import SUBMISSION_TYPE_HELPERS, get_submission_deadline\n'), ((39484, 39510), 're.split', 're.split', (['regex', 'param_key'], {}), '(regex, param_key)\n', (39492, 39510), False, 'import re\n'), ((51234, 51310), 'json.dumps', 'json.dumps', (["{'redirect_url': f'/case/{case_id}/submission/{submission_id}/'}"], {}), "({'redirect_url': f'/case/{case_id}/submission/{submission_id}/'})\n", (51244, 51310), False, 'import json\n'), ((54970, 55094), 'json.dumps', 'json.dumps', (["{'error':\n 'You cannot verify this organisation as they have not yet registered interest in this case.'\n }"], {}), "({'error':\n 'You cannot verify this organisation as they have not yet registered interest in this case.'\n })\n", (54980, 55094), False, 'import json\n'), ((55621, 55655), 'core.utils.get', 'get', (['submission', '"""organisation/id"""'], {}), "(submission, 'organisation/id')\n", (55624, 55655), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((61320, 61346), 'core.utils.get', 'get', (['caserole', '"""role/name"""'], {}), "(caserole, 'role/name')\n", (61323, 61346), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((62049, 62074), 'core.utils.get', 'get', (['caserole', '"""role/key"""'], {}), "(caserole, 'role/key')\n", (62052, 62074), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((72682, 72732), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['idx_submissions[org_id]', '"""id"""'], {}), "(idx_submissions[org_id], 'id')\n", (72701, 72732), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((73489, 73540), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['all_case_invites', '"""contact/id"""'], {}), "(all_case_invites, 'contact/id')\n", (73508, 73540), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((77062, 77077), 'core.utils.get', 'get', (['case', 
'"""id"""'], {}), "(case, 'id')\n", (77065, 77077), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((86207, 86253), 'json.dumps', 'json.dumps', (["{'workflow_state': self.state_map}"], {}), "({'workflow_state': self.state_map})\n", (86217, 86253), False, 'import json\n'), ((91894, 91912), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (91904, 91912), False, 'import json\n'), ((100126, 100165), 'core.utils.get', 'get', (['self.case', '"""registration_deadline"""'], {}), "(self.case, 'registration_deadline')\n", (100129, 100165), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((104352, 104369), 'json.loads', 'json.loads', (['block'], {}), '(block)\n', (104362, 104369), False, 'import json\n'), ((106285, 106321), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['meta', '"""file/id"""'], {}), "(meta, 'file/id')\n", (106304, 106321), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((109624, 109663), 'core.utils.get', 'get', (['self.case', '"""registration_deadline"""'], {}), "(self.case, 'registration_deadline')\n", (109627, 109663), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((20033, 20066), 'core.utils.get', 'get', (['submission', '"""status/default"""'], {}), "(submission, 'status/default')\n", (20036, 20066), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((21152, 21206), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['all_submissions', '"""status/default"""'], {}), "(all_submissions, 'status/default')\n", (21171, 21206), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((21355, 21404), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['draft_submissions', '"""version"""'], {}), "(draft_submissions, 'version')\n", (21374, 21404), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, 
to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((29432, 29489), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['draft_submissions', '"""organisation_id"""'], {}), "(draft_submissions, 'organisation_id')\n", (29451, 29489), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((34165, 34180), 'core.utils.is_date', 'is_date', (['due_at'], {}), '(due_at)\n', (34172, 34180), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((52315, 52356), 'json.dumps', 'json.dumps', (["{'id': 0, 'type': {'key': 0}}"], {}), "({'id': 0, 'type': {'key': 0}})\n", (52325, 52356), False, 'import json\n'), ((52434, 52461), 'core.utils.get', 'get', (['submission', '"""type/key"""'], {}), "(submission, 'type/key')\n", (52437, 52461), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((53344, 53365), 'core.utils.get', 'get', (['submission', '"""id"""'], {}), "(submission, 'id')\n", (53347, 53365), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((53408, 53441), 'core.utils.to_json', 'to_json', (['deficiency_notice_params'], {}), '(deficiency_notice_params)\n', (53415, 53441), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((59943, 59977), 'core.utils.get', 'get', (['submission', '"""organisation/id"""'], {}), "(submission, 'organisation/id')\n", (59946, 59977), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((63152, 63170), 'core.utils.public_login_url', 'public_login_url', ([], {}), '()\n', (63168, 63170), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((73174, 73251), 'itertools.filterfalse', 'itertools.filterfalse', (["(lambda x: x['id'] in third_party_contact_ids)", 'contacts'], {}), "(lambda 
x: x['id'] in third_party_contact_ids, contacts)\n", (73195, 73251), False, 'import itertools\n'), ((99241, 99347), 'json.dumps', 'json.dumps', (["{'Case': {'latest_notice_of_initiation_url': 0, 'registration_deadline': 0,\n 'product': 0}}"], {}), "({'Case': {'latest_notice_of_initiation_url': 0,\n 'registration_deadline': 0, 'product': 0}})\n", (99251, 99347), False, 'import json\n'), ((111765, 111824), 'core.utils.get', 'get', (['case', 'f"""workflow_state/{self.linked_case_confirm_key}"""'], {}), "(case, f'workflow_state/{self.linked_case_confirm_key}')\n", (111768, 111824), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((8523, 8546), 'core.utils.get', 'get', (['parent_doc', '"""name"""'], {}), "(parent_doc, 'name')\n", (8526, 8546), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((20071, 20104), 'core.utils.get', 'get', (['submission', '"""type/direction"""'], {}), "(submission, 'type/direction')\n", (20074, 20104), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((29138, 29192), 'core.utils.deep_index_items_by', 'deep_index_items_by', (['all_submissions', '"""status/default"""'], {}), "(all_submissions, 'status/default')\n", (29157, 29192), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((39764, 39805), 'core.utils.get', 'get', (['deficiency_notice_params', 'matches[1]'], {}), '(deficiency_notice_params, matches[1])\n', (39767, 39805), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((40203, 40236), 'core.utils.to_json', 'to_json', (['deficiency_notice_params'], {}), '(deficiency_notice_params)\n', (40210, 40236), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((76705, 76723), 'core.utils.get', 'get', (['x', '"""type/key"""'], {}), "(x, 'type/key')\n", (76708, 76723), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, 
to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((91563, 91583), 'json.dumps', 'json.dumps', (['document'], {}), '(document)\n', (91573, 91583), False, 'import json\n'), ((8486, 8514), 'core.utils.get', 'get', (['parent_doc', '"""type/name"""'], {}), "(parent_doc, 'type/name')\n", (8489, 8514), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((8767, 8788), 'core.utils.get', 'get', (['document', '"""name"""'], {}), "(document, 'name')\n", (8770, 8788), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((19174, 19191), 'core.utils.get', 'get', (['pt', 'sort_key'], {}), '(pt, sort_key)\n', (19177, 19191), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((20215, 20246), 'core.utils.get', 'get', (['submission', '"""status/draft"""'], {}), "(submission, 'status/draft')\n", (20218, 20246), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((20270, 20297), 'core.utils.get', 'get', (['submission', '"""type/key"""'], {}), "(submission, 'type/key')\n", (20273, 20297), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((36667, 36709), 'json.dumps', 'json.dumps', (["{'redirect_url': redirect_url}"], {}), "({'redirect_url': redirect_url})\n", (36677, 36709), False, 'import json\n'), ((106004, 106046), 'json.dumps', 'json.dumps', (["{'redirect_url': redirect_url}"], {}), "({'redirect_url': redirect_url})\n", (106014, 106046), False, 'import json\n'), ((8732, 8758), 'core.utils.get', 'get', (['document', '"""type/name"""'], {}), "(document, 'type/name')\n", (8735, 8758), False, 'from core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n'), ((11514, 11528), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (11526, 11528), False, 'from django.utils import timezone\n'), ((66983, 67043), 'core.utils.parse_notify_template', 'parse_notify_template', (["notification_template['body']", 'values'], {}), "(notification_template['body'], values)\n", (67004, 67043), False, 'from 
core.utils import deep_index_items_by, deep_index_items_by_exists, get, key_by, index_users_by_group, compact_list, submission_contact, public_login_url, parse_notify_template, parse_api_datetime, pluck, to_json, from_json, deep_update, internal_redirect, is_date, notify_footer, notify_contact_email\n')] |
import json
import requests
from datetime import datetime
from playsound import playsound
tday = datetime.today().strftime('%Y-%m-%d')
right_now = datetime.today().strftime('%I-%M-%p')
response = requests.get("https://www.londonprayertimes.com/api/times/?format=json&key=0239f686-4423-408e-9a0c-7968a403d197&year=&month=")
data = response.json()
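# Check every field returned by the API and report any time that falls in the asr window (03:30-06:00).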
for key,value in data.items():
if value >= '03:30' and value < '06:00':
print('It is asr time')
#playsound('/home/danish/Downloads/adan.mp3') | [
"datetime.datetime.today",
"requests.get"
]
| [((198, 334), 'requests.get', 'requests.get', (['"""https://www.londonprayertimes.com/api/times/?format=json&key=0239f686-4423-408e-9a0c-7968a403d197&year=&month="""'], {}), "(\n 'https://www.londonprayertimes.com/api/times/?format=json&key=0239f686-4423-408e-9a0c-7968a403d197&year=&month='\n )\n", (210, 334), False, 'import requests\n'), ((99, 115), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (113, 115), False, 'from datetime import datetime\n'), ((148, 164), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (162, 164), False, 'from datetime import datetime\n')] |
import unittest
import datetime
import rdflib # needed for eval(repr(...)) below
from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN
from rdflib.namespace import XSD
def uformat(s):
return s.replace("u'", "'")
class TestLiteral(unittest.TestCase):
def setUp(self):
pass
def test_repr_apostrophe(self):
a = rdflib.Literal("'")
b = eval(repr(a))
self.assertEqual(a, b)
def test_repr_quote(self):
a = rdflib.Literal('"')
b = eval(repr(a))
self.assertEqual(a, b)
def test_backslash(self):
d = r"""
<rdf:RDF
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:foo="http://example.org/foo#">
<rdf:Description>
<foo:bar>a\b</foo:bar>
</rdf:Description>
</rdf:RDF>
"""
g = rdflib.Graph()
g.parse(data=d, format="xml")
a = rdflib.Literal("a\\b")
b = list(g.objects())[0]
self.assertEqual(a, b)
def test_literal_from_bool(self):
l = rdflib.Literal(True)
self.assertEqual(l.datatype, rdflib.XSD["boolean"])
class TestNew(unittest.TestCase):
def testCantPassLangAndDatatype(self):
self.assertRaises(
TypeError, Literal, "foo", lang="en", datatype=URIRef("http://example.com/")
)
def testFromOtherLiteral(self):
l = Literal(1)
l2 = Literal(l)
self.assertTrue(isinstance(l.value, int))
self.assertTrue(isinstance(l2.value, int))
# change datatype
l = Literal("1")
l2 = Literal(l, datatype=rdflib.XSD.integer)
self.assertTrue(isinstance(l2.value, int))
def testDatatypeGetsAutoURIRefConversion(self):
# drewp disapproves of this behavior, but it should be
# represented in the tests
x = Literal("foo", datatype="http://example.com/")
self.assertTrue(isinstance(x.datatype, URIRef))
x = Literal("foo", datatype=Literal("pennies"))
self.assertEqual(x.datatype, URIRef("pennies"))
class TestRepr(unittest.TestCase):
def testOmitsMissingDatatypeAndLang(self):
self.assertEqual(repr(Literal("foo")), uformat("rdflib.term.Literal(u'foo')"))
def testOmitsMissingDatatype(self):
self.assertEqual(
repr(Literal("foo", lang="en")),
uformat("rdflib.term.Literal(u'foo', lang='en')"),
)
def testOmitsMissingLang(self):
self.assertEqual(
repr(Literal("foo", datatype=URIRef("http://example.com/"))),
uformat(
"rdflib.term.Literal(u'foo', datatype=rdflib.term.URIRef(u'http://example.com/'))"
),
)
def testSubclassNameAppearsInRepr(self):
class MyLiteral(Literal):
pass
x = MyLiteral("foo")
self.assertEqual(repr(x), uformat("MyLiteral('foo')"))
class TestDoubleOutput(unittest.TestCase):
def testNoDanglingPoint(self):
"""confirms the fix for https://github.com/RDFLib/rdflib/issues/237"""
vv = Literal("0.88", datatype=_XSD_DOUBLE)
out = vv._literal_n3(use_plain=True)
self.assertTrue(out in ["8.8e-01", "0.88"], out)
class TestParseBoolean(unittest.TestCase):
"""confirms the fix for https://github.com/RDFLib/rdflib/issues/913"""
def testTrueBoolean(self):
test_value = Literal("tRue", datatype=_XSD_BOOLEAN)
self.assertTrue(test_value.value)
test_value = Literal("1", datatype=_XSD_BOOLEAN)
self.assertTrue(test_value.value)
def testFalseBoolean(self):
test_value = Literal("falsE", datatype=_XSD_BOOLEAN)
self.assertFalse(test_value.value)
test_value = Literal("0", datatype=_XSD_BOOLEAN)
self.assertFalse(test_value.value)
def testNonFalseBoolean(self):
test_value = Literal("abcd", datatype=_XSD_BOOLEAN)
self.assertRaises(DeprecationWarning)
self.assertFalse(test_value.value)
test_value = Literal("10", datatype=_XSD_BOOLEAN)
self.assertRaises(DeprecationWarning)
self.assertFalse(test_value.value)
class TestBindings(unittest.TestCase):
def testBinding(self):
class a:
def __init__(self, v):
self.v = v[3:-3]
def __str__(self):
return "<<<%s>>>" % self.v
dtA = rdflib.URIRef("urn:dt:a")
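        # Register a two-way mapping between the datatype URI dtA and the Python class a.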
bind(dtA, a)
va = a("<<<2>>>")
la = Literal(va, normalize=True)
self.assertEqual(la.value, va)
self.assertEqual(la.datatype, dtA)
la2 = Literal("<<<2>>>", datatype=dtA)
self.assertTrue(isinstance(la2.value, a))
self.assertEqual(la2.value.v, va.v)
class b:
def __init__(self, v):
self.v = v[3:-3]
def __str__(self):
return "B%s" % self.v
dtB = rdflib.URIRef("urn:dt:b")
bind(dtB, b, None, lambda x: "<<<%s>>>" % x)
vb = b("<<<3>>>")
lb = Literal(vb, normalize=True)
self.assertEqual(lb.value, vb)
self.assertEqual(lb.datatype, dtB)
def testSpecificBinding(self):
def lexify(s):
return "--%s--" % s
def unlexify(s):
return s[2:-2]
datatype = rdflib.URIRef("urn:dt:mystring")
# Datatype-specific rule
bind(datatype, str, unlexify, lexify, datatype_specific=True)
s = "Hello"
normal_l = Literal(s)
self.assertEqual(str(normal_l), s)
self.assertEqual(normal_l.toPython(), s)
self.assertEqual(normal_l.datatype, None)
specific_l = Literal("--%s--" % s, datatype=datatype)
self.assertEqual(str(specific_l), lexify(s))
self.assertEqual(specific_l.toPython(), s)
self.assertEqual(specific_l.datatype, datatype)
class TestXsdLiterals(unittest.TestCase):
def test_make_literals(self):
"""
Tests literal construction.
"""
inputs = [
# these literals do not get conerted to python types
("ABCD", XSD.integer, None),
("ABCD", XSD.gYear, None),
("-10000", XSD.gYear, None),
("-1921-00", XSD.gYearMonth, None),
("1921-00", XSD.gMonthDay, None),
("1921-13", XSD.gMonthDay, None),
("-1921-00", XSD.gMonthDay, None),
("10", XSD.gDay, None),
("-1", XSD.gDay, None),
("0000", XSD.gYear, None),
("0000-00-00", XSD.date, None),
("NOT A VALID HEX STRING", XSD.hexBinary, None),
("NOT A VALID BASE64 STRING", XSD.base64Binary, None),
# these literals get converted to python types
("1921-05-01", XSD.date, datetime.date),
("1921-05-01T00:00:00", XSD.dateTime, datetime.datetime),
("1921-05", XSD.gYearMonth, datetime.date),
("0001-01", XSD.gYearMonth, datetime.date),
("0001-12", XSD.gYearMonth, datetime.date),
("2002-01", XSD.gYearMonth, datetime.date),
("9999-01", XSD.gYearMonth, datetime.date),
("9999-12", XSD.gYearMonth, datetime.date),
("1921", XSD.gYear, datetime.date),
("2000", XSD.gYear, datetime.date),
("0001", XSD.gYear, datetime.date),
("9999", XSD.gYear, datetime.date),
("1982", XSD.gYear, datetime.date),
("2002", XSD.gYear, datetime.date),
("1921-05-01T00:00:00+00:30", XSD.dateTime, datetime.datetime),
("1921-05-01T00:00:00-00:30", XSD.dateTime, datetime.datetime),
("abcdef0123", XSD.hexBinary, bytes),
("", XSD.hexBinary, bytes),
("UkRGTGli", XSD.base64Binary, bytes),
("", XSD.base64Binary, bytes),
]
self.check_make_literals(inputs)
@unittest.expectedFailure
def test_make_literals_ki(self):
"""
Known issues with literal construction.
"""
inputs = [
("1921-01Z", XSD.gYearMonth, datetime.date),
("1921Z", XSD.gYear, datetime.date),
("1921-00", XSD.gYearMonth, datetime.date),
("1921-05-01Z", XSD.date, datetime.date),
("1921-05-01+00:30", XSD.date, datetime.date),
("1921-05-01+00:30", XSD.date, datetime.date),
("1921-05-01+00:00", XSD.date, datetime.date),
("1921-05-01+00:00", XSD.date, datetime.date),
("1921-05-01T00:00:00Z", XSD.dateTime, datetime.datetime),
]
self.check_make_literals(inputs)
def check_make_literals(self, inputs):
for literal_pair in inputs:
(lexical, type, value_cls) = literal_pair
            with self.subTest(f"testing {literal_pair}"):
literal = Literal(lexical, datatype=type)
if value_cls is not None:
self.assertIsInstance(literal.value, value_cls)
else:
self.assertIsNone(literal.value)
self.assertEqual(lexical, f"{literal}")
if __name__ == "__main__":
unittest.main()
| [
"rdflib.term.URIRef",
"rdflib.term.Literal",
"rdflib.term.bind",
"rdflib.Literal",
"rdflib.Graph",
"unittest.main",
"rdflib.URIRef"
]
| [((9047, 9062), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9060, 9062), False, 'import unittest\n'), ((362, 381), 'rdflib.Literal', 'rdflib.Literal', (['"""\'"""'], {}), '("\'")\n', (376, 381), False, 'import rdflib\n'), ((483, 502), 'rdflib.Literal', 'rdflib.Literal', (['"""\\""""'], {}), '(\'"\')\n', (497, 502), False, 'import rdflib\n'), ((815, 829), 'rdflib.Graph', 'rdflib.Graph', ([], {}), '()\n', (827, 829), False, 'import rdflib\n'), ((880, 902), 'rdflib.Literal', 'rdflib.Literal', (['"""a\\\\b"""'], {}), "('a\\\\b')\n", (894, 902), False, 'import rdflib\n'), ((1018, 1038), 'rdflib.Literal', 'rdflib.Literal', (['(True)'], {}), '(True)\n', (1032, 1038), False, 'import rdflib\n'), ((1353, 1363), 'rdflib.term.Literal', 'Literal', (['(1)'], {}), '(1)\n', (1360, 1363), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((1377, 1387), 'rdflib.term.Literal', 'Literal', (['l'], {}), '(l)\n', (1384, 1387), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((1528, 1540), 'rdflib.term.Literal', 'Literal', (['"""1"""'], {}), "('1')\n", (1535, 1540), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((1554, 1593), 'rdflib.term.Literal', 'Literal', (['l'], {'datatype': 'rdflib.XSD.integer'}), '(l, datatype=rdflib.XSD.integer)\n', (1561, 1593), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((1808, 1854), 'rdflib.term.Literal', 'Literal', (['"""foo"""'], {'datatype': '"""http://example.com/"""'}), "('foo', datatype='http://example.com/')\n", (1815, 1854), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((3024, 3061), 'rdflib.term.Literal', 'Literal', (['"""0.88"""'], {'datatype': '_XSD_DOUBLE'}), "('0.88', datatype=_XSD_DOUBLE)\n", (3031, 3061), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((3337, 3375), 'rdflib.term.Literal', 'Literal', (['"""tRue"""'], {'datatype': '_XSD_BOOLEAN'}), "('tRue', datatype=_XSD_BOOLEAN)\n", (3344, 3375), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((3439, 3474), 'rdflib.term.Literal', 'Literal', (['"""1"""'], {'datatype': '_XSD_BOOLEAN'}), "('1', datatype=_XSD_BOOLEAN)\n", (3446, 3474), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((3571, 3610), 'rdflib.term.Literal', 'Literal', (['"""falsE"""'], {'datatype': '_XSD_BOOLEAN'}), "('falsE', datatype=_XSD_BOOLEAN)\n", (3578, 3610), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((3675, 3710), 'rdflib.term.Literal', 'Literal', (['"""0"""'], {'datatype': '_XSD_BOOLEAN'}), "('0', datatype=_XSD_BOOLEAN)\n", (3682, 3710), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((3811, 3849), 'rdflib.term.Literal', 'Literal', (['"""abcd"""'], {'datatype': '_XSD_BOOLEAN'}), "('abcd', datatype=_XSD_BOOLEAN)\n", (3818, 3849), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((3960, 3996), 'rdflib.term.Literal', 'Literal', (['"""10"""'], {'datatype': '_XSD_BOOLEAN'}), "('10', datatype=_XSD_BOOLEAN)\n", (3967, 3996), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((4329, 4354), 'rdflib.URIRef', 'rdflib.URIRef', (['"""urn:dt:a"""'], {}), "('urn:dt:a')\n", (4342, 4354), False, 'import rdflib\n'), ((4363, 4375), 'rdflib.term.bind', 'bind', (['dtA', 'a'], {}), '(dtA, a)\n', 
(4367, 4375), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((4416, 4443), 'rdflib.term.Literal', 'Literal', (['va'], {'normalize': '(True)'}), '(va, normalize=True)\n', (4423, 4443), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((4541, 4573), 'rdflib.term.Literal', 'Literal', (['"""<<<2>>>"""'], {'datatype': 'dtA'}), "('<<<2>>>', datatype=dtA)\n", (4548, 4573), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((4839, 4864), 'rdflib.URIRef', 'rdflib.URIRef', (['"""urn:dt:b"""'], {}), "('urn:dt:b')\n", (4852, 4864), False, 'import rdflib\n'), ((4873, 4917), 'rdflib.term.bind', 'bind', (['dtB', 'b', 'None', "(lambda x: '<<<%s>>>' % x)"], {}), "(dtB, b, None, lambda x: '<<<%s>>>' % x)\n", (4877, 4917), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((4958, 4985), 'rdflib.term.Literal', 'Literal', (['vb'], {'normalize': '(True)'}), '(vb, normalize=True)\n', (4965, 4985), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((5232, 5264), 'rdflib.URIRef', 'rdflib.URIRef', (['"""urn:dt:mystring"""'], {}), "('urn:dt:mystring')\n", (5245, 5264), False, 'import rdflib\n'), ((5307, 5368), 'rdflib.term.bind', 'bind', (['datatype', 'str', 'unlexify', 'lexify'], {'datatype_specific': '(True)'}), '(datatype, str, unlexify, lexify, datatype_specific=True)\n', (5311, 5368), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((5409, 5419), 'rdflib.term.Literal', 'Literal', (['s'], {}), '(s)\n', (5416, 5419), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((5584, 5624), 'rdflib.term.Literal', 'Literal', (["('--%s--' % s)"], {'datatype': 'datatype'}), "('--%s--' % s, datatype=datatype)\n", (5591, 5624), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((2005, 2022), 'rdflib.term.URIRef', 'URIRef', (['"""pennies"""'], {}), "('pennies')\n", (2011, 2022), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((1264, 1293), 'rdflib.term.URIRef', 'URIRef', (['"""http://example.com/"""'], {}), "('http://example.com/')\n", (1270, 1293), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((1948, 1966), 'rdflib.term.Literal', 'Literal', (['"""pennies"""'], {}), "('pennies')\n", (1955, 1966), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((2138, 2152), 'rdflib.term.Literal', 'Literal', (['"""foo"""'], {}), "('foo')\n", (2145, 2152), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((2279, 2304), 'rdflib.term.Literal', 'Literal', (['"""foo"""'], {'lang': '"""en"""'}), "('foo', lang='en')\n", (2286, 2304), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((8741, 8772), 'rdflib.term.Literal', 'Literal', (['lexical'], {'datatype': 'type'}), '(lexical, datatype=type)\n', (8748, 8772), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n'), ((2484, 2513), 'rdflib.term.URIRef', 'URIRef', (['"""http://example.com/"""'], {}), "('http://example.com/')\n", (2490, 2513), False, 'from rdflib.term import Literal, URIRef, _XSD_DOUBLE, bind, _XSD_BOOLEAN\n')] |
import telegram
from emoji import emojize
from .base import TextMessageBase
class RulingHelpTextMessage(TextMessageBase):
"""
Ruling help message.
Taken from:
https://www.iamexpat.nl/expat-info/taxation/30-percent-ruling/requirements
"""
def get_text(self):
message = emojize(
"<b>30% RULING INCOME REQUIREMENTS</b>\n\n"
"<a href=\"https://www.iamexpat.nl/expat-info/taxation/30-percent-ruling/requirements\">Go to Source</a>\n\n"
"<b>2019 salary requirements</b>\n\n"
":small_blue_diamond: Minimum taxable salary at 70%: <b>37743 EUR</b> \n"
":small_blue_diamond: Employee with master's degree: <b>28690 EUR</b> \n"
":small_blue_diamond: Scientific researchers: <b>No minimum</b> \n"
":small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n"
"<b>2018 salary requirements</b>\n\n"
":small_blue_diamond: Minimum taxable salary at 70%: <b>37296 EUR</b> \n"
":small_blue_diamond: Employee with master's degree: <b>28350 EUR</b> \n"
":small_blue_diamond: Scientific researchers: <b>No minimum</b> \n"
":small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n"
"<b>2017 salary requirements</b>\n\n"
":small_blue_diamond: Minimum taxable salary at 70%: <b>37000 EUR</b> \n"
":small_blue_diamond: Employee with master's degree: <b>28125 EUR</b> \n"
":small_blue_diamond: Scientific researchers: <b>No minimum</b> \n"
":small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n"
"<b>2016 salary requirements</b>\n\n"
":small_blue_diamond: Minimum taxable salary at 70%: <b>36889 EUR</b> \n"
":small_blue_diamond: Employee with master's degree: <b>28041 EUR</b> \n"
":small_blue_diamond: Scientific researchers: <b>No minimum</b> \n"
":small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n"
"Type /start to start calculation. \n"
"Type /help get more details. \n\n",
use_aliases=True
)
return message
def get_options(self):
"""
Disable link preview.
Add HTML tags render support.
"""
return {
"disable_web_page_preview": True,
"parse_mode": telegram.ParseMode.HTML,
}
| [
"emoji.emojize"
]
| [((306, 1789), 'emoji.emojize', 'emojize', (['"""<b>30% RULING INCOME REQUIREMENTS</b>\n\n<a href="https://www.iamexpat.nl/expat-info/taxation/30-percent-ruling/requirements">Go to Source</a>\n\n<b>2019 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>37743 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28690 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n<b>2018 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>37296 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28350 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n<b>2017 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>37000 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28125 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n<b>2016 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>36889 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28041 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\nType /start to start calculation. \nType /help get more details. \n\n"""'], {'use_aliases': '(True)'}), '(\n """<b>30% RULING INCOME REQUIREMENTS</b>\n\n<a href="https://www.iamexpat.nl/expat-info/taxation/30-percent-ruling/requirements">Go to Source</a>\n\n<b>2019 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>37743 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28690 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n<b>2018 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>37296 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28350 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n<b>2017 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>37000 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28125 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\n<b>2016 salary requirements</b>\n\n:small_blue_diamond: Minimum taxable salary at 70%: <b>36889 EUR</b> \n:small_blue_diamond: Employee with master\'s degree: <b>28041 EUR</b> \n:small_blue_diamond: Scientific researchers: <b>No minimum</b> \n:small_blue_diamond: Medical training specialists: <b>No minimum</b> \n\nType /start to start calculation. \nType /help get more details. \n\n"""\n , use_aliases=True)\n', (313, 1789), False, 'from emoji import emojize\n')] |
import os
os.environ['CUDA_VISIBLE_DEVICES']='0'
from common import *
from dataset import *
from model import *
def valid_augment(image, mask, infor):
return image, mask, infor
def train_augment(image, mask, infor):
u=np.random.choice(3)
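    # Pick one of three spatial augmentations at random: none, crop+rescale, or crop+rotate+rescale.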
if u==0:
pass
elif u==1:
image, mask = do_random_crop_rescale(image, mask, 1600-(256-224), 224)
elif u==2:
image, mask = do_random_crop_rotate_rescale(image, mask, 1600-(256-224), 224)
if np.random.rand()>0.5:
image = do_random_log_contast(image)
if np.random.rand()>0.5:
image, mask = do_flip_lr(image, mask)
if np.random.rand()>0.5:
image, mask = do_flip_ud(image, mask)
if np.random.rand()>0.5:
image, mask = do_noise(image, mask)
return image, mask, infor
def null_collate(batch):
batch_size = len(batch)
input = []
truth_mask = []
truth_label = []
infor = []
for b in range(batch_size):
input.append(batch[b][0])
#truth_mask.append(batch[b][1])
infor.append(batch[b][2])
mask = batch[b][1]
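        # Image-level labels: 1 for every defect class with at least one positive pixel in the mask.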
label = (mask.reshape(4,-1).sum(1)>0).astype(np.int32)
num_class,H,W = mask.shape
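        # Collapse the 4 binary class masks into a single-channel label map (0 = background, 1-4 = defect class).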
mask = mask.transpose(1,2,0)*[1,2,3,4]
mask = mask.reshape(-1,4)
mask = mask.max(-1).reshape(1,H,W)
truth_mask.append(mask)
truth_label.append(label)
input = np.stack(input)
input = image_to_input(input, IMAGE_RGB_MEAN,IMAGE_RGB_STD)
input = torch.from_numpy(input).float()
truth_mask = np.stack(truth_mask)
truth_mask = torch.from_numpy(truth_mask).long()
truth_label = np.array(truth_label)
truth_label = torch.from_numpy(truth_label).float()
return input, truth_mask, truth_label, infor
#------------------------------------
def do_valid(net, valid_loader, out_dir=None):
#out_dir=None
valid_num = np.zeros(11, np.float32)
valid_loss = np.zeros(11, np.float32)
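    # Weighted running totals of loss / hit / dice metrics; normalised by valid_num after the loop.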
for t, (input, truth_mask, truth_label, infor) in enumerate(valid_loader):
#if b==5: break
net.eval()
input = input.cuda()
truth_mask = truth_mask.cuda()
truth_label = truth_label.cuda()
with torch.no_grad():
logit = data_parallel(net, input) #net(input)
loss = criterion(logit, truth_mask)
tn,tp, num_neg,num_pos = metric_hit(logit, truth_mask)
dn,dp, num_neg,num_pos = metric_dice(logit, truth_mask, threshold=0.5, sum_threshold=100)
#zz=0
#---
batch_size = len(infor)
l = np.array([ loss.item(), tn,*tp, dn,*dp ])
n = np.array([ batch_size, num_neg,*num_pos, num_neg,*num_pos ])
valid_loss += l*n
valid_num += n
# debug-----------------------------
if out_dir is not None:
probability = torch.softmax(logit,1)
image = input_to_image(input, IMAGE_RGB_MEAN,IMAGE_RGB_STD)
probability = one_hot_encode_predict(probability)
truth_mask = one_hot_encode_truth(truth_mask)
probability_mask = probability.data.cpu().numpy()
truth_label = truth_label.data.cpu().numpy()
truth_mask = truth_mask.data.cpu().numpy()
for b in range(0, batch_size, 4):
image_id = infor[b].image_id[:-4]
result = draw_predict_result(image[b], truth_mask[b], truth_label[b], probability_mask[b], stack='vertical')
draw_shadow_text(result,'%05d %s.jpg'%(valid_num[0]-batch_size+b, image_id),(5,24),1,[255,255,255],2)
image_show('result',result,resize=1)
cv2.imwrite(out_dir +'/valid/%s.png'%(infor[b].image_id[:-4]), result)
cv2.waitKey(1)
pass
# debug-----------------------------
#print(valid_loss)
print('\r %8d /%8d'%(valid_num[0], len(valid_loader.dataset)),end='',flush=True)
pass #-- end of one data loader --
assert(valid_num[0] == len(valid_loader.dataset))
valid_loss = valid_loss/valid_num
return valid_loss
def run_train():
out_dir = \
'/root/share/project/kaggle/2019/steel/result1/resnet18-seg-full-softmax-foldb1-1-4balance'
initial_checkpoint = \
'/root/share/project/kaggle/2019/steel/result1/resnet18-seg-full-softmax-foldb1-1-4balance/checkpoint/00114000_model.pth'
schduler = NullScheduler(lr=0.001)
batch_size = 8 #8
iter_accum = 4
loss_weight = None#[5,5,2,5] #
train_sampler = FourBalanceClassSampler #RandomSampler
## setup -----------------------------------------------------------------------------
for f in ['checkpoint','train','valid','backup'] : os.makedirs(out_dir +'/'+f, exist_ok=True)
backup_project_as_zip(PROJECT_PATH, out_dir +'/backup/code.train.%s.zip'%IDENTIFIER)
log = Logger()
log.open(out_dir+'/log.train.txt',mode='a')
log.write('\n--- [START %s] %s\n\n' % (IDENTIFIER, '-' * 64))
log.write('\t%s\n' % COMMON_STRING)
log.write('\n')
log.write('\tSEED = %u\n' % SEED)
log.write('\tPROJECT_PATH = %s\n' % PROJECT_PATH)
log.write('\t__file__ = %s\n' % __file__)
log.write('\tout_dir = %s\n' % out_dir)
log.write('\n')
## dataset ----------------------------------------
log.write('** dataset setting **\n')
train_dataset = SteelDataset(
mode = 'train',
csv = ['train.csv',],
split = ['train_b1_11568.npy',],
augment = train_augment,
)
train_loader = DataLoader(
train_dataset,
sampler = train_sampler(train_dataset),
batch_size = batch_size,
drop_last = True,
num_workers = 8,
pin_memory = True,
collate_fn = null_collate
)
valid_dataset = SteelDataset(
mode = 'train',
csv = ['train.csv',],
split = ['valid_b1_1000.npy',],
augment = valid_augment,
)
valid_loader = DataLoader(
valid_dataset,
sampler = SequentialSampler(valid_dataset),
batch_size = 4,
drop_last = False,
num_workers = 8,
pin_memory = True,
collate_fn = null_collate
)
assert(len(train_dataset)>=batch_size)
log.write('batch_size = %d\n'%(batch_size))
log.write('train_dataset : \n%s\n'%(train_dataset))
log.write('valid_dataset : \n%s\n'%(valid_dataset))
log.write('\n')
## net ----------------------------------------
log.write('** net setting **\n')
net = Net().cuda()
log.write('\tinitial_checkpoint = %s\n' % initial_checkpoint)
if initial_checkpoint is not None:
state_dict = torch.load(initial_checkpoint, map_location=lambda storage, loc: storage)
##for k in ['logit.weight','logit.bias']: state_dict.pop(k, None) #tramsfer sigmoid feature to softmax network
##net.load_state_dict(state_dict,strict=False)
net.load_state_dict(state_dict,strict=False)
else:
net.load_pretrain(skip=['logit'], is_print=False)
log.write('%s\n'%(type(net)))
log.write('\tloss_weight = %s\n' % str(loss_weight))
log.write('\ttrain_loader.sampler = %s\n' % str(train_loader.sampler))
log.write('\n')
## optimiser ----------------------------------
# if 0: ##freeze
# for p in net.encoder1.parameters(): p.requires_grad = False
# pass
#net.set_mode('train',is_freeze_bn=True)
#-----------------------------------------------
#optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, net.parameters()),lr=schduler(0))
#optimizer = torch.optim.RMSprop(net.parameters(), lr =0.0005, alpha = 0.95)
optimizer = torch.optim.SGD(filter(lambda p: p.requires_grad, net.parameters()), lr=schduler(0), momentum=0.9, weight_decay=0.0001)
num_iters = 3000*1000
iter_smooth = 50
iter_log = 500
iter_valid = 1500
iter_save = [0, num_iters-1]\
+ list(range(0, num_iters, 1500))#1*1000
start_iter = 0
start_epoch= 0
rate = 0
if initial_checkpoint is not None:
initial_optimizer = initial_checkpoint.replace('_model.pth','_optimizer.pth')
if os.path.exists(initial_optimizer):
checkpoint = torch.load(initial_optimizer)
start_iter = checkpoint['iter' ]
start_epoch = checkpoint['epoch']
#optimizer.load_state_dict(checkpoint['optimizer'])
pass
log.write('optimizer\n %s\n'%(optimizer))
log.write('schduler\n %s\n'%(schduler))
log.write('\n')
## start training here! ##############################################
log.write('** start training here! **\n')
log.write(' batch_size=%d, iter_accum=%d\n'%(batch_size,iter_accum))
log.write(' experiment = %s\n' % __file__.split('/')[-2])
log.write(' |-------------------------------- VALID-----------------------------|---------- TRAIN/BATCH ------------------------------\n')
log.write('rate iter epoch | loss hit_neg,pos1,2,3,4 dice_neg,pos1,2,3,4 | loss hit_neg,pos1,2,3,4 | time \n')
log.write('------------------------------------------------------------------------------------------------------------------------------------------------\n')
#0.00000 0.0* 0.0 | 0.690 0.50 [0.00,1.00,0.00,1.00] 0.44 [0.00,0.02,0.00,0.15] | 0.000 0.00 [0.00,0.00,0.00,0.00] | 0 hr 00 min
train_loss = np.zeros(20,np.float32)
valid_loss = np.zeros(20,np.float32)
batch_loss = np.zeros(20,np.float32)
iter = 0
i = 0
start = timer()
while iter<num_iters:
sum_train_loss = np.zeros(20,np.float32)
sum = np.zeros(20,np.float32)
optimizer.zero_grad()
for t, (input, truth_mask, truth_label, infor) in enumerate(train_loader):
batch_size = len(infor)
iter = i + start_iter
epoch = (iter-start_iter)*batch_size/len(train_dataset) + start_epoch
#if 0:
if (iter % iter_valid==0):
valid_loss = do_valid(net, valid_loader, out_dir) #
#pass
if (iter % iter_log==0):
print('\r',end='',flush=True)
asterisk = '*' if iter in iter_save else ' '
log.write('%0.5f %5.1f%s %5.1f | %5.3f %4.2f [%4.2f,%4.2f,%4.2f,%4.2f] %4.2f [%4.2f,%4.2f,%4.2f,%4.2f] | %5.3f %4.2f [%4.2f,%4.2f,%4.2f,%4.2f] | %s' % (\
rate, iter/1000, asterisk, epoch,
*valid_loss[:11],
*train_loss[:6],
time_to_str((timer() - start),'min'))
)
log.write('\n')
#if 0:
if iter in iter_save:
torch.save(net.state_dict(),out_dir +'/checkpoint/%08d_model.pth'%(iter))
torch.save({
#'optimizer': optimizer.state_dict(),
'iter' : iter,
'epoch' : epoch,
}, out_dir +'/checkpoint/%08d_optimizer.pth'%(iter))
pass
# learning rate schduler -------------
lr = schduler(iter)
if lr<0 : break
adjust_learning_rate(optimizer, lr)
rate = get_learning_rate(optimizer)
# one iteration update -------------
#net.set_mode('train',is_freeze_bn=True)
net.train()
input = input.cuda()
truth_label = truth_label.cuda()
truth_mask = truth_mask.cuda()
logit = data_parallel(net,input) #net(input)
loss = criterion(logit, truth_mask, loss_weight)
tn,tp, num_neg,num_pos = metric_hit(logit, truth_mask)
(loss/iter_accum).backward()
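            # Gradient accumulation: the optimizer only steps every iter_accum mini-batches.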
if (iter % iter_accum)==0:
optimizer.step()
optimizer.zero_grad()
# print statistics ------------
l = np.array([ loss.item(), tn,*tp ])
n = np.array([ batch_size, num_neg,*num_pos ])
batch_loss[:6] = l
sum_train_loss[:6] += l*n
sum[:6] += n
if iter%iter_smooth == 0:
train_loss = sum_train_loss/(sum+1e-12)
sum_train_loss[...] = 0
sum[...] = 0
print('\r',end='',flush=True)
asterisk = ' '
print('%0.5f %5.1f%s %5.1f | %5.3f %4.2f [%4.2f,%4.2f,%4.2f,%4.2f] %4.2f [%4.2f,%4.2f,%4.2f,%4.2f] | %5.3f %4.2f [%4.2f,%4.2f,%4.2f,%4.2f] | %s' % (\
rate, iter/1000, asterisk, epoch,
*valid_loss[:11],
*batch_loss[:6],
time_to_str((timer() - start),'min'))
, end='',flush=True)
i=i+1
# debug-----------------------------
if 1:
for di in range(3):
if (iter+di)%1000==0:
probability = torch.softmax(logit,1)
image = input_to_image(input, IMAGE_RGB_MEAN,IMAGE_RGB_STD)
probability = one_hot_encode_predict(probability)
truth_mask = one_hot_encode_truth(truth_mask)
probability_mask = probability.data.cpu().numpy()
truth_label = truth_label.data.cpu().numpy()
truth_mask = truth_mask.data.cpu().numpy()
for b in range(batch_size):
result = draw_predict_result(image[b], truth_mask[b], truth_label[b], probability_mask[b], stack='vertical')
image_show('result',result,resize=1)
cv2.imwrite(out_dir +'/train/%05d.png'%(di*100+b), result)
cv2.waitKey(1)
pass
pass #-- end of one data loader --
pass #-- end of all iterations --
log.write('\n')
# main #################################################################
if __name__ == '__main__':
print( '%s: calling main function ... ' % os.path.basename(__file__))
run_train()
| [
"os.path.exists",
"os.path.basename",
"os.makedirs"
]
| [((4727, 4772), 'os.makedirs', 'os.makedirs', (["(out_dir + '/' + f)"], {'exist_ok': '(True)'}), "(out_dir + '/' + f, exist_ok=True)\n", (4738, 4772), False, 'import os\n'), ((8236, 8269), 'os.path.exists', 'os.path.exists', (['initial_optimizer'], {}), '(initial_optimizer)\n', (8250, 8269), False, 'import os\n'), ((14267, 14293), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (14283, 14293), False, 'import os\n')] |
# (c) Copyright [2018-2021] Micro Focus or one of its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest, warnings
from verticapy import vDataFrame, drop_table
from verticapy import set_option
set_option("print_info", False)
@pytest.fixture(scope="module")
def titanic_vd(base):
from verticapy.learn.datasets import load_titanic
titanic = load_titanic(cursor=base.cursor)
yield titanic
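    # Teardown once the module's tests finish: drop the titanic table, capturing any warnings.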
with warnings.catch_warnings(record=True) as w:
drop_table(name="public.titanic", cursor=base.cursor)
class TestvDFCreate:
def test_creating_vDF_using_input_relation(self, base, titanic_vd):
tvdf = vDataFrame(input_relation="public.titanic", cursor=base.cursor)
assert tvdf["pclass"].count() == 1234
def test_creating_vDF_using_input_relation_schema(self, base, titanic_vd):
tvdf = vDataFrame(input_relation="titanic", schema="public", cursor=base.cursor)
assert tvdf["pclass"].count() == 1234
def test_creating_vDF_using_input_relation_vcolumns(self, base, titanic_vd):
tvdf = vDataFrame(
input_relation="public.titanic",
usecols=["age", "survived"],
cursor=base.cursor,
)
assert tvdf["survived"].count() == 1234
@pytest.mark.skip(reason="test not implemented")
def test_creating_vDF_using_input_relation_dsn(self):
pass
| [
"verticapy.vDataFrame",
"verticapy.learn.datasets.load_titanic",
"pytest.mark.skip",
"verticapy.set_option",
"warnings.catch_warnings",
"verticapy.drop_table",
"pytest.fixture"
]
| [((711, 742), 'verticapy.set_option', 'set_option', (['"""print_info"""', '(False)'], {}), "('print_info', False)\n", (721, 742), False, 'from verticapy import set_option\n'), ((746, 776), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (760, 776), False, 'import pytest, warnings\n'), ((868, 900), 'verticapy.learn.datasets.load_titanic', 'load_titanic', ([], {'cursor': 'base.cursor'}), '(cursor=base.cursor)\n', (880, 900), False, 'from verticapy.learn.datasets import load_titanic\n'), ((1762, 1809), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""test not implemented"""'}), "(reason='test not implemented')\n", (1778, 1809), False, 'import pytest, warnings\n'), ((928, 964), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (951, 964), False, 'import pytest, warnings\n'), ((979, 1032), 'verticapy.drop_table', 'drop_table', ([], {'name': '"""public.titanic"""', 'cursor': 'base.cursor'}), "(name='public.titanic', cursor=base.cursor)\n", (989, 1032), False, 'from verticapy import vDataFrame, drop_table\n'), ((1143, 1206), 'verticapy.vDataFrame', 'vDataFrame', ([], {'input_relation': '"""public.titanic"""', 'cursor': 'base.cursor'}), "(input_relation='public.titanic', cursor=base.cursor)\n", (1153, 1206), False, 'from verticapy import vDataFrame, drop_table\n'), ((1349, 1422), 'verticapy.vDataFrame', 'vDataFrame', ([], {'input_relation': '"""titanic"""', 'schema': '"""public"""', 'cursor': 'base.cursor'}), "(input_relation='titanic', schema='public', cursor=base.cursor)\n", (1359, 1422), False, 'from verticapy import vDataFrame, drop_table\n'), ((1567, 1663), 'verticapy.vDataFrame', 'vDataFrame', ([], {'input_relation': '"""public.titanic"""', 'usecols': "['age', 'survived']", 'cursor': 'base.cursor'}), "(input_relation='public.titanic', usecols=['age', 'survived'],\n cursor=base.cursor)\n", (1577, 1663), False, 'from verticapy import vDataFrame, drop_table\n')] |
#!/usr/bin/env python
#
# Copyright (C) 2007 British Broadcasting Corporation and Kamaelia Contributors(1)
# All Rights Reserved.
#
# You may only modify and redistribute this under the terms of any of the
# following licenses(2): Mozilla Public License, V1.1, GNU General
# Public License, V2.0, GNU Lesser General Public License, V2.1
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://kamaelia.sourceforge.net/AUTHORS - please extend this file,
# not this notice.
# (2) Reproduced in the COPYING file, and at:
# http://kamaelia.sourceforge.net/COPYING
# Under section 3.5 of the MPL, we are using this text since we deem the MPL
# notice inappropriate for this file. As per MPL/GPL/LGPL removal of this
# notice is prohibited.
#
# Please contact us via: <EMAIL>
# to discuss alternative licensing.
# -------------------------------------------------------------------------
#
"""\
=============================================
Parsing and Creation of YUV4MPEG format files
=============================================
YUV4MPEGToFrame parses YUV4MPEG format data sent to its "inbox" inbox and sends
video frame data structures to its "outbox" outbox.
FrameToYUV4MPEG does the reverse - taking frame data structures sent to its
"inbox" inbox and outputting YUV4MPEG format data to its "outbox" outbox.
The YUV4MPEG file format is supported by many tools, such as mjpegtools,
mplayer/mencoder, and ffmpeg.
Example Usage
-------------
Playback a YUV4MPEG format file::
Pipeline( RateControlledFileReader("video.yuv4mpeg",readmode="bytes", ...),
YUV4MPEGToFrame(),
VideoOverlay()
).run()
Decode a dirac encoded video file to a YUV4MPEG format file::
Pipeline( RateControlledFileReader("video.dirac",readmode="bytes", ...),
DiracDecoder(),
FrameToYUV4MPEG(),
SimpleFileWriter("output.yuv4mpeg")
).run()
YUV4MPEGToFrame Behaviour
-------------------------
Send binary data as strings containing YUV4MPEG format data to the "inbox" inbox
and frame data structures will be sent out of the "outbox" outbox as soon as
they are parsed.
See below for a description of the uncompressed frame data structure format.
This component supports sending data out of its outbox to a size limited inbox.
If the size limited inbox is full, this component will pause until it is able
to send out the data. Data will not be consumed from the inbox if this component
is waiting to send to the outbox.
If a producerFinished message is received on the "control" inbox, this component
will complete parsing any data pending in its inbox, and finish sending any
resulting data to its outbox. It will then send the producerFinished message on
out of its "signal" outbox and terminate.
If a shutdownMicroprocess message is received on the "control" inbox, this
component will immediately send it on out of its "signal" outbox and immediately
terminate. It will not complete processing, or sending on any pending data.
FrameToYUV4MPEG Behaviour
-------------------------
Send frame data structures to the "inbox" inbox of this component. YUV4MPEG
format binary string data will be sent out of the "outbox" outbox.
See below for a description of the uncompressed frame data structure format.
The header data for the YUV4MPEG file is determined from the first frame.
All frames sent to this component must therefore be in the same pixel format and
size, otherwise the output data will not be valid YUV4MPEG.
This component supports sending data out of its outbox to a size limited inbox.
If the size limited inbox is full, this component will pause until it is able
to send out the data. Data will not be consumed from the inbox if this component
is waiting to send to the outbox.
If a producerFinished message is received on the "control" inbox, this component
will complete parsing any data pending in its inbox, and finish sending any
resulting data to its outbox. It will then send the producerFinished message on
out of its "signal" outbox and terminate.
If a shutdownMicroprocess message is received on the "control" inbox, this
component will immediately send it on out of its "signal" outbox and immediately
terminate. It will not complete processing, or sending on any pending data.
=========================
UNCOMPRESSED FRAME FORMAT
=========================
A frame is a dictionary data structure. It must, at minimum, contain the first 3
("yuv", "size" and "pixformat")::
{
"yuv" : (y_data, u_data, v_data) # a tuple of strings
"size" : (width, height) # in pixels
"pixformat" : pixelformat # format of raw video data
"frame_rate" : fps # frames per second
"interlaced" : 0 or not 0 # non-zero if the frame is two interlaced fields
"topfieldfirst" : 0 or not 0 # non-zero the first field comes first in the data
"pixel_aspect" : fraction # aspect ratio of pixels
"sequence_meta" : metadata # string containing extended metadata
# (no whitespace or control characters)
}
All other fields are optional when providing frames to FrameToYUV4MPEG.
YUV4MPEGToFrame only guarantees to fill in the YUV data itself. All other fields
will be filled in if the relevant header data is detected in the file.
The pixel formats recognised (and therefore supported) are::
"YUV420_planar"
"YUV411_planar"
"YUV422_planar"
"YUV444_planar"
"YUV4444_planar"
"Y_planar"
"""
from Axon.Component import component
#from Axon.Ipc import WaitComplete
from Axon.Ipc import shutdownMicroprocess, producerFinished
from Axon.AxonExceptions import noSpaceInBox
import re
from Kamaelia.Support.Data.Rationals import rational
class YUV4MPEGToFrame(component):
"""\
YUV4MPEGToFrame() -> new YUV4MPEGToFrame component.
Parses YUV4MPEG format binarydata, sent as strings to its "inbox" inbox
and outputs uncompressed video frame data structures to its "outbox" outbox.
"""
def __init__(self):
"""x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
super(YUV4MPEGToFrame,self).__init__()
self.remainder = ""
self.shutdownMsg = None
def checkShutdown(self):
"""\
Collects any new shutdown messages arriving at the "control" inbox, and
returns "NOW" if immediate shutdown is required, or "WHENEVER" if the
component can shutdown when it has finished processing pending data.
"""
while self.dataReady("control"):
newMsg = self.recv("control")
if isinstance(newMsg, shutdownMicroprocess):
self.shutdownMsg = newMsg
elif self.shutdownMsg is None and isinstance(newMsg, producerFinished):
self.shutdownMsg = newMsg
if isinstance(self.shutdownMsg, shutdownMicroprocess):
return "NOW"
elif self.shutdownMsg is not None:
return "WHENEVER"
else:
return None
def readline(self):
"""\
Generator.
Read up to the next newline char from the stream of chunks of binary
string data arriving at the "inbox" inbox.
Any excess data is placed into self.remainder ready for the next call
to self.readline or self.readbytes.
Data is only read from the inbox when required. It is not preemptively
fetched.
The read data is placed into self.bytesread
If a shutdown is detected, self.bytesread is set to "" and this
generator immediately returns.
"""
bytes = []
newdata = self.remainder
index = newdata.find("\x0a")
while index==-1:
bytes.append(newdata)
while not self.dataReady("inbox"):
if self.checkShutdown():
self.bytesread=""
return
self.pause()
yield 1
newdata = self.recv("inbox")
index = newdata.find("\x0a")
tail = newdata[:index+1]
self.remainder = newdata[index+1:]
bytes.append(tail)
self.bytesread = "".join(bytes)
return
def readbytes(self,size):
"""\
Generator.
Read the specified number of bytes from the stream of chunks of binary
string data arriving at the "inbox" inbox.
Any excess data is placed into self.remainder ready for the next call
to self.readline or self.readbytes.
Data is only read from the inbox when required. It is not preemptively
fetched.
The read data is placed into self.bytesread
If a shutdown is detected, self.bytesread is set to "" and this
generator immediately returns.
"""
buf = [self.remainder]
bufsize = len(self.remainder)
while bufsize < size:
if self.dataReady("inbox"):
newdata = self.recv("inbox")
buf.append(newdata)
bufsize += len(newdata)
shutdown = self.checkShutdown()
if shutdown == "NOW" or (shutdown and not self.dataReady("inbox") and bufsize<size):
self.bytesread=""
return
if bufsize<size and not self.anyReady():
self.pause()
yield 1
excess = bufsize-size
if excess:
wanted = buf[:-1]
tail, self.remainder = buf[-1][:-excess], buf[-1][-excess:]
wanted.append(tail)
else:
wanted = buf
self.remainder = ""
self.bytesread = "".join(wanted)
return
def safesend(self, data, boxname):
"""\
Generator.
Sends data out of the named outbox. If the destination is full
(noSpaceInBox exception) then it waits until there is space and retries
until it succeeds.
If a shutdownMicroprocess message is received, returns early.
"""
while 1:
try:
self.send(data, boxname)
return
except noSpaceInBox:
if self.checkShutdown() == "NOW":
return
self.pause()
yield 1
def main(self):
"""Main loop"""
# parse header
for _ in self.readline(): yield _
if self.checkShutdown() == "NOW" or (self.checkShutdown() and self.bytesread==""):
self.send(self.shutdownMsg,"signal")
return
line = self.bytesread
m = re.match("^YUV4MPEG2((?: .\S*)*)\n$", line)
assert(m)
fields = m.groups()[0]
seq_params = parse_seq_tags(fields)
yield 1
while 1:
for _ in self.readline(): yield _
line = self.bytesread
if self.checkShutdown() == "NOW" or (self.checkShutdown() and self.bytesread==""):
break
m = re.match("^FRAME((?: .\S*)*)\n$", line)
assert(m)
fields = m.groups()[0]
frame_params = parse_frame_tags(fields)
ysize = seq_params["size"][0] * seq_params["size"][1]
csize = seq_params["chroma_size"][0] * seq_params["chroma_size"][1]
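            # Read the full-resolution Y plane, then the two subsampled chroma planes for this frame.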
for _ in self.readbytes(ysize): yield _
if self.checkShutdown() == "NOW" or (self.checkShutdown() and self.bytesread==""):
break
y = self.bytesread
for _ in self.readbytes(csize): yield _
if self.checkShutdown() == "NOW" or (self.checkShutdown() and self.bytesread==""):
break
u = self.bytesread
for _ in self.readbytes(csize): yield _
if self.checkShutdown() == "NOW" or (self.checkShutdown() and self.bytesread==""):
break
v = self.bytesread
frame = { "yuv" : (y,u,v) }
frame.update(seq_params)
frame.update(frame_params)
for _ in self.safesend(frame,"outbox"): yield _
if self.checkShutdown() == "NOW" or (self.checkShutdown() and not self.dataReady("inbox")):
break
yield 1
if self.shutdownMsg:
self.send(self.shutdownMsg, "signal")
else:
self.send(producerFinished(), "signal")
def parse_seq_tags(fields):
"""Parses YUV4MPEG header tags"""
params = {}
tags = {}
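    # Each header tag is a single letter followed by its value; collect them into a dict keyed by that letter.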
while fields:
m = re.match("^ (.)(\S*)(.*)$", fields)
(tag,value,fields) = m.groups()
tags[tag] = value
if "W" in tags and "H" in tags:
params['size'] = (int(tags["W"]), int(tags["H"]))
else:
raise
if "C" in tags:
C = tags["C"]
if C == "420jpeg": # 4:2:0 with JPEG/MPEG-1 siting (default)
params['pixformat'] = "YUV420_planar"
params['chroma_size'] = (params['size'][0]/2, params['size'][1]/2)
elif C == "420mpeg2": # 4:2:0 with MPEG-2 siting
params['pixformat'] = "YUV420_planar"
params['chroma_size'] = (params['size'][0]/2, params['size'][1]/2)
elif C == "420paldv": # 4:2:0 with PAL-DV siting
params['pixformat'] = "YUV420_planar"
params['chroma_size'] = (params['size'][0]/2, params['size'][1]/2)
elif C == "411": # 4:1:1, cosited
params['pixformat'] = "YUV411_planar"
params['chroma_size'] = (params['size'][0]/4, params['size'][1])
elif C == "422": # 4:2:2, cosited
params['pixformat'] = "YUV422_planar"
params['chroma_size'] = (params['size'][0]/2, params['size'][1])
elif C == "444": # 4:4:4 (no subsampling)
params['pixformat'] = "YUV444_planar"
params['chroma_size'] = (params['size'][0], params['size'][1])
elif C == "444alpha": # 4:4:4 with an alpha channel
params['pixformat'] = "YUV4444_planar"
params['chroma_size'] = (params['size'][0], params['size'][1])
elif C == "mono": # luma (Y') plane only
params['pixformat'] = "Y_planar"
params['chroma_size'] = (0,0)
else:
params['pixformat'] = "YUV420_planar"
params['chroma_size'] = (params['size'][0]/2, params['size'][1]/2)
if "I" in tags:
I = tags["I"]
if I == "?": # unknown (default)
pass
elif I == "p": # progressive/none
params["interlaced"] = False
elif I == "t": # top-field-first
params["interlaced"] = True
params["topfieldfirst"] = True
elif I == "b": # bottom-field-first
params["interlaced"] = True
params["topfieldfirst"] = False
elif I == "m": # mixed-mode: refer to 'I' tag in frame header
pass
if "F" in tags:
m = re.match("^(\d+):(\d+)$",tags["F"])
num, denom = float(m.groups()[0]), float(m.groups()[1])
if denom > 0:
params["frame_rate"] = num/denom
if "A" in tags:
m = re.match("^(\d+):(\d+)$",tags["A"])
num, denom = float(m.groups()[0]), float(m.groups()[1])
if denom > 0:
params["pixel_aspect"] = num/denom
if "X" in tags:
params["sequence_meta"] = tags["X"]
return params
def parse_frame_tags(fields):
"""\
Parses YUV4MPEG frame tags.
"""
params = {}
tags = {}
while fields:
m = re.match("^ (.)(\S*)(.*)$", fields)
(tag,value,fields) = m.groups()
tags[tag] = value
if "I" in tags:
x,y,z = tags["I"][0], tags["I"][1], tags["I"][2]
if x == "t": # top-field-first
params["interlaced"] = True
params["topfieldfirst"] = True
elif x == "T": # top-field-first and repeat
params["interlaced"] = True
params["topfieldfirst"] = True
elif x == "b": # bottom-field-first
params["interlaced"] = True
params["topfieldfirst"] = False
elif x == "B": # bottom-field-first and repeat
params["interlaced"] = True
params["topfieldfirst"] = False
elif x == "1": # single progressive frame
params["interlaced"] = False
elif x == "2": # double progressive frame (repeat)
params["interlaced"] = False
elif x == "3": # triple progressive frame (repeat)
params["interlaced"] = False
if y == "p": # fields sampled at same time
params["interlaced"] = False
elif y == "i": # fields sampled at different times
params["interlaced"] = True
if z == "p": # progressive (subsampling over whole frame)
pass
elif z == "i": # interlaced (each field subsampled independently)
pass
elif z == "?": # unknown (allowed only for non-4:2:0 subsampling)
pass
if "X" in tags:
params["meta"] = tags["X"]
return params
class FrameToYUV4MPEG(component):
"""\
FrameToYUV4MPEG() -> new FrameToYUV4MPEG component.
Parses uncompressed video frame data structures sent to its "inbox" inbox
and writes YUV4MPEG format binary data as strings to its "outbox" outbox.
"""
def checkShutdown(self):
"""\
Collects any new shutdown messages arriving at the "control" inbox, and
ensures self.shutdownMsg contains the highest priority one encountered
so far.
"""
while self.dataReady("control"):
msg = self.recv("control")
if isinstance(msg, producerFinished) and not isinstance(self.shutdownMsg,shutdownMicroprocess):
self.shutdownMsg = msg
elif isinstance(msg, shutdownMicroprocess):
self.shutdownMsg = msg
def canShutdown(self):
"""\
Returns true if the component should terminate when it has finished
processing any pending data.
"""
return isinstance(self.shutdownMsg, (producerFinished, shutdownMicroprocess))
def mustShutdown(self):
"""Returns true if the component should terminate immediately."""
return isinstance(self.shutdownMsg, shutdownMicroprocess)
def sendoutbox(self,data):
"""\
Generator.
Sends data out of the "outbox" outbox. If the destination is full
(noSpaceInBox exception) then it waits until there is space. It keeps
retrying until it succeeds.
If the component is ordered to immediately terminate then "STOP" is
raised as an exception.
"""
while 1:
try:
self.send(data,"outbox")
return
except noSpaceInBox:
self.checkShutdown()
if self.mustShutdown():
raise "STOP"
self.pause()
yield 1
self.checkShutdown()
if self.mustShutdown():
raise "STOP"
def main(self):
"""Main loop"""
self.shutdownMsg = None
try:
while not self.dataReady("inbox"):
self.checkShutdown()
if self.canShutdown():
raise "STOP"
self.pause()
yield 1
frame = self.recv("inbox")
for _ in self.write_header(frame):
yield _
for _ in self.write_frame(frame):
yield _
while 1:
while self.dataReady("inbox"):
frame = self.recv("inbox")
for _ in self.write_frame(frame):
yield _
self.checkShutdown()
if self.canShutdown():
raise "STOP"
self.pause()
yield 1
except "STOP":
self.send(self.shutdownMsg,"signal")
def write_header(self, frame):
"""\
Generator.
Sends the YUV4MPEG format header to the "outbox" outbox, based on
attributes of the supplied frame data structure.
"""
format = "YUV4MPEG2 W%d H%d" % tuple(frame['size'])
if frame['pixformat']=="YUV420_planar":
format += " C420mpeg2"
elif frame['pixformat']=="YUV411_planar":
format += " C411"
elif frame['pixformat']=="YUV422_planar":
format += " C422"
elif frame['pixformat']=="YUV444_planar":
format += " C444"
elif frame['pixformat']=="YUV4444_planar":
format += " C444alpha"
elif frame['pixformat']=="Y_planar":
format += " Cmono"
interlace = frame.get("interlaced",False)
topfieldfirst = frame.get("topfieldfirst",False)
if interlace and topfieldfirst:
format += " It"
elif interlace and not topfieldfirst:
format += " Ib"
elif not interlace:
format += " Ip"
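        # Frame rate and pixel aspect ratio are written as num:denom pairs via rational()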
rate = frame.get("frame_rate", 0)
if rate > 0:
num,denom = rational(rate)
format += " F%d:%d" % (num,denom)
rate = frame.get("pixel_aspect", 0)
if rate > 0:
num,denom = rational(rate)
format += " A%d:%d" % (num,denom)
if "sequence_meta" in frame:
format += " X"+frame['sequence_meta']
format += "\x0a"
for _ in self.sendoutbox(format):
yield _
def write_frame(self, frame):
"""\
Generator.
Writes out YUV4MPEG format frame marker and data.
"""
for _ in self.sendoutbox("FRAME\x0a"):
yield _
for component in frame['yuv']:
for _ in self.sendoutbox(component):
yield _
__kamaelia_components__ = ( YUV4MPEGToFrame, FrameToYUV4MPEG, )
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.File.Reading import RateControlledFileReader
from Kamaelia.UI.Pygame.VideoOverlay import VideoOverlay
Pipeline( RateControlledFileReader("/data/stream.yuv",readmode="bytes",rate=25*(608256+128)),
YUV4MPEGToFrame(),
FrameToYUV4MPEG(),
YUV4MPEGToFrame(),
VideoOverlay(),
).run()
| [
"re.match",
"Kamaelia.File.Reading.RateControlledFileReader",
"Kamaelia.Support.Data.Rationals.rational",
"Axon.Ipc.producerFinished",
"Kamaelia.UI.Pygame.VideoOverlay.VideoOverlay"
]
| [((10896, 10943), 're.match', 're.match', (['"""^YUV4MPEG2((?: .\\\\S*)*)\n$"""', 'line'], {}), '("""^YUV4MPEG2((?: .\\\\S*)*)\n$""", line)\n', (10904, 10943), False, 'import re\n'), ((12858, 12894), 're.match', 're.match', (['"""^ (.)(\\\\S*)(.*)$"""', 'fields'], {}), "('^ (.)(\\\\S*)(.*)$', fields)\n", (12866, 12894), False, 'import re\n'), ((15312, 15350), 're.match', 're.match', (['"""^(\\\\d+):(\\\\d+)$"""', "tags['F']"], {}), "('^(\\\\d+):(\\\\d+)$', tags['F'])\n", (15320, 15350), False, 'import re\n'), ((15516, 15554), 're.match', 're.match', (['"""^(\\\\d+):(\\\\d+)$"""', "tags['A']"], {}), "('^(\\\\d+):(\\\\d+)$', tags['A'])\n", (15524, 15554), False, 'import re\n'), ((15919, 15955), 're.match', 're.match', (['"""^ (.)(\\\\S*)(.*)$"""', 'fields'], {}), "('^ (.)(\\\\S*)(.*)$', fields)\n", (15927, 15955), False, 'import re\n'), ((11301, 11344), 're.match', 're.match', (['"""^FRAME((?: .\\\\S*)*)\n$"""', 'line'], {}), '("""^FRAME((?: .\\\\S*)*)\n$""", line)\n', (11309, 11344), False, 'import re\n'), ((21815, 21829), 'Kamaelia.Support.Data.Rationals.rational', 'rational', (['rate'], {}), '(rate)\n', (21823, 21829), False, 'from Kamaelia.Support.Data.Rationals import rational\n'), ((21978, 21992), 'Kamaelia.Support.Data.Rationals.rational', 'rational', (['rate'], {}), '(rate)\n', (21986, 21992), False, 'from Kamaelia.Support.Data.Rationals import rational\n'), ((12699, 12717), 'Axon.Ipc.producerFinished', 'producerFinished', ([], {}), '()\n', (12715, 12717), False, 'from Axon.Ipc import shutdownMicroprocess, producerFinished\n'), ((22872, 22965), 'Kamaelia.File.Reading.RateControlledFileReader', 'RateControlledFileReader', (['"""/data/stream.yuv"""'], {'readmode': '"""bytes"""', 'rate': '(25 * (608256 + 128))'}), "('/data/stream.yuv', readmode='bytes', rate=25 * (\n 608256 + 128))\n", (22896, 22965), False, 'from Kamaelia.File.Reading import RateControlledFileReader\n'), ((23069, 23083), 'Kamaelia.UI.Pygame.VideoOverlay.VideoOverlay', 'VideoOverlay', ([], {}), '()\n', (23081, 23083), False, 'from Kamaelia.UI.Pygame.VideoOverlay import VideoOverlay\n')] |
import click
from flask.cli import AppGroup
from project import app, db
from project.dateutils import berlin_tz
from project.services.event import (
get_recurring_events,
update_event_dates_with_recurrence_rule,
)
event_cli = AppGroup("event")
@event_cli.command("update-recurring-dates")
def update_recurring_dates():
    # Setting the timezone is necessary for the CLI command
db.session.execute("SET timezone TO :val;", {"val": berlin_tz.zone})
events = get_recurring_events()
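    # Recompute each recurring event's concrete dates from its recurrence rule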
for event in events:
update_event_dates_with_recurrence_rule(event)
db.session.commit()
click.echo(f"{len(events)} event(s) were updated.")
app.cli.add_command(event_cli)
| [
"project.db.session.execute",
"flask.cli.AppGroup",
"project.services.event.update_event_dates_with_recurrence_rule",
"project.services.event.get_recurring_events",
"project.app.cli.add_command",
"project.db.session.commit"
]
| [((236, 253), 'flask.cli.AppGroup', 'AppGroup', (['"""event"""'], {}), "('event')\n", (244, 253), False, 'from flask.cli import AppGroup\n'), ((666, 696), 'project.app.cli.add_command', 'app.cli.add_command', (['event_cli'], {}), '(event_cli)\n', (685, 696), False, 'from project import app, db\n'), ((392, 460), 'project.db.session.execute', 'db.session.execute', (['"""SET timezone TO :val;"""', "{'val': berlin_tz.zone}"], {}), "('SET timezone TO :val;', {'val': berlin_tz.zone})\n", (410, 460), False, 'from project import app, db\n'), ((475, 497), 'project.services.event.get_recurring_events', 'get_recurring_events', ([], {}), '()\n', (495, 497), False, 'from project.services.event import get_recurring_events, update_event_dates_with_recurrence_rule\n'), ((532, 578), 'project.services.event.update_event_dates_with_recurrence_rule', 'update_event_dates_with_recurrence_rule', (['event'], {}), '(event)\n', (571, 578), False, 'from project.services.event import get_recurring_events, update_event_dates_with_recurrence_rule\n'), ((587, 606), 'project.db.session.commit', 'db.session.commit', ([], {}), '()\n', (604, 606), False, 'from project import app, db\n')] |
import os
import re
import sys
import subprocess
import pytest
from testplan.common.utils.path import change_directory
import platform
ON_WINDOWS = platform.system() == 'Windows'
KNOWN_EXCEPTIONS = [
"TclError: Can't find a usable init\.tcl in the following directories:", # Matplotlib module improperly installed. Will skip Data Science example.
"ImportError: lib.*\.so\..+: cannot open shared object file: No such file or directory", # Matplotlib module improperly installed. Will skip Data Science example.
"ImportError: No module named sklearn.*", # Missing module sklearn. Will skip Data Science example.
"ImportError: No module named Tkinter", # Missing module Tkinter. Will skip Data Science example.
"ImportError: No module named _tkinter.*", # Missing module Tkinter. Will skip Data Science example.
"RuntimeError: Download pyfixmsg library .*", # Missing module pyfixmsg. Will skip FIX example.
"No spec file set\. You should download .*", # Missing FIX spec file. Will skip FIX example.
"AttributeError: 'module' object has no attribute 'poll'",
"RuntimeError: You need to compile test binary first." # Need to compile cpp binary first. Will skip GTest example.
]
SKIP_ON_WINDOWS = [
os.path.join('Cpp', 'GTest', 'test_plan.py'),
]
ROOT_DIR_CONTENTS = [
"setup.py",
"requirements.txt",
"README.rst",
"LICENSE.md"
]
def _depth_from_repo_root():
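    """Walk up from the current working directory until a directory containing
    all of ROOT_DIR_CONTENTS is found, returning the list of os.pardir steps
    needed to reach that repo root."""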
cwd = os.getcwd()
depth = []
while True:
contents = os.listdir(cwd)
if all([entry in contents for entry in ROOT_DIR_CONTENTS]):
return depth
parent_dir = os.path.dirname(cwd)
if os.path.realpath(cwd) == os.path.realpath(parent_dir):
raise RuntimeError('Could not find repo directory')
depth.append(os.pardir)
cwd = parent_dir
def _relative_dir(directory):
path_args = _depth_from_repo_root() + [directory]
return os.path.join(*path_args)
def _param_formatter(param):
if 'examples' in param:
return repr(param.rsplit('examples')[1])
return repr(param)
@pytest.mark.parametrize(
'root,filename',
[
(os.path.abspath(root), filename)
for root, _, files in os.walk(
_relative_dir(os.path.join('testplan', 'examples')))
for filename in files
if 'test_plan' in filename
],
ids=_param_formatter,
)
def test_example(root, filename):
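    """Run a single example test_plan.py in a subprocess, xfail on known
    environment issues and fail on any unexpected exception in its output."""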
file_path = os.path.join(root, filename)
if ON_WINDOWS and any(
[file_path.endswith(skip_name) for skip_name in SKIP_ON_WINDOWS]
):
pytest.skip()
with change_directory(root), open(filename) as file_obj:
file_obj.readline()
second_line = file_obj.readline()
try:
subprocess.check_output(
[sys.executable, filename],
stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as e:
out = e.output.decode()
for exception in KNOWN_EXCEPTIONS:
if re.search(exception, out):
pytest.xfail()
assert 'Exception in test_plan definition' not in out, \
'Exception raised in test_plan definition.'
assert 'Traceback (most recent call last):' not in out, \
'Exception raised during test:\n{}'.format(out)
assert \
('# This plan contains tests that demonstrate failures '
'as well.') == second_line.strip(), \
"Expected \'{}\' example to pass, it failed.\n{}".format(
file_path,
out
)
| [
"subprocess.check_output",
"os.listdir",
"os.path.join",
"os.getcwd",
"os.path.realpath",
"os.path.dirname",
"platform.system",
"testplan.common.utils.path.change_directory",
"os.path.abspath",
"pytest.skip",
"pytest.xfail",
"re.search"
]
| [((151, 168), 'platform.system', 'platform.system', ([], {}), '()\n', (166, 168), False, 'import platform\n'), ((1240, 1284), 'os.path.join', 'os.path.join', (['"""Cpp"""', '"""GTest"""', '"""test_plan.py"""'], {}), "('Cpp', 'GTest', 'test_plan.py')\n", (1252, 1284), False, 'import os\n'), ((1429, 1440), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1438, 1440), False, 'import os\n'), ((1926, 1950), 'os.path.join', 'os.path.join', (['*path_args'], {}), '(*path_args)\n', (1938, 1950), False, 'import os\n'), ((2433, 2461), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (2445, 2461), False, 'import os\n'), ((1491, 1506), 'os.listdir', 'os.listdir', (['cwd'], {}), '(cwd)\n', (1501, 1506), False, 'import os\n'), ((1621, 1641), 'os.path.dirname', 'os.path.dirname', (['cwd'], {}), '(cwd)\n', (1636, 1641), False, 'import os\n'), ((2578, 2591), 'pytest.skip', 'pytest.skip', ([], {}), '()\n', (2589, 2591), False, 'import pytest\n'), ((2602, 2624), 'testplan.common.utils.path.change_directory', 'change_directory', (['root'], {}), '(root)\n', (2618, 2624), False, 'from testplan.common.utils.path import change_directory\n'), ((1653, 1674), 'os.path.realpath', 'os.path.realpath', (['cwd'], {}), '(cwd)\n', (1669, 1674), False, 'import os\n'), ((1678, 1706), 'os.path.realpath', 'os.path.realpath', (['parent_dir'], {}), '(parent_dir)\n', (1694, 1706), False, 'import os\n'), ((2749, 2826), 'subprocess.check_output', 'subprocess.check_output', (['[sys.executable, filename]'], {'stderr': 'subprocess.STDOUT'}), '([sys.executable, filename], stderr=subprocess.STDOUT)\n', (2772, 2826), False, 'import subprocess\n'), ((2146, 2167), 'os.path.abspath', 'os.path.abspath', (['root'], {}), '(root)\n', (2161, 2167), False, 'import os\n'), ((3026, 3051), 're.search', 're.search', (['exception', 'out'], {}), '(exception, out)\n', (3035, 3051), False, 'import re\n'), ((2244, 2280), 'os.path.join', 'os.path.join', (['"""testplan"""', '"""examples"""'], {}), "('testplan', 'examples')\n", (2256, 2280), False, 'import os\n'), ((3073, 3087), 'pytest.xfail', 'pytest.xfail', ([], {}), '()\n', (3085, 3087), False, 'import pytest\n')] |
import datetime
import getpass
import logging
import os
import pathlib
import platform
import re
import smtplib
import sys
from contextlib import contextmanager
from email.message import EmailMessage
from functools import wraps
import azure.functions as func
import click
import gspread
import pandas as pd
from apscheduler.schedulers.background import BlockingScheduler
from oauth2client.service_account import ServiceAccountCredentials
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
@contextmanager
def get_driver(*args, **kwargs):
options = Options()
options.headless = True
options.add_argument("--window-size=1920,1200")
options.add_argument('--no-sandbox')
options.add_argument('--disable-dev-shm-usage')
options.add_argument('--disable-crash-reporter')
options.add_argument('--disable-logging')
options.add_argument('--log-level=3')
if platform.system() == 'Linux':
DRIVER_PATH = 'chromedriver'
elif platform.system() == "Darwin":
DRIVER_PATH = (pathlib.Path(__file__).parent.parent /
'chromedriver').resolve()
else:
log.error('Unsupported OS')
exit(0)
driver = webdriver.Chrome(options=options, executable_path=DRIVER_PATH)
yield driver
driver.close()
driver.quit()
def get_browser(func):
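    """Decorator that opens a headless Chrome driver via get_driver(), passes it
    to the wrapped function as the 'driver' keyword argument and disposes of it
    afterwards."""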
@wraps(func)
def wrapper(*args, **kwargs):
with get_driver() as d:
kwargs['driver'] = d
return func(*args, **kwargs)
return wrapper
@click.group()
@click.option('--email', is_flag=True, help='A flag for sending email with results.')
@click.option('--email_to', help='CSV of email addresses to send notification to.')
@click.option('--username', help='SMTP account username.')
@click.option('--gsheet', is_flag=True, help='A flag for updating google sheet with results')
@click.option('--doc_key', help='Google Doc Key to update')
@click.pass_context
def cli(ctx, email, email_to, username, gsheet, doc_key):
ctx.ensure_object(dict)
if email and (not username or not email_to):
log.error('Please provide email sending parameters')
exit(0)
elif email:
password = getpass.getpass(
"Please enter your google account password for sending email:\n")
ctx.obj['password'] = password
if gsheet and not doc_key:
log.error('Please provide a gsheet doc key')
exit(0)
pass
@cli.command('schedule')
@click.option('--hour', default='*/1', help='Cron hour expression')
@click.pass_context
def schedule(ctx, hour):
email = ctx.parent.params['email']
username = ctx.parent.params['username']
email_to = ctx.parent.params['email_to']
password = ctx.obj.get('password', None)
gsheet = ctx.parent.params['gsheet']
doc_key = ctx.parent.params['doc_key']
schedule = BlockingScheduler()
schedule.add_job(run, kwargs={"email": email, "gsheet": gsheet, "doc_key": doc_key,
"username": username, "email_to": email_to, "password": password}, trigger='cron', hour=hour)
try:
schedule.start()
except (KeyboardInterrupt, SystemExit):
schedule.shutdown()
@cli.command('run')
@click.pass_context
def once(ctx):
email = ctx.parent.params['email']
gsheet = ctx.parent.params['gsheet']
username = ctx.parent.params['username']
email_to = ctx.parent.params['email_to']
password = ctx.obj.get('password', None)
doc_key = ctx.parent.params['doc_key']
run(email, username, email_to, password, gsheet, doc_key)
def run(email, username, email_to, password, gsheet, doc_key):
log.info('In run')
content = []
for link in os.environ["searchLinks"].split():
content += get_prometheus_apartments(link)
formatted_content = format_email(content)
if gsheet:
log.info('Updating gsheet')
update_historical_data(doc_key, content)
formatted_content += f'For historical data click the link below:\nhttps://docs.google.com/spreadsheets/d/1XZocxmyQ91e1exBvwDAaSR8Rhavy9WPnwLSz0Z5SKsM/edit?usp=sharing'
if email:
log.info('Sending email')
send_email(username, password, email_to, formatted_content)
log.info(content)
@get_browser
def get_prometheus_apartments(url, driver):
driver.get(url)
content = []
log.info(f'Getting apartments: {url}')
try:
anchors = driver.find_elements_by_xpath(
"//div[@id='results-cards']/div/a[@class='card-wrapper']")
except Exception as e:
log.exception(f'{e}')
return content
links = [a.get_attribute('href') for a in anchors]
apartments = []
for apt in links:
name = apt.strip('/').split('/')[-1]
apartments.append({'name': name, 'url': f'{apt}lease'})
    # Scrape each apartment (parallel Pool version kept below for reference)
for apt in apartments:
results = get_availability(apt)
if results:
content.append(results)
# with Pool() as pool:
# results = [pool.apply_async(get_availability, args=(apt,)) for apt in apartments]
# for result in results:
# data = result.get()
# if data:
# content.append(data)
return content
def update_historical_data(doc_key, content):
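    # Flatten the scraped rows, prefix each with today's date and the complex
    # name, strip currency formatting, then push everything to the Google Sheet.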
date = datetime.datetime.today().strftime('%Y-%m-%d')
all_content = []
for apt in content:
complex = apt['meta']['name']
data = apt['data']
for row in data:
cleaned_values = [f'{date}', f'{complex}'] + \
[value.replace('$', '').replace(',', '') for value in row]
all_content.append(cleaned_values)
update_gdoc(doc_key, all_content)
def format_email(content):
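    # Build a plain-text summary per complex; only 'mansion-grove' is currently
    # reported because of the early `continue` below.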
result = ''
for apt in content:
complex = apt['meta']['name']
data = apt['data']
if complex != 'mansion-grove':
continue
result += f'------------ {complex} ----------------\n'
total_available = sum(int(row[-1]) for row in data)
result += '\n'.join(', '.join(row) for row in data)
result += f'\nTotal Available: {total_available}\n'
return result
@get_browser
def get_availability(data, driver):
"""
Returns apartment availability information
"""
url = data['url']
content = []
log.info(f'Processing {url}')
driver.get(url)
delay = 60 # seconds
try:
WebDriverWait(driver, delay).until(
EC.frame_to_be_available_and_switch_to_it('rp-leasing-widget'))
WebDriverWait(driver, delay).until(EC.presence_of_element_located(
(By.XPATH, "//button[contains(@class, 'primary')][contains(text(), 'Start')]")))
except TimeoutException:
log.info(f'Page did not load: {url}')
return content
try:
driver.find_element_by_xpath(
"//button[contains(@class, 'primary')][contains(text(), 'Start')]").click()
WebDriverWait(driver, delay).until(
EC.presence_of_element_located((By.XPATH, "//div[contains(@class, 'floorplan-tile')]/div/span[contains(@class, 'name')]")))
# Print plan prices
names = [n.text for n in driver.find_elements_by_xpath(
"//div[contains(@class, 'floorplan-tile')]/div/span[contains(@class, 'name')]")]
specs = [n.text for n in driver.find_elements_by_xpath(
"//div[contains(@class, 'floorplan-tile')]/div/span[contains(@class, 'specs')]")]
prices = [n.text for n in driver.find_elements_by_xpath(
"//div[contains(@class, 'floorplan-tile')]/div/span[contains(@class, 'range')]")]
availability = [n.text for n in driver.find_elements_by_xpath(
"//div[contains(@class, 'floorplan-tile')]/div[@class='tile-buttons']/button")]
except Exception:
log.exception(f'Unable to parse {url}')
return content
for i in range(len(names)):
match = re.match(
r'\((\d+)\).*', availability[i]) if len(availability) > i else None
units = int(match.groups()[0]) if match else '0'
match = re.match(
r'(\$\d*)( \- \$\d*\*)*', prices[i].split(' - ')[0].replace(',', '').replace('From ', '')) if len(prices) > i else None
min_price = match.groups()[0] if match else '$0'
content.append((names[i], specs[i], min_price, str(units)))
return {'meta': data, 'data': content}
def send_email(username, password, to, content):
if not content:
log.info('Nothing to send')
return
msg = EmailMessage()
msg.set_content(content)
msg['Subject'] = f'Apartment availability'
msg['From'] = username
msg['To'] = to
# Send the message via our own SMTP server.
s = smtplib.SMTP_SSL('smtp.gmail.com', 465)
s.login(username, password)
s.send_message(msg)
s.quit()
def update_gdoc(doc_key, cells):
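    # Merge the new rows with the sheet's existing contents, keep the minimum
    # value per (Date, Complex, Plan, Specs) key, and rewrite the whole sheet.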
scope = [
"https://spreadsheets.google.com/feeds",
"https://www.googleapis.com/auth/drive",
]
CREDENTIALS_PATH = pathlib.Path(__file__).parent.parent / 'credentials.json'
credentials = ServiceAccountCredentials.from_json_keyfile_name(
CREDENTIALS_PATH.resolve(), scope,
)
docs = gspread.authorize(credentials)
sheet = docs.open_by_key(doc_key).sheet1
new = pd.DataFrame(cells)
new.columns = ['Date', 'Complex', 'Plan', 'Specs', 'Price', 'Availability']
existing = pd.DataFrame(sheet.get_all_values()[1:])
if existing.size:
existing.columns = ['Date', 'Complex',
'Plan', 'Specs', 'Price', 'Availability']
updated = existing.append(new)
updated = updated.groupby(['Date', 'Complex', 'Plan', 'Specs']).min()
updated.reset_index(inplace=True)
sheet.update([updated.columns.values.tolist()] +
updated.values.tolist(), value_input_option='USER_ENTERED')
if __name__ == '__main__':
cli()
def azurefunc(PrometheusScrapper: func.TimerRequest) -> None:
email = os.environ["SendEmail"]
email_to = os.environ["EmailTo"]
username = os.environ["GmailUsername"]
password = os.environ["GmailPassword"]
gsheet = os.environ["UpdateGSheet"]
doc_key = os.environ["GSheetKey"]
run(email, username, email_to, password, gsheet, doc_key)
| [
"logging.getLogger",
"selenium.webdriver.chrome.options.Options",
"logging.StreamHandler",
"gspread.authorize",
"smtplib.SMTP_SSL",
"datetime.datetime.today",
"email.message.EmailMessage",
"pathlib.Path",
"click.group",
"click.option",
"functools.wraps",
"getpass.getpass",
"platform.system",
"selenium.webdriver.support.expected_conditions.frame_to_be_available_and_switch_to_it",
"pandas.DataFrame",
"selenium.webdriver.support.ui.WebDriverWait",
"re.match",
"apscheduler.schedulers.background.BlockingScheduler",
"azure.functions",
"logging.Formatter",
"selenium.webdriver.Chrome",
"selenium.webdriver.support.expected_conditions.presence_of_element_located"
]
| [((752, 779), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (769, 779), False, 'import logging\n'), ((819, 852), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (840, 852), False, 'import logging\n'), ((897, 959), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(levelname)s - %(message)s')\n", (914, 959), False, 'import logging\n'), ((2027, 2040), 'click.group', 'click.group', ([], {}), '()\n', (2038, 2040), False, 'import click\n'), ((2042, 2131), 'click.option', 'click.option', (['"""--email"""'], {'is_flag': '(True)', 'help': '"""A flag for sending email with results."""'}), "('--email', is_flag=True, help=\n 'A flag for sending email with results.')\n", (2054, 2131), False, 'import click\n'), ((2128, 2215), 'click.option', 'click.option', (['"""--email_to"""'], {'help': '"""CSV of email addresses to send notification to."""'}), "('--email_to', help=\n 'CSV of email addresses to send notification to.')\n", (2140, 2215), False, 'import click\n'), ((2212, 2269), 'click.option', 'click.option', (['"""--username"""'], {'help': '"""SMTP account username."""'}), "('--username', help='SMTP account username.')\n", (2224, 2269), False, 'import click\n'), ((2271, 2368), 'click.option', 'click.option', (['"""--gsheet"""'], {'is_flag': '(True)', 'help': '"""A flag for updating google sheet with results"""'}), "('--gsheet', is_flag=True, help=\n 'A flag for updating google sheet with results')\n", (2283, 2368), False, 'import click\n'), ((2365, 2423), 'click.option', 'click.option', (['"""--doc_key"""'], {'help': '"""Google Doc Key to update"""'}), "('--doc_key', help='Google Doc Key to update')\n", (2377, 2423), False, 'import click\n'), ((2962, 3028), 'click.option', 'click.option', (['"""--hour"""'], {'default': '"""*/1"""', 'help': '"""Cron hour expression"""'}), "('--hour', default='*/1', help='Cron hour expression')\n", (2974, 3028), False, 'import click\n'), ((1081, 1090), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (1088, 1090), False, 'from selenium.webdriver.chrome.options import Options\n'), ((1706, 1768), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'options': 'options', 'executable_path': 'DRIVER_PATH'}), '(options=options, executable_path=DRIVER_PATH)\n', (1722, 1768), False, 'from selenium import webdriver\n'), ((1853, 1864), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (1858, 1864), False, 'from functools import wraps\n'), ((3348, 3367), 'apscheduler.schedulers.background.BlockingScheduler', 'BlockingScheduler', ([], {}), '()\n', (3365, 3367), False, 'from apscheduler.schedulers.background import BlockingScheduler\n'), ((9001, 9015), 'email.message.EmailMessage', 'EmailMessage', ([], {}), '()\n', (9013, 9015), False, 'from email.message import EmailMessage\n'), ((9195, 9234), 'smtplib.SMTP_SSL', 'smtplib.SMTP_SSL', (['"""smtp.gmail.com"""', '(465)'], {}), "('smtp.gmail.com', 465)\n", (9211, 9234), False, 'import smtplib\n'), ((9668, 9698), 'gspread.authorize', 'gspread.authorize', (['credentials'], {}), '(credentials)\n', (9685, 9698), False, 'import gspread\n'), ((9754, 9773), 'pandas.DataFrame', 'pd.DataFrame', (['cells'], {}), '(cells)\n', (9766, 9773), True, 'import pandas as pd\n'), ((1412, 1429), 'platform.system', 'platform.system', ([], {}), '()\n', (1427, 1429), False, 'import platform\n'), ((1488, 1505), 'platform.system', 'platform.system', ([], {}), '()\n', (1503, 1505), False, 
'import platform\n'), ((1983, 2004), 'azure.functions', 'func', (['*args'], {}), '(*args, **kwargs)\n', (1987, 2004), True, 'import azure.functions as func\n'), ((2691, 2777), 'getpass.getpass', 'getpass.getpass', (['"""Please enter your google account password for sending email:\n"""'], {}), "(\n 'Please enter your google account password for sending email:\\n')\n", (2706, 2777), False, 'import getpass\n'), ((5783, 5808), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (5806, 5808), False, 'import datetime\n'), ((6941, 7003), 'selenium.webdriver.support.expected_conditions.frame_to_be_available_and_switch_to_it', 'EC.frame_to_be_available_and_switch_to_it', (['"""rp-leasing-widget"""'], {}), "('rp-leasing-widget')\n", (6982, 7003), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((7048, 7162), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (['(By.XPATH, "//button[contains(@class, \'primary\')][contains(text(), \'Start\')]")'], {}), '((By.XPATH,\n "//button[contains(@class, \'primary\')][contains(text(), \'Start\')]"))\n', (7078, 7162), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((7464, 7595), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (['(By.XPATH,\n "//div[contains(@class, \'floorplan-tile\')]/div/span[contains(@class, \'name\')]"\n )'], {}), '((By.XPATH,\n "//div[contains(@class, \'floorplan-tile\')]/div/span[contains(@class, \'name\')]"\n ))\n', (7494, 7595), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((8396, 8439), 're.match', 're.match', (['"""\\\\((\\\\d+)\\\\).*"""', 'availability[i]'], {}), "('\\\\((\\\\d+)\\\\).*', availability[i])\n", (8404, 8439), False, 'import re\n'), ((6893, 6921), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', 'delay'], {}), '(driver, delay)\n', (6906, 6921), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((7013, 7041), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', 'delay'], {}), '(driver, delay)\n', (7026, 7041), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((7416, 7444), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', 'delay'], {}), '(driver, delay)\n', (7429, 7444), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((9481, 9503), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (9493, 9503), False, 'import pathlib\n'), ((1542, 1564), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1554, 1564), False, 'import pathlib\n')] |
# -*- coding: utf-8 -*-
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Demo CLI tool for Azure."""
import os
from datetime import datetime
from typing import TYPE_CHECKING
from Crypto.PublicKey import RSA
from libcloudforensics import logging_utils
from libcloudforensics.providers.azure.internal import account
from libcloudforensics.providers.azure.internal import monitoring
from libcloudforensics.providers.azure import forensics
logging_utils.SetUpLogger(__name__)
logger = logging_utils.GetLogger(__name__)
if TYPE_CHECKING:
import argparse
def ListInstances(args: 'argparse.Namespace') -> None:
"""List instances in Azure subscription.
Args:
args (argparse.Namespace): Arguments from ArgumentParser.
"""
az_account = account.AZAccount(args.default_resource_group_name)
instances = az_account.compute.ListInstances(
resource_group_name=args.resource_group_name)
logger.info('Instances found:')
for instance in instances.values():
boot_disk = instance.GetBootDisk()
logger.info(
'Name: {0:s}, Boot disk: {1:s}'.format(instance.name, boot_disk.name))
def ListDisks(args: 'argparse.Namespace') -> None:
"""List disks in Azure subscription.
Args:
args (argparse.Namespace): Arguments from ArgumentParser.
"""
az_account = account.AZAccount(args.default_resource_group_name)
disks = az_account.compute.ListDisks(
resource_group_name=args.resource_group_name)
logger.info('Disks found:')
for disk_name, disk in disks.items():
logger.info('Name: {0:s}, Region: {1:s}'.format(disk_name, disk.region))
def CreateDiskCopy(args: 'argparse.Namespace') -> None:
"""Create an Azure disk copy.
Args:
args (argparse.Namespace): Arguments from ArgumentParser.
"""
logger.info('Starting disk copy...')
disk_copy = forensics.CreateDiskCopy(args.default_resource_group_name,
instance_name=args.instance_name,
disk_name=args.disk_name,
disk_type=args.disk_type,
region=args.region,
src_profile=args.src_profile,
dst_profile=args.dst_profile)
logger.info(
'Done! Disk {0:s} successfully created. You will find it in '
'your Azure subscription under the name {1:s}.'.format(
disk_copy.resource_id, disk_copy.name))
def StartAnalysisVm(args: 'argparse.Namespace') -> None:
"""Start forensic analysis VM.
Args:
args (argparse.Namespace): Arguments from ArgumentParser.
"""
attach_disks = []
if args.attach_disks:
attach_disks = args.attach_disks.split(',')
# Check if attach_disks parameter exists and if there
# are any empty entries.
if not (attach_disks and all(elements for elements in attach_disks)):
logger.error('error: parameter --attach_disks: {0:s}'.format(
args.attach_disks))
return
ssh_public_key = args.ssh_public_key
if not ssh_public_key:
# According to https://docs.microsoft.com/cs-cz/samples/azure-samples/
# resource-manager-python-template-deployment/resource-manager-python-
# template-deployment/ there's no API to generate a new SSH key pair in
# Azure, so we do this manually...
ssh_public_key = _GenerateSSHKeyPair(args.instance_name)
logger.info('Starting analysis VM...')
vm = forensics.StartAnalysisVm(args.default_resource_group_name,
args.instance_name,
int(args.disk_size),
ssh_public_key,
cpu_cores=int(args.cpu_cores),
memory_in_mb=int(args.memory_in_mb),
region=args.region,
attach_disks=attach_disks,
dst_profile=args.dst_profile)
logger.info('Analysis VM started.')
logger.info('Name: {0:s}, Started: {1:s}'.format(vm[0].name, str(vm[1])))
def _GenerateSSHKeyPair(vm_name: str) -> str:
"""Generate a SSH key pair and returns its public key.
Both public and private keys will be saved in the current directory.
Args:
vm_name (str): The VM name for which to generate the key pair.
Returns:
str: The public key for the generated SSH key pair.
Raises:
ValueError: If vm_name is None.
"""
if not vm_name:
raise ValueError('Parameter vm_name must not be None.')
logger.info('Generating a new SSH key pair for VM: {0:s}'.format(vm_name))
key = RSA.generate(2048)
key_name = '{0:s}-ssh'.format(vm_name)
public_key = key.publickey().exportKey('OpenSSH')
path_public_key = os.path.join(os.getcwd(), key_name + '.pub')
private_key = key.exportKey('PEM')
path_private_key = os.path.join(os.getcwd(), key_name + '.pem')
with open(path_private_key, 'wb') as f:
f.write(private_key)
with open(path_public_key, 'wb') as f:
f.write(public_key)
logger.info('SSH key pair generated. Public key saved in {0:s}, private key '
'saved in {1:s}'.format(path_public_key, path_private_key))
return public_key.decode('utf-8')
def ListMetrics(args: 'argparse.Namespace') -> None:
"""List Azure Monitoring metrics for a resource.
Args:
args (argparse.Namespace): Arguments from ArgumentParser.
"""
az_account = account.AZAccount(args.default_resource_group_name)
az_monitoring = monitoring.AZMonitoring(az_account)
metrics = az_monitoring.ListAvailableMetricsForResource(args.resource_id)
for metric in metrics:
logger.info('Available metric: {0:s}'.format(metric))
def QueryMetrics(args: 'argparse.Namespace') -> None:
"""Query Azure Monitoring metrics for a resource.
Args:
args (argparse.Namespace): Arguments from ArgumentParser.
Raises:
RuntimeError: If from_date or to_date could not be parsed.
"""
az_account = account.AZAccount(args.default_resource_group_name)
az_monitoring = monitoring.AZMonitoring(az_account)
from_date, to_date = args.from_date, args.to_date
if from_date and to_date:
try:
from_date = datetime.strptime(from_date, '%Y-%m-%dT%H:%M:%SZ')
to_date = datetime.strptime(to_date, '%Y-%m-%dT%H:%M:%SZ')
except ValueError as exception:
raise RuntimeError(
'Cannot parse date: {0!s}'.format(exception)) from exception
metrics = az_monitoring.GetMetricsForResource(
args.resource_id,
metrics=args.metrics,
from_date=from_date,
to_date=to_date,
interval=args.interval,
aggregation=args.aggregation or 'Total',
qfilter=args.qfilter)
for metric, metric_value in metrics.items():
logger.info('Metric: {0:s}'.format(metric))
for timestamp, value in metric_value.items():
logger.info(' Timestamp: {0:s}, value: {1:s}'.format(timestamp, value))
| [
"datetime.datetime.strptime",
"libcloudforensics.providers.azure.forensics.CreateDiskCopy",
"Crypto.PublicKey.RSA.generate",
"os.getcwd",
"libcloudforensics.providers.azure.internal.account.AZAccount",
"libcloudforensics.logging_utils.GetLogger",
"libcloudforensics.logging_utils.SetUpLogger",
"libcloudforensics.providers.azure.internal.monitoring.AZMonitoring"
]
| [((969, 1004), 'libcloudforensics.logging_utils.SetUpLogger', 'logging_utils.SetUpLogger', (['__name__'], {}), '(__name__)\n', (994, 1004), False, 'from libcloudforensics import logging_utils\n'), ((1014, 1047), 'libcloudforensics.logging_utils.GetLogger', 'logging_utils.GetLogger', (['__name__'], {}), '(__name__)\n', (1037, 1047), False, 'from libcloudforensics import logging_utils\n'), ((1278, 1329), 'libcloudforensics.providers.azure.internal.account.AZAccount', 'account.AZAccount', (['args.default_resource_group_name'], {}), '(args.default_resource_group_name)\n', (1295, 1329), False, 'from libcloudforensics.providers.azure.internal import account\n'), ((1823, 1874), 'libcloudforensics.providers.azure.internal.account.AZAccount', 'account.AZAccount', (['args.default_resource_group_name'], {}), '(args.default_resource_group_name)\n', (1840, 1874), False, 'from libcloudforensics.providers.azure.internal import account\n'), ((2335, 2573), 'libcloudforensics.providers.azure.forensics.CreateDiskCopy', 'forensics.CreateDiskCopy', (['args.default_resource_group_name'], {'instance_name': 'args.instance_name', 'disk_name': 'args.disk_name', 'disk_type': 'args.disk_type', 'region': 'args.region', 'src_profile': 'args.src_profile', 'dst_profile': 'args.dst_profile'}), '(args.default_resource_group_name, instance_name=\n args.instance_name, disk_name=args.disk_name, disk_type=args.disk_type,\n region=args.region, src_profile=args.src_profile, dst_profile=args.\n dst_profile)\n', (2359, 2573), False, 'from libcloudforensics.providers.azure import forensics\n'), ((5142, 5160), 'Crypto.PublicKey.RSA.generate', 'RSA.generate', (['(2048)'], {}), '(2048)\n', (5154, 5160), False, 'from Crypto.PublicKey import RSA\n'), ((5947, 5998), 'libcloudforensics.providers.azure.internal.account.AZAccount', 'account.AZAccount', (['args.default_resource_group_name'], {}), '(args.default_resource_group_name)\n', (5964, 5998), False, 'from libcloudforensics.providers.azure.internal import account\n'), ((6017, 6052), 'libcloudforensics.providers.azure.internal.monitoring.AZMonitoring', 'monitoring.AZMonitoring', (['az_account'], {}), '(az_account)\n', (6040, 6052), False, 'from libcloudforensics.providers.azure.internal import monitoring\n'), ((6486, 6537), 'libcloudforensics.providers.azure.internal.account.AZAccount', 'account.AZAccount', (['args.default_resource_group_name'], {}), '(args.default_resource_group_name)\n', (6503, 6537), False, 'from libcloudforensics.providers.azure.internal import account\n'), ((6556, 6591), 'libcloudforensics.providers.azure.internal.monitoring.AZMonitoring', 'monitoring.AZMonitoring', (['az_account'], {}), '(az_account)\n', (6579, 6591), False, 'from libcloudforensics.providers.azure.internal import monitoring\n'), ((5288, 5299), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5297, 5299), False, 'import os\n'), ((5392, 5403), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5401, 5403), False, 'import os\n'), ((6699, 6749), 'datetime.datetime.strptime', 'datetime.strptime', (['from_date', '"""%Y-%m-%dT%H:%M:%SZ"""'], {}), "(from_date, '%Y-%m-%dT%H:%M:%SZ')\n", (6716, 6749), False, 'from datetime import datetime\n'), ((6766, 6814), 'datetime.datetime.strptime', 'datetime.strptime', (['to_date', '"""%Y-%m-%dT%H:%M:%SZ"""'], {}), "(to_date, '%Y-%m-%dT%H:%M:%SZ')\n", (6783, 6814), False, 'from datetime import datetime\n')] |
import torch.nn as nn
import torch.nn.functional as F
from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg,PointNetFeaturePropagation
class get_shared_model(nn.Module):
def __init__(self, use_batch_norm, num_classes, num_input_channel=7):
super(get_shared_model, self).__init__()
self.sa1 = PointNetSetAbstractionMsg(1024, [0.05, 0.1], [16, 32], num_input_channel, [[16, 16, 32], [32, 32, 64]], use_batch_norm=use_batch_norm)
self.sa2 = PointNetSetAbstractionMsg(256, [0.1, 0.2], [16, 32], 32+64, [[64, 64, 128], [64, 96, 128]], use_batch_norm=use_batch_norm)
self.sa3 = PointNetSetAbstractionMsg(64, [0.2, 0.4], [16, 32], 128+128, [[128, 196, 256], [128, 196, 256]], use_batch_norm=use_batch_norm)
self.sa4 = PointNetSetAbstractionMsg(16, [0.4, 0.8], [16, 32], 256+256, [[256, 256, 512], [256, 384, 512]], use_batch_norm=use_batch_norm)
self.fp4 = PointNetFeaturePropagation(512+512+256+256, [256, 256], use_batch_norm=use_batch_norm)
self.fp3 = PointNetFeaturePropagation(128+128+256, [256, 256], use_batch_norm=use_batch_norm)
self.fp2 = PointNetFeaturePropagation(32+64+256, [256, 128], use_batch_norm=use_batch_norm)
self.fp1 = PointNetFeaturePropagation(128, [128, 128, 128], use_batch_norm=use_batch_norm)
self.conv1 = nn.Conv1d(128, 128, 1)
if use_batch_norm:
self.bn1 = nn.BatchNorm1d(128)
self.drop1 = nn.Dropout(0.5)
self.conv2 = nn.Conv1d(128, num_classes, 1)
# for normal prediction
self.conv_normal = nn.Conv1d(128, 3, 1)
# for force prediction
self.conv_force = nn.Conv1d(128, 1, 1)
self.use_batch_norm = use_batch_norm
def forward(self, xyz):
l0_points = xyz
l0_xyz = xyz[:,:3,:]
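        # Encoder: multi-scale set-abstraction layers downsample the cloud, then
        # feature-propagation layers interpolate features back to every input point.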
l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
l4_xyz, l4_points = self.sa4(l3_xyz, l3_points)
l3_points = self.fp4(l3_xyz, l4_xyz, l3_points, l4_points)
l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
if self.use_batch_norm:
x = self.drop1(F.relu(self.bn1(self.conv1(l0_points))))
else:
x = F.relu(self.conv1(l0_points))
contact = self.conv2(x)
normal = self.conv_normal(x)
normal = F.normalize(normal, dim=1)
force = self.conv_force(x)
# this is not needed with BCElogit loss
# x = F.log_softmax(x, dim=1)
contact = contact.permute(0, 2, 1)
normal = normal.permute(0, 2, 1)
force = force.permute(0, 2, 1)
return (contact, normal, force), l4_points
class get_model(nn.Module):
def __init__(self, use_batch_norm, num_out_channel, num_in_channel=7, target='contact',
radius_list=[[0.05, 0.1], [0.1, 0.2], [0.2, 0.4], [0.4, 0.8]],
npoint_list=[1024, 256, 64, 16],
sample_point_1_list=[16, 16, 16, 16],
sample_point_2_list=[32, 32, 32, 32],
layer=4,
downsample=True,
dropout=True,
track_running_stats=True,
mlp1_size=[16, 16, 32],
mlp2_size=[32, 32, 64],
interpolation_mlp_size=[128, 128, 128]
):
print("using layer: ", layer)
super(get_model, self).__init__()
self.layer = layer
if self.layer == 4:
self.sa1 = PointNetSetAbstractionMsg(npoint_list[0], radius_list[0], [sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [[16, 16, 32], [32, 32, 64]], use_batch_norm=use_batch_norm)
self.sa2 = PointNetSetAbstractionMsg(npoint_list[1], radius_list[1], [sample_point_1_list[1], sample_point_2_list[1]], 32+64, [[64, 64, 128], [64, 96, 128]], use_batch_norm=use_batch_norm)
self.sa3 = PointNetSetAbstractionMsg(npoint_list[2], radius_list[2], [sample_point_1_list[2], sample_point_2_list[2]], 128+128, [[128, 196, 256], [128, 196, 256]], use_batch_norm=use_batch_norm)
self.sa4 = PointNetSetAbstractionMsg(npoint_list[3], radius_list[3], [sample_point_1_list[3], sample_point_2_list[3]], 256+256, [[256, 256, 512], [256, 384, 512]], use_batch_norm=use_batch_norm)
self.fp4 = PointNetFeaturePropagation(512+512+256+256, [256, 256], use_batch_norm=use_batch_norm)
self.fp3 = PointNetFeaturePropagation(128+128+256, [256, 256], use_batch_norm=use_batch_norm)
self.fp2 = PointNetFeaturePropagation(32+64+256, [256, 128], use_batch_norm=use_batch_norm)
self.fp1 = PointNetFeaturePropagation(128, [128, 128, 128], use_batch_norm=use_batch_norm)
elif self.layer == 3:
self.sa1 = PointNetSetAbstractionMsg(npoint_list[0], radius_list[0], [sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [[16, 16, 32], [32, 32, 64]], use_batch_norm=use_batch_norm)
self.sa2 = PointNetSetAbstractionMsg(npoint_list[1], radius_list[1], [sample_point_1_list[1], sample_point_2_list[1]], 32+64, [[64, 64, 128], [64, 96, 128]], use_batch_norm=use_batch_norm)
self.sa3 = PointNetSetAbstractionMsg(npoint_list[2], radius_list[2], [sample_point_1_list[2], sample_point_2_list[2]], 128+128, [[128, 196, 256], [128, 196, 256]], use_batch_norm=use_batch_norm)
self.fp3 = PointNetFeaturePropagation(128+128+256+256, [256, 256], use_batch_norm=use_batch_norm)
self.fp2 = PointNetFeaturePropagation(32+64+256, [256, 128], use_batch_norm=use_batch_norm)
self.fp1 = PointNetFeaturePropagation(128, [128, 128, 128], use_batch_norm=use_batch_norm)
elif self.layer == 2:
self.sa1 = PointNetSetAbstractionMsg(npoint_list[0], radius_list[0], [sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [[16, 16, 32], [32, 32, 64]], use_batch_norm=use_batch_norm)
self.sa2 = PointNetSetAbstractionMsg(npoint_list[1], radius_list[1], [sample_point_1_list[1], sample_point_2_list[1]], 32+64, [[64, 64, 128], [64, 96, 128]], use_batch_norm=use_batch_norm)
self.fp2 = PointNetFeaturePropagation(32+64+128+128, [256, 128], use_batch_norm=use_batch_norm)
self.fp1 = PointNetFeaturePropagation(128, [128, 128, 128], use_batch_norm=use_batch_norm)
elif self.layer == 1:
self.sa1 = PointNetSetAbstractionMsg(npoint_list[0], radius_list[0], [sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [mlp1_size, mlp2_size], use_batch_norm=use_batch_norm,
downsample=downsample, track_running_stats=track_running_stats)
self.fp1 = PointNetFeaturePropagation(mlp1_size[-1] + mlp2_size[-1], interpolation_mlp_size, use_batch_norm=use_batch_norm, track_running_stats=track_running_stats)
self.drop_out = dropout
self.conv1 = nn.Conv1d(128, 128, 1)
if use_batch_norm:
self.bn1 = nn.BatchNorm1d(128, track_running_stats=track_running_stats)
if self.drop_out:
self.drop1 = nn.Dropout(0.5)
self.conv2 = nn.Conv1d(128, num_out_channel, 1)
self.use_batch_norm = use_batch_norm
self.target = target
def forward(self, xyz):
l0_points = xyz
l0_xyz = xyz[:,:3,:]
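        # Run as many set-abstraction / feature-propagation stages as configured
        # by self.layer; shallower variants skip the deeper encoder/decoder pairs.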
if self.layer == 4:
l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
l4_xyz, l4_points = self.sa4(l3_xyz, l3_points)
l3_points = self.fp4(l3_xyz, l4_xyz, l3_points, l4_points)
l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
elif self.layer == 3:
l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
l3_xyz, l3_points = self.sa3(l2_xyz, l2_points)
l2_points = self.fp3(l2_xyz, l3_xyz, l2_points, l3_points)
l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
elif self.layer == 2:
l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
l2_xyz, l2_points = self.sa2(l1_xyz, l1_points)
l1_points = self.fp2(l1_xyz, l2_xyz, l1_points, l2_points)
l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
elif self.layer == 1:
l1_xyz, l1_points = self.sa1(l0_xyz, l0_points)
l0_points = self.fp1(l0_xyz, l1_xyz, None, l1_points)
if self.use_batch_norm:
if self.drop_out:
x = self.drop1(F.relu(self.bn1(self.conv1(l0_points))))
else:
x = F.relu(self.bn1(self.conv1(l0_points)))
else:
x = F.relu(self.conv1(l0_points))
x = self.conv2(x)
# this is not needed with BCElogit loss
# x = F.log_softmax(x, dim=1)
if self.target == 'normal':
x = F.normalize(x, dim=1)
x = x.permute(0, 2, 1)
# return x, l4_points
return x, None
class get_loss_original(nn.Module):
def __init__(self):
super(get_loss_original, self).__init__()
def forward(self, pred, target, trans_feat, weight):
total_loss = F.nll_loss(pred, target, weight=weight)
return total_loss
class get_loss(nn.Module):
def __init__(self):
super(get_loss, self).__init__()
self.loss = nn.BCEWithLogitsLoss()
def forward(self, pred, target, trans_feat, weight):
total_loss = self.loss(pred, target)
return total_loss
if __name__ == '__main__':
import torch
    model = get_model(use_batch_norm=True, num_out_channel=13)
xyz = torch.rand(6, 9, 2048)
(model(xyz)) | [
"torch.nn.Dropout",
"torch.nn.functional.nll_loss",
"haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation",
"torch.nn.functional.normalize",
"torch.nn.BatchNorm1d",
"haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg",
"torch.nn.BCEWithLogitsLoss",
"torch.nn.Conv1d",
"torch.rand"
]
| [((10025, 10047), 'torch.rand', 'torch.rand', (['(6)', '(9)', '(2048)'], {}), '(6, 9, 2048)\n', (10035, 10047), False, 'import torch\n'), ((356, 495), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['(1024)', '[0.05, 0.1]', '[16, 32]', 'num_input_channel', '[[16, 16, 32], [32, 32, 64]]'], {'use_batch_norm': 'use_batch_norm'}), '(1024, [0.05, 0.1], [16, 32], num_input_channel, [\n [16, 16, 32], [32, 32, 64]], use_batch_norm=use_batch_norm)\n', (381, 495), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((510, 639), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['(256)', '[0.1, 0.2]', '[16, 32]', '(32 + 64)', '[[64, 64, 128], [64, 96, 128]]'], {'use_batch_norm': 'use_batch_norm'}), '(256, [0.1, 0.2], [16, 32], 32 + 64, [[64, 64, 128\n ], [64, 96, 128]], use_batch_norm=use_batch_norm)\n', (535, 639), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((652, 786), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['(64)', '[0.2, 0.4]', '[16, 32]', '(128 + 128)', '[[128, 196, 256], [128, 196, 256]]'], {'use_batch_norm': 'use_batch_norm'}), '(64, [0.2, 0.4], [16, 32], 128 + 128, [[128, 196, \n 256], [128, 196, 256]], use_batch_norm=use_batch_norm)\n', (677, 786), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((799, 933), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['(16)', '[0.4, 0.8]', '[16, 32]', '(256 + 256)', '[[256, 256, 512], [256, 384, 512]]'], {'use_batch_norm': 'use_batch_norm'}), '(16, [0.4, 0.8], [16, 32], 256 + 256, [[256, 256, \n 512], [256, 384, 512]], use_batch_norm=use_batch_norm)\n', (824, 933), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((946, 1042), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(512 + 512 + 256 + 256)', '[256, 256]'], {'use_batch_norm': 'use_batch_norm'}), '(512 + 512 + 256 + 256, [256, 256],\n use_batch_norm=use_batch_norm)\n', (972, 1042), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((1052, 1143), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(128 + 128 + 256)', '[256, 256]'], {'use_batch_norm': 'use_batch_norm'}), '(128 + 128 + 256, [256, 256], use_batch_norm=\n use_batch_norm)\n', (1078, 1143), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((1154, 1243), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(32 + 64 + 256)', '[256, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(32 + 64 + 256, [256, 128], use_batch_norm=\n use_batch_norm)\n', (1180, 1243), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((1254, 1333), 
'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(128)', '[128, 128, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(128, [128, 128, 128], use_batch_norm=use_batch_norm)\n', (1280, 1333), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((1355, 1377), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', '(128)', '(1)'], {}), '(128, 128, 1)\n', (1364, 1377), True, 'import torch.nn as nn\n'), ((1510, 1540), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', 'num_classes', '(1)'], {}), '(128, num_classes, 1)\n', (1519, 1540), True, 'import torch.nn as nn\n'), ((1600, 1620), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', '(3)', '(1)'], {}), '(128, 3, 1)\n', (1609, 1620), True, 'import torch.nn as nn\n'), ((1678, 1698), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', '(1)', '(1)'], {}), '(128, 1, 1)\n', (1687, 1698), True, 'import torch.nn as nn\n'), ((2571, 2597), 'torch.nn.functional.normalize', 'F.normalize', (['normal'], {'dim': '(1)'}), '(normal, dim=1)\n', (2582, 2597), True, 'import torch.nn.functional as F\n'), ((7036, 7058), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', '(128)', '(1)'], {}), '(128, 128, 1)\n', (7045, 7058), True, 'import torch.nn as nn\n'), ((7266, 7300), 'torch.nn.Conv1d', 'nn.Conv1d', (['(128)', 'num_out_channel', '(1)'], {}), '(128, num_out_channel, 1)\n', (7275, 7300), True, 'import torch.nn as nn\n'), ((9611, 9650), 'torch.nn.functional.nll_loss', 'F.nll_loss', (['pred', 'target'], {'weight': 'weight'}), '(pred, target, weight=weight)\n', (9621, 9650), True, 'import torch.nn.functional as F\n'), ((9791, 9813), 'torch.nn.BCEWithLogitsLoss', 'nn.BCEWithLogitsLoss', ([], {}), '()\n', (9811, 9813), True, 'import torch.nn as nn\n'), ((1428, 1447), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(128)'], {}), '(128)\n', (1442, 1447), True, 'import torch.nn as nn\n'), ((1473, 1488), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (1483, 1488), True, 'import torch.nn as nn\n'), ((3657, 3851), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[0]', 'radius_list[0]', '[sample_point_1_list[0], sample_point_2_list[0]]', 'num_in_channel', '[[16, 16, 32], [32, 32, 64]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[0], radius_list[0], [\n sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [[16, \n 16, 32], [32, 32, 64]], use_batch_norm=use_batch_norm)\n', (3682, 3851), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((3865, 4054), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[1]', 'radius_list[1]', '[sample_point_1_list[1], sample_point_2_list[1]]', '(32 + 64)', '[[64, 64, 128], [64, 96, 128]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[1], radius_list[1], [\n sample_point_1_list[1], sample_point_2_list[1]], 32 + 64, [[64, 64, 128\n ], [64, 96, 128]], use_batch_norm=use_batch_norm)\n', (3890, 4054), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((4066, 4260), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[2]', 'radius_list[2]', '[sample_point_1_list[2], sample_point_2_list[2]]', '(128 + 
128)', '[[128, 196, 256], [128, 196, 256]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[2], radius_list[2], [\n sample_point_1_list[2], sample_point_2_list[2]], 128 + 128, [[128, 196,\n 256], [128, 196, 256]], use_batch_norm=use_batch_norm)\n', (4091, 4260), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((4273, 4467), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[3]', 'radius_list[3]', '[sample_point_1_list[3], sample_point_2_list[3]]', '(256 + 256)', '[[256, 256, 512], [256, 384, 512]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[3], radius_list[3], [\n sample_point_1_list[3], sample_point_2_list[3]], 256 + 256, [[256, 256,\n 512], [256, 384, 512]], use_batch_norm=use_batch_norm)\n', (4298, 4467), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((4480, 4576), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(512 + 512 + 256 + 256)', '[256, 256]'], {'use_batch_norm': 'use_batch_norm'}), '(512 + 512 + 256 + 256, [256, 256],\n use_batch_norm=use_batch_norm)\n', (4506, 4576), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((4590, 4681), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(128 + 128 + 256)', '[256, 256]'], {'use_batch_norm': 'use_batch_norm'}), '(128 + 128 + 256, [256, 256], use_batch_norm=\n use_batch_norm)\n', (4616, 4681), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((4696, 4785), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(32 + 64 + 256)', '[256, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(32 + 64 + 256, [256, 128], use_batch_norm=\n use_batch_norm)\n', (4722, 4785), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((4800, 4879), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(128)', '[128, 128, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(128, [128, 128, 128], use_batch_norm=use_batch_norm)\n', (4826, 4879), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((7109, 7169), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['(128)'], {'track_running_stats': 'track_running_stats'}), '(128, track_running_stats=track_running_stats)\n', (7123, 7169), True, 'import torch.nn as nn\n'), ((9315, 9336), 'torch.nn.functional.normalize', 'F.normalize', (['x'], {'dim': '(1)'}), '(x, dim=1)\n', (9326, 9336), True, 'import torch.nn.functional as F\n'), ((4933, 5127), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[0]', 'radius_list[0]', '[sample_point_1_list[0], sample_point_2_list[0]]', 'num_in_channel', '[[16, 16, 32], [32, 32, 64]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[0], radius_list[0], [\n sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [[16, \n 16, 32], 
[32, 32, 64]], use_batch_norm=use_batch_norm)\n', (4958, 5127), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((5141, 5330), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[1]', 'radius_list[1]', '[sample_point_1_list[1], sample_point_2_list[1]]', '(32 + 64)', '[[64, 64, 128], [64, 96, 128]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[1], radius_list[1], [\n sample_point_1_list[1], sample_point_2_list[1]], 32 + 64, [[64, 64, 128\n ], [64, 96, 128]], use_batch_norm=use_batch_norm)\n', (5166, 5330), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((5342, 5536), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[2]', 'radius_list[2]', '[sample_point_1_list[2], sample_point_2_list[2]]', '(128 + 128)', '[[128, 196, 256], [128, 196, 256]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[2], radius_list[2], [\n sample_point_1_list[2], sample_point_2_list[2]], 128 + 128, [[128, 196,\n 256], [128, 196, 256]], use_batch_norm=use_batch_norm)\n', (5367, 5536), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((5549, 5645), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(128 + 128 + 256 + 256)', '[256, 256]'], {'use_batch_norm': 'use_batch_norm'}), '(128 + 128 + 256 + 256, [256, 256],\n use_batch_norm=use_batch_norm)\n', (5575, 5645), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((5659, 5748), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(32 + 64 + 256)', '[256, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(32 + 64 + 256, [256, 128], use_batch_norm=\n use_batch_norm)\n', (5685, 5748), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((5763, 5842), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(128)', '[128, 128, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(128, [128, 128, 128], use_batch_norm=use_batch_norm)\n', (5789, 5842), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((7229, 7244), 'torch.nn.Dropout', 'nn.Dropout', (['(0.5)'], {}), '(0.5)\n', (7239, 7244), True, 'import torch.nn as nn\n'), ((5896, 6090), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[0]', 'radius_list[0]', '[sample_point_1_list[0], sample_point_2_list[0]]', 'num_in_channel', '[[16, 16, 32], [32, 32, 64]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[0], radius_list[0], [\n sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [[16, \n 16, 32], [32, 32, 64]], use_batch_norm=use_batch_norm)\n', (5921, 6090), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((6104, 6293), 
'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[1]', 'radius_list[1]', '[sample_point_1_list[1], sample_point_2_list[1]]', '(32 + 64)', '[[64, 64, 128], [64, 96, 128]]'], {'use_batch_norm': 'use_batch_norm'}), '(npoint_list[1], radius_list[1], [\n sample_point_1_list[1], sample_point_2_list[1]], 32 + 64, [[64, 64, 128\n ], [64, 96, 128]], use_batch_norm=use_batch_norm)\n', (6129, 6293), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((6305, 6400), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(32 + 64 + 128 + 128)', '[256, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(32 + 64 + 128 + 128, [256, 128], use_batch_norm=\n use_batch_norm)\n', (6331, 6400), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((6413, 6492), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(128)', '[128, 128, 128]'], {'use_batch_norm': 'use_batch_norm'}), '(128, [128, 128, 128], use_batch_norm=use_batch_norm)\n', (6439, 6492), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((6546, 6803), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetSetAbstractionMsg', 'PointNetSetAbstractionMsg', (['npoint_list[0]', 'radius_list[0]', '[sample_point_1_list[0], sample_point_2_list[0]]', 'num_in_channel', '[mlp1_size, mlp2_size]'], {'use_batch_norm': 'use_batch_norm', 'downsample': 'downsample', 'track_running_stats': 'track_running_stats'}), '(npoint_list[0], radius_list[0], [\n sample_point_1_list[0], sample_point_2_list[0]], num_in_channel, [\n mlp1_size, mlp2_size], use_batch_norm=use_batch_norm, downsample=\n downsample, track_running_stats=track_running_stats)\n', (6571, 6803), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n'), ((6828, 6989), 'haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils.PointNetFeaturePropagation', 'PointNetFeaturePropagation', (['(mlp1_size[-1] + mlp2_size[-1])', 'interpolation_mlp_size'], {'use_batch_norm': 'use_batch_norm', 'track_running_stats': 'track_running_stats'}), '(mlp1_size[-1] + mlp2_size[-1],\n interpolation_mlp_size, use_batch_norm=use_batch_norm,\n track_running_stats=track_running_stats)\n', (6854, 6989), False, 'from haptic.Pointnet_Pointnet2_pytorch.models.pointnet2_utils import PointNetSetAbstractionMsg, PointNetFeaturePropagation\n')] |
import krpc
import time
import math
from simple_pid import PID
conn = krpc.connect(name="UI Test")
vessel = conn.space_center.active_vessel
kerbin_frame = vessel.orbit.body.reference_frame
orb_frame = vessel.orbital_reference_frame
srf_frame = vessel.surface_reference_frame
surface_gravity = vessel.orbit.body.surface_gravity
current_met = conn.add_stream(getattr, vessel, 'met')
current_roll = conn.add_stream(getattr, vessel.flight(), 'roll')
current_pitch = conn.add_stream(getattr, vessel.flight(), 'pitch')
current_heading = conn.add_stream(getattr, vessel.flight(), 'heading')
current_alt = conn.add_stream(getattr, vessel.flight(), 'surface_altitude')
lowest = conn.add_stream(vessel.bounding_box, srf_frame)
current_drag = conn.add_stream(getattr, vessel.flight(), 'drag')
current_aero = conn.add_stream(getattr, vessel.flight(), 'aerodynamic_force')
current_speed = conn.add_stream(getattr, vessel.flight(kerbin_frame), 'speed')
vessel.control.activate_next_stage()
vessel.control.sas = True
time.sleep(.2)
vessel.control.sas_mode = conn.space_center.SASMode.retrograde
def bottom_altitude():
return max(0, current_alt() - abs(lowest()[0][0]))
for engine in vessel.parts.engines:
engine.gimbal_locked = True
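# The loop below implements a rough hover-slam ("suicide burn") trigger: it estimates the
# time needed to null the current surface speed from the available deceleration (90% of max
# thrust plus the current aerodynamic force, per unit mass, as computed in time_to_zero) and
# starts the braking burn once the projected stopping distance reaches the remaining
# altitude above the vessel's lowest point.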
while True:
aero_amp = math.sqrt(current_aero()[0] ** 2
+ current_aero()[1] ** 2
+ current_aero()[2] ** 2)
time_to_zero = current_speed() / ((((vessel.max_thrust * .9) + aero_amp) / vessel.mass)
+ vessel.orbit.body.surface_gravity)
if (time_to_zero * current_speed()) >= bottom_altitude() - current_speed():
print(current_speed())
print(f"Start Hover Slam Burn")
vessel.control.throttle = .9
break
while current_speed() > 50:
print(current_speed())
time.sleep(.01)
pass
print(f"Switch to Stab")
for leg in vessel.parts.legs:
leg.deployed = True
pid1 = PID(.15, 0, .5, setpoint=0)
pid1.output_limits = (0, 1)
pid1.sample_time = 0.01
while bottom_altitude() > 1:
vessel.control.throttle = pid1(bottom_altitude())
# pid1.setpoint *= .98
time.sleep(.01)
vessel.control.sas_mode = conn.space_center.SASMode.radial
vessel.control.throttle = 0
| [
"krpc.connect",
"simple_pid.PID",
"time.sleep"
]
| [((71, 99), 'krpc.connect', 'krpc.connect', ([], {'name': '"""UI Test"""'}), "(name='UI Test')\n", (83, 99), False, 'import krpc\n'), ((1005, 1020), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (1015, 1020), False, 'import time\n'), ((1936, 1965), 'simple_pid.PID', 'PID', (['(0.15)', '(0)', '(0.5)'], {'setpoint': '(0)'}), '(0.15, 0, 0.5, setpoint=0)\n', (1939, 1965), False, 'from simple_pid import PID\n'), ((1823, 1839), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1833, 1839), False, 'import time\n'), ((2131, 2147), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (2141, 2147), False, 'import time\n')] |
import datetime
import os
from io import BytesIO
import logging
from functools import wraps
from copy import deepcopy
from collections import Counter
import slugify
import yaml
import mistune
import requests
from flask import \
Blueprint, Flask, render_template, abort, send_file, make_response
from flask_cors import CORS
from flask_jsonpify import jsonify
from flask_basicauth import BasicAuth
from datapackage_pipelines.status import status_mgr
from datapackage_pipelines.utilities.stat_utils import user_facing_stats
YAML_DUMPER = yaml.CDumper if 'CDumper' in yaml.__dict__ else yaml.Dumper
def datestr(x):
if x is None:
return ''
return str(datetime.datetime.fromtimestamp(x))
def yamlize(x):
ret = yaml.dump(x, default_flow_style=False, Dumper=YAML_DUMPER)
return ret
markdown = mistune.Markdown(hard_wrap=True)
status = status_mgr()
def make_hierarchies(statuses):
def group(lvl):
pipelines = list(filter(lambda x: len(x['id']) == 1, lvl))
children_ = list(filter(lambda x: len(x['id']) > 1, lvl))
groups_ = {}
for child in children_:
child_key = child['id'].pop(0)
groups_.setdefault(child_key, []).append(child)
children_ = dict(
(k, group(v))
for k, v in groups_.items()
)
for p in pipelines:
p['id'] = p['id'][0]
return {
'pipelines': pipelines,
'children': children_
}
def flatten(children_):
for k, v in children_.items():
v['children'] = flatten(v['children'])
child_keys = list(v['children'].keys())
if len(child_keys) == 1 and len(v['pipelines']) == 0:
child_key = child_keys[0]
children_['/'.join([k, child_key])] = v['children'][child_key]
del children_[k]
return children_
statuses = [
{
'id': st['id'].split('/'),
'title': st.get('title'),
'stats': st.get('stats'),
'slug': st.get('slug')
}
for st in statuses
]
groups = group(statuses)
children = groups.get('children', {})
groups['children'] = flatten(children)
return groups
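# Rough example of what make_hierarchies produces: for pipeline ids 'a/b/one' and 'a/b/two',
# group() nests them under 'a' -> 'b' and flatten() collapses the single-child chain, giving
# approximately {'pipelines': [], 'children': {'a/b': {'pipelines': [<one>, <two>], 'children': {}}}}.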
def basic_auth_required(view_func):
"""
A decorator that can be used to protect specific views with HTTP basic
access authentication. Conditional on having BASIC_AUTH_USERNAME and
BASIC_AUTH_PASSWORD set as env vars.
"""
@wraps(view_func)
def wrapper(*args, **kwargs):
if app.config.get('BASIC_AUTH_ACTIVE', False):
if basic_auth.authenticate():
return view_func(*args, **kwargs)
else:
return basic_auth.challenge()
else:
return view_func(*args, **kwargs)
return wrapper
blueprint = Blueprint('dpp', 'dpp')
@blueprint.route("")
@blueprint.route("<path:pipeline_path>")
@basic_auth_required
def main(pipeline_path=None):
pipeline_ids = sorted(status.all_pipeline_ids())
# If we have a pipeline_path, filter the pipeline ids.
if pipeline_path is not None:
if not pipeline_path.startswith('./'):
pipeline_path = './' + pipeline_path
pipeline_ids = [p for p in pipeline_ids if p.startswith(pipeline_path)]
statuses = []
for pipeline_id in pipeline_ids:
pipeline_status = status.get(pipeline_id)
ex = pipeline_status.get_last_execution()
success_ex = pipeline_status.get_last_successful_execution()
pipeline_obj = {
'id': pipeline_id.lstrip('./'),
'title': pipeline_status.pipeline_details.get('title'),
'stats': user_facing_stats(ex.stats) if ex else None,
'slug': slugify.slugify(pipeline_id),
'trigger': ex.trigger if ex else None,
'error_log': pipeline_status.errors(),
'state': pipeline_status.state(),
'pipeline': pipeline_status.pipeline_details,
'message': pipeline_status.state().capitalize(),
'dirty': pipeline_status.dirty(),
'runnable': pipeline_status.runnable(),
'class': {'INIT': 'primary',
'QUEUED': 'primary',
'INVALID': 'danger',
'RUNNING': 'warning',
'SUCCEEDED': 'success',
'FAILED': 'danger'
}[pipeline_status.state()],
'ended': datestr(ex.finish_time) if ex else None,
'started': datestr(ex.start_time) if ex else None,
'last_success':
datestr(success_ex.finish_time) if success_ex else None,
}
statuses.append(pipeline_obj)
def state_and_not_dirty(state, p):
return p.get('state') == state and not p.get('dirty')
def state_or_dirty(state, p):
return p.get('state') == state or p.get('dirty')
categories = [
['ALL', 'All Pipelines', lambda _, __: True],
['INVALID', "Can't start", lambda _, p: not p['runnable']],
['QUEUED', 'Waiting to run', lambda state, p: p['state'] == state],
['RUNNING', 'Running', state_and_not_dirty],
['FAILED', 'Failed Execution', state_and_not_dirty],
['SUCCEEDED', 'Successful Execution', state_and_not_dirty],
]
for item in categories:
item.append([p for p in deepcopy(statuses)
if item[2](item[0], p)])
item.append(len(item[-1]))
item.append(make_hierarchies(item[-2]))
return render_template('dashboard.html',
categories=categories,
yamlize=yamlize,
markdown=markdown)
@blueprint.route("api/raw/status")
@basic_auth_required
def pipeline_raw_api_status():
pipelines = sorted(status.all_statuses(), key=lambda x: x.get('id'))
for pipeline in pipelines:
# can get the full details from api/raw/<path:pipeline_id>
for attr in ["pipeline", "reason", "error_log"]:
if attr in pipeline:
del pipeline[attr]
return jsonify(pipelines)
@blueprint.route("api/raw/<path:pipeline_id>")
@basic_auth_required
def pipeline_raw_api(pipeline_id):
if not pipeline_id.startswith('./'):
pipeline_id = './' + pipeline_id
pipeline_status = status.get(pipeline_id)
if not pipeline_status.pipeline_details:
abort(404)
last_execution = pipeline_status.get_last_execution()
last_successful_execution = pipeline_status.get_last_successful_execution()
ret = {
"id": pipeline_id,
"cache_hash": pipeline_status.cache_hash,
"dirty": pipeline_status.dirty(),
"queued": last_execution.queue_time if last_execution else None,
"started": last_execution.start_time if last_execution else None,
"ended": last_execution.finish_time if last_execution else None,
"reason": last_execution.log if last_execution else None,
"error_log": pipeline_status.errors(),
"stats": last_execution.stats if last_execution else None,
"success": last_execution.success if last_execution else None,
"last_success":
last_successful_execution.finish_time
if last_successful_execution else None,
"trigger": last_execution.trigger if last_execution else None,
"pipeline": pipeline_status.pipeline_details,
"source": pipeline_status.source_spec,
"message": pipeline_status.state().capitalize(),
"state": pipeline_status.state(),
}
return jsonify(ret)
@blueprint.route("api/<field>/<path:pipeline_id>")
@basic_auth_required
def pipeline_api(field, pipeline_id):
if not pipeline_id.startswith('./'):
pipeline_id = './' + pipeline_id
pipeline_status = status.get(pipeline_id)
if not pipeline_status.pipeline_details:
abort(404)
ret = None
if field == 'pipeline':
ret = pipeline_status.pipeline_details
ret = yamlize(ret)
elif field == 'source':
ret = pipeline_status.source_spec
ret = yamlize(ret)
elif field == 'log':
ex = pipeline_status.get_last_execution()
ret = ex.log if ex else ''
else:
abort(400)
ret = ret.split('\n')
ret = {'text': ret}
return jsonify(ret)
def _make_badge_response(subject, text, colour):
image_url = 'https://img.shields.io/badge/{}-{}-{}.svg'.format(
subject, text, colour)
r = requests.get(image_url)
buffer_image = BytesIO(r.content)
buffer_image.seek(0)
res = make_response(send_file(buffer_image, mimetype='image/svg+xml'))
res.headers['Cache-Control'] = \
'max-age=0, no-cache, no-store, must-revalidate'
res.headers['Expires'] = '0'
return res
@blueprint.route("badge/<path:pipeline_id>")
def badge(pipeline_id):
'''An individual pipeline status'''
if not pipeline_id.startswith('./'):
pipeline_id = './' + pipeline_id
pipeline_status = status.get(pipeline_id)
status_color = 'lightgray'
if pipeline_status.pipeline_details:
status_text = pipeline_status.state().lower()
last_execution = pipeline_status.get_last_execution()
success = last_execution.success if last_execution else None
if success is True:
stats = last_execution.stats if last_execution else None
record_count = stats.get('count_of_rows')
if record_count is not None:
status_text += ' (%d records)' % record_count
status_color = 'brightgreen'
elif success is False:
status_color = 'red'
else:
status_text = "not found"
return _make_badge_response('pipeline', status_text, status_color)
@blueprint.route("badge/collection/<path:pipeline_path>")
def badge_collection(pipeline_path):
'''Status badge for a collection of pipelines.'''
all_pipeline_ids = sorted(status.all_pipeline_ids())
if not pipeline_path.startswith('./'):
pipeline_path = './' + pipeline_path
# Filter pipeline ids to only include those that start with pipeline_path.
path_pipeline_ids = \
[p for p in all_pipeline_ids if p.startswith(pipeline_path)]
statuses = []
for pipeline_id in path_pipeline_ids:
pipeline_status = status.get(pipeline_id)
if pipeline_status is None:
abort(404)
status_text = pipeline_status.state().lower()
statuses.append(status_text)
status_color = 'lightgray'
status_counter = Counter(statuses)
if status_counter:
if len(status_counter) == 1 and status_counter['succeeded'] > 0:
status_color = 'brightgreen'
elif status_counter['failed'] > 0:
status_color = 'red'
elif status_counter['failed'] == 0:
status_color = 'yellow'
status_text = \
', '.join(['{} {}'.format(v, k)
for k, v in status_counter.items()])
else:
status_text = "not found"
return _make_badge_response('pipelines', status_text, status_color)
app = Flask(__name__)
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = True
if os.environ.get('DPP_BASIC_AUTH_USERNAME', False) \
and os.environ.get('DPP_BASIC_AUTH_PASSWORD', False):
app.config['BASIC_AUTH_USERNAME'] = os.environ['DPP_BASIC_AUTH_USERNAME']
app.config['BASIC_AUTH_PASSWORD'] = os.environ['DPP_BASIC_AUTH_PASSWORD']
app.config['BASIC_AUTH_ACTIVE'] = True
basic_auth = BasicAuth(app)
CORS(app)
url_prefix = os.environ.get('DPP_BASE_PATH', '/')
if not url_prefix.endswith('/'):
url_prefix += '/'
logging.info('Serving on path %s', url_prefix)
app.register_blueprint(blueprint, url_prefix=url_prefix)
| [
"flask.render_template",
"mistune.Markdown",
"flask_cors.CORS",
"flask.Flask",
"datapackage_pipelines.utilities.stat_utils.user_facing_stats",
"io.BytesIO",
"copy.deepcopy",
"logging.info",
"functools.wraps",
"flask.abort",
"yaml.dump",
"requests.get",
"flask_jsonpify.jsonify",
"flask.send_file",
"slugify.slugify",
"datetime.datetime.fromtimestamp",
"os.environ.get",
"collections.Counter",
"datapackage_pipelines.status.status_mgr",
"flask.Blueprint",
"flask_basicauth.BasicAuth"
]
| [((823, 855), 'mistune.Markdown', 'mistune.Markdown', ([], {'hard_wrap': '(True)'}), '(hard_wrap=True)\n', (839, 855), False, 'import mistune\n'), ((865, 877), 'datapackage_pipelines.status.status_mgr', 'status_mgr', ([], {}), '()\n', (875, 877), False, 'from datapackage_pipelines.status import status_mgr\n'), ((2852, 2875), 'flask.Blueprint', 'Blueprint', (['"""dpp"""', '"""dpp"""'], {}), "('dpp', 'dpp')\n", (2861, 2875), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, make_response\n'), ((11132, 11147), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (11137, 11147), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, make_response\n'), ((11522, 11536), 'flask_basicauth.BasicAuth', 'BasicAuth', (['app'], {}), '(app)\n', (11531, 11536), False, 'from flask_basicauth import BasicAuth\n'), ((11539, 11548), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (11543, 11548), False, 'from flask_cors import CORS\n'), ((11563, 11599), 'os.environ.get', 'os.environ.get', (['"""DPP_BASE_PATH"""', '"""/"""'], {}), "('DPP_BASE_PATH', '/')\n", (11577, 11599), False, 'import os\n'), ((11655, 11701), 'logging.info', 'logging.info', (['"""Serving on path %s"""', 'url_prefix'], {}), "('Serving on path %s', url_prefix)\n", (11667, 11701), False, 'import logging\n'), ((736, 794), 'yaml.dump', 'yaml.dump', (['x'], {'default_flow_style': '(False)', 'Dumper': 'YAML_DUMPER'}), '(x, default_flow_style=False, Dumper=YAML_DUMPER)\n', (745, 794), False, 'import yaml\n'), ((2497, 2513), 'functools.wraps', 'wraps', (['view_func'], {}), '(view_func)\n', (2502, 2513), False, 'from functools import wraps\n'), ((5559, 5655), 'flask.render_template', 'render_template', (['"""dashboard.html"""'], {'categories': 'categories', 'yamlize': 'yamlize', 'markdown': 'markdown'}), "('dashboard.html', categories=categories, yamlize=yamlize,\n markdown=markdown)\n", (5574, 5655), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, make_response\n'), ((6129, 6147), 'flask_jsonpify.jsonify', 'jsonify', (['pipelines'], {}), '(pipelines)\n', (6136, 6147), False, 'from flask_jsonpify import jsonify\n'), ((7602, 7614), 'flask_jsonpify.jsonify', 'jsonify', (['ret'], {}), '(ret)\n', (7609, 7614), False, 'from flask_jsonpify import jsonify\n'), ((8336, 8348), 'flask_jsonpify.jsonify', 'jsonify', (['ret'], {}), '(ret)\n', (8343, 8348), False, 'from flask_jsonpify import jsonify\n'), ((8507, 8530), 'requests.get', 'requests.get', (['image_url'], {}), '(image_url)\n', (8519, 8530), False, 'import requests\n'), ((8550, 8568), 'io.BytesIO', 'BytesIO', (['r.content'], {}), '(r.content)\n', (8557, 8568), False, 'from io import BytesIO\n'), ((10568, 10585), 'collections.Counter', 'Counter', (['statuses'], {}), '(statuses)\n', (10575, 10585), False, 'from collections import Counter\n'), ((11201, 11249), 'os.environ.get', 'os.environ.get', (['"""DPP_BASIC_AUTH_USERNAME"""', '(False)'], {}), "('DPP_BASIC_AUTH_USERNAME', False)\n", (11215, 11249), False, 'import os\n'), ((11259, 11307), 'os.environ.get', 'os.environ.get', (['"""DPP_BASIC_AUTH_PASSWORD"""', '(False)'], {}), "('DPP_BASIC_AUTH_PASSWORD', False)\n", (11273, 11307), False, 'import os\n'), ((672, 706), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['x'], {}), '(x)\n', (703, 706), False, 'import datetime\n'), ((6434, 6444), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (6439, 6444), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, 
make_response\n'), ((7909, 7919), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (7914, 7919), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, make_response\n'), ((8618, 8667), 'flask.send_file', 'send_file', (['buffer_image'], {'mimetype': '"""image/svg+xml"""'}), "(buffer_image, mimetype='image/svg+xml')\n", (8627, 8667), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, make_response\n'), ((3763, 3791), 'slugify.slugify', 'slugify.slugify', (['pipeline_id'], {}), '(pipeline_id)\n', (3778, 3791), False, 'import slugify\n'), ((10413, 10423), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (10418, 10423), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, make_response\n'), ((3698, 3725), 'datapackage_pipelines.utilities.stat_utils.user_facing_stats', 'user_facing_stats', (['ex.stats'], {}), '(ex.stats)\n', (3715, 3725), False, 'from datapackage_pipelines.utilities.stat_utils import user_facing_stats\n'), ((8263, 8273), 'flask.abort', 'abort', (['(400)'], {}), '(400)\n', (8268, 8273), False, 'from flask import Blueprint, Flask, render_template, abort, send_file, make_response\n'), ((5400, 5418), 'copy.deepcopy', 'deepcopy', (['statuses'], {}), '(statuses)\n', (5408, 5418), False, 'from copy import deepcopy\n')] |
import pygame as pg
from pygame.locals import *
import sys
import board.chess_board as board
w = 60 * 8
h = 60 * 8
class Game:
"""
Class to setup and start a game
"""
def __init__(self):
self.b = board.Board(w, h)
def get_board(self):
"""
Returns board
:return: Board-class
"""
return self.b
def run(self):
"""
Where the game is created and launched
:return:
"""
# While loop to show display
while True:
for event in pg.event.get():
# Quitting game
if event.type == QUIT:
pg.quit()
sys.exit()
# If game can continue
if self.b.get_status() == "-":
# Pressing mouse
if event.type == MOUSEBUTTONDOWN:
pos = pg.mouse.get_pos()
for r in self.b.get_board_array():
for square in r:
if square.get_visual().collidepoint(pos):
square.click()
self.b.update_board()
if __name__ == "__main__":
# Launch main-function if running this script
game = Game()
game.run()
| [
"pygame.quit",
"pygame.event.get",
"pygame.mouse.get_pos",
"sys.exit",
"board.chess_board.Board"
]
| [((224, 241), 'board.chess_board.Board', 'board.Board', (['w', 'h'], {}), '(w, h)\n', (235, 241), True, 'import board.chess_board as board\n'), ((555, 569), 'pygame.event.get', 'pg.event.get', ([], {}), '()\n', (567, 569), True, 'import pygame as pg\n'), ((662, 671), 'pygame.quit', 'pg.quit', ([], {}), '()\n', (669, 671), True, 'import pygame as pg\n'), ((692, 702), 'sys.exit', 'sys.exit', ([], {}), '()\n', (700, 702), False, 'import sys\n'), ((910, 928), 'pygame.mouse.get_pos', 'pg.mouse.get_pos', ([], {}), '()\n', (926, 928), True, 'import pygame as pg\n')] |
#importing libraries
import torch
import torch.utils.data as data
import os
import random
from PIL import Image
class CreateDataset(data.Dataset):
def __init__(self , imagedir , subfolder='train' , direction = 'AtoB' , flip = False , transform = None ,resize_scale = None , crop_size = None):
super(CreateDataset , self).__init__()
self.images_path = os.path.join(imagedir , subfolder)
self.image_filenames = [name for name in sorted(os.listdir(self.images_path))]
self.flip = flip
self.transform = transform
self.resize_scale = resize_scale
self.crop_size = crop_size
self.direction = direction
def __getitem__(self , index):
image_path = os.path.join(self.images_path , self.image_filenames[index])
img = Image.open(image_path)
if self.direction == 'AtoB':
inp_img = img.crop((0,0,img.width//2 , img.height))
target_img = img.crop((img.width//2 , 0 , img.width , img.height))
elif self.direction == 'BtoA':
inp_img = img.crop((img.width//2 , 0 , img.width , img.height))
target_img = img.crop((0,0,img.width//2 , img.height))
if self.resize_scale:
inp_img = inp_img.resize((self.resize_scale , self.resize_scale) , Image.BILINEAR)
target_img = target_img.resize((self.resize_scale , self.resize_scale) , Image.BILINEAR)
if self.crop_size:
x = random.randint(0 , self.resize_scale - self.crop_size + 1)
y = random.randint(0 , self.resize_scale - self.crop_size + 1)
inp_img = inp_img.crop((x , y , x + self.crop_size , y + self.crop_size))
target_img = target_img.crop((x , y , x + self.crop_size , y + self.crop_size))
if self.flip:
if random.random() < 0.5:
inp_img = inp_img.transpose(Image.FLIP_LEFT_RIGHT)
target_img = target_img.transpose(Image.FLIP_LEFT_RIGHT)
if self.transform is not None:
inp_img = self.transform(inp_img)
target_img = self.transform(target_img)
return inp_img , target_img
def __len__(self):
return len(self.image_filenames)
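# Minimal usage sketch (illustrative; the paths, sizes and transform below are assumptions,
# for a pix2pix-style folder of side-by-side A|B images):
#
#   from torchvision import transforms
#   transform = transforms.Compose([transforms.ToTensor(),
#                                   transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
#   dataset = CreateDataset('./data', subfolder='train', direction='AtoB', flip=True,
#                           transform=transform, resize_scale=286, crop_size=256)
#   loader = data.DataLoader(dataset, batch_size=4, shuffle=True)  # data is torch.utils.data
#   inp_batch, target_batch = next(iter(loader))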
| [
"os.listdir",
"PIL.Image.open",
"os.path.join",
"random.random",
"random.randint"
]
| [((402, 435), 'os.path.join', 'os.path.join', (['imagedir', 'subfolder'], {}), '(imagedir, subfolder)\n', (414, 435), False, 'import os\n'), ((785, 844), 'os.path.join', 'os.path.join', (['self.images_path', 'self.image_filenames[index]'], {}), '(self.images_path, self.image_filenames[index])\n', (797, 844), False, 'import os\n'), ((861, 883), 'PIL.Image.open', 'Image.open', (['image_path'], {}), '(image_path)\n', (871, 883), False, 'from PIL import Image\n'), ((1630, 1687), 'random.randint', 'random.randint', (['(0)', '(self.resize_scale - self.crop_size + 1)'], {}), '(0, self.resize_scale - self.crop_size + 1)\n', (1644, 1687), False, 'import random\n'), ((1706, 1763), 'random.randint', 'random.randint', (['(0)', '(self.resize_scale - self.crop_size + 1)'], {}), '(0, self.resize_scale - self.crop_size + 1)\n', (1720, 1763), False, 'import random\n'), ((2026, 2041), 'random.random', 'random.random', ([], {}), '()\n', (2039, 2041), False, 'import random\n'), ((494, 522), 'os.listdir', 'os.listdir', (['self.images_path'], {}), '(self.images_path)\n', (504, 522), False, 'import os\n')] |
#Prediction model using an instance of the Monte Carlo simulation and Brownian Motion equation
#import of libraries
import numpy as np
import pandas as pd
from pandas_datareader import data as wb
import matplotlib.pyplot as plt
from scipy.stats import norm
#ticker selection
def mainFunction(tradingSymbol):
data = pd.DataFrame()
data[tradingSymbol] = wb.DataReader(tradingSymbol, data_source='yahoo', start='2019-1-1')['Adj Close']
#percent change of asset price
log_returns = np.log(1+ data.pct_change())
    #graph showing price history over time beginning from 2019
data.plot(figsize = (10,6));
plt.show()
#graph of log returns of input ticker
#returns are normally distributed and have a consistent mean
log_returns.plot(figsize = (10,6))
plt.show()
#calculations
averageDailyReturn = log_returns.mean()
variance = log_returns.var()
drift = averageDailyReturn-(variance/2)
standardDeviation = log_returns.std()
#Brownian Motion equation
    #daily return r = e^(drift + standardDeviation * Z), with Z a standard normal draw
#prediction of future stock price based on simulation below using numpy for storing data into array
np.array(drift)
drift.values
standardDeviation.values
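    # Price-path update used below (geometric-Brownian-motion style):
    #   S_t = S_{t-1} * exp(drift + standardDeviation * Z_t),  Z_t ~ N(0, 1)
    # with drift = mean(log returns) - variance / 2 as computed above.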
#Brownian motion variable correlating to the distance between the mean and the number of standard deviation
norm.ppf(0.95)
#10 x 2 Matrix
x = np.random.rand(10,2)
norm.ppf(x)
#stores distances from the mean value, 0, into the 10 x 2 matrix
Z = norm.ppf(np.random.rand(10,2))
#time interval for the stock price forecast
timeInterval = 365
iterations = 5
    #daily returns r = e^(drift + standardDeviation * Z), with Z a standard normal draw
    #"iterations" (here 5) simulated sequences of 365 daily returns for the ticker symbol
dailyReturns = np.exp(drift.values + standardDeviation.values * norm.ppf(np.random.rand(timeInterval,iterations)))
#returns into price points
presentPrice = data.iloc[-1]
priceList = np.zeros_like(dailyReturns)
priceList[0] = presentPrice
#iteration for the time interavl of 365
for t in range(1, timeInterval):
priceList[t] = priceList[t-1] * dailyReturns[t]
    #plots the simulated future price paths (one per iteration)
plt.figure(figsize =(10,6))
plt.plot(priceList)
plt.show()
| [
"numpy.random.rand",
"pandas_datareader.data.DataReader",
"matplotlib.pyplot.plot",
"scipy.stats.norm.ppf",
"numpy.array",
"matplotlib.pyplot.figure",
"pandas.DataFrame",
"numpy.zeros_like",
"matplotlib.pyplot.show"
]
| [((322, 336), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (334, 336), True, 'import pandas as pd\n'), ((621, 631), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (629, 631), True, 'import matplotlib.pyplot as plt\n'), ((782, 792), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (790, 792), True, 'import matplotlib.pyplot as plt\n'), ((1149, 1164), 'numpy.array', 'np.array', (['drift'], {}), '(drift)\n', (1157, 1164), True, 'import numpy as np\n'), ((1328, 1342), 'scipy.stats.norm.ppf', 'norm.ppf', (['(0.95)'], {}), '(0.95)\n', (1336, 1342), False, 'from scipy.stats import norm\n'), ((1371, 1392), 'numpy.random.rand', 'np.random.rand', (['(10)', '(2)'], {}), '(10, 2)\n', (1385, 1392), True, 'import numpy as np\n'), ((1396, 1407), 'scipy.stats.norm.ppf', 'norm.ppf', (['x'], {}), '(x)\n', (1404, 1407), False, 'from scipy.stats import norm\n'), ((1923, 1950), 'numpy.zeros_like', 'np.zeros_like', (['dailyReturns'], {}), '(dailyReturns)\n', (1936, 1950), True, 'import numpy as np\n'), ((2175, 2202), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 6)'}), '(figsize=(10, 6))\n', (2185, 2202), True, 'import matplotlib.pyplot as plt\n'), ((2207, 2226), 'matplotlib.pyplot.plot', 'plt.plot', (['priceList'], {}), '(priceList)\n', (2215, 2226), True, 'import matplotlib.pyplot as plt\n'), ((2231, 2241), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2239, 2241), True, 'import matplotlib.pyplot as plt\n'), ((363, 430), 'pandas_datareader.data.DataReader', 'wb.DataReader', (['tradingSymbol'], {'data_source': '"""yahoo"""', 'start': '"""2019-1-1"""'}), "(tradingSymbol, data_source='yahoo', start='2019-1-1')\n", (376, 430), True, 'from pandas_datareader import data as wb\n'), ((1495, 1516), 'numpy.random.rand', 'np.random.rand', (['(10)', '(2)'], {}), '(10, 2)\n', (1509, 1516), True, 'import numpy as np\n'), ((1799, 1839), 'numpy.random.rand', 'np.random.rand', (['timeInterval', 'iterations'], {}), '(timeInterval, iterations)\n', (1813, 1839), True, 'import numpy as np\n')] |
import os
import csv
import numpy as np
from sklearn.utils import shuffle
## Read in frame data
samples = []
with open('/../opt/carnd_p3/data/driving_log.csv') as csvfile: #open the log file
reader = csv.reader(csvfile) #as a readable csv
for line in reader:
samples.append(line) #add each line of the log file to samples
samples = samples[1:] # to remove table header
samples = shuffle(samples) # shuffle entire sample set before splitting into training and validation so that training isn't biased
from sklearn.model_selection import train_test_split
train_samples, validation_samples = train_test_split(samples, test_size=0.2) #split samples into 80% training, 20% validation
from scipy import ndimage #because cv2.imread() imports the image as BGR, and we want RGB
## Define generator to handle small portions of images at a time so that training is not as memory-heavy
def generator(samples, batch_size=32):
num_samples = len(samples)
while 1: # Loop forever so the generator never terminates
# shuffle(samples) #shuffle within the training/validation sets, NOT NECESSARY SINCE SHUFFLING ALREADY SHUFFLED
for offset in range(0, num_samples, batch_size):
batch_samples = samples[offset:offset+batch_size] #collect the images for this batch
images = []
angles = []
for batch_sample in batch_samples:
path = '/../opt/carnd_p3/data/IMG/' #assign the location from which to read images
# read in images from all 3 cameras MAKING SURE TO READ IN AS RGB
center_image = ndimage.imread(path+batch_sample[0].split('/')[-1])
left_image = ndimage.imread(path+batch_sample[1].split('/')[-1])
right_image = ndimage.imread(path+batch_sample[2].split('/')[-1])
# read in steering angle
center_angle = float(batch_sample[3]) #read the steering angle
# apply a steering correction for the left and right images, in a way to generate "new" samples
correction = 0.2
left_angle = center_angle + correction
right_angle = center_angle - correction
# add images and angles to batch set
images.extend([center_image, left_image, right_image])
angles.extend([center_angle, left_angle, right_angle])
# copy all batches' images to final numpy array
X_train = np.array(images)
y_train = np.array(angles)
yield shuffle(X_train, y_train) #shuffle before yielding result
# compile and train the model using the generator function
train_generator = generator(train_samples, batch_size=32)
validation_generator = generator(validation_samples, batch_size=32)
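# Note: each generator batch yields 3 * batch_size images (centre, left and right cameras),
# so every sample counted in steps_per_epoch / validation_steps below contributes three
# training images rather than one.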
ch, row, col = 3, 160, 320 # Full image format
#import Keras model layers
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda
from keras.layers.convolutional import Conv2D, Cropping2D
from keras.layers.pooling import MaxPooling2D
# BUILD MODEL
model = Sequential()
# Preprocess incoming data, centered around zero with small standard deviation
model.add(Lambda(lambda x: x/127.5 - 1.0, input_shape=(row,col,ch)))
# Crop incoming data (training, validation, and autonomous so that everything is consistent)
model.add(Cropping2D(cropping=((60,20), (0,0)))) # could be first layer to reduce memory used in Lambda calculation, and thus faster training
# Begin CNN (similar to NVIDIA architecture)
# Convolution layer 1-3, kernel size 5 with stride of 2
model.add(Conv2D(24,(5,5),strides=(2,2),activation='relu'))
model.add(Conv2D(36,(5,5),strides=(2,2),activation='relu'))
model.add(Conv2D(48,(5,5),strides=(2,2),activation='relu'))
# Convolution layers 4-5, kernel size 3 wth stride of 1
model.add(Conv2D(64,(3,3),activation='relu'))
model.add(Conv2D(64,(3,3),activation='relu'))
# Flatten convolution output to yield single numerical result
model.add(Flatten())
# Fully connected layers to complete computations, gradually decreasing in parameters until final value
model.add(Dense(100))
model.add(Dense(50))
model.add(Dense(10))
model.add(Dense(1))
## Training hyper parameters to play with
## Stop training checkpoints...
# save_path = 'model{epoch:02d}-{val_loss:.2f}.h5'
# checkpoint = ModelCheckpoint(filepath=save_path, monitor='val_loss', save_best_only=True)
# stopper = EarlyStopping(monitor='val_acc', min_delta=0.0003, patience=5)
## OR
batch_size = 32
epochs = 5 #***
## Compile and train the model
model.compile(loss='mse', optimizer='adam', metrics=['accuracy']) #use Mean Squared Error to measure loss, use Adam optimizer for tuning
model.fit_generator(train_generator, steps_per_epoch= len(train_samples)/batch_size,validation_data=validation_generator, validation_steps=len(validation_samples)/batch_size, epochs=epochs, verbose = 1) # train using generators
#save the trained model
model.save('model.h5')
| [
"keras.layers.core.Flatten",
"keras.layers.convolutional.Cropping2D",
"sklearn.model_selection.train_test_split",
"sklearn.utils.shuffle",
"keras.layers.core.Lambda",
"keras.models.Sequential",
"numpy.array",
"keras.layers.convolutional.Conv2D",
"csv.reader",
"keras.layers.core.Dense"
]
| [((399, 415), 'sklearn.utils.shuffle', 'shuffle', (['samples'], {}), '(samples)\n', (406, 415), False, 'from sklearn.utils import shuffle\n'), ((610, 650), 'sklearn.model_selection.train_test_split', 'train_test_split', (['samples'], {'test_size': '(0.2)'}), '(samples, test_size=0.2)\n', (626, 650), False, 'from sklearn.model_selection import train_test_split\n'), ((3171, 3183), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (3181, 3183), False, 'from keras.models import Sequential\n'), ((206, 225), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (216, 225), False, 'import csv\n'), ((3274, 3335), 'keras.layers.core.Lambda', 'Lambda', (['(lambda x: x / 127.5 - 1.0)'], {'input_shape': '(row, col, ch)'}), '(lambda x: x / 127.5 - 1.0, input_shape=(row, col, ch))\n', (3280, 3335), False, 'from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda\n'), ((3436, 3475), 'keras.layers.convolutional.Cropping2D', 'Cropping2D', ([], {'cropping': '((60, 20), (0, 0))'}), '(cropping=((60, 20), (0, 0)))\n', (3446, 3475), False, 'from keras.layers.convolutional import Conv2D, Cropping2D\n'), ((3680, 3733), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['(24)', '(5, 5)'], {'strides': '(2, 2)', 'activation': '"""relu"""'}), "(24, (5, 5), strides=(2, 2), activation='relu')\n", (3686, 3733), False, 'from keras.layers.convolutional import Conv2D, Cropping2D\n'), ((3740, 3793), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['(36)', '(5, 5)'], {'strides': '(2, 2)', 'activation': '"""relu"""'}), "(36, (5, 5), strides=(2, 2), activation='relu')\n", (3746, 3793), False, 'from keras.layers.convolutional import Conv2D, Cropping2D\n'), ((3800, 3853), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['(48)', '(5, 5)'], {'strides': '(2, 2)', 'activation': '"""relu"""'}), "(48, (5, 5), strides=(2, 2), activation='relu')\n", (3806, 3853), False, 'from keras.layers.convolutional import Conv2D, Cropping2D\n'), ((3916, 3953), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (3922, 3953), False, 'from keras.layers.convolutional import Conv2D, Cropping2D\n'), ((3962, 3999), 'keras.layers.convolutional.Conv2D', 'Conv2D', (['(64)', '(3, 3)'], {'activation': '"""relu"""'}), "(64, (3, 3), activation='relu')\n", (3968, 3999), False, 'from keras.layers.convolutional import Conv2D, Cropping2D\n'), ((4070, 4079), 'keras.layers.core.Flatten', 'Flatten', ([], {}), '()\n', (4077, 4079), False, 'from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda\n'), ((4195, 4205), 'keras.layers.core.Dense', 'Dense', (['(100)'], {}), '(100)\n', (4200, 4205), False, 'from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda\n'), ((4217, 4226), 'keras.layers.core.Dense', 'Dense', (['(50)'], {}), '(50)\n', (4222, 4226), False, 'from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda\n'), ((4238, 4247), 'keras.layers.core.Dense', 'Dense', (['(10)'], {}), '(10)\n', (4243, 4247), False, 'from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda\n'), ((4259, 4267), 'keras.layers.core.Dense', 'Dense', (['(1)'], {}), '(1)\n', (4264, 4267), False, 'from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda\n'), ((2527, 2543), 'numpy.array', 'np.array', (['images'], {}), '(images)\n', (2535, 2543), True, 'import numpy as np\n'), ((2566, 2582), 'numpy.array', 'np.array', (['angles'], {}), '(angles)\n', (2574, 2582), True, 'import numpy as np\n'), 
((2601, 2626), 'sklearn.utils.shuffle', 'shuffle', (['X_train', 'y_train'], {}), '(X_train, y_train)\n', (2608, 2626), False, 'from sklearn.utils import shuffle\n')] |
#!/usr/bin/env python3
""" A script containing the basic principles of the extraction primitive inner
workings"""
from __future__ import division, print_function
from ghostdr import polyfit
import numpy as np
# FITS I/O: astropy.io.fits is assumed here, aliased as pyfits since the rest of the script uses that name
import astropy.io.fits as pyfits
# Firstly, let's find all the needed files
fitsdir='/Users/mireland/data/ghost/cal_frames/'
#Define the files in use (NB xmod.txt and wavemod.txt should be correct)
arc_file = fitsdir+"arc_extracted.fits"
# load it in now:
extracted_flux,extracted_vars=pyfits.getdata(arc_file)
# Where is the default location for the model? By default it is a parameter
# in the ghost class. If this needs to be overwritten, go ahead.
# This is the xmod file. Wherever it is saved from the flat reduction.
xmodel_file=fitsdir+'GHOST_1_1_blue_std_xmodPolyfit.fits'
# All the other models... which are currently in the "test" directory.
test_files_dir = fitsdir  # placeholder: point this at the directory that actually holds the test model files
wmodel_file=test_files_dir+'wparams_blue_std.fits'
spatmod_file=test_files_dir+'spatmod.fits'
specmod_file=test_files_dir+'specmod.fits'
rotmod_file=test_files_dir+'rotmod2.fits'
# Find the arc line list file
arclinefile='/home/jbento/code/ghostdr/ghostdr/ADCONFIG_GHOST/lookups/GHOST/Polyfit/mnras0378-0221-SD1.txt'
arcwaves, arcfluxes= np.loadtxt(arclinefile,usecols=[1,2]).T
#instantiate the ghost arm
arm = polyfit.GhostArm('blue',mode='std')
#Get the initial default model from the lookup location
xpars=pyfits.getdata(xmodel_file)
wpars=pyfits.getdata(wmodel_file)
spatpars=pyfits.getdata(spatmod_file)
specpars=pyfits.getdata(specmod_file)
rotpars=pyfits.getdata(rotmod_file)
# Now that all the model parameters are loaded, set up the spectral format matrix
arm.spectral_format_with_matrix(xpars,wpars,spatpars,specpars,rotpars)
slitview = polyfit.SlitView(image_array, flat_image_array, mode='std')
# The extractor is given the polyfit "arm" object, and a slitview object which has
# been instantiated with the slit viewer data.
extractor = polyfit.Extractor(arm, slitview)
#Now find the other lines, after first re-loading into the extractor.
# the inspect parameter is a verbose option for visualising the line
# finding results
lines_out=extractor.find_lines(extracted_flux, arcwaves, inspect=False)
#Now finally do the wavelength fit!
fitted_params, wave_and_resid = arm.read_lines_and_fit(wpars,lines_out,ydeg=3,xdeg=3)
# Optionally show residuals?
#Now write the output to a file, in whatever format suits the recipe system best.
pyfits.writeto('outputs.fits',fitted_params)
| [
"ghostdr.polyfit.Extractor",
"ghostdr.polyfit.GhostArm",
"ghostdr.polyfit.SlitView"
]
| [((1251, 1287), 'ghostdr.polyfit.GhostArm', 'polyfit.GhostArm', (['"""blue"""'], {'mode': '"""std"""'}), "('blue', mode='std')\n", (1267, 1287), False, 'from ghostdr import polyfit\n'), ((1607, 1666), 'ghostdr.polyfit.SlitView', 'polyfit.SlitView', (['image_array', 'flat_image_array'], {'mode': '"""std"""'}), "(image_array, flat_image_array, mode='std')\n", (1623, 1666), False, 'from ghostdr import polyfit\n'), ((1811, 1843), 'ghostdr.polyfit.Extractor', 'polyfit.Extractor', (['arm', 'slitview'], {}), '(arm, slitview)\n', (1828, 1843), False, 'from ghostdr import polyfit\n')] |
import unittest
import datetime
import kronos
string_format_time = "%Y-%m-%d %H:%M:%S"
date_time_str = "2020-07-19 18:14:21"
class KronosTest(unittest.TestCase):
def test_get_day_of_week(self):
for i in range(len(kronos.week_days)):
date = kronos.get_date_time_from_string(f"2020-08-{10 + i} 13:00:00")
self.assertEqual(kronos.week_days.get(i), kronos.get_day_of_week(date))
def test_is_yesterday(self):
date_time = kronos.get_date_time_from_string("2020-07-20 18:14:21")
self.assertTrue(kronos.is_yesterday(date_time_str, today=date_time))
date_time = kronos.get_date_time_from_string("2020-07-19 18:14:21")
self.assertFalse(kronos.is_yesterday(date_time_str, today=date_time))
def test_is_previous_friday(self):
last_friday = "2020-08-14 13:00:00"
last_monday = kronos.get_date_time_from_string("2020-08-17 13:00:00")
self.assertTrue(kronos.is_previous_friday(last_friday, last_monday))
last_tuesday = kronos.get_date_time_from_string("2020-08-18 13:00:00")
self.assertFalse(kronos.is_previous_friday(last_friday, last_tuesday))
def test_is_overdue_checks_correctly(self):
creation_date = "2020-08-10 13:00:00"
completion_goal = 5
self.assertTrue(kronos.is_overdue(creation_date, completion_goal))
on_time_date = kronos.get_date_time_as_string()
on_time_goal = 100
self.assertFalse(kronos.is_overdue(on_time_date, on_time_goal))
| [
"kronos.is_previous_friday",
"kronos.is_yesterday",
"kronos.is_overdue",
"kronos.get_date_time_from_string",
"kronos.get_day_of_week",
"kronos.get_date_time_as_string",
"kronos.week_days.get"
]
| [((468, 523), 'kronos.get_date_time_from_string', 'kronos.get_date_time_from_string', (['"""2020-07-20 18:14:21"""'], {}), "('2020-07-20 18:14:21')\n", (500, 523), False, 'import kronos\n'), ((621, 676), 'kronos.get_date_time_from_string', 'kronos.get_date_time_from_string', (['"""2020-07-19 18:14:21"""'], {}), "('2020-07-19 18:14:21')\n", (653, 676), False, 'import kronos\n'), ((861, 916), 'kronos.get_date_time_from_string', 'kronos.get_date_time_from_string', (['"""2020-08-17 13:00:00"""'], {}), "('2020-08-17 13:00:00')\n", (893, 916), False, 'import kronos\n'), ((1017, 1072), 'kronos.get_date_time_from_string', 'kronos.get_date_time_from_string', (['"""2020-08-18 13:00:00"""'], {}), "('2020-08-18 13:00:00')\n", (1049, 1072), False, 'import kronos\n'), ((1374, 1406), 'kronos.get_date_time_as_string', 'kronos.get_date_time_as_string', ([], {}), '()\n', (1404, 1406), False, 'import kronos\n'), ((267, 329), 'kronos.get_date_time_from_string', 'kronos.get_date_time_from_string', (['f"""2020-08-{10 + i} 13:00:00"""'], {}), "(f'2020-08-{10 + i} 13:00:00')\n", (299, 329), False, 'import kronos\n'), ((548, 599), 'kronos.is_yesterday', 'kronos.is_yesterday', (['date_time_str'], {'today': 'date_time'}), '(date_time_str, today=date_time)\n', (567, 599), False, 'import kronos\n'), ((702, 753), 'kronos.is_yesterday', 'kronos.is_yesterday', (['date_time_str'], {'today': 'date_time'}), '(date_time_str, today=date_time)\n', (721, 753), False, 'import kronos\n'), ((941, 992), 'kronos.is_previous_friday', 'kronos.is_previous_friday', (['last_friday', 'last_monday'], {}), '(last_friday, last_monday)\n', (966, 992), False, 'import kronos\n'), ((1098, 1150), 'kronos.is_previous_friday', 'kronos.is_previous_friday', (['last_friday', 'last_tuesday'], {}), '(last_friday, last_tuesday)\n', (1123, 1150), False, 'import kronos\n'), ((1299, 1348), 'kronos.is_overdue', 'kronos.is_overdue', (['creation_date', 'completion_goal'], {}), '(creation_date, completion_goal)\n', (1316, 1348), False, 'import kronos\n'), ((1459, 1504), 'kronos.is_overdue', 'kronos.is_overdue', (['on_time_date', 'on_time_goal'], {}), '(on_time_date, on_time_goal)\n', (1476, 1504), False, 'import kronos\n'), ((359, 382), 'kronos.week_days.get', 'kronos.week_days.get', (['i'], {}), '(i)\n', (379, 382), False, 'import kronos\n'), ((384, 412), 'kronos.get_day_of_week', 'kronos.get_day_of_week', (['date'], {}), '(date)\n', (406, 412), False, 'import kronos\n')] |
from asyncio import Future
from greenlet import getcurrent
import psycopg2
from psycopg2 import * # noqa
from psycopg2 import extensions, OperationalError
__version__ = psycopg2.__version__
def psycopg2_wait_callback(conn):
"""A wait callback to allow greenlet to work with Psycopg.
The caller must be from a greenlet other than the main one.
:param conn: psycopg2 connection or file number
This function must be invoked from a coroutine with parent, therefore
invoking it from the main greenlet will raise an exception.
"""
while True:
state = conn.poll()
if state == extensions.POLL_OK:
# Done with waiting
break
elif state == extensions.POLL_READ:
_wait_fd(conn)
elif state == extensions.POLL_WRITE:
_wait_fd(conn, read=False)
else: # pragma nocover
raise OperationalError("Bad result from poll: %r" % state)
# INTERNALS
def _wait_fd(conn, read=True):
'''Wait for an event on file descriptor ``fd``.
:param conn: file descriptor
:param read: wait for a read event if ``True``, otherwise a wait
for write event.
This function must be invoked from a coroutine with parent, therefore
invoking it from the main greenlet will raise an exception.
'''
current = getcurrent()
parent = current.parent
assert parent, '"_wait_fd" must be called by greenlet with a parent'
try:
fileno = conn.fileno()
except AttributeError:
fileno = conn
future = Future()
# When the event on fd occurs switch back to the current greenlet
if read:
future._loop.add_reader(fileno, _done_wait_fd, fileno, future, read)
else:
future._loop.add_writer(fileno, _done_wait_fd, fileno, future, read)
# switch back to parent greenlet
parent.switch(future)
# Back on the child greenlet. Raise error if there is one
future.result()
def _done_wait_fd(fd, future, read):
try:
if read:
future._loop.remove_reader(fd)
else:
future._loop.remove_writer(fd)
except Exception as exc:
future.set_exception(exc)
else:
future.set_result(None)
try:
extensions.POLL_OK
except AttributeError: # pragma nocover
from pulsar import ImproperlyConfigured
raise ImproperlyConfigured(
'Psycopg2 does not have support for asynchronous connections. '
'You need at least version 2.2.0 of Psycopg2.')
extensions.set_wait_callback(psycopg2_wait_callback)
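# Illustrative usage (an assumption, not part of the original module): once the wait callback
# is registered above, ordinary blocking psycopg2 calls made from a child greenlet (e.g. inside
# a pulsar coroutine) yield to the event loop instead of blocking it:
#
#   def fetch_one(dsn):              # must run in a greenlet that has a parent
#       conn = psycopg2.connect(dsn)
#       cur = conn.cursor()
#       cur.execute("SELECT 1")
#       return cur.fetchone()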
| [
"psycopg2.OperationalError",
"psycopg2.extensions.set_wait_callback",
"pulsar.ImproperlyConfigured",
"greenlet.getcurrent",
"asyncio.Future"
]
| [((2505, 2557), 'psycopg2.extensions.set_wait_callback', 'extensions.set_wait_callback', (['psycopg2_wait_callback'], {}), '(psycopg2_wait_callback)\n', (2533, 2557), False, 'from psycopg2 import extensions, OperationalError\n'), ((1339, 1351), 'greenlet.getcurrent', 'getcurrent', ([], {}), '()\n', (1349, 1351), False, 'from greenlet import getcurrent\n'), ((1555, 1563), 'asyncio.Future', 'Future', ([], {}), '()\n', (1561, 1563), False, 'from asyncio import Future\n'), ((2354, 2493), 'pulsar.ImproperlyConfigured', 'ImproperlyConfigured', (['"""Psycopg2 does not have support for asynchronous connections. You need at least version 2.2.0 of Psycopg2."""'], {}), "(\n 'Psycopg2 does not have support for asynchronous connections. You need at least version 2.2.0 of Psycopg2.'\n )\n", (2374, 2493), False, 'from pulsar import ImproperlyConfigured\n'), ((899, 951), 'psycopg2.OperationalError', 'OperationalError', (["('Bad result from poll: %r' % state)"], {}), "('Bad result from poll: %r' % state)\n", (915, 951), False, 'from psycopg2 import extensions, OperationalError\n')] |
# Copyright 2011-2012 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the replica_set_connection module."""
import copy
import datetime
import os
import signal
import socket
import sys
import time
import thread
import traceback
import unittest
sys.path[0:0] = [""]
from nose.plugins.skip import SkipTest
from bson.son import SON
from bson.tz_util import utc
from pymongo.connection import Connection
from pymongo.read_preferences import ReadPreference
from pymongo.replica_set_connection import ReplicaSetConnection
from pymongo.replica_set_connection import _partition_node
from pymongo.database import Database
from pymongo.errors import (AutoReconnect,
ConfigurationError,
ConnectionFailure,
InvalidName,
OperationFailure)
from test import version
from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host
host = os.environ.get("DB_IP", 'localhost')
port = int(os.environ.get("DB_PORT", 27017))
pair = '%s:%d' % (host, port)
class TestReplicaSetConnectionAgainstStandalone(unittest.TestCase):
"""This is a funny beast -- we want to run tests for ReplicaSetConnection
but only if the database at DB_IP and DB_PORT is a standalone.
"""
def setUp(self):
conn = Connection(pair)
response = conn.admin.command('ismaster')
if 'setName' in response:
raise SkipTest()
def test_connect(self):
self.assertRaises(ConfigurationError, ReplicaSetConnection,
pair, replicaSet='anything',
connectTimeoutMS=600)
class TestConnectionReplicaSetBase(unittest.TestCase):
def setUp(self):
conn = Connection(pair)
response = conn.admin.command('ismaster')
if 'setName' in response:
self.name = str(response['setName'])
self.w = len(response['hosts'])
self.hosts = set([_partition_node(h)
for h in response["hosts"]])
self.arbiters = set([_partition_node(h)
for h in response.get("arbiters", [])])
repl_set_status = conn.admin.command('replSetGetStatus')
primary_info = [
m for m in repl_set_status['members']
if m['stateStr'] == 'PRIMARY'
][0]
self.primary = _partition_node(primary_info['name'])
self.secondaries = [
_partition_node(m['name']) for m in repl_set_status['members']
if m['stateStr'] == 'SECONDARY'
]
else:
raise SkipTest()
def _get_connection(self, **kwargs):
return ReplicaSetConnection(pair,
replicaSet=self.name,
**kwargs)
class TestConnection(TestConnectionReplicaSetBase):
def test_connect(self):
self.assertRaises(ConnectionFailure, ReplicaSetConnection,
"somedomainthatdoesntexist.org:27017",
replicaSet=self.name,
connectTimeoutMS=600)
self.assertRaises(ConfigurationError, ReplicaSetConnection,
pair, replicaSet='fdlksjfdslkjfd')
self.assertTrue(ReplicaSetConnection(pair, replicaSet=self.name))
def test_repr(self):
connection = self._get_connection()
self.assertEqual(repr(connection),
"ReplicaSetConnection(%r)" % (["%s:%d" % n
for n in
self.hosts],))
def test_properties(self):
c = ReplicaSetConnection(pair, replicaSet=self.name)
c.admin.command('ping')
self.assertEqual(c.primary, self.primary)
self.assertEqual(c.hosts, self.hosts)
self.assertEqual(c.arbiters, self.arbiters)
self.assertEqual(c.max_pool_size, 10)
self.assertEqual(c.document_class, dict)
self.assertEqual(c.tz_aware, False)
# Make sure RSC's properties are copied to Database and Collection
for obj in c, c.pymongo_test, c.pymongo_test.test:
self.assertEqual(obj.read_preference, ReadPreference.PRIMARY)
self.assertEqual(obj.tag_sets, [{}])
self.assertEqual(obj.secondary_acceptable_latency_ms, 15)
self.assertEqual(obj.slave_okay, False)
self.assertEqual(obj.safe, False)
cursor = c.pymongo_test.test.find()
self.assertEqual(
ReadPreference.PRIMARY, cursor._Cursor__read_preference)
self.assertEqual([{}], cursor._Cursor__tag_sets)
self.assertEqual(15, cursor._Cursor__secondary_acceptable_latency_ms)
self.assertEqual(False, cursor._Cursor__slave_okay)
c.close()
tag_sets = [{'dc': 'la', 'rack': '2'}, {'foo': 'bar'}]
c = ReplicaSetConnection(pair, replicaSet=self.name, max_pool_size=25,
document_class=SON, tz_aware=True,
slaveOk=False, safe=True,
read_preference=ReadPreference.SECONDARY,
tag_sets=copy.deepcopy(tag_sets),
secondary_acceptable_latency_ms=77)
c.admin.command('ping')
self.assertEqual(c.primary, self.primary)
self.assertEqual(c.hosts, self.hosts)
self.assertEqual(c.arbiters, self.arbiters)
self.assertEqual(c.max_pool_size, 25)
self.assertEqual(c.document_class, SON)
self.assertEqual(c.tz_aware, True)
for obj in c, c.pymongo_test, c.pymongo_test.test:
self.assertEqual(obj.read_preference, ReadPreference.SECONDARY)
self.assertEqual(obj.tag_sets, tag_sets)
self.assertEqual(obj.secondary_acceptable_latency_ms, 77)
self.assertEqual(obj.slave_okay, False)
self.assertEqual(obj.safe, True)
cursor = c.pymongo_test.test.find()
self.assertEqual(
ReadPreference.SECONDARY, cursor._Cursor__read_preference)
self.assertEqual(tag_sets, cursor._Cursor__tag_sets)
self.assertEqual(77, cursor._Cursor__secondary_acceptable_latency_ms)
self.assertEqual(False, cursor._Cursor__slave_okay)
cursor = c.pymongo_test.test.find(
read_preference=ReadPreference.NEAREST,
tag_sets=[{'dc':'ny'}, {}],
secondary_acceptable_latency_ms=123)
self.assertEqual(
ReadPreference.NEAREST, cursor._Cursor__read_preference)
self.assertEqual([{'dc':'ny'}, {}], cursor._Cursor__tag_sets)
self.assertEqual(123, cursor._Cursor__secondary_acceptable_latency_ms)
self.assertEqual(False, cursor._Cursor__slave_okay)
if version.at_least(c, (1, 7, 4)):
self.assertEqual(c.max_bson_size, 16777216)
else:
self.assertEqual(c.max_bson_size, 4194304)
c.close()
def test_get_db(self):
connection = self._get_connection()
def make_db(base, name):
return base[name]
self.assertRaises(InvalidName, make_db, connection, "")
self.assertRaises(InvalidName, make_db, connection, "te$t")
self.assertRaises(InvalidName, make_db, connection, "te.t")
self.assertRaises(InvalidName, make_db, connection, "te\\t")
self.assertRaises(InvalidName, make_db, connection, "te/t")
self.assertRaises(InvalidName, make_db, connection, "te st")
self.assertTrue(isinstance(connection.test, Database))
self.assertEqual(connection.test, connection["test"])
self.assertEqual(connection.test, Database(connection, "test"))
connection.close()
def test_auto_reconnect_exception_when_read_preference_is_secondary(self):
c = self._get_connection()
db = c.pymongo_test
def raise_socket_error(*args, **kwargs):
raise socket.error
old_sendall = socket.socket.sendall
socket.socket.sendall = raise_socket_error
try:
cursor = db.test.find(read_preference=ReadPreference.SECONDARY)
self.assertRaises(AutoReconnect, cursor.next)
finally:
socket.socket.sendall = old_sendall
def test_operations(self):
c = self._get_connection()
# Check explicitly for a case we've commonly hit in tests:
# a replica set is started with a tiny oplog, a previous
# test does a big insert that leaves the secondaries
# permanently "RECOVERING", and our insert(w=self.w) hangs
# forever.
rs_status = c.admin.command('replSetGetStatus')
members = rs_status['members']
self.assertFalse(
[m for m in members if m['stateStr'] == 'RECOVERING'],
"Replica set is recovering, try a larger oplogSize next time"
)
db = c.pymongo_test
db.test.remove({}, safe=True)
self.assertEqual(0, db.test.count())
db.test.insert({'foo': 'x'}, safe=True, w=self.w, wtimeout=10000)
self.assertEqual(1, db.test.count())
cursor = db.test.find()
doc = cursor.next()
self.assertEqual('x', doc['foo'])
# Ensure we read from the primary
self.assertEqual(c.primary, cursor._Cursor__connection_id)
cursor = db.test.find(read_preference=ReadPreference.SECONDARY)
doc = cursor.next()
self.assertEqual('x', doc['foo'])
# Ensure we didn't read from the primary
self.assertTrue(cursor._Cursor__connection_id in c.secondaries)
self.assertEqual(1, db.test.count())
db.test.remove({}, safe=True)
self.assertEqual(0, db.test.count())
db.test.drop()
c.close()
def test_database_names(self):
connection = self._get_connection()
connection.pymongo_test.test.save({"dummy": u"object"})
connection.pymongo_test_mike.test.save({"dummy": u"object"})
dbs = connection.database_names()
self.assertTrue("pymongo_test" in dbs)
self.assertTrue("pymongo_test_mike" in dbs)
connection.close()
def test_drop_database(self):
connection = self._get_connection()
self.assertRaises(TypeError, connection.drop_database, 5)
self.assertRaises(TypeError, connection.drop_database, None)
connection.pymongo_test.test.save({"dummy": u"object"})
dbs = connection.database_names()
self.assertTrue("pymongo_test" in dbs)
connection.drop_database("pymongo_test")
dbs = connection.database_names()
self.assertTrue("pymongo_test" not in dbs)
connection.pymongo_test.test.save({"dummy": u"object"})
dbs = connection.database_names()
self.assertTrue("pymongo_test" in dbs)
connection.drop_database(connection.pymongo_test)
dbs = connection.database_names()
self.assertTrue("pymongo_test" not in dbs)
connection.close()
def test_copy_db(self):
c = self._get_connection()
self.assertTrue(c.in_request())
self.assertRaises(TypeError, c.copy_database, 4, "foo")
self.assertRaises(TypeError, c.copy_database, "foo", 4)
self.assertRaises(InvalidName, c.copy_database, "foo", "$foo")
c.pymongo_test.test.drop()
c.drop_database("pymongo_test1")
c.drop_database("pymongo_test2")
c.pymongo_test.test.insert({"foo": "bar"})
self.assertFalse("pymongo_test1" in c.database_names())
self.assertFalse("pymongo_test2" in c.database_names())
c.copy_database("pymongo_test", "pymongo_test1")
# copy_database() didn't accidentally end the request
self.assertTrue(c.in_request())
self.assertTrue("pymongo_test1" in c.database_names())
self.assertEqual("bar", c.pymongo_test1.test.find_one()["foo"])
c.end_request()
self.assertFalse(c.in_request())
c.copy_database("pymongo_test", "pymongo_test2", pair)
# copy_database() didn't accidentally restart the request
self.assertFalse(c.in_request())
time.sleep(1)
self.assertTrue("pymongo_test2" in c.database_names())
self.assertEqual("bar", c.pymongo_test2.test.find_one()["foo"])
if version.at_least(c, (1, 3, 3, 1)):
c.drop_database("pymongo_test1")
c.pymongo_test.add_user("mike", "password")
self.assertRaises(OperationFailure, c.copy_database,
"pymongo_test", "pymongo_test1",
username="foo", password="<PASSWORD>")
self.assertFalse("pymongo_test1" in c.database_names())
self.assertRaises(OperationFailure, c.copy_database,
"pymongo_test", "pymongo_test1",
username="mike", password="<PASSWORD>")
self.assertFalse("pymongo_test1" in c.database_names())
c.copy_database("pymongo_test", "pymongo_test1",
username="mike", password="password")
self.assertTrue("pymongo_test1" in c.database_names())
time.sleep(2)
self.assertEqual("bar", c.pymongo_test1.test.find_one()["foo"])
c.close()
def test_iteration(self):
connection = self._get_connection()
def iterate():
[a for a in connection]
self.assertRaises(TypeError, iterate)
connection.close()
def test_disconnect(self):
c = self._get_connection()
coll = c.foo.bar
c.disconnect()
c.disconnect()
coll.count()
c.disconnect()
c.disconnect()
coll.count()
def test_fork(self):
"""Test using a connection before and after a fork.
"""
if sys.platform == "win32":
raise SkipTest()
try:
from multiprocessing import Process, Pipe
except ImportError:
raise SkipTest()
db = self._get_connection().pymongo_test
# Failure occurs if the connection is used before the fork
db.test.find_one()
#db.connection.end_request()
def loop(pipe):
while True:
try:
db.test.insert({"a": "b"}, safe=True)
for _ in db.test.find():
pass
except:
traceback.print_exc()
pipe.send(True)
os._exit(1)
cp1, cc1 = Pipe()
cp2, cc2 = Pipe()
p1 = Process(target=loop, args=(cc1,))
p2 = Process(target=loop, args=(cc2,))
p1.start()
p2.start()
p1.join(1)
p2.join(1)
p1.terminate()
p2.terminate()
p1.join()
p2.join()
cc1.close()
cc2.close()
# recv will only have data if the subprocess failed
try:
cp1.recv()
self.fail()
except EOFError:
pass
try:
cp2.recv()
self.fail()
except EOFError:
pass
db.connection.close()
def test_document_class(self):
c = self._get_connection()
db = c.pymongo_test
db.test.insert({"x": 1})
self.assertEqual(dict, c.document_class)
self.assertTrue(isinstance(db.test.find_one(), dict))
self.assertFalse(isinstance(db.test.find_one(), SON))
c.document_class = SON
self.assertEqual(SON, c.document_class)
self.assertTrue(isinstance(db.test.find_one(), SON))
self.assertFalse(isinstance(db.test.find_one(as_class=dict), SON))
c.close()
c = self._get_connection(document_class=SON)
db = c.pymongo_test
self.assertEqual(SON, c.document_class)
self.assertTrue(isinstance(db.test.find_one(), SON))
self.assertFalse(isinstance(db.test.find_one(as_class=dict), SON))
c.document_class = dict
self.assertEqual(dict, c.document_class)
self.assertTrue(isinstance(db.test.find_one(), dict))
self.assertFalse(isinstance(db.test.find_one(), SON))
c.close()
def test_network_timeout(self):
no_timeout = self._get_connection()
timeout_sec = 1
timeout = self._get_connection(socketTimeoutMS=timeout_sec*1000)
no_timeout.pymongo_test.drop_collection("test")
no_timeout.pymongo_test.test.insert({"x": 1}, safe=True)
# A $where clause that takes a second longer than the timeout
where_func = delay(1 + timeout_sec)
def get_x(db):
doc = db.test.find().where(where_func).next()
return doc["x"]
self.assertEqual(1, get_x(no_timeout.pymongo_test))
self.assertRaises(ConnectionFailure, get_x, timeout.pymongo_test)
def get_x_timeout(db, t):
doc = db.test.find(network_timeout=t).where(where_func).next()
return doc["x"]
self.assertEqual(1, get_x_timeout(timeout.pymongo_test, None))
self.assertRaises(ConnectionFailure, get_x_timeout,
no_timeout.pymongo_test, 0.1)
no_timeout.close()
timeout.close()
def test_tz_aware(self):
self.assertRaises(ConfigurationError, ReplicaSetConnection,
tz_aware='foo', replicaSet=self.name)
aware = self._get_connection(tz_aware=True)
naive = self._get_connection()
aware.pymongo_test.drop_collection("test")
now = datetime.datetime.utcnow()
aware.pymongo_test.test.insert({"x": now}, safe=True)
time.sleep(1)
self.assertEqual(None, naive.pymongo_test.test.find_one()["x"].tzinfo)
self.assertEqual(utc, aware.pymongo_test.test.find_one()["x"].tzinfo)
self.assertEqual(
aware.pymongo_test.test.find_one()["x"].replace(tzinfo=None),
naive.pymongo_test.test.find_one()["x"])
def test_ipv6(self):
try:
connection = ReplicaSetConnection("[::1]:%d" % (port,),
replicaSet=self.name)
except:
# Either mongod was started without --ipv6
# or the OS doesn't support it (or both).
raise SkipTest()
# Try a few simple things
connection = ReplicaSetConnection("mongodb://[::1]:%d" % (port,),
replicaSet=self.name)
connection = ReplicaSetConnection("mongodb://[::1]:%d/?safe=true;"
"replicaSet=%s" % (port, self.name))
connection = ReplicaSetConnection("[::1]:%d,localhost:"
"%d" % (port, port),
replicaSet=self.name)
connection = ReplicaSetConnection("localhost:%d,[::1]:"
"%d" % (port, port),
replicaSet=self.name)
connection.pymongo_test.test.save({"dummy": u"object"})
connection.pymongo_test_bernie.test.save({"dummy": u"object"})
dbs = connection.database_names()
self.assertTrue("pymongo_test" in dbs)
self.assertTrue("pymongo_test_bernie" in dbs)
connection.close()
def _test_kill_cursor_explicit(self, read_pref):
c = self._get_connection(read_preference=read_pref)
db = c.pymongo_test
db.drop_collection("test")
test = db.test
test.insert([{"i": i} for i in range(20)], w=1 + len(c.secondaries))
# Partially evaluate cursor so it's left alive, then kill it
cursor = test.find().batch_size(10)
cursor.next()
self.assertNotEqual(0, cursor.cursor_id)
connection_id = cursor._Cursor__connection_id
writer = c._ReplicaSetConnection__writer
if read_pref == ReadPreference.PRIMARY:
msg = "Expected cursor's connection_id to be %s, got %s" % (
writer, connection_id)
self.assertEqual(connection_id, writer, msg)
else:
self.assertNotEqual(connection_id, writer,
"Expected cursor's connection_id not to be primary")
cursor_id = cursor.cursor_id
# Cursor dead on server - trigger a getMore on the same cursor_id and
# check that the server returns an error.
cursor2 = cursor.clone()
cursor2._Cursor__id = cursor_id
if (sys.platform.startswith('java') or
'PyPy' in sys.version):
# Explicitly kill cursor.
cursor.close()
else:
# Implicitly kill it in CPython.
del cursor
self.assertRaises(OperationFailure, lambda: list(cursor2))
def test_kill_cursor_explicit_primary(self):
self._test_kill_cursor_explicit(ReadPreference.PRIMARY)
def test_kill_cursor_explicit_secondary(self):
self._test_kill_cursor_explicit(ReadPreference.SECONDARY)
def test_interrupt_signal(self):
if sys.platform.startswith('java'):
raise SkipTest("Can't test interrupts in Jython")
# Test fix for PYTHON-294 -- make sure Connection closes its
# socket if it gets an interrupt while waiting to recv() from it.
c = self._get_connection()
db = c.pymongo_test
# A $where clause which takes 1.5 sec to execute
where = delay(1.5)
# Need exactly 1 document so find() will execute its $where clause once
db.drop_collection('foo')
db.foo.insert({'_id': 1}, safe=True)
old_signal_handler = None
try:
# Platform-specific hacks for raising a KeyboardInterrupt on the main
# thread while find() is in-progress: On Windows, SIGALRM is unavailable
            # so we use a second thread. In our Bamboo setup on Linux, the thread
# technique causes an error in the test at sock.recv():
# TypeError: 'int' object is not callable
# We don't know what causes this in Bamboo, so we hack around it.
if sys.platform == 'win32':
def interrupter():
time.sleep(0.25)
# Raises KeyboardInterrupt in the main thread
thread.interrupt_main()
thread.start_new_thread(interrupter, ())
else:
# Convert SIGALRM to SIGINT -- it's hard to schedule a SIGINT for one
# second in the future, but easy to schedule SIGALRM.
def sigalarm(num, frame):
raise KeyboardInterrupt
old_signal_handler = signal.signal(signal.SIGALRM, sigalarm)
signal.alarm(1)
raised = False
try:
# Will be interrupted by a KeyboardInterrupt.
db.foo.find({'$where': where}).next()
except KeyboardInterrupt:
raised = True
# Can't use self.assertRaises() because it doesn't catch system
# exceptions
self.assertTrue(raised, "Didn't raise expected ConnectionFailure")
# Raises AssertionError due to PYTHON-294 -- Mongo's response to the
# previous find() is still waiting to be read on the socket, so the
# request id's don't match.
self.assertEqual(
{'_id': 1},
db.foo.find().next()
)
finally:
if old_signal_handler:
signal.signal(signal.SIGALRM, old_signal_handler)
def test_auto_start_request(self):
for bad_horrible_value in (None, 5, 'hi!'):
self.assertRaises(
(TypeError, ConfigurationError),
lambda: self._get_connection(auto_start_request=bad_horrible_value)
)
# auto_start_request should default to True
conn = self._get_connection()
pools = [mongo.pool for mongo in
conn._ReplicaSetConnection__members.values()]
self.assertTrue(conn.auto_start_request)
self.assertTrue(conn.in_request())
# Trigger the RSC to actually start a request
conn.test.test.find_one()
for pool in pools:
self.assertTrue(pool.in_request())
conn.end_request()
self.assertFalse(conn.in_request())
for pool in pools:
self.assertFalse(pool.in_request())
conn.start_request()
self.assertTrue(conn.in_request())
conn.close()
conn = self._get_connection(auto_start_request=False)
self.assertFalse(conn.in_request())
conn.start_request()
self.assertTrue(conn.in_request())
conn.end_request()
self.assertFalse(conn.in_request())
conn.close()
def test_schedule_refresh(self):
# Monitor thread starts waiting for _refresh_interval, 30 seconds
conn = self._get_connection()
# Reconnect if necessary
conn.pymongo_test.test.find_one()
secondaries = conn.secondaries
for secondary in secondaries:
conn._ReplicaSetConnection__members[secondary].up = False
conn._ReplicaSetConnection__members[conn.primary].up = False
# Wake up monitor thread
conn._ReplicaSetConnection__schedule_refresh()
# Refresh interval is 30 seconds; scheduling a refresh tells the
# monitor thread / greenlet to start a refresh now. We still need to
# sleep a few seconds for it to complete.
time.sleep(5)
for secondary in secondaries:
self.assertTrue(conn._ReplicaSetConnection__members[secondary].up,
"ReplicaSetConnection didn't detect secondary is up")
self.assertTrue(conn._ReplicaSetConnection__members[conn.primary].up,
"ReplicaSetConnection didn't detect primary is up")
conn.close()
def test_pinned_member(self):
latency = 1000 * 1000
conn = self._get_connection(
auto_start_request=False, secondary_acceptable_latency_ms=latency)
host = read_from_which_host(conn, ReadPreference.SECONDARY)
self.assertTrue(host in conn.secondaries)
# No pinning since we're not in a request
assertReadFromAll(
self, conn, conn.secondaries,
ReadPreference.SECONDARY, None, latency)
assertReadFromAll(
self, conn, list(conn.secondaries) + [conn.primary],
ReadPreference.NEAREST, None, latency)
conn.start_request()
host = read_from_which_host(conn, ReadPreference.SECONDARY)
self.assertTrue(host in conn.secondaries)
assertReadFrom(self, conn, host, ReadPreference.SECONDARY)
# Repin
primary = read_from_which_host(conn, ReadPreference.PRIMARY)
self.assertEqual(conn.primary, primary)
assertReadFrom(self, conn, primary, ReadPreference.NEAREST)
# Repin again
host = read_from_which_host(conn, ReadPreference.SECONDARY)
self.assertTrue(host in conn.secondaries)
assertReadFrom(self, conn, host, ReadPreference.SECONDARY)
# Unpin
conn.end_request()
assertReadFromAll(
self, conn, list(conn.secondaries) + [conn.primary],
ReadPreference.NEAREST, None, latency)
if __name__ == "__main__":
unittest.main()
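# --- Editor's note (illustrative, not part of the original test module) ---
# The suite reads its target from the DB_IP / DB_PORT environment variables set
# at the top of the file, e.g. (the file name shown is only a placeholder):
#
#   DB_IP=localhost DB_PORT=27017 python test_replica_set_connection.py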
| [
"pymongo.database.Database",
"multiprocessing.Process",
"sys.platform.startswith",
"time.sleep",
"pymongo.replica_set_connection.ReplicaSetConnection",
"signal.alarm",
"copy.deepcopy",
"unittest.main",
"test.utils.delay",
"test.utils.read_from_which_host",
"test.utils.assertReadFrom",
"pymongo.connection.Connection",
"pymongo.replica_set_connection._partition_node",
"thread.interrupt_main",
"traceback.print_exc",
"test.utils.assertReadFromAll",
"nose.plugins.skip.SkipTest",
"multiprocessing.Pipe",
"signal.signal",
"datetime.datetime.utcnow",
"os.environ.get",
"test.version.at_least",
"os._exit",
"thread.start_new_thread"
]
| [((1477, 1513), 'os.environ.get', 'os.environ.get', (['"""DB_IP"""', '"""localhost"""'], {}), "('DB_IP', 'localhost')\n", (1491, 1513), False, 'import os\n'), ((1525, 1557), 'os.environ.get', 'os.environ.get', (['"""DB_PORT"""', '(27017)'], {}), "('DB_PORT', 27017)\n", (1539, 1557), False, 'import os\n'), ((27982, 27997), 'unittest.main', 'unittest.main', ([], {}), '()\n', (27995, 27997), False, 'import unittest\n'), ((1847, 1863), 'pymongo.connection.Connection', 'Connection', (['pair'], {}), '(pair)\n', (1857, 1863), False, 'from pymongo.connection import Connection\n'), ((2270, 2286), 'pymongo.connection.Connection', 'Connection', (['pair'], {}), '(pair)\n', (2280, 2286), False, 'from pymongo.connection import Connection\n'), ((3253, 3311), 'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (['pair'], {'replicaSet': 'self.name'}), '(pair, replicaSet=self.name, **kwargs)\n', (3273, 3311), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((4212, 4260), 'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (['pair'], {'replicaSet': 'self.name'}), '(pair, replicaSet=self.name)\n', (4232, 4260), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((7356, 7386), 'test.version.at_least', 'version.at_least', (['c', '(1, 7, 4)'], {}), '(c, (1, 7, 4))\n', (7372, 7386), False, 'from test import version\n'), ((12700, 12713), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (12710, 12713), False, 'import time\n'), ((12862, 12895), 'test.version.at_least', 'version.at_least', (['c', '(1, 3, 3, 1)'], {}), '(c, (1, 3, 3, 1))\n', (12878, 12895), False, 'from test import version\n'), ((15112, 15118), 'multiprocessing.Pipe', 'Pipe', ([], {}), '()\n', (15116, 15118), False, 'from multiprocessing import Process, Pipe\n'), ((15138, 15144), 'multiprocessing.Pipe', 'Pipe', ([], {}), '()\n', (15142, 15144), False, 'from multiprocessing import Process, Pipe\n'), ((15159, 15192), 'multiprocessing.Process', 'Process', ([], {'target': 'loop', 'args': '(cc1,)'}), '(target=loop, args=(cc1,))\n', (15166, 15192), False, 'from multiprocessing import Process, Pipe\n'), ((15206, 15239), 'multiprocessing.Process', 'Process', ([], {'target': 'loop', 'args': '(cc2,)'}), '(target=loop, args=(cc2,))\n', (15213, 15239), False, 'from multiprocessing import Process, Pipe\n'), ((17164, 17186), 'test.utils.delay', 'delay', (['(1 + timeout_sec)'], {}), '(1 + timeout_sec)\n', (17169, 17186), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((18127, 18153), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (18151, 18153), False, 'import datetime\n'), ((18224, 18237), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (18234, 18237), False, 'import time\n'), ((18942, 19016), 'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (["('mongodb://[::1]:%d' % (port,))"], {'replicaSet': 'self.name'}), "('mongodb://[::1]:%d' % (port,), replicaSet=self.name)\n", (18962, 19016), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((19080, 19171), 'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (["('mongodb://[::1]:%d/?safe=true;replicaSet=%s' % (port, self.name))"], {}), "('mongodb://[::1]:%d/?safe=true;replicaSet=%s' % (port,\n self.name))\n", (19100, 19171), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((19234, 19321), 
'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (["('[::1]:%d,localhost:%d' % (port, port))"], {'replicaSet': 'self.name'}), "('[::1]:%d,localhost:%d' % (port, port), replicaSet=\n self.name)\n", (19254, 19321), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((19425, 19512), 'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (["('localhost:%d,[::1]:%d' % (port, port))"], {'replicaSet': 'self.name'}), "('localhost:%d,[::1]:%d' % (port, port), replicaSet=\n self.name)\n", (19445, 19512), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((21644, 21675), 'sys.platform.startswith', 'sys.platform.startswith', (['"""java"""'], {}), "('java')\n", (21667, 21675), False, 'import sys\n'), ((22020, 22030), 'test.utils.delay', 'delay', (['(1.5)'], {}), '(1.5)\n', (22025, 22030), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((26154, 26167), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (26164, 26167), False, 'import time\n'), ((26717, 26769), 'test.utils.read_from_which_host', 'read_from_which_host', (['conn', 'ReadPreference.SECONDARY'], {}), '(conn, ReadPreference.SECONDARY)\n', (26737, 26769), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((26879, 26971), 'test.utils.assertReadFromAll', 'assertReadFromAll', (['self', 'conn', 'conn.secondaries', 'ReadPreference.SECONDARY', 'None', 'latency'], {}), '(self, conn, conn.secondaries, ReadPreference.SECONDARY,\n None, latency)\n', (26896, 26971), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((27182, 27234), 'test.utils.read_from_which_host', 'read_from_which_host', (['conn', 'ReadPreference.SECONDARY'], {}), '(conn, ReadPreference.SECONDARY)\n', (27202, 27234), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((27293, 27351), 'test.utils.assertReadFrom', 'assertReadFrom', (['self', 'conn', 'host', 'ReadPreference.SECONDARY'], {}), '(self, conn, host, ReadPreference.SECONDARY)\n', (27307, 27351), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((27387, 27437), 'test.utils.read_from_which_host', 'read_from_which_host', (['conn', 'ReadPreference.PRIMARY'], {}), '(conn, ReadPreference.PRIMARY)\n', (27407, 27437), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((27494, 27553), 'test.utils.assertReadFrom', 'assertReadFrom', (['self', 'conn', 'primary', 'ReadPreference.NEAREST'], {}), '(self, conn, primary, ReadPreference.NEAREST)\n', (27508, 27553), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((27592, 27644), 'test.utils.read_from_which_host', 'read_from_which_host', (['conn', 'ReadPreference.SECONDARY'], {}), '(conn, ReadPreference.SECONDARY)\n', (27612, 27644), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((27703, 27761), 'test.utils.assertReadFrom', 'assertReadFrom', (['self', 'conn', 'host', 'ReadPreference.SECONDARY'], {}), '(self, conn, host, ReadPreference.SECONDARY)\n', (27717, 27761), False, 'from test.utils import delay, assertReadFrom, assertReadFromAll, read_from_which_host\n'), ((1966, 1976), 'nose.plugins.skip.SkipTest', 'SkipTest', ([], {}), '()\n', (1974, 1976), False, 'from nose.plugins.skip import 
SkipTest\n'), ((2941, 2978), 'pymongo.replica_set_connection._partition_node', '_partition_node', (["primary_info['name']"], {}), "(primary_info['name'])\n", (2956, 2978), False, 'from pymongo.replica_set_connection import _partition_node\n'), ((3185, 3195), 'nose.plugins.skip.SkipTest', 'SkipTest', ([], {}), '()\n', (3193, 3195), False, 'from nose.plugins.skip import SkipTest\n'), ((3798, 3846), 'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (['pair'], {'replicaSet': 'self.name'}), '(pair, replicaSet=self.name)\n', (3818, 3846), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((8242, 8270), 'pymongo.database.Database', 'Database', (['connection', '"""test"""'], {}), "(connection, 'test')\n", (8250, 8270), False, 'from pymongo.database import Database\n'), ((13739, 13752), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (13749, 13752), False, 'import time\n'), ((14438, 14448), 'nose.plugins.skip.SkipTest', 'SkipTest', ([], {}), '()\n', (14446, 14448), False, 'from nose.plugins.skip import SkipTest\n'), ((18621, 18685), 'pymongo.replica_set_connection.ReplicaSetConnection', 'ReplicaSetConnection', (["('[::1]:%d' % (port,))"], {'replicaSet': 'self.name'}), "('[::1]:%d' % (port,), replicaSet=self.name)\n", (18641, 18685), False, 'from pymongo.replica_set_connection import ReplicaSetConnection\n'), ((21077, 21108), 'sys.platform.startswith', 'sys.platform.startswith', (['"""java"""'], {}), "('java')\n", (21100, 21108), False, 'import sys\n'), ((21695, 21738), 'nose.plugins.skip.SkipTest', 'SkipTest', (['"""Can\'t test interrupts in Jython"""'], {}), '("Can\'t test interrupts in Jython")\n', (21703, 21738), False, 'from nose.plugins.skip import SkipTest\n'), ((3028, 3054), 'pymongo.replica_set_connection._partition_node', '_partition_node', (["m['name']"], {}), "(m['name'])\n", (3043, 3054), False, 'from pymongo.replica_set_connection import _partition_node\n'), ((5746, 5769), 'copy.deepcopy', 'copy.deepcopy', (['tag_sets'], {}), '(tag_sets)\n', (5759, 5769), False, 'import copy\n'), ((14563, 14573), 'nose.plugins.skip.SkipTest', 'SkipTest', ([], {}), '()\n', (14571, 14573), False, 'from nose.plugins.skip import SkipTest\n'), ((18875, 18885), 'nose.plugins.skip.SkipTest', 'SkipTest', ([], {}), '()\n', (18883, 18885), False, 'from nose.plugins.skip import SkipTest\n'), ((22930, 22970), 'thread.start_new_thread', 'thread.start_new_thread', (['interrupter', '()'], {}), '(interrupter, ())\n', (22953, 22970), False, 'import thread\n'), ((23269, 23308), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'sigalarm'], {}), '(signal.SIGALRM, sigalarm)\n', (23282, 23308), False, 'import signal\n'), ((23325, 23340), 'signal.alarm', 'signal.alarm', (['(1)'], {}), '(1)\n', (23337, 23340), False, 'import signal\n'), ((24130, 24179), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'old_signal_handler'], {}), '(signal.SIGALRM, old_signal_handler)\n', (24143, 24179), False, 'import signal\n'), ((2494, 2512), 'pymongo.replica_set_connection._partition_node', '_partition_node', (['h'], {}), '(h)\n', (2509, 2512), False, 'from pymongo.replica_set_connection import _partition_node\n'), ((2605, 2623), 'pymongo.replica_set_connection._partition_node', '_partition_node', (['h'], {}), '(h)\n', (2620, 2623), False, 'from pymongo.replica_set_connection import _partition_node\n'), ((22785, 22801), 'time.sleep', 'time.sleep', (['(0.25)'], {}), '(0.25)\n', (22795, 22801), False, 'import time\n'), ((22889, 22912), 'thread.interrupt_main', 
'thread.interrupt_main', ([], {}), '()\n', (22910, 22912), False, 'import thread\n'), ((15002, 15023), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (15021, 15023), False, 'import traceback\n'), ((15080, 15091), 'os._exit', 'os._exit', (['(1)'], {}), '(1)\n', (15088, 15091), False, 'import os\n')] |
import argparse_helper as argparse
import config_dir
import sys
from .editor import Editor
def main(*args):
if len(args) > 0:
args = [args]
parser = argparse.ArgumentParser()
parser.add_argument("-f", dest="cfg_file", help="query save name")
parser.add_argument("-x", default=False, action="store_true", dest="run", help="run immediately")
parser.add_argument("-l", default=False, action="count", dest="list", help="list saved queries")
parser.add_argument("-p", default=False, action="store_true", dest="previous", help="use previous query")
parser.add_argument("pattern", nargs="?", help="override saved pattern")
parser.add_argument("file", nargs="?", help="file to operate on")
args = parser.parse_args(*args)
if args.cfg_file is None and args.previous:
args.cfg_file = "previous"
if args.cfg_file is not None and args.file is None:
args.file = args.pattern
args.pattern = None
editor = Editor(file=args.cfg_file, pattern=args.pattern)
if args.list > 0:
if args.cfg_file is not None:
cfg = config_dir.load_config(name=".jqi", sub_dir="query", sub_name=args.cfg_file, create=False)
print(cfg["pattern"])
else:
list_stored(args.list > 1)
return
if args.file is None:
text = sys.stdin.read()
else:
with open(args.file) as f:
text = f.read()
if args.run:
editor.jq(text, stdio=True)
else:
result = editor.run(text)
if result == 0:
editor.save()
editor.save("previous")
else:
sys.exit(result)
def list_stored(long=False):
d = config_dir.config_dir(name=".jqi", sub_dir="query")
for f in d.iterdir():
name = f.name
cfg = config_dir.load_config(name=".jqi", sub_dir="query", sub_name=name, create=False)
if long:
print(name)
for line in cfg["pattern"].splitlines():
print("\t{}".format(line))
else:
print("{}\t{}".format(name, cfg["pattern"].splitlines()[0]))
if __name__ == '__main__':
main("-f", "foo", "/tmp/x")
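# --- Illustrative usage (editor's sketch, not part of the original module) ---
# main() accepts argv-style arguments, so saved jq queries can also be exercised
# programmatically; the query name and file path below are invented examples.
#
#   main("-l")                                  # list saved queries
#   main("-f", "myquery", "-x", "data.json")    # run the query saved as "myquery" on data.json
#   main("-p", "data.json")                     # reuse the previous query on data.json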
| [
"config_dir.config_dir",
"config_dir.load_config",
"sys.exit",
"argparse_helper.ArgumentParser",
"sys.stdin.read"
]
| [((168, 193), 'argparse_helper.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (191, 193), True, 'import argparse_helper as argparse\n'), ((1696, 1747), 'config_dir.config_dir', 'config_dir.config_dir', ([], {'name': '""".jqi"""', 'sub_dir': '"""query"""'}), "(name='.jqi', sub_dir='query')\n", (1717, 1747), False, 'import config_dir\n'), ((1340, 1356), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (1354, 1356), False, 'import sys\n'), ((1810, 1896), 'config_dir.load_config', 'config_dir.load_config', ([], {'name': '""".jqi"""', 'sub_dir': '"""query"""', 'sub_name': 'name', 'create': '(False)'}), "(name='.jqi', sub_dir='query', sub_name=name, create=\n False)\n", (1832, 1896), False, 'import config_dir\n'), ((1105, 1199), 'config_dir.load_config', 'config_dir.load_config', ([], {'name': '""".jqi"""', 'sub_dir': '"""query"""', 'sub_name': 'args.cfg_file', 'create': '(False)'}), "(name='.jqi', sub_dir='query', sub_name=args.cfg_file,\n create=False)\n", (1127, 1199), False, 'import config_dir\n'), ((1640, 1656), 'sys.exit', 'sys.exit', (['result'], {}), '(result)\n', (1648, 1656), False, 'import sys\n')] |
import numpy as np
from visual_dynamics.policies import CameraTargetPolicy
class RandomOffsetCameraTargetPolicy(CameraTargetPolicy):
def __init__(self, env, target_env, camera_node_name, agent_node_name, target_node_name,
height=12.0, radius=16.0, angle=(-np.pi/4, np.pi/4), tightness=0.1, hra_interpolation=True):
self.height = height
self.radius = radius
self.angle = angle
offset = self.sample_offset()
super(RandomOffsetCameraTargetPolicy, self).__init__(env, target_env, camera_node_name, agent_node_name,
target_node_name, offset, tightness=tightness,
hra_interpolation=hra_interpolation)
def reset(self):
self.offset = self.sample_offset()
state = super(RandomOffsetCameraTargetPolicy, self).reset()
# self.offset = self.sample_offset()
return state
def sample_offset(self):
height = np.random.uniform(*self.height) if isinstance(self.height, (list, tuple)) else self.height
radius = np.random.uniform(*self.radius) if isinstance(self.radius, (list, tuple)) else self.radius
angle = np.random.uniform(*self.angle) if isinstance(self.angle, (list, tuple)) else self.angle
return np.array([radius * np.sin(angle), -radius * np.cos(angle), height])
def _get_config(self):
config = super(RandomOffsetCameraTargetPolicy, self)._get_config()
config.pop('offset')
config.update({'height': self.height,
'radius': self.radius,
'angle': self.angle})
return config
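# --- Editor's note (illustrative, not part of the original class) ---
# sample_offset() draws height/radius/angle (uniformly when a range is given) and
# places the camera at [radius * sin(angle), -radius * cos(angle), height] relative
# to the target; e.g. height=12, radius=16, angle=0 gives an offset of
# [0.0, -16.0, 12.0], i.e. 16 units along the negative y-axis and 12 units up.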
| [
"numpy.sin",
"numpy.cos",
"numpy.random.uniform"
]
| [((1027, 1058), 'numpy.random.uniform', 'np.random.uniform', (['*self.height'], {}), '(*self.height)\n', (1044, 1058), True, 'import numpy as np\n'), ((1135, 1166), 'numpy.random.uniform', 'np.random.uniform', (['*self.radius'], {}), '(*self.radius)\n', (1152, 1166), True, 'import numpy as np\n'), ((1242, 1272), 'numpy.random.uniform', 'np.random.uniform', (['*self.angle'], {}), '(*self.angle)\n', (1259, 1272), True, 'import numpy as np\n'), ((1364, 1377), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (1370, 1377), True, 'import numpy as np\n'), ((1389, 1402), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (1395, 1402), True, 'import numpy as np\n')] |
import json
import logging
import os
from typing import Optional
from mir import scm
from mir.tools import mir_storage
def mir_check_repo_dvc_dirty(mir_root: str = ".") -> bool:
names = [name for name in mir_storage.get_all_mir_paths() if os.path.isfile(os.path.join(mir_root, name))]
if names:
dvc_cmd_args = ["--show-json", "--targets"]
dvc_cmd_args.extend(names)
dvc_scm = scm.Scm(mir_root, scm_executable="dvc")
dvc_result = dvc_scm.diff(dvc_cmd_args)
json_object = json.loads(dvc_result)
keys = ['added', 'deleted', 'modified', 'renamed', 'not in cache']
dvc_dirty = False
for key in keys:
dirty_value = json_object.get(key, None)
if dirty_value:
logging.info(f"{key}: {dirty_value}")
dvc_dirty = True
return dvc_dirty
else:
# if no mir files in this mir repo, it's clean
return False
def mir_check_repo_git_dirty(mir_root: str = ".") -> bool:
git_scm = scm.Scm(mir_root, scm_executable="git")
git_result = git_scm.status("-s") # if clean, returns nothing
if (git_result or len(git_result) > 0):
logging.info(f"{git_result}")
return True
return False # clean
def mir_check_repo_dirty(mir_root: str = '.') -> bool:
return mir_check_repo_dvc_dirty(mir_root) or mir_check_repo_git_dirty(mir_root)
def mir_check_branch_exists(mir_root: str, branch: str) -> bool:
try:
git_scm = scm.Scm(mir_root, scm_executable="git")
git_scm.rev_parse(branch)
return True
except Exception:
# git rev-parse will return non-zero code when can not find branch
# and cmd.py packs non-zero return code as an error
return False
def work_dir_to_monitor_file(work_dir: Optional[str]) -> Optional[str]:
return os.path.join(work_dir, 'out', 'monitor.txt') if work_dir else None
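# --- Illustrative usage (editor's sketch, not part of the original module) ---
# Typical pre-commit guard; the paths below are example values only.
#
#   if mir_check_repo_dirty(mir_root="/data/mir-repo"):
#       raise RuntimeError("mir repo has uncommitted dvc/git changes")
#   monitor_file = work_dir_to_monitor_file("/data/work")  # -> "/data/work/out/monitor.txt"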
| [
"json.loads",
"mir.scm.Scm",
"os.path.join",
"mir.tools.mir_storage.get_all_mir_paths",
"logging.info"
]
| [((1026, 1065), 'mir.scm.Scm', 'scm.Scm', (['mir_root'], {'scm_executable': '"""git"""'}), "(mir_root, scm_executable='git')\n", (1033, 1065), False, 'from mir import scm\n'), ((411, 450), 'mir.scm.Scm', 'scm.Scm', (['mir_root'], {'scm_executable': '"""dvc"""'}), "(mir_root, scm_executable='dvc')\n", (418, 450), False, 'from mir import scm\n'), ((521, 543), 'json.loads', 'json.loads', (['dvc_result'], {}), '(dvc_result)\n', (531, 543), False, 'import json\n'), ((1185, 1214), 'logging.info', 'logging.info', (['f"""{git_result}"""'], {}), "(f'{git_result}')\n", (1197, 1214), False, 'import logging\n'), ((1496, 1535), 'mir.scm.Scm', 'scm.Scm', (['mir_root'], {'scm_executable': '"""git"""'}), "(mir_root, scm_executable='git')\n", (1503, 1535), False, 'from mir import scm\n'), ((1853, 1897), 'os.path.join', 'os.path.join', (['work_dir', '"""out"""', '"""monitor.txt"""'], {}), "(work_dir, 'out', 'monitor.txt')\n", (1865, 1897), False, 'import os\n'), ((211, 242), 'mir.tools.mir_storage.get_all_mir_paths', 'mir_storage.get_all_mir_paths', ([], {}), '()\n', (240, 242), False, 'from mir.tools import mir_storage\n'), ((261, 289), 'os.path.join', 'os.path.join', (['mir_root', 'name'], {}), '(mir_root, name)\n', (273, 289), False, 'import os\n'), ((768, 805), 'logging.info', 'logging.info', (['f"""{key}: {dirty_value}"""'], {}), "(f'{key}: {dirty_value}')\n", (780, 805), False, 'import logging\n')] |
# coding:utf-8
from sqlalchemy import text
from db.basic_db import db_session
from db.models import SeedIds
from decorators.decorator import db_commit_decorator
def get_seed():
"""
    Get all user ids to be crawled
:return: user ids
"""
return db_session.query(SeedIds).filter(text('status=0')).all()
def get_seed_ids():
"""
    Get all user ids to be crawled
:return: user ids
"""
return db_session.query(SeedIds.uid).filter(text('is_crawled=0')).all()
def get_home_ids():
"""
    Get all user ids whose home pages need to be crawled
:return: user ids
"""
return db_session.query(SeedIds.uid).filter(text('home_crawled=0')).all()
@db_commit_decorator
def set_seed_crawled(uid, result):
"""
:param uid: user id that is crawled
:param result: crawling result
:return: None
"""
seed = db_session.query(SeedIds).filter(SeedIds.uid == uid).first()
if seed:
if seed.is_crawled == 0:
seed.is_crawled = result
else:
seed = SeedIds(uid=uid, is_crawled=result)
db_session.add(seed)
db_session.commit()
def get_seed_by_id(uid):
return db_session.query(SeedIds).filter(SeedIds.uid == uid).first()
@db_commit_decorator
def insert_seeds(ids):
db_session.execute(SeedIds.__table__.insert().prefix_with('IGNORE'), [{'uid': i} for i in ids])
db_session.commit()
@db_commit_decorator
def set_seed_other_crawled(uid):
"""
update it if user id already exists, else insert
:param uid: user id
:return: None
"""
seed = get_seed_by_id(uid)
if seed is None:
seed = SeedIds(uid=uid, is_crawled=1, other_crawled=1, home_crawled=1)
db_session.add(seed)
else:
seed.other_crawled = 1
db_session.commit()
@db_commit_decorator
def set_seed_home_crawled(uid):
"""
:param uid: user id
:return: None
"""
seed = get_seed_by_id(uid)
if seed is None:
seed = SeedIds(uid=uid, is_crawled=0, other_crawled=0, home_crawled=1)
db_session.add(seed)
else:
seed.home_crawled = 1
db_session.commit()
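# --- Illustrative crawl round (editor's sketch, not part of the original module) ---
# Shows how the helpers above combine: fetch ids that still need crawling, let a
# caller-supplied crawl function do the work, then record the outcome. The status
# value passed to set_seed_crawled() follows the caller's own convention.
def _example_crawl_round(crawl_user, success_status=1):
    for seed in get_seed_ids():
        if crawl_user(seed.uid):
            set_seed_crawled(seed.uid, success_status)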
| [
"db.basic_db.db_session.query",
"sqlalchemy.text",
"db.models.SeedIds.__table__.insert",
"db.basic_db.db_session.commit",
"db.models.SeedIds",
"db.basic_db.db_session.add"
]
| [((1097, 1116), 'db.basic_db.db_session.commit', 'db_session.commit', ([], {}), '()\n', (1114, 1116), False, 'from db.basic_db import db_session\n'), ((1366, 1385), 'db.basic_db.db_session.commit', 'db_session.commit', ([], {}), '()\n', (1383, 1385), False, 'from db.basic_db import db_session\n'), ((1758, 1777), 'db.basic_db.db_session.commit', 'db_session.commit', ([], {}), '()\n', (1775, 1777), False, 'from db.basic_db import db_session\n'), ((2095, 2114), 'db.basic_db.db_session.commit', 'db_session.commit', ([], {}), '()\n', (2112, 2114), False, 'from db.basic_db import db_session\n'), ((1028, 1063), 'db.models.SeedIds', 'SeedIds', ([], {'uid': 'uid', 'is_crawled': 'result'}), '(uid=uid, is_crawled=result)\n', (1035, 1063), False, 'from db.models import SeedIds\n'), ((1072, 1092), 'db.basic_db.db_session.add', 'db_session.add', (['seed'], {}), '(seed)\n', (1086, 1092), False, 'from db.basic_db import db_session\n'), ((1620, 1683), 'db.models.SeedIds', 'SeedIds', ([], {'uid': 'uid', 'is_crawled': '(1)', 'other_crawled': '(1)', 'home_crawled': '(1)'}), '(uid=uid, is_crawled=1, other_crawled=1, home_crawled=1)\n', (1627, 1683), False, 'from db.models import SeedIds\n'), ((1692, 1712), 'db.basic_db.db_session.add', 'db_session.add', (['seed'], {}), '(seed)\n', (1706, 1712), False, 'from db.basic_db import db_session\n'), ((1958, 2021), 'db.models.SeedIds', 'SeedIds', ([], {'uid': 'uid', 'is_crawled': '(0)', 'other_crawled': '(0)', 'home_crawled': '(1)'}), '(uid=uid, is_crawled=0, other_crawled=0, home_crawled=1)\n', (1965, 2021), False, 'from db.models import SeedIds\n'), ((2030, 2050), 'db.basic_db.db_session.add', 'db_session.add', (['seed'], {}), '(seed)\n', (2044, 2050), False, 'from db.basic_db import db_session\n'), ((294, 310), 'sqlalchemy.text', 'text', (['"""status=0"""'], {}), "('status=0')\n", (298, 310), False, 'from sqlalchemy import text\n'), ((459, 479), 'sqlalchemy.text', 'text', (['"""is_crawled=0"""'], {}), "('is_crawled=0')\n", (463, 479), False, 'from sqlalchemy import text\n'), ((651, 673), 'sqlalchemy.text', 'text', (['"""home_crawled=0"""'], {}), "('home_crawled=0')\n", (655, 673), False, 'from sqlalchemy import text\n'), ((1285, 1311), 'db.models.SeedIds.__table__.insert', 'SeedIds.__table__.insert', ([], {}), '()\n', (1309, 1311), False, 'from db.models import SeedIds\n'), ((261, 286), 'db.basic_db.db_session.query', 'db_session.query', (['SeedIds'], {}), '(SeedIds)\n', (277, 286), False, 'from db.basic_db import db_session\n'), ((422, 451), 'db.basic_db.db_session.query', 'db_session.query', (['SeedIds.uid'], {}), '(SeedIds.uid)\n', (438, 451), False, 'from db.basic_db import db_session\n'), ((614, 643), 'db.basic_db.db_session.query', 'db_session.query', (['SeedIds.uid'], {}), '(SeedIds.uid)\n', (630, 643), False, 'from db.basic_db import db_session\n'), ((859, 884), 'db.basic_db.db_session.query', 'db_session.query', (['SeedIds'], {}), '(SeedIds)\n', (875, 884), False, 'from db.basic_db import db_session\n'), ((1155, 1180), 'db.basic_db.db_session.query', 'db_session.query', (['SeedIds'], {}), '(SeedIds)\n', (1171, 1180), False, 'from db.basic_db import db_session\n')] |
"""
Area Weighted Interpolation
"""
import numpy as np
import geopandas as gpd
from ._vectorized_raster_interpolation import _fast_append_profile_in_gdf
import warnings
from scipy.sparse import dok_matrix, diags, coo_matrix
import pandas as pd
from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs
def _area_tables_binning(source_df, target_df, spatial_index):
"""Construct area allocation and source-target correspondence tables using a spatial indexing approach
...
NOTE: this currently relies on Geopandas' spatial index machinery
Parameters
----------
source_df : geopandas.GeoDataFrame
GeoDataFrame containing input data and polygons
    target_df : geopandas.GeoDataFrame
GeoDataFrame defining the output geometries
spatial_index : str
Spatial index to use to build the allocation of area from source to
target tables. It currently support the following values:
- "source": build the spatial index on `source_df`
- "target": build the spatial index on `target_df`
- "auto": attempts to guess the most efficient alternative.
Currently, this option uses the largest table to build the
index, and performs a `bulk_query` on the shorter table.
Returns
-------
tables : scipy.sparse.dok_matrix
"""
if _check_crs(source_df, target_df):
pass
else:
return None
df1 = source_df.copy()
df2 = target_df.copy()
# it is generally more performant to use the longer df as spatial index
if spatial_index == "auto":
if df1.shape[0] > df2.shape[0]:
spatial_index = "source"
else:
spatial_index = "target"
if spatial_index == "source":
ids_tgt, ids_src = df1.sindex.query_bulk(df2.geometry, predicate="intersects")
elif spatial_index == "target":
ids_src, ids_tgt = df2.sindex.query_bulk(df1.geometry, predicate="intersects")
else:
raise ValueError(
f"'{spatial_index}' is not a valid option. Use 'auto', 'source' or 'target'."
)
areas = df1.geometry.values[ids_src].intersection(df2.geometry.values[ids_tgt]).area
table = coo_matrix(
(areas, (ids_src, ids_tgt),),
shape=(df1.shape[0], df2.shape[0]),
dtype=np.float32,
)
table = table.todok()
return table
def _area_tables(source_df, target_df):
"""
Construct area allocation and source-target correspondence tables.
Parameters
----------
source_df : geopandas.GeoDataFrame
target_df : geopandas.GeoDataFrame
Returns
-------
tables : tuple (optional)
two 2-D numpy arrays
SU: area of intersection of source geometry i with union geometry j
UT: binary mapping of union geometry j to target geometry t
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
Union geometry is a geometry formed by the intersection of a source geometry and a target geometry
SU Maps source geometry to union geometry, UT maps union geometry to target geometry
"""
if _check_crs(source_df, target_df):
pass
else:
return None
    source_df = source_df.copy()
    target_df = target_df.copy()
n_s = source_df.shape[0]
n_t = target_df.shape[0]
_left = np.arange(n_s)
_right = np.arange(n_t)
source_df.loc[:, "_left"] = _left # create temporary index for union
target_df.loc[:, "_right"] = _right # create temporary index for union
res_union = gpd.overlay(source_df, target_df, how="union")
n_u, _ = res_union.shape
SU = np.zeros(
(n_s, n_u)
) # holds area of intersection of source geom with union geom
UT = np.zeros((n_u, n_t)) # binary table mapping union geom to target geom
for index, row in res_union.iterrows():
# only union polygons that intersect both a source and a target geometry matter
if not np.isnan(row["_left"]) and not np.isnan(row["_right"]):
s_id = int(row["_left"])
t_id = int(row["_right"])
SU[s_id, index] = row[row.geometry.name].area
UT[index, t_id] = 1
source_df.drop(["_left"], axis=1, inplace=True)
target_df.drop(["_right"], axis=1, inplace=True)
return SU, UT
def _area_interpolate_binning(
source_df,
target_df,
extensive_variables=None,
intensive_variables=None,
table=None,
allocate_total=True,
spatial_index="auto",
):
"""
Area interpolation for extensive and intensive variables.
Parameters
----------
source_df : geopandas.GeoDataFrame
target_df : geopandas.GeoDataFrame
extensive_variables : list
[Optional. Default=None] Columns in dataframes for extensive variables
intensive_variables : list
[Optional. Default=None] Columns in dataframes for intensive variables
table : scipy.sparse.dok_matrix
[Optional. Default=None] Area allocation source-target correspondence
table. If not provided, it will be built from `source_df` and
`target_df` using `tobler.area_interpolate._area_tables_binning`
allocate_total : boolean
[Optional. Default=True] True if total value of source area should be
allocated. False if denominator is area of i. Note that the two cases
would be identical when the area of the source polygon is exhausted by
intersections. See Notes for more details.
spatial_index : str
[Optional. Default="auto"] Spatial index to use to build the
allocation of area from source to target tables. It currently support
the following values:
- "source": build the spatial index on `source_df`
- "target": build the spatial index on `target_df`
- "auto": attempts to guess the most efficient alternative.
Currently, this option uses the largest table to build the
index, and performs a `bulk_query` on the shorter table.
Returns
-------
estimates : geopandas.GeoDataFrame
        new geodataframe with interpolated variables as columns and target_df geometry
as output geometry
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
For an extensive variable, the estimate at target polygon j (default case) is:
.. math::
v_j = \\sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / \\sum_k a_{i,k}
If the area of the source polygon is not exhausted by intersections with
target polygons and there is reason to not allocate the complete value of
an extensive attribute, then setting allocate_total=False will use the
following weights:
.. math::
v_j = \\sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / a_i
where a_i is the total area of source polygon i.
For an intensive variable, the estimate at target polygon j is:
.. math::
v_j = \\sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / \\sum_k a_{k,j}
"""
source_df = source_df.copy()
target_df = target_df.copy()
if _check_crs(source_df, target_df):
pass
else:
return None
if table is None:
table = _area_tables_binning(source_df, target_df, spatial_index)
den = source_df[source_df.geometry.name].area.values
if allocate_total:
den = np.asarray(table.sum(axis=1))
den = den + (den == 0)
den = 1.0 / den
n = den.shape[0]
den = den.reshape((n,))
den = diags([den], [0])
weights = den.dot(table) # row standardize table
dfs = []
extensive = []
if extensive_variables:
for variable in extensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
estimates = diags([vals], [0]).dot(weights)
estimates = estimates.sum(axis=0)
extensive.append(estimates.tolist()[0])
extensive = np.asarray(extensive)
extensive = np.array(extensive)
extensive = pd.DataFrame(extensive.T, columns=extensive_variables)
area = np.asarray(table.sum(axis=0))
den = 1.0 / (area + (area == 0))
n, k = den.shape
den = den.reshape((k,))
den = diags([den], [0])
weights = table.dot(den)
intensive = []
if intensive_variables:
for variable in intensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
n = vals.shape[0]
vals = vals.reshape((n,))
estimates = diags([vals], [0])
estimates = estimates.dot(weights).sum(axis=0)
intensive.append(estimates.tolist()[0])
intensive = np.asarray(intensive)
intensive = pd.DataFrame(intensive.T, columns=intensive_variables)
if extensive_variables:
dfs.append(extensive)
if intensive_variables:
dfs.append(intensive)
df = pd.concat(dfs, axis=1)
df["geometry"] = target_df[target_df.geometry.name].reset_index(drop=True)
df = gpd.GeoDataFrame(df.replace(np.inf, np.nan))
return df
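# --- Illustrative toy example (editor's sketch, not part of the original module) ---
# Two unit-square sources each carry an extensive count of 100; the single target
# covers the right half of the first square and the left half of the second. With
# allocate_total=False (weights a_{i,j} / a_i) the target should receive about
# 50 + 50 = 100; with the default allocate_total=True the weights are normalised by
# each source's total intersected area, so the full 100 + 100 = 200 would be allocated.
def _example_area_interpolate_binning():
    from shapely.geometry import box

    source = gpd.GeoDataFrame(
        {"population": [100.0, 100.0]},
        geometry=[box(0, 0, 1, 1), box(1, 0, 2, 1)],
        crs="EPSG:3857",
    )
    target = gpd.GeoDataFrame(geometry=[box(0.5, 0, 1.5, 1)], crs="EPSG:3857")
    return _area_interpolate_binning(
        source, target, extensive_variables=["population"], allocate_total=False
    )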
def _area_interpolate(
source_df,
target_df,
extensive_variables=None,
intensive_variables=None,
tables=None,
allocate_total=True,
):
"""
Area interpolation for extensive and intensive variables.
Parameters
----------
source_df : geopandas.GeoDataFrame (required)
geodataframe with polygon geometries
target_df : geopandas.GeoDataFrame (required)
geodataframe with polygon geometries
extensive_variables : list, (optional)
columns in dataframes for extensive variables
intensive_variables : list, (optional)
columns in dataframes for intensive variables
tables : tuple (optional)
two 2-D numpy arrays
SU: area of intersection of source geometry i with union geometry j
UT: binary mapping of union geometry j to target geometry t
allocate_total : boolean
True if total value of source area should be allocated.
False if denominator is area of i. Note that the two cases
would be identical when the area of the source polygon is
exhausted by intersections. See Notes for more details.
Returns
-------
estimates : geopandas.GeoDataFrame
        new geodataframe with interpolated variables as columns and target_df geometry
as output geometry
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
For an extensive variable, the estimate at target polygon j (default case) is:
v_j = \sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / \sum_k a_{i,k}
If the area of the source polygon is not exhausted by intersections with
target polygons and there is reason to not allocate the complete value of
an extensive attribute, then setting allocate_total=False will use the
following weights:
v_j = \sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / a_i
where a_i is the total area of source polygon i.
For an intensive variable, the estimate at target polygon j is:
v_j = \sum_i v_i w_{i,j}
w_{i,j} = a_{i,j} / \sum_k a_{k,j}
"""
source_df = source_df.copy()
target_df = target_df.copy()
if _check_crs(source_df, target_df):
pass
else:
return None
if tables is None:
SU, UT = _area_tables(source_df, target_df)
else:
SU, UT = tables
den = source_df[source_df.geometry.name].area.values
if allocate_total:
den = SU.sum(axis=1)
den = den + (den == 0)
weights = np.dot(np.diag(1 / den), SU)
dfs = []
extensive = []
if extensive_variables:
for variable in extensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
estimates = np.dot(np.diag(vals), weights)
estimates = np.dot(estimates, UT)
estimates = estimates.sum(axis=0)
extensive.append(estimates)
extensive = np.array(extensive)
extensive = pd.DataFrame(extensive.T, columns=extensive_variables)
ST = np.dot(SU, UT)
area = ST.sum(axis=0)
den = np.diag(1.0 / (area + (area == 0)))
weights = np.dot(ST, den)
intensive = []
if intensive_variables:
for variable in intensive_variables:
vals = _nan_check(source_df, variable)
vals = _inf_check(source_df, variable)
vals.shape = (len(vals), 1)
est = (vals * weights).sum(axis=0)
intensive.append(est)
intensive = np.array(intensive)
intensive = pd.DataFrame(intensive.T, columns=intensive_variables)
if extensive_variables:
dfs.append(extensive)
if intensive_variables:
dfs.append(intensive)
df = pd.concat(dfs, axis=1)
df["geometry"] = target_df[target_df.geometry.name].reset_index(drop=True)
df = gpd.GeoDataFrame(df.replace(np.inf, np.nan))
return df
def _area_tables_raster(
source_df, target_df, raster_path, codes=[21, 22, 23, 24], force_crs_match=True
):
"""
Construct area allocation and source-target correspondence tables according to a raster 'populated' areas
Parameters
----------
source_df : geopandas.GeoDataFrame
        geodataframe with geometry column of polygon type
target_df : geopandas.GeoDataFrame
geodataframe with geometry column of polygon type
raster_path : str
the path to the associated raster image.
codes : list
list of integer code values that should be considered as 'populated'.
        Since this draws inspiration from the National Land Cover Database (NLCD), the default is 21 (Developed, Open Space), 22 (Developed, Low Intensity), 23 (Developed, Medium Intensity) and 24 (Developed, High Intensity).
The description of each code can be found here: https://www.mrlc.gov/sites/default/files/metadata/landcover.html
Only taken into consideration for harmonization raster based.
force_crs_match : bool (default is True)
Whether the Coordinate Reference System (CRS) of the polygon will be reprojected to the CRS of the raster file.
        It is recommended to leave this argument as True.
Returns
-------
tables: tuple (optional)
two 2-D numpy arrays
SU: area of intersection of source geometry i with union geometry j
UT: binary mapping of union geometry j to target geometry t
Notes
-----
The assumption is both dataframes have the same coordinate reference system.
Union geometry is a geometry formed by the intersection of a source geometry and a target geometry
SU Maps source geometry to union geometry, UT maps union geometry to target geometry
"""
if _check_crs(source_df, target_df):
pass
else:
return None
source_df = source_df.copy()
target_df = target_df.copy()
n_s = source_df.shape[0]
n_t = target_df.shape[0]
_left = np.arange(n_s)
_right = np.arange(n_t)
source_df.loc[:, "_left"] = _left # create temporary index for union
target_df.loc[:, "_right"] = _right # create temporary index for union
res_union_pre = gpd.overlay(source_df, target_df, how="union")
# Establishing a CRS for the generated union
warnings.warn(
"The CRS for the generated union will be set to be the same as source_df."
)
res_union_pre.crs = source_df.crs
# The 'append_profile_in_gdf' function is present in nlcd.py script
res_union = _fast_append_profile_in_gdf(
res_union_pre, raster_path, force_crs_match=force_crs_match
)
str_codes = [str(i) for i in codes]
str_list = ["Type_" + i for i in str_codes]
# Extract list of code names that actually appear in the appended dataset
str_list_ok = [col for col in res_union.columns if col in str_list]
res_union["Populated_Pixels"] = res_union[str_list_ok].sum(axis=1)
n_u, _ = res_union.shape
SU = np.zeros(
(n_s, n_u)
) # holds area of intersection of source geom with union geom
UT = np.zeros((n_u, n_t)) # binary table mapping union geom to target geom
for index, row in res_union.iterrows():
# only union polygons that intersect both a source and a target geometry matter
if not np.isnan(row["_left"]) and not np.isnan(row["_right"]):
s_id = int(row["_left"])
t_id = int(row["_right"])
SU[s_id, index] = row["Populated_Pixels"]
UT[index, t_id] = 1
source_df.drop(["_left"], axis=1, inplace=True)
target_df.drop(["_right"], axis=1, inplace=True)
return SU, UT
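# --- Illustrative usage (editor's sketch, not part of the original module) ---
# The raster path and code list below are example values; any NLCD-style
# land-cover GeoTIFF can be used.
#
#   SU, UT = _area_tables_raster(
#       source_df, target_df, raster_path="nlcd_2016.tif", codes=[21, 22, 23, 24]
#   )
#   # SU[i, j] counts the "populated" pixels of source i inside union polygon j,
#   # UT[j, t] is 1 when union polygon j falls inside target polygon t.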
| [
"scipy.sparse.diags",
"tobler.util.util._check_crs",
"pandas.DataFrame",
"numpy.asarray",
"numpy.diag",
"numpy.array",
"numpy.zeros",
"numpy.dot",
"tobler.util.util._nan_check",
"numpy.isnan",
"geopandas.overlay",
"scipy.sparse.coo_matrix",
"warnings.warn",
"tobler.util.util._inf_check",
"pandas.concat",
"numpy.arange"
]
| [((1390, 1422), 'tobler.util.util._check_crs', '_check_crs', (['source_df', 'target_df'], {}), '(source_df, target_df)\n', (1400, 1422), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((2243, 2340), 'scipy.sparse.coo_matrix', 'coo_matrix', (['(areas, (ids_src, ids_tgt))'], {'shape': '(df1.shape[0], df2.shape[0])', 'dtype': 'np.float32'}), '((areas, (ids_src, ids_tgt)), shape=(df1.shape[0], df2.shape[0]),\n dtype=np.float32)\n', (2253, 2340), False, 'from scipy.sparse import dok_matrix, diags, coo_matrix\n'), ((3198, 3230), 'tobler.util.util._check_crs', '_check_crs', (['source_df', 'target_df'], {}), '(source_df, target_df)\n', (3208, 3230), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((3412, 3426), 'numpy.arange', 'np.arange', (['n_s'], {}), '(n_s)\n', (3421, 3426), True, 'import numpy as np\n'), ((3440, 3454), 'numpy.arange', 'np.arange', (['n_t'], {}), '(n_t)\n', (3449, 3454), True, 'import numpy as np\n'), ((3621, 3667), 'geopandas.overlay', 'gpd.overlay', (['source_df', 'target_df'], {'how': '"""union"""'}), "(source_df, target_df, how='union')\n", (3632, 3667), True, 'import geopandas as gpd\n'), ((3706, 3726), 'numpy.zeros', 'np.zeros', (['(n_s, n_u)'], {}), '((n_s, n_u))\n', (3714, 3726), True, 'import numpy as np\n'), ((3811, 3831), 'numpy.zeros', 'np.zeros', (['(n_u, n_t)'], {}), '((n_u, n_t))\n', (3819, 3831), True, 'import numpy as np\n'), ((7155, 7187), 'tobler.util.util._check_crs', '_check_crs', (['source_df', 'target_df'], {}), '(source_df, target_df)\n', (7165, 7187), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((7560, 7577), 'scipy.sparse.diags', 'diags', (['[den]', '[0]'], {}), '([den], [0])\n', (7565, 7577), False, 'from scipy.sparse import dok_matrix, diags, coo_matrix\n'), ((8290, 8307), 'scipy.sparse.diags', 'diags', (['[den]', '[0]'], {}), '([den], [0])\n', (8295, 8307), False, 'from scipy.sparse import dok_matrix, diags, coo_matrix\n'), ((8999, 9021), 'pandas.concat', 'pd.concat', (['dfs'], {'axis': '(1)'}), '(dfs, axis=1)\n', (9008, 9021), True, 'import pandas as pd\n'), ((11334, 11366), 'tobler.util.util._check_crs', '_check_crs', (['source_df', 'target_df'], {}), '(source_df, target_df)\n', (11344, 11366), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((12220, 12234), 'numpy.dot', 'np.dot', (['SU', 'UT'], {}), '(SU, UT)\n', (12226, 12234), True, 'import numpy as np\n'), ((12271, 12306), 'numpy.diag', 'np.diag', (['(1.0 / (area + (area == 0)))'], {}), '(1.0 / (area + (area == 0)))\n', (12278, 12306), True, 'import numpy as np\n'), ((12321, 12336), 'numpy.dot', 'np.dot', (['ST', 'den'], {}), '(ST, den)\n', (12327, 12336), True, 'import numpy as np\n'), ((12894, 12916), 'pandas.concat', 'pd.concat', (['dfs'], {'axis': '(1)'}), '(dfs, axis=1)\n', (12903, 12916), True, 'import pandas as pd\n'), ((14868, 14900), 'tobler.util.util._check_crs', '_check_crs', (['source_df', 'target_df'], {}), '(source_df, target_df)\n', (14878, 14900), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((15081, 15095), 'numpy.arange', 'np.arange', (['n_s'], {}), '(n_s)\n', (15090, 15095), True, 'import numpy as np\n'), ((15109, 15123), 'numpy.arange', 'np.arange', (['n_t'], {}), '(n_t)\n', (15118, 15123), True, 'import numpy as np\n'), ((15295, 15341), 'geopandas.overlay', 'gpd.overlay', (['source_df', 'target_df'], 
{'how': '"""union"""'}), "(source_df, target_df, how='union')\n", (15306, 15341), True, 'import geopandas as gpd\n'), ((15396, 15490), 'warnings.warn', 'warnings.warn', (['"""The CRS for the generated union will be set to be the same as source_df."""'], {}), "(\n 'The CRS for the generated union will be set to be the same as source_df.')\n", (15409, 15490), False, 'import warnings\n'), ((16081, 16101), 'numpy.zeros', 'np.zeros', (['(n_s, n_u)'], {}), '((n_s, n_u))\n', (16089, 16101), True, 'import numpy as np\n'), ((16186, 16206), 'numpy.zeros', 'np.zeros', (['(n_u, n_t)'], {}), '((n_u, n_t))\n', (16194, 16206), True, 'import numpy as np\n'), ((8015, 8036), 'numpy.asarray', 'np.asarray', (['extensive'], {}), '(extensive)\n', (8025, 8036), True, 'import numpy as np\n'), ((8057, 8076), 'numpy.array', 'np.array', (['extensive'], {}), '(extensive)\n', (8065, 8076), True, 'import numpy as np\n'), ((8097, 8151), 'pandas.DataFrame', 'pd.DataFrame', (['extensive.T'], {'columns': 'extensive_variables'}), '(extensive.T, columns=extensive_variables)\n', (8109, 8151), True, 'import pandas as pd\n'), ((8775, 8796), 'numpy.asarray', 'np.asarray', (['intensive'], {}), '(intensive)\n', (8785, 8796), True, 'import numpy as np\n'), ((8817, 8871), 'pandas.DataFrame', 'pd.DataFrame', (['intensive.T'], {'columns': 'intensive_variables'}), '(intensive.T, columns=intensive_variables)\n', (8829, 8871), True, 'import pandas as pd\n'), ((11678, 11694), 'numpy.diag', 'np.diag', (['(1 / den)'], {}), '(1 / den)\n', (11685, 11694), True, 'import numpy as np\n'), ((12115, 12134), 'numpy.array', 'np.array', (['extensive'], {}), '(extensive)\n', (12123, 12134), True, 'import numpy as np\n'), ((12155, 12209), 'pandas.DataFrame', 'pd.DataFrame', (['extensive.T'], {'columns': 'extensive_variables'}), '(extensive.T, columns=extensive_variables)\n', (12167, 12209), True, 'import pandas as pd\n'), ((12672, 12691), 'numpy.array', 'np.array', (['intensive'], {}), '(intensive)\n', (12680, 12691), True, 'import numpy as np\n'), ((12712, 12766), 'pandas.DataFrame', 'pd.DataFrame', (['intensive.T'], {'columns': 'intensive_variables'}), '(intensive.T, columns=intensive_variables)\n', (12724, 12766), True, 'import pandas as pd\n'), ((7757, 7788), 'tobler.util.util._nan_check', '_nan_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (7767, 7788), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((7808, 7839), 'tobler.util.util._inf_check', '_inf_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (7818, 7839), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((8449, 8480), 'tobler.util.util._nan_check', '_nan_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (8459, 8480), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((8500, 8531), 'tobler.util.util._inf_check', '_inf_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (8510, 8531), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((8624, 8642), 'scipy.sparse.diags', 'diags', (['[vals]', '[0]'], {}), '([vals], [0])\n', (8629, 8642), False, 'from scipy.sparse import dok_matrix, diags, coo_matrix\n'), ((11825, 11856), 'tobler.util.util._nan_check', '_nan_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (11835, 11856), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, 
_check_presence_of_crs\n'), ((11876, 11907), 'tobler.util.util._inf_check', '_inf_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (11886, 11907), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((11987, 12008), 'numpy.dot', 'np.dot', (['estimates', 'UT'], {}), '(estimates, UT)\n', (11993, 12008), True, 'import numpy as np\n'), ((12448, 12479), 'tobler.util.util._nan_check', '_nan_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (12458, 12479), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((12499, 12530), 'tobler.util.util._inf_check', '_inf_check', (['source_df', 'variable'], {}), '(source_df, variable)\n', (12509, 12530), False, 'from tobler.util.util import _check_crs, _nan_check, _inf_check, _check_presence_of_crs\n'), ((4029, 4051), 'numpy.isnan', 'np.isnan', (["row['_left']"], {}), "(row['_left'])\n", (4037, 4051), True, 'import numpy as np\n'), ((4060, 4083), 'numpy.isnan', 'np.isnan', (["row['_right']"], {}), "(row['_right'])\n", (4068, 4083), True, 'import numpy as np\n'), ((11939, 11952), 'numpy.diag', 'np.diag', (['vals'], {}), '(vals)\n', (11946, 11952), True, 'import numpy as np\n'), ((16405, 16427), 'numpy.isnan', 'np.isnan', (["row['_left']"], {}), "(row['_left'])\n", (16413, 16427), True, 'import numpy as np\n'), ((16436, 16459), 'numpy.isnan', 'np.isnan', (["row['_right']"], {}), "(row['_right'])\n", (16444, 16459), True, 'import numpy as np\n'), ((7864, 7882), 'scipy.sparse.diags', 'diags', (['[vals]', '[0]'], {}), '([vals], [0])\n', (7869, 7882), False, 'from scipy.sparse import dok_matrix, diags, coo_matrix\n')] |
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: <NAME>
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
# ----------------------------------------------------------------------------
# This software is in the public domain, furnished "as is", without technical
# support, and with no warranty, express or implied, as to its usefulness for
# any purpose.
#
# Headlines Timing
#
# Author:
# ----------------------------------------------------------------------------
# Set up to test area names and parts of states
# without locationName defined
areaT1 = """
AreaDictionary['FLZ050']['fullStateName'] = 'Florida'
AreaDictionary['FLZ050']['partOfState'] = 'western'
AreaDictionary['FLZ057']['fullStateName'] = 'Florida'
AreaDictionary['FLZ057']['partOfState'] = 'western'
AreaDictionary['FLZ160']['fullStateName'] = 'Florida'
AreaDictionary['FLZ160']['partOfState'] = 'central'
AreaDictionary['FLZ151']['fullStateName'] = 'Florida'
AreaDictionary['FLZ151']['partOfState'] = 'central'
AreaDictionary['FLZ043']['fullStateName'] = 'Florida'
AreaDictionary['FLZ043']['partOfState'] = 'central'
AreaDictionary['FLZ162']['fullStateName'] = 'Florida'
AreaDictionary['FLZ162']['partOfState'] = 'central'
AreaDictionary['FLZ165']['fullStateName'] = 'Florida'
AreaDictionary['FLZ165']['partOfState'] = 'central'
AreaDictionary['FLZ056']['fullStateName'] = 'Florida'
AreaDictionary['FLZ056']['partOfState'] = 'southern'
AreaDictionary['FLZ052']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ052']['partOfState'] = 'western'
AreaDictionary['FLZ155']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ155']['partOfState'] = 'western'
AreaDictionary['FLZ061']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ061']['partOfState'] = 'southern'
AreaDictionary['FLZ148']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ148']['partOfState'] = 'southern'
AreaDictionary['FLZ142']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ142']['partOfState'] = 'western'
AreaDictionary['FLZ043']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ043']['partOfState'] = 'western'
"""
# With locationName defined
areaT2= """
AreaDictionary['FLZ050']['fullStateName'] = 'Florida'
AreaDictionary['FLZ050']['partOfState'] = 'western'
AreaDictionary['FLZ050']['locationName'] = 'Clearfield'
AreaDictionary['FLZ057']['fullStateName'] = 'Florida'
AreaDictionary['FLZ057']['partOfState'] = 'western'
AreaDictionary['FLZ057']['locationName'] = 'Clearfield'
AreaDictionary['FLZ160']['fullStateName'] = 'Florida'
AreaDictionary['FLZ160']['partOfState'] = 'central'
AreaDictionary['FLZ160']['locationName'] = 'Aunt Ruby'
AreaDictionary['FLZ151']['fullStateName'] = 'Florida'
AreaDictionary['FLZ151']['partOfState'] = 'central'
AreaDictionary['FLZ151']['locationName'] = 'Aunt Ruby'
AreaDictionary['FLZ043']['fullStateName'] = 'Florida'
AreaDictionary['FLZ043']['partOfState'] = 'central'
AreaDictionary['FLZ043']['locationName'] = 'Adams'
AreaDictionary['FLZ162']['fullStateName'] = 'Florida'
AreaDictionary['FLZ162']['partOfState'] = 'central'
AreaDictionary['FLZ162']['locationName'] = 'Adams'
AreaDictionary['FLZ165']['fullStateName'] = 'Florida'
AreaDictionary['FLZ165']['partOfState'] = 'central'
#AreaDictionary['FLZ165']['locationName'] = 'western'
AreaDictionary['FLZ056']['fullStateName'] = 'Florida'
AreaDictionary['FLZ056']['partOfState'] = 'southern'
AreaDictionary['FLZ056']['locationName'] = 'Tampa'
AreaDictionary['FLZ052']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ052']['partOfState'] = 'western'
AreaDictionary['FLZ052']['locationName'] = 'Tampa'
AreaDictionary['FLZ155']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ155']['partOfState'] = 'western'
AreaDictionary['FLZ155']['locationName'] = 'Atlanta'
AreaDictionary['FLZ061']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ061']['partOfState'] = 'southern'
AreaDictionary['FLZ061']['locationName'] = 'Beach'
AreaDictionary['FLZ148']['fullStateName'] = 'Georgia'
AreaDictionary['FLZ148']['partOfState'] = 'southern'
AreaDictionary['FLZ148']['locationName'] = 'Beach'
AreaDictionary['FLZ142']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ142']['partOfState'] = 'western'
AreaDictionary['FLZ142']['locationName'] = 'South Park'
AreaDictionary['FLZ043']['fullStateName'] = 'South Carolina'
AreaDictionary['FLZ043']['partOfState'] = 'western'
AreaDictionary['FLZ043']['locationName'] = 'South Park'
"""
# For testing parishes, counties, and independent cities
areaT3 = """
AreaDictionary['FLC017']['fullStateName'] = 'Louisiana'
AreaDictionary['FLC017']['partOfState'] = 'western'
AreaDictionary['FLC017']['independentCity'] = 1
AreaDictionary['FLC105']['fullStateName'] = 'Louisiana'
AreaDictionary['FLC105']['partOfState'] = 'western'
AreaDictionary['FLC027']['fullStateName'] = 'Louisiana'
AreaDictionary['FLC027']['partOfState'] = 'western'
AreaDictionary['FLC053']['fullStateName'] = 'Florida'
AreaDictionary['FLC053']['partOfState'] = 'western'
"""
areaT3FIPS0= '#Definition["areaType"] = "FIPS"'
areaT3FIPS1= 'Definition["areaType"] = "FIPS"'
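# Search/replace pair used by the fileChanges "replace" entries below to switch
# the product's areaType to FIPS (county-based zones) for the parish/county tests.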
scripts = [
{
"commentary": "Clear out all Hazards Table and Grids.",
"name": "Hazard_FFA_0",
"productType": None,
"clearHazardsTable": 1,
"checkStrings": [],
},
{
"commentary": "NEW FFA",
"name": "Hazard_FFA_1",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ149"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ149-",
"/X.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Coastal Pasco-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 3 AM EST early this morning",
],
},
{
"commentary": "CON FFA",
"name": "Hazard_FFA_2",
"drtTime": "20100101_0530",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'SM '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ149-",
"/X.CON.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.SM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 3 AM EST early this morning",
],
},
{
"commentary": "EXA FFA",
"name": "Hazard_FFA_3",
"drtTime": "20100101_0700",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'DM '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ149","FLZ057"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.EXA.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.DM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has expanded the",
"* Flood Watch to include a portion of south central Florida, including the following area, Highlands.",
"* Until 3 AM EST early this morning",
"FLZ149-",
"/X.CON.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.DM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 3 AM EST early this morning",
],
},
{
"commentary": "CAN FFA, NEW FFA",
"name": "Hazard_FFA_4",
"drtTime": "20100101_0720",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'IJ '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 8, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CAN.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/X.NEW.KTBW.FF.A.0001.100101T0720Z-100101T1300Z/",
"/X.NEW.KTBW.FF.A.0002.100102T0500Z-100102T1300Z/",
"/00000.0.IJ.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH IN EFFECT UNTIL 8 AM EST THIS MORNING...",
"...FLASH FLOOD WATCH IN EFFECT FROM LATE TONIGHT THROUGH SATURDAY MORNING...",
"...FLOOD WATCH IS CANCELLED...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flash Flood Watch for a portion of south central Florida, including the following area, Highlands.",
"* Until 8 AM EST this morning",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flash Flood Watch for a portion of south central Florida, including the following area, Highlands.",
"* From late tonight through Saturday morning",
"The Flood Watch for a portion of south central Florida has been cancelled.",
"FLZ149-",
"/X.CAN.KTBW.FA.A.0001.000000T0000Z-100101T0800Z/",
"/00000.0.IJ.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IS CANCELLED...",
"The Flood Watch for a portion of west central Florida has been cancelled."
],
},
{
"commentary": "EXP FFA, 2 NEW FFA",
"name": "Hazard_FFA_5",
"drtTime": "20100101_1300",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'FS '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 46, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.EXP.KTBW.FF.A.0001.000000T0000Z-100101T1300Z/",
"/X.NEW.KTBW.FF.A.0003.100103T0300Z-100103T1900Z/",
"/X.CON.KTBW.FF.A.0002.100102T0500Z-100102T1300Z/",
"/00000.0.FS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT FROM LATE TONIGHT THROUGH SATURDAY MORNING...",
"...FLASH FLOOD WATCH IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY AFTERNOON...",
"...FLASH FLOOD WATCH HAS EXPIRED...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* From late tonight through Saturday morning",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flash Flood Watch for a portion of south central Florida, including the following area, Highlands.",
"* From Saturday evening through Sunday afternoon",
"The Flash Flood Watch for a portion of south central Florida has expired.",
"FLZ149-",
"/X.NEW.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.FS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY EVENING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of west central Florida, including the following area, Coastal Pasco.",
"* From Saturday evening through Sunday evening",
],
},
{
"commentary": "CON test of multiple events",
"name": "Hazard_FFA_6",
"drtTime": "20100102_0300",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'RS '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 46, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CON.KTBW.FF.A.0002.100102T0500Z-100102T1300Z/",
"/X.CON.KTBW.FF.A.0003.100103T0300Z-100103T1900Z/",
"/00000.0.RS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT UNTIL 8 AM EST SATURDAY...",
"...FLASH FLOOD WATCH REMAINS IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY AFTERNOON...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Until 8 AM EST Saturday",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* From Saturday evening through Sunday afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.RS.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT FROM SATURDAY EVENING THROUGH SUNDAY EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* From Saturday evening through Sunday evening",
],
},
{
"commentary": "middle of 1st event",
"name": "Hazard_FFA_7",
"drtTime": "20100102_0700",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 32, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 46, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 46, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CON.KTBW.FF.A.0002.000000T0000Z-100102T1300Z/",
"/X.CON.KTBW.FF.A.0003.100103T0300Z-100103T1900Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT UNTIL 8 AM EST THIS MORNING...",
"...FLASH FLOOD WATCH REMAINS IN EFFECT FROM THIS EVENING THROUGH SUNDAY AFTERNOON...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Until 8 AM EST this morning",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* From this evening through Sunday afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT FROM THIS EVENING THROUGH SUNDAY EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* From this evening through Sunday evening",
],
},
{
"commentary": "joining two events",
"name": "Hazard_FFA_8",
"drtTime": "20100102_1200",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'IC '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 45, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CAN.KTBW.FF.A.0002.000000T0000Z-100102T1300Z/",
"/X.EXT.KTBW.FF.A.0003.100102T1200Z-100103T1900Z/",
"/00000.0.IC.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH NOW IN EFFECT THROUGH SUNDAY AFTERNOON...",
"The Flash Flood Watch is now in effect for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Through Sunday afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.100103T0200Z-100104T0100Z/",
"/00000.0.IC.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT FROM THIS EVENING THROUGH SUNDAY EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* From this evening through Sunday evening",
],
},
{
"commentary": "into the tail end of the events",
"name": "Hazard_FFA_9",
"drtTime": "20100103_1100",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'SM '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 45, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.CON.KTBW.FF.A.0003.000000T0000Z-100103T1900Z/",
"/00000.0.SM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH REMAINS IN EFFECT UNTIL 2 PM EST THIS AFTERNOON...",
"The Flash Flood Watch continues for",
"* A portion of south central Florida, including the following area, Highlands.",
"* Until 2 PM EST this afternoon",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.000000T0000Z-100104T0100Z/",
"/00000.0.SM.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT THROUGH THIS EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Through this evening",
],
},
{
"commentary": "exp 1st event, continue 2nd event",
"name": "Hazard_FFA_10",
"drtTime": "20100103_1855",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'DR '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 24, 45, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FF.A", ["FLZ057"]),
("Fcst", "Hazards", "DISCRETE", 45, 62, "FA.A", ["FLZ149"]),
("Fcst", "Hazards", "DISCRETE", 62, 68, "FA.A", ["FLZ149"]),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ057-",
"/X.EXP.KTBW.FF.A.0003.000000T0000Z-100103T1900Z/",
"/00000.0.DR.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLASH FLOOD WATCH WILL EXPIRE AT 2 PM EST THIS AFTERNOON...",
"The Flash Flood Watch for a portion of south central Florida will expire at 2 PM EST this afternoon.",
"FLZ149-",
"/X.CON.KTBW.FA.A.0002.000000T0000Z-100104T0100Z/",
"/00000.0.DR.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH REMAINS IN EFFECT UNTIL 8 PM EST THIS EVENING...",
"The Flood Watch continues for",
"* A portion of west central Florida, including the following area, Coastal Pasco.",
"* Until 8 PM EST this evening",
],
},
{
"commentary": "cancel 2nd event",
"name": "Hazard_FFA_11",
"drtTime": "20100104_0000",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'GO '}",
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
],
"checkStrings": ["Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"FLZ149-",
"/X.CAN.KTBW.FA.A.0002.000000T0000Z-100104T0100Z/",
"/00000.0.GO.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"...FLOOD WATCH IS CANCELLED...",
"The Flood Watch for a portion of west central Florida has been cancelled.",
],
},
{
"commentary": "Deleting hazard grids.",
"name": "Hazard_FFA_12",
"productType": None,
"checkStrings": [],
"clearHazardsTable": 1,
},
# Begin detailed phrasing of location tests
{
"commentary": "one state, single area, w/o location",
"name": "Hazard_FFA_13a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of western Florida, including the following area, Pinellas.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "one state, single area, w location",
"name": "Hazard_FFA_13b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for a portion of western Florida, including the following area, Clearfield.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area, w/o location",
"name": "Hazard_FFA_14a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ052","FLZ155"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-Coastal Manatee-",
# "Including the cities of St. Petersburg, Clearwater, Largo, ",
# "Lakeland, Winter Haven, Bradenton, Bayshore Gardens, ",
# "Palmetto, Sebring, Avon Park, Placid Lakes",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Georgia, including the following areas, in western Florida, Highlands and Pinellas. In western Georgia, Coastal Manatee and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area, w location",
"name": "Hazard_FFA_14b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ052","FLZ155"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-Coastal Manatee-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Georgia, including the following areas, in western Florida, Clearfield. In western Georgia, Atlanta and Tampa.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "one state, multiple areas, w/o location",
"name": "Hazard_FFA_15a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ160",
"FLZ057","FLZ151","FLZ056"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-056-057-151-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Hardee-Highlands-Coastal Hillsborough-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of central Florida, southern Florida, and western Florida, including the following areas, in central Florida, Coastal Hillsborough and Coastal Sarasota. In southern Florida, Hardee. In western Florida, Highlands and Pinellas.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "one state, multiple areas, w location",
"name": "Hazard_FFA_15b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ160",
"FLZ057","FLZ151","FLZ056"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-056-057-151-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Hardee-Highlands-Coastal Hillsborough-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of central Florida, southern Florida, and western Florida, including the following areas, in central Florida, Aunt Ruby. In southern Florida, Tampa. In western Florida, Clearfield.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area 1st, mulitple area 2nd, w/o location",
"name": "Hazard_FFA_16a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ052",
"FLZ155","FLZ061"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-061-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-DeSoto-Coastal Manatee-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and Georgia, including the following areas, in western Florida, Pinellas. In Georgia, Coastal Manatee, DeSoto, and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, single area 1st, mulitple area 2nd, w location",
"name": "Hazard_FFA_16b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ052",
"FLZ155","FLZ061"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-061-155-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-DeSoto-Coastal Manatee-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and Georgia, including the following areas, in western Florida, Clearfield. In Georgia, Atlanta, Beach, and Tampa.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, multiple areas, w/o location",
"name": "Hazard_FFA_17a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT1, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ160","FLZ151","FLZ052","FLZ155","FLZ061","FLZ148"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-061-148-151-155-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-DeSoto-Coastal Hernando-",
"Coastal Hillsborough-Coastal Manatee-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of Florida and Georgia, including the following areas, in Florida, Coastal Hillsborough, Coastal Sarasota, Highlands, and Pinellas. In Georgia, Coastal Hernando, Coastal Manatee, DeSoto, and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "two states, multiple areas, w location",
"name": "Hazard_FFA_17b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [("AreaDictionary", "TextUtility", "add", areaT2, "delete"),],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLZ050","FLZ057",
"FLZ160","FLZ151","FLZ052","FLZ155","FLZ061","FLZ148"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLZ050-052-057-061-148-151-155-160-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Pinellas-Polk-Highlands-DeSoto-Coastal Hernando-",
"Coastal Hillsborough-Coastal Manatee-Coastal Sarasota-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of Florida and Georgia, including the following areas, in Florida, Aunt Ruby and Clearfield. In Georgia, Atlanta, Beach, and Tampa.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "parishes 1, independent 1, counties 1",
"name": "Hazard_FFA_18a",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [
("AreaDictionary", "TextUtility", "add", areaT3, "delete"),
("Hazard_FFA_Local", "TextProduct", "replace",
(areaT3FIPS0, areaT3FIPS1), "delete"),
],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLC017","FLC027",
"FLC053"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLC017-027-053-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Citrus-DeSoto-Hernando-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Louisiana, including the following county, independent city, and parish, in western Florida, Hernando. In western Louisiana, Citrus and DeSoto.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
{
"commentary": "parishes 2, independent 1, counties 1",
"name": "Hazard_FFA_18b",
"drtTime": "20100101_0510",
"productType": "Hazard_FFA_Local",
"cmdLineVars": "{('Flood Reason', 'floodReason'): 'ER '}",
"decodeVTEC": 0,
"vtecMode": "O",
"fileChanges": [
("AreaDictionary", "TextUtility", "add", areaT3, "delete"),
("Hazard_FFA_Local", "TextProduct", "replace",
(areaT3FIPS0, areaT3FIPS1), "delete"),
],
"createGrids": [
("Fcst", "Hazards", "DISCRETE", -100, 100, "<None>", "all"),
("Fcst", "Hazards", "DISCRETE", 0, 3, "FA.A", ["FLC017","FLC027",
"FLC053","FLC105"]),
],
"checkStrings": [
"WGUS62 KTBW 010510",
"FFATBW",
"URGENT - IMMEDIATE BROADCAST REQUESTED",
"Flood Watch",
"National Weather Service Tampa Bay Ruskin FL",
"1210 AM EST Fri Jan 1 2010",
"...|*Overview headline (must edit)*|...",
".|*Overview (must edit)*|.",
"FLC017-027-053-105-010800-",
"/O.NEW.KTBW.FA.A.0001.100101T0510Z-100101T0800Z/",
"/00000.0.ER.000000T0000Z.000000T0000Z.000000T0000Z.OO/",
"Citrus-DeSoto-Hernando-Polk-",
"1210 AM EST Fri Jan 1 2010",
"...FLOOD WATCH IN EFFECT UNTIL 3 AM EST EARLY THIS MORNING...",
"The National Weather Service in Tampa Bay Ruskin has issued a",
"* Flood Watch for portions of western Florida and western Louisiana, including the following county, independent city, and parishes, in western Florida, Hernando. In western Louisiana, Citrus, DeSoto, and Polk.",
"* Until 3 AM EST early this morning",
"* |* Basis for the watch *|",
"* |* (optional) potential impacts of flooding *|",
"PRECAUTIONARY/PREPAREDNESS ACTIONS...",
"A Flood Watch means there is a potential for flooding based on current forecasts.",
"You should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.",
"&&",
"$$",
],
},
]
import TestScript
def testScript(self, dataMgr):
defaults = {
"database": "<site>_GRID__Fcst_00000000_0000",
"publishGrids": 0,
"decodeVTEC": 1,
"gridsStartTime": "20100101_0500",
"orderStrings": 1,
"vtecMode": "X",
"deleteGrids": [("Fcst", "Hazards", "SFC", "all", "all")],
}
return TestScript.generalTestScript(self, dataMgr, scripts, defaults)
| [
"TestScript.generalTestScript"
]
| [((49491, 49553), 'TestScript.generalTestScript', 'TestScript.generalTestScript', (['self', 'dataMgr', 'scripts', 'defaults'], {}), '(self, dataMgr, scripts, defaults)\n', (49519, 49553), False, 'import TestScript\n')] |
import time
old_input_value = False
flag_falling_edge = None
start = None
flag_output_mask = False
DELAY_CONST = 10  # seconds the output is held after a falling edge before it is cleared
output = None
def response_function():
global old_input_value, flag_falling_edge, start, flag_output_mask, output
if flag_falling_edge:
output = True
end = time.perf_counter()
if end - start > DELAY_CONST:
output = 0
flag_falling_edge = 0
flag_output_mask = False
input_value = bool(int(input('Please Enter your Input Value: ')))
if old_input_value == False and input_value == True:
if not flag_output_mask: output = input_value
old_input_value = input_value
print('Input Rising Edge detected ... ')
print(f'output is: {output}')
elif old_input_value == False and input_value == False:
if not flag_output_mask: output = input_value
old_input_value = input_value
print(f'output is: {output}')
elif old_input_value == True and input_value == True:
old_input_value = input_value
if not flag_output_mask: output = input_value
print(f'output is: {output}')
elif old_input_value == True and input_value == False:
start = time.perf_counter()
print('Input Falling Edge detected ... ')
flag_falling_edge = True
flag_output_mask = True
old_input_value = input_value
print(f'output is: {output}')
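

# The function above behaves like an off-delay timer: the output follows the
# input, but after a falling edge it is held True for DELAY_CONST seconds
# before being cleared. The sketch below is a simplified, non-interactive
# illustration of the same idea; it is not part of the original script and
# works on pre-recorded (seconds, value) samples instead of input().
def simulate_off_delay(samples, delay=2.0):
    result, hold_until = [], None
    for t, value in samples:
        if value:
            hold_until = None            # rising edge: pass the input through
            result.append((t, True))
        else:
            if hold_until is None:
                hold_until = t + delay  # falling edge: start the hold period
            result.append((t, t < hold_until))
    return result
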
if __name__ == '__main__':
DELAY_CONST=int(input("Hello \nPlease Enter Your delay value here :"))
while True:
response_function()
| [
"time.perf_counter"
]
| [((365, 384), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (382, 384), False, 'import time\n'), ((1306, 1325), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1323, 1325), False, 'import time\n')] |
""" This module does validation for data input in incidents """
import re
class Validate():
"""
    Methods for validating incident input data.
"""
def valid_email(self, email):
self.vemail = re.match(
r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)", email)
if not self.vemail:
return None
return True
def valid_password(self, password):
self.password = re.match(r'[A-Za-z0-9@#$%^&+=]{8,}', password)
if self.password is None:
return None
return True
def valid_string(self, value):
"""
        checks that the value is a string
"""
self.value = value
if not isinstance(self.value, str):
return None
return True
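

# A minimal usage sketch (not part of the original module); the expected
# results follow from the regular expressions and type check above.
if __name__ == '__main__':
    v = Validate()
    print(v.valid_email('user@example.com'))   # True
    print(v.valid_email('not-an-email'))       # None
    print(v.valid_password('Str0ng#Pass'))     # True (8+ allowed characters)
    print(v.valid_password('short'))           # None
    print(v.valid_string('some text'))         # True
    print(v.valid_string(42))                  # None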
| [
"re.match"
]
| [((217, 287), 're.match', 're.match', (['"""(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\\\.[a-zA-Z0-9-.]+$)"""', 'email'], {}), "('(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\\\\.[a-zA-Z0-9-.]+$)', email)\n", (225, 287), False, 'import re\n'), ((434, 479), 're.match', 're.match', (['"""[A-Za-z0-9@#$%^&+=]{8,}"""', 'password'], {}), "('[A-Za-z0-9@#$%^&+=]{8,}', password)\n", (442, 479), False, 'import re\n')] |
from .terraform import TerraformManager
import pytest
from _pytest.tmpdir import TempPathFactory
@pytest.fixture(scope='session')
def tfenv(tmp_path_factory: TempPathFactory):
env_vars = {
}
with TerraformManager(path_factory=tmp_path_factory, env_vars=env_vars) as deployment:
yield deployment
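

# Hypothetical usage sketch (not part of the original module): pytest injects
# the session-scoped fixture by name. The assertion is deliberately generic
# because TerraformManager's public interface is not shown here.
def test_deployment_is_available(tfenv):
    assert tfenv is not None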
| [
"pytest.fixture"
]
| [((100, 131), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (114, 131), False, 'import pytest\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
# vim: encoding=utf-8
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from datetime import date
# import os
# import sys
PATH = 'content'
TIMEZONE = 'UTC'
DEFAULT_LANG = u'en'
AUTHOR = u'Treasurer Team'
SITENAME = u'Apache Treasurer'
SITEDOMAIN = 'treasurer.apache.org'
SITEURL = 'https://treasurer.apache.org'
# SITELOGO = 'https://treasurer.apache.org/images/logo.png'
# SITEDESC = u'<blank>'
SITEREPOSITORY = 'https://github.com/apache/treasurer-site/blob/main/content/pages/'
TRADEMARKS = u'Apache and the Apache feather logo are trademarks or registered trademarks'
CURRENTYEAR = date.today().year
# Save pages using full directory preservation
PAGES_PATHS = ['content']
# PATH_METADATA= '(?P<path_no_ext>.*)\..*'
# PAGE_SAVE_AS= '{path_no_ext}.html'
PAGE_URL = '{slug}.html'
SLUGIFY_SOURCE = 'basename'
PAGE_SAVE_AS = '{slug}.html'
# We want to serve any images
STATIC_PATHS = ['.htaccess', 'images']
# We don't use articles, but we don't want pelican to think
# that content/ contains articles.
ARTICLE_PATHS = ['articles']
# Disable these pages
ARCHIVES_SAVE_AS = ''
ARTICLE_SAVE_AS = ''
AUTHORS_SAVE_AS = ''
CATEGORIES_SAVE_AS = ''
INDEX_SAVE_AS = ''
TAGS_SAVE_AS = ''
# Enable ATOM feed and Disable other feeds
FEED_DOMAIN = SITEURL
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Pelican Plugins
# Plugins are loaded from the provided location. If the buildbot does not have a new plugin, check requirements.txt.
PLUGIN_PATHS = ['./theme/plugins']
PLUGINS = ['toc', 'pelican-gfm', 'sitemap']
# TOC Generator
TOC_HEADERS = r"h[1-6]"
# Sitemap Generator
SITEMAP = {
"exclude": ["tag/", "category/"],
"format": "xml",
"priorities": {
"articles": 0.1,
"indexes": 0.1,
"pages": 0.8
},
"changefreqs": {
"articles": "never",
"indexes": "never",
"pages": "monthly"
}
}
# Unused links
LINKS = ( )
SOCIAL = ( )
DEFAULT_PAGINATION = False
# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True
| [
"datetime.date.today"
]
| [((1419, 1431), 'datetime.date.today', 'date.today', ([], {}), '()\n', (1429, 1431), False, 'from datetime import date\n')] |
# -*- coding: utf-8 -*-
#Copyright (c) 2010,12 <NAME>
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.
from random import uniform
from math import sin, cos, pi, sqrt
from gettext import gettext as _
import gtk
import cairo
from taconstants import TURTLE_LAYER, DEFAULT_TURTLE_COLORS
from tasprite_factory import SVG, svg_str_to_pixbuf
from tacanvas import wrap100, COLOR_TABLE
from sprites import Sprite
from tautils import debug_output
SHAPES = 36
def generate_turtle_pixbufs(colors):
""" Generate pixbufs for generic turtles """
shapes = []
svg = SVG()
svg.set_scale(1.0)
for i in range(SHAPES):
svg.set_orientation(i * 10)
shapes.append(svg_str_to_pixbuf(svg.turtle(colors)))
return shapes
class Turtles:
def __init__(self, sprite_list):
""" Class to hold turtles """
self.dict = dict()
self.sprite_list = sprite_list
self.default_pixbufs = []
def get_turtle(self, k, append=False, colors=None):
""" Find a turtle """
if k in self.dict:
return self.dict[k]
elif not append:
return None
else:
if colors == None:
Turtle(self, k)
elif type(colors) in [list, tuple]:
Turtle(self, k, colors)
else:
Turtle(self, k, colors.split(','))
return self.dict[k]
def get_turtle_key(self, turtle):
""" Find a turtle's name """
for k in iter(self.dict):
if self.dict[k] == turtle:
return k
return None
def turtle_count(self):
""" How many turtles are there? """
        return len(self.dict)
def add_to_dict(self, k, turtle):
""" Add a new turtle """
self.dict[k] = turtle
def remove_from_dict(self, k):
""" Delete a turtle """
if k in self.dict:
            del self.dict[k]
def show_all(self):
""" Make all turtles visible """
for k in iter(self.dict):
self.dict[k].show()
def spr_to_turtle(self, spr):
""" Find the turtle that corresponds to sprite spr. """
for k in iter(self.dict):
if spr == self.dict[k].spr:
return self.dict[k]
return None
def get_pixbufs(self):
""" Get the pixbufs for the default turtle shapes. """
if self.default_pixbufs == []:
self.default_pixbufs = generate_turtle_pixbufs(
["#008000", "#00A000"])
        return self.default_pixbufs
class Turtle:
def __init__(self, turtles, key, turtle_colors=None):
""" The turtle is not a block, just a sprite with an orientation """
self.x = 0
self.y = 0
self.hidden = False
self.shapes = []
self.custom_shapes = False
self.type = 'turtle'
self.name = key
self.heading = 0
self.pen_shade = 50
self.pen_color = 0
self.pen_gray = 100
self.pen_size = 5
self.pen_state = True
self.label_block = None
self._prep_shapes(key, turtles, turtle_colors)
# Choose a random angle from which to attach the turtle label.
if turtles.sprite_list is not None:
self.spr = Sprite(turtles.sprite_list, 0, 0, self.shapes[0])
angle = uniform(0, pi * 4 / 3.0) # 240 degrees
w = self.shapes[0].get_width()
r = w * 0.67
            # Restrict the angle to the sides: 30-150; 210-330
if angle > pi * 2 / 3.0:
angle += pi / 2.0 # + 90
self.label_xy = [int(r * sin(angle)),
int(r * cos(angle) + w / 2.0)]
else:
angle += pi / 6.0 # + 30
self.label_xy = [int(r * sin(angle) + w / 2.0),
int(r * cos(angle) + w / 2.0)]
else:
self.spr = None
turtles.add_to_dict(key, self)
def _prep_shapes(self, name, turtles=None, turtle_colors=None):
# If the turtle name is an int, we'll use a palette color as the
# turtle color
try:
int_key = int(name)
use_color_table = True
except ValueError:
use_color_table = False
if turtle_colors is not None:
self.colors = turtle_colors[:]
self.shapes = generate_turtle_pixbufs(self.colors)
elif use_color_table:
fill = wrap100(int_key)
stroke = wrap100(fill + 10)
self.colors = ['#%06x' % (COLOR_TABLE[fill]),
'#%06x' % (COLOR_TABLE[stroke])]
self.shapes = generate_turtle_pixbufs(self.colors)
else:
if turtles is not None:
self.colors = DEFAULT_TURTLE_COLORS
self.shapes = turtles.get_pixbufs()
def set_turtle_colors(self, turtle_colors):
''' reset the colors of a preloaded turtle '''
if turtle_colors is not None:
self.colors = turtle_colors[:]
self.shapes = generate_turtle_pixbufs(self.colors)
self.set_heading(self.heading)
def set_shapes(self, shapes, i=0):
""" Reskin the turtle """
n = len(shapes)
if n == 1 and i > 0: # set shape[i]
if i < len(self.shapes):
self.shapes[i] = shapes[0]
elif n == SHAPES: # all shapes have been precomputed
self.shapes = shapes[:]
else: # rotate shapes
if n != 1:
debug_output("%d images passed to set_shapes: ignoring" % (n),
self.tw.running_sugar)
if self.heading == 0: # rotate the shapes
images = []
w, h = shapes[0].get_width(), shapes[0].get_height()
nw = nh = int(sqrt(w * w + h * h))
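                # square canvas sized to the image diagonal so rotated copies are not clipped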
for i in range(SHAPES):
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, nw, nh)
context = cairo.Context(surface)
context = gtk.gdk.CairoContext(context)
context.translate(nw / 2., nh / 2.)
context.rotate(i * 10 * pi / 180.)
context.translate(-nw / 2., -nh / 2.)
context.set_source_pixbuf(shapes[0], (nw - w) / 2.,
(nh - h) / 2.)
context.rectangle(0, 0, nw, nh)
context.fill()
images.append(surface)
self.shapes = images[:]
else: # associate shape with image at current heading
j = int(self.heading + 5) % 360 / (360 / SHAPES)
self.shapes[j] = shapes[0]
self.custom_shapes = True
self.show()
def reset_shapes(self):
""" Reset the shapes to the standard turtle """
if self.custom_shapes:
self.shapes = generate_turtle_pixbufs(self.colors)
self.custom_shapes = False
def set_heading(self, heading):
""" Set the turtle heading (one shape per 360/SHAPES degrees) """
self.heading = heading
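        # round to the nearest 10-degree step (360 / SHAPES) to pick one of the precomputed rotations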
i = (int(self.heading + 5) % 360) / (360 / SHAPES)
if not self.hidden and self.spr is not None:
try:
self.spr.set_shape(self.shapes[i])
except IndexError:
self.spr.set_shape(self.shapes[0])
def set_color(self, color):
""" Set the pen color for this turtle. """
self.pen_color = color
def set_gray(self, gray):
""" Set the pen gray level for this turtle. """
self.pen_gray = gray
def set_shade(self, shade):
""" Set the pen shade for this turtle. """
self.pen_shade = shade
def set_pen_size(self, pen_size):
""" Set the pen size for this turtle. """
self.pen_size = pen_size
def set_pen_state(self, pen_state):
""" Set the pen state (down==True) for this turtle. """
self.pen_state = pen_state
def hide(self):
""" Hide the turtle. """
if self.spr is not None:
self.spr.hide()
if self.label_block is not None:
self.label_block.spr.hide()
self.hidden = True
def show(self):
""" Show the turtle. """
if self.spr is not None:
self.spr.set_layer(TURTLE_LAYER)
self.hidden = False
self.move((self.x, self.y))
self.set_heading(self.heading)
if self.label_block is not None:
self.label_block.spr.move((self.x + self.label_xy[0],
self.y + self.label_xy[1]))
self.label_block.spr.set_layer(TURTLE_LAYER + 1)
def move(self, pos):
""" Move the turtle. """
self.x, self.y = int(pos[0]), int(pos[1])
if not self.hidden and self.spr is not None:
self.spr.move(pos)
if self.label_block is not None:
self.label_block.spr.move((pos[0] + self.label_xy[0],
pos[1] + self.label_xy[1]))
return(self.x, self.y)
def get_name(self):
''' return turtle name (key) '''
return self.name
def get_xy(self):
""" Return the turtle's x, y coordinates. """
return(self.x, self.y)
def get_heading(self):
""" Return the turtle's heading. """
return(self.heading)
def get_color(self):
""" Return the turtle's color. """
return(self.pen_color)
def get_gray(self):
""" Return the turtle's gray level. """
return(self.pen_gray)
def get_shade(self):
""" Return the turtle's shade. """
return(self.pen_shade)
def get_pen_size(self):
""" Return the turtle's pen size. """
return(self.pen_size)
def get_pen_state(self):
""" Return the turtle's pen state. """
return(self.pen_state)
| [
"random.uniform",
"cairo.ImageSurface",
"tacanvas.wrap100",
"cairo.Context",
"math.sqrt",
"tautils.debug_output",
"math.cos",
"gtk.gdk.CairoContext",
"tasprite_factory.SVG",
"sprites.Sprite",
"math.sin"
]
| [((1556, 1561), 'tasprite_factory.SVG', 'SVG', ([], {}), '()\n', (1559, 1561), False, 'from tasprite_factory import SVG, svg_str_to_pixbuf\n'), ((4257, 4306), 'sprites.Sprite', 'Sprite', (['turtles.sprite_list', '(0)', '(0)', 'self.shapes[0]'], {}), '(turtles.sprite_list, 0, 0, self.shapes[0])\n', (4263, 4306), False, 'from sprites import Sprite\n'), ((4327, 4351), 'random.uniform', 'uniform', (['(0)', '(pi * 4 / 3.0)'], {}), '(0, pi * 4 / 3.0)\n', (4334, 4351), False, 'from random import uniform\n'), ((5462, 5478), 'tacanvas.wrap100', 'wrap100', (['int_key'], {}), '(int_key)\n', (5469, 5478), False, 'from tacanvas import wrap100, COLOR_TABLE\n'), ((5500, 5518), 'tacanvas.wrap100', 'wrap100', (['(fill + 10)'], {}), '(fill + 10)\n', (5507, 5518), False, 'from tacanvas import wrap100, COLOR_TABLE\n'), ((6536, 6624), 'tautils.debug_output', 'debug_output', (["('%d images passed to set_shapes: ignoring' % n)", 'self.tw.running_sugar'], {}), "('%d images passed to set_shapes: ignoring' % n, self.tw.\n running_sugar)\n", (6548, 6624), False, 'from tautils import debug_output\n'), ((6833, 6852), 'math.sqrt', 'sqrt', (['(w * w + h * h)'], {}), '(w * w + h * h)\n', (6837, 6852), False, 'from math import sin, cos, pi, sqrt\n'), ((6924, 6971), 'cairo.ImageSurface', 'cairo.ImageSurface', (['cairo.FORMAT_ARGB32', 'nw', 'nh'], {}), '(cairo.FORMAT_ARGB32, nw, nh)\n', (6942, 6971), False, 'import cairo\n'), ((7002, 7024), 'cairo.Context', 'cairo.Context', (['surface'], {}), '(surface)\n', (7015, 7024), False, 'import cairo\n'), ((7055, 7084), 'gtk.gdk.CairoContext', 'gtk.gdk.CairoContext', (['context'], {}), '(context)\n', (7075, 7084), False, 'import gtk\n'), ((4614, 4624), 'math.sin', 'sin', (['angle'], {}), '(angle)\n', (4617, 4624), False, 'from math import sin, cos, pi, sqrt\n'), ((4668, 4678), 'math.cos', 'cos', (['angle'], {}), '(angle)\n', (4671, 4678), False, 'from math import sin, cos, pi, sqrt\n'), ((4792, 4802), 'math.sin', 'sin', (['angle'], {}), '(angle)\n', (4795, 4802), False, 'from math import sin, cos, pi, sqrt\n'), ((4856, 4866), 'math.cos', 'cos', (['angle'], {}), '(angle)\n', (4859, 4866), False, 'from math import sin, cos, pi, sqrt\n')] |
try:
from django.forms.utils import pretty_name
except ImportError:
from django.forms.forms import pretty_name
from django.template import Context
from django.template.loader import render_to_string
from .compat import context_flatten
class Group(list):
"""
A simplistic representation of backends that are related and should be
displayed as one "group" in the backend (e.g. as one box in the sidebar).
"""
template_name = 'django_backend/_group.html'
def __init__(self, id, name=None, position=0, template_name=None):
self.id = id
if name is None:
name = pretty_name(id)
self.template_name = template_name or self.template_name
self.name = name
self.position = position
super(Group, self).__init__()
@property
def backends(self):
return list(self)
def get_context_data(self, context, **kwargs):
data = {
'group': self,
}
data.update(kwargs)
return data
def get_template_name(self):
return self.template_name
def render(self, context):
context_data = {}
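        # flatten a template Context into a plain dict before handing it to render_to_string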
if isinstance(context, Context):
context_data.update(context_flatten(context))
context_data = self.get_context_data(context, **context_data)
return render_to_string(self.get_template_name(), context_data)
| [
"django.forms.forms.pretty_name"
]
| [((621, 636), 'django.forms.forms.pretty_name', 'pretty_name', (['id'], {}), '(id)\n', (632, 636), False, 'from django.forms.forms import pretty_name\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import Flask, request, abort, render_template
from datetime import timedelta
import pymysql
from search import start_search, decorate
page_dir = "E:/WEBPAGES_RAW"
app = Flask(__name__)
app.config['DEBUG'] = True
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = timedelta(seconds=1)
connection = pymysql.connect(host="localhost",port=3306,user="root",db="spicy_pot")
cursor = connection.cursor()
@app.route('/')
def homepage():
return render_template("root.html")
@app.route('/search')
def search():
word = request.args.get('s')
page = int(request.args.get('p'))
all_res = start_search(word,cursor)
if len(all_res) == 0:
return render_template("result.html",result={"word":word,"pages":-1,"currentPage":1,"res":[]})
pages = ((len(all_res)-1)//10) + 1
res = decorate(all_res[(page-1)*10:page*10])
content = {"word":word,"pages":pages,"currentPage":page,"res":res}
return render_template("result.html",result=content)
@app.route('/cache')
def cache():
p = request.args.get('p')
c = request.args.get('c')
read = open(page_dir+"/"+p+"/"+c,'r',encoding="utf-8")
save = open("templates/temp.html",'w',encoding="utf-8")
for line in read:
save.write(line)
read.close()
save.close()
return render_template("temp.html")
app.run(host='0.0.0.0',port=80,debug=True)
| [
"flask.render_template",
"flask.request.args.get",
"flask.Flask",
"pymysql.connect",
"search.start_search",
"datetime.timedelta",
"search.decorate"
]
| [((235, 250), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (240, 250), False, 'from flask import Flask, request, abort, render_template\n'), ((322, 342), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (331, 342), False, 'from datetime import timedelta\n'), ((359, 432), 'pymysql.connect', 'pymysql.connect', ([], {'host': '"""localhost"""', 'port': '(3306)', 'user': '"""root"""', 'db': '"""spicy_pot"""'}), "(host='localhost', port=3306, user='root', db='spicy_pot')\n", (374, 432), False, 'import pymysql\n'), ((508, 536), 'flask.render_template', 'render_template', (['"""root.html"""'], {}), "('root.html')\n", (523, 536), False, 'from flask import Flask, request, abort, render_template\n'), ((589, 610), 'flask.request.args.get', 'request.args.get', (['"""s"""'], {}), "('s')\n", (605, 610), False, 'from flask import Flask, request, abort, render_template\n'), ((665, 691), 'search.start_search', 'start_search', (['word', 'cursor'], {}), '(word, cursor)\n', (677, 691), False, 'from search import start_search, decorate\n'), ((879, 923), 'search.decorate', 'decorate', (['all_res[(page - 1) * 10:page * 10]'], {}), '(all_res[(page - 1) * 10:page * 10])\n', (887, 923), False, 'from search import start_search, decorate\n'), ((1002, 1048), 'flask.render_template', 'render_template', (['"""result.html"""'], {'result': 'content'}), "('result.html', result=content)\n", (1017, 1048), False, 'from flask import Flask, request, abort, render_template\n'), ((1095, 1116), 'flask.request.args.get', 'request.args.get', (['"""p"""'], {}), "('p')\n", (1111, 1116), False, 'from flask import Flask, request, abort, render_template\n'), ((1126, 1147), 'flask.request.args.get', 'request.args.get', (['"""c"""'], {}), "('c')\n", (1142, 1147), False, 'from flask import Flask, request, abort, render_template\n'), ((1366, 1394), 'flask.render_template', 'render_template', (['"""temp.html"""'], {}), "('temp.html')\n", (1381, 1394), False, 'from flask import Flask, request, abort, render_template\n'), ((627, 648), 'flask.request.args.get', 'request.args.get', (['"""p"""'], {}), "('p')\n", (643, 648), False, 'from flask import Flask, request, abort, render_template\n'), ((734, 833), 'flask.render_template', 'render_template', (['"""result.html"""'], {'result': "{'word': word, 'pages': -1, 'currentPage': 1, 'res': []}"}), "('result.html', result={'word': word, 'pages': -1,\n 'currentPage': 1, 'res': []})\n", (749, 833), False, 'from flask import Flask, request, abort, render_template\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. codeauthor:: <NAME> <<EMAIL>>
"""
import logging
import sys, os, json
from cdumay_rest_client.client import RESTClient
from cdumay_rest_client.exceptions import NotFound, HTTPException
class NoSuchFile(NotFound):
"""NoSuchFile"""
def oncritical(exc):
"""description of oncritical"""
if isinstance(exc, HTTPException):
logging.critical(exc.message)
else:
logging.critical(str(exc))
sys.exit(1)
def file_exists(filename):
"""description of file_exists"""
filename = os.path.realpath(filename)
logging.debug("Checking file: {}".format(filename))
if not os.path.exists(filename):
raise NoSuchFile(
message="No such file '{}'".format(filename),
extra=dict(filename=filename)
)
return filename
def file_write(dst, data):
"""description of file_write"""
if dst:
dst = os.path.realpath(dst)
logging.debug("Saving to: {}".format(dst))
out = open(dst, "w")
else:
logging.debug("Current std will be used")
out = sys.stdout
json.dump(
data, out, ensure_ascii=False, sort_keys=True, indent=2,
separators=(',', ': ')
)
def from_local(src, dst=None):
"""description of from_local"""
try:
file_write(dst, json.load(open(file_exists(src), "r")))
except Exception as exc:
oncritical(exc)
def from_remote(src, dst=None):
"""description of fromurl"""
try:
file_write(
dst, RESTClient(server=src).do_request(method="GET", path="")
)
except Exception as exc:
oncritical(exc)
| [
"os.path.exists",
"cdumay_rest_client.client.RESTClient",
"logging.debug",
"os.path.realpath",
"logging.critical",
"sys.exit",
"json.dump"
]
| [((477, 488), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (485, 488), False, 'import sys, os, json\n'), ((570, 596), 'os.path.realpath', 'os.path.realpath', (['filename'], {}), '(filename)\n', (586, 596), False, 'import sys, os, json\n'), ((1129, 1223), 'json.dump', 'json.dump', (['data', 'out'], {'ensure_ascii': '(False)', 'sort_keys': '(True)', 'indent': '(2)', 'separators': "(',', ': ')"}), "(data, out, ensure_ascii=False, sort_keys=True, indent=2,\n separators=(',', ': '))\n", (1138, 1223), False, 'import sys, os, json\n'), ((398, 427), 'logging.critical', 'logging.critical', (['exc.message'], {}), '(exc.message)\n', (414, 427), False, 'import logging\n'), ((664, 688), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (678, 688), False, 'import sys, os, json\n'), ((937, 958), 'os.path.realpath', 'os.path.realpath', (['dst'], {}), '(dst)\n', (953, 958), False, 'import sys, os, json\n'), ((1057, 1098), 'logging.debug', 'logging.debug', (['"""Current std will be used"""'], {}), "('Current std will be used')\n", (1070, 1098), False, 'import logging\n'), ((1550, 1572), 'cdumay_rest_client.client.RESTClient', 'RESTClient', ([], {'server': 'src'}), '(server=src)\n', (1560, 1572), False, 'from cdumay_rest_client.client import RESTClient\n')] |
import numpy as np
import pytest
import apexpy
import tempfile
import os
import h5py
from ttools import create_dataset, config, io, utils
map_periods = [np.timedelta64(10, 'm'), np.timedelta64(30, 'm'), np.timedelta64(1, 'h'), np.timedelta64(2, 'h')]
@pytest.fixture
def times():
yield np.datetime64('2010-01-01T00:00:00') + np.arange(100) * np.timedelta64(5, 'm')
@pytest.mark.parametrize('map_period', map_periods)
def test_assemble_args(times, map_period):
mlat = np.arange(10)
mlt = np.arange(10)
ssmlon = np.random.rand(times.shape[0])
mlt, mlat = np.meshgrid(mlt, mlat)
mlat = mlat[None, :, :] * np.ones((times.shape[0], 1, 1))
mlt = mlt[None, :, :] * np.ones((times.shape[0], 1, 1))
tec = np.random.rand(*mlat.shape)
bin_edges = np.arange(-.5, 10)
bins = [bin_edges, bin_edges]
args = create_dataset.assemble_binning_args(mlat, mlt, tec, times, ssmlon, bins, map_period)
assert len(args) == np.ceil((times[-1] - times[0]) / map_period)
assert args[0][3][0] == times[0]
assert args[-1][3][0] + map_period >= times[-1]
assert args[-1][3][0] < times[-1]
assert args[-1][3][-1] == times[-1]
for i in range(len(args) - 1):
assert args[i][3][-1] == args[i + 1][3][0] - np.timedelta64(5, 'm')
@pytest.mark.parametrize('map_period', map_periods)
def test_process_file(madrigal_data_dir, map_period):
"""not that good of a test: wait for bugs and add asserts
"""
start_date = np.datetime64('2012-06-08')
end_date = np.datetime64('2012-06-13')
converter = apexpy.Apex()
mlat, mlon = create_dataset.get_mag_grid(config.madrigal_lat, config.madrigal_lon, converter)
bin_edges = np.arange(-.5, 10)
bins = [bin_edges + 30, bin_edges]
times, tec, ssmlon, n, std = create_dataset.process_file(start_date, end_date, mlat, mlon, converter, bins,
map_period, madrigal_data_dir)
assert times.shape[0] == tec.shape[0] == n.shape[0] == std.shape[0] == ssmlon.shape[0]
assert np.isnan(tec[times < np.datetime64('2012-06-10')]).all()
assert np.isnan(tec[times >= np.datetime64('2012-06-11')]).all()
assert np.isfinite(tec[(times >= np.datetime64('2012-06-10')) * (times < np.datetime64('2012-06-11'))]).any()
assert not np.isnan(tec).all(axis=(0, 1)).any()
assert not np.isnan(tec).all(axis=(0, 2)).any()
def test_calculate_bins():
mlat = np.arange(10)[None, :, None] * np.ones((1, 1, 10))
mlt = np.arange(10)[None, None, :] * np.ones((1, 10, 1))
tec = np.zeros((1, 10, 10))
tec[0, 0, 0] = 10
tec[0, 0, -1] = 20
tec[0, -1, 0] = 30
times = ssmlon = np.ones(1) * np.nan
be = np.array([-.5, 4.5, 9.5])
bins = [be, be]
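    # two bins of width 5 per axis, so each 2x2 output cell averages a 5x5 block (25 samples), hence the /25 expectations below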
out_t, out_tec, out_ssm, out_n, out_std = create_dataset.calculate_bins(mlat.ravel(), mlt.ravel(), tec.ravel(),
times, ssmlon, bins)
assert np.isnan(out_t)
assert np.isnan(out_ssm)
assert out_tec.shape == (2, 2)
assert out_tec[0, 0] == 10 / 25
assert out_tec[0, 1] == 20 / 25
assert out_tec[1, 0] == 30 / 25
assert out_tec[1, 1] == 0
assert np.all(out_n == 25)
def test_process_dataset():
start_date = np.datetime64("2012-03-07")
end_date = np.datetime64("2012-03-08")
file_dt = np.timedelta64(12, 'h')
mlat_bins = np.array([35, 45, 55, 65])
mlt_bins = np.array([-1.5, -.5, .5, 1.5])
def fn_pattern(date):
return f"{date.astype('datetime64[h]')}.h5"
dates = np.arange(start_date, end_date, file_dt)
with tempfile.TemporaryDirectory() as tempdir:
files = [os.path.join(tempdir, fn_pattern(d)) for d in dates]
create_dataset.process_dataset(start_date, end_date, mlat_bins, mlt_bins, apex_dt=np.timedelta64(365, 'D'),
file_dt=file_dt, output_dir=tempdir, file_name_pattern=fn_pattern)
grid_fn = os.path.join(tempdir, 'grid.h5')
assert os.path.exists(grid_fn)
with h5py.File(grid_fn, 'r') as f:
mlt_vals = f['mlt'][()]
mlat_vals = f['mlat'][()]
assert np.all(mlt_vals == [-1, 0, 1])
assert np.all(mlat_vals == [40, 50, 60])
for f, d in zip(files, dates):
assert os.path.exists(f)
tec, times, ssmlon, n, std = io.open_tec_file(f)
assert tec.shape == (12, 3, 3)
assert utils.datetime64_to_timestamp(d) == times[0]
| [
"numpy.random.rand",
"numpy.array",
"numpy.arange",
"os.path.exists",
"numpy.datetime64",
"numpy.meshgrid",
"ttools.create_dataset.process_file",
"numpy.ceil",
"numpy.ones",
"h5py.File",
"numpy.isnan",
"ttools.io.open_tec_file",
"numpy.timedelta64",
"apexpy.Apex",
"tempfile.TemporaryDirectory",
"ttools.utils.datetime64_to_timestamp",
"ttools.create_dataset.assemble_binning_args",
"os.path.join",
"pytest.mark.parametrize",
"numpy.zeros",
"ttools.create_dataset.get_mag_grid",
"numpy.all"
]
| [((376, 426), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""map_period"""', 'map_periods'], {}), "('map_period', map_periods)\n", (399, 426), False, 'import pytest\n'), ((1278, 1328), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""map_period"""', 'map_periods'], {}), "('map_period', map_periods)\n", (1301, 1328), False, 'import pytest\n'), ((155, 178), 'numpy.timedelta64', 'np.timedelta64', (['(10)', '"""m"""'], {}), "(10, 'm')\n", (169, 178), True, 'import numpy as np\n'), ((180, 203), 'numpy.timedelta64', 'np.timedelta64', (['(30)', '"""m"""'], {}), "(30, 'm')\n", (194, 203), True, 'import numpy as np\n'), ((205, 227), 'numpy.timedelta64', 'np.timedelta64', (['(1)', '"""h"""'], {}), "(1, 'h')\n", (219, 227), True, 'import numpy as np\n'), ((229, 251), 'numpy.timedelta64', 'np.timedelta64', (['(2)', '"""h"""'], {}), "(2, 'h')\n", (243, 251), True, 'import numpy as np\n'), ((481, 494), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (490, 494), True, 'import numpy as np\n'), ((505, 518), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (514, 518), True, 'import numpy as np\n'), ((532, 562), 'numpy.random.rand', 'np.random.rand', (['times.shape[0]'], {}), '(times.shape[0])\n', (546, 562), True, 'import numpy as np\n'), ((579, 601), 'numpy.meshgrid', 'np.meshgrid', (['mlt', 'mlat'], {}), '(mlt, mlat)\n', (590, 601), True, 'import numpy as np\n'), ((734, 761), 'numpy.random.rand', 'np.random.rand', (['*mlat.shape'], {}), '(*mlat.shape)\n', (748, 761), True, 'import numpy as np\n'), ((778, 797), 'numpy.arange', 'np.arange', (['(-0.5)', '(10)'], {}), '(-0.5, 10)\n', (787, 797), True, 'import numpy as np\n'), ((842, 931), 'ttools.create_dataset.assemble_binning_args', 'create_dataset.assemble_binning_args', (['mlat', 'mlt', 'tec', 'times', 'ssmlon', 'bins', 'map_period'], {}), '(mlat, mlt, tec, times, ssmlon, bins,\n map_period)\n', (878, 931), False, 'from ttools import create_dataset, config, io, utils\n'), ((1470, 1497), 'numpy.datetime64', 'np.datetime64', (['"""2012-06-08"""'], {}), "('2012-06-08')\n", (1483, 1497), True, 'import numpy as np\n'), ((1513, 1540), 'numpy.datetime64', 'np.datetime64', (['"""2012-06-13"""'], {}), "('2012-06-13')\n", (1526, 1540), True, 'import numpy as np\n'), ((1557, 1570), 'apexpy.Apex', 'apexpy.Apex', ([], {}), '()\n', (1568, 1570), False, 'import apexpy\n'), ((1588, 1673), 'ttools.create_dataset.get_mag_grid', 'create_dataset.get_mag_grid', (['config.madrigal_lat', 'config.madrigal_lon', 'converter'], {}), '(config.madrigal_lat, config.madrigal_lon, converter\n )\n', (1615, 1673), False, 'from ttools import create_dataset, config, io, utils\n'), ((1685, 1704), 'numpy.arange', 'np.arange', (['(-0.5)', '(10)'], {}), '(-0.5, 10)\n', (1694, 1704), True, 'import numpy as np\n'), ((1776, 1889), 'ttools.create_dataset.process_file', 'create_dataset.process_file', (['start_date', 'end_date', 'mlat', 'mlon', 'converter', 'bins', 'map_period', 'madrigal_data_dir'], {}), '(start_date, end_date, mlat, mlon, converter,\n bins, map_period, madrigal_data_dir)\n', (1803, 1889), False, 'from ttools import create_dataset, config, io, utils\n'), ((2555, 2576), 'numpy.zeros', 'np.zeros', (['(1, 10, 10)'], {}), '((1, 10, 10))\n', (2563, 2576), True, 'import numpy as np\n'), ((2695, 2721), 'numpy.array', 'np.array', (['[-0.5, 4.5, 9.5]'], {}), '([-0.5, 4.5, 9.5])\n', (2703, 2721), True, 'import numpy as np\n'), ((2965, 2980), 'numpy.isnan', 'np.isnan', (['out_t'], {}), '(out_t)\n', (2973, 2980), True, 'import numpy as np\n'), ((2992, 
3009), 'numpy.isnan', 'np.isnan', (['out_ssm'], {}), '(out_ssm)\n', (3000, 3009), True, 'import numpy as np\n'), ((3194, 3213), 'numpy.all', 'np.all', (['(out_n == 25)'], {}), '(out_n == 25)\n', (3200, 3213), True, 'import numpy as np\n'), ((3261, 3288), 'numpy.datetime64', 'np.datetime64', (['"""2012-03-07"""'], {}), "('2012-03-07')\n", (3274, 3288), True, 'import numpy as np\n'), ((3304, 3331), 'numpy.datetime64', 'np.datetime64', (['"""2012-03-08"""'], {}), "('2012-03-08')\n", (3317, 3331), True, 'import numpy as np\n'), ((3346, 3369), 'numpy.timedelta64', 'np.timedelta64', (['(12)', '"""h"""'], {}), "(12, 'h')\n", (3360, 3369), True, 'import numpy as np\n'), ((3386, 3412), 'numpy.array', 'np.array', (['[35, 45, 55, 65]'], {}), '([35, 45, 55, 65])\n', (3394, 3412), True, 'import numpy as np\n'), ((3428, 3460), 'numpy.array', 'np.array', (['[-1.5, -0.5, 0.5, 1.5]'], {}), '([-1.5, -0.5, 0.5, 1.5])\n', (3436, 3460), True, 'import numpy as np\n'), ((3551, 3591), 'numpy.arange', 'np.arange', (['start_date', 'end_date', 'file_dt'], {}), '(start_date, end_date, file_dt)\n', (3560, 3591), True, 'import numpy as np\n'), ((632, 663), 'numpy.ones', 'np.ones', (['(times.shape[0], 1, 1)'], {}), '((times.shape[0], 1, 1))\n', (639, 663), True, 'import numpy as np\n'), ((692, 723), 'numpy.ones', 'np.ones', (['(times.shape[0], 1, 1)'], {}), '((times.shape[0], 1, 1))\n', (699, 723), True, 'import numpy as np\n'), ((952, 996), 'numpy.ceil', 'np.ceil', (['((times[-1] - times[0]) / map_period)'], {}), '((times[-1] - times[0]) / map_period)\n', (959, 996), True, 'import numpy as np\n'), ((2464, 2483), 'numpy.ones', 'np.ones', (['(1, 1, 10)'], {}), '((1, 1, 10))\n', (2471, 2483), True, 'import numpy as np\n'), ((2525, 2544), 'numpy.ones', 'np.ones', (['(1, 10, 1)'], {}), '((1, 10, 1))\n', (2532, 2544), True, 'import numpy as np\n'), ((2666, 2676), 'numpy.ones', 'np.ones', (['(1)'], {}), '(1)\n', (2673, 2676), True, 'import numpy as np\n'), ((3602, 3631), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3629, 3631), False, 'import tempfile\n'), ((3955, 3987), 'os.path.join', 'os.path.join', (['tempdir', '"""grid.h5"""'], {}), "(tempdir, 'grid.h5')\n", (3967, 3987), False, 'import os\n'), ((4003, 4026), 'os.path.exists', 'os.path.exists', (['grid_fn'], {}), '(grid_fn)\n', (4017, 4026), False, 'import os\n'), ((4159, 4189), 'numpy.all', 'np.all', (['(mlt_vals == [-1, 0, 1])'], {}), '(mlt_vals == [-1, 0, 1])\n', (4165, 4189), True, 'import numpy as np\n'), ((4205, 4238), 'numpy.all', 'np.all', (['(mlat_vals == [40, 50, 60])'], {}), '(mlat_vals == [40, 50, 60])\n', (4211, 4238), True, 'import numpy as np\n'), ((294, 330), 'numpy.datetime64', 'np.datetime64', (['"""2010-01-01T00:00:00"""'], {}), "('2010-01-01T00:00:00')\n", (307, 330), True, 'import numpy as np\n'), ((2433, 2446), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (2442, 2446), True, 'import numpy as np\n'), ((2494, 2507), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (2503, 2507), True, 'import numpy as np\n'), ((4040, 4063), 'h5py.File', 'h5py.File', (['grid_fn', '"""r"""'], {}), "(grid_fn, 'r')\n", (4049, 4063), False, 'import h5py\n'), ((4298, 4315), 'os.path.exists', 'os.path.exists', (['f'], {}), '(f)\n', (4312, 4315), False, 'import os\n'), ((4357, 4376), 'ttools.io.open_tec_file', 'io.open_tec_file', (['f'], {}), '(f)\n', (4373, 4376), False, 'from ttools import create_dataset, config, io, utils\n'), ((333, 347), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (342, 347), True, 'import 
numpy as np\n'), ((350, 372), 'numpy.timedelta64', 'np.timedelta64', (['(5)', '"""m"""'], {}), "(5, 'm')\n", (364, 372), True, 'import numpy as np\n'), ((1252, 1274), 'numpy.timedelta64', 'np.timedelta64', (['(5)', '"""m"""'], {}), "(5, 'm')\n", (1266, 1274), True, 'import numpy as np\n'), ((3804, 3828), 'numpy.timedelta64', 'np.timedelta64', (['(365)', '"""D"""'], {}), "(365, 'D')\n", (3818, 3828), True, 'import numpy as np\n'), ((4439, 4471), 'ttools.utils.datetime64_to_timestamp', 'utils.datetime64_to_timestamp', (['d'], {}), '(d)\n', (4468, 4471), False, 'from ttools import create_dataset, config, io, utils\n'), ((2070, 2097), 'numpy.datetime64', 'np.datetime64', (['"""2012-06-10"""'], {}), "('2012-06-10')\n", (2083, 2097), True, 'import numpy as np\n'), ((2139, 2166), 'numpy.datetime64', 'np.datetime64', (['"""2012-06-11"""'], {}), "('2012-06-11')\n", (2152, 2166), True, 'import numpy as np\n'), ((2304, 2317), 'numpy.isnan', 'np.isnan', (['tec'], {}), '(tec)\n', (2312, 2317), True, 'import numpy as np\n'), ((2356, 2369), 'numpy.isnan', 'np.isnan', (['tec'], {}), '(tec)\n', (2364, 2369), True, 'import numpy as np\n'), ((2212, 2239), 'numpy.datetime64', 'np.datetime64', (['"""2012-06-10"""'], {}), "('2012-06-10')\n", (2225, 2239), True, 'import numpy as np\n'), ((2252, 2279), 'numpy.datetime64', 'np.datetime64', (['"""2012-06-11"""'], {}), "('2012-06-11')\n", (2265, 2279), True, 'import numpy as np\n')] |
import typer
def name_callback(value: str):
if value != "Camila":
raise typer.BadParameter("Only Camila is allowed")
return value
def main(name: str = typer.Option(..., callback=name_callback)):
typer.echo(f"Hello {name}")
if __name__ == "__main__":
typer.run(main)
| [
"typer.Option",
"typer.run",
"typer.echo",
"typer.BadParameter"
]
| [((171, 212), 'typer.Option', 'typer.Option', (['...'], {'callback': 'name_callback'}), '(..., callback=name_callback)\n', (183, 212), False, 'import typer\n'), ((219, 246), 'typer.echo', 'typer.echo', (['f"""Hello {name}"""'], {}), "(f'Hello {name}')\n", (229, 246), False, 'import typer\n'), ((280, 295), 'typer.run', 'typer.run', (['main'], {}), '(main)\n', (289, 295), False, 'import typer\n'), ((86, 130), 'typer.BadParameter', 'typer.BadParameter', (['"""Only Camila is allowed"""'], {}), "('Only Camila is allowed')\n", (104, 130), False, 'import typer\n')] |
from math import sqrt
def stream_primes(num):
primes = []
candidate = 2
for i in range(num):
prime = next_prime(primes, candidate)
primes.append(prime)
candidate = prime + 1
yield prime
def next_prime(primes, candidate):
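    # Trial division against the known primes: stop early once a prime exceeds
    # sqrt(candidate); the for/else fires when no known prime divides candidate.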
while True:
for prime in primes:
if candidate % prime == 0:
break
elif prime > sqrt(candidate):
return candidate
else:
return candidate
candidate += 1
for prime in stream_primes(10001):
print(prime)
| [
"math.sqrt"
]
| [((398, 413), 'math.sqrt', 'sqrt', (['candidate'], {}), '(candidate)\n', (402, 413), False, 'from math import sqrt\n')] |
from typing import List, Dict
import json
from gtmcore.http import ConcurrentRequestManager, ConcurrentRequest
from gtmcore.environment.packagemanager import PackageManager, PackageResult, PackageMetadata
from gtmcore.container import container_for_context
from gtmcore.labbook import LabBook
from gtmcore.logging import LMLogger
logger = LMLogger.get_logger()
class CondaPackageManagerBase(PackageManager):
"""Class to implement the conda package manager
"""
def __init__(self):
# String to be set in child classes indicating which python version you are checking. Typically should be either
# python 3.6* or python 2.7*
self.python_depends_str = None
# String of the name of the conda environment (e.g. py36 or py27, as created via container build)
self.python_env = None
# Note, currently we hard code channel config. Future changes to support the user specifying channels
# will modify this behavior
self.channel_priority = ['conda-forge', 'anaconda']
self.request_mgr = ConcurrentRequestManager()
def list_versions(self, package_name: str, labbook: LabBook, username: str) -> List[str]:
"""Method to list all available versions of a package based on the package name
Args:
package_name: Name of the package to query
labbook: Subject LabBook
username: username of current user
Returns:
list(str): Version strings
"""
# Check for package in channels, picking out version by priority
request_list = list()
for channel in self.channel_priority:
request_list.append(ConcurrentRequest(f"https://api.anaconda.org/package/{channel}/{package_name}",
headers={'Accept': 'application/json'}))
responses = self.request_mgr.resolve_many(request_list)
versions = None
for response in responses:
if response.status_code != 200:
continue
versions = response.json.get('versions')
break
if not versions:
raise ValueError(f"Package {package_name} not found in channels {' ,'.join(self.channel_priority)}.")
versions.reverse()
return versions
def list_installed_packages(self, labbook: LabBook, username: str) -> List[Dict[str, str]]:
"""Method to get a list of all packages that are currently installed
Note, this will return results for the computer/container in which it is executed. To get the properties of
a LabBook container, a docker exec command would be needed from the Gigantum application container.
return format is a list of dicts with the format (name: <package name>, version: <version string>)
Returns:
list
"""
project_container = container_for_context(username, labbook=labbook)
result = project_container.run_container("conda list --no-pip --json", wait_for_output=True)
if result:
data = json.loads(result)
if data:
return [{"name": x['name'], 'version': x['version']} for x in data]
            else:
                return []
        return []
def validate_packages(self, package_list: List[Dict[str, str]], labbook: LabBook, username: str) \
-> List[PackageResult]:
"""Method to validate a list of packages, and if needed fill in any missing versions
Should check both the provided package name and version. If the version is omitted, it should be generated
from the latest version.
Args:
package_list(list): A list of dictionaries of packages to validate
labbook(str): The labbook instance
username(str): The username for the logged in user
Returns:
namedtuple: namedtuple indicating if the package and version are valid
"""
result = list()
# Check for package in channels, picking out version by priority
request_list = list()
for pkg in package_list:
for channel in self.channel_priority:
request_list.append(ConcurrentRequest(f"https://api.anaconda.org/package/{channel}/{pkg['package']}",
headers={'Accept': 'application/json'}))
responses = self.request_mgr.resolve_many(request_list)
# Repack into groups by package
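        # zip(*(iter(responses),) * n) chunks the flat response list into per-package tuples of n responses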
responses_per_package = list(zip(*(iter(responses),) * len(self.channel_priority)))
for package, responses in zip(package_list, responses_per_package):
versions = None
latest_version = None
for response in responses:
if response.status_code != 200:
continue
versions = response.json.get('versions')
latest_version = response.json.get('latest_version')
break
if not versions:
# Package is not found
result.append(PackageResult(package=package['package'], version=package.get('version'), error=True))
continue
if package.get('version'):
# Package has been set, so validate it
if package.get('version') in versions:
# Both package name and version are valid
result.append(PackageResult(package=package['package'], version=package.get('version'),
error=False))
else:
# The package version is not in the list, so invalid
result.append(PackageResult(package=package['package'], version=package.get('version'), error=True))
else:
# You need to look up the latest version since not included
result.append(PackageResult(package=package['package'], version=str(latest_version),
error=False))
return result
def get_packages_metadata(self, package_list: List[str], labbook: LabBook, username: str) -> List[PackageMetadata]:
"""Method to get package metadata
Args:
package_list: List of package names
labbook(str): The labbook instance
username(str): The username for the logged in user
Returns:
list
"""
def _extract_metadata(data):
"""Extraction method to pull out the docs URL and description"""
latest_val = data.get('latest_version')
description_val = data.get('summary').strip()
docs_val = data.get('doc_url')
if not docs_val:
docs_val = data.get('html_url')
return latest_val, description_val, docs_val
# Check for package in channels, picking out version by priority
request_list = list()
for pkg in package_list:
for channel in self.channel_priority:
request_list.append(ConcurrentRequest(f"https://api.anaconda.org/package/{channel}/{pkg}",
headers={'Accept': 'application/json'},
extraction_function=_extract_metadata))
responses = self.request_mgr.resolve_many(request_list)
# Repack into groups by package
responses_per_package = list(zip(*(iter(responses),) * len(self.channel_priority)))
result = list()
for package, responses in zip(package_list, responses_per_package):
data = None
for response in responses:
if response.status_code == 200:
data = response.extracted_json
break
if data:
latest_version, description, docs_url = data
result.append(PackageMetadata(package_manager="conda", package=package, latest_version=latest_version,
description=description, docs_url=docs_url))
else:
result.append(PackageMetadata(package_manager="conda", package=package, latest_version=None,
description=None, docs_url=None))
return result
def generate_docker_install_snippet(self, packages: List[Dict[str, str]], single_line: bool = False) -> List[str]:
"""Method to generate a docker snippet to install 1 or more packages
        Note: Because conda can be so slow to solve environments with conda-forge included, always collapse to a single line.
Args:
packages(list(dict)): A list of package names and versions to install
            single_line(bool): If true, collapse the installs into a single line
Returns:
list
"""
package_strings = [f"{x['name']}={x['version']}" for x in packages]
if single_line:
return [f"RUN conda install -yq {' '.join(package_strings)}"]
else:
return [f"RUN conda install -yq {' '.join(package_strings)}"]
class Conda3PackageManager(CondaPackageManagerBase):
"""Class to implement the conda3 package manager
"""
def __init__(self):
super().__init__()
self.python_depends_str = 'python 3.6*'
self.python_env = 'py36'
class Conda2PackageManager(CondaPackageManagerBase):
"""Class to implement the conda2 package manager
"""
def __init__(self):
super().__init__()
self.python_depends_str = 'python 2.7*'
self.python_env = 'py27'
| [
"json.loads",
"gtmcore.http.ConcurrentRequest",
"gtmcore.environment.packagemanager.PackageMetadata",
"gtmcore.logging.LMLogger.get_logger",
"gtmcore.container.container_for_context",
"gtmcore.http.ConcurrentRequestManager"
]
| [((341, 362), 'gtmcore.logging.LMLogger.get_logger', 'LMLogger.get_logger', ([], {}), '()\n', (360, 362), False, 'from gtmcore.logging import LMLogger\n'), ((1065, 1091), 'gtmcore.http.ConcurrentRequestManager', 'ConcurrentRequestManager', ([], {}), '()\n', (1089, 1091), False, 'from gtmcore.http import ConcurrentRequestManager, ConcurrentRequest\n'), ((2890, 2938), 'gtmcore.container.container_for_context', 'container_for_context', (['username'], {'labbook': 'labbook'}), '(username, labbook=labbook)\n', (2911, 2938), False, 'from gtmcore.container import container_for_context\n'), ((3078, 3096), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (3088, 3096), False, 'import json\n'), ((1679, 1802), 'gtmcore.http.ConcurrentRequest', 'ConcurrentRequest', (['f"""https://api.anaconda.org/package/{channel}/{package_name}"""'], {'headers': "{'Accept': 'application/json'}"}), "(f'https://api.anaconda.org/package/{channel}/{package_name}',\n headers={'Accept': 'application/json'})\n", (1696, 1802), False, 'from gtmcore.http import ConcurrentRequestManager, ConcurrentRequest\n'), ((4177, 4308), 'gtmcore.http.ConcurrentRequest', 'ConcurrentRequest', (['f"""https://api.anaconda.org/package/{channel}/{pkg[\'package\']}"""'], {'headers': "{'Accept': 'application/json'}"}), '(\n f"https://api.anaconda.org/package/{channel}/{pkg[\'package\']}", headers\n ={\'Accept\': \'application/json\'})\n', (4194, 4308), False, 'from gtmcore.http import ConcurrentRequestManager, ConcurrentRequest\n'), ((7051, 7209), 'gtmcore.http.ConcurrentRequest', 'ConcurrentRequest', (['f"""https://api.anaconda.org/package/{channel}/{pkg}"""'], {'headers': "{'Accept': 'application/json'}", 'extraction_function': '_extract_metadata'}), "(f'https://api.anaconda.org/package/{channel}/{pkg}',\n headers={'Accept': 'application/json'}, extraction_function=\n _extract_metadata)\n", (7068, 7209), False, 'from gtmcore.http import ConcurrentRequestManager, ConcurrentRequest\n'), ((7909, 8046), 'gtmcore.environment.packagemanager.PackageMetadata', 'PackageMetadata', ([], {'package_manager': '"""conda"""', 'package': 'package', 'latest_version': 'latest_version', 'description': 'description', 'docs_url': 'docs_url'}), "(package_manager='conda', package=package, latest_version=\n latest_version, description=description, docs_url=docs_url)\n", (7924, 8046), False, 'from gtmcore.environment.packagemanager import PackageManager, PackageResult, PackageMetadata\n'), ((8137, 8253), 'gtmcore.environment.packagemanager.PackageMetadata', 'PackageMetadata', ([], {'package_manager': '"""conda"""', 'package': 'package', 'latest_version': 'None', 'description': 'None', 'docs_url': 'None'}), "(package_manager='conda', package=package, latest_version=\n None, description=None, docs_url=None)\n", (8152, 8253), False, 'from gtmcore.environment.packagemanager import PackageManager, PackageResult, PackageMetadata\n')] |
from tensor.main_module import Tensor
import numpy as np
def getTensor(value):
if type(value) is np.ndarray:
return Tensor.numpy2Tensor(value)
elif type(value) is Tensor:
return value
else:
        raise Exception
| [
"tensor.main_module.Tensor.numpy2Tensor"
]
| [((129, 155), 'tensor.main_module.Tensor.numpy2Tensor', 'Tensor.numpy2Tensor', (['value'], {}), '(value)\n', (148, 155), False, 'from tensor.main_module import Tensor\n')] |
# coding: utf-8
"""
TGS API
A production scale tool for BYOND server management # noqa: E501
OpenAPI spec version: 9.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.watchdog_status import WatchdogStatus # noqa: E501
from swagger_client.rest import ApiException
class TestWatchdogStatus(unittest.TestCase):
"""WatchdogStatus unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testWatchdogStatus(self):
"""Test WatchdogStatus"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.watchdog_status.WatchdogStatus() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"unittest.main"
]
| [((862, 877), 'unittest.main', 'unittest.main', ([], {}), '()\n', (875, 877), False, 'import unittest\n')] |
from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='Python codebase for the housing classification ML problem',
author='Joesan',
license='',
)
| [
"setuptools.find_packages"
]
| [((81, 96), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (94, 96), False, 'from setuptools import find_packages, setup\n')] |
# Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch
from mmedit.models.backbones.sr_backbones.edvr_net import (EDVRNet,
PCDAlignment,
TSAFusion)
def test_pcd_alignment():
"""Test PCDAlignment."""
# cpu
pcd_alignment = PCDAlignment(mid_channels=4, deform_groups=2)
input_list = []
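    # build a three-level feature pyramid (8x8, 4x4 and 2x2 maps) with 4 channels each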
for i in range(3, 0, -1):
input_list.append(torch.rand(1, 4, 2**i, 2**i))
pcd_alignment = pcd_alignment
input_list = [v for v in input_list]
output = pcd_alignment(input_list, input_list)
assert output.shape == (1, 4, 8, 8)
with pytest.raises(AssertionError):
pcd_alignment(input_list[0:2], input_list)
# gpu
if torch.cuda.is_available():
pcd_alignment = PCDAlignment(mid_channels=4, deform_groups=2)
input_list = []
for i in range(3, 0, -1):
input_list.append(torch.rand(1, 4, 2**i, 2**i))
pcd_alignment = pcd_alignment.cuda()
input_list = [v.cuda() for v in input_list]
output = pcd_alignment(input_list, input_list)
assert output.shape == (1, 4, 8, 8)
with pytest.raises(AssertionError):
pcd_alignment(input_list[0:2], input_list)
def test_tsa_fusion():
"""Test TSAFusion."""
# cpu
tsa_fusion = TSAFusion(mid_channels=4, num_frames=5, center_frame_idx=2)
input_tensor = torch.rand(1, 5, 4, 8, 8)
output = tsa_fusion(input_tensor)
assert output.shape == (1, 4, 8, 8)
# gpu
if torch.cuda.is_available():
tsa_fusion = tsa_fusion.cuda()
input_tensor = input_tensor.cuda()
output = tsa_fusion(input_tensor)
assert output.shape == (1, 4, 8, 8)
def test_edvrnet():
"""Test EDVRNet."""
# cpu
# with tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=True)
input_tensor = torch.rand(1, 5, 3, 8, 8)
edvrnet.init_weights(pretrained=None)
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
# without tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=False)
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
with pytest.raises(AssertionError):
# The height and width of inputs should be a multiple of 4
input_tensor = torch.rand(1, 5, 3, 3, 3)
edvrnet(input_tensor)
with pytest.raises(TypeError):
# pretrained should be str or None
edvrnet.init_weights(pretrained=[1])
# gpu
if torch.cuda.is_available():
# with tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=True).cuda()
input_tensor = torch.rand(1, 5, 3, 8, 8).cuda()
edvrnet.init_weights(pretrained=None)
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
# without tsa
edvrnet = EDVRNet(
3,
3,
mid_channels=8,
num_frames=5,
deform_groups=2,
num_blocks_extraction=1,
num_blocks_reconstruction=1,
center_frame_idx=2,
with_tsa=False).cuda()
output = edvrnet(input_tensor)
assert output.shape == (1, 3, 32, 32)
with pytest.raises(AssertionError):
# The height and width of inputs should be a multiple of 4
input_tensor = torch.rand(1, 5, 3, 3, 3).cuda()
edvrnet(input_tensor)
with pytest.raises(TypeError):
# pretrained should be str or None
edvrnet.init_weights(pretrained=[1])
| [
"mmedit.models.backbones.sr_backbones.edvr_net.PCDAlignment",
"mmedit.models.backbones.sr_backbones.edvr_net.TSAFusion",
"mmedit.models.backbones.sr_backbones.edvr_net.EDVRNet",
"torch.cuda.is_available",
"pytest.raises",
"torch.rand"
]
| [((375, 420), 'mmedit.models.backbones.sr_backbones.edvr_net.PCDAlignment', 'PCDAlignment', ([], {'mid_channels': '(4)', 'deform_groups': '(2)'}), '(mid_channels=4, deform_groups=2)\n', (387, 420), False, 'from mmedit.models.backbones.sr_backbones.edvr_net import EDVRNet, PCDAlignment, TSAFusion\n'), ((804, 829), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (827, 829), False, 'import torch\n'), ((1395, 1454), 'mmedit.models.backbones.sr_backbones.edvr_net.TSAFusion', 'TSAFusion', ([], {'mid_channels': '(4)', 'num_frames': '(5)', 'center_frame_idx': '(2)'}), '(mid_channels=4, num_frames=5, center_frame_idx=2)\n', (1404, 1454), False, 'from mmedit.models.backbones.sr_backbones.edvr_net import EDVRNet, PCDAlignment, TSAFusion\n'), ((1474, 1499), 'torch.rand', 'torch.rand', (['(1)', '(5)', '(4)', '(8)', '(8)'], {}), '(1, 5, 4, 8, 8)\n', (1484, 1499), False, 'import torch\n'), ((1597, 1622), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1620, 1622), False, 'import torch\n'), ((1879, 2037), 'mmedit.models.backbones.sr_backbones.edvr_net.EDVRNet', 'EDVRNet', (['(3)', '(3)'], {'mid_channels': '(8)', 'num_frames': '(5)', 'deform_groups': '(2)', 'num_blocks_extraction': '(1)', 'num_blocks_reconstruction': '(1)', 'center_frame_idx': '(2)', 'with_tsa': '(True)'}), '(3, 3, mid_channels=8, num_frames=5, deform_groups=2,\n num_blocks_extraction=1, num_blocks_reconstruction=1, center_frame_idx=\n 2, with_tsa=True)\n', (1886, 2037), False, 'from mmedit.models.backbones.sr_backbones.edvr_net import EDVRNet, PCDAlignment, TSAFusion\n'), ((2121, 2146), 'torch.rand', 'torch.rand', (['(1)', '(5)', '(3)', '(8)', '(8)'], {}), '(1, 5, 3, 8, 8)\n', (2131, 2146), False, 'import torch\n'), ((2299, 2458), 'mmedit.models.backbones.sr_backbones.edvr_net.EDVRNet', 'EDVRNet', (['(3)', '(3)'], {'mid_channels': '(8)', 'num_frames': '(5)', 'deform_groups': '(2)', 'num_blocks_extraction': '(1)', 'num_blocks_reconstruction': '(1)', 'center_frame_idx': '(2)', 'with_tsa': '(False)'}), '(3, 3, mid_channels=8, num_frames=5, deform_groups=2,\n num_blocks_extraction=1, num_blocks_reconstruction=1, center_frame_idx=\n 2, with_tsa=False)\n', (2306, 2458), False, 'from mmedit.models.backbones.sr_backbones.edvr_net import EDVRNet, PCDAlignment, TSAFusion\n'), ((2930, 2955), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2953, 2955), False, 'import torch\n'), ((704, 733), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (717, 733), False, 'import pytest\n'), ((855, 900), 'mmedit.models.backbones.sr_backbones.edvr_net.PCDAlignment', 'PCDAlignment', ([], {'mid_channels': '(4)', 'deform_groups': '(2)'}), '(mid_channels=4, deform_groups=2)\n', (867, 900), False, 'from mmedit.models.backbones.sr_backbones.edvr_net import EDVRNet, PCDAlignment, TSAFusion\n'), ((2611, 2640), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (2624, 2640), False, 'import pytest\n'), ((2732, 2757), 'torch.rand', 'torch.rand', (['(1)', '(5)', '(3)', '(3)', '(3)'], {}), '(1, 5, 3, 3, 3)\n', (2742, 2757), False, 'import torch\n'), ((2798, 2822), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2811, 2822), False, 'import pytest\n'), ((497, 529), 'torch.rand', 'torch.rand', (['(1)', '(4)', '(2 ** i)', '(2 ** i)'], {}), '(1, 4, 2 ** i, 2 ** i)\n', (507, 529), False, 'import torch\n'), ((1230, 1259), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1243, 1259), False, 
'import pytest\n'), ((3855, 3884), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (3868, 3884), False, 'import pytest\n'), ((4065, 4089), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (4078, 4089), False, 'import pytest\n'), ((989, 1021), 'torch.rand', 'torch.rand', (['(1)', '(4)', '(2 ** i)', '(2 ** i)'], {}), '(1, 4, 2 ** i, 2 ** i)\n', (999, 1021), False, 'import torch\n'), ((2994, 3152), 'mmedit.models.backbones.sr_backbones.edvr_net.EDVRNet', 'EDVRNet', (['(3)', '(3)'], {'mid_channels': '(8)', 'num_frames': '(5)', 'deform_groups': '(2)', 'num_blocks_extraction': '(1)', 'num_blocks_reconstruction': '(1)', 'center_frame_idx': '(2)', 'with_tsa': '(True)'}), '(3, 3, mid_channels=8, num_frames=5, deform_groups=2,\n num_blocks_extraction=1, num_blocks_reconstruction=1, center_frame_idx=\n 2, with_tsa=True)\n', (3001, 3152), False, 'from mmedit.models.backbones.sr_backbones.edvr_net import EDVRNet, PCDAlignment, TSAFusion\n'), ((3283, 3308), 'torch.rand', 'torch.rand', (['(1)', '(5)', '(3)', '(8)', '(8)'], {}), '(1, 5, 3, 8, 8)\n', (3293, 3308), False, 'import torch\n'), ((3488, 3647), 'mmedit.models.backbones.sr_backbones.edvr_net.EDVRNet', 'EDVRNet', (['(3)', '(3)'], {'mid_channels': '(8)', 'num_frames': '(5)', 'deform_groups': '(2)', 'num_blocks_extraction': '(1)', 'num_blocks_reconstruction': '(1)', 'center_frame_idx': '(2)', 'with_tsa': '(False)'}), '(3, 3, mid_channels=8, num_frames=5, deform_groups=2,\n num_blocks_extraction=1, num_blocks_reconstruction=1, center_frame_idx=\n 2, with_tsa=False)\n', (3495, 3647), False, 'from mmedit.models.backbones.sr_backbones.edvr_net import EDVRNet, PCDAlignment, TSAFusion\n'), ((3984, 4009), 'torch.rand', 'torch.rand', (['(1)', '(5)', '(3)', '(3)', '(3)'], {}), '(1, 5, 3, 3, 3)\n', (3994, 4009), False, 'import torch\n')] |
import os
from shutil import rmtree
from tempfile import mkdtemp
from unittest import TestCase
from enjoliver import generator
class GenerateGroupTestCase(TestCase):
api_uri = None
test_matchbox_path = None
test_resources_path = None
tests_path = None
@classmethod
def setUpClass(cls):
cls.tests_path = mkdtemp(dir='/tmp')
cls.test_matchbox_path = os.path.join(cls.tests_path, 'test_matchbox')
cls.test_resources_path = os.path.join(cls.tests_path, 'test_resources')
os.mkdir(cls.test_matchbox_path)
os.mkdir(cls.test_resources_path)
os.mkdir(os.path.join(cls.test_matchbox_path, 'groups'))
cls.api_uri = "http://127.0.0.1:5000"
@classmethod
def tearDownClass(cls):
rmtree(cls.tests_path)
class TestGenerateGroups(GenerateGroupTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.gen = generator.GenerateGroup(
api_uri=cls.api_uri,
_id="etcd-proxy",
name="etcd-proxy",
profile="TestGenerateProfiles",
matchbox_path=cls.test_matchbox_path
)
cls.gen.profiles_path = cls.test_resources_path
def test_instantiate_generate_group_with_incorrect_parameters(self):
with self.assertRaises(TypeError):
generator.GenerateGroup()
def test_instantiate_generate_group_with_non_existing_matchbox_path(self):
with self.assertRaises(OSError):
generator.GenerateGroup(
api_uri='foobar',
_id='foo',
name='foo-bar',
profile='foo-bar-baz',
matchbox_path='/foo/bar'
)
def test_instantiate_generate_group(self):
sandbox = mkdtemp(dir='/tmp')
os.mkdir(os.path.join(sandbox, 'groups'))
generator.GenerateGroup(
api_uri='foobar',
_id='foo',
name='foo-bar',
profile='foo-bar-baz',
matchbox_path=sandbox
)
rmtree(sandbox)
def test_00_uri(self):
ip = self.gen.api_uri
self.assertIsNotNone(ip)
def test_01_metadata(self):
expect = {'etcd_initial_cluster': '',
'api_uri': '%s' % self.gen.api_uri,
'ssh_authorized_keys': []}
self.gen._metadata()
self.assertEqual(expect['api_uri'], self.gen._target_data["metadata"]["api_uri"])
def test_990_generate(self):
expect = {
'profile': 'etcd-proxy.yaml',
'metadata': {
'api_uri': '%s' % self.gen.api_uri,
'ssh_authorized_keys': []
},
'id': 'etcd-proxy',
'name': 'etcd-proxy'
}
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id="etcd-proxy",
name="etcd-proxy",
profile="etcd-proxy.yaml",
matchbox_path=self.test_matchbox_path
)
result = new.generate()
self.assertEqual(expect["profile"], result["profile"])
self.assertEqual(expect["id"], result["id"])
self.assertEqual(expect["name"], result["name"])
self.assertEqual(expect["metadata"]["api_uri"], result["metadata"]["api_uri"])
def test_991_dump(self):
_id = "etcd-test-%s" % self.test_991_dump.__name__
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id=_id,
name="etcd-test",
profile="etcd-test.yaml",
matchbox_path=self.test_matchbox_path
)
self.assertTrue(new.dump())
self.assertTrue(os.path.isfile("%s/groups/%s.json" % (self.test_matchbox_path, _id)))
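        # a second dump() with identical content is expected to be a no-op and return False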
self.assertFalse(new.dump())
self.assertTrue(os.path.isfile("%s/groups/%s.json" % (self.test_matchbox_path, _id)))
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id=_id,
name="etcd-test",
profile="etcd-test.yaml",
matchbox_path=self.test_matchbox_path,
selector={"one": "selector"}
)
self.assertTrue(new.dump())
self.assertTrue(os.path.isfile("%s/groups/%s.json" % (self.test_matchbox_path, _id)))
os.remove("%s/groups/%s.json" % (self.test_matchbox_path, _id))
class TestGenerateGroupsSelectorLower(GenerateGroupTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
os.environ["MATCHBOX_URI"] = "http://127.0.0.1:8080"
os.environ["API_URI"] = "http://127.0.0.1:5000"
cls.gen = generator.GenerateGroup(
api_uri=cls.api_uri,
_id="etcd-proxy",
name="etcd-proxy",
profile="TestGenerateProfiles",
selector={"mac": "08:00:27:37:28:2e"},
matchbox_path=cls.test_matchbox_path
)
def test_00_api_uri(self):
ip = self.gen.api_uri
self.assertIsNotNone(ip)
def test_01_metadata(self):
expect = {
'api_uri': "%s" % self.gen.api_uri,
'ssh_authorized_keys': []
}
self.gen._metadata()
self.gen._target_data["metadata"]['ssh_authorized_keys'] = []
self.assertEqual(expect, self.gen._target_data["metadata"])
def test_02_selector(self):
expect = {'mac': '08:00:27:37:28:2e'}
self.gen._selector()
self.assertEqual(expect, self.gen._target_data["selector"])
def test_990_generate(self):
expect = {
'profile': 'etcd-proxy.yaml',
'metadata': {
'api_uri': self.gen.api_uri,
'selector': {'mac': '08:00:27:37:28:2e'},
'ssh_authorized_keys': []
},
'id': 'etcd-proxy',
'name': 'etcd-proxy',
'selector': {'mac': '08:00:27:37:28:2e'}
}
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id="etcd-proxy", name="etcd-proxy", profile="etcd-proxy.yaml",
selector={"mac": "08:00:27:37:28:2e"},
matchbox_path=self.test_matchbox_path)
result = new.generate()
result["metadata"]['ssh_authorized_keys'] = []
self.assertEqual(expect, result)
def test_991_dump(self):
_id = "etcd-test-%s" % self.test_991_dump.__name__
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id="%s" % _id, name="etcd-test", profile="etcd-test.yaml",
matchbox_path=self.test_matchbox_path,
selector={"mac": "08:00:27:37:28:2e"}
)
self.assertTrue(new.dump())
self.assertTrue(os.path.isfile("%s/groups/%s.json" % (self.test_matchbox_path, _id)))
os.remove("%s/groups/%s.json" % (self.test_matchbox_path, _id))
class TestGenerateGroupsSelectorUpper(GenerateGroupTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
os.environ["MATCHBOX_URI"] = "http://127.0.0.1:8080"
os.environ["API_URI"] = "http://127.0.0.1:5000"
cls.gen = generator.GenerateGroup(
api_uri=cls.api_uri,
_id="etcd-proxy",
name="etcd-proxy",
profile="TestGenerateProfiles",
selector={"mac": "08:00:27:37:28:2E"},
matchbox_path=cls.test_matchbox_path
)
def test_00_ip_address(self):
ip = self.gen.api_uri
self.assertIsNotNone(ip)
def test_01_metadata(self):
expect = {
'api_uri': "%s" % self.gen.api_uri,
'ssh_authorized_keys': []
}
self.gen._metadata()
self.gen._target_data["metadata"]['ssh_authorized_keys'] = []
self.assertEqual(expect, self.gen._target_data["metadata"])
def test_02_selector(self):
expect = {'mac': '08:00:27:37:28:2e'}
self.gen._selector()
self.assertEqual(expect, self.gen._target_data["selector"])
def test_990_generate(self):
expect = {
'profile': 'etcd-proxy.yaml',
'metadata': {
'api_uri': "%s" % self.gen.api_uri,
'selector': {'mac': '08:00:27:37:28:2e'},
'ssh_authorized_keys': []
},
'id': 'etcd-proxy',
'name': 'etcd-proxy',
'selector': {'mac': '08:00:27:37:28:2e'}
}
new = generator.GenerateGroup(
api_uri=self.api_uri, _id="etcd-proxy",
name="etcd-proxy",
profile="etcd-proxy.yaml",
selector={"mac": "08:00:27:37:28:2e"},
matchbox_path=self.test_matchbox_path
)
result = new.generate()
result["metadata"]['ssh_authorized_keys'] = []
self.assertEqual(expect, result)
def test_991_dump(self):
_id = "etcd-test-%s" % self.test_991_dump.__name__
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id="%s" % _id, name="etcd-test", profile="etcd-test.yaml",
matchbox_path=self.test_matchbox_path,
selector={"mac": "08:00:27:37:28:2e"}
)
new.dump()
self.assertTrue(os.path.isfile("%s/groups/%s.json" % (self.test_matchbox_path, _id)))
os.remove("%s/groups/%s.json" % (self.test_matchbox_path, _id))
class TestGenerateGroupsExtraMetadata(GenerateGroupTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
os.environ["MATCHBOX_URI"] = "http://127.0.0.1:8080"
os.environ["API_URI"] = "http://127.0.0.1:5000"
cls.gen = generator.GenerateGroup(
api_uri=cls.api_uri,
_id="etcd-proxy",
name="etcd-proxy",
profile="TestGenerateProfiles",
selector={"mac": "08:00:27:37:28:2E"},
metadata={"etcd_initial_cluster": "static0=http://192.168.1.1:2379",
"api_seed": "http://192.168.1.2:5000"},
matchbox_path=cls.test_matchbox_path
)
def test_00_api_uri(self):
ip = self.gen.api_uri
self.assertIsNotNone(ip)
def test_01_metadata(self):
expect = {'etcd_initial_cluster': 'static0=http://192.168.1.1:2379',
'api_uri': "%s" % self.gen.api_uri,
'api_seed': 'http://192.168.1.2:5000',
'ssh_authorized_keys': []}
self.gen._metadata()
self.gen._target_data["metadata"]['ssh_authorized_keys'] = []
self.assertEqual(expect, self.gen._target_data["metadata"])
def test_02_selector(self):
expect = {'mac': '08:00:27:37:28:2e'}
self.gen._selector()
self.assertEqual(expect, self.gen._target_data["selector"])
def test_990_generate(self):
expect = {
'profile': 'etcd-proxy.yaml',
'metadata': {
'api_uri': "%s" % self.gen.api_uri,
'selector': {'mac': '08:00:27:37:28:2e'},
'ssh_authorized_keys': []
},
'id': 'etcd-proxy',
'name': 'etcd-proxy',
'selector': {'mac': '08:00:27:37:28:2e'}
}
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id="etcd-proxy", name="etcd-proxy", profile="etcd-proxy.yaml",
selector={"mac": "08:00:27:37:28:2e"},
matchbox_path=self.test_matchbox_path
)
result = new.generate()
result["metadata"]["ssh_authorized_keys"] = []
self.assertEqual(expect, result)
def test_991_dump(self):
_id = "etcd-test-%s" % self.test_991_dump.__name__
new = generator.GenerateGroup(
api_uri=self.api_uri,
_id="%s" % _id, name="etcd-test", profile="etcd-test.yaml",
matchbox_path=self.test_matchbox_path,
selector={"mac": "08:00:27:37:28:2e"}
)
self.assertTrue(new.dump())
self.assertTrue(os.path.isfile("%s/groups/%s.json" % (self.test_matchbox_path, _id)))
os.remove("%s/groups/%s.json" % (self.test_matchbox_path, _id))
self.assertTrue(new.dump())
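        # dump() should report a change only when the generated group data
        # differs from what is already on disk.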
for i in range(10):
self.assertFalse(new.dump())
new.api_uri = "http://google.com"
self.assertTrue(new.dump())
self.assertFalse(new.dump())
| [
"os.path.join",
"os.path.isfile",
"tempfile.mkdtemp",
"os.mkdir",
"enjoliver.generator.GenerateGroup",
"shutil.rmtree",
"os.remove"
]
| [((339, 358), 'tempfile.mkdtemp', 'mkdtemp', ([], {'dir': '"""/tmp"""'}), "(dir='/tmp')\n", (346, 358), False, 'from tempfile import mkdtemp\n'), ((392, 437), 'os.path.join', 'os.path.join', (['cls.tests_path', '"""test_matchbox"""'], {}), "(cls.tests_path, 'test_matchbox')\n", (404, 437), False, 'import os\n'), ((472, 518), 'os.path.join', 'os.path.join', (['cls.tests_path', '"""test_resources"""'], {}), "(cls.tests_path, 'test_resources')\n", (484, 518), False, 'import os\n'), ((528, 560), 'os.mkdir', 'os.mkdir', (['cls.test_matchbox_path'], {}), '(cls.test_matchbox_path)\n', (536, 560), False, 'import os\n'), ((569, 602), 'os.mkdir', 'os.mkdir', (['cls.test_resources_path'], {}), '(cls.test_resources_path)\n', (577, 602), False, 'import os\n'), ((769, 791), 'shutil.rmtree', 'rmtree', (['cls.tests_path'], {}), '(cls.tests_path)\n', (775, 791), False, 'from shutil import rmtree\n'), ((932, 1093), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'cls.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""TestGenerateProfiles"""', 'matchbox_path': 'cls.test_matchbox_path'}), "(api_uri=cls.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='TestGenerateProfiles', matchbox_path=cls.\n test_matchbox_path)\n", (955, 1093), False, 'from enjoliver import generator\n'), ((1776, 1795), 'tempfile.mkdtemp', 'mkdtemp', ([], {'dir': '"""/tmp"""'}), "(dir='/tmp')\n", (1783, 1795), False, 'from tempfile import mkdtemp\n'), ((1855, 1973), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': '"""foobar"""', '_id': '"""foo"""', 'name': '"""foo-bar"""', 'profile': '"""foo-bar-baz"""', 'matchbox_path': 'sandbox'}), "(api_uri='foobar', _id='foo', name='foo-bar',\n profile='foo-bar-baz', matchbox_path=sandbox)\n", (1878, 1973), False, 'from enjoliver import generator\n'), ((2048, 2063), 'shutil.rmtree', 'rmtree', (['sandbox'], {}), '(sandbox)\n', (2054, 2063), False, 'from shutil import rmtree\n'), ((2772, 2930), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""etcd-proxy.yaml"""', 'matchbox_path': 'self.test_matchbox_path'}), "(api_uri=self.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='etcd-proxy.yaml', matchbox_path=self.\n test_matchbox_path)\n", (2795, 2930), False, 'from enjoliver import generator\n'), ((3386, 3527), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': '_id', 'name': '"""etcd-test"""', 'profile': '"""etcd-test.yaml"""', 'matchbox_path': 'self.test_matchbox_path'}), "(api_uri=self.api_uri, _id=_id, name='etcd-test',\n profile='etcd-test.yaml', matchbox_path=self.test_matchbox_path)\n", (3409, 3527), False, 'from enjoliver import generator\n'), ((3871, 4046), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': '_id', 'name': '"""etcd-test"""', 'profile': '"""etcd-test.yaml"""', 'matchbox_path': 'self.test_matchbox_path', 'selector': "{'one': 'selector'}"}), "(api_uri=self.api_uri, _id=_id, name='etcd-test',\n profile='etcd-test.yaml', matchbox_path=self.test_matchbox_path,\n selector={'one': 'selector'})\n", (3894, 4046), False, 'from enjoliver import generator\n'), ((4259, 4322), 'os.remove', 'os.remove', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (4268, 4322), False, 'import os\n'), 
((4593, 4792), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'cls.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""TestGenerateProfiles"""', 'selector': "{'mac': '08:00:27:37:28:2e'}", 'matchbox_path': 'cls.test_matchbox_path'}), "(api_uri=cls.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='TestGenerateProfiles', selector={'mac':\n '08:00:27:37:28:2e'}, matchbox_path=cls.test_matchbox_path)\n", (4616, 4792), False, 'from enjoliver import generator\n'), ((5876, 6072), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""etcd-proxy.yaml"""', 'selector': "{'mac': '08:00:27:37:28:2e'}", 'matchbox_path': 'self.test_matchbox_path'}), "(api_uri=self.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='etcd-proxy.yaml', selector={'mac':\n '08:00:27:37:28:2e'}, matchbox_path=self.test_matchbox_path)\n", (5899, 6072), False, 'from enjoliver import generator\n'), ((6344, 6537), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': "('%s' % _id)", 'name': '"""etcd-test"""', 'profile': '"""etcd-test.yaml"""', 'matchbox_path': 'self.test_matchbox_path', 'selector': "{'mac': '08:00:27:37:28:2e'}"}), "(api_uri=self.api_uri, _id='%s' % _id, name=\n 'etcd-test', profile='etcd-test.yaml', matchbox_path=self.\n test_matchbox_path, selector={'mac': '08:00:27:37:28:2e'})\n", (6367, 6537), False, 'from enjoliver import generator\n'), ((6724, 6787), 'os.remove', 'os.remove', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (6733, 6787), False, 'import os\n'), ((7058, 7257), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'cls.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""TestGenerateProfiles"""', 'selector': "{'mac': '08:00:27:37:28:2E'}", 'matchbox_path': 'cls.test_matchbox_path'}), "(api_uri=cls.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='TestGenerateProfiles', selector={'mac':\n '08:00:27:37:28:2E'}, matchbox_path=cls.test_matchbox_path)\n", (7081, 7257), False, 'from enjoliver import generator\n'), ((8351, 8547), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""etcd-proxy.yaml"""', 'selector': "{'mac': '08:00:27:37:28:2e'}", 'matchbox_path': 'self.test_matchbox_path'}), "(api_uri=self.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='etcd-proxy.yaml', selector={'mac':\n '08:00:27:37:28:2e'}, matchbox_path=self.test_matchbox_path)\n", (8374, 8547), False, 'from enjoliver import generator\n'), ((8840, 9033), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': "('%s' % _id)", 'name': '"""etcd-test"""', 'profile': '"""etcd-test.yaml"""', 'matchbox_path': 'self.test_matchbox_path', 'selector': "{'mac': '08:00:27:37:28:2e'}"}), "(api_uri=self.api_uri, _id='%s' % _id, name=\n 'etcd-test', profile='etcd-test.yaml', matchbox_path=self.\n test_matchbox_path, selector={'mac': '08:00:27:37:28:2e'})\n", (8863, 9033), False, 'from enjoliver import generator\n'), ((9203, 9266), 'os.remove', 'os.remove', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (9212, 9266), False, 
'import os\n'), ((9537, 9853), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'cls.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""TestGenerateProfiles"""', 'selector': "{'mac': '08:00:27:37:28:2E'}", 'metadata': "{'etcd_initial_cluster': 'static0=http://192.168.1.1:2379', 'api_seed':\n 'http://192.168.1.2:5000'}", 'matchbox_path': 'cls.test_matchbox_path'}), "(api_uri=cls.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='TestGenerateProfiles', selector={'mac':\n '08:00:27:37:28:2E'}, metadata={'etcd_initial_cluster':\n 'static0=http://192.168.1.1:2379', 'api_seed':\n 'http://192.168.1.2:5000'}, matchbox_path=cls.test_matchbox_path)\n", (9560, 9853), False, 'from enjoliver import generator\n'), ((11088, 11284), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': '"""etcd-proxy"""', 'name': '"""etcd-proxy"""', 'profile': '"""etcd-proxy.yaml"""', 'selector': "{'mac': '08:00:27:37:28:2e'}", 'matchbox_path': 'self.test_matchbox_path'}), "(api_uri=self.api_uri, _id='etcd-proxy', name=\n 'etcd-proxy', profile='etcd-proxy.yaml', selector={'mac':\n '08:00:27:37:28:2e'}, matchbox_path=self.test_matchbox_path)\n", (11111, 11284), False, 'from enjoliver import generator\n'), ((11565, 11758), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': 'self.api_uri', '_id': "('%s' % _id)", 'name': '"""etcd-test"""', 'profile': '"""etcd-test.yaml"""', 'matchbox_path': 'self.test_matchbox_path', 'selector': "{'mac': '08:00:27:37:28:2e'}"}), "(api_uri=self.api_uri, _id='%s' % _id, name=\n 'etcd-test', profile='etcd-test.yaml', matchbox_path=self.\n test_matchbox_path, selector={'mac': '08:00:27:37:28:2e'})\n", (11588, 11758), False, 'from enjoliver import generator\n'), ((11945, 12008), 'os.remove', 'os.remove', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (11954, 12008), False, 'import os\n'), ((620, 666), 'os.path.join', 'os.path.join', (['cls.test_matchbox_path', '"""groups"""'], {}), "(cls.test_matchbox_path, 'groups')\n", (632, 666), False, 'import os\n'), ((1339, 1364), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {}), '()\n', (1362, 1364), False, 'from enjoliver import generator\n'), ((1498, 1619), 'enjoliver.generator.GenerateGroup', 'generator.GenerateGroup', ([], {'api_uri': '"""foobar"""', '_id': '"""foo"""', 'name': '"""foo-bar"""', 'profile': '"""foo-bar-baz"""', 'matchbox_path': '"""/foo/bar"""'}), "(api_uri='foobar', _id='foo', name='foo-bar',\n profile='foo-bar-baz', matchbox_path='/foo/bar')\n", (1521, 1619), False, 'from enjoliver import generator\n'), ((1813, 1844), 'os.path.join', 'os.path.join', (['sandbox', '"""groups"""'], {}), "(sandbox, 'groups')\n", (1825, 1844), False, 'import os\n'), ((3654, 3722), 'os.path.isfile', 'os.path.isfile', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (3668, 3722), False, 'import os\n'), ((3786, 3854), 'os.path.isfile', 'os.path.isfile', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (3800, 3854), False, 'import os\n'), ((4181, 4249), 'os.path.isfile', 'os.path.isfile', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (4195, 4249), False, 'import os\n'), ((6646, 6714), 
'os.path.isfile', 'os.path.isfile', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (6660, 6714), False, 'import os\n'), ((9125, 9193), 'os.path.isfile', 'os.path.isfile', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (9139, 9193), False, 'import os\n'), ((11867, 11935), 'os.path.isfile', 'os.path.isfile', (["('%s/groups/%s.json' % (self.test_matchbox_path, _id))"], {}), "('%s/groups/%s.json' % (self.test_matchbox_path, _id))\n", (11881, 11935), False, 'import os\n')] |
# Enter your code here. Read input from STDIN. Print output to STDOUT
import calendar
mm,dd,yyyy = map(int,input().split())
day = ["MONDAY","TUESDAY","WEDNESDAY","THURSDAY","FRIDAY","SATURDAY","SUNDAY"]
val = int (calendar.weekday(yyyy,mm,dd))
print(day[val])
| [
"calendar.weekday"
]
| [((217, 247), 'calendar.weekday', 'calendar.weekday', (['yyyy', 'mm', 'dd'], {}), '(yyyy, mm, dd)\n', (233, 247), False, 'import calendar\n')] |
"""Defines the models for trigger rules and events"""
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import models, transaction
from django.utils.timezone import now
class TriggerEventManager(models.Manager):
"""Provides additional methods for handling trigger events
"""
def create_trigger_event(self, trigger_type, rule, description, occurred):
"""Creates a new trigger event and returns the event model. The given rule model, if not None, must have already
been saved in the database (it must have an ID). The returned trigger event model will be saved in the database.
:param trigger_type: The type of the trigger that occurred
:type trigger_type: str
:param rule: The rule that triggered the event, possibly None
:type rule: :class:`trigger.models.TriggerRule`
:param description: The JSON description of the event as a dict
:type description: dict
:param occurred: When the event occurred
:type occurred: :class:`datetime.datetime`
:returns: The new trigger event
:rtype: :class:`trigger.models.TriggerEvent`
"""
if trigger_type is None:
raise Exception('Trigger event must have a type')
if description is None:
raise Exception('Trigger event must have a JSON description')
if occurred is None:
raise Exception('Trigger event must have a timestamp')
event = TriggerEvent()
event.type = trigger_type
event.rule = rule
event.description = description
event.occurred = occurred
event.save()
return event
class TriggerEvent(models.Model):
"""Represents an event where a trigger occurred
:keyword type: The type of the trigger that occurred
:type type: :class:`django.db.models.CharField`
:keyword rule: The rule that triggered this event, possibly None (some events are not triggered by rules)
:type rule: :class:`django.db.models.ForeignKey`
:keyword description: JSON description of the event. This will contain fields specific to the type of the trigger
that occurred.
:type description: :class:`django.contrib.postgres.fields.JSONField`
:keyword occurred: When the event occurred
:type occurred: :class:`django.db.models.DateTimeField`
"""
type = models.CharField(db_index=True, max_length=50)
rule = models.ForeignKey('trigger.TriggerRule', blank=True, null=True, on_delete=models.PROTECT)
description = django.contrib.postgres.fields.JSONField(default=dict)
occurred = models.DateTimeField(db_index=True)
objects = TriggerEventManager()
class Meta(object):
"""meta information for the db"""
db_table = 'trigger_event'
class TriggerRuleManager(models.Manager):
"""Provides additional methods for handling trigger rules
"""
@transaction.atomic
def archive_trigger_rule(self, trigger_rule_id):
"""Archives the trigger rule (will no longer be active) with the given ID
:param trigger_rule_id: The ID of the trigger rule to archive
:type trigger_rule_id: int
"""
rule = TriggerRule.objects.select_for_update().get(pk=trigger_rule_id)
rule.is_active = False
rule.archived = now()
rule.save()
def create_trigger_rule(self, trigger_type, configuration, name='', is_active=True):
"""Creates a new trigger rule and returns the rule model. The returned trigger rule model will be saved in the
database.
:param trigger_type: The type of this trigger rule
:type trigger_type: str
:param configuration: The rule configuration
:type configuration: :class:`trigger.configuration.TriggerRuleConfiguration`
:param name: An optional name for the trigger
:type name: str
:param is_active: Whether or not the trigger should be active
:type is_active: bool
:returns: The new trigger rule
:rtype: :class:`trigger.models.TriggerRule`
:raises trigger.configuration.exceptions.InvalidTriggerRule: If the configuration is invalid
"""
if not trigger_type:
raise Exception('Trigger rule must have a type')
if not configuration:
raise Exception('Trigger rule must have a configuration')
configuration.validate()
rule = TriggerRule()
rule.type = trigger_type
rule.name = name
rule.is_active = is_active
rule.configuration = configuration.get_dict()
rule.save()
return rule
def get_by_natural_key(self, name):
"""Django method to retrieve a trigger rule for the given natural key. NOTE: All trigger rule names are NOT
unique. This is implemented to allow the loading of defined system trigger rules which do have unique names.
:param name: The name of the trigger rule
:type name: str
:returns: The trigger rule defined by the natural key
:rtype: :class:`error.models.Error`
"""
return self.get(name=name)
class TriggerRule(models.Model):
"""Represents a rule that, when triggered, creates a trigger event
:keyword type: The type of the trigger for the rule
:type type: :class:`django.db.models.CharField`
:keyword name: The identifying name of the trigger rule used by clients for queries
:type name: :class:`django.db.models.CharField`
:keyword configuration: JSON configuration for the rule. This will contain fields specific to the type of the
trigger.
:type configuration: :class:`django.contrib.postgres.fields.JSONField`
:keyword is_active: Whether the rule is still active (false once rule is archived)
:type is_active: :class:`django.db.models.BooleanField`
:keyword created: When the rule was created
:type created: :class:`django.db.models.DateTimeField`
:keyword archived: When the rule was archived (no longer active)
:type archived: :class:`django.db.models.DateTimeField`
:keyword last_modified: When the rule was last modified
:type last_modified: :class:`django.db.models.DateTimeField`
"""
type = models.CharField(max_length=50, db_index=True)
name = models.CharField(blank=True, max_length=50)
configuration = django.contrib.postgres.fields.JSONField(default=dict)
is_active = models.BooleanField(default=True, db_index=True)
created = models.DateTimeField(auto_now_add=True)
archived = models.DateTimeField(blank=True, null=True)
last_modified = models.DateTimeField(auto_now=True)
objects = TriggerRuleManager()
def get_configuration(self):
"""Returns the configuration for this trigger rule
:returns: The configuration for this trigger rule
:rtype: :class:`trigger.configuration.trigger_rule.TriggerRuleConfiguration`
:raises :class:`trigger.configuration.exceptions.InvalidTriggerType`: If the trigger type is invalid
"""
from trigger.handler import get_trigger_rule_handler
handler = get_trigger_rule_handler(self.type)
return handler.create_configuration(self.configuration)
def natural_key(self):
"""Django method to define the natural key for a trigger rule as the name
:returns: A tuple representing the natural key
:rtype: tuple(str,)
"""
return (self.name,)
class Meta(object):
"""meta information for the db"""
db_table = 'trigger_rule'
| [
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.utils.timezone.now",
"trigger.handler.get_trigger_rule_handler",
"django.db.models.DateTimeField",
"django.db.models.CharField"
]
| [((2395, 2441), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(50)'}), '(db_index=True, max_length=50)\n', (2411, 2441), False, 'from django.db import models, transaction\n'), ((2453, 2547), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""trigger.TriggerRule"""'], {'blank': '(True)', 'null': '(True)', 'on_delete': 'models.PROTECT'}), "('trigger.TriggerRule', blank=True, null=True, on_delete=\n models.PROTECT)\n", (2470, 2547), False, 'from django.db import models, transaction\n'), ((2631, 2666), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'db_index': '(True)'}), '(db_index=True)\n', (2651, 2666), False, 'from django.db import models, transaction\n'), ((6235, 6281), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'db_index': '(True)'}), '(max_length=50, db_index=True)\n', (6251, 6281), False, 'from django.db import models, transaction\n'), ((6293, 6336), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(50)'}), '(blank=True, max_length=50)\n', (6309, 6336), False, 'from django.db import models, transaction\n'), ((6429, 6477), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)', 'db_index': '(True)'}), '(default=True, db_index=True)\n', (6448, 6477), False, 'from django.db import models, transaction\n'), ((6493, 6532), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (6513, 6532), False, 'from django.db import models, transaction\n'), ((6548, 6591), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (6568, 6591), False, 'from django.db import models, transaction\n'), ((6612, 6647), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (6632, 6647), False, 'from django.db import models, transaction\n'), ((3333, 3338), 'django.utils.timezone.now', 'now', ([], {}), '()\n', (3336, 3338), False, 'from django.utils.timezone import now\n'), ((7124, 7159), 'trigger.handler.get_trigger_rule_handler', 'get_trigger_rule_handler', (['self.type'], {}), '(self.type)\n', (7148, 7159), False, 'from trigger.handler import get_trigger_rule_handler\n')] |
# Copyright 2018 Flight Lab authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for components related to running apps."""
import subprocess
import threading
from components import base
from protos import controller_pb2
from utils import app
class AppComponent(base.Component):
"""Component to run command-line based app on any platform.
This component can start app, restart app upon crash, and stop app.
Events:
"status_changed": when status of the app is changed.
Args:
app_component: instance of this class.
"""
def __init__(self, proto, *args, **kwargs):
"""Initializes the component.
Args:
proto: flightlab.App proto defining app details and options.
"""
super(AppComponent, self).__init__(proto, *args, **kwargs)
self._app = app.Application(
name=self.name,
bin_path=self.settings.executable_path,
arguments=(list(self.settings.arguments)
if self.settings.arguments else []),
working_dir=self.settings.working_dir,
restart_on_crash=(self.settings.restart_on_crash
if self.settings.restart_on_crash else False),
env=(self.settings.env if self.settings.env else None))
self._app.on('started', self._on_app_started)
self._app.on('stopped', self._on_app_stopped)
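    # threading.Timer runs self._check_status once, one second after start();
    # it does not reschedule itself for periodic polling.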
self._monitor = threading.Timer(1, self._check_status)
self._monitor.start()
def close(self):
if self._monitor:
self._monitor.cancel()
self._monitor = None
self._app.stop()
def _check_status(self):
if self._app.has_running_instance():
component_status = controller_pb2.Component.ON
app_status = controller_pb2.App.RUNNING
else:
component_status = controller_pb2.Component.OFF
app_status = controller_pb2.App.NOT_RUNNING
if (self.proto.status != component_status or
self.settings.status != app_status):
self.proto.status = component_status
self.settings.status = app_status
self.emit('status_changed', self)
def _start(self):
self.logger.info('[App - {0}] Starting...'.format(self.name))
self._app.start()
def _stop(self):
self.logger.info('[App - {0}] Stopping...'.format(self.name))
self._app.stop()
def _restart(self):
self._stop()
self._start()
def _on_app_started(self, app):
self.logger.info('[App - {0}] Started.'.format(self.name))
self.settings.status = controller_pb2.App.RUNNING
self.proto.status = controller_pb2.Component.ON
self.emit('status_changed', self)
def _on_app_stopped(self, app):
self.logger.info('[App - {0}] Stopped.'.format(self.name))
self.settings.status = controller_pb2.App.NOT_RUNNING
self.proto.status = controller_pb2.Component.OFF
self.emit('status_changed', self)
class CommandLineComponent(base.Component):
"""Component to run command-line based apps on any platform."""
def _start(self):
for cmd in self.settings.when_on:
self.logger.info('[{0}] Running: {1}'.format(self.name, cmd))
ret = subprocess.call(cmd)
self.logger.info('[{0}] Done (return code={1})'.format(self.name, ret))
def _stop(self):
for cmd in self.settings.when_off:
self.logger.info('[{0}] Running: {1}'.format(self.name, cmd))
ret = subprocess.call(cmd)
      self.logger.info('[{0}] Done (return code={1})'.format(self.name, ret))
| [
"threading.Timer",
"subprocess.call"
]
| [((1865, 1903), 'threading.Timer', 'threading.Timer', (['(1)', 'self._check_status'], {}), '(1, self._check_status)\n', (1880, 1903), False, 'import threading\n'), ((3563, 3583), 'subprocess.call', 'subprocess.call', (['cmd'], {}), '(cmd)\n', (3578, 3583), False, 'import subprocess\n'), ((3801, 3821), 'subprocess.call', 'subprocess.call', (['cmd'], {}), '(cmd)\n', (3816, 3821), False, 'import subprocess\n')] |
"""Randomize the minitaur_gym_alternating_leg_env when reset() is called.
The randomization include swing_offset, extension_offset of all legs that mimics
bent legs, desired_pitch from user input, battery voltage and motor damping.
"""
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
parentdir = os.path.dirname(os.path.dirname(parentdir))
os.sys.path.insert(0, parentdir)
import numpy as np
import tensorflow.compat.v1 as tf
from pybullet_envs.minitaur.envs import env_randomizer_base
# Absolute range.
NUM_LEGS = 4
BATTERY_VOLTAGE_RANGE = (14.8, 16.8)
MOTOR_VISCOUS_DAMPING_RANGE = (0, 0.01)
class MinitaurAlternatingLegsEnvRandomizer(env_randomizer_base.EnvRandomizerBase):
"""A randomizer that changes the minitaur_gym_alternating_leg_env."""
def __init__(self,
perturb_swing_bound=0.1,
perturb_extension_bound=0.1,
perturb_desired_pitch_bound=0.01):
super(MinitaurAlternatingLegsEnvRandomizer, self).__init__()
self.perturb_swing_bound = perturb_swing_bound
self.perturb_extension_bound = perturb_extension_bound
self.perturb_desired_pitch_bound = perturb_desired_pitch_bound
def randomize_env(self, env):
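    # Draw fresh per-leg swing/extension offsets, a desired-pitch offset, a
    # battery voltage and a motor viscous damping each time the env is reset.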
perturb_magnitude = np.random.uniform(low=-self.perturb_swing_bound,
high=self.perturb_swing_bound,
size=NUM_LEGS)
env.set_swing_offset(perturb_magnitude)
tf.logging.info("swing_offset: {}".format(perturb_magnitude))
perturb_magnitude = np.random.uniform(low=-self.perturb_extension_bound,
high=self.perturb_extension_bound,
size=NUM_LEGS)
env.set_extension_offset(perturb_magnitude)
tf.logging.info("extension_offset: {}".format(perturb_magnitude))
perturb_magnitude = np.random.uniform(low=-self.perturb_desired_pitch_bound,
high=self.perturb_desired_pitch_bound)
env.set_desired_pitch(perturb_magnitude)
tf.logging.info("desired_pitch: {}".format(perturb_magnitude))
randomized_battery_voltage = np.random.uniform(BATTERY_VOLTAGE_RANGE[0],
BATTERY_VOLTAGE_RANGE[1])
env.minitaur.SetBatteryVoltage(randomized_battery_voltage)
tf.logging.info("battery_voltage: {}".format(randomized_battery_voltage))
randomized_motor_damping = np.random.uniform(MOTOR_VISCOUS_DAMPING_RANGE[0],
MOTOR_VISCOUS_DAMPING_RANGE[1])
env.minitaur.SetMotorViscousDamping(randomized_motor_damping)
tf.logging.info("motor_damping: {}".format(randomized_motor_damping))
| [
"os.path.dirname",
"inspect.currentframe",
"os.sys.path.insert",
"numpy.random.uniform"
]
| [((457, 489), 'os.sys.path.insert', 'os.sys.path.insert', (['(0)', 'parentdir'], {}), '(0, parentdir)\n', (475, 489), False, 'import os, inspect\n'), ((372, 399), 'os.path.dirname', 'os.path.dirname', (['currentdir'], {}), '(currentdir)\n', (387, 399), False, 'import os, inspect\n'), ((429, 455), 'os.path.dirname', 'os.path.dirname', (['parentdir'], {}), '(parentdir)\n', (444, 455), False, 'import os, inspect\n'), ((1317, 1416), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-self.perturb_swing_bound)', 'high': 'self.perturb_swing_bound', 'size': 'NUM_LEGS'}), '(low=-self.perturb_swing_bound, high=self.\n perturb_swing_bound, size=NUM_LEGS)\n', (1334, 1416), True, 'import numpy as np\n'), ((1631, 1738), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-self.perturb_extension_bound)', 'high': 'self.perturb_extension_bound', 'size': 'NUM_LEGS'}), '(low=-self.perturb_extension_bound, high=self.\n perturb_extension_bound, size=NUM_LEGS)\n', (1648, 1738), True, 'import numpy as np\n'), ((1961, 2061), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-self.perturb_desired_pitch_bound)', 'high': 'self.perturb_desired_pitch_bound'}), '(low=-self.perturb_desired_pitch_bound, high=self.\n perturb_desired_pitch_bound)\n', (1978, 2061), True, 'import numpy as np\n'), ((2245, 2314), 'numpy.random.uniform', 'np.random.uniform', (['BATTERY_VOLTAGE_RANGE[0]', 'BATTERY_VOLTAGE_RANGE[1]'], {}), '(BATTERY_VOLTAGE_RANGE[0], BATTERY_VOLTAGE_RANGE[1])\n', (2262, 2314), True, 'import numpy as np\n'), ((2539, 2624), 'numpy.random.uniform', 'np.random.uniform', (['MOTOR_VISCOUS_DAMPING_RANGE[0]', 'MOTOR_VISCOUS_DAMPING_RANGE[1]'], {}), '(MOTOR_VISCOUS_DAMPING_RANGE[0],\n MOTOR_VISCOUS_DAMPING_RANGE[1])\n', (2556, 2624), True, 'import numpy as np\n'), ((318, 340), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (338, 340), False, 'import os, inspect\n')] |
"""
The TensorProductState class and supporting functionality.
"""
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
import functools as _functools
import itertools as _itertools
import numpy as _np
from pygsti.modelmembers.states.state import State as _State
from pygsti.modelmembers import modelmember as _modelmember, term as _term
from pygsti.baseobjs import statespace as _statespace
from pygsti.tools import listtools as _lt
from pygsti.tools import matrixtools as _mt
class TensorProductState(_State):
"""
A state vector that is a tensor-product of other state vectors.
Parameters
----------
factors : list of States
a list of the component states to take the tensor product of.
state_space : StateSpace, optional
The state space for this operation.
"""
def __init__(self, factors, state_space):
assert(len(factors) > 0), "Must have at least one factor!"
self.factors = factors # do *not* copy - needs to reference common objects
evotype = self.factors[0]._evotype
rep = evotype.create_tensorproduct_state_rep([f._rep for f in factors], state_space)
_State.__init__(self, rep, evotype)
self.init_gpindices() # initialize our gpindices based on sub-members
self._update_rep() # initializes rep data
#Note: no to_memoized_dict needed, as ModelMember version does all we need.
@classmethod
def _from_memoized_dict(cls, mm_dict, serial_memo):
state_space = _statespace.StateSpace.from_nice_serialization(mm_dict['state_space'])
factors = [serial_memo[i] for i in mm_dict['submembers']]
return cls(factors, state_space)
def submembers(self):
"""
Get the ModelMember-derived objects contained in this one.
Returns
-------
list
"""
return self.factors # factor POVM object
def _update_rep(self):
self._rep.reps_have_changed()
@property
def parameter_labels(self):
"""
An array of labels (usually strings) describing this model member's parameters.
"""
vl = _np.empty(self.num_params, dtype=object)
for factor_state, factor_local_inds in zip(self.factors, self._submember_rpindices):
vl[factor_local_inds] = factor_state.parameter_labels
return vl
def to_dense(self, on_space='minimal', scratch=None):
"""
Return this state vector as a (dense) numpy array.
The memory in `scratch` maybe used when it is not-None.
Parameters
----------
on_space : {'minimal', 'Hilbert', 'HilbertSchmidt'}
The space that the returned dense operation acts upon. For unitary matrices and bra/ket vectors,
use `'Hilbert'`. For superoperator matrices and super-bra/super-ket vectors use `'HilbertSchmidt'`.
`'minimal'` means that `'Hilbert'` is used if possible given this operator's evolution type, and
otherwise `'HilbertSchmidt'` is used.
scratch : numpy.ndarray, optional
scratch space available for use.
Returns
-------
numpy.ndarray
"""
return self._rep.to_dense(on_space)
def taylor_order_terms(self, order, max_polynomial_vars=100, return_coeff_polys=False):
"""
Get the `order`-th order Taylor-expansion terms of this state vector.
This function either constructs or returns a cached list of the terms at
the given order. Each term is "rank-1", meaning that it is a state
preparation followed by or POVM effect preceded by actions on a
density matrix `rho` of the form:
`rho -> A rho B`
The coefficients of these terms are typically polynomials of the
State's parameters, where the polynomial's variable indices index the
*global* parameters of the State's parent (usually a :class:`Model`)
, not the State's local parameter array (i.e. that returned from
`to_vector`).
Parameters
----------
order : int
The order of terms to get.
max_polynomial_vars : int, optional
maximum number of variables the created polynomials can have.
return_coeff_polys : bool
Whether a parallel list of locally-indexed (using variable indices
corresponding to *this* object's parameters rather than its parent's)
polynomial coefficients should be returned as well.
Returns
-------
terms : list
A list of :class:`RankOneTerm` objects.
coefficients : list
Only present when `return_coeff_polys == True`.
A list of *compact* polynomial objects, meaning that each element
is a `(vtape,ctape)` 2-tuple formed by concatenating together the
output of :method:`Polynomial.compact`.
"""
terms = []
fnq = [int(round(_np.log2(f.dim))) // 2 for f in self.factors] # num of qubits per factor
# assumes density matrix evolution
total_nQ = sum(fnq) # total number of qubits
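        # Each partition p splits the requested Taylor order among the factors,
        # so factor i contributes its order-p[i] terms below.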
for p in _lt.partition_into(order, len(self.factors)):
factor_lists = [self.factors[i].taylor_order_terms(pi, max_polynomial_vars) for i, pi in enumerate(p)]
# When possible, create COLLAPSED factor_lists so each factor has just a single
# (State) pre & post op, which can be formed into the new terms'
# TensorProdState ops.
# - DON'T collapse stabilizer states & clifford ops - can't for POVMs
collapsible = False # bool(self._evotype =="svterm") # need to use reps for collapsing now... TODO?
if collapsible:
factor_lists = [[t.collapse_vec() for t in fterms] for fterms in factor_lists]
for factors in _itertools.product(*factor_lists):
# create a term with a TensorProdState - Note we always create
# "prep"-mode vectors, since even when self._prep_or_effect == "effect" these
# vectors are created with factor (prep- or effect-type) States not factor POVMs
# we workaround this by still allowing such "prep"-mode
# TensorProdStates to be represented as effects (i.e. in torep('effect'...) works)
coeff = _functools.reduce(lambda x, y: x.mult(y), [f.coeff for f in factors])
pre_rep = self._evotype.create_tensorproduct_state_rep(
[f.pre_state for f in factors if (f.pre_state is not None)], self.state_space)
post_rep = self._evotype.create_tensorproduct_state_rep(
[f.post_state for f in factors if (f.post_state is not None)], self.state_space)
term = _term.RankOnePolynomialPrepTerm.create_from(coeff, pre_rep, post_rep,
self._evotype, self.state_space)
if not collapsible: # then may need to add more ops. Assume factor ops are clifford gates
# Embed each factors ops according to their target qubit(s) and just daisy chain them
ss = _statespace.QubitSpace(total_nQ); curQ = 0
for f, nq in zip(factors, fnq):
targetLabels = tuple(range(curQ, curQ + nq)); curQ += nq
term._rep.pre_ops.extend([self._evotype.create_embedded_rep(ss, targetLabels, op)
for op in f.pre_ops]) # embed and add ops
term._rep.post_ops.extend([self._evotype.create_embedded_rep(ss, targetLabels, op)
for op in f.post_ops]) # embed and add ops
terms.append(term)
if return_coeff_polys:
def _decompose_indices(x):
return tuple(_modelmember._decompose_gpindices(
self.gpindices, _np.array(x, _np.int64)))
poly_coeffs = [t.coeff.map_indices(_decompose_indices) for t in terms] # with *local* indices
tapes = [poly.compact(complex_coeff_tape=True) for poly in poly_coeffs]
if len(tapes) > 0:
vtape = _np.concatenate([t[0] for t in tapes])
ctape = _np.concatenate([t[1] for t in tapes])
else:
vtape = _np.empty(0, _np.int64)
ctape = _np.empty(0, complex)
coeffs_as_compact_polys = (vtape, ctape)
#self.local_term_poly_coeffs[order] = coeffs_as_compact_polys #FUTURE?
return terms, coeffs_as_compact_polys
else:
return terms # Cache terms in FUTURE?
@property
def num_params(self):
"""
Get the number of independent parameters which specify this state vector.
Returns
-------
int
the number of independent parameters.
"""
return len(self.gpindices_as_array())
def to_vector(self):
"""
Get the state vector parameters as an array of values.
Returns
-------
numpy array
The parameters as a 1D array with length num_params().
"""
v = _np.empty(self.num_params, 'd')
for factor_state, factor_local_inds in zip(self.factors, self._submember_rpindices):
v[factor_local_inds] = factor_state.to_vector()
return v
def from_vector(self, v, close=False, dirty_value=True):
"""
Initialize the state vector using a 1D array of parameters.
Parameters
----------
v : numpy array
The 1D vector of state vector parameters. Length
must == num_params()
close : bool, optional
Whether `v` is close to this state vector's current
set of parameters. Under some circumstances, when this
is true this call can be completed more quickly.
dirty_value : bool, optional
The value to set this object's "dirty flag" to before exiting this
call. This is passed as an argument so it can be updated *recursively*.
Leave this set to `True` unless you know what you're doing.
Returns
-------
None
"""
for factor_state, factor_local_inds in zip(self.factors, self._submember_rpindices):
factor_state.from_vector(v[factor_local_inds], close, dirty_value)
#Update representation, which may be a dense matrix or
# just fast-kron arrays or a stabilizer state.
self._update_rep() # TODO - how does this apply to state reps??
def deriv_wrt_params(self, wrt_filter=None):
"""
The element-wise derivative this state vector.
Construct a matrix whose columns are the derivatives of the state vector
with respect to a single param. Thus, each column is of length
dimension and there is one column per state vector parameter.
An empty 2D array in the StaticState case (num_params == 0).
Parameters
----------
wrt_filter : list or numpy.ndarray
List of parameter indices to take derivative with respect to.
(None means to use all the this operation's parameters.)
Returns
-------
numpy array
Array of derivatives, shape == (dimension, num_params)
"""
typ = self.factors[0].to_dense(on_space='minimal').dtype if len(self.factors) > 0 else 'd'
#HACK to deal with fact that output of to_dense is really what is differentiated
# but this may not match self.dim == self.state_space.dim, e.g. for pure state vecs.
dims = [len(fct.to_dense(on_space='minimal')) for fct in self.factors]
dim = int(_np.product(dims))
derivMx = _np.zeros((dim, self.num_params), typ)
#Product rule to compute jacobian
# loop over the spamvec/povm we differentiate wrt:
for i, (fct, fct_local_inds, fct_dim) in enumerate(zip(self.factors, self._submember_rpindices, dims)):
vec = fct
if vec.num_params == 0: continue # no contribution
deriv = vec.deriv_wrt_params(None) # TODO: use filter?? / make relative to this gate...
deriv.shape = (fct_dim, vec.num_params)
if i > 0: # factors before ith
pre = self.factors[0].to_dense(on_space='minimal')
for vecA in self.factors[1:i]:
pre = _np.kron(pre, vecA.to_dense(on_space='minimal'))
deriv = _np.kron(pre[:, None], deriv) # add a dummy 1-dim to 'pre' and do kron properly...
if i + 1 < len(self.factors): # factors after ith
post = self.factors[i + 1].to_dense(on_space='minimal')
for vecA in self.factors[i + 2:]:
post = _np.kron(post, vecA.to_dense(on_space='minimal'))
deriv = _np.kron(deriv, post[:, None]) # add a dummy 1-dim to 'post' and do kron properly...
assert(fct_local_inds is not None), \
"Error: gpindices has not been initialized for factor %d - cannot compute derivative!" % i
derivMx[:, fct_local_inds] += deriv
derivMx.shape = (dim, self.num_params) # necessary?
if wrt_filter is None:
return derivMx
else:
return _np.take(derivMx, wrt_filter, axis=1)
def has_nonzero_hessian(self):
"""
Whether this state vector has a non-zero Hessian with respect to its parameters.
Returns
-------
bool
"""
return False
def __str__(self):
s = "Tensor product %s vector with length %d\n" % (self._prep_or_effect, self.dim)
#ar = self.to_dense()
#s += _mt.mx_to_string(ar, width=4, prec=2)
# factors are just other States
s += " x ".join([_mt.mx_to_string(fct.to_dense(on_space='minimal'), width=4, prec=2) for fct in self.factors])
return s
| [
"numpy.product",
"itertools.product",
"pygsti.modelmembers.states.state.State.__init__",
"numpy.take",
"numpy.kron",
"numpy.zeros",
"numpy.array",
"numpy.empty",
"pygsti.baseobjs.statespace.QubitSpace",
"numpy.concatenate",
"pygsti.modelmembers.term.RankOnePolynomialPrepTerm.create_from",
"numpy.log2",
"pygsti.baseobjs.statespace.StateSpace.from_nice_serialization"
]
| [((1791, 1826), 'pygsti.modelmembers.states.state.State.__init__', '_State.__init__', (['self', 'rep', 'evotype'], {}), '(self, rep, evotype)\n', (1806, 1826), True, 'from pygsti.modelmembers.states.state import State as _State\n'), ((2134, 2204), 'pygsti.baseobjs.statespace.StateSpace.from_nice_serialization', '_statespace.StateSpace.from_nice_serialization', (["mm_dict['state_space']"], {}), "(mm_dict['state_space'])\n", (2180, 2204), True, 'from pygsti.baseobjs import statespace as _statespace\n'), ((2764, 2804), 'numpy.empty', '_np.empty', (['self.num_params'], {'dtype': 'object'}), '(self.num_params, dtype=object)\n', (2773, 2804), True, 'import numpy as _np\n'), ((9902, 9933), 'numpy.empty', '_np.empty', (['self.num_params', '"""d"""'], {}), "(self.num_params, 'd')\n", (9911, 9933), True, 'import numpy as _np\n'), ((12509, 12547), 'numpy.zeros', '_np.zeros', (['(dim, self.num_params)', 'typ'], {}), '((dim, self.num_params), typ)\n', (12518, 12547), True, 'import numpy as _np\n'), ((6499, 6532), 'itertools.product', '_itertools.product', (['*factor_lists'], {}), '(*factor_lists)\n', (6517, 6532), True, 'import itertools as _itertools\n'), ((12471, 12488), 'numpy.product', '_np.product', (['dims'], {}), '(dims)\n', (12482, 12488), True, 'import numpy as _np\n'), ((14076, 14113), 'numpy.take', '_np.take', (['derivMx', 'wrt_filter'], {'axis': '(1)'}), '(derivMx, wrt_filter, axis=1)\n', (14084, 14113), True, 'import numpy as _np\n'), ((7437, 7544), 'pygsti.modelmembers.term.RankOnePolynomialPrepTerm.create_from', '_term.RankOnePolynomialPrepTerm.create_from', (['coeff', 'pre_rep', 'post_rep', 'self._evotype', 'self.state_space'], {}), '(coeff, pre_rep, post_rep, self.\n _evotype, self.state_space)\n', (7480, 7544), True, 'from pygsti.modelmembers import modelmember as _modelmember, term as _term\n'), ((8904, 8942), 'numpy.concatenate', '_np.concatenate', (['[t[0] for t in tapes]'], {}), '([t[0] for t in tapes])\n', (8919, 8942), True, 'import numpy as _np\n'), ((8967, 9005), 'numpy.concatenate', '_np.concatenate', (['[t[1] for t in tapes]'], {}), '([t[1] for t in tapes])\n', (8982, 9005), True, 'import numpy as _np\n'), ((9048, 9071), 'numpy.empty', '_np.empty', (['(0)', '_np.int64'], {}), '(0, _np.int64)\n', (9057, 9071), True, 'import numpy as _np\n'), ((9096, 9117), 'numpy.empty', '_np.empty', (['(0)', 'complex'], {}), '(0, complex)\n', (9105, 9117), True, 'import numpy as _np\n'), ((13260, 13289), 'numpy.kron', '_np.kron', (['pre[:, None]', 'deriv'], {}), '(pre[:, None], deriv)\n', (13268, 13289), True, 'import numpy as _np\n'), ((13631, 13661), 'numpy.kron', '_np.kron', (['deriv', 'post[:, None]'], {}), '(deriv, post[:, None])\n', (13639, 13661), True, 'import numpy as _np\n'), ((7847, 7879), 'pygsti.baseobjs.statespace.QubitSpace', '_statespace.QubitSpace', (['total_nQ'], {}), '(total_nQ)\n', (7869, 7879), True, 'from pygsti.baseobjs import statespace as _statespace\n'), ((5597, 5612), 'numpy.log2', '_np.log2', (['f.dim'], {}), '(f.dim)\n', (5605, 5612), True, 'import numpy as _np\n'), ((8631, 8654), 'numpy.array', '_np.array', (['x', '_np.int64'], {}), '(x, _np.int64)\n', (8640, 8654), True, 'import numpy as _np\n')] |
from .exceptions import MazeNotSolved, AlgorithmNotFound
from .dijkstra import Dijkstra
from .astar import Astar
from functools import wraps
import warnings
from daedalus import Maze as _maze
from PIL import Image
warnings.simplefilter("once", UserWarning)
class Maze:
"""
Create a maze and solve it.
Available algorithms:
dijkstra
astar (WIP)
Steps:
1. Create maze using the daedalus library.
2. Convert maze to graph.
3. Solve maze with algorithm.
"""
    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0)
RED = (255, 0, 0)
def __init__(self, width, height, algorithm="dijkstra"):
"""Set algorithm to be used when solving.
Args:
algorithm (str) to be used when solving maze
width (int) of maze in pixels
height (int) of maze in pixels
"""
self.algorithm = algorithm
if not width % 2 or not height % 2:
warnings.warn(
"Using even width or height, use even numbers for optimal images"
)
self._create_maze(width, height)
self._create_graph()
self.width = width
self.height = height
def _create_maze(self, width, height):
"""Make maze to be solved and add border to maze.
Args:
width (int) of maze
height (int) of maze
"""
# create maze
self.maze = _maze(width, height)
self.maze.create_perfect()
# define maze variables
self.entrance = self.maze.entrance
self.exit = self.maze.exit
# add index to maze
self.maze = {
row_i: {item_i: item for item_i, item in enumerate(row)}
for row_i, row in enumerate(self.maze)
}
def _create_graph(self):
"""Remove unnecessary states from maze and convert maze to graph to be
solved."""
self.graph = {}
# convert to graph
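        # Every open cell (value != 1) becomes a node; edges of weight 1 connect
        # it to any open cell immediately left, right, above or below.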
for column in self.maze.keys():
for row in self.maze[column].keys():
item = self.maze[column][row]
if item != 1:
neighbours = []
try:
if self.maze[column][row - 1] != 1:
neighbours.append(["left", (column, row - 1)])
except KeyError:
None
try:
if self.maze[column][row + 1] != 1:
neighbours.append(["right", (column, row + 1)])
except KeyError:
None
try:
if self.maze[column - 1][row] != 1:
neighbours.append(["above", (column - 1, row)])
except KeyError:
None
try:
if self.maze[column + 1][row] != 1:
neighbours.append(["below", (column + 1, row)])
except KeyError:
None
self.graph[(column, row)] = {x[:][1]: 1 for x in neighbours}
# TODO: remove unnecessary states
def _maze_maker(file_name):
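        # Decorator factory: the wrapped function is called once per maze cell to
        # append an RGB tuple to `data`, which is then saved as an image.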
def real_decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
data = []
for row_i, row in enumerate(list(self.maze)):
for item_i, item in enumerate(self.maze[row].values()):
func(self, data, item, row_i=row_i, item_i=item_i)
# save maze
image = Image.new("RGB", (self.width, self.height))
image.putdata(data)
image.save(file_name)
return wrapper
return real_decorator
@_maze_maker("maze.png")
def save(self, data, item, row_i=None, item_i=None):
"""Save maze locally as an image."""
# invert maze because maze is incorrect
        if item:
            data.append(self.BLACK)
        else:
            data.append(self.WHITE)
def solve(self):
""" Solve maze using specified algorithm.
Returns:
shortest path as a queue from start to finish of maze
"""
if self.algorithm == "astar":
algorithm = Astar()
elif self.algorithm == "dijkstra":
algorithm = Dijkstra()
else:
raise AlgorithmNotFound(
f"Invalid algorithm: {self.algorithm}. See help({type(self).__name__}) for available algorithms."
)
# add nodes to graph
for node in self.graph:
algorithm.add_node(node, self.graph[node])
# pydaedalus stores y then x value which need to be reversed
self.entrance = tuple(reversed(self.entrance))
self.exit = tuple(reversed(self.exit))
self.path = algorithm.shortest_path(self.entrance, self.exit)
@_maze_maker("solution.png")
def save_solution(self, data, item, row_i=None, item_i=None):
"""Save maze image and the shortest path."""
if not hasattr(self, "path"):
raise MazeNotSolved(
f"Maze must be solved to save solution. Run {type(self).__name__}.solve() first."
)
if (row_i, item_i) in self.path:
data.append(self.RED)
        elif item:
            data.append(self.BLACK)
        else:
            data.append(self.WHITE)
def __str__(self):
"""Just cause it looks nice."""
string = []
for row in self.maze:
string.append(["█" if item else " " for item in self.maze[row].values()])
return "\n".join(["".join(line) for line in string])
def __repr__(self):
"""Easier on the eyes."""
return f"Maze(algorithm='{self.algorithm}', width={self.width}, height={self.height})"
| [
"daedalus.Maze",
"PIL.Image.new",
"functools.wraps",
"warnings.simplefilter",
"warnings.warn"
]
| [((217, 259), 'warnings.simplefilter', 'warnings.simplefilter', (['"""once"""', 'UserWarning'], {}), "('once', UserWarning)\n", (238, 259), False, 'import warnings\n'), ((1427, 1447), 'daedalus.Maze', '_maze', (['width', 'height'], {}), '(width, height)\n', (1432, 1447), True, 'from daedalus import Maze as _maze\n'), ((954, 1039), 'warnings.warn', 'warnings.warn', (['"""Using even width or height, use even numbers for optimal images"""'], {}), "('Using even width or height, use even numbers for optimal images'\n )\n", (967, 1039), False, 'import warnings\n'), ((3267, 3278), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (3272, 3278), False, 'from functools import wraps\n'), ((3619, 3662), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(self.width, self.height)'], {}), "('RGB', (self.width, self.height))\n", (3628, 3662), False, 'from PIL import Image\n')] |
#!/usr/bin/python
"""Interface to OpenShift oc command"""
import os
import shlex
import shutil
import subprocess
from ansible.module_utils.basic import AnsibleModule
ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')]
def locate_oc_binary():
"""Find and return oc binary file"""
# https://github.com/openshift/openshift-ansible/issues/3410
# oc can be in /usr/local/bin in some cases, but that may not
# be in $PATH due to ansible/sudo
paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS
oc_binary = 'oc'
# Use shutil.which if it is available, otherwise fallback to a naive path search
try:
which_result = shutil.which(oc_binary, path=os.pathsep.join(paths))
if which_result is not None:
oc_binary = which_result
except AttributeError:
for path in paths:
if os.path.exists(os.path.join(path, oc_binary)):
oc_binary = os.path.join(path, oc_binary)
break
return oc_binary
def main():
"""Module that executes commands on a remote OpenShift cluster"""
module = AnsibleModule(
argument_spec=dict(
namespace=dict(type="str", required=False),
config_file=dict(type="str", required=True),
cmd=dict(type="str", required=True),
extra_args=dict(type="list", default=[]),
),
)
cmd = [locate_oc_binary(), '--config', module.params["config_file"]]
if module.params["namespace"]:
cmd += ['-n', module.params["namespace"]]
cmd += shlex.split(module.params["cmd"]) + module.params["extra_args"]
failed = True
try:
cmd_result = subprocess.check_output(list(cmd), stderr=subprocess.STDOUT)
failed = False
except subprocess.CalledProcessError as exc:
cmd_result = '[rc {}] {}\n{}'.format(exc.returncode, ' '.join(exc.cmd), exc.output)
except OSError as exc:
# we get this when 'oc' is not there
cmd_result = str(exc)
module.exit_json(
changed=False,
failed=failed,
result=cmd_result,
)
if __name__ == '__main__':
main()
| [
"shlex.split",
"os.pathsep.join",
"os.path.join",
"os.environ.get",
"os.path.expanduser"
]
| [((215, 242), 'os.path.expanduser', 'os.path.expanduser', (['"""~/bin"""'], {}), "('~/bin')\n", (233, 242), False, 'import os\n'), ((1600, 1633), 'shlex.split', 'shlex.split', (["module.params['cmd']"], {}), "(module.params['cmd'])\n", (1611, 1633), False, 'import shlex\n'), ((492, 526), 'os.environ.get', 'os.environ.get', (['"""PATH"""', 'os.defpath'], {}), "('PATH', os.defpath)\n", (506, 526), False, 'import os\n'), ((740, 762), 'os.pathsep.join', 'os.pathsep.join', (['paths'], {}), '(paths)\n', (755, 762), False, 'import os\n'), ((922, 951), 'os.path.join', 'os.path.join', (['path', 'oc_binary'], {}), '(path, oc_binary)\n', (934, 951), False, 'import os\n'), ((982, 1011), 'os.path.join', 'os.path.join', (['path', 'oc_binary'], {}), '(path, oc_binary)\n', (994, 1011), False, 'import os\n')] |
import fractions
class Network(object):
def __init__(self, network):
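        # Expected shape, inferred from the accessors below:
        # network[link_type][node_key] -> {index: neighbour_key}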
self.network = network
def degree(self, link_type, key):
return len(self.network.get(link_type).get(key))
def average_degree(self, link_type):
degree = 0
for link in self.network.get(link_type).itervalues():
degree += len(link)
return float(degree) / float(len(self.network.get(link_type)))
def nn_degree(self, link_type, link_n_type, key):
degree = self.degree(link_type, key)
nn_degree = 0
        for n_key in self.network.get(link_type).get(key).itervalues():
nn_degree += self.degree(link_n_type, n_key)
return '%d/%d' % (nn_degree, degree)
def jaccard_index(self, set_a, set_b):
n = len(set_a & set_b)
return float(n)/float(len(set_a) + len(set_b) - n)
def jaccard_similarity(self, link_type, key_a, key_b, return_string=False):
key_a = int(key_a)
key_b = int(key_b)
set_a = set(self.network.get(link_type).get(key_a).values())
set_b = set(self.network.get(link_type).get(key_b).values())
if return_string:
intersection = len(set_a & set_b)
union = len(set_a | set_b)
gcd = fractions.gcd(intersection, union)
return '%d/%d' % (intersection/gcd, union/gcd)
return self.jaccard_index(set_a, set_b)
def collaborative_similarity(self, link_type, link_n_type, key, return_string=False):
degree = self.degree(link_type, key)
if degree <= 1:
return 0
similarity_sum = 0
for n_key_1 in self.network.get(link_type).get(key).itervalues():
for n_key_2 in self.network.get(link_type).get(key).itervalues():
if n_key_1 == n_key_2:
continue
similarity_sum += self.jaccard_similarity(link_n_type, n_key_1, n_key_2)
if return_string:
precision = 1e3
new_similarity_sum = round(similarity_sum * degree*(degree-1) * precision)
gcd = fractions.gcd(new_similarity_sum, degree*(degree-1) * precision)
new_similarity_sum /= gcd
return '%d/%d' % (new_similarity_sum, degree*(degree-1)*round(new_similarity_sum/similarity_sum))
return similarity_sum / (degree*(degree-1))
def average_jaccard_similarity(self, link_type, link_n_type, return_string=False):
nodes = 0
similarity_sum = 0
for key_links in self.network.get(link_type).itervalues():
for n_key_1 in key_links.itervalues():
for n_key_2 in key_links.itervalues():
if n_key_1 == n_key_2:
continue
nodes += 1
similarity_sum += self.jaccard_similarity(link_n_type, n_key_1, n_key_2)
if nodes == 0:
return 0
if return_string:
precision = 1e3
new_similarity_sum = round(similarity_sum * nodes * precision)
gcd = fractions.gcd(new_similarity_sum, nodes * precision)
new_similarity_sum /= gcd
return '%d/%d' % (new_similarity_sum, nodes*round(new_similarity_sum/similarity_sum))
return similarity_sum / nodes
def network_collaborative_similarity(self, link_type, link_n_type, return_string=False):
nodes = 0
similarity_sum = 0
for key, key_links in self.network.get(link_type).iteritems():
if self.degree(link_type, key) <= 1:
continue
nodes += 1
collaborative_similarity = self.collaborative_similarity(link_type, link_n_type, key)
similarity_sum += collaborative_similarity
if nodes == 0:
return 0
if return_string:
precision = 1e3
new_similarity_sum = round(similarity_sum * nodes * precision)
gcd = fractions.gcd(new_similarity_sum, nodes * precision)
new_similarity_sum /= gcd
return '%d/%d' % (new_similarity_sum, nodes*(new_similarity_sum/similarity_sum))
return similarity_sum/nodes
| [
"fractions.gcd"
]
| [((1247, 1281), 'fractions.gcd', 'fractions.gcd', (['intersection', 'union'], {}), '(intersection, union)\n', (1260, 1281), False, 'import fractions\n'), ((2065, 2133), 'fractions.gcd', 'fractions.gcd', (['new_similarity_sum', '(degree * (degree - 1) * precision)'], {}), '(new_similarity_sum, degree * (degree - 1) * precision)\n', (2078, 2133), False, 'import fractions\n'), ((3027, 3079), 'fractions.gcd', 'fractions.gcd', (['new_similarity_sum', '(nodes * precision)'], {}), '(new_similarity_sum, nodes * precision)\n', (3040, 3079), False, 'import fractions\n'), ((3905, 3957), 'fractions.gcd', 'fractions.gcd', (['new_similarity_sum', '(nodes * precision)'], {}), '(new_similarity_sum, nodes * precision)\n', (3918, 3957), False, 'import fractions\n')] |
import ansible
import pprint
from ansible import utils
from jinja2 import Environment, PackageLoader
from collections import namedtuple
from ansible.parsing.dataloader import DataLoader
from ansible.vars import VariableManager
from ansible.inventory import Inventory
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.plugins.callback import CallbackBase
from callbacks import PlaybookCallback
def invoke_ansible_playbook(module_path, e_vars, playbook_path="site.yml", console=True):
""" Invokes playbook """
loader = DataLoader()
variable_manager = VariableManager()
variable_manager.extra_vars = e_vars
inventory = Inventory(loader=loader,
variable_manager=variable_manager,
host_list=['localhost'])
passwords = {}
utils.VERBOSITY = 4
Options = namedtuple('Options', ['listtags',
'listtasks',
'listhosts',
'syntax',
'connection',
'module_path',
'forks',
'remote_user',
'private_key_file',
'ssh_common_args',
'ssh_extra_args',
'sftp_extra_args',
'scp_extra_args',
'become',
'become_method',
'become_user',
'verbosity',
'check'])
options = Options(listtags=False,
listtasks=False,
listhosts=False,
syntax=False,
connection='ssh',
module_path=module_path,
forks=100,
remote_user='root',
private_key_file=None,
ssh_common_args=None,
ssh_extra_args=None,
sftp_extra_args=None,
scp_extra_args=None,
become=False,
become_method=None,
become_user='root',
verbosity=utils.VERBOSITY,
check=False)
pbex = PlaybookExecutor(playbooks=[playbook_path],
inventory=inventory,
variable_manager=variable_manager,
loader=loader,
options=options,
passwords=passwords)
if not console:
cb = PlaybookCallback()
pbex._tqm._stdout_callback = cb
return_code = pbex.run()
results = cb.results
else:
results = pbex.run()
return results
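# Illustrative usage sketch (module path and extra vars below are placeholders):
#   results = invoke_ansible_playbook('/usr/share/ansible/plugins/modules',
#                                     {'target_host': 'localhost'},
#                                     playbook_path='site.yml', console=False)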
| [
"ansible.vars.VariableManager",
"collections.namedtuple",
"ansible.inventory.Inventory",
"callbacks.PlaybookCallback",
"ansible.parsing.dataloader.DataLoader",
"ansible.executor.playbook_executor.PlaybookExecutor"
]
| [((585, 597), 'ansible.parsing.dataloader.DataLoader', 'DataLoader', ([], {}), '()\n', (595, 597), False, 'from ansible.parsing.dataloader import DataLoader\n'), ((621, 638), 'ansible.vars.VariableManager', 'VariableManager', ([], {}), '()\n', (636, 638), False, 'from ansible.vars import VariableManager\n'), ((696, 785), 'ansible.inventory.Inventory', 'Inventory', ([], {'loader': 'loader', 'variable_manager': 'variable_manager', 'host_list': "['localhost']"}), "(loader=loader, variable_manager=variable_manager, host_list=[\n 'localhost'])\n", (705, 785), False, 'from ansible.inventory import Inventory\n'), ((890, 1188), 'collections.namedtuple', 'namedtuple', (['"""Options"""', "['listtags', 'listtasks', 'listhosts', 'syntax', 'connection',\n 'module_path', 'forks', 'remote_user', 'private_key_file',\n 'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',\n 'scp_extra_args', 'become', 'become_method', 'become_user', 'verbosity',\n 'check']"], {}), "('Options', ['listtags', 'listtasks', 'listhosts', 'syntax',\n 'connection', 'module_path', 'forks', 'remote_user', 'private_key_file',\n 'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',\n 'scp_extra_args', 'become', 'become_method', 'become_user', 'verbosity',\n 'check'])\n", (900, 1188), False, 'from collections import namedtuple\n'), ((2550, 2710), 'ansible.executor.playbook_executor.PlaybookExecutor', 'PlaybookExecutor', ([], {'playbooks': '[playbook_path]', 'inventory': 'inventory', 'variable_manager': 'variable_manager', 'loader': 'loader', 'options': 'options', 'passwords': 'passwords'}), '(playbooks=[playbook_path], inventory=inventory,\n variable_manager=variable_manager, loader=loader, options=options,\n passwords=passwords)\n', (2566, 2710), False, 'from ansible.executor.playbook_executor import PlaybookExecutor\n'), ((2876, 2894), 'callbacks.PlaybookCallback', 'PlaybookCallback', ([], {}), '()\n', (2892, 2894), False, 'from callbacks import PlaybookCallback\n')] |
#!/usr/bin/env python3
import itertools
import string
from elasticsearch import Elasticsearch,helpers
import sys
import os
from glob import glob
import pandas as pd
import json
host = sys.argv[1]
port = int(sys.argv[2])
alias = sys.argv[3]
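# Illustrative invocation (script name is a placeholder): load every CSV under
# /root/data into Elasticsearch, e.g.  python3 load_csv.py localhost 9200 mydata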
print(host)
print(port)
print(alias)
es = Elasticsearch([{'host': host, 'port': port}])
# create our test index
# Get all csv files in /root/data
files = [y for x in os.walk('/root/data') for y in glob(os.path.join(x[0], '*.csv'))]
count = 0
def clean_field(val):
val = val.split('.')
val = [i for i in val if i != '']
val = '_'.join(val)
val = val.split()
val = [i for i in val if i != '']
val = '_'.join(val)
val = val.split('/')
val = [i for i in val if i != '']
val = '_'.join(val)
return val
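# clean_field collapses '.', whitespace and '/' into underscores, dropping empty
# fragments, e.g. clean_field('My Data.File') -> 'My_Data_File'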
es.indices.delete(index=alias + '*', ignore=[400, 404])
indices = []
for file in files:
data = pd.read_csv(file, sep=None, engine='python')
index = alias + '_'.join(file.split('/'))
index = clean_field(index).lower().split('_csv')[0]
indices.append(index)
es.indices.create(index)
for col in data.columns:
if col.startswith('Unnamed'):
del data[col]
else:
data.rename(columns= { col : clean_field(col) },inplace=True )
data = data.reset_index() # Make sure there is no duplicate indexing
data.rename(columns={'index':'row'},inplace =True)
data['File'] = file
data['_id'] = data['File'] + '.{}.'.format(str(count)) + data.reset_index()['index'].apply(str)
data['_type'] = "document"
data['_index'] = index
records = data.to_json(orient='records')
records = json.loads(records)
helpers.bulk(es, records, chunk_size=100)
count += 1
print(es.count(index=index))
# Create an index table in elasticsearch to locate the files
indices_table = pd.DataFrame()
indices_table['Index'] = pd.Series(indices)
indices_table['File'] = pd.Series(files)
indices_table['Alias'] = alias
indices_table['_id'] = indices_table['Alias'] + '.' + indices_table['File']
indices_table['_type'] = "document"
indices_table['_index'] = alias + '_indices'
es.indices.create(alias + '_indices')
records = indices_table.to_json(orient='records')
records = json.loads(records)
helpers.bulk(es, records, chunk_size=100)
print(es.count(index=alias + '_indices'))
| [
"pandas.Series",
"json.loads",
"pandas.read_csv",
"elasticsearch.helpers.bulk",
"elasticsearch.Elasticsearch",
"os.path.join",
"pandas.DataFrame",
"os.walk"
]
| [((297, 342), 'elasticsearch.Elasticsearch', 'Elasticsearch', (["[{'host': host, 'port': port}]"], {}), "([{'host': host, 'port': port}])\n", (310, 342), False, 'from elasticsearch import Elasticsearch, helpers\n'), ((1842, 1856), 'pandas.DataFrame', 'pd.DataFrame', ([], {}), '()\n', (1854, 1856), True, 'import pandas as pd\n'), ((1882, 1900), 'pandas.Series', 'pd.Series', (['indices'], {}), '(indices)\n', (1891, 1900), True, 'import pandas as pd\n'), ((1925, 1941), 'pandas.Series', 'pd.Series', (['files'], {}), '(files)\n', (1934, 1941), True, 'import pandas as pd\n'), ((2229, 2248), 'json.loads', 'json.loads', (['records'], {}), '(records)\n', (2239, 2248), False, 'import json\n'), ((2249, 2290), 'elasticsearch.helpers.bulk', 'helpers.bulk', (['es', 'records'], {'chunk_size': '(100)'}), '(es, records, chunk_size=100)\n', (2261, 2290), False, 'from elasticsearch import Elasticsearch, helpers\n'), ((897, 941), 'pandas.read_csv', 'pd.read_csv', (['file'], {'sep': 'None', 'engine': '"""python"""'}), "(file, sep=None, engine='python')\n", (908, 941), True, 'import pandas as pd\n'), ((1650, 1669), 'json.loads', 'json.loads', (['records'], {}), '(records)\n', (1660, 1669), False, 'import json\n'), ((1674, 1715), 'elasticsearch.helpers.bulk', 'helpers.bulk', (['es', 'records'], {'chunk_size': '(100)'}), '(es, records, chunk_size=100)\n', (1686, 1715), False, 'from elasticsearch import Elasticsearch, helpers\n'), ((423, 444), 'os.walk', 'os.walk', (['"""/root/data"""'], {}), "('/root/data')\n", (430, 444), False, 'import os\n'), ((459, 486), 'os.path.join', 'os.path.join', (['x[0]', '"""*.csv"""'], {}), "(x[0], '*.csv')\n", (471, 486), False, 'import os\n')] |
from absl import app
from mainLoop import main
if __name__ == '__main__':
app.run(main)
| [
"absl.app.run"
]
| [((91, 104), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (98, 104), False, 'from absl import app\n')] |
#!/usr/bin/env python
# vgm2electron.py
# Tool for converting SN76489-based PSG VGM data to Acorn Electron
# By <NAME> (https://github.com/simondotm/)
# See https://github.com/simondotm/vgm-packer
#
# Copyright (c) 2019 <NAME>. All rights reserved.
#
# "MIT License":
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import functools
import itertools
import struct
import sys
import time
import binascii
import math
import operator
import os
from modules.vgmparser import VgmStream
class VgmElectron:
OUTPUT_RAWDATA = False # output raw dumps of the data that was compressed by LZ4/Huffman
VERBOSE = True
# 0-3 represents approx the loudest 50% of volumes (=ON), 4-15 are the quietest 50% (=OFF)
ATTENTUATION_THRESHOLD1 = 10
ATTENTUATION_THRESHOLD2 = 10
ATTENTUATION_THRESHOLD3 = 10
# define the number of octaves to transpose whole song by, in case too much bass getting lost
TRANSPOSE_OCTAVES1 = 0
TRANSPOSE_OCTAVES2 = 0
TRANSPOSE_OCTAVES3 = 0 #-1
ENABLE_CHANNEL1 = True
ENABLE_CHANNEL2 = True
ENABLE_CHANNEL3 = True
USE_TECHNIQUE = 2
def __init__(self):
print("init")
#----------------------------------------------------------
# Utilities
#----------------------------------------------------------
# split the packed raw data into 11 separate streams
# returns array of 11 bytearrays
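    # Register layout used below: [0..5] = tone 0-2 low/high register pairs,
    # [6] = noise/tone 3 control, [7..10] = volumes for channels 0-3.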
def split_raw(self, rawData, stripCommands = True):
registers = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
registers_opt = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
latched_channel = -1
output_block = bytearray()
output_blocks = []
for o in range(11):
output_blocks.append( bytearray() )
if stripCommands:
register_mask = 15
else:
register_mask = 255
# unpack the raw binary data in 11 arrays of register data without any deltas between them
# eg. the raw chip writes to all 11 registers every frame
n = 0
Packet = True
verbose = False
while (Packet):
packet_size = rawData[n]
if verbose:
print("packet_size=" + str(packet_size))
n += 1
if packet_size == 255:
Packet = False
else:
for x in range(packet_size):
d = rawData[n+x]
#if verbose:
# print " frame byte number=" +str(x)
# print " frame byte=" +str(d)
if d & 128:
# latch
c = (d>>5)&3
latched_channel = c
if d & 16:
# volume
if verbose:
print(" volume on channel " + str(c))
registers[c+7] = d & register_mask
else:
# tone
if verbose:
print(" tone on channel " + str(c))
registers[c*2+0] = d & register_mask
else:
if verbose:
print(" tone data on latched channel " + str(latched_channel))
registers[latched_channel*2+1] = d # we no longer do any masking here # d & 63 # tone data only contains 6 bits of info anyway, so no need for mask
if latched_channel == 3:
print("ERROR CHANNEL")
# emit current state of each of the 11 registers to 11 different bytearrays
for x in range(11):
output_blocks[x].append( registers[x] )
# next packet
n += packet_size
#print(output_blocks[6])
#IGNORE we no longer do this - let the decoder do it instead.
if False:
# make sure we only emit tone3 when it changes, or 15 for no-change
# this prevents the LFSR from being reset
lastTone3 = 255
for x in range(len(output_blocks[6])):
t = output_blocks[6][x]
if t == lastTone3:
output_blocks[6][x] = 15
lastTone3 = t
# print(output_blocks[6])
# Add EOF marker (0x08) to tone3 byte stream
output_blocks[6].append(0x08) # 0x08 is an invalid noise tone.
# return the split blocks
return output_blocks
# given an array of data points, serialize it to a bytearray
# size is the number of bytes to be used to represent each element in the source array.
def toByteArray(self, array, size = 1):
r = bytearray()
for v in array:
if size < 2:
r.append(v & 255)
else:
r.append(v & 255)
r.append(v >> 8)
return r
#----------------------------------------------------------
# Process(filename)
# Convert the given VGM file to an electron VGM file
#----------------------------------------------------------
def process(self, src_filename, dst_filename):
# load the VGM file, or alternatively interpret as a binary
if src_filename.lower()[-4:] != ".vgm":
print("ERROR: Not a VGM source")
return
vgm = VgmStream(src_filename)
data_block = vgm.as_binary()
data_offset = 0
# parse the header
header_size = data_block[0] # header size
play_rate = data_block[1] # play rate
if header_size == 5 and play_rate == 50:
packet_count = data_block[2] + data_block[3]*256 # packet count LO
duration_mm = data_block[4] # duration mm
duration_ss = data_block[5] # duration ss
data_offset = header_size+1
data_offset += data_block[data_offset]+1
data_offset += data_block[data_offset]+1
print("header_size=" +str(header_size))
print("play_rate="+str(play_rate))
print("packet_count="+str(packet_count))
print("duration_mm="+str(duration_mm))
print("duration_ss="+str(duration_ss))
print("data_offset="+str(data_offset))
else:
print("No header.")
print("")
# Trim off the header data. The rest is raw data.
data_block = data_block[data_offset:]
#----------------------------------------------------------
# Unpack the register data into 11 separate data streams
#----------------------------------------------------------
registers = self.split_raw(data_block, True)
#----------------------------------------------------------
# Begin VGM conversion to Electron
#----------------------------------------------------------
# Filter out channels we do not need
# Modify all volumes to full or none
# Interleave sound to a single channel
# output final VGM
vgm_stream = bytearray()
vgm_time = 0
electron_data = bytearray()
# given an SN76489 tone register value, return the equivalent Electron ULA register setting
def sn_to_electron(tone_value):
# hack to protect against divbyzero
if (tone_value == 0):
tone_value = 1
hz = float(vgm.vgm_source_clock) / ( 2.0 * float(tone_value) * 16.0)
print(" sn_to_electron freq " + str(hz) + "hz")
# electron
# Sound frequency = 1 MHz / [32 * (S + 1)]
# f * 32*(S+1) = 1Mhz
# 32*(S+1) = 1Mhz / f
# (S+1) = 1Mhz / f*32
#print ("SN freq is " + str(hz))
ula6 = int( 1000000.0 / (hz * 32.0) ) - 1
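            # Worked example (assuming the common 3.579545 MHz SN76489 source clock):
            # tone_value 428 -> 3579545 / (32 * 428) ~= 261 Hz (middle C), and the ULA
            # value becomes int(1000000 / (261 * 32)) - 1 = 118.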
# check we are within range
if ula6 < 0:
print(" WARNING: Electron freqency '" + str(ula6) + "' too high (" + str(hz) + ")")
ula6 = 0
if ula6 > 255:
print(" WARNING: Electron frequency '" + str(ula6) + "' too low (" + str(hz) + ")")
ula6 = 255
return ula6
#--------------------------------------------------------------
# conversion settings
#--------------------------------------------------------------
# convert the register data to a vgm stream
sample_interval = int(44100 / vgm.metadata['rate']) # 882 # 50hz - TODO: use frame rate
print("sample_interval=" + str(sample_interval))
USE_TONE3 = VgmElectron.ENABLE_CHANNEL3 # True
# TODO: make these all parameters
# Add channel filter option
# Add mix type options
# --attentuation 468 --filter 123 --transpose 00F --mix 123 --arpeggio 2 --rate 50
# Add option to clamp or transpose out of range frequencies
# Make the .ula output file filename.electron.ula
# Add 0x01 as a terminating byte in the output ULA
MIX_RATE = 2 # modulo 2 for interleaving channels
# other options
# bias for channels
# transpose or silence out of range notes
channel_mix = 0
#--------------------------------------------------------------
# pre-process music to suit Electron capabilities
#--------------------------------------------------------------
for i in range(len(registers[0])):
print("Frame " + str(i))
#--------------------------------------------------------------
# step 1- map volumes to 1-bit precision
#--------------------------------------------------------------
# 11 registers per frame
# Tone 0 HL Tone 1 HL Tone 2 HL Tone 3 Vol 0123
for r in range(11):
if r > 6:
register_data = registers[r][i]
# apply the threshold for each channel
threshold = VgmElectron.ATTENTUATION_THRESHOLD1
if r == 8:
threshold = VgmElectron.ATTENTUATION_THRESHOLD2
if r == 9:
threshold = VgmElectron.ATTENTUATION_THRESHOLD3
# if its a volume, map to loudest volume or no volume (using logarithmic scale)
if register_data < threshold:
register_data = 0 # full volume
else:
register_data = 15 # zero volume
if r == 7 and VgmElectron.ENABLE_CHANNEL1 == False:
register_data = 15 # zero volume
if r == 8 and VgmElectron.ENABLE_CHANNEL2 == False:
register_data = 15 # zero volume
if r == 9 and VgmElectron.ENABLE_CHANNEL3 == False:
register_data = 15 # zero volume
registers[r][i] = register_data
#--------------------------------------------------------------
# step 2 - transpose to fit frequency range
#--------------------------------------------------------------
# final step - bring tone1 into the frequency range of the electron
# if the frequency goes below the range of the ULA capabilities, add an octave
def retune(octaves, l,h,v):
#if (octaves == 0):
# print(" No transpose performed, octaves set to 0")
# return
print( " tonehi=" + str(registers[h][i]) + ", tonelo=" + str(registers[l][i]))
tone_value = (registers[h][i] << 4) + registers[l][i]
if tone_value > 0:
tone_freq = float(vgm.vgm_source_clock) / ( 2.0 * float(tone_value) * 16.0)
print(" Retune, Channel " + str(int(l/2)) + " tone=" + str(tone_value) + ", freq=" + str(tone_freq))
# electron baseline is 122Hz not 244Hz as the AUG states.
baseline_freq = 1000000.0 / (32.0*256.0)
target_freq = tone_freq
retuned = 0
transpose = abs(octaves)
while retuned != transpose: # target_freq < baseline_freq:
if (octaves < 0):
target_freq /= 2.0
else:
target_freq *= 2.0
retuned += 1
# if cant reach baseline freq, transpose once, then silence if still too low :(
if target_freq < baseline_freq:
print(" WARNING: Freq too low - Added " + str(1) + " octave(s) - from " + str(target_freq) + " to " + str(target_freq*2.0) + "Hz")
# better to just clamp low frequencies at the bottom, and risk tuning issues rather than transposition jumps
target_freq = baseline_freq #*= 2.0
retuned = 1
if target_freq < baseline_freq:
registers[v][i] = 15
print(" Tone " + str(i) + " silenced because frequency too low - " + str(target_freq))
#target_freq *= 2.0
#retuned += 1
if retuned:
#print(" WARNING: Freq too low - Added " + str(retuned) + " octave(s) - from " + str(tone_freq) + " to " + str(target_freq) + "Hz")
tone_value = int( round( float(vgm.vgm_source_clock) / (2.0 * target_freq * 16.0 ) ) )
registers[h][i] = tone_value >> 4
registers[l][i] = tone_value & 15
# transpose
#if TRANSPOSE_OCTAVES > 0:
print(" Transposing ")
retune(VgmElectron.TRANSPOSE_OCTAVES1, 0,1,7)
retune(VgmElectron.TRANSPOSE_OCTAVES2, 2,3,8)
retune(VgmElectron.TRANSPOSE_OCTAVES3, 4,5,9)
#--------------------------------------------------------------
# Step 3 - mix the 2 primary channels down to 1 channel
#--------------------------------------------------------------
# map channel 2 to channel 1
# noise channel is completely ignored
ENABLE_DOWNMIX = True
if ENABLE_DOWNMIX:
print(" Downmix channels ")
#print("Frame " + str(i))
vol1 = registers[7][i]
vol2 = registers[8][i]
vol3 = registers[9][i]
tone1_active = vol1 != 15
tone2_active = vol2 != 15
tone3_active = vol3 != 15
tone_active = tone1_active or tone2_active or tone3_active
if tone_active:
print(" Tone active, mixing")
output_tone = 1
if self.USE_TECHNIQUE == 2:
c1f = (registers[1][i] << 4) + registers[0][i]
c2f = (registers[3][i] << 4) + registers[2][i]
c3f = (registers[5][i] << 4) + registers[4][i]
active_channels = [ False, False, False ]
if tone1_active:
active_channels[0] = True
print("Channel 1 is active volume")
if tone2_active:
active_channels[1] = True
print("Channel 2 is active volume")
if tone3_active:
active_channels[2] = True
print("Channel 3 is active volume")
# any channels playing the same frequency are filtered out
if tone1_active and tone2_active and c2f == c1f:
active_channels[1] = False
print("Channel 2 is same freq as Channel 1, filtered")
if tone1_active and tone3_active and c3f == c1f:
active_channels[2] = False
print("Channel 3 is same freq as Channel 1, filtered")
if tone2_active and tone3_active and c2f == c3f:
active_channels[2] = False
print("Channel 3 is same freq as Channel 2, filtered")
channel_count = 0
if active_channels[0]: channel_count += 1
if active_channels[1]: channel_count += 1
if active_channels[2]: channel_count += 1
print("channel_count=" + str(channel_count))
output_mix = []
if active_channels[0]: output_mix.append(1)
if active_channels[1]: output_mix.append(2)
if active_channels[2]: output_mix.append(3)
mix = (i % channel_count)
output_tone = output_mix[mix]
if self.USE_TECHNIQUE == 1:
# interleaving of channels 1+2 is done on odd/even frames for a consistent effect
mix = (i % MIX_RATE) == 0 #(i & 1) == 0
# random is no good, thought it might average out but it sounds , well random
#mix = random.random() < 0.5
# test code to see if modulo 3 any good, it wasn't
if False:
if channel_mix == 0 and vol1 != 0:
channel_mix = (channel_mix + 1) % 3
if channel_mix == 1 and vol2 != 0:
channel_mix = (channel_mix + 1) % 3
if channel_mix == 1 and vol3 != 0:
channel_mix = (channel_mix + 1) % 3
output_tone = (channel_mix % 3) + 1
print("output tone=" + str(output_tone))
channel_mix = (channel_mix + 1) % 3
if True:
# detect if channel 1 needs priority this frame
# - its volume is on, and the alternative frame mix flag is good
c1p = vol1 == 0 and mix
# don't give channel 2 priority if tone is the same and channel1 is playing
c1f = (registers[1][i] << 4) + registers[0][i]
c2f = (registers[3][i] << 4) + registers[2][i]
sametone = (c1f == c2f/2) or (c1f == c2f * 2) or (c1f == c2f)
sametone = sametone and (vol1 == vol2) and (vol1 == 0)
if vol1 == 0 and sametone: #diff < 100: #registers[0][i] == registers[2][i] and registers[1][i] == registers[2][i] and vol1 == 0:
c1p = True
print(" NOTE: channel 1 & channel 2 have same tone")
# replace channel 1 data with channel 2 data
# if, channel2 is active, but c1 doesn't have priority this frame
if vol2 == 0 and not c1p:# and vol1 != 0:
output_tone = 2
# if no volume on tone1, we can look at channel 3 too
if USE_TONE3:
#if registers[7][i] == 15:
if vol1 == 15 and vol2 == 15 and vol3 == 0 and not mix:# and not c1p and output_tone != 2:
print("tone3 active")
output_tone = 3
# pick which tone to output
if output_tone == 1:
# do nothing, because tone1 register frequency already setup
output_tone = 1
elif output_tone == 2:
# replace tone 1 frequency with tone 2 frequency
registers[0][i] = registers[2][i]
registers[1][i] = registers[3][i]
registers[7][i] = registers[8][i]
elif output_tone == 3:
# replace tone 1 frequency with tone 3 frequency
registers[0][i] = registers[4][i]
registers[1][i] = registers[5][i]
registers[7][i] = registers[9][i]
else:
print("UNHANDLED CASE - output_tone not set")
# output ULA data
final_volume = registers[7][i]
ula_tone = 0 # zero is highest freq. so inaudible, so thats how we handle volume
if final_volume == 0:
final_tone1 = (registers[1][i] << 4) + registers[0][i]
ula_tone = sn_to_electron(final_tone1)
electron_data.append( ula_tone )
# write to output ULA file
ula_file = open(dst_filename + ".ula.bin", 'wb')
ula_file.write(electron_data)
ula_file.close()
#--------------------------------------------------------------
# Final stage - output to vgm
#--------------------------------------------------------------
# Tone1----- Tone2----- Tone3----- Tone4 Vol1 Vol2 Vol3 Vol4
control = [ 0x80, 0x00, 0xa0, 0x00, 0xc0, 0x00, 0xe0, 0x90, 0xb0, 0xd0, 0xf0 ]
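        # SN76489 latch byte layout assumed here: bit 7 set = latch, bits 5-6 = channel,
        # bit 4 = 1 for volume / 0 for tone data, e.g. 0x90 latches channel 0 volume and
        # 0x80 latches channel 0 tone.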
#filter = [ 0,1,2,3,7,8 ]
#filter = [ 2,3,8 ]
#filter = [ 0,1,2,3,4,5,6,7,8,9,10 ]
filter = [ 0,1,2,3,4,5,7,8,9 ]
if ENABLE_DOWNMIX:
filter = [ 0,1,7 ]
last_tone3 = 255
for i in range(len(registers[0])):
# 11 registers per frame
# Tone 0 HL Tone 1 HL Tone 2 HL Tone 3 Vol 0123
for r in range(11):
register_data = registers[r][i]
# dont update noise register unless different
update = True
if r == 6:
if register_data == last_tone3:
update = False
else:
last_tone3 = register_data
if not r in filter:
update = False
if update:
register_data |= control[r]
vgm_stream.extend( struct.pack('B', 0x50) ) # COMMAND
vgm_stream.extend( struct.pack('B', register_data) ) # DATA
# next frame
if sample_interval == 882: # wait 50
vgm_stream.extend( struct.pack('B', 0x63) )
elif sample_interval == 735: # wait 60
vgm_stream.extend( struct.pack('B', 0x62) )
else:
vgm_stream.extend( struct.pack('B', 0x61) )
vgm_stream.extend( struct.pack('B', int(sample_interval % 256)) )
vgm_stream.extend( struct.pack('B', int(sample_interval / 256)) )
# END command
vgm_stream.extend( struct.pack('B', 0x66) )
vgm.write_vgm(vgm_stream, dst_filename)
#output = bytearray()
# write the electron vgm file
#open(dst_filename, "wb").write( output )
#------------------------------------------------------------------------
# Main()
#------------------------------------------------------------------------
import argparse
# Determine if running as a script
if __name__ == '__main__':
print("Vgm2Electron.py : VGM music converter for Acorn Electron")
print("Written in 2019 by <NAME>, https://github.com/simondotm/vgm-packer")
print("")
epilog_string = ""
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=epilog_string)
parser.add_argument("input", help="VGM source file (must be single SN76489 PSG format) [input]")
parser.add_argument("-o", "--output", metavar="<output>", help="write VGC file <output> (default is '[input].vgc')")
parser.add_argument("-v", "--verbose", help="Enable verbose mode", action="store_true")
parser.add_argument("-a", "--attenuation", default="444", metavar="<nnn>", help="Set attenuation threshold for each channel, 3 character string where each character is 0-F and 0 is loudest, 4 is 50%, F is quietest, default: 444")
parser.add_argument("-t", "--transpose", default="000", metavar="<nnn>", help="Set octaves to transpose for each channel, where 1 is +1 octave and F is -1 octave.")
parser.add_argument("-c", "--channels", default="123", metavar="[1][2][3]", help="Set which channels will be included in the conversion, default 123, which means all 3 channels")
parser.add_argument("-q", "--technique", default=2, metavar="<n>", help="Set which downmix technique to use 1 or 2.")
args = parser.parse_args()
src = args.input
dst = args.output
if dst == None:
dst = os.path.splitext(src)[0] + ".electron.vgm"
# attenuation options
attenuation = args.attenuation
if (len(attenuation) != 3):
print("ERROR: attenuation must be 3 values eg. '444'")
sys.exit()
#print("attenuation=" + attenuation)
VgmElectron.ATTENTUATION_THRESHOLD1 = int(attenuation[0],16)
VgmElectron.ATTENTUATION_THRESHOLD2 = int(attenuation[1],16)
VgmElectron.ATTENTUATION_THRESHOLD3 = int(attenuation[2],16)
# transpose options
transpose = args.transpose
if (len(transpose) != 3):
print("ERROR: transpose must be 3 values eg. '000'")
sys.exit()
#print("transpose=" + transpose)
# 0 1 2 3 4 5 6 7 8 9 a b c d e f
ttable = [0,1,2,3,4,5,6,7,-8,-7,-6,-5,-4,-3,-2,-1]
VgmElectron.TRANSPOSE_OCTAVES1 = ttable[ int(transpose[0],16) ]
VgmElectron.TRANSPOSE_OCTAVES2 = ttable[ int(transpose[1],16) ]
VgmElectron.TRANSPOSE_OCTAVES3 = ttable[ int(transpose[2],16) ]
# channel options
print(args.channels)
VgmElectron.ENABLE_CHANNEL1 = args.channels.find("1") >= 0
VgmElectron.ENABLE_CHANNEL2 = args.channels.find("2") >= 0
VgmElectron.ENABLE_CHANNEL3 = args.channels.find("3") >= 0
print("Channel 1: Enabled=" + str(VgmElectron.ENABLE_CHANNEL1) + ", Transpose=" + str(VgmElectron.TRANSPOSE_OCTAVES1) + ", Attenuation="+str(VgmElectron.ATTENTUATION_THRESHOLD1))
print("Channel 2: Enabled=" + str(VgmElectron.ENABLE_CHANNEL2) + ", Transpose=" + str(VgmElectron.TRANSPOSE_OCTAVES2) + ", Attenuation="+str(VgmElectron.ATTENTUATION_THRESHOLD2))
print("Channel 3: Enabled=" + str(VgmElectron.ENABLE_CHANNEL3) + ", Transpose=" + str(VgmElectron.TRANSPOSE_OCTAVES3) + ", Attenuation="+str(VgmElectron.ATTENTUATION_THRESHOLD3))
# technique
VgmElectron.USE_TECHNIQUE = int(args.technique)
print("Using technique " + str(VgmElectron.USE_TECHNIQUE))
# check for missing files
if not os.path.isfile(src):
print("ERROR: File '" + src + "' not found")
sys.exit()
packer = VgmElectron()
packer.VERBOSE = args.verbose
packer.process(src, dst)
| [
"argparse.ArgumentParser",
"os.path.splitext",
"struct.pack",
"os.path.isfile",
"modules.vgmparser.VgmStream",
"sys.exit"
]
| [((19976, 20080), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.RawDescriptionHelpFormatter', 'epilog': 'epilog_string'}), '(formatter_class=argparse.\n RawDescriptionHelpFormatter, epilog=epilog_string)\n', (19999, 20080), False, 'import argparse\n'), ((5444, 5467), 'modules.vgmparser.VgmStream', 'VgmStream', (['src_filename'], {}), '(src_filename)\n', (5453, 5467), False, 'from modules.vgmparser import VgmStream\n'), ((21363, 21373), 'sys.exit', 'sys.exit', ([], {}), '()\n', (21371, 21373), False, 'import sys\n'), ((21732, 21742), 'sys.exit', 'sys.exit', ([], {}), '()\n', (21740, 21742), False, 'import sys\n'), ((22997, 23016), 'os.path.isfile', 'os.path.isfile', (['src'], {}), '(src)\n', (23011, 23016), False, 'import os\n'), ((23067, 23077), 'sys.exit', 'sys.exit', ([], {}), '()\n', (23075, 23077), False, 'import sys\n'), ((19367, 19388), 'struct.pack', 'struct.pack', (['"""B"""', '(102)'], {}), "('B', 102)\n", (19378, 19388), False, 'import struct\n'), ((21176, 21197), 'os.path.splitext', 'os.path.splitext', (['src'], {}), '(src)\n', (21192, 21197), False, 'import os\n'), ((18984, 19004), 'struct.pack', 'struct.pack', (['"""B"""', '(99)'], {}), "('B', 99)\n", (18995, 19004), False, 'import struct\n'), ((18804, 18824), 'struct.pack', 'struct.pack', (['"""B"""', '(80)'], {}), "('B', 80)\n", (18815, 18824), False, 'import struct\n'), ((18863, 18894), 'struct.pack', 'struct.pack', (['"""B"""', 'register_data'], {}), "('B', register_data)\n", (18874, 18894), False, 'import struct\n'), ((19075, 19095), 'struct.pack', 'struct.pack', (['"""B"""', '(98)'], {}), "('B', 98)\n", (19086, 19095), False, 'import struct\n'), ((19133, 19153), 'struct.pack', 'struct.pack', (['"""B"""', '(97)'], {}), "('B', 97)\n", (19144, 19153), False, 'import struct\n')] |
# ██╗░░░░░██╗███╗░░██╗░██████╗░░░░██████╗░██╗░░░░░░█████╗░░█████╗░██╗░░██╗
# ██║░░░░░██║████╗░██║██╔════╝░░░░██╔══██╗██║░░░░░██╔══██╗██╔══██╗██║░██╔╝
# ██║░░░░░██║██╔██╗██║██║░░██╗░░░░██████╦╝██║░░░░░███████║██║░░╚═╝█████═╝░
# ██║░░░░░██║██║╚████║██║░░╚██╗░░░██╔══██╗██║░░░░░██╔══██║██║░░██╗██╔═██╗░
# ███████╗██║██║░╚███║╚██████╔╝░░░██████╦╝███████╗██║░░██║╚█████╔╝██║░╚██╗
# ╚══════╝╚═╝╚═╝░░╚══╝░╚═════╝░░░░╚═════╝░╚══════╝╚═╝░░╚═╝░╚════╝░╚═╝░░╚═╝
#
# Developed by <NAME> (C) Ling • Black 2020
# @site http://ling.black
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from core.response import RequestLimit
from database import get_db, DatabaseUtils
from database.wow.models import PostModel, PostCommentsModel
from wow.interface.entity import PostCategory, Post, PostCategoryCreate, PostCreate, PostLikeCreate, PostCommentCreate
from wow.utils.posts import PostsUtils
from wow.utils.users import BlizzardUsersUtils
router = APIRouter()
class TokenArgs(BaseModel):
token: str
class TokenPostIdArgs(BaseModel):
token: str
post_id: int
class CommentIdAndToken(TokenArgs):
comment_id: int
class PostAPIList(BaseModel):
items: List[Post]
count: int
class PostAPIListResponse(BaseModel):
response: PostAPIList
request: RequestLimit
# -----------------------------------
# CATEGORIES
# -----------------------------------
@router.post(
"/categories",
response_model=PostCategory,
summary='Adds the category'
)
def add_category(body: PostCategoryCreate):
"""
Adds the category
:param body:
:return:
"""
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
return PostsUtils.add_category(user_id=blizzard_id, url=body.url, title=body.title)
@router.get(
"/categories",
response_model=List[PostCategory],
summary='Returns the categories'
)
def get_categories():
"""
Returns the categories list
:return:
"""
return PostsUtils.get_categories()
# -----------------------------------
# POSTS
# -----------------------------------
@router.get(
"/",
response_model=PostAPIListResponse,
summary='Returns all the posts'
)
def get_posts_all(limit: int = 100, offset: int = 0):
return PostsUtils.get_posts_limit(
limit=limit,
offset=offset
)
@router.get(
"/category/{category_url}",
response_model=PostAPIListResponse,
summary='Returns the posts in category'
)
def get_posts_by_category(category_url: int, limit: int = 100, offset: int = 0):
"""
Returns all the posts by category
:param category_url:
:param limit:
:param offset:
:return:
"""
return PostsUtils.get_posts_by_category_limit(
category_id=category_url,
limit=limit,
offset=offset
)
@router.get(
"/user/{blizzard_id}",
response_model=PostAPIListResponse,
summary='Returns the posts by users'
)
def get_posts_by_user(blizzard_id: int, limit: int = 100, offset: int = 0):
"""
Returns all the posts by category
:param blizzard_id:
:param limit:
:param offset:
:return:
"""
return PostsUtils.get_posts_by_blizzard_id(
blizzard_id=blizzard_id,
limit=limit,
offset=offset
)
@router.post(
"/like",
summary='Likes the post',
tags=['Лайки']
)
def like_post(body: PostLikeCreate):
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
return PostsUtils.add_like(
user_id=blizzard_id,
post_id=body.post_id,
)
@router.post(
"/unlike",
summary='Unlikes the post',
tags=['Лайки']
)
def unlike_post(body: PostLikeCreate):
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
return PostsUtils.remove_like(
user_id=blizzard_id,
post_id=body.post_id,
)
@router.post(
"/comment",
summary='Adds the comment',
tags=['Комментарии']
)
def comment_post(body: PostCommentCreate):
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
return PostsUtils.add_comment(
user_id=blizzard_id,
post_id=body.post_id,
reply_id=body.reply_id,
text=body.text,
)
@router.delete(
"/comment",
summary='Removes the comment',
tags=['Комментарии']
)
def remove_comment(body: CommentIdAndToken, db=Depends(get_db)):
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
com = db.query(PostCommentsModel).filter(PostCommentsModel.id == body.comment_id).filter(
PostCommentsModel.user_id == blizzard_id)
if com.count() > 0:
com.delete()
db.commit()
return True
return False
@router.post(
"/",
response_model=Post,
summary='Adds the post'
)
def add_post(body: PostCreate):
"""
Adds the post item
:param body:
:return:
"""
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
return PostsUtils.add_post(
user_id=blizzard_id,
category_id=body.category_id,
title=body.title,
content=body.content,
tags=body.tags,
image=body.image
)
@router.delete(
"/{post_id}",
summary='Deletes the post'
)
def delete_post(post_id: int, body: TokenArgs, db=Depends(get_db)):
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
q = db.query(PostModel).filter(PostModel.id == post_id).filter(PostModel.user_id == blizzard_id)
if q.count() == 0:
raise HTTPException(status_code=404, detail='Post is undefined')
return DatabaseUtils.remove_query(db, q)
@router.post(
"/{post_id}",
summary='Edits the post'
)
def edit_post(post_id: int, body: PostCreate, db=Depends(get_db)):
blizzard_id = BlizzardUsersUtils.id__safe(body.token)
q = db.query(PostModel).filter(PostModel.id == post_id).filter(PostModel.user_id == blizzard_id)
if q.count() == 0:
raise HTTPException(status_code=404, detail='Post is undefined')
q.update({
'title': body.title,
'content': body.content,
'category_id': body.category_id,
'image': body.image,
'tags': body.tags,
})
db.commit()
return True
@router.get(
"/{post_id}",
response_model=Post,
summary='Returns the post'
)
def get_post(post_id: int, db=Depends(get_db)):
return db.query(PostModel).filter(PostModel.id == post_id).first()
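# Illustrative requests against this router (prefix depends on how it is mounted,
# e.g. app.include_router(router, prefix='/posts')):
#   GET    /posts/?limit=10&offset=0                 -> paginated post list
#   POST   /posts/like   {"token": "...", "post_id": 1}
#   DELETE /posts/42     {"token": "..."}            -> only the author may delete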
| [
"wow.utils.posts.PostsUtils.get_posts_by_blizzard_id",
"wow.utils.posts.PostsUtils.add_category",
"fastapi.HTTPException",
"wow.utils.posts.PostsUtils.get_categories",
"fastapi.Depends",
"wow.utils.posts.PostsUtils.get_posts_by_category_limit",
"fastapi.APIRouter",
"wow.utils.users.BlizzardUsersUtils.id__safe",
"wow.utils.posts.PostsUtils.add_comment",
"wow.utils.posts.PostsUtils.add_post",
"database.DatabaseUtils.remove_query",
"wow.utils.posts.PostsUtils.remove_like",
"wow.utils.posts.PostsUtils.add_like",
"wow.utils.posts.PostsUtils.get_posts_limit"
]
| [((1530, 1541), 'fastapi.APIRouter', 'APIRouter', ([], {}), '()\n', (1539, 1541), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((2208, 2247), 'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (2235, 2247), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((2259, 2335), 'wow.utils.posts.PostsUtils.add_category', 'PostsUtils.add_category', ([], {'user_id': 'blizzard_id', 'url': 'body.url', 'title': 'body.title'}), '(user_id=blizzard_id, url=body.url, title=body.title)\n', (2282, 2335), False, 'from wow.utils.posts import PostsUtils\n'), ((2542, 2569), 'wow.utils.posts.PostsUtils.get_categories', 'PostsUtils.get_categories', ([], {}), '()\n', (2567, 2569), False, 'from wow.utils.posts import PostsUtils\n'), ((2835, 2889), 'wow.utils.posts.PostsUtils.get_posts_limit', 'PostsUtils.get_posts_limit', ([], {'limit': 'limit', 'offset': 'offset'}), '(limit=limit, offset=offset)\n', (2861, 2889), False, 'from wow.utils.posts import PostsUtils\n'), ((3258, 3355), 'wow.utils.posts.PostsUtils.get_posts_by_category_limit', 'PostsUtils.get_posts_by_category_limit', ([], {'category_id': 'category_url', 'limit': 'limit', 'offset': 'offset'}), '(category_id=category_url, limit=\n limit, offset=offset)\n', (3296, 3355), False, 'from wow.utils.posts import PostsUtils\n'), ((3717, 3809), 'wow.utils.posts.PostsUtils.get_posts_by_blizzard_id', 'PostsUtils.get_posts_by_blizzard_id', ([], {'blizzard_id': 'blizzard_id', 'limit': 'limit', 'offset': 'offset'}), '(blizzard_id=blizzard_id, limit=limit,\n offset=offset)\n', (3752, 3809), False, 'from wow.utils.posts import PostsUtils\n'), ((3971, 4010), 'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (3998, 4010), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((4022, 4084), 'wow.utils.posts.PostsUtils.add_like', 'PostsUtils.add_like', ([], {'user_id': 'blizzard_id', 'post_id': 'body.post_id'}), '(user_id=blizzard_id, post_id=body.post_id)\n', (4041, 4084), False, 'from wow.utils.posts import PostsUtils\n'), ((4247, 4286), 'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (4274, 4286), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((4298, 4363), 'wow.utils.posts.PostsUtils.remove_like', 'PostsUtils.remove_like', ([], {'user_id': 'blizzard_id', 'post_id': 'body.post_id'}), '(user_id=blizzard_id, post_id=body.post_id)\n', (4320, 4363), False, 'from wow.utils.posts import PostsUtils\n'), ((4536, 4575), 'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (4563, 4575), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((4587, 4697), 'wow.utils.posts.PostsUtils.add_comment', 'PostsUtils.add_comment', ([], {'user_id': 'blizzard_id', 'post_id': 'body.post_id', 'reply_id': 'body.reply_id', 'text': 'body.text'}), '(user_id=blizzard_id, post_id=body.post_id, reply_id=\n body.reply_id, text=body.text)\n', (4609, 4697), False, 'from wow.utils.posts import PostsUtils\n'), ((4873, 4888), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (4880, 4888), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((4909, 4948), 'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (4936, 4948), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((5395, 5434), 
'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (5422, 5434), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((5446, 5594), 'wow.utils.posts.PostsUtils.add_post', 'PostsUtils.add_post', ([], {'user_id': 'blizzard_id', 'category_id': 'body.category_id', 'title': 'body.title', 'content': 'body.content', 'tags': 'body.tags', 'image': 'body.image'}), '(user_id=blizzard_id, category_id=body.category_id,\n title=body.title, content=body.content, tags=body.tags, image=body.image)\n', (5465, 5594), False, 'from wow.utils.posts import PostsUtils\n'), ((5765, 5780), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (5772, 5780), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((5801, 5840), 'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (5828, 5840), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((6049, 6082), 'database.DatabaseUtils.remove_query', 'DatabaseUtils.remove_query', (['db', 'q'], {}), '(db, q)\n', (6075, 6082), False, 'from database import get_db, DatabaseUtils\n'), ((6197, 6212), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (6204, 6212), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((6233, 6272), 'wow.utils.users.BlizzardUsersUtils.id__safe', 'BlizzardUsersUtils.id__safe', (['body.token'], {}), '(body.token)\n', (6260, 6272), False, 'from wow.utils.users import BlizzardUsersUtils\n'), ((6804, 6819), 'fastapi.Depends', 'Depends', (['get_db'], {}), '(get_db)\n', (6811, 6819), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((5979, 6037), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Post is undefined"""'}), "(status_code=404, detail='Post is undefined')\n", (5992, 6037), False, 'from fastapi import APIRouter, Depends, HTTPException\n'), ((6411, 6469), 'fastapi.HTTPException', 'HTTPException', ([], {'status_code': '(404)', 'detail': '"""Post is undefined"""'}), "(status_code=404, detail='Post is undefined')\n", (6424, 6469), False, 'from fastapi import APIRouter, Depends, HTTPException\n')] |
import datetime
import pickle
import tensorflow as tf
def save_checkpoint(model, current_step, epoch, output_path, **kwargs):
""" Save TF Vocoder model """
state = {
'model': model.weights,
'step': current_step,
'epoch': epoch,
'date': datetime.date.today().strftime("%B %d, %Y"),
}
state.update(kwargs)
pickle.dump(state, open(output_path, 'wb'))
def load_checkpoint(model, checkpoint_path):
""" Load TF Vocoder model """
checkpoint = pickle.load(open(checkpoint_path, 'rb'))
chkp_var_dict = {var.name: var.numpy() for var in checkpoint['model']}
tf_vars = model.weights
for tf_var in tf_vars:
layer_name = tf_var.name
chkp_var_value = chkp_var_dict[layer_name]
tf.keras.backend.set_value(tf_var, chkp_var_value)
return model
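# Illustrative usage (any tf.keras model whose weights match the checkpoint):
#   save_checkpoint(model, current_step=1000, epoch=5,
#                   output_path='checkpoint_1000.pkl', lr=1e-4)
#   model = load_checkpoint(model, 'checkpoint_1000.pkl')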
| [
"datetime.date.today",
"tensorflow.keras.backend.set_value"
]
| [((763, 813), 'tensorflow.keras.backend.set_value', 'tf.keras.backend.set_value', (['tf_var', 'chkp_var_value'], {}), '(tf_var, chkp_var_value)\n', (789, 813), True, 'import tensorflow as tf\n'), ((278, 299), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (297, 299), False, 'import datetime\n')] |
import sys
import copy
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
from collections import Counter
from .utils import *
import numpy as np
import pandas as pd
class plotFeatures:
usage = """Produces different feature plots given a data table and peak table.
Initial_Parameters
----------
peaktable : Pandas dataframe containing peak data. Must contain 'Name' and 'Label'.
datatable : Pandas dataframe containing matrix of values to plot (N samples x N features). Columns/features must be same as 'Name' from Peak Table.
Methods
-------
set_params : Set parameters -
plot_type: The type of plot. Either "point", "violin", "box", "swarm", "violin-swarm" or "box-swarm" (default: 'point')
column_numbers: The number of columns to display in the plots (default: 4)
log_data: Perform a log ('natural', base 2 or base 10) on all data (default: (True, 2))
scale_data: Scale the data ('standard' (centers to the mean and scales to unit variance), 'minmax' (scales between 0 and 1), 'maxabs' (scales to the absolute maximum value), 'robust' (centers to the median and scales to between 25th and 75th quantile range) (default: (True, 'minmax'))
impute_data: Impute any missing values using KNN impute with a set number of nearest neighbours (default: (True, 3))
style: Set the matplotlib style (see https://matplotlib.org/stable/tutorials/introductory/customizing.html) (default: 'seaborn-white')
transparent: Setting to 'True' will make the background transparent (default: False)
figSize: The figure size as a tuple (width,height) (default: (15,10))
fontSize: The font size for all text (default: 12)
colour_palette: The colour palette to use for the plot (default: None)
y_axis_label: The label to customise the y axis (default: None)
x_axis_rotation: Rotate the x axis labels this number of degrees (default: 0)
group_column_name: The group column name used in the datatable (e.g. 'Class') (default: None)
point_estimator: The statistical function to use for the point plot. Either "mean" or "median" (default: 'mean')
point_ci: The bootstrapped confidence interval for the point plot. Can also be standard deviation ("sd") (default: 95)
violin_distribution_type: The representation of the distribution of data points within the violin plot. Either "quartile", "box", "point", "stick" or None (default: 'box')
violin_width_scale: The method used to scale the width of the violin plot. Either "area", "count" or "width" (default: "width")
box_iqr: The proportion past the lower and upper quartiles to extend the plot whiskers for the box plot. Points outside this range will be identified as outliers (default: 1.5)
saveImage: Setting to 'True' will save the image to file (default: True)
imageFileName: The image file name to save to (default: [plot_type]_features.png')
dpi: The number of Dots Per Inch (DPI) for the image (default: 200)
help : Print this help text
plot : Generates feature plots
"""
def __init__(self, peaktable, datatable):
peaktable = self.__checkPeakTable(self.__checkData(peaktable))
datatable = self.__checkData(datatable)
# Slice the meta-data, and select only peaks from the peaktable for processing, and add the meta-data back
meta = datatable.T[~datatable.T.index.isin(peaktable['Name'])].T.reset_index(drop=True)
dat = datatable[peaktable['Name']].reset_index()
datatable = pd.concat([meta, dat], axis=1).set_index(['index'])
datatable.index.name = None
self.__peaktable = peaktable
# Search for duplicate labels and amend with a suffix, to avoid issues when relabelling the datatable
labels = copy.deepcopy(list(peaktable['Label']))
label_counts = {k: v for k, v in Counter(labels).items() if v > 1}
for i in reversed(range(len(labels))):
item = str(labels[i])
if item in label_counts and label_counts[item]:
labels[i] += "_" + str(label_counts[item])
label_counts[item] -= 1
#Label datatable with peak labels instead of names for ease of feature plotting
col_label_dict = dict(zip(list(peaktable['Name']), labels))
datatable.rename(columns=col_label_dict, inplace=True)
self.__peak_labels = labels
self.__datatable = datatable
self.set_params()
def help(self):
print(plotFeatures.usage)
def set_params(self, plot_type='point', column_numbers=4, log_data=(True, 2), scale_data=(True, 'minmax'), impute_data=(True, 3), style='seaborn-white', transparent=False, figSize = (15, 10), fontSize = 12, colour_palette=None, y_axis_label=None, x_axis_rotation=0, group_column_name=None, point_estimator='mean', point_ci=95, violin_distribution_type='box', violin_width_scale='width', box_iqr=1.5, saveImage=True, imageFileName='_features.png', dpi = 200):
plot_type, column_numbers, log_data, scale_data, impute_data, style, transparent, figSize, fontSize, colour_palette, y_axis_label, x_axis_rotation, group_column_name, point_estimator, point_ci, violin_distribution_type, violin_width_scale, box_iqr, saveImage, imageFileName, dpi = self.__paramCheck(plot_type, column_numbers, log_data, scale_data, impute_data, style, transparent, figSize, fontSize, colour_palette, y_axis_label, x_axis_rotation, group_column_name, point_estimator, point_ci, violin_distribution_type, violin_width_scale, box_iqr, saveImage, imageFileName, dpi)
self.__plot_type = plot_type;
self.__column_numbers = column_numbers;
self.__log_data = log_data;
self.__scale_data = scale_data;
self.__impute_data = impute_data;
self.__style = style;
self.__transparent = transparent;
self.__figSize = figSize;
self.__fontSize = fontSize;
self.__colour_palette = colour_palette;
self.__y_axis_label = y_axis_label;
self.__x_axis_rotation = x_axis_rotation;
self.__group_column_name = group_column_name;
self.__point_estimator = point_estimator;
self.__point_ci = point_ci;
self.__violin_distribution_type = violin_distribution_type;
self.__violin_width_scale = violin_width_scale;
self.__box_iqr = box_iqr;
self.__saveImage = saveImage;
self.__imageFileName = imageFileName;
self.__dpi = dpi;
def plot(self):
datatable = copy.deepcopy(self.__datatable)
labels = self.__peak_labels
plot_type = self.__plot_type
group_column_name = self.__group_column_name
column_numbers = self.__column_numbers
colour_palette = self.__colour_palette
point_ci = self.__point_ci
point_estimator = self.__point_estimator
log_data = self.__log_data
scale_data = self.__scale_data
impute_data = self.__impute_data
x_axis_rotation = self.__x_axis_rotation
y_axis_label = self.__y_axis_label
violin_distribution_type = self.__violin_distribution_type
violin_width_scale = self.__violin_width_scale
box_iqr = self.__box_iqr
imageFileName = self.__imageFileName
saveImage = self.__saveImage
fontSize = self.__fontSize
style = self.__style
transparent = self.__transparent
dpi = self.__dpi
figSize = self.__figSize
meta = datatable.T[~datatable.T.index.isin(labels)].T.reset_index(drop=True)
X = datatable[labels].reset_index(drop=True)
(log_bool, log_base) = log_data;
if log_bool:
if isinstance(log_base, str) and log_base.lower() == 'natural':
X = X.applymap(np.log);
elif log_base == 2:
X = X.applymap(np.log2);
elif log_base == 10:
X = X.applymap(np.log10);
else:
print("Error: The chosen log type is invalid.")
sys.exit()
(scale_bool, scale_type) = scale_data
if scale_bool:
if isinstance(scale_type, str) and scale_type.lower() == 'standard':
X = scaler(X, type=scale_type.lower()).reset_index(drop=True)
elif isinstance(scale_type, str) and scale_type.lower() == 'minmax':
X = scaler(X, type=scale_type.lower()).reset_index(drop=True)
elif isinstance(scale_type, str) and scale_type.lower() == 'maxabs':
X = scaler(X, type=scale_type.lower()).reset_index(drop=True)
elif isinstance(scale_type, str) and scale_type.lower() == 'robust':
X = scaler(X, type=scale_type.lower()).reset_index(drop=True)
else:
print("Error: The chosen scale type is invalid.")
sys.exit()
(impute_bool, k) = impute_data;
if impute_bool:
X = imputeData(X, k=k).reset_index(drop=True)
if not isinstance(X, pd.DataFrame):
X = pd.DataFrame(X, columns=labels)
# Add the meta data back in with the logged, scaled, or imputed data
datatable = pd.concat([meta, X], axis=1).reset_index(drop=True)
with plt.style.context(style):
fig, axes = plt.subplots(nrows=int(np.ceil(float(len(labels) / column_numbers))), ncols=column_numbers, sharey=True, figsize=figSize)
if plot_type == 'point':
for peak_index, peak in enumerate(labels):
if point_estimator.lower() == 'mean':
point_estimator = 'Mean'
ax = sns.pointplot(data=datatable, x=group_column_name, y=peak, estimator=np.nanmean, capsize=0.1, ci=point_ci, palette=colour_palette, ax=axes.flat[peak_index])
elif point_estimator.lower() == 'median':
point_estimator = 'Median'
ax = sns.pointplot(data=datatable, x=group_column_name, y=peak, estimator=np.nanmedian, capsize=0.1, ci=point_ci, palette=colour_palette, ax=axes.flat[peak_index])
else:
print("Error: Invalid point plot estimator type.")
sys.exit()
ax.tick_params(labelrotation=x_axis_rotation, labelsize=fontSize)
if log_bool:
                        if scale_bool:
if isinstance(point_ci, str):
if point_ci == 'sd':
ax.set_title(peak + ' within SD', fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('Log({}) scaled ({}) {} Peak Area within SD'.format(log_base, scale_type, point_estimator), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
ax.set_title(peak + ' with {}% CI'.format(point_ci), fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('Log({}) scaled ({}) {} Peak Area & {}% CI'.format(log_base, scale_type, point_estimator, point_ci), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if isinstance(point_ci, str):
if point_ci == 'sd':
ax.set_title(peak + ' within SD', fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('Log({}) {} Peak Area within SD'.format(log_base, point_estimator), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
ax.set_title(peak + ' with {}% CI'.format(point_ci), fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('Log({}) {} Peak Area & {}% CI'.format(log_base, point_estimator, point_ci), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
                        if scale_bool:
if isinstance(point_ci, str):
if point_ci == 'sd':
ax.set_title(peak + ' within SD', fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('Scaled ({}) {} Peak Area within SD'.format(scale_type, point_estimator), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
ax.set_title(peak + ' with {}% CI'.format(point_ci), fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('Scaled ({}) {} Peak Area & {}% CI'.format(scale_type, point_estimator, point_ci), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if isinstance(point_ci, str):
if point_ci == 'sd':
ax.set_title(peak + ' within SD', fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('{} Peak Area within SD'.format(point_estimator), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
ax.set_title(peak + ' with {}% CI'.format(point_ci), fontsize=fontSize)
ax.set_xlabel('')
if y_axis_label is None:
ax.set_ylabel('{} Peak Area & {}% CI'.format(point_estimator, point_ci), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
elif plot_type.lower() == 'violin':
for peak_index, peak in enumerate(labels):
ax = sns.violinplot(data=datatable, x=group_column_name, y=peak, linewidth=1, inner=violin_distribution_type, scale=violin_width_scale, palette=colour_palette, ax=axes.flat[peak_index])
ax.tick_params(labelrotation=x_axis_rotation, labelsize=fontSize)
ax.set_title(peak, fontsize=fontSize)
ax.set_xlabel('')
if log_bool:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Log({}) scaled ({}) Peak Area'.format(log_base, scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Log({}) Peak Area'.format(log_base), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Scaled ({}) Peak Area'.format(scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Peak Area', fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
elif plot_type.lower() == 'box':
for peak_index, peak in enumerate(labels):
ax = sns.boxplot(data=datatable, x=group_column_name, y=peak, palette=colour_palette, whis=box_iqr, ax=axes.flat[peak_index])
ax.tick_params(labelrotation=x_axis_rotation, labelsize=fontSize)
ax.set_title(peak, fontsize=fontSize)
ax.set_xlabel('')
if log_bool:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Log({}) scaled ({}) Peak Area'.format(log_base, scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Log({}) Peak Area'.format(log_base), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Scaled ({}) Peak Area'.format(scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Peak Area', fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
elif plot_type.lower() == 'swarm':
for peak_index, peak in enumerate(labels):
ax = sns.swarmplot(data=datatable, x=group_column_name, y=peak, size=10, palette=colour_palette, ax=axes.flat[peak_index])
ax.tick_params(labelrotation=x_axis_rotation, labelsize=fontSize)
ax.set_title(peak, fontsize=fontSize)
ax.set_xlabel('')
if log_bool:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Log({}) scaled ({}) Peak Area'.format(log_base, scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Log({}) Peak Area'.format(log_base), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Scaled ({}) Peak Area'.format(scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Peak Area', fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
elif plot_type.lower() == 'violin-swarm':
for peak_index, peak in enumerate(labels):
ax = sns.violinplot(data=datatable, x=group_column_name, y=peak, linewidth=1, inner=None, scale=violin_width_scale, palette=colour_palette, ax=axes.flat[peak_index])
ax = sns.swarmplot(data=datatable, x=group_column_name, y=peak, color="white", edgecolor="gray", ax=axes.flat[peak_index])
ax.tick_params(labelrotation=x_axis_rotation, labelsize=fontSize)
ax.set_title(peak, fontsize=fontSize)
ax.set_xlabel('')
if log_bool:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Log({}) scaled ({}) Peak Area'.format(log_base, scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Log({}) Peak Area'.format(log_base), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Scaled ({}) Peak Area'.format(scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Peak Area', fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
elif plot_type.lower() == 'box-swarm':
for peak_index, peak in enumerate(labels):
ax = sns.boxplot(data=datatable, x=group_column_name, y=peak, palette=colour_palette, whis=np.inf, ax=axes.flat[peak_index])
ax = sns.swarmplot(data=datatable, x=group_column_name, y=peak, color="0.2", ax=axes.flat[peak_index])
ax.tick_params(labelrotation=x_axis_rotation, labelsize=fontSize)
ax.set_title(peak, fontsize=fontSize)
ax.set_xlabel('')
if log_bool:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Log({}) scaled ({}) Peak Area'.format(log_base, scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Log({}) Peak Area'.format(log_base), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
                        if scale_bool:
if y_axis_label is None:
ax.set_ylabel('Scaled ({}) Peak Area'.format(scale_type), fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
else:
if y_axis_label is None:
ax.set_ylabel('Peak Area', fontsize=fontSize)
else:
ax.set_ylabel(y_axis_label, fontsize=fontSize)
fig.tight_layout(h_pad=5, w_pad=2)
if saveImage:
plt.savefig(plot_type + 'Plot' + imageFileName, dpi=dpi, transparent=transparent)
plt.show()
def __paramCheck(self, plot_type, column_numbers, log_data, scale_data, impute_data, style, transparent, figSize, fontSize, colour_palette, y_axis_label, x_axis_rotation, group_column_name, point_estimator, point_ci, violin_distribution_type, violin_width_scale, box_iqr, saveImage, imageFileName, dpi):
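        """Validate the user-supplied plotting options, printing a message and exiting on the first invalid value."""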
cmap_list = list(matplotlib.cm.cmaps_listed) + list(matplotlib.cm.datad)
cmap_list_r = [cmap + '_r' for cmap in cmap_list]
cmap_list = cmap_list + cmap_list_r
plot_types = ['point', 'violin', 'box', 'swarm', 'violin-swarm', 'box-swarm']
estimator_types = ['mean', 'median']
datatable = self.__datatable
if plot_type.lower() not in plot_types:
print("Error: Plot type is not valid. Choose one of the following: {}.".format(', '.join(plot_types)))
sys.exit()
if not isinstance(column_numbers, int):
            print("Error: Column numbers is not valid. Choose an integer value.")
sys.exit()
if not isinstance(log_data, tuple):
            print("Error: Log data type is not a tuple. Please ensure the value is a tuple (e.g. (True, 2)).")
sys.exit()
else:
(log_bool, log_base) = log_data
if not isinstance(log_bool, bool):
print("Error: Log data first tuple item is not a boolean value. Choose either \"True\" or \"False\".")
sys.exit()
base_types = ['natural', 2, 10]
if isinstance(log_base, str):
log_base = log_base.lower()
if log_base not in base_types:
                print("Error: Log data second tuple item is not valid. Choose one of {}.".format(', '.join(map(str, base_types))))
sys.exit()
if not isinstance(scale_data, tuple):
            print("Error: Scale data type is not a tuple. Please ensure the value is a tuple (e.g. (True, 'standard')).")
sys.exit()
else:
(scale_bool, scale_type) = scale_data
if not isinstance(scale_bool, bool):
print("Error: Scale data first tuple item is not a boolean value. Choose either \"True\" or \"False\".")
sys.exit()
scale_types = ['standard', 'minmax', 'maxabs', 'robust']
if isinstance(scale_type, str):
scale_type = scale_type.lower()
if scale_type not in scale_types:
print("Error: Scale data second tuple item is not valid. Choose one of {}.".format(', '.join(scale_types)))
sys.exit()
if not isinstance(impute_data, tuple):
            print("Error: Impute data type is not a tuple. Please ensure the value is a tuple (e.g. (True, 3)).")
sys.exit()
else:
(impute_bool, k) = impute_data
if not isinstance(impute_bool, bool):
print("Error: Impute data first tuple item is not a boolean value. Choose either \"True\" or \"False\".")
sys.exit()
if not isinstance(k, float):
if not isinstance(k, int):
print("Error: Impute data second tuple item, the nearest neighbours k value, is not valid. Choose a float or integer value.")
sys.exit()
if not isinstance(style, str):
print("Error: Seaborn style is not valid. Choose a string value.")
sys.exit()
else:
styleList = list(plt.style.available)
if style not in styleList:
print("Error: Chosen style is not valid. Choose one of the following: {}.".format(', '.join(styleList)))
sys.exit()
if not isinstance(transparent, bool):
print("Error: The transparent value is not valid. Choose either \"True\" or \"False\".")
sys.exit()
if not isinstance(figSize, tuple):
print("Error: Figure size is not valid. Choose a tuple of length 2.")
sys.exit()
else:
for length in figSize:
if not isinstance(length, float):
if not isinstance(length, int):
print("Error: Figure size value is not valid. Choose a float or integer value.")
sys.exit()
if not isinstance(fontSize, float):
if not isinstance(fontSize, int):
print("Error: Font size is not valid. Choose a float or integer value.")
sys.exit()
if colour_palette is not None:
if not isinstance(colour_palette, str):
print("Error: The colour palette is not valid. Choose a string value.")
sys.exit()
else:
if colour_palette not in cmap_list:
print("Error: The colour palette is not valid. Choose one of the following: {}.".format(', '.join(cmap_list)))
sys.exit()
if y_axis_label is not None:
            if not isinstance(y_axis_label, str):
print("Error: The y axis label is not valid. Choose a string value.")
sys.exit()
if not isinstance(x_axis_rotation, float):
if not isinstance(x_axis_rotation, int):
print("Error: The x axis rotation value is not valid. Choose a float or integer value.")
sys.exit()
if ((x_axis_rotation < 0) or (x_axis_rotation > 360)):
            print("Error: The x axis rotation value is not valid. Choose a value between 0 and 360.")
sys.exit()
if group_column_name is not None:
if not isinstance(group_column_name, str):
print("Error: Group column name is not valid. Choose a string value.")
sys.exit()
else:
if group_column_name not in list(datatable.columns):
print("Error: Group column name not valid. Choose one of {}.".format(', '.join(list(datatable.columns))))
sys.exit()
if point_estimator.lower() not in estimator_types:
print("Error: The chosen point plot estimator is invalid. Choose one of \"{}\".".format('\" or \"'.join(estimator_types)))
sys.exit()
if isinstance(point_ci, str):
if point_ci != 'sd':
print("Error: The string value for point plot ci is invalid. Choose a float, integer or 'sd' value for standard deviation.")
sys.exit()
else:
if not isinstance(point_ci, float):
if not isinstance(point_ci, int):
print("Error: The value for point plot ci is invalid. Choose a float, integer or 'sd' value for standard deviation.")
sys.exit()
violin_distribution_types = ['quartile', 'box', 'point', 'stick', None]
violin_width_scale_types = ['area', 'count', 'width']
if plot_type.lower() == "violin":
if violin_distribution_type not in violin_distribution_types:
                print("Error: Violin distribution type not valid. Choose one of the following: {}.".format(', '.join(map(str, violin_distribution_types))))
sys.exit()
if violin_width_scale not in violin_width_scale_types:
print("Error: Violin width scale type not valid. Choose one of the following: {}.".format(', '.join(violin_width_scale_types)))
sys.exit()
        if plot_type.lower() == "box":
if not isinstance(box_iqr, float):
if not isinstance(box_iqr, int):
print(
"Error: The box plot interquartile range extension beyond whiskers is not valid. Choose a float or integer value.")
sys.exit()
if not isinstance(saveImage, bool):
print("Error: Save image is not valid. Choose either \"True\" or \"False\".")
sys.exit()
if not isinstance(imageFileName, str):
print("Error: Image file name is not valid. Choose a string value.")
sys.exit()
if not isinstance(dpi, float):
if not isinstance(dpi, int):
print("Error: Dpi is not valid. Choose a float or integer value.")
sys.exit()
return plot_type, column_numbers, log_data, scale_data, impute_data, style, transparent, figSize, fontSize, colour_palette, y_axis_label, x_axis_rotation, group_column_name, point_estimator, point_ci, violin_distribution_type, violin_width_scale, box_iqr, saveImage, imageFileName, dpi
def __checkData(self, df):
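        """Warn if the supplied data table is not a pandas DataFrame."""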
if not isinstance(df, pd.DataFrame):
print("Error: A dataframe was not entered. Please check your data.")
return df
def __checkPeakTable(self, PeakTable):
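        """Check that the Peak Table contains the required Name and Label columns, then rebuild a clean Idx index."""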
if "Name" not in PeakTable.columns:
print("Error: \"Name\" column not in Peak Table. Please check your data.")
sys.exit()
if "Label" not in PeakTable.columns:
print("Error: \"Label\" column not in Peak Table. Please check your data.")
sys.exit()
# Do not assume the peaks/nodes have been indexed correctly. Remove any index columns and reindex.
column_list = [column.lower() for column in PeakTable.columns]
if 'idx' in column_list:
index = column_list.index('idx')
column_name = PeakTable.columns[index]
PeakTable = PeakTable.drop(columns=[column_name])
if 'index' in column_list:
index = column_list.index('index')
column_name = PeakTable.columns[index]
PeakTable = PeakTable.drop(columns=[column_name])
PeakTable = PeakTable.reset_index(drop=True)
PeakTable.index.name = 'Idx'
PeakTable = PeakTable.reset_index()
return PeakTable | [
"copy.deepcopy",
"matplotlib.pyplot.savefig",
"collections.Counter",
"seaborn.boxplot",
"matplotlib.pyplot.style.context",
"seaborn.violinplot",
"sys.exit",
"pandas.DataFrame",
"seaborn.pointplot",
"pandas.concat",
"seaborn.swarmplot",
"matplotlib.pyplot.show"
]
| [((6879, 6910), 'copy.deepcopy', 'copy.deepcopy', (['self.__datatable'], {}), '(self.__datatable)\n', (6892, 6910), False, 'import copy\n'), ((9401, 9432), 'pandas.DataFrame', 'pd.DataFrame', (['X'], {'columns': 'labels'}), '(X, columns=labels)\n', (9413, 9432), True, 'import pandas as pd\n'), ((9597, 9621), 'matplotlib.pyplot.style.context', 'plt.style.context', (['style'], {}), '(style)\n', (9614, 9621), True, 'import matplotlib.pyplot as plt\n'), ((24435, 24445), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (24443, 24445), True, 'import matplotlib.pyplot as plt\n'), ((25285, 25295), 'sys.exit', 'sys.exit', ([], {}), '()\n', (25293, 25295), False, 'import sys\n'), ((25438, 25448), 'sys.exit', 'sys.exit', ([], {}), '()\n', (25446, 25448), False, 'import sys\n'), ((25616, 25626), 'sys.exit', 'sys.exit', ([], {}), '()\n', (25624, 25626), False, 'import sys\n'), ((26383, 26393), 'sys.exit', 'sys.exit', ([], {}), '()\n', (26391, 26393), False, 'import sys\n'), ((27190, 27200), 'sys.exit', 'sys.exit', ([], {}), '()\n', (27198, 27200), False, 'import sys\n'), ((27851, 27861), 'sys.exit', 'sys.exit', ([], {}), '()\n', (27859, 27861), False, 'import sys\n'), ((28274, 28284), 'sys.exit', 'sys.exit', ([], {}), '()\n', (28282, 28284), False, 'import sys\n'), ((28423, 28433), 'sys.exit', 'sys.exit', ([], {}), '()\n', (28431, 28433), False, 'import sys\n'), ((29979, 29989), 'sys.exit', 'sys.exit', ([], {}), '()\n', (29987, 29989), False, 'import sys\n'), ((30653, 30663), 'sys.exit', 'sys.exit', ([], {}), '()\n', (30661, 30663), False, 'import sys\n'), ((32335, 32345), 'sys.exit', 'sys.exit', ([], {}), '()\n', (32343, 32345), False, 'import sys\n'), ((32487, 32497), 'sys.exit', 'sys.exit', ([], {}), '()\n', (32495, 32497), False, 'import sys\n'), ((33350, 33360), 'sys.exit', 'sys.exit', ([], {}), '()\n', (33358, 33360), False, 'import sys\n'), ((33507, 33517), 'sys.exit', 'sys.exit', ([], {}), '()\n', (33515, 33517), False, 'import sys\n'), ((3901, 3931), 'pandas.concat', 'pd.concat', (['[meta, dat]'], {'axis': '(1)'}), '([meta, dat], axis=1)\n', (3910, 3931), True, 'import pandas as pd\n'), ((9531, 9559), 'pandas.concat', 'pd.concat', (['[meta, X]'], {'axis': '(1)'}), '([meta, X], axis=1)\n', (9540, 9559), True, 'import pandas as pd\n'), ((24340, 24426), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(plot_type + 'Plot' + imageFileName)"], {'dpi': 'dpi', 'transparent': 'transparent'}), "(plot_type + 'Plot' + imageFileName, dpi=dpi, transparent=\n transparent)\n", (24351, 24426), True, 'import matplotlib.pyplot as plt\n'), ((25868, 25878), 'sys.exit', 'sys.exit', ([], {}), '()\n', (25876, 25878), False, 'import sys\n'), ((26192, 26202), 'sys.exit', 'sys.exit', ([], {}), '()\n', (26200, 26202), False, 'import sys\n'), ((26645, 26655), 'sys.exit', 'sys.exit', ([], {}), '()\n', (26653, 26655), False, 'import sys\n'), ((27006, 27016), 'sys.exit', 'sys.exit', ([], {}), '()\n', (27014, 27016), False, 'import sys\n'), ((27447, 27457), 'sys.exit', 'sys.exit', ([], {}), '()\n', (27455, 27457), False, 'import sys\n'), ((28103, 28113), 'sys.exit', 'sys.exit', ([], {}), '()\n', (28111, 28113), False, 'import sys\n'), ((28921, 28931), 'sys.exit', 'sys.exit', ([], {}), '()\n', (28929, 28931), False, 'import sys\n'), ((29128, 29138), 'sys.exit', 'sys.exit', ([], {}), '()\n', (29136, 29138), False, 'import sys\n'), ((29557, 29567), 'sys.exit', 'sys.exit', ([], {}), '()\n', (29565, 29567), False, 'import sys\n'), ((29794, 29804), 'sys.exit', 'sys.exit', ([], {}), '()\n', (29802, 29804), False, 'import 
sys\n'), ((30191, 30201), 'sys.exit', 'sys.exit', ([], {}), '()\n', (30199, 30201), False, 'import sys\n'), ((30893, 30903), 'sys.exit', 'sys.exit', ([], {}), '()\n', (30901, 30903), False, 'import sys\n'), ((31606, 31616), 'sys.exit', 'sys.exit', ([], {}), '()\n', (31614, 31616), False, 'import sys\n'), ((31845, 31855), 'sys.exit', 'sys.exit', ([], {}), '()\n', (31853, 31855), False, 'import sys\n'), ((32678, 32688), 'sys.exit', 'sys.exit', ([], {}), '()\n', (32686, 32688), False, 'import sys\n'), ((27709, 27719), 'sys.exit', 'sys.exit', ([], {}), '()\n', (27717, 27719), False, 'import sys\n'), ((29360, 29370), 'sys.exit', 'sys.exit', ([], {}), '()\n', (29368, 29370), False, 'import sys\n'), ((30435, 30445), 'sys.exit', 'sys.exit', ([], {}), '()\n', (30443, 30445), False, 'import sys\n'), ((31174, 31184), 'sys.exit', 'sys.exit', ([], {}), '()\n', (31182, 31184), False, 'import sys\n'), ((32177, 32187), 'sys.exit', 'sys.exit', ([], {}), '()\n', (32185, 32187), False, 'import sys\n'), ((4236, 4251), 'collections.Counter', 'Counter', (['labels'], {}), '(labels)\n', (4243, 4251), False, 'from collections import Counter\n'), ((8387, 8397), 'sys.exit', 'sys.exit', ([], {}), '()\n', (8395, 8397), False, 'import sys\n'), ((10003, 10169), 'seaborn.pointplot', 'sns.pointplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'estimator': 'np.nanmean', 'capsize': '(0.1)', 'ci': 'point_ci', 'palette': 'colour_palette', 'ax': 'axes.flat[peak_index]'}), '(data=datatable, x=group_column_name, y=peak, estimator=np.\n nanmean, capsize=0.1, ci=point_ci, palette=colour_palette, ax=axes.flat\n [peak_index])\n', (10016, 10169), True, 'import seaborn as sns\n'), ((15428, 15617), 'seaborn.violinplot', 'sns.violinplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'linewidth': '(1)', 'inner': 'violin_distribution_type', 'scale': 'violin_width_scale', 'palette': 'colour_palette', 'ax': 'axes.flat[peak_index]'}), '(data=datatable, x=group_column_name, y=peak, linewidth=1,\n inner=violin_distribution_type, scale=violin_width_scale, palette=\n colour_palette, ax=axes.flat[peak_index])\n', (15442, 15617), True, 'import seaborn as sns\n'), ((28714, 28724), 'sys.exit', 'sys.exit', ([], {}), '()\n', (28722, 28724), False, 'import sys\n'), ((9205, 9215), 'sys.exit', 'sys.exit', ([], {}), '()\n', (9213, 9215), False, 'import sys\n'), ((10302, 10470), 'seaborn.pointplot', 'sns.pointplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'estimator': 'np.nanmedian', 'capsize': '(0.1)', 'ci': 'point_ci', 'palette': 'colour_palette', 'ax': 'axes.flat[peak_index]'}), '(data=datatable, x=group_column_name, y=peak, estimator=np.\n nanmedian, capsize=0.1, ci=point_ci, palette=colour_palette, ax=axes.\n flat[peak_index])\n', (10315, 10470), True, 'import seaborn as sns\n'), ((10586, 10596), 'sys.exit', 'sys.exit', ([], {}), '()\n', (10594, 10596), False, 'import sys\n'), ((17203, 17328), 'seaborn.boxplot', 'sns.boxplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'palette': 'colour_palette', 'whis': 'box_iqr', 'ax': 'axes.flat[peak_index]'}), '(data=datatable, x=group_column_name, y=peak, palette=\n colour_palette, whis=box_iqr, ax=axes.flat[peak_index])\n', (17214, 17328), True, 'import seaborn as sns\n'), ((18920, 19042), 'seaborn.swarmplot', 'sns.swarmplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'size': '(10)', 'palette': 'colour_palette', 'ax': 'axes.flat[peak_index]'}), '(data=datatable, x=group_column_name, y=peak, 
size=10, palette\n =colour_palette, ax=axes.flat[peak_index])\n', (18933, 19042), True, 'import seaborn as sns\n'), ((20640, 20809), 'seaborn.violinplot', 'sns.violinplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'linewidth': '(1)', 'inner': 'None', 'scale': 'violin_width_scale', 'palette': 'colour_palette', 'ax': 'axes.flat[peak_index]'}), '(data=datatable, x=group_column_name, y=peak, linewidth=1,\n inner=None, scale=violin_width_scale, palette=colour_palette, ax=axes.\n flat[peak_index])\n', (20654, 20809), True, 'import seaborn as sns\n'), ((20826, 20947), 'seaborn.swarmplot', 'sns.swarmplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'color': '"""white"""', 'edgecolor': '"""gray"""', 'ax': 'axes.flat[peak_index]'}), "(data=datatable, x=group_column_name, y=peak, color='white',\n edgecolor='gray', ax=axes.flat[peak_index])\n", (20839, 20947), True, 'import seaborn as sns\n'), ((22543, 22667), 'seaborn.boxplot', 'sns.boxplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'palette': 'colour_palette', 'whis': 'np.inf', 'ax': 'axes.flat[peak_index]'}), '(data=datatable, x=group_column_name, y=peak, palette=\n colour_palette, whis=np.inf, ax=axes.flat[peak_index])\n', (22554, 22667), True, 'import seaborn as sns\n'), ((22688, 22790), 'seaborn.swarmplot', 'sns.swarmplot', ([], {'data': 'datatable', 'x': 'group_column_name', 'y': 'peak', 'color': '"""0.2"""', 'ax': 'axes.flat[peak_index]'}), "(data=datatable, x=group_column_name, y=peak, color='0.2', ax=\n axes.flat[peak_index])\n", (22701, 22790), True, 'import seaborn as sns\n')] |