Dataset schema (one record per source file):

| column | dtype | range |
| --- | --- | --- |
| hexsha | string | length 40 |
| size | int64 | 5 – 2.06M |
| ext | string | 11 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 – 251 |
| max_stars_repo_name | string | length 4 – 130 |
| max_stars_repo_head_hexsha | string | length 40 – 78 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 (nullable) | 1 – 191k |
| max_stars_repo_stars_event_min_datetime | string (nullable) | length 24 |
| max_stars_repo_stars_event_max_datetime | string (nullable) | length 24 |
| max_issues_repo_path | string | length 3 – 251 |
| max_issues_repo_name | string | length 4 – 130 |
| max_issues_repo_head_hexsha | string | length 40 – 78 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 (nullable) | 1 – 116k |
| max_issues_repo_issues_event_min_datetime | string (nullable) | length 24 |
| max_issues_repo_issues_event_max_datetime | string (nullable) | length 24 |
| max_forks_repo_path | string | length 3 – 251 |
| max_forks_repo_name | string | length 4 – 130 |
| max_forks_repo_head_hexsha | string | length 40 – 78 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 (nullable) | 1 – 105k |
| max_forks_repo_forks_event_min_datetime | string (nullable) | length 24 |
| max_forks_repo_forks_event_max_datetime | string (nullable) | length 24 |
| content | string | length 1 – 1.05M |
| avg_line_length | float64 | 1 – 1.02M |
| max_line_length | int64 | 3 – 1.04M |
| alphanum_fraction | float64 | 0 – 1 |

Each record below lists its metadata first, then the content field, then the per-file statistics.
hexsha: d5e2b817212060ef7c5fee7505c4febd057adc71 | size: 5,827 | ext: py | lang: Python
max_stars: collection/cp/algorithms-master/python/binary_tree.py | daemonslayer/Notebook | a9880be9bd86955afd6b8f7352822bc18673eda3 | ["Apache-2.0"] | count: 1 | 2019-03-24T13:12:01.000Z – 2019-03-24T13:12:01.000Z
max_issues: collection/cp/algorithms-master/python/binary_tree.py | daemonslayer/Notebook | a9880be9bd86955afd6b8f7352822bc18673eda3 | ["Apache-2.0"] | count: null
max_forks: collection/cp/algorithms-master/python/binary_tree.py | daemonslayer/Notebook | a9880be9bd86955afd6b8f7352822bc18673eda3 | ["Apache-2.0"] | count: null
content:
"""
Binary Tree and basic properties
1. In-Order Traversal
2. Pre-Order Traversal
3. Post-Order Traversal
4. Level-Order Traversal
"""
from collections import deque
def main():
"""
Tree Structure:
1
/ \
2 3
/ \
4 5
"""
tree = BinaryTree(1)
tree.left = BinaryTree(2)
tree.right = BinaryTree(3)
tree.left.left = BinaryTree(4)
tree.left.right = BinaryTree(5)
assert tree.inorder_traversal(write=False) == [4, 2, 5, 1, 3]
assert tree.preorder_traversal(write=False) == [1, 2, 4, 5, 3]
assert tree.postorder_traversal(write=False) == [4, 5, 2, 3, 1]
assert tree.levelorder_traversal(write=False) == [1, 2, 3, 4, 5]
if __name__ == '__main__':
main()
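# --- Added sketch (not part of the original record) ---
# The snippet above asserts against a BinaryTree class whose definition is not
# included in this record's content. The class below is a minimal, assumed
# reconstruction based only on the docstring and the assertions in main();
# method names and the write= keyword come from the calls above, everything
# else is a guess. In the original file such a class would precede main().
class BinaryTree:
    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None

    def inorder_traversal(self, write=True):
        # left subtree, node, right subtree
        result = []
        if self.left:
            result.extend(self.left.inorder_traversal(write=False))
        result.append(self.value)
        if self.right:
            result.extend(self.right.inorder_traversal(write=False))
        if write:
            print(result)
        return result

    def preorder_traversal(self, write=True):
        # node, left subtree, right subtree
        result = [self.value]
        if self.left:
            result.extend(self.left.preorder_traversal(write=False))
        if self.right:
            result.extend(self.right.preorder_traversal(write=False))
        if write:
            print(result)
        return result

    def postorder_traversal(self, write=True):
        # left subtree, right subtree, node
        result = []
        if self.left:
            result.extend(self.left.postorder_traversal(write=False))
        if self.right:
            result.extend(self.right.postorder_traversal(write=False))
        result.append(self.value)
        if write:
            print(result)
        return result

    def levelorder_traversal(self, write=True):
        # breadth-first traversal using the deque imported above
        result, queue = [], deque([self])
        while queue:
            node = queue.popleft()
            result.append(node.value)
            if node.left:
                queue.append(node.left)
            if node.right:
                queue.append(node.right)
        if write:
            print(result)
        return result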
avg_line_length: 33.488506 | max_line_length: 120 | alphanum_fraction: 0.579372

hexsha: d5e30ec5517ff0e5f54798d022557ddc8306de32 | size: 445 | ext: py | lang: Python
max_stars: custom_components/vaddio_conferenceshot/const.py | rohankapoorcom/vaddio_conferenceshot | 71744710df10f77e21e9e7568e3f6c7175b0d11d | ["Apache-2.0"] | count: null
max_issues: custom_components/vaddio_conferenceshot/const.py | rohankapoorcom/vaddio_conferenceshot | 71744710df10f77e21e9e7568e3f6c7175b0d11d | ["Apache-2.0"] | count: null
max_forks: custom_components/vaddio_conferenceshot/const.py | rohankapoorcom/vaddio_conferenceshot | 71744710df10f77e21e9e7568e3f6c7175b0d11d | ["Apache-2.0"] | count: null
content:
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PATH, CONF_USERNAME
DOMAIN = "vaddio_conferenceshot"
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
}
)
SERVICE_RECALL_PRESET = "move_to_preset"
ATTR_PRESET_ID = "preset"
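# --- Added usage sketch (not part of the original record) ---
# DATA_SCHEMA above is a voluptuous schema, so calling it validates a mapping
# and returns it; a missing required key or a non-string value raises
# vol.MultipleInvalid. The host/username/password values below are hypothetical.
if __name__ == "__main__":
    validated = DATA_SCHEMA(
        {
            CONF_HOST: "192.168.1.50",
            CONF_USERNAME: "admin",
            CONF_PASSWORD: "secret",
        }
    )
    print(validated)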
avg_line_length: 24.722222 | max_line_length: 82 | alphanum_fraction: 0.750562

hexsha: d5e3869d32d3fe51b72766bc724a95897a33b8c9 | size: 32,841 | ext: py | lang: Python
max_stars: lightonml/opu.py | lightonai/lightonml | 451327cccecdca4e8ec65df30f30d3fd8ad2194f | ["Apache-2.0"] | count: 27 | 2021-02-24T15:37:20.000Z – 2022-01-12T00:28:22.000Z
max_issues: lightonml/opu.py | lightonai/lightonml | 451327cccecdca4e8ec65df30f30d3fd8ad2194f | ["Apache-2.0"] | count: 4 | 2021-02-26T12:58:21.000Z – 2021-09-10T09:54:49.000Z
max_forks: lightonml/opu.py | lightonai/lightonml | 451327cccecdca4e8ec65df30f30d3fd8ad2194f | ["Apache-2.0"] | count: 9 | 2021-02-26T15:58:32.000Z – 2021-06-21T09:18:48.000Z
content:
# Copyright (c) 2020 LightOn, All Rights Reserved.
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
"""
This module contains the OPU class
"""
import time
from math import sqrt
import pkg_resources
from lightonml.encoding.base import NoEncoding, NoDecoding
import warnings
from typing import Optional, Union, Tuple, TYPE_CHECKING
import numpy as np
from contextlib import ExitStack
import attr
import inspect
import lightonml
from lightonml.internal.config import get_host_option, opu_version
from lightonml.internal import config, output_roi, utils, types
from lightonml.internal.user_input import OpuUserInput, InputTraits
from lightonml.internal.simulated_device import SimulatedOpuDevice
from lightonml.context import ContextArray
from lightonml.internal.settings import OpuSettings, TransformSettings
from lightonml.internal.runner import TransformRunner, FitTransformRunner
from lightonml.internal.types import InputRoiStrategy, IntOrTuple, TransformOutput, AcqState
from lightonml.types import OutputRescaling
# Import lightonopu only for typechecking, as it's an optional module and may not be present
if TYPE_CHECKING:
from lightonopu.internal.device import OpuDevice
# noinspection PyPep8Naming
def version(self, devices=False):
"""Returns a multi-line string containing name and versions of the OPU"""
version = []
# Build OPU name
if not self._s.simulated:
version.append(opu_version(self.__opu_config))
# module version
version.append(f"lightonml version {lightonml.__version__}")
try:
# noinspection PyUnresolvedReferences
import lightonopu
version.append(f"lightonopu version {lightonopu.__version__}")
except ImportError:
pass
if devices:
version.append(self.device.versions())
return '\n'.join(version)
avg_line_length: 44.319838 | max_line_length: 114 | alphanum_fraction: 0.645595

hexsha: d5e4c8d6143747e9fa0113815e838834d857b208 | size: 1,022 | ext: py | lang: Python
max_stars: example/shovel/bar.py | demiurgestudios/shovel | 3db497164907d3765fae182959147d19064671c7 | ["MIT"] | count: 202 | 2015-01-12T13:47:29.000Z – 2022-02-09T19:13:36.000Z
max_issues: example/shovel/bar.py | demiurgestudios/shovel | 3db497164907d3765fae182959147d19064671c7 | ["MIT"] | count: 14 | 2017-04-09T17:04:53.000Z – 2021-05-16T11:08:34.000Z
max_forks: example/shovel/bar.py | demiurgestudios/shovel | 3db497164907d3765fae182959147d19064671c7 | ["MIT"] | count: 22 | 2015-09-11T18:35:10.000Z – 2021-05-16T11:04:56.000Z
content:
from shovel import task
avg_line_length: 27.621622 | max_line_length: 65 | alphanum_fraction: 0.614481

hexsha: d5e5a12f0690f68a0f2da693b51965dfe681eeea | size: 22,938 | ext: py | lang: Python
max_stars: scripts/external_libs/scapy-2.4.3/scapy/config.py | timgates42/trex-core | efe94752fcb2d0734c83d4877afe92a3dbf8eccd | ["Apache-2.0"] | count: 956 | 2015-06-24T15:04:55.000Z – 2022-03-30T06:25:04.000Z
max_issues: scripts/external_libs/scapy-2.4.3/scapy/config.py | angelyouyou/trex-core | fddf78584cae285d9298ef23f9f5c8725e16911e | ["Apache-2.0"] | count: 782 | 2015-09-20T15:19:00.000Z – 2022-03-31T23:52:05.000Z
max_forks: scripts/external_libs/scapy-2.4.3/scapy/config.py | angelyouyou/trex-core | fddf78584cae285d9298ef23f9f5c8725e16911e | ["Apache-2.0"] | count: 429 | 2015-06-27T19:34:21.000Z – 2022-03-23T11:02:51.000Z
content:
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Philippe Biondi <[email protected]>
# This program is published under a GPLv2 license
"""
Implementation of the configuration object.
"""
from __future__ import absolute_import
from __future__ import print_function
import functools
import os
import re
import time
import socket
import sys
from scapy import VERSION, base_classes
from scapy.consts import DARWIN, WINDOWS, LINUX, BSD, SOLARIS
from scapy.error import log_scapy, warning, ScapyInvalidPlatformException
from scapy.modules import six
from scapy.themes import NoTheme, apply_ipython_style
############
# Config #
############
def _readonly(name):
default = Conf.__dict__[name].default
Interceptor.set_from_hook(conf, name, default)
raise ValueError("Read-only value !")
ReadOnlyAttribute = functools.partial(
Interceptor,
hook=(lambda name, *args, **kwargs: _readonly(name))
)
ReadOnlyAttribute.__doc__ = "Read-only class attribute"
def lsc():
"""Displays Scapy's default commands"""
print(repr(conf.commands))
def _version_checker(module, minver):
"""Checks that module has a higher version that minver.
params:
- module: a module to test
- minver: a tuple of versions
"""
# We could use LooseVersion, but distutils imports imp which is deprecated
version_regexp = r'[a-z]?((?:\d|\.)+\d+)(?:\.dev[0-9]+)?'
version_tags = re.match(version_regexp, module.__version__)
if not version_tags:
return False
version_tags = version_tags.group(1).split(".")
version_tags = tuple(int(x) for x in version_tags)
return version_tags >= minver
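# --- Added usage sketch (not part of the original file) ---
# _version_checker works on any object exposing __version__; the stand-in
# namespace below is purely illustrative.
# from types import SimpleNamespace
# _version_checker(SimpleNamespace(__version__="2.6.1"), (1, 7))  # -> True
# _version_checker(SimpleNamespace(__version__="2.6.1"), (3, 0))  # -> False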
def isCryptographyValid():
"""
Check if the cryptography library is present, and if it is recent enough
for most usages in scapy (v1.7 or later).
"""
try:
import cryptography
except ImportError:
return False
return _version_checker(cryptography, (1, 7))
def isCryptographyRecent():
"""
Check if the cryptography library is recent (2.0 and later)
"""
try:
import cryptography
except ImportError:
return False
return _version_checker(cryptography, (2, 0))
def isCryptographyAdvanced():
"""
Check if the cryptography library is present, and if it supports X25519,
ChaCha20Poly1305 and such (v2.0 or later).
"""
try:
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey # noqa: E501
X25519PrivateKey.generate()
except Exception:
return False
else:
return True
def isPyPy():
"""Returns either scapy is running under PyPy or not"""
try:
import __pypy__ # noqa: F401
return True
except ImportError:
return False
def _prompt_changer(attr, val):
"""Change the current prompt theme"""
try:
sys.ps1 = conf.color_theme.prompt(conf.prompt)
except Exception:
pass
try:
apply_ipython_style(get_ipython())
except NameError:
pass
def _set_conf_sockets():
"""Populate the conf.L2Socket and conf.L3Socket
according to the various use_* parameters
"""
from scapy.main import _load
if conf.use_bpf and not BSD:
Interceptor.set_from_hook(conf, "use_bpf", False)
raise ScapyInvalidPlatformException("BSD-like (OSX, *BSD...) only !")
if not conf.use_pcap and SOLARIS:
Interceptor.set_from_hook(conf, "use_pcap", True)
raise ScapyInvalidPlatformException(
"Scapy only supports libpcap on Solaris !"
)
# we are already in an Interceptor hook, use Interceptor.set_from_hook
if conf.use_pcap or conf.use_dnet:
try:
from scapy.arch.pcapdnet import L2pcapListenSocket, L2pcapSocket, \
L3pcapSocket
except (OSError, ImportError):
warning("No libpcap provider available ! pcap won't be used")
Interceptor.set_from_hook(conf, "use_pcap", False)
else:
conf.L3socket = L3pcapSocket
conf.L3socket6 = functools.partial(L3pcapSocket, filter="ip6")
conf.L2socket = L2pcapSocket
conf.L2listen = L2pcapListenSocket
# Update globals
_load("scapy.arch.pcapdnet")
return
if conf.use_bpf:
from scapy.arch.bpf.supersocket import L2bpfListenSocket, \
L2bpfSocket, L3bpfSocket
conf.L3socket = L3bpfSocket
conf.L3socket6 = functools.partial(L3bpfSocket, filter="ip6")
conf.L2socket = L2bpfSocket
conf.L2listen = L2bpfListenSocket
# Update globals
_load("scapy.arch.bpf")
return
if LINUX:
from scapy.arch.linux import L3PacketSocket, L2Socket, L2ListenSocket
conf.L3socket = L3PacketSocket
conf.L3socket6 = functools.partial(L3PacketSocket, filter="ip6")
conf.L2socket = L2Socket
conf.L2listen = L2ListenSocket
# Update globals
_load("scapy.arch.linux")
return
if WINDOWS:
from scapy.arch.windows import _NotAvailableSocket
from scapy.arch.windows.native import L3WinSocket, L3WinSocket6
conf.L3socket = L3WinSocket
conf.L3socket6 = L3WinSocket6
conf.L2socket = _NotAvailableSocket
conf.L2listen = _NotAvailableSocket
# No need to update globals on Windows
return
from scapy.supersocket import L3RawSocket
from scapy.layers.inet6 import L3RawSocket6
conf.L3socket = L3RawSocket
conf.L3socket6 = L3RawSocket6
def _loglevel_changer(attr, val):
"""Handle a change of conf.logLevel"""
log_scapy.setLevel(val)
if not Conf.ipv6_enabled:
log_scapy.warning("IPv6 support disabled in Python. Cannot load Scapy IPv6 layers.") # noqa: E501
for m in ["inet6", "dhcp6"]:
if m in Conf.load_layers:
Conf.load_layers.remove(m)
conf = Conf()
def crypto_validator(func):
"""
This is a decorator to be used for any method relying on the cryptography library.  # noqa: E501
Its behaviour depends on the 'crypto_valid' attribute of the global 'conf'.
"""
return func_in
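# --- Added sketch (not part of the original record) ---
# The wrapper func_in returned above is not included in this record's content.
# A decorator of the shape described by the docstring could look like the
# following, assuming conf exposes a boolean crypto_valid attribute; the name
# crypto_validator_sketch and the error message are illustrative only.
def crypto_validator_sketch(func):
    @functools.wraps(func)
    def func_in(*args, **kwargs):
        if not getattr(conf, "crypto_valid", False):
            raise ImportError("Cannot execute crypto-related method: "
                              "python-cryptography v1.7 or later is required")
        return func(*args, **kwargs)
    return func_in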
avg_line_length: 33.584187 | max_line_length: 122 | alphanum_fraction: 0.624161

hexsha: d5e70f438163ee68472f800dcc1f45bfb446e30f | size: 5,797 | ext: py | lang: Python
max_stars: tests/base/test_server.py | Prodigy123/rasa_nlu_zh | b85717063a493f6b148504ee550a0642c6c379ae | ["Apache-2.0"] | count: 4 | 2017-07-20T03:06:29.000Z – 2021-04-20T03:25:17.000Z
max_issues: tests/base/test_server.py | imsakshi/rasa_nlu | 6dafc37825b99139248fdea9e9745f416734d4dd | ["Apache-2.0"] | count: null
max_forks: tests/base/test_server.py | imsakshi/rasa_nlu | 6dafc37825b99139248fdea9e9745f416734d4dd | ["Apache-2.0"] | count: 2 | 2017-10-03T00:56:22.000Z – 2018-08-15T10:41:41.000Z
content:
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import tempfile
import pytest
import time
from treq.testing import StubTreq
from rasa_nlu.config import RasaNLUConfig
import json
import io
from tests import utilities
from tests.utilities import ResponseTest
from rasa_nlu.server import RasaNLU
avg_line_length: 32.205556 | max_line_length: 104 | alphanum_fraction: 0.656719

hexsha: d5e7507528f57c95fde0e247aa2531f1d8579112 | size: 15,277 | ext: py | lang: Python
max_stars: bugsnag/configuration.py | ForroKulcs/bugsnag-python | 107c1add31a2202cc08ef944aa00ab96996b247a | ["MIT"] | count: null
max_issues: bugsnag/configuration.py | ForroKulcs/bugsnag-python | 107c1add31a2202cc08ef944aa00ab96996b247a | ["MIT"] | count: null
max_forks: bugsnag/configuration.py | ForroKulcs/bugsnag-python | 107c1add31a2202cc08ef944aa00ab96996b247a | ["MIT"] | count: null
content:
import os
import platform
import socket
import sysconfig
from typing import List, Any, Tuple, Union
import warnings
from bugsnag.sessiontracker import SessionMiddleware
from bugsnag.middleware import DefaultMiddleware, MiddlewareStack
from bugsnag.utils import (fully_qualified_class_name, validate_str_setter,
validate_bool_setter, validate_iterable_setter,
validate_required_str_setter)
from bugsnag.delivery import (create_default_delivery, DEFAULT_ENDPOINT,
DEFAULT_SESSIONS_ENDPOINT)
from bugsnag.uwsgi import warn_if_running_uwsgi_without_threads
try:
from contextvars import ContextVar
_request_info = ContextVar('bugsnag-request', default=None) # type: ignore
except ImportError:
from bugsnag.utils import ThreadContextVar
_request_info = ThreadContextVar('bugsnag-request', default=None) # type: ignore # noqa: E501
__all__ = ('Configuration', 'RequestConfiguration')
class RequestConfiguration:
"""
Per-request Bugsnag configuration settings.
"""
def __init__(self):
self.context = None
self.grouping_hash = None
self.user = {}
self.metadata = {}
# legacy fields
self.user_id = None
self.extra_data = {}
self.request_data = {}
self.environment_data = {}
self.session_data = {}
def get(self, name) -> Any:
"""
Get a single configuration option
"""
return getattr(self, name)
def configure(self, **options):
"""
Set one or more configuration settings.
"""
for name, value in options.items():
setattr(self, name, value)
return self
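# --- Added usage sketch (not part of the original record) ---
# configure() sets attributes via setattr and returns the instance, so calls
# can be chained; the context and user values below are hypothetical.
if __name__ == "__main__":
    request_config = RequestConfiguration().configure(
        context="/checkout",
        user={"id": "541", "name": "Ada"},
    )
    print(request_config.get("context"))  # -> /checkout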
avg_line_length: 31.695021 | max_line_length: 99 | alphanum_fraction: 0.636185

hexsha: d5e7f6433ef2aafee2885217cc2a65201e60c31e | size: 587 | ext: py | lang: Python
max_stars: secret_injector/secret.py | failk8s/failk8s-operator | 457890a09a2551b9002eec73386b11a37469569f | ["Apache-2.0"] | count: null
max_issues: secret_injector/secret.py | failk8s/failk8s-operator | 457890a09a2551b9002eec73386b11a37469569f | ["Apache-2.0"] | count: null
max_forks: secret_injector/secret.py | failk8s/failk8s-operator | 457890a09a2551b9002eec73386b11a37469569f | ["Apache-2.0"] | count: null
content:
import kopf
from .functions import global_logger, reconcile_secret
avg_line_length: 30.894737 | max_line_length: 67 | alphanum_fraction: 0.67632

hexsha: d5e86c6edc684a9da3a98d63325e3f3c6ab77abb | size: 25,390 | ext: py | lang: Python
max_stars: src/py/gee/utils.py | openforis/collectearthonline | 1af48e373c393a1d8c48b17472f6aa6c41f65769 | ["MIT"] | count: null
max_issues: src/py/gee/utils.py | openforis/collectearthonline | 1af48e373c393a1d8c48b17472f6aa6c41f65769 | ["MIT"] | count: null
max_forks: src/py/gee/utils.py | openforis/collectearthonline | 1af48e373c393a1d8c48b17472f6aa6c41f65769 | ["MIT"] | count: null
content:
import datetime
import os
import ee
import math
import sys
import json
from ee.ee_exception import EEException
from gee.inputs import getLandsat, getS1
########## Helper functions ##########
########## Helper routes ##########
########## ee.Image ##########
########## ee.ImageCollection ##########
# TODO, should we allow user to select first cloud free image again?
########## ee.FeatureCollection ##########
########## Pre defined ee.ImageCollection ##########
# Index Image Collection
########## Time Series ##########
########## Degradation##########
########## Stats ##########
avg_line_length: 38.704268 | max_line_length: 130 | alphanum_fraction: 0.605593

hexsha: d5e8cedec4a5704ab1636f88d9b806e93b86ff8a | size: 1,186 | ext: py | lang: Python
max_stars: userManagement/management/urls.py | shubhamguptaorg/user_managementl | ad98e0e4886d9b0547b05ae424c10d8f6268d470 | ["MIT"] | count: null
max_issues: userManagement/management/urls.py | shubhamguptaorg/user_managementl | ad98e0e4886d9b0547b05ae424c10d8f6268d470 | ["MIT"] | count: 4 | 2021-03-19T03:22:44.000Z – 2022-03-11T23:58:10.000Z
max_forks: userManagement/management/urls.py | shubhamguptaorg/user_managementl | ad98e0e4886d9b0547b05ae424c10d8f6268d470 | ["MIT"] | count: null
content:
from django.contrib import admin
from django.urls import path,include
from django.views.generic import TemplateView
from .views import Index,SignUp,UserDashboard,AdminDashboard,logout,showAdminData,deleteuser,activeUser,deactiveUser,UserDetailEdit,uploadImage
# from .views import Index,UserDashboard,SignUp,AdminDashboard
app_name='management'
urlpatterns = [
# path('',homepage,name="index"),
path('',Index.as_view(), name='index'),
path('signup',SignUp.as_view(),name="signup"),
path('userdashboard',UserDashboard.as_view(),name="userDashboard"),
path('admindashboard',AdminDashboard.as_view(),name="adminDashboard"),
path('admindashboard/showuserdata/',showAdminData.as_view(),name='showAdminData'),
path('admindashboard/showuserdata/deleteuser/<userId>',deleteuser,name='deleteuser'),
path('admindashboard/showuserdata/activeUser/<userId>', activeUser, name='activeUser'),
path('admindashboard/showuserdata/deactiveUser/<userId>', deactiveUser, name='deactiveUser'),
path('uploadimage/',uploadImage,name="uploadImage"),
path('editUserDetail/',UserDetailEdit.as_view(),name='userEditDetail'),
path('logout',logout,name='logout')
]
avg_line_length: 49.416667 | max_line_length: 143 | alphanum_fraction: 0.764755

hexsha: d5e96b9312873b5f396a18010caddd4d11bd8888 | size: 16,962 | ext: py | lang: Python
max_stars: sickbeard/lib/hachoir_parser/container/riff.py | Branlala/docker-sickbeardfr | 3ac85092dc4cc8a4171fb3c83e9682162245e13e | ["MIT"] | count: null
max_issues: sickbeard/lib/hachoir_parser/container/riff.py | Branlala/docker-sickbeardfr | 3ac85092dc4cc8a4171fb3c83e9682162245e13e | ["MIT"] | count: null
max_forks: sickbeard/lib/hachoir_parser/container/riff.py | Branlala/docker-sickbeardfr | 3ac85092dc4cc8a4171fb3c83e9682162245e13e | ["MIT"] | count: null
content:
# -*- coding: UTF-8 -*-
"""
RIFF parser, able to parse:
* AVI video container
* WAV audio container
* CDA file
Documents:
- libavformat source code from ffmpeg library
http://ffmpeg.mplayerhq.hu/
- Video for Windows Programmer's Guide
http://www.opennet.ru/docs/formats/avi.txt
- What is an animated cursor?
http://www.gdgsoft.com/anituner/help/aniformat.htm
Authors:
* Aurélien Jacobs
* Mickaël KENIKSSI
* Victor Stinner
Changelog:
* 2007-03-30: support ACON (animated icons)
* 2006-08-08: merge AVI, WAV and CDA parsers into RIFF parser
* 2006-08-03: creation of CDA parser by Mickaël KENIKSSI
* 2005-06-21: creation of WAV parser by Victor Stinner
* 2005-06-08: creation of AVI parser by Victor Stinner and Aurlien Jacobs
Thanks to:
* Wojtek Kaniewski (wojtekka AT logonet.com.pl) for its CDA file
format information
"""
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, Enum,
Bit, NullBits, NullBytes,
RawBytes, String, PaddingBytes,
SubFile)
from lib.hachoir_core.tools import alignValue, humanDuration
from lib.hachoir_core.endian import LITTLE_ENDIAN
from lib.hachoir_core.text_handler import filesizeHandler, textHandler
from lib.hachoir_parser.video.fourcc import audio_codec_name, video_fourcc_name
from lib.hachoir_parser.image.ico import IcoFile
from datetime import timedelta
def formatSerialNumber(field):
"""
Format a disc serial number.
Eg. 0x00085C48 => "0008-5C48"
"""
sn = field.value
return "%04X-%04X" % (sn >> 16, sn & 0xFFFF)
def parseAnimationHeader(self):
yield UInt32(self, "hdr_size", "Size of header (36 bytes)")
if self["hdr_size"].value != 36:
self.warning("Animation header with unknown size (%s)" % self["size"].value)
yield UInt32(self, "nb_frame", "Number of unique Icons in this cursor")
yield UInt32(self, "nb_step", "Number of Blits before the animation cycles")
yield UInt32(self, "cx")
yield UInt32(self, "cy")
yield UInt32(self, "bit_count")
yield UInt32(self, "planes")
yield UInt32(self, "jiffie_rate", "Default Jiffies (1/60th of a second) if rate chunk not present")
yield Bit(self, "is_icon")
yield NullBits(self, "padding", 31)
def parseAnimationSequence(self):
while not self.eof:
yield UInt32(self, "icon[]")
avg_line_length: 38.55 | max_line_length: 103 | alphanum_fraction: 0.614432

hexsha: d5eae8227c1380d3fce1267b4a1949ca968db82b | size: 1,041 | ext: py | lang: Python
max_stars: Utils.py | MartinEngen/NaiveBayesianClassifier | a28813708a4d2adcdcd629e6d4d8b4f438a9c799 | ["MIT"] | count: null
max_issues: Utils.py | MartinEngen/NaiveBayesianClassifier | a28813708a4d2adcdcd629e6d4d8b4f438a9c799 | ["MIT"] | count: null
max_forks: Utils.py | MartinEngen/NaiveBayesianClassifier | a28813708a4d2adcdcd629e6d4d8b4f438a9c799 | ["MIT"] | count: null
content:
import os
import re
def get_subfolder_paths(folder_relative_path: str) -> list:
"""
Gets all subfolders of a given path
:param folder_relative_path: Relative path of folder to find subfolders of
:return: list of relative paths to any subfolders
"""
return [f.path for f in os.scandir(folder_relative_path) if f.is_dir()]
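# --- Added usage sketch (not part of the original record) ---
# Illustrative call: list the immediate subfolders of the current directory.
if __name__ == "__main__":
    print(get_subfolder_paths("."))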
avg_line_length: 30.617647 | max_line_length: 83 | alphanum_fraction: 0.659942

hexsha: d5eb56662663b212c6709a52f8fbe61a75880b3c | size: 800 | ext: py | lang: Python
max_stars: tools/ldbc_benchmark/neo4j/load_scripts/time_index.py | carlboudreau007/ecosys | d415143837a85ceb6213a0f0588128a86a4a3984 | ["Apache-2.0"] | count: 245 | 2018-04-07T00:14:56.000Z – 2022-03-28T05:51:35.000Z
max_issues: tools/ldbc_benchmark/neo4j/load_scripts/time_index.py | carlboudreau007/ecosys | d415143837a85ceb6213a0f0588128a86a4a3984 | ["Apache-2.0"] | count: 47 | 2018-04-02T16:41:22.000Z – 2022-03-24T01:40:46.000Z
max_forks: tools/ldbc_benchmark/neo4j/load_scripts/time_index.py | carlboudreau007/ecosys | d415143837a85ceb6213a0f0588128a86a4a3984 | ["Apache-2.0"] | count: 140 | 2018-08-09T15:54:47.000Z – 2022-03-30T12:44:48.000Z
content:
from datetime import datetime
with open('/home/neo4j/neo4j-community-3.5.1/logs/debug.log', 'r') as log:
begin = []
end = []
for line in log:
if 'Index population started' in line:
begin.append(line[:23])
elif 'Index creation finished' in line:
end.append(line[:23])
if len(begin) == 0 or len(begin) > 9:
print("Something went wrong. Please check debug.log")
elif len(begin) != len(end):
print("{}/{} Done. Please come back later.".format(len(end), len(begin)))
else:
elapsed_time = 0
for i in range(len(begin)):
begin_tmp = datetime.strptime(begin[i], '%Y-%m-%d %H:%M:%S.%f')
end_tmp = datetime.strptime(end[i],'%Y-%m-%d %H:%M:%S.%f')
elapsed_time += (end_tmp-begin_tmp).total_seconds()
print("Done in {} s".format(elapsed_time))
avg_line_length: 34.782609 | max_line_length: 77 | alphanum_fraction: 0.6175

hexsha: d5ec93a99d9c113668c2693c8d65499328f692cd | size: 1,489 | ext: py | lang: Python
max_stars: zf-setup.py | Ziki2001/new-school-sdk | b606e666888e1c9813e2f1a6a64bbede3744026e | ["MIT"] | count: null
max_issues: zf-setup.py | Ziki2001/new-school-sdk | b606e666888e1c9813e2f1a6a64bbede3744026e | ["MIT"] | count: null
max_forks: zf-setup.py | Ziki2001/new-school-sdk | b606e666888e1c9813e2f1a6a64bbede3744026e | ["MIT"] | count: null
content:
# -*- coding: utf-8 -*-
'''
:file: setup.py
:author: -Farmer
:url: https://blog.farmer233.top
:date: 2021/09/20 11:11:54
'''
from os import path
from setuptools import setup, find_packages
basedir = path.abspath(path.dirname(__file__))
with open(path.join(basedir, "README.md"), encoding='utf-8') as f:
long_description = f.read()
setup(
name="zf-school-sdk",
author="farmer.chillax",
version="1.3.2",
license='MIT',
author_email="[email protected]",
description="zf School SDK for Python",
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/Farmer-chong/new-school-sdk',
packages=find_packages(),
# package_data={},
package_data={"school_sdk": ['check_code/model.pkl']},
include_package_data=True,
platforms='any',
zip_safe=False,
install_requires=[
'requests',
'pyquery',
'bs4',
'Pillow',
'fake-headers',
'torch',
'torchvision',
],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.8',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
# python zf-setup.py bdist_wheel sdist
# twine upload dist/*
avg_line_length: 26.589286 | max_line_length: 70 | alphanum_fraction: 0.620551

hexsha: d5ecb68fc8ba51b00e1a946759c8f1a77d41211f | size: 1,635 | ext: py | lang: Python
max_stars: RunIt/airt/poker_cards.py | antx-code/funcode | a8a9b99274e169562771b488a3a9551277ef4b99 | ["MIT"] | count: 3 | 2021-09-27T08:07:07.000Z – 2022-03-11T04:46:30.000Z
max_issues: RunIt/airt/poker_cards.py | antx-code/funcode | a8a9b99274e169562771b488a3a9551277ef4b99 | ["MIT"] | count: null
max_forks: RunIt/airt/poker_cards.py | antx-code/funcode | a8a9b99274e169562771b488a3a9551277ef4b99 | ["MIT"] | count: null
content:
# Square => sq => RGB(Blue)
# Plum => pl => RGB(Green)
# Spade => sp => RGB(Black)
# Heart => he => RGB(Red)
init_poker = {
'local': {
'head': [-1, -1, -1],
'mid': [-1, -1, -1, -1, -1],
'tail': [-1, -1, -1, -1, -1],
'drop': [-1, -1, -1, -1],
'hand': [-1, -1, -1]
},
'player1': {
'head': [-1, -1, -1],
'mid': [-1, -1, -1, -1, -1],
'tail': [-1, -1, -1, -1, -1],
'drop': [-1, -1, -1, -1],
'hand': [-1, -1, -1]
},
'player2': {
'head': [-1, -1, -1],
'mid': [-1, -1, -1, -1, -1],
'tail': [-1, -1, -1, -1, -1],
'drop': [-1, -1, -1, -1],
'hand': [-1, -1, -1]
}
}
# Square
Blue = {
'2': 0,
'3': 1,
'4': 2,
'5': 3,
'6': 4,
'7': 5,
'8': 6,
'9': 7,
'10': 8,
'J': 9,
'Q': 10,
'K': 11,
'A': 12
}
# Plum
Green = {
'2': 13,
'3': 14,
'4': 15,
'5': 16,
'6': 17,
'7': 18,
'8': 19,
'9': 20,
'10': 21,
'J': 22,
'Q': 23,
'K': 24,
'A': 25
}
# Heart
Red = {
'2': 26,
'3': 27,
'4': 28,
'5': 29,
'6': 30,
'7': 31,
'8': 32,
'9': 33,
'10': 34,
'J': 35,
'Q': 36,
'K': 37,
'A': 38
}
# Spade
Black = {
'2': 39,
'3': 40,
'4': 41,
'5': 42,
'6': 43,
'7': 44,
'8': 45,
'9': 46,
'10': 47,
'J': 48,
'Q': 49,
'K': 50,
'A': 51
}
POKER_SCOPE = [
'2',
'3',
'4',
'5',
'6',
'7',
'8',
'9',
'10',
'J',
'Q',
'K',
'A'
]
avg_line_length: 14.469027 | max_line_length: 40 | alphanum_fraction: 0.263609

hexsha: d5edbea518993ed30402ca6ed7151f569ce035ff | size: 42 | ext: py | lang: Python
max_stars: main.py | reflective21/iportfolio | 39db626a9754c1df44ac698f3d8988fdc4e7c6d5 | ["MIT"] | count: null
max_issues: main.py | reflective21/iportfolio | 39db626a9754c1df44ac698f3d8988fdc4e7c6d5 | ["MIT"] | count: null
max_forks: main.py | reflective21/iportfolio | 39db626a9754c1df44ac698f3d8988fdc4e7c6d5 | ["MIT"] | count: null
content:
name = "David Asiru Adetomiwa"
print(name)
avg_line_length: 21 | max_line_length: 30 | alphanum_fraction: 0.761905

hexsha: d5edd2119227be04c5621c163a6292b04c441de0 | size: 10,716 | ext: py | lang: Python
max_stars: tcex/services/api_service.py | kdeltared/tcex | 818c0d09256764f871e42d9ca5916f92d941d882 | ["Apache-2.0"] | count: null
max_issues: tcex/services/api_service.py | kdeltared/tcex | 818c0d09256764f871e42d9ca5916f92d941d882 | ["Apache-2.0"] | count: null
max_forks: tcex/services/api_service.py | kdeltared/tcex | 818c0d09256764f871e42d9ca5916f92d941d882 | ["Apache-2.0"] | count: null
content:
"""TcEx Framework API Service module."""
# standard library
import json
import sys
import threading
import traceback
from io import BytesIO
from typing import Any
from .common_service import CommonService
avg_line_length: 38.271429 | max_line_length: 100 | alphanum_fraction: 0.535741

hexsha: d5ee43eaf3c3033dcd289654572ab9b3e0e7b99a | size: 1,540 | ext: py | lang: Python
max_stars: mmpose/core/optimizer/builder.py | vsatyakumar/mmpose | 2fffccb19dad3b59184b41be94653f75523b8585 | ["Apache-2.0"] | count: 1 | 2021-05-06T08:40:13.000Z – 2021-05-06T08:40:13.000Z
max_issues: mmpose/core/optimizer/builder.py | CV-IP/mmpose | 3ef8e6dbbeb6262b7ed6c51faa74b83c23f4c6a1 | ["Apache-2.0"] | count: null
max_forks: mmpose/core/optimizer/builder.py | CV-IP/mmpose | 3ef8e6dbbeb6262b7ed6c51faa74b83c23f4c6a1 | ["Apache-2.0"] | count: null
content:
from mmcv.runner import build_optimizer
def build_optimizers(model, cfgs):
"""Build multiple optimizers from configs.
If `cfgs` contains several dicts for optimizers, then a dict for each
constructed optimizers will be returned.
If `cfgs` only contains one optimizer config, the constructed optimizer
itself will be returned.
For example,
1) Multiple optimizer configs:
.. code-block:: python
optimizer_cfg = dict(
model1=dict(type='SGD', lr=lr),
model2=dict(type='SGD', lr=lr))
The return dict is
``dict('model1': torch.optim.Optimizer, 'model2': torch.optim.Optimizer)``
2) Single optimizer config:
.. code-block:: python
optimizer_cfg = dict(type='SGD', lr=lr)
The return is ``torch.optim.Optimizer``.
Args:
model (:obj:`nn.Module`): The model with parameters to be optimized.
cfgs (dict): The config dict of the optimizer.
Returns:
dict[:obj:`torch.optim.Optimizer`] | :obj:`torch.optim.Optimizer`:
The initialized optimizers.
"""
optimizers = {}
if hasattr(model, 'module'):
model = model.module
# determine whether 'cfgs' has several dicts for optimizers
if all(isinstance(v, dict) for v in cfgs.values()):
for key, cfg in cfgs.items():
cfg_ = cfg.copy()
module = getattr(model, key)
optimizers[key] = build_optimizer(module, cfg_)
return optimizers
else:
return build_optimizer(model, cfgs)
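# --- Added usage sketch (not part of the original record) ---
# Illustrative call with a hypothetical two-part model: each named submodule
# gets its own optimizer, matching case 1) in the docstring above.
if __name__ == "__main__":
    import torch.nn as nn

    class TwoPartModel(nn.Module):
        def __init__(self):
            super().__init__()
            self.model1 = nn.Linear(8, 4)
            self.model2 = nn.Linear(4, 2)

    cfgs = dict(
        model1=dict(type='SGD', lr=0.01),
        model2=dict(type='SGD', lr=0.001),
    )
    optimizers = build_optimizers(TwoPartModel(), cfgs)
    print(optimizers)  # {'model1': SGD(...), 'model2': SGD(...)}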
avg_line_length: 29.056604 | max_line_length: 78 | alphanum_fraction: 0.635065

hexsha: d5eeb5cec1758e31f96e4ef111f8b5ec32383697 | size: 1,189 | ext: py | lang: Python
max_stars: register/views.py | angel-vazquez25/My-Backlog-Handler | 60880cfc6bcc5a7fb2d5c752c11bdfe741f76531 | ["MIT"] | count: 3 | 2021-05-11T20:56:41.000Z – 2022-03-15T01:26:13.000Z
max_issues: register/views.py | angel-vazquez25/My-Backlog-Handler | 60880cfc6bcc5a7fb2d5c752c11bdfe741f76531 | ["MIT"] | count: null
max_forks: register/views.py | angel-vazquez25/My-Backlog-Handler | 60880cfc6bcc5a7fb2d5c752c11bdfe741f76531 | ["MIT"] | count: 1 | 2021-05-19T15:46:32.000Z – 2021-05-19T15:46:32.000Z
content:
import datetime
from django.contrib.auth import logout
from django.shortcuts import render, redirect
from .forms import RegisterForm
from django.http import HttpResponse
from django.contrib.auth.forms import AuthenticationForm
from django.conf import settings
from django.contrib.auth import authenticate, login
from django.http import HttpResponseRedirect
from django.contrib import messages
# Create your views here.
avg_line_length: 33.971429 | max_line_length: 91 | alphanum_fraction: 0.641716

hexsha: d5eefeb4c414f13bc2793346ebb57b29f5de79db | size: 572 | ext: py | lang: Python
max_stars: forum/migrations/0001_initial.py | Aerodlyn/mu | 2c3b95e5a83d0f651dd8ad287b471803e1fec3a1 | ["MIT"] | count: 1 | 2021-06-25T22:27:39.000Z – 2021-06-25T22:27:39.000Z
max_issues: forum/migrations/0001_initial.py | Aerodlyn/mu | 2c3b95e5a83d0f651dd8ad287b471803e1fec3a1 | ["MIT"] | count: 1 | 2022-03-12T00:55:31.000Z – 2022-03-12T00:55:31.000Z
max_forks: forum/migrations/0001_initial.py | Aerodlyn/mu | 2c3b95e5a83d0f651dd8ad287b471803e1fec3a1 | ["MIT"] | count: null
content:
# Generated by Django 3.1.7 on 2021-03-26 01:27
from django.db import migrations, models
avg_line_length: 23.833333 | max_line_length: 93 | alphanum_fraction: 0.552448

hexsha: d5efef002e68abbec6057f8677301ab26bdc9a66 | size: 16,846 | ext: py | lang: Python
max_stars: custom_train.py | shirley-wu/text_to_table | 44cb100b8ff2543b5b4efe1461502c00c34ef846 | ["MIT"] | count: 3 | 2022-03-17T05:55:23.000Z – 2022-03-30T08:34:14.000Z
max_issues: custom_train.py | shirley-wu/text_to_table | 44cb100b8ff2543b5b4efe1461502c00c34ef846 | ["MIT"] | count: 1 | 2022-03-30T09:04:54.000Z – 2022-03-30T09:04:54.000Z
max_forks: custom_train.py | shirley-wu/text_to_table | 44cb100b8ff2543b5b4efe1461502c00c34ef846 | ["MIT"] | count: null
content:
#!/usr/bin/env python3 -u
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Train a new model on one or across multiple GPUs.
"""
import collections
import logging
import math
import os
import sys
import numpy as np
import torch
from fairseq import (
checkpoint_utils,
distributed_utils,
options,
quantization_utils,
tasks,
utils,
)
from fairseq import meters
from fairseq.checkpoint_utils import checkpoint_paths
from fairseq.data import iterators
from fairseq.file_io import PathManager
from fairseq.logging import metrics, progress_bar
from fairseq.model_parallel.megatron_trainer import MegatronTrainer
from fairseq.trainer import Trainer
logging.basicConfig(
format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
level=os.environ.get("LOGLEVEL", "INFO").upper(),
stream=sys.stdout,
)
logger = logging.getLogger("fairseq_cli.train")
def validate(args, trainer, task, epoch_itr, subsets, saver):
"""Evaluate the model on the validation set(s) and return the losses."""
if args.fixed_validation_seed is not None:
# set fixed seed for every validation
utils.set_torch_seed(args.fixed_validation_seed)
trainer.begin_valid_epoch(epoch_itr.epoch)
valid_losses = []
for subset in subsets:
logger.info('begin validation on "{}" subset'.format(subset))
# Initialize data iterator
itr = trainer.get_valid_iterator(subset).next_epoch_itr(shuffle=False)
if getattr(args, "tpu", False):
itr = utils.tpu_data_loader(itr)
progress = progress_bar.progress_bar(
itr,
log_format=args.log_format,
log_interval=args.log_interval,
epoch=epoch_itr.epoch,
prefix=f"valid on '{subset}' subset",
tensorboard_logdir=(
args.tensorboard_logdir if distributed_utils.is_master(args) else None
),
default_log_format=("tqdm" if not args.no_progress_bar else "simple"),
)
# create a new root metrics aggregator so validation metrics
# don't pollute other aggregators (e.g., train meters)
with metrics.aggregate(new_root=True) as agg:
for sample in progress:
trainer.valid_step(sample)
# log validation stats
stats = get_valid_stats(args, trainer, agg.get_smoothed_values(), saver)
progress.print(stats, tag=subset, step=trainer.get_num_updates())
valid_losses.append(stats[args.best_checkpoint_metric])
return valid_losses
if __name__ == "__main__":
cli_main()
avg_line_length: 36.306034 | max_line_length: 114 | alphanum_fraction: 0.633919

hexsha: d5eff585130a0defb51fd844556d3dea1143c55d | size: 18,862 | ext: py | lang: Python
max_stars: src/ucar/unidata/idv/resources/python/griddiag.py | JessicaWiedemeier/IDV | e5f67c755cc95f8ad2123bdc45a91f0e5eca0d64 | ["CNRI-Jython"] | count: 1 | 2021-06-09T11:24:48.000Z – 2021-06-09T11:24:48.000Z
max_issues: src/ucar/unidata/idv/resources/python/griddiag.py | JessicaWiedemeier/IDV | e5f67c755cc95f8ad2123bdc45a91f0e5eca0d64 | ["CNRI-Jython"] | count: null
max_forks: src/ucar/unidata/idv/resources/python/griddiag.py | JessicaWiedemeier/IDV | e5f67c755cc95f8ad2123bdc45a91f0e5eca0d64 | ["CNRI-Jython"] | count: null
content:
"""
This is the doc for the Grid Diagnostics module. These functions
are based on the grid diagnostics from the GEneral Meteorological
PAcKage (GEMPAK). Note that the names are case sensitive and some
are named slightly different from GEMPAK functions to avoid conflicts
with Jython built-ins (e.g. str).
<P>
In the following operators, scalar operands are named S<sub>n</sub> and
vector operands are named V<sub>n</sub>. Lowercase u and v refer to the
grid relative components of a vector.
"""
def GRAVITY():
""" Gravity constant """
return DerivedGridFactory.GRAVITY;
# Math functions
def atn2(S1,S2,WA=0):
""" Wrapper for atan2 built-in
<div class=jython>
ATN2 (S1, S2) = ATAN ( S1 / S2 )<br>
WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR)
</div>
"""
return GridMath.atan2(S1,S2,WA)
def add(S1,S2,WA=0):
""" Addition
<div class=jython>
ADD (S1, S2) = S1 + S2<br>
WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR)
</div>
"""
return GridMath.add(S1,S2,WA)
def mul(S1,S2,WA=0):
""" Multiply
<div class=jython>
MUL (S1, S2) = S1 * S2<br>
WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR)
</div>
"""
return GridMath.multiply(S1,S2,WA)
def quo(S1,S2,WA=0):
""" Divide
<div class=jython>
QUO (S1, S2) = S1 / S2<br>
WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR)
</div>
"""
return GridMath.divide(S1,S2,WA)
def sub(S1,S2,WA=0):
""" Subtract
<div class=jython>
SUB (S1, S2) = S1 - S2<br>
WA = use WEIGHTED_AVERAGE (default NEAREST_NEIGHBOR)
</div>
"""
return GridMath.subtract(S1,S2,WA)
# Scalar quantities
def adv(S,V):
""" Horizontal Advection, negative by convention
<div class=jython>
ADV ( S, V ) = - ( u * DDX (S) + v * DDY (S) )
</div>
"""
return -add(mul(ur(V),ddx(S)),mul(vr(V),ddy(S)))
def avg(S1,S2):
""" Average of 2 scalars
<div class=jython>
AVG (S1, S2) = ( S1 + S2 ) / 2
</div>
"""
return add(S1,S2)/2
def avor(V):
""" Absolute Vorticity
<div class=jython>
AVOR ( V ) = VOR ( V ) + CORL(V)
</div>
"""
relv = vor(V)
return add(relv,corl(relv))
def circs(S, D=2):
"""
<div class=jython>
Apply a circular aperture smoothing to the grid points. The weighting
function is the circular aperture diffraction function. D is
the radius of influence in grid increments, increasing D increases
the smoothing. (default D=2)
</div>
"""
return GridUtil.smooth(S, "CIRC", int(D))
def corl(S):
""" Coriolis Parameter for all points in a grid
<div class=jython>
CORL = TWO_OMEGA*sin(latr)
</div>
"""
return DerivedGridFactory.createCoriolisGrid(S)
def cress(S, D=2):
"""
<div class=jython>
Apply a Cressman smoothing to the grid points. The smoothed value
is given by a weighted average of surrounding grid points. D is
the radius of influence in grid increments,
increasing D increases the smoothing. (default D=2)
</div>
"""
return GridUtil.smooth(S, "CRES", int(D))
def cros(V1,V2):
""" Vector cross product magnitude
<div class=jython>
CROS ( V1, V2 ) = u1 * v2 - u2 * v1
</div>
"""
return sub(mul(ur(V1),vr(V2)),mul(ur(V2),vr(V1)))
def ddx(S):
""" Take the derivative with respect to the domain's X coordinate
"""
return GridMath.ddx(S);
def ddy(S):
""" Take the derivative with respect to the domain's Y coordinate
"""
return GridMath.ddy(S);
def defr(V):
""" Total deformation
<div class=jython>
DEF ( V ) = ( STRD (V) ** 2 + SHR (V) ** 2 ) ** .5
</div>
"""
return mag(strd(V),shr(V))
def div(V):
""" Horizontal Divergence
<div class=jython>
DIV ( V ) = DDX ( u ) + DDY ( v )
</div>
"""
return add(ddx(ur(V)),ddy(vr(V)))
def dirn(V):
""" North relative direction of a vector
<div class=jython>
DIRN ( V ) = DIRR ( un(v), vn(v) )
</div>
"""
return dirr(DerivedGridFactory.createTrueFlowVector(V))
def dirr(V):
""" Grid relative direction of a vector
"""
return DerivedGridFactory.createVectorDirection(V)
def dot(V1,V2):
""" Vector dot product
<div class=jython>
DOT ( V1, V2 ) = u1 * u2 + v1 * v2
</div>
"""
product = mul(V1,V2)
return add(ur(product),vr(product))
def gwfs(S, N=6):
"""
<div class=jython>
Horizontal smoothing using normally distributed weights
with theoretical response of 1/e for N * delta-x wave.
Increasing N increases the smoothing. (default N=6)
</div>
"""
return GridUtil.smooth(S, "GWFS", int(N))
def jcbn(S1,S2):
""" Jacobian Determinant
<div class=jython>
JCBN ( S1, S2 ) = DDX (S1) * DDY (S2) - DDY (S1) * DDX (S2)
</div>
"""
return sub(mul(ddx(S1),ddy(S2)),mul(ddy(S1),ddx(S2)))
def latr(S):
""" Latitudue all points in a grid
"""
return DerivedGridFactory.createLatitudeGrid(S)
def lap(S):
""" Laplacian operator
<div class=jython>
LAP ( S ) = DIV ( GRAD (S) )
</div>
"""
grads = grad(S)
return div(grads)
def lav(S,level1=None,level2=None, unit=None):
""" Layer Average of a multi layer grid
<div class=jython>
LAV ( S ) = ( S (level1) + S (level2) ) / 2.
</div>
"""
if level1 == None:
return GridMath.applyFunctionOverLevels(S, GridMath.FUNC_AVERAGE)
else:
return layerAverage(S,level1,level2, unit)
def ldf(S,level1,level2, unit=None):
""" Layer Difference
<div class=jython>
LDF ( S ) = S (level1) - S (level2)
</div>
"""
return layerDiff(S,level1,level2, unit);
def mag(*a):
""" Magnitude of a vector
"""
if (len(a) == 1):
return DerivedGridFactory.createVectorMagnitude(a[0]);
else:
return DerivedGridFactory.createVectorMagnitude(a[0],a[1]);
def mixr(temp,rh):
""" Mixing Ratio from Temperature, RH (requires pressure domain)
"""
return DerivedGridFactory.createMixingRatio(temp,rh)
def relh(temp,mixr):
""" Create Relative Humidity from Temperature, mixing ratio (requires pressure domain)
"""
return DerivedGridFactory.createRelativeHumidity(temp,mixr)
def pvor(S,V):
""" Potetial Vorticity (usually from theta and wind)
"""
return DerivedGridFactory.createPotentialVorticity(S,V)
def rects(S, D=2):
"""
<div class=jython>
Apply a rectangular aperture smoothing to the grid points. The weighting
function is the product of the rectangular aperture diffraction function
in the x and y directions. D is the radius of influence in grid
increments, increasing D increases the smoothing. (default D=2)
</div>
"""
return GridUtil.smooth(S, "RECT", int(D))
def savg(S):
""" Average over whole grid
<div class=jython>
SAVG ( S ) = average of all non-missing grid point values
</div>
"""
return GridMath.applyFunctionToLevels(S, GridMath.FUNC_AVERAGE)
def savs(S):
""" Average over grid subset
<div class=jython>
SAVS ( S ) = average of all non-missing grid point values in the subset
area
</div>
"""
return savg(S)
def sdiv(S,V):
""" Horizontal Flux Divergence
<div class=jython>
SDIV ( S, V ) = S * DIV ( V ) + DOT ( V, GRAD ( S ) )
</div>
"""
return add(mul(S,(div(V))) , dot(V,grad(S)))
def shr(V):
""" Shear Deformation
<div class=jython>
SHR ( V ) = DDX ( v ) + DDY ( u )
</div>
"""
return add(ddx(vr(V)),ddy(ur(V)))
def sm5s(S):
""" Smooth a scalar grid using a 5-point smoother
<div class=jython>
SM5S ( S ) = .5 * S (i,j) + .125 * ( S (i+1,j) + S (i,j+1) +
S (i-1,j) + S (i,j-1) )
</div>
"""
return GridUtil.smooth(S, "SM5S")
def sm9s(S):
""" Smooth a scalar grid using a 9-point smoother
<div class=jython>
SM9S ( S ) = .25 * S (i,j) + .125 * ( S (i+1,j) + S (i,j+1) +
S (i-1,j) + S (i,j-1) )
+ .0625 * ( S (i+1,j+1) +
S (i+1,j-1) +
S (i-1,j+1) +
S (i-1,j-1) )
</div>
"""
return GridUtil.smooth(S, "SM9S")
def strd(V):
""" Stretching Deformation
<div class=jython>
STRD ( V ) = DDX ( u ) - DDY ( v )
</div>
"""
return sub(ddx(ur(V)),ddy(vr(V)))
def thta(temp):
""" Potential Temperature from Temperature (requires pressure domain)
"""
return DerivedGridFactory.createPotentialTemperature(temp)
def thte(temp,rh):
""" Equivalent Potential Temperature from Temperature and Relative
humidity (requires pressure domain)
"""
return DerivedGridFactory.createEquivalentPotentialTemperature(temp,rh)
def un(V):
""" North relative u component
"""
return ur(DerivedGridFactory.createTrueFlowVector(V))
def ur(V):
""" Grid relative u component
"""
return DerivedGridFactory.getUComponent(V)
def vn(V):
""" North relative v component
"""
return vr(DerivedGridFactory.createTrueFlowVector(V))
def vor(V):
""" Relative Vorticity
<div class=jython>
VOR ( V ) = DDX ( v ) - DDY ( u )
</div>
"""
return sub(ddx(vr(V)),ddy(ur(V)))
def vr(V):
""" Grid relative v component
"""
return DerivedGridFactory.getVComponent(V)
def xav(S):
""" Average along a grid row
<div class=jython>
XAV (S) = ( S (X1) + S (X2) + ... + S (KXD) ) / KNT
KXD = number of points in row
KNT = number of non-missing points in row
XAV for a row is stored at every point in that row.
</div>
"""
return GridMath.applyFunctionToAxis(S, GridMath.FUNC_AVERAGE, GridMath.AXIS_X)
def xsum(S):
""" Sum along a grid row
<div class=jython>
XSUM (S) = ( S (X1) + S (X2) + ... + S (KXD) )
KXD = number of points in row
XSUM for a row is stored at every point in that row.
</div>
"""
return GridMath.applyFunctionToAxis(S, GridMath.FUNC_SUM, GridMath.AXIS_X)
def yav(S):
""" Average along a grid column
<div class=jython>
YAV (S) = ( S (Y1) + S (Y2) + ... + S (KYD) ) / KNT
KYD = number of points in column
KNT = number of non-missing points in column
</div>
"""
return GridMath.applyFunctionToAxis(S, GridMath.FUNC_AVERAGE, GridMath.AXIS_Y)
def ysum(S):
""" Sum along a grid column
<div class=jython>
YSUM (S) = ( S (Y1) + S (Y2) + ... + S (KYD) )
KYD = number of points in row
YSUM for a column is stored at every point in that column.
</div>
"""
return GridMath.applyFunctionToAxis(S, GridMath.FUNC_SUM, GridMath.AXIS_Y)
def zav(S):
""" Average across the levels of a grid at all points
<div class=jython>
ZAV (S) = ( S (Z1) + S (Z2) + ... + S (KZD) ) / KNT
KZD = number of levels
KNT = number of non-missing points in column
</div>
"""
return GridMath.applyFunctionToLevels(S, GridMath.FUNC_AVERAGE)
def zsum(S):
""" Sum across the levels of a grid at all points
<div class=jython>
ZSUM (S) = ( S (Z1) + S (Z2) + ... + S (KZD) )
KZD = number of levels
ZSUM for a vertical column is stored at every point
</div>
"""
return GridMath.applyFunctionOverLevels(S, GridMath.FUNC_SUM)
def wshr(V, Z, top, bottom):
""" Magnitude of the vertical wind shear in a layer
<div class=jython>
WSHR ( V ) = MAG [ VLDF (V) ] / LDF (Z)
</div>
"""
dv = mag(vldf(V,top,bottom))
dz = ldf(Z,top,bottom)
return quo(dv,dz)
# Vector output
def age(obs,geo):
""" Ageostrophic wind
<div class=jython>
AGE ( S ) = [ u (OBS) - u (GEO(S)), v (OBS) - v (GEO(S)) ]
</div>
"""
return sub(obs,geo)
def circv(S, D=2):
"""
<div class=jython>
Apply a circular aperture smoothing to the grid points. The weighting
function is the circular aperture diffraction function. D is
the radius of influence in grid increments, increasing D increases
the smoothing. (default D=2)
</div>
"""
return GridUtil.smooth(S, "CIRC", int(D))
def cresv(S, D=2):
"""
<div class=jython>
Apply a Cressman smoothing to the grid points. The smoothed value
is given by a weighted average of surrounding grid points. D is
the radius of influence in grid increments,
increasing D increases the smoothing. (default D=2)
</div>
"""
return GridUtil.smooth(S, "CRES", int(D))
def dvdx(V):
""" Partial x derivative of a vector
<div class=jython>
DVDX ( V ) = [ DDX (u), DDX (v) ]
</div>
"""
return vecr(ddx(ur(V)), ddx(vr(V)))
def dvdy(V):
""" Partial x derivative of a vector
<div class=jython>
DVDY ( V ) = [ DDY (u), DDY (v) ]
</div>
"""
return vecr(ddy(ur(V)), ddy(vr(V)))
def frnt(S,V):
""" Frontogenesis function from theta and the wind
<div class=jython>
FRNT ( THTA, V ) = 1/2 * MAG ( GRAD (THTA) ) *
( DEF * COS (2 * BETA) - DIV ) <p>
Where: BETA = ASIN ( (-DDX (THTA) * COS (PSI) <br>
- DDY (THTA) * SIN (PSI))/ <br>
MAG ( GRAD (THTA) ) ) <br>
PSI = 1/2 ATAN2 ( SHR / STR ) <br>
</div>
"""
shear = shr(V)
strch = strd(V)
psi = .5*atn2(shear,strch)
dxt = ddx(S)
dyt = ddy(S)
cosd = cos(psi)
sind = sin(psi)
gradt = grad(S)
mgradt = mag(gradt)
a = -cosd*dxt-sind*dyt
beta = asin(a/mgradt)
frnto = .5*mgradt*(defr(V)*cos(2*beta)-div(V))
return frnto
def geo(z):
""" geostrophic wind from height
<div class=jython>
GEO ( S ) = [ - DDY (S) * const / CORL, DDX (S) * const / CORL ]
</div>
"""
return DerivedGridFactory.createGeostrophicWindVector(z)
def grad(S):
""" Gradient of a scalar
<div class=jython>
GRAD ( S ) = [ DDX ( S ), DDY ( S ) ]
</div>
"""
return vecr(ddx(S),ddy(S))
def gwfv(V, N=6):
"""
<div class=jython>
Horizontal smoothing using normally distributed weights
with theoretical response of 1/e for N * delta-x wave.
Increasing N increases the smoothing. (default N=6)
</div>
"""
return gwfs(V, N)
def inad(V1,V2):
""" Inertial advective wind
<div class=jython>
INAD ( V1, V2 ) = [ DOT ( V1, GRAD (u2) ),
DOT ( V1, GRAD (v2) ) ]
</div>
"""
return vecr(dot(V1,grad(ur(V2))),dot(V1,grad(vr(V2))))
def qvec(S,V):
""" Q-vector at a level ( K / m / s )
<div class=jython>
QVEC ( S, V ) = [ - ( DOT ( DVDX (V), GRAD (S) ) ),
- ( DOT ( DVDY (V), GRAD (S) ) ) ]
where S can be any thermal parameter, usually THTA.
</div>
"""
grads = grad(S)
qvecu = newName(-dot(dvdx(V),grads),"qvecu")
qvecv = newName(-dot(dvdy(V),grads),"qvecv")
return vecr(qvecu,qvecv)
def qvcl(THTA,V):
""" Q-vector ( K / m / s )
<div class=jython>
QVCL ( THTA, V ) = ( 1/( D (THTA) / DP ) ) *
[ ( DOT ( DVDX (V), GRAD (THTA) ) ),
( DOT ( DVDY (V), GRAD (THTA) ) ) ]
</div>
"""
dtdp = GridMath.partial(THTA,2)
gradt = grad(THTA)
qvecudp = newName(quo(dot(dvdx(V),gradt),dtdp),"qvecudp")
qvecvdp = newName(quo(dot(dvdy(V),gradt),dtdp),"qvecvdp")
return vecr(qvecudp,qvecvdp)
def rectv(S, D=2):
"""
<div class=jython>
Apply a rectangular aperture smoothing to the grid points. The weighting
function is the product of the rectangular aperture diffraction function
in the x and y directions. D is the radius of influence in grid
increments, increasing D increases the smoothing. (default D=2)
</div>
"""
return GridUtil.smooth(S, "RECT", int(D))
def sm5v(V):
""" Smooth a scalar grid using a 5-point smoother (see sm5s)
"""
return sm5s(V)
def sm9v(V):
""" Smooth a scalar grid using a 9-point smoother (see sm9s)
"""
return sm9s(V)
def thrm(S, level1, level2, unit=None):
""" Thermal wind
<div class=jython>
THRM ( S ) = [ u (GEO(S)) (level1) - u (GEO(S)) (level2),
v (GEO(S)) (level1) - v (GEO(S)) (level2) ]
</div>
"""
return vldf(geo(S),level1,level2, unit)
def vadd(V1,V2):
""" add the components of 2 vectors
<div class=jython>
VADD (V1, V2) = [ u1+u2, v1+v2 ]
</div>
"""
return add(V1,V2)
def vecn(S1,S2):
""" Make a true north vector from two components
<div class=jython>
VECN ( S1, S2 ) = [ S1, S2 ]
</div>
"""
return makeTrueVector(S1,S2)
def vecr(S1,S2):
""" Make a vector from two components
<div class=jython>
VECR ( S1, S2 ) = [ S1, S2 ]
</div>
"""
return makeVector(S1,S2)
def vlav(V,level1,level2, unit=None):
""" calculate the vector layer average
<div class=jython>
VLAV(V) = [(u(level1) + u(level2))/2,
(v(level1) + v(level2))/2]
</div>
"""
return layerAverage(V, level1, level2, unit)
def vldf(V,level1,level2, unit=None):
""" calculate the vector layer difference
<div class=jython>
VLDF(V) = [u(level1) - u(level2),
v(level1) - v(level2)]
</div>
"""
return layerDiff(V,level1,level2, unit)
def vmul(V1,V2):
""" Multiply the components of 2 vectors
<div class=jython>
VMUL (V1, V2) = [ u1*u2, v1*v2 ]
</div>
"""
return mul(V1,V2)
def vquo(V1,V2):
""" Divide the components of 2 vectors
<div class=jython>
VQUO (V1, V2) = [ u1/u2, v1/v2 ]
</div>
"""
return quo(V1,V2)
def vsub(V1,V2):
""" subtract the components of 2 vectors
<div class=jython>
VSUB (V1, V2) = [ u1-u2, v1-v2 ]
</div>
"""
return sub(V1,V2)
def LPIndex(u, v, z, t, top, bottom, unit):
""" calculate the wind shear between discrete layers
<div class=jython>
LP = 7.268DUDZ + 0.718DTDN + 0.318DUDN - 2.52
</div>
"""
Z = windShear(u, v, z, top, bottom, unit)*7.268
uwind = getSliceAtLevel(u, top)
vwind = getSliceAtLevel(v, top)
temp = newUnit(getSliceAtLevel(t, top), "temperature", "celsius")
HT = sqrt(ddx(temp)*ddx(temp) + ddy(temp)*ddy(temp))*0.718
HU = (ddx(vwind) + ddy(uwind))*0.318
L = add(noUnit(Z), add(noUnit(HU), noUnit(HT)))
L = (L - 2.520)*(-0.59)
P= 1.0/(1.0 + GridMath.applyFunctionOverGridsExt(L,"exp"))
LP = setLevel(P ,top, unit)
return LP
def EllrodIndex(u, v, z, top, bottom, unit):
""" calculate the wind shear between discrete layers
<div class=jython>
EI = VWS X ( DEF + DIV)
</div>
"""
VWS = windShear(u, v, z, top, bottom, unit)*100.0
#
uwind = getSliceAtLevel(u, top)
vwind = getSliceAtLevel(v, top)
DIV = (ddx(uwind) + ddy(vwind))* (-1.0)
#
DSH = ddx(vwind) + ddy(uwind)
DST = ddx(uwind) - ddy(vwind)
DEF = sqrt(DSH * DSH + DST * DST)
EI = mul(noUnit(VWS), add(noUnit(DEF), noUnit(DIV)))
return setLevel(EI, top, unit)
avg_line_length: 26.75461 | max_line_length: 89 | alphanum_fraction: 0.584721

hexsha: d5effb4acc4b4904be8e5099e47cd060230843fe | size: 2,376 | ext: py | lang: Python
max_stars: app.py | DevilBit/Twitter-Bot | 6f1b285aeb5faf37906d575775a927e69a5321d6 | ["MIT"] | count: null
max_issues: app.py | DevilBit/Twitter-Bot | 6f1b285aeb5faf37906d575775a927e69a5321d6 | ["MIT"] | count: null
max_forks: app.py | DevilBit/Twitter-Bot | 6f1b285aeb5faf37906d575775a927e69a5321d6 | ["MIT"] | count: 1 | 2021-03-08T20:05:23.000Z – 2021-03-08T20:05:23.000Z
content:
from selenium import webdriver #to get the browser
from selenium.webdriver.common.keys import Keys #to send key to browser
import getpass #to get password safely
import time #to pause the program
# a class to store all twitter related objects and functions
if __name__ == '__main__':
username = input('Email: ')
password = getpass.getpass('Password: ')
search = input('Please enter keyword: ')
user = twitter_bot(username, password)
user.login()
time.sleep(10)
user.like_tweet(search)
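# --- Added sketch (not part of the original record) ---
# The script above instantiates a twitter_bot class that is not included in
# this record's content. The skeleton below is an assumed reconstruction based
# only on the calls in __main__ (constructor taking username and password,
# plus login() and like_tweet()); element locators and timing are omitted
# because they are not recoverable from this record. In the original file such
# a class would be defined before the __main__ block.
class twitter_bot:
    def __init__(self, username, password):
        self.username = username
        self.password = password
        self.bot = webdriver.Firefox()  # any selenium-supported driver would do

    def login(self):
        self.bot.get('https://twitter.com/login')
        time.sleep(5)
        # fill in the username/password fields and submit the form here;
        # the concrete selectors are site-specific and not part of this sketch

    def like_tweet(self, search):
        self.bot.get('https://twitter.com/search?q=' + search)
        time.sleep(5)
        # scroll through the results and click the like buttons here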
avg_line_length: 34.941176 | max_line_length: 95 | alphanum_fraction: 0.603114

hexsha: d5f13f54fb0bf75e7d45a4d1bb426a38fb3fb255 | size: 3,403 | ext: py | lang: Python
max_stars: visualization.py | shyhyawJou/GradCAM-pytorch | 8159f077552fc71055fe97c17bf8544d32cc8b0f | ["Apache-2.0"] | count: null
max_issues: visualization.py | shyhyawJou/GradCAM-pytorch | 8159f077552fc71055fe97c17bf8544d32cc8b0f | ["Apache-2.0"] | count: null
max_forks: visualization.py | shyhyawJou/GradCAM-pytorch | 8159f077552fc71055fe97c17bf8544d32cc8b0f | ["Apache-2.0"] | count: null
content:
import torch
import torch.nn as nn
from torch.nn import functional as F
from PIL import Image
import cv2 as cv
from matplotlib import cm
import numpy as np
avg_line_length: 34.72449 | max_line_length: 85 | alphanum_fraction: 0.528945

hexsha: d5f302c5d8d693812839ea69e155909e598db642 | size: 19,149 | ext: py | lang: Python
max_stars: frame_2D_alg/alternative versions/intra_blob_xy.py | Mechachleopteryx/CogAlg | 723104e1f57010e52f1dc249ba53ba58db0a991b | ["MIT"] | count: null
max_issues: frame_2D_alg/alternative versions/intra_blob_xy.py | Mechachleopteryx/CogAlg | 723104e1f57010e52f1dc249ba53ba58db0a991b | ["MIT"] | count: null
max_forks: frame_2D_alg/alternative versions/intra_blob_xy.py | Mechachleopteryx/CogAlg | 723104e1f57010e52f1dc249ba53ba58db0a991b | ["MIT"] | count: null
content:
'''
2D version of 1st-level algorithm is a combination of frame_blobs, intra_blob, and comp_P: optional raster-to-vector conversion.
intra_blob recursively evaluates each blob for two forks of extended internal cross-comparison and sub-clustering:
der+: incremental derivation cross-comp in high-variation edge areas of +vg: positive deviation of gradient triggers comp_g,
rng+: incremental range cross-comp in low-variation flat areas of +v--vg: positive deviation of negated -vg triggers comp_r.
Each adds a layer of sub_blobs per blob.
Please see diagram: https://github.com/boris-kz/CogAlg/blob/master/frame_2D_alg/Illustrations/intra_blob_2_fork_scheme.png
Blob structure, for all layers of blob hierarchy:
root_dert__,
Dert = I, iDy, iDx, G, Dy, Dx, M, S (area), Ly (vertical dimension)
# I: input, (iDy, iDx): angle of input gradient, G: gradient, (Dy, Dx): vertical and lateral Ds, M: match
sign,
box, # y0, yn, x0, xn
dert__, # box of derts, each = i, idy, idx, g, dy, dx, m
stack_[ stack_params, Py_ [(P_params, dert_)]]: refs down blob formation tree, in vertical (horizontal) order
# next fork:
fcr, # flag comp rng, also clustering criterion in dert and Dert: g in der+ fork, i+m in rng+ fork?
fig, # flag input is gradient
rdn, # redundancy to higher layers
rng, # comp range
sub_layers # [sub_blobs ]: list of layers across sub_blob derivation tree
# deeper layers are nested, multiple forks: no single set of fork params?
'''
from collections import deque, defaultdict
from class_cluster import ClusterStructure, NoneType
from class_bind import AdjBinder
from frame_blobs_yx import assign_adjacents
from intra_comp_g import comp_g, comp_r
from itertools import zip_longest
from class_stream import BlobStreamer
from utils import pairwise
import numpy as np
# from comp_P_draft import comp_P_blob
# filters, All *= rdn:
ave = 50 # fixed cost per dert, from average m, reflects blob definition cost, may be different for comp_a?
aveB = 50 # fixed cost per intra_blob comp and clustering
# --------------------------------------------------------------------------------------------------------------
# functions, ALL WORK-IN-PROGRESS:
def intra_blob(blob, rdn, rng, fig, fcr, **kwargs): # recursive input rng+ | der+ cross-comp within blob
# fig: flag input is g | p, fcr: flag comp over rng+ | der+
if kwargs.get('render', None) is not None: # stop rendering sub-blobs when blob is too small
if blob.Dert['S'] < 100:
kwargs['render'] = False
spliced_layers = [] # to extend root_blob sub_layers
ext_dert__, ext_mask = extend_dert(blob)
if fcr:
dert__, mask = comp_r(ext_dert__, fig, fcr, ext_mask) # -> m sub_blobs
else:
dert__, mask = comp_g(ext_dert__, ext_mask) # -> g sub_blobs:
if dert__[0].shape[0] > 2 and dert__[0].shape[1] > 2 and False in mask: # min size in y and x, least one dert in dert__
sub_blobs = cluster_derts(dert__, mask, ave * rdn, fcr, fig, **kwargs)
# fork params:
blob.fcr = fcr
blob.fig = fig
blob.rdn = rdn
blob.rng = rng
blob.Ls = len(sub_blobs) # for visibility and next-fork rdn
blob.sub_layers = [sub_blobs] # 1st layer of sub_blobs
for sub_blob in sub_blobs: # evaluate for intra_blob comp_g | comp_r:
G = blob.Dert['G']; adj_G = blob.adj_blobs[2]
borrow = min(abs(G), abs(adj_G) / 2) # or adjacent M if negative sign?
if sub_blob.sign:
if sub_blob.Dert['M'] - borrow > aveB * rdn: # M - (intra_comp value lend to edge blob)
# comp_r fork:
blob.sub_layers += intra_blob(sub_blob, rdn + 1 + 1 / blob.Ls, rng * 2, fig=fig, fcr=1, **kwargs)
# else: comp_P_
elif sub_blob.Dert['G'] + borrow > aveB * rdn: # G + (intra_comp value borrow from flat blob)
# comp_g fork:
blob.sub_layers += intra_blob(sub_blob, rdn + 1 + 1 / blob.Ls, rng=rng, fig=1, fcr=0, **kwargs)
# else: comp_P_
spliced_layers = [spliced_layers + sub_layers for spliced_layers, sub_layers in
zip_longest(spliced_layers, blob.sub_layers, fillvalue=[])]
return spliced_layers
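# Hypothetical usage sketch (assumed, not part of the original module): after
# frame_blobs segments the frame, each positive blob above the fixed cost would
# be passed here, roughly:
#   for blob in frame_of_blobs:
#       if blob.sign and blob.Dert['G'] > aveB:
#           intra_blob(blob, rdn=1, rng=1, fig=0, fcr=0)
# The exact frame structure and thresholds differ across CogAlg versions.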
def cluster_derts(dert__, mask, Ave, fcr, fig, render=False): # similar to frame_to_blobs
if fcr: # comp_r output; form clustering criterion:
if fig:
crit__ = dert__[0] + dert__[6] - Ave # eval by i + m, accum in rng; dert__[:,:,0] if not transposed
else:
crit__ = Ave - dert__[3] # eval by -g, accum in rng
else: # comp_g output
crit__ = dert__[6] - Ave # comp_g output eval by m, or clustering is always by m?
root_dert__ = dert__ # derts after the comps operation, which is the root_dert__
dert__ = [*zip(*dert__)] # transpose dert__ into shape [y, params, x]
sub_blobs = [] # from form_blob:
stack_ = deque() # buffer of running vertical stacks of Ps
stack_binder = AdjBinder(CDeepStack)
if render:
streamer = BlobStreamer(CDeepBlob, crit__, mask)
for y, dert_ in enumerate(dert__): # in height, first and last row are discarded; print(f'Processing intra line {y}...')
# if False in mask[i]: # [y,x,params], there is at least one dert in line
P_binder = AdjBinder(CDeepP) # binder needs data about clusters of the same level
P_ = form_P_(zip(*dert_), crit__[y], mask[y], P_binder) # horizontal clustering, adds a row of Ps
if render:
render = streamer.update_blob_conversion(y, P_) # if return False, stop rendering
P_ = scan_P_(P_, stack_, root_dert__, sub_blobs, P_binder) # vertical clustering, adds up_connects per P and down_connect_cnt per stack
stack_ = form_stack_(P_, root_dert__, sub_blobs, y)
stack_binder.bind_from_lower(P_binder)
while stack_: # frame ends, last-line stacks are merged into their blobs:
form_blob(stack_.popleft(), root_dert__, sub_blobs)
blob_binder = AdjBinder(CDeepBlob)
blob_binder.bind_from_lower(stack_binder)
assign_adjacents(blob_binder) # add adj_blobs to each blob
# sub_blobs = find_adjacent(sub_blobs)
if render: # rendering mode after blob conversion
streamer.end_blob_conversion(y)
return sub_blobs
# clustering functions:
# -------------------------------------------------------------------------------------------------------------------
| 44.740654 | 144 | 0.576584 |
d5f33371ef4b57ee6f5f8e58e37840bbabd0819e
| 10,275 |
py
|
Python
|
examples/pybullet/gym/pybullet_envs/minitaur/envs/env_randomizers/minitaur_terrain_randomizer.py
|
felipeek/bullet3
|
6a59241074720e9df119f2f86bc01765917feb1e
|
[
"Zlib"
] | 9,136 |
2015-01-02T00:41:45.000Z
|
2022-03-31T15:30:02.000Z
|
examples/pybullet/gym/pybullet_envs/minitaur/envs/env_randomizers/minitaur_terrain_randomizer.py
|
felipeek/bullet3
|
6a59241074720e9df119f2f86bc01765917feb1e
|
[
"Zlib"
] | 2,424 |
2015-01-05T08:55:58.000Z
|
2022-03-30T19:34:55.000Z
|
examples/pybullet/gym/pybullet_envs/minitaur/envs/env_randomizers/minitaur_terrain_randomizer.py
|
felipeek/bullet3
|
6a59241074720e9df119f2f86bc01765917feb1e
|
[
"Zlib"
] | 2,921 |
2015-01-02T10:19:30.000Z
|
2022-03-31T02:48:42.000Z
|
"""Generates a random terrain at Minitaur gym environment reset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
parentdir = os.path.dirname(os.path.dirname(parentdir))
os.sys.path.insert(0, parentdir)
import itertools
import math
import enum
import numpy as np
from pybullet_envs.minitaur.envs import env_randomizer_base
_GRID_LENGTH = 15
_GRID_WIDTH = 10
_MAX_SAMPLE_SIZE = 30
_MIN_BLOCK_DISTANCE = 0.7
_MAX_BLOCK_LENGTH = _MIN_BLOCK_DISTANCE
_MIN_BLOCK_LENGTH = _MAX_BLOCK_LENGTH / 2
_MAX_BLOCK_HEIGHT = 0.05
_MIN_BLOCK_HEIGHT = _MAX_BLOCK_HEIGHT / 2
| 35.309278 | 98 | 0.706667 |
d5f35dd267171d89db5d5ed7c57d46dbcf723ae2
| 2,502 |
py
|
Python
|
polecat/db/sql/expression/values.py
|
furious-luke/polecat
|
7be5110f76dc42b15c922c1bb7d49220e916246d
|
[
"MIT"
] | 4 |
2019-08-10T12:56:12.000Z
|
2020-01-21T09:51:20.000Z
|
polecat/db/sql/expression/values.py
|
furious-luke/polecat
|
7be5110f76dc42b15c922c1bb7d49220e916246d
|
[
"MIT"
] | 71 |
2019-04-09T05:39:21.000Z
|
2020-05-16T23:09:24.000Z
|
polecat/db/sql/expression/values.py
|
furious-luke/polecat
|
7be5110f76dc42b15c922c1bb7d49220e916246d
|
[
"MIT"
] | null | null | null |
from functools import partial
from polecat.db.query import query as query_module
from psycopg2.sql import SQL, Placeholder
from .expression import Expression
| 34.75 | 79 | 0.61311 |
d5f3f84aa262b2485923b0060a6795013deae56c
| 1,292 |
py
|
Python
|
python/day3p1.py
|
swilcox/2019adventofcode
|
b67261aae74805ba8c2f4b72f09dd79277224ebb
|
[
"MIT"
] | 1 |
2020-01-18T18:24:18.000Z
|
2020-01-18T18:24:18.000Z
|
python/day3p1.py
|
swilcox/2019adventofcode
|
b67261aae74805ba8c2f4b72f09dd79277224ebb
|
[
"MIT"
] | null | null | null |
python/day3p1.py
|
swilcox/2019adventofcode
|
b67261aae74805ba8c2f4b72f09dd79277224ebb
|
[
"MIT"
] | null | null | null |
# 2019 advent day 3
MOVES = {
'R': (lambda x: (x[0], x[1] + 1)),
'L': (lambda x: (x[0], x[1] - 1)),
'U': (lambda x: (x[0] + 1, x[1])),
'D': (lambda x: (x[0] - 1, x[1])),
}
#R1 = 'R75,D30,R83,U83,L12,D49,R71,U7,L72'
#R2 = 'U62,R66,U55,R34,D71,R55,D58,R83'
#R1 = 'R98,U47,R26,D63,R33,U87,L62,D20,R33,U53,R51'
#R2 = 'U98,R91,D20,R16,D67,R40,U7,R15,U6,R7'
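# Minimal sketch of the missing pieces (assumed, not part of the original file;
# the real parsing/solution code is not shown). It only illustrates how MOVES is
# meant to be used for part 1: trace both wires and report the Manhattan distance
# of the crossing closest to the origin. The input path below is a placeholder.
def trace(wire):
    points, pos = set(), (0, 0)
    for step in wire.split(','):
        direction, distance = step[0], int(step[1:])
        for _ in range(distance):
            pos = MOVES[direction](pos)
            points.add(pos)
    return points
def main():
    with open('day3-input.txt') as f:
        wire1, wire2 = f.readline().strip(), f.readline().strip()
    crossings = trace(wire1) & trace(wire2)
    print(min(abs(x) + abs(y) for x, y in crossings))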
if __name__ == "__main__":
main()
| 26.367347 | 79 | 0.600619 |
d5f42d830df55813fe6234674e4d597dccbd7f59
| 1,054 |
py
|
Python
|
examples/demo/python/catalog.py
|
JavDomGom/mist
|
83ae9f67df61ff2387a7d424cff0f8591a6a645f
|
[
"Apache-2.0"
] | 1 |
2021-04-23T17:13:31.000Z
|
2021-04-23T17:13:31.000Z
|
examples/demo/python/catalog.py
|
JavDomGom/mist
|
83ae9f67df61ff2387a7d424cff0f8591a6a645f
|
[
"Apache-2.0"
] | null | null | null |
examples/demo/python/catalog.py
|
JavDomGom/mist
|
83ae9f67df61ff2387a7d424cff0f8591a6a645f
|
[
"Apache-2.0"
] | null | null | null |
import asyncio
| 36.344828 | 112 | 0.588235 |
d5f5577604a264eefbdbdf102a315e607e68f2da
| 15,156 |
py
|
Python
|
tests/api/v3_1_0/test_security_groups_acls.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 36 |
2021-05-18T16:24:19.000Z
|
2022-03-05T13:44:41.000Z
|
tests/api/v3_1_0/test_security_groups_acls.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 15 |
2021-06-08T19:03:37.000Z
|
2022-02-25T14:47:33.000Z
|
tests/api/v3_1_0/test_security_groups_acls.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 6 |
2021-06-10T09:32:01.000Z
|
2022-01-12T08:34:39.000Z
|
# -*- coding: utf-8 -*-
"""IdentityServicesEngineAPI security_groups_acls API fixtures and tests.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from fastjsonschema.exceptions import JsonSchemaException
from ciscoisesdk.exceptions import MalformedRequest
from ciscoisesdk.exceptions import ciscoisesdkException
from tests.environment import IDENTITY_SERVICES_ENGINE_VERSION
pytestmark = pytest.mark.skipif(IDENTITY_SERVICES_ENGINE_VERSION != '3.1.0', reason='version does not match')
| 31.509356 | 109 | 0.720375 |
d5f5d714834d96889f873a0d7ec900fdf1926bca
| 21,522 |
py
|
Python
|
geomstats/geometry/riemannian_metric.py
|
stefanheyder/geomstats
|
c4e6d959db7b1bcc99b00b535b8aa5d832b62e28
|
[
"MIT"
] | null | null | null |
geomstats/geometry/riemannian_metric.py
|
stefanheyder/geomstats
|
c4e6d959db7b1bcc99b00b535b8aa5d832b62e28
|
[
"MIT"
] | null | null | null |
geomstats/geometry/riemannian_metric.py
|
stefanheyder/geomstats
|
c4e6d959db7b1bcc99b00b535b8aa5d832b62e28
|
[
"MIT"
] | null | null | null |
"""Riemannian and pseudo-Riemannian metrics."""
import math
import warnings
import autograd
import geomstats.backend as gs
from geomstats.geometry.connection import Connection
EPSILON = 1e-4
N_CENTERS = 10
TOLERANCE = 1e-5
N_REPETITIONS = 20
N_MAX_ITERATIONS = 50000
N_STEPS = 10
def loss(y_pred, y_true, metric):
"""Compute loss function between prediction and ground truth.
Loss function given by a Riemannian metric,
expressed as the squared geodesic distance between the prediction
and the ground truth.
    Parameters
    ----------
    y_pred : array-like
        Predicted point.
    y_true : array-like
        Ground-truth point.
    metric : RiemannianMetric
        Metric used to compute the squared geodesic distance.
    Returns
    -------
    loss : array-like
        Squared geodesic distance between y_pred and y_true.
    """
loss = metric.squared_dist(y_pred, y_true)
return loss
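# Hypothetical usage sketch (assumed, not part of the original module): any
# metric object exposing `squared_dist` can be passed in, e.g. a hypersphere
# metric from geomstats (constructor arguments vary across versions):
#   from geomstats.geometry.hypersphere import Hypersphere
#   metric = Hypersphere(2).metric
#   squared_geodesic_loss = loss(y_pred, y_true, metric)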
def grad(y_pred, y_true, metric):
"""Closed-form for the gradient of the loss function."""
tangent_vec = metric.log(base_point=y_pred, point=y_true)
grad_vec = - 2. * tangent_vec
inner_prod_mat = metric.inner_product_matrix(base_point=y_pred)
grad = gs.einsum('ni,nij->ni',
grad_vec,
gs.transpose(inner_prod_mat, axes=(0, 2, 1)))
return grad
| 33.315789 | 79 | 0.557987 |
d5f67147c5059c64bf2090a7f0dd93d9aec0092b
| 9,842 |
py
|
Python
|
app/main/pages/instrument/hrs/red/order/plots.py
|
hettlage/salt-data-quality-site
|
da9ff4a51e8affa47e0bc1c0383c7fdeaac2155e
|
[
"MIT"
] | null | null | null |
app/main/pages/instrument/hrs/red/order/plots.py
|
hettlage/salt-data-quality-site
|
da9ff4a51e8affa47e0bc1c0383c7fdeaac2155e
|
[
"MIT"
] | null | null | null |
app/main/pages/instrument/hrs/red/order/plots.py
|
hettlage/salt-data-quality-site
|
da9ff4a51e8affa47e0bc1c0383c7fdeaac2155e
|
[
"MIT"
] | null | null | null |
import pandas as pd
from bokeh.models import HoverTool
from bokeh.models.formatters import DatetimeTickFormatter
from bokeh.palettes import Plasma256
from bokeh.plotting import figure, ColumnDataSource
from app import db
from app.decorators import data_quality
# creates your plot
date_formatter = DatetimeTickFormatter(microseconds=['%f'],
milliseconds=['%S.%2Ns'],
seconds=[':%Ss'],
minsec=[':%Mm:%Ss'],
minutes=['%H:%M:%S'],
hourmin=['%H:%M:'],
hours=["%H:%M"],
days=["%d %b"],
months=["%d %b %Y"],
years=["%b %Y"])
| 35.530686 | 109 | 0.517984 |
d5f72b6bb8de932265e3494ed6520e23b33d2b72
| 705 |
py
|
Python
|
p6e8.py
|
yannickbf-prog/python
|
da4bd2c8668966359b829a8ac2a896afeca2b150
|
[
"MIT"
] | null | null | null |
p6e8.py
|
yannickbf-prog/python
|
da4bd2c8668966359b829a8ac2a896afeca2b150
|
[
"MIT"
] | null | null | null |
p6e8.py
|
yannickbf-prog/python
|
da4bd2c8668966359b829a8ac2a896afeca2b150
|
[
"MIT"
] | null | null | null |
#Yannick p6e8 Write a program that first asks for a number and then keeps asking for numbers until the sum of the numbers entered matches the initial number. The program ends by printing the list of numbers.
limite = int(input("Enter the limit:"))
valores = int(input("Enter a value:"))
listavalores = []
listavalores.append(valores)
while limite > sum(listavalores):
    valores = int(input("Enter another value"))
    listavalores.append(valores)
print(f"The limit to reach is {limite}. The list created is ", end="")
for i in range(len(listavalores)):
    print(listavalores[i], end=" ")
print(f"since the sum of these numbers is {sum(listavalores)}")
| 30.652174 | 219 | 0.704965 |
d5f73b66aea43800edd9e2977d37ade872174872
| 1,574 |
py
|
Python
|
.venv/lib/python3.8/site-packages/cleo/application.py
|
RivtLib/replit01
|
ce1ae18b446a9c844f40e88a51c71fbc45ab3ad7
|
[
"MIT"
] | 1 |
2020-08-07T16:09:57.000Z
|
2020-08-07T16:09:57.000Z
|
.venv/lib/python3.8/site-packages/cleo/application.py
|
RivtLib/replit01
|
ce1ae18b446a9c844f40e88a51c71fbc45ab3ad7
|
[
"MIT"
] | null | null | null |
.venv/lib/python3.8/site-packages/cleo/application.py
|
RivtLib/replit01
|
ce1ae18b446a9c844f40e88a51c71fbc45ab3ad7
|
[
"MIT"
] | null | null | null |
from typing import Optional
from typing import Tuple
from clikit.console_application import ConsoleApplication
from .commands import BaseCommand
from .commands.completions_command import CompletionsCommand
from .config import ApplicationConfig
| 29.148148 | 77 | 0.623888 |
d5f79839ee8447226cf398d9edd00a635f9c473c
| 499 |
py
|
Python
|
simone/person/management.py
|
zuhalcakir/simone
|
88e04e6a228570d7d2a4c8bbf683e4903eeb592b
|
[
"BSD-3-Clause"
] | 16 |
2015-03-27T12:39:23.000Z
|
2020-05-09T13:10:24.000Z
|
simone/person/management.py
|
zuhalcakir/simone
|
88e04e6a228570d7d2a4c8bbf683e4903eeb592b
|
[
"BSD-3-Clause"
] | 2 |
2016-09-03T18:22:01.000Z
|
2018-01-04T12:20:01.000Z
|
simone/person/management.py
|
zuhalcakir/simone
|
88e04e6a228570d7d2a4c8bbf683e4903eeb592b
|
[
"BSD-3-Clause"
] | 8 |
2015-08-26T18:17:54.000Z
|
2017-05-17T13:28:32.000Z
|
#from django.dispatch import dispatcher
#def UserProfilePostInsert(sender, instance, signal, *args, **kwargs):
#"""
#Inserts a blank imap server entry (if necessary) and associates it with the user
#"""
#user = instance
#i = user.get_profile().imap_servers.create()
#user.get_profile().about = 'test'
#i.save()
#user.save_profile()
## we want this called after every user is inserted
#dispatcher.connect(UserProfilePostInsert, signal=signals.pre_save, sender=User)
| 33.266667 | 85 | 0.707415 |
d5f7c879e6735f223e0344e0abf1f6975431be03
| 1,009 |
py
|
Python
|
watcher/fly.py
|
cog-isa/htm-rl
|
baf5b67a11283d37165bf6a29d6808a234d6d98c
|
[
"MIT"
] | 1 |
2021-12-09T22:09:24.000Z
|
2021-12-09T22:09:24.000Z
|
watcher/fly.py
|
cog-isa/htm-rl
|
baf5b67a11283d37165bf6a29d6808a234d6d98c
|
[
"MIT"
] | null | null | null |
watcher/fly.py
|
cog-isa/htm-rl
|
baf5b67a11283d37165bf6a29d6808a234d6d98c
|
[
"MIT"
] | 1 |
2021-11-18T08:54:20.000Z
|
2021-11-18T08:54:20.000Z
|
from utils.drawer import Drawer
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("name", help="the name of the datafile")
parser.add_argument("--size", help="width,height")
args = parser.parse_args()
if args.size is None:
width, height = 1280, 720
else:
width, height = args.size.split(',')
drawer = Drawer('data/'+args.name, [int(width), int(height)])
while not drawer.window.should_close():
drawer.update()
# the main application loop
while not drawer.window.should_close() and not drawer.window.next and not drawer.window.previous:
drawer.process()
if drawer.window.next and drawer.current + 2 < len(drawer.data_base.keys()): drawer.current = drawer.current + 1
if drawer.window.previous and drawer.current > 0: drawer.current = drawer.current - 1
drawer.window.next = False
drawer.window.previous = False
drawer.window.terminate()
| 36.035714 | 120 | 0.663033 |
d5f85a460ddcb48e089b11f2309816efd46bb61e
| 3,263 |
py
|
Python
|
test/unit/test_structures.py
|
ourobouros/aws-encryption-sdk-python
|
1d0e40de7fef1b1131127a6f8626ef6a60739289
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_structures.py
|
ourobouros/aws-encryption-sdk-python
|
1d0e40de7fef1b1131127a6f8626ef6a60739289
|
[
"Apache-2.0"
] | 1 |
2019-05-30T22:14:47.000Z
|
2019-05-30T22:14:47.000Z
|
test/unit/test_structures.py
|
ourobouros/aws-encryption-sdk-python
|
1d0e40de7fef1b1131127a6f8626ef6a60739289
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Unit test suite for aws_encryption_sdk.structures"""
import pytest
from aws_encryption_sdk.identifiers import Algorithm, ContentType, ObjectType, SerializationVersion
from aws_encryption_sdk.structures import DataKey, EncryptedDataKey, MasterKeyInfo, MessageHeader, RawDataKey
from .unit_test_utils import all_invalid_kwargs, all_valid_kwargs
pytestmark = [pytest.mark.unit, pytest.mark.local]
VALID_KWARGS = {
MessageHeader: [
dict(
version=SerializationVersion.V1,
type=ObjectType.CUSTOMER_AE_DATA,
algorithm=Algorithm.AES_256_GCM_IV12_TAG16_HKDF_SHA384_ECDSA_P384,
message_id=b"aosiejfoaiwej",
encryption_context={},
encrypted_data_keys=set([]),
content_type=ContentType.FRAMED_DATA,
content_aad_length=32456,
header_iv_length=32456,
frame_length=234567,
)
],
MasterKeyInfo: [
dict(provider_id="fawnofijawef", key_info="ajsnoiajerofi"),
dict(provider_id=b"fawnofijawef", key_info="ajsnoiajerofi"),
dict(provider_id="fawnofijawef", key_info=b"ajsnoiajerofi"),
dict(provider_id=b"fawnofijawef", key_info=b"ajsnoiajerofi"),
],
RawDataKey: [
dict(key_provider=MasterKeyInfo(provider_id="asjnoa", key_info=b"aosjfoaiwej"), data_key=b"aosijfoewaijf")
],
DataKey: [
dict(
key_provider=MasterKeyInfo(provider_id="asjnoa", key_info=b"aosjfoaiwej"),
data_key=b"oaijefoawiejf",
encrypted_data_key=b"aisofiawjef",
)
],
EncryptedDataKey: [
dict(
key_provider=MasterKeyInfo(provider_id="asjnoa", key_info=b"aosjfoaiwej"), encrypted_data_key=b"aisofiawjef"
)
],
}
| 37.505747 | 120 | 0.70426 |
d5f884d302908ab9fba8e534f212148aba1c42a3
| 1,745 |
py
|
Python
|
codes/utils/mygraph.py
|
CristianLazoQuispe/Datathon-Interbank-2020
|
54f5d11fe83eb5a8ea8284be13d96e9e12978354
|
[
"MIT"
] | null | null | null |
codes/utils/mygraph.py
|
CristianLazoQuispe/Datathon-Interbank-2020
|
54f5d11fe83eb5a8ea8284be13d96e9e12978354
|
[
"MIT"
] | null | null | null |
codes/utils/mygraph.py
|
CristianLazoQuispe/Datathon-Interbank-2020
|
54f5d11fe83eb5a8ea8284be13d96e9e12978354
|
[
"MIT"
] | null | null | null |
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
path_results = '../results/images/'
# this function receives a dataset with a binary target and plots a histogram of its values
# plot histograms of train and test to understand the differences between them
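# Hypothetical sketch (assumed; the original function bodies are not shown in
# this excerpt) of a histogram helper matching the comments above:
#   def plot_target_hist(df, target_col, filename):
#       sns.countplot(x=target_col, data=df)
#       plt.savefig(path_results + filename)
#       plt.close()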
| 51.323529 | 140 | 0.671633 |
d5f8c3fa603dfdb79ab13ebb13d4e8e23422a12c
| 1,134 |
py
|
Python
|
src/pretix/base/validators.py
|
td00/pretix
|
e31bd7600c85598de135f2eb5012e2f33fdb1d11
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/base/validators.py
|
td00/pretix
|
e31bd7600c85598de135f2eb5012e2f33fdb1d11
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/base/validators.py
|
td00/pretix
|
e31bd7600c85598de135f2eb5012e2f33fdb1d11
|
[
"ECL-2.0",
"Apache-2.0"
] | 1 |
2017-08-09T17:11:28.000Z
|
2017-08-09T17:11:28.000Z
|
from django.core.exceptions import ValidationError
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
| 20.618182 | 64 | 0.552028 |
d5f8e7bbb353d3c7f7fae4eb9baaff7822b54512
| 32,192 |
py
|
Python
|
fortnitepy/ext/commands/bot.py
|
gfdb/fortnitepy
|
1cedbddee1f81c96fc60b586cd2c16398bc2d45f
|
[
"MIT"
] | 127 |
2019-07-15T15:55:30.000Z
|
2022-03-22T07:39:29.000Z
|
fortnitepy/ext/commands/bot.py
|
xMistt/fortnitepy
|
c64d72572e188a938e0b39a6d1fd1e8ee4842d31
|
[
"MIT"
] | 65 |
2019-07-15T22:48:35.000Z
|
2022-01-30T05:18:36.000Z
|
fortnitepy/ext/commands/bot.py
|
xMistt/fortnitepy
|
c64d72572e188a938e0b39a6d1fd1e8ee4842d31
|
[
"MIT"
] | 83 |
2019-07-18T12:37:58.000Z
|
2022-03-19T20:56:47.000Z
|
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import logging
import inspect
import asyncio
import types
import sys
import importlib
import collections
import traceback
from typing import Any, List, Optional, Mapping, Set
from fortnitepy.client import Client
from fortnitepy.auth import Auth
from fortnitepy.typedefs import MaybeCoro, ListOrTuple
from ._types import _BaseCommand
from .errors import (ExtensionFailed, ExtensionMissingEntryPoint,
ExtensionNotLoaded, ExtensionAlreadyLoaded,
ExtensionNotFound, CheckFailure, CommandError,
CommandNotFound)
from .core import GroupMixin
from .cog import Cog
from .view import StringView
from .context import Context
from .help import HelpCommand, FortniteHelpCommand
from .typedefs import Message
log = logging.getLogger(__name__)
_default = _DefaultRepr()
def after_invoke(self, coro: MaybeCoro) -> MaybeCoro:
r"""A decorator that registers a coroutine as a post-invoke hook.
A post-invoke hook is called directly after the command is
called. This makes it a useful function to clean-up database
connections or any type of clean up required.
This post-invoke hook takes a sole parameter, a :class:`.Context`.
.. note::
Similar to :meth:`~.Bot.before_invoke`\, this is not called unless
checks and argument parsing procedures succeed. This hook is,
however, **always** called regardless of the internal command
callback raising an error (i.e. :exc:`.CommandInvokeError`\).
This makes it ideal for clean-up scenarios.
Parameters
----------
coro:
The coroutine to register as the post-invoke hook.
Raises
------
TypeError
The coroutine passed is not actually a coroutine.
"""
if not asyncio.iscoroutinefunction(coro):
raise TypeError('The post-invoke hook must be a coroutine.')
self._after_invoke = coro
return coro
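    # Hypothetical usage sketch (assumed, not part of the original source):
    #   @bot.after_invoke
    #   async def log_completion(ctx):
    #       print(f'{ctx.command} finished')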
def add_cog(self, cog: Cog) -> None:
"""Adds a "cog" to the bot.
A cog is a class that has its own event listeners and commands.
Parameters
----------
cog: :class:`.Cog`
The cog to register to the bot.
Raises
------
TypeError
The cog does not inherit from :class:`.Cog`.
CommandError
An error happened during loading.
"""
if not isinstance(cog, Cog):
raise TypeError('Cogs must derive from Cog.')
cog = cog._inject(self)
self.__cogs[cog.__cog_name__] = cog
def remove_cog(self, name: str) -> None:
"""Removes a cog from the bot.
All registered commands and event listeners that the
cog has registered will be removed as well.
If no cog is found then this method has no effect.
Parameters
----------
name: :class:`str`
The name of the cog to remove.
"""
cog = self.__cogs.pop(name, None)
if cog is None:
return
help_command = self.help_command
if help_command and help_command.cog is cog:
help_command.cog = None
cog._eject(self)
def get_cog(self, name: str) -> Optional[Cog]:
"""Gets the cog instance requested.
If the cog is not found, ``None`` is returned instead.
Parameters
-----------
name: :class:`str`
The name of the cog you are requesting.
This is equivalent to the name passed via keyword
argument in class creation or the class name if unspecified.
"""
return self.__cogs.get(name)
def load_extension(self, name: str) -> None:
"""Loads an extension.
An extension is a python module that contains commands, cogs, or
listeners.
An extension must have a global function, ``extension_setup`` defined
as the entry point on what to do when the extension is loaded. This
entry point must have a single argument, the ``bot``.
Parameters
----------
name: :class:`str`
The extension name to load. It must be dot separated like
regular Python imports if accessing a sub-module. e.g.
``foo.test`` if you want to import ``foo/test.py``.
Raises
------
ExtensionNotFound
The extension could not be imported.
ExtensionAlreadyLoaded
The extension is already loaded.
ExtensionMissingEntryPoint
The extension does not have a extension_setup function.
ExtensionFailed
The extension or its setup function had an execution error.
"""
if name in self.__extensions:
raise ExtensionAlreadyLoaded(name)
spec = importlib.util.find_spec(name)
if spec is None:
raise ExtensionNotFound(name)
self._load_from_module_spec(spec, name)
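    # Hypothetical extension sketch (assumed, not part of the original source),
    # showing the required entry point in e.g. my_extension.py:
    #   def extension_setup(bot):
    #       bot.add_cog(SomeCog(bot))
    # which would then be loaded with bot.load_extension('my_extension').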
def unload_extension(self, name: str) -> None:
"""Unloads an extension.
When the extension is unloaded, all commands, listeners, and cogs are
removed from the bot and the module is un-imported.
The extension can provide an optional global function,
``cog_teardown``, to do miscellaneous clean-up if necessary. This
function takes a single parameter, the ``bot``, similar to
``extension_setup`` from :meth:`~.Bot.load_extension`.
Parameters
------------
name: :class:`str`
The extension name to unload. It must be dot separated like
regular Python imports if accessing a sub-module. e.g.
``foo.test`` if you want to import ``foo/test.py``.
Raises
-------
ExtensionNotLoaded
The extension was not loaded.
"""
lib = self.__extensions.get(name)
if lib is None:
raise ExtensionNotLoaded(name)
self._remove_module_references(lib.__name__)
self._call_module_finalizers(lib, name)
def reload_extension(self, name: str) -> None:
"""Atomically reloads an extension.
This replaces the extension with the same extension, only refreshed.
This is equivalent to a :meth:`unload_extension` followed by
a :meth:`load_extension` except done in an atomic way. That is, if an
operation fails mid-reload then the bot will roll-back to the prior
working state.
Parameters
------------
name: :class:`str`
The extension name to reload. It must be dot separated like
regular Python imports if accessing a sub-module. e.g.
``foo.test`` if you want to import ``foo/test.py``.
Raises
-------
ExtensionNotLoaded
The extension was not loaded.
ExtensionNotFound
The extension could not be imported.
ExtensionMissingEntryPoint
The extension does not have a extension_setup function.
ExtensionFailed
The extension setup function had an execution error.
"""
lib = self.__extensions.get(name)
if lib is None:
raise ExtensionNotLoaded(name)
# get the previous module states from sys modules
modules = {
name: module
for name, module in sys.modules.items()
if _is_submodule(lib.__name__, name)
}
try:
# Unload and then load the module...
self._remove_module_references(lib.__name__)
self._call_module_finalizers(lib, name)
self.load_extension(name)
except Exception:
# if the load failed, the remnants should have been
# cleaned from the load_extension function call
# so let's load it from our old compiled library.
lib.extension_setup(self)
self.__extensions[name] = lib
# revert sys.modules back to normal and raise back to caller
sys.modules.update(modules)
raise
async def get_prefix(self, message: Message) -> Any:
"""|coro|
Retrieves the prefix the bot is listening to with the message as
a context.
Parameters
----------
message: Union[:class:`fortnitepy.FriendMessage`, :class:`fortnitepy.PartyMessage`]
The message context to get the prefix of.
Returns
--------
Union[List[:class:`str`], :class:`str`]
A list of prefixes or a single prefix that the bot is
listening for.
""" # noqa
prefix = ret = self.command_prefix
if callable(prefix):
if asyncio.iscoroutinefunction(prefix):
ret = await prefix(self, message)
else:
ret = prefix(self, message)
if not isinstance(ret, str):
try:
ret = list(ret)
except TypeError:
# It's possible that a generator raised this exception. Don't
# replace it with our own error if that's the case.
if isinstance(ret, collections.abc.Iterable):
raise
raise TypeError('command_prefix must be plain string, '
'iterable of strings, or callable '
'returning either of these, not '
'{}'.format(ret.__class__.__name__))
if not ret:
raise ValueError('Iterable command_prefix must contain at '
'least one prefix')
return ret
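    # Hypothetical sketch (assumed, not part of the original source) of a
    # callable prefix that this method resolves per message:
    #   def dynamic_prefix(bot, message):
    #       return ['!', '?']
    #   bot = Bot(command_prefix=dynamic_prefix)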
def dispatch_error(self, ctx: Context, error: Exception) -> None:
if self._event_has_handler('command_error'):
futures = self.dispatch_event('command_error', ctx, error)
asyncio.ensure_future(self._wait_for_error_return(
futures,
ctx,
error
))
else:
self._print_error(ctx, error)
| 35.02938 | 91 | 0.584493 |
d5fad574122cf8647545ad83e7dc43147679cc22
| 1,129 |
py
|
Python
|
paths_win.py
|
tankbusta/rescache
|
86ca7f3fb66e28a8761f0995a300f57a73a9561d
|
[
"MIT"
] | 15 |
2015-03-05T17:03:08.000Z
|
2022-01-28T07:49:38.000Z
|
paths_win.py
|
tankbusta/rescache
|
86ca7f3fb66e28a8761f0995a300f57a73a9561d
|
[
"MIT"
] | null | null | null |
paths_win.py
|
tankbusta/rescache
|
86ca7f3fb66e28a8761f0995a300f57a73a9561d
|
[
"MIT"
] | 9 |
2015-03-06T09:56:30.000Z
|
2017-11-07T00:24:17.000Z
|
import _winreg
import os
def get_shared_cache_folder():
"""
Look in the registry for the configured cache folder.
If there is no entry, then we create one.
:return:
"""
_winreg.aReg = _winreg.ConnectRegistry(None, _winreg.HKEY_CURRENT_USER)
try:
key = _winreg.OpenKey(_winreg.aReg, r"SOFTWARE\CCP\EVEONLINE")
path, _ = _winreg.QueryValueEx(key, "CACHEFOLDER")
except OSError:
return None
return path
| 30.513514 | 84 | 0.70062 |
d5fb061a3a4378d9720ff3a451d5983678f6ed08
| 2,712 |
py
|
Python
|
venv/lib/python3.8/site-packages/dateparser/data/date_translation_data/ebu.py
|
yuta-komura/vishnu
|
67173b674d5f4f3be189474103612447ef69ab44
|
[
"MIT"
] | 1 |
2021-11-17T04:55:14.000Z
|
2021-11-17T04:55:14.000Z
|
dateparser/data/date_translation_data/ebu.py
|
cool-RR/dateparser
|
c38336df521cc57d947dc2c9111539a72f801652
|
[
"BSD-3-Clause"
] | null | null | null |
dateparser/data/date_translation_data/ebu.py
|
cool-RR/dateparser
|
c38336df521cc57d947dc2c9111539a72f801652
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
info = {
"name": "ebu",
"date_order": "DMY",
"january": [
"mweri wa mbere",
"mbe"
],
"february": [
"mweri wa kari",
"kai"
],
"march": [
"mweri wa kathat",
"kat"
],
"april": [
"mweri wa kana",
"kan"
],
"may": [
"mweri wa gatano",
"gat"
],
"june": [
"mweri wa gatantat",
"gan"
],
"july": [
"mweri wa mgwanja",
"mug"
],
"august": [
"mweri wa kanana",
"knn"
],
"september": [
"mweri wa kenda",
"ken"
],
"october": [
"mweri wa ikmi",
"iku"
],
"november": [
"mweri wa ikmi na mwe",
"imw"
],
"december": [
"mweri wa ikmi na kar",
"igi"
],
"monday": [
"njumatatu",
"tat"
],
"tuesday": [
"njumaine",
"ine"
],
"wednesday": [
"njumatano",
"tan"
],
"thursday": [
"aramithi",
"arm"
],
"friday": [
"njumaa",
"maa"
],
"saturday": [
"njumamothii",
"nmm"
],
"sunday": [
"kiumia",
"kma"
],
"am": [
"ki"
],
"pm": [
"ut"
],
"year": [
"mwaka"
],
"month": [
"mweri"
],
"week": [
"kiumia"
],
"day": [
"mthenya"
],
"hour": [
"ithaa"
],
"minute": [
"ndagka"
],
"second": [
"sekondi"
],
"relative-type": {
"1 year ago": [
"last year"
],
"0 year ago": [
"this year"
],
"in 1 year": [
"next year"
],
"1 month ago": [
"last month"
],
"0 month ago": [
"this month"
],
"in 1 month": [
"next month"
],
"1 week ago": [
"last week"
],
"0 week ago": [
"this week"
],
"in 1 week": [
"next week"
],
"1 day ago": [
"goro"
],
"0 day ago": [
"mnth"
],
"in 1 day": [
"rci"
],
"0 hour ago": [
"this hour"
],
"0 minute ago": [
"this minute"
],
"0 second ago": [
"now"
]
},
"locale_specific": {},
"skip": [
" ",
".",
",",
";",
"-",
"/",
"'",
"|",
"@",
"[",
"]",
""
]
}
| 15.859649 | 34 | 0.289823 |
d5fc2fcc2b0439d566be57074eaeae0f3e82e072
| 129 |
py
|
Python
|
deepa2/preptrain/__init__.py
|
debatelab/deepa2
|
1a9e8c357d7e3924808c703ec9f4a6611a4b5f93
|
[
"Apache-2.0"
] | null | null | null |
deepa2/preptrain/__init__.py
|
debatelab/deepa2
|
1a9e8c357d7e3924808c703ec9f4a6611a4b5f93
|
[
"Apache-2.0"
] | null | null | null |
deepa2/preptrain/__init__.py
|
debatelab/deepa2
|
1a9e8c357d7e3924808c703ec9f4a6611a4b5f93
|
[
"Apache-2.0"
] | null | null | null |
"""Preprocessing DeepA2 datasets for LM training"""
# flake8: noqa
from deepa2.preptrain.t2tpreprocessor import T2TPreprocessor
| 25.8 | 60 | 0.813953 |
d5fcff660972d9337742f70ae81e7f0f26eaadac
| 310 |
py
|
Python
|
setup.py
|
martinfarrow/awspk
|
c3b5f8ede44ca96473b95f52ddb2291a45828565
|
[
"MIT"
] | null | null | null |
setup.py
|
martinfarrow/awspk
|
c3b5f8ede44ca96473b95f52ddb2291a45828565
|
[
"MIT"
] | null | null | null |
setup.py
|
martinfarrow/awspk
|
c3b5f8ede44ca96473b95f52ddb2291a45828565
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(name='awspk',
version='0.1',
      description='An AWS CLI pen knife with loads of interesting stuff',
author='Martin Farrow',
author_email='[email protected]',
py_modules=['awspk'],
license='LICENSE',
)
| 23.846154 | 71 | 0.651613 |
d5fd3faa9866127caab32ba61fdd34ab4ec39ea3
| 36,968 |
py
|
Python
|
pyclicker/lib/python3.7/site-packages/Xlib/display.py
|
JayRovacsek/pyautoclick
|
e136a58c129332933eb8455dd7c8e16222d54fb2
|
[
"MIT"
] | 1 |
2022-01-25T22:52:58.000Z
|
2022-01-25T22:52:58.000Z
|
Xlib/display.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
Xlib/display.py
|
EnjoyLifeFund/Debian_py36_packages
|
1985d4c73fabd5f08f54b922e73a9306e09c77a5
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
# Xlib.display -- high level display object
#
# Copyright (C) 2000 Peter Liljenberg <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
# Python modules
import types
# Python 2/3 compatibility.
from six import create_unbound_method
# Xlib modules
from . import error
from . import ext
from . import X
# Xlib.protocol modules
from .protocol import display as protocol_display
from .protocol import request, event, rq
# Xlib.xobjects modules
from .xobject import resource
from .xobject import drawable
from .xobject import fontable
from .xobject import colormap
from .xobject import cursor
_resource_baseclasses = {
'resource': resource.Resource,
'drawable': drawable.Drawable,
'window': drawable.Window,
'pixmap': drawable.Pixmap,
'fontable': fontable.Fontable,
'font': fontable.Font,
'gc': fontable.GC,
'colormap': colormap.Colormap,
'cursor': cursor.Cursor,
}
_resource_hierarchy = {
'resource': ('drawable', 'window', 'pixmap',
'fontable', 'font', 'gc',
'colormap', 'cursor'),
'drawable': ('window', 'pixmap'),
'fontable': ('font', 'gc')
}
| 38.872766 | 123 | 0.599681 |
d5fd49cbaa7ded4b224914739446f1a0434a93af
| 657 |
py
|
Python
|
Others/qupc/qupc2014/c/main.py
|
KATO-Hiro/AtCoder
|
cbbdb18e95110b604728a54aed83a6ed6b993fde
|
[
"CC0-1.0"
] | 2 |
2020-06-12T09:54:23.000Z
|
2021-05-04T01:34:07.000Z
|
Others/qupc/qupc2014/c/main.py
|
KATO-Hiro/AtCoder
|
cbbdb18e95110b604728a54aed83a6ed6b993fde
|
[
"CC0-1.0"
] | 961 |
2020-06-23T07:26:22.000Z
|
2022-03-31T21:34:52.000Z
|
Others/qupc/qupc2014/c/main.py
|
KATO-Hiro/AtCoder
|
cbbdb18e95110b604728a54aed83a6ed6b993fde
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
if __name__ == "__main__":
main()
| 21.193548 | 50 | 0.47032 |
d5fe5092d56595790c2072c2485827d644f9fbac
| 1,104 |
py
|
Python
|
NetCatKS/DProtocol/api/interfaces/subscribers/__init__.py
|
dimddev/NetCatKS-CP
|
2d9e72b2422e344569fd4eb154866b98e9707561
|
[
"BSD-2-Clause"
] | null | null | null |
NetCatKS/DProtocol/api/interfaces/subscribers/__init__.py
|
dimddev/NetCatKS-CP
|
2d9e72b2422e344569fd4eb154866b98e9707561
|
[
"BSD-2-Clause"
] | null | null | null |
NetCatKS/DProtocol/api/interfaces/subscribers/__init__.py
|
dimddev/NetCatKS-CP
|
2d9e72b2422e344569fd4eb154866b98e9707561
|
[
"BSD-2-Clause"
] | null | null | null |
__author__ = 'dimd'
from zope.interface import Interface, Attribute
| 26.285714 | 112 | 0.729167 |
d5fedd3bf29602a1334d3dbff567321747bbca26
| 4,652 |
py
|
Python
|
analysis/notebooks/helper/anova.py
|
dpedrosac/DBSgait
|
6df44cf975d43f9e932ef10144bfb7c1b5390b7b
|
[
"MIT"
] | 1 |
2021-09-29T05:53:38.000Z
|
2021-09-29T05:53:38.000Z
|
analysis/notebooks/helper/anova.py
|
dpedrosac/DBSgait
|
6df44cf975d43f9e932ef10144bfb7c1b5390b7b
|
[
"MIT"
] | null | null | null |
analysis/notebooks/helper/anova.py
|
dpedrosac/DBSgait
|
6df44cf975d43f9e932ef10144bfb7c1b5390b7b
|
[
"MIT"
] | 1 |
2021-09-22T08:48:47.000Z
|
2021-09-22T08:48:47.000Z
|
import numpy as np
import pandas as pd
from scipy.stats import f_oneway
from typing import Dict, Tuple, Set
def extract_significant_p(df: pd.DataFrame, p_value_limit: float):
"""Return a df, which replaces values that are above p_value_limit with `None`"""
return (
df.loc(axis=1)[f"p-value"]
.where(df[f"p-value"] < p_value_limit)
.dropna(axis=0, how="all")
)
def _calculate_anova(data: pd.DataFrame) -> Tuple:
"""Calculate one-way anova using each column as a different measurement."""
parameter = [column for column in data.columns if column != "configuration"][0]
data_ = [
data[data["configuration"] == configuration][parameter].T.to_numpy()
for configuration in set(data["configuration"])
]
return f_oneway(*data_)
def anova(
dataset: Dict, gait_test: str, gait_parameter: str
) -> Tuple[pd.DataFrame, Set]:
"""Calculat a one-way anova for a single gait test and gait parameter.
Parameters
----------
dataset
A dictionary, where the keys are descriptions for different subjects. The values are dataframes, which have a
pd.MultiIndex as columns. The first level describes the test paradigm, e.g. "slow" / "fast". The second level
        describes the DBS configuration, e.g. "130", "100", "OFF". The third level is the gait parameter,
e.g. stride length.
gait_test
Used to select the first level of the columns
gait_parameter
        Used to select the third level of the columns
Returns
-------
    anova_df
        A dataframe with one row per subject and one column per foot ("LeftFoot", "RightFoot"). The values are
        one-way ANOVA p-values computed across all available DBS configurations (including OFF) for this
        specific `gait_test` and `gait_parameter`.
    not_evaluated
        A set of strings describing the gait test, subject, missing configurations and foot that could not be
        fully evaluated.
"""
anova_dict = {}
anova_df = pd.DataFrame()
not_evaluated = []
for patient, patient_data in dataset.items():
anova_dict[patient] = {"LeftFoot": (None, None), "RightFoot": (None, None)}
for foot in set(patient_data["foot"]):
missing_condition = None
foot_data = patient_data[
(patient_data["foot"] == foot) & (patient_data["test"] == gait_test)
][[gait_parameter, "configuration"]]
possible_configurations = {
"030",
"033",
"040",
"066",
"085",
"090",
"100",
"130",
"OFF",
}
actual_configurations = set(foot_data["configuration"])
missing_configurations = possible_configurations - actual_configurations
if missing_configurations:
not_evaluated.append(
" ".join([gait_test, patient, *missing_configurations, foot])
)
if len(missing_configurations) > (len(possible_configurations) - 2):
print(
"Not evaluating this foot, because to few configurations available."
)
continue
# print(set(foot_data.columns) - set(foot_data_valid.columns))
anova_dict[patient][foot] = _calculate_anova(foot_data)
row = pd.DataFrame(
index=[patient],
columns=pd.MultiIndex.from_arrays(
[["p-value"] * 2, ["LeftFoot", "RightFoot"]]
),
data=[
[
anova_dict[patient]["LeftFoot"][1],
anova_dict[patient]["RightFoot"][1],
]
],
)
anova_df = pd.concat([anova_df, row])
return anova_df, set(not_evaluated)
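# Hypothetical usage sketch (assumed, not part of the original module); the gait
# test and parameter names below are placeholders:
#   anova_df, skipped = anova(dataset, gait_test="slow", gait_parameter="stride_length")
#   significant = extract_significant_p(anova_df, p_value_limit=0.05)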
| 36.34375 | 117 | 0.589209 |
d5ff5d19a2e1fbd8c3dcb000fc779bc359c47c61
| 2,251 |
py
|
Python
|
bux_recorder/utils.py
|
roaldarbol/bux
|
356817bbc7139c972d640c64fb8fcba27b70b3f7
|
[
"MIT"
] | null | null | null |
bux_recorder/utils.py
|
roaldarbol/bux
|
356817bbc7139c972d640c64fb8fcba27b70b3f7
|
[
"MIT"
] | 9 |
2021-12-09T18:07:25.000Z
|
2022-03-30T23:22:45.000Z
|
bux_recorder/utils.py
|
roaldarbol/bux
|
356817bbc7139c972d640c64fb8fcba27b70b3f7
|
[
"MIT"
] | null | null | null |
import os
import platform
import time
import csv
import serial
import cv2
import tkinter as tk
from tkinter.filedialog import askdirectory
from serial.tools import list_ports
# From https://raspberrypi.stackexchange.com/a/118473
def list_ports():
"""
    Tests camera ports and returns a tuple of (available_ports, working_ports, non_working_ports).
"""
non_working_ports = []
dev_port = 0
working_ports = []
available_ports = []
while len(non_working_ports) < 6: # if there are more than 5 non working ports stop the testing.
camera = cv2.VideoCapture(dev_port)
if not camera.isOpened():
non_working_ports.append(dev_port)
# print("Port %s is not working." %dev_port)
else:
is_reading, img = camera.read()
w = camera.get(3)
h = camera.get(4)
if is_reading:
# print("Port %s is working and reads images (%s x %s)" %(dev_port,h,w))
working_ports.append(dev_port)
else:
# print("Port %s for camera ( %s x %s) is present but does not reads." %(dev_port,h,w))
available_ports.append(dev_port)
dev_port +=1
return available_ports,working_ports,non_working_ports
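# Hypothetical usage sketch (assumed, not part of the original module):
#   available, working, non_working = list_ports()
#   print(f"working camera ports: {working}")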
| 30.835616 | 103 | 0.631719 |
d5fff25cf4828ce6ee852dfb013719288c2e6acf
| 1,712 |
py
|
Python
|
a2e/optimizer/hpbandster/_model_worker.py
|
maechler/a2e
|
c28f546ca5fc3fdb9c740ea5f0f85d2aca044a00
|
[
"MIT"
] | 1 |
2021-03-19T09:09:41.000Z
|
2021-03-19T09:09:41.000Z
|
a2e/optimizer/hpbandster/_model_worker.py
|
maechler/a2e
|
c28f546ca5fc3fdb9c740ea5f0f85d2aca044a00
|
[
"MIT"
] | null | null | null |
a2e/optimizer/hpbandster/_model_worker.py
|
maechler/a2e
|
c28f546ca5fc3fdb9c740ea5f0f85d2aca044a00
|
[
"MIT"
] | null | null | null |
from hpbandster.core.worker import Worker
from a2e.model import AbstractModel
from a2e.optimizer import EvaluationResultAggregator
from a2e.utility import inf_nan_to_float_max
| 31.703704 | 138 | 0.657126 |
9101c6f835f7bccd8700b747bc71f0d2474bb905
| 1,245 |
py
|
Python
|
xagents/__init__.py
|
schissmantics/xagents
|
04f1b96f767903c62138b7d63986f16edfe5f240
|
[
"MIT"
] | 37 |
2021-08-05T16:31:54.000Z
|
2022-01-16T11:49:46.000Z
|
xagents/__init__.py
|
schissmantics/xagents
|
04f1b96f767903c62138b7d63986f16edfe5f240
|
[
"MIT"
] | 1 |
2022-01-08T17:22:53.000Z
|
2022-01-08T17:22:53.000Z
|
xagents/__init__.py
|
schissmantics/xagents
|
04f1b96f767903c62138b7d63986f16edfe5f240
|
[
"MIT"
] | 3 |
2021-08-13T06:25:22.000Z
|
2021-08-20T01:37:15.000Z
|
from xagents import a2c, acer, ddpg, dqn, ppo, td3, trpo
from xagents.a2c.agent import A2C
from xagents.acer.agent import ACER
from xagents.base import OffPolicy
from xagents.ddpg.agent import DDPG
from xagents.dqn.agent import DQN
from xagents.ppo.agent import PPO
from xagents.td3.agent import TD3
from xagents.trpo.agent import TRPO
from xagents.utils.cli import play_args, train_args, tune_args
from xagents.utils.common import register_models
__author__ = 'schissmantics'
__email__ = '[email protected]'
__license__ = 'MIT'
__version__ = '1.0.1'
agents = {
'a2c': {'module': a2c, 'agent': A2C},
'acer': {'module': acer, 'agent': ACER},
'dqn': {'module': dqn, 'agent': DQN},
'ppo': {'module': ppo, 'agent': PPO},
'td3': {'module': td3, 'agent': TD3},
'trpo': {'module': trpo, 'agent': TRPO},
'ddpg': {'module': ddpg, 'agent': DDPG},
}
register_models(agents)
commands = {
'train': (train_args, 'fit', 'Train given an agent and environment'),
'play': (
play_args,
'play',
'Play a game given a trained agent and environment',
),
'tune': (
tune_args,
'',
'Tune hyperparameters given an agent, hyperparameter specs, and environment',
),
}
| 30.365854 | 85 | 0.658635 |
910239e4d64bcd7a23fd58a2e98cbfc09b91c703
| 65 |
py
|
Python
|
IsraeliQueue/__init__.py
|
YonLiud/Israeli-Queue
|
53e14e68701c06efdd23ba6584a2e8a561e60cd9
|
[
"MIT"
] | 2 |
2021-06-20T23:47:58.000Z
|
2021-06-28T19:15:41.000Z
|
IsraeliQueue/__init__.py
|
YonLiud/Israeli-Queue
|
53e14e68701c06efdd23ba6584a2e8a561e60cd9
|
[
"MIT"
] | null | null | null |
IsraeliQueue/__init__.py
|
YonLiud/Israeli-Queue
|
53e14e68701c06efdd23ba6584a2e8a561e60cd9
|
[
"MIT"
] | null | null | null |
from .IsraeliQueue import IsraeliQueue, Item, IsraeliQueueByType
| 32.5 | 64 | 0.861538 |
91031df628ba0d6e12adf7ed9e0154be2d4256a3
| 3,794 |
py
|
Python
|
examples/MMPT/mmpt_cli/localjob.py
|
Este1le/fairseq
|
0fa073e0e0ddd90ff6850588e655c9566bb222ff
|
[
"MIT"
] | null | null | null |
examples/MMPT/mmpt_cli/localjob.py
|
Este1le/fairseq
|
0fa073e0e0ddd90ff6850588e655c9566bb222ff
|
[
"MIT"
] | null | null | null |
examples/MMPT/mmpt_cli/localjob.py
|
Este1le/fairseq
|
0fa073e0e0ddd90ff6850588e655c9566bb222ff
|
[
"MIT"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
from mmpt.utils import recursive_config
| 32.152542 | 120 | 0.524512 |
9103b4aa5d2e6a5156212d03a9f3245d1c26b5fe
| 1,154 |
py
|
Python
|
tron/Nubs/deprecated/tcc25m-old.py
|
sdss/tron
|
886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322
|
[
"BSD-3-Clause"
] | null | null | null |
tron/Nubs/deprecated/tcc25m-old.py
|
sdss/tron
|
886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322
|
[
"BSD-3-Clause"
] | null | null | null |
tron/Nubs/deprecated/tcc25m-old.py
|
sdss/tron
|
886c5c5fb6341ad85e4a9f5d6f5ecb6bbc0d8322
|
[
"BSD-3-Clause"
] | null | null | null |
import os.path
from tron import g, hub
from tron.Hub.Command.Encoders.ASCIICmdEncoder import ASCIICmdEncoder
from tron.Hub.Nub.TCCShellNub import TCCShellNub
from tron.Hub.Reply.Decoders.ASCIIReplyDecoder import ASCIIReplyDecoder
name = 'tcc'
| 25.644444 | 97 | 0.598787 |
91041f909c9548e67cfcf291bb438d3ff51c9d02
| 1,207 |
py
|
Python
|
src/PtDb/test.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | 1 |
2020-02-17T08:18:29.000Z
|
2020-02-17T08:18:29.000Z
|
src/PtDb/test.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
src/PtDb/test.py
|
ptphp/PyLib
|
07ac99cf2deb725475f5771b123b9ea1375f5e65
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding=utf-8 -*-
'''
Created on 2013-3-31
@author: Joseph
'''
import PtDb
if __name__ == '__main__':
PtDb.config = {
'sqlite':{
'type':'sqlite',
'dbname':"data1.db"
},
'default':{
'type':'mysql',
'host':'localhost',
'port':3306,
'dbname':'game110_dev',
'dbuser':'root',
'dbpass':'root',
'charset':'utf8',
},
'default1':{
'type':'mysql',
'host':'localhost',
'port':3306,
'dbname':'game110_dev',
'dbuser':'root',
'dbpass':'root',
'charset':'utf8',
},
}
PtDb.init('sqlite').open("test.db")
PtDb.init('sqlite').open("test1.db")
PtDb.init()
print PtDb.init().getAll("select * from orders")
print PtDb.init().getOne("select * from orders limit 1")
| 30.175 | 60 | 0.351284 |
9104cf33f9cb7c9a9e220cded851c4d2434c8d05
| 49,016 |
py
|
Python
|
services/object_storage/tests/integ/test_object_storage_bulk_operations.py
|
honzajavorek/oci-cli
|
6ea058afba323c6b3b70e98212ffaebb0d31985e
|
[
"Apache-2.0"
] | null | null | null |
services/object_storage/tests/integ/test_object_storage_bulk_operations.py
|
honzajavorek/oci-cli
|
6ea058afba323c6b3b70e98212ffaebb0d31985e
|
[
"Apache-2.0"
] | null | null | null |
services/object_storage/tests/integ/test_object_storage_bulk_operations.py
|
honzajavorek/oci-cli
|
6ea058afba323c6b3b70e98212ffaebb0d31985e
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2019, Oracle and/or its affiliates. All rights reserved.
import filecmp
import json
import pytest
import oci
import services.object_storage.src.oci_cli_object_storage as oci_cli_object_storage
import os
import random
import shutil
import six
import string
from tests import util
from tests import test_config_container
from mimetypes import guess_type
OBJECTS_TO_CREATE_IN_BUCKET_FOR_BULK_GET = 100
OBJECTS_TO_CREATE_IN_FOLDER_FOR_BULK_PUT = 20
CONTENT_STRING_LENGTH = 5000
MID_SIZED_FILE_IN_MEBIBTYES = 20
LARGE_CONTENT_FILE_SIZE_IN_MEBIBYTES = 150 # Default multipart is 128MiB
# Holds the objects we create and their content so that we can verify results
bulk_get_object_to_content = {}
bulk_get_prefix_to_object = {
'a/b/c/d': [],
'a/b/c': [],
'a/b': [],
'/a': [],
'': []
}
bulk_get_bucket_name = None
bulk_put_large_files = set()
bulk_put_mid_sized_files = set()
root_bulk_put_folder = None
bulk_put_bucket_name = None
# Generate test data for different operations:
#
# Bulk Get: create a new bucket and populate it with some objects, then tear it all down afterwards
# Bulk Put: create a folder structure containing small and large files, then tear it all down afterwards
# Bulk Delete: uses the folders and files generated for bulk put
# Since we've created a reasonable number of objects in this test suite, it's a good opportunity to test using the --all and --limit parameters
# Bulk puts objects, uses multipart where appropriate (when we breach the default of 128MiB)
# Bulk puts objects with --content-type as auto
# Tests that multipart params are applied:
#
# - Try to upload with a part size of 10MiB (this will force the large and mid-sized files to be multipart uploaded)
# - Try to upload with multipart disabled
def invoke(commands, debug=False, ** args):
if debug is True:
commands = ['--debug'] + commands
return util.invoke_command(commands, ** args)
def get_count_of_files_in_folder_and_subfolders(directory):
file_count = 0
for dir_name, subdir_list, file_list in os.walk(directory):
file_count = file_count + len(file_list)
return file_count
def generate_random_string(length):
if test_config_container.using_vcr_with_mock_responses():
return 'a' * length
else:
return ''.join(random.choice(string.ascii_lowercase) for i in range(length))
# Pull JSON data out of output which may have stuff other than JSON in it. Assumes that nothing
# comes after the JSON data
# For the bulk operations, object names are taken from the file path of the thing we uploaded. Normalize to
# / in the paths (Windows can go both ways) then chop the front bit off
| 50.118609 | 275 | 0.718643 |
9104fd2a412765ae4aa352d6517c087a930d10a7
| 2,304 |
py
|
Python
|
Extras/benchmark/simple-benchmark.py
|
yunhaom94/redis-writeanywhere
|
1fefed820811fb89585b2b153d916c3b0fa507a6
|
[
"BSD-3-Clause"
] | null | null | null |
Extras/benchmark/simple-benchmark.py
|
yunhaom94/redis-writeanywhere
|
1fefed820811fb89585b2b153d916c3b0fa507a6
|
[
"BSD-3-Clause"
] | null | null | null |
Extras/benchmark/simple-benchmark.py
|
yunhaom94/redis-writeanywhere
|
1fefed820811fb89585b2b153d916c3b0fa507a6
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python3
import random
import string
import time
import subprocess
import os
import redis
import threading
def generate_string(string_size, size, dict):
'''
https://stackoverflow.com/questions/16308989/fastest-method-to-generate-big-random-string-with-lower-latin-letters
'''
for i in range(size):
min_lc = ord(b'a')
len_lc = 26
key = bytearray(random.getrandbits(8*string_size).to_bytes(string_size, 'big'))
for i, b in enumerate(key):
key[i] = min_lc + b % len_lc # convert 0..255 to 97..122
key = key.decode()
val = key
dict[key] = val
if __name__ == "__main__":
    size = 1000 # TODO: make this a command line argument
port = 7000
FNULL = open(os.devnull, 'w')
string_size = 100000
partition = int(size/4)
print("generating test sets")
d1 = {}
d2 = {}
d3 = {}
d4 = {}
t1 = threading.Thread(target=generate_string, args = (string_size, partition, d1))
t2 = threading.Thread(target=generate_string, args = (string_size, partition, d2))
t3 = threading.Thread(target=generate_string, args = (string_size, partition, d3))
t4 = threading.Thread(target=generate_string, args = (string_size, partition, d4))
t1.start()
t2.start()
t3.start()
t4.start()
    t1.join()
    t2.join()
    t3.join()
    t4.join()
test_set = {}
test_set.update(d1)
test_set.update(d2)
test_set.update(d3)
test_set.update(d4)
print(len(test_set))
print("running tests...")
r = redis.StrictRedis(host='localhost', port=port, db=0)
start = time.time()
print("testing set")
for k,v in test_set.items():
r.set(k, v)
r.wait(3, 0)
print("testing get")
for k,v in test_set.items():
r.get(k)
r.wait(3, 0)
end = time.time()
runtime = end - start
ops = size * 2
throughput = float(ops/runtime)
latency = float(1/throughput)
print("total run time: {runtime}s \n\
number of total operations with 50% Set and 50% Get: {ops} \n\
avg. throughput: {throughput} ops/s \n\
avg. latency: {latency} s".format(
runtime=runtime,
ops=ops,
throughput=throughput,
latency=latency
))
| 22.368932 | 118 | 0.598524 |
910584bb0f10ffc80b6bcbf199bcc87aa47ac74d
| 1,302 |
py
|
Python
|
challenges/015-setintersection.py
|
Widdershin/CodeEval
|
c1c769363763d6f7e1ac5bf3707de2731c3bd926
|
[
"MIT"
] | null | null | null |
challenges/015-setintersection.py
|
Widdershin/CodeEval
|
c1c769363763d6f7e1ac5bf3707de2731c3bd926
|
[
"MIT"
] | null | null | null |
challenges/015-setintersection.py
|
Widdershin/CodeEval
|
c1c769363763d6f7e1ac5bf3707de2731c3bd926
|
[
"MIT"
] | null | null | null |
"""
https://www.codeeval.com/browse/30/
Set Intersection
Challenge Description:
You are given two sorted lists of numbers (ascending order). The lists
themselves are comma delimited and the two lists are semicolon
delimited. Print out the intersection of these two sets.
Input Sample:
File containing two lists of ascending order sorted integers, comma
delimited, one per line. E.g.
1,2,3,4;4,5,6
20,21,22;45,46,47
7,8,9;8,9,10,11,12
Output Sample:
Print out the ascending order sorted intersection of the two lists,
one per line. Print empty new line in case the lists have
no intersection. E.g.
4
8,9
"""
###### IO Boilerplate ######
import sys
if len(sys.argv) < 2:
input_file_name = "15-setintersection-in.txt"
else:
input_file_name = sys.argv[1]
with open(input_file_name) as input_file:
input_lines = map(lambda x: x.strip(), filter(lambda x: x != '', input_file.readlines()))
###### /IO Boilerplate ######
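# Minimal sketch of a solution (assumed, not part of the original file; the
# original main() is not shown). It only follows the problem statement above:
# intersect the two comma-separated lists and print the result in ascending order.
def main():
    for line in input_lines:
        first, second = line.split(';')
        common = set(first.split(',')) & set(second.split(','))
        print(','.join(sorted(common, key=int)))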
if __name__ == '__main__':
main()
| 20.666667 | 93 | 0.654378 |
9106509f9ec5a979f79cad4305026bbe9239af41
| 9,920 |
py
|
Python
|
python/arch/api/table/session.py
|
GentleWang1011/eggroll
|
417b029958e0e0ec6f0e1eb03d9ecdf4d5cff47c
|
[
"Apache-2.0"
] | 1 |
2020-10-23T03:18:54.000Z
|
2020-10-23T03:18:54.000Z
|
python/arch/api/table/session.py
|
GentleWang1011/eggroll
|
417b029958e0e0ec6f0e1eb03d9ecdf4d5cff47c
|
[
"Apache-2.0"
] | null | null | null |
python/arch/api/table/session.py
|
GentleWang1011/eggroll
|
417b029958e0e0ec6f0e1eb03d9ecdf4d5cff47c
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import datetime
import threading
from typing import Iterable
import six
from arch.api import WorkMode, Backend
from arch.api.table.table import Table
from eggroll.core.constants import StoreTypes
| 39.055118 | 113 | 0.541431 |
91085824641d29cf6a64bb1d7961d3c8c9b1d9df
| 10,481 |
py
|
Python
|
experiments/vitchyr/vaes/learn_swirl_vae.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
experiments/vitchyr/vaes/learn_swirl_vae.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
experiments/vitchyr/vaes/learn_swirl_vae.py
|
Asap7772/rail-rl-franka-eval
|
4bf99072376828193d05b53cf83c7e8f4efbd3ba
|
[
"MIT"
] | null | null | null |
"""
VAE on the swirl task.
Basically, VAEs don't work. It's probably because the prior isn't very good
and/or because the learning signal is pretty weak when both the encoder and
decoder change quickly. However, I tried also alternating between the two,
and that didn't seem to help.
"""
from torch.distributions import Normal
from torch.optim import Adam
import torch
import numpy as np
import matplotlib.pyplot as plt
from torch import nn as nn
import railrl.torch.pytorch_util as ptu
SWIRL_RATE = 1
T = 10
BS = 128
N_BATCHES = 2000
N_VIS = 1000
HIDDEN_SIZE = 32
VERBOSE = False
def kl_to_prior(means, log_stds, stds):
"""
KL between a Gaussian and a standard Gaussian.
https://stats.stackexchange.com/questions/60680/kl-divergence-between-two-multivariate-gaussians
"""
return 0.5 * (
- 2 * log_stds # log std_prior = 0
- 1 # d = 1
+ stds ** 2
+ means ** 2
)
def t_to_xy(t):
if len(t.shape) == 2:
t = t[:, 0]
x = t * np.cos(t * SWIRL_RATE) / T
y = t * np.sin(t * SWIRL_RATE) / T
return np.array([x, y]).T
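def swirl_data(batch_size):
    """Sample (x, t) pairs from the swirl.

    Hedged reconstruction of the data generator this script expects: t is
    drawn uniformly from [0, T] and x is its 2D position on the swirl,
    computed with t_to_xy above.
    """
    t = np.random.uniform(0, T, size=(batch_size, 1))
    x = t_to_xy(t)
    return x, t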
def pretrain_encoder(encoder, opt):
losses = []
for _ in range(1000):
x_np, y_np = swirl_data(BS)
x = ptu.np_to_var(x_np)
y = ptu.np_to_var(y_np)
y_hat = encoder.encode(x)
loss = ((y_hat - y) ** 2).mean()
opt.zero_grad()
loss.backward()
opt.step()
losses.append(loss.data.numpy())
if VERBOSE:
x_np, y_np = swirl_data(N_VIS)
x = ptu.np_to_var(x_np)
y_hat = encoder.encode(x)
y_hat_np = y_hat.data.numpy()
x_hat_np = t_to_xy(y_hat_np[:, 0])
plt.subplot(2, 1, 1)
plt.plot(np.array(losses))
plt.title("Training Loss")
plt.subplot(2, 1, 2)
plt.plot(x_np[:, 0], x_np[:, 1], '.')
plt.plot(x_hat_np[:, 0], x_hat_np[:, 1], '.')
plt.title("Samples")
plt.legend(["Samples", "Estimates"])
plt.show()
if __name__ == '__main__':
train_alternating()
# train()
| 30.556851 | 100 | 0.621983 |
91087a71b49d992aa86f465838203ca33ae315a2
| 893 |
py
|
Python
|
litex/build/openfpgaloader.py
|
JosephBushagour/litex
|
2b49430f2c53c4a8caa66b678af4660127b546e4
|
[
"ADSL"
] | null | null | null |
litex/build/openfpgaloader.py
|
JosephBushagour/litex
|
2b49430f2c53c4a8caa66b678af4660127b546e4
|
[
"ADSL"
] | null | null | null |
litex/build/openfpgaloader.py
|
JosephBushagour/litex
|
2b49430f2c53c4a8caa66b678af4660127b546e4
|
[
"ADSL"
] | null | null | null |
#
# This file is part of LiteX.
#
# Copyright (c) 2020 Florent Kermarrec <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause
from litex.build.tools import write_to_file
from litex.build.generic_programmer import GenericProgrammer
# openFPGAloader ------------------------------------------------------------------------------------------
| 31.892857 | 107 | 0.603583 |
91095212fe94005bb0badaf0b1144da0c2a0e7f0
| 300 |
py
|
Python
|
freehackquest_libclient_py/__init__.py
|
freehackquest/libfhqcli-py
|
382242943047b63861aad0f41bb89c82e755963c
|
[
"Apache-2.0"
] | null | null | null |
freehackquest_libclient_py/__init__.py
|
freehackquest/libfhqcli-py
|
382242943047b63861aad0f41bb89c82e755963c
|
[
"Apache-2.0"
] | null | null | null |
freehackquest_libclient_py/__init__.py
|
freehackquest/libfhqcli-py
|
382242943047b63861aad0f41bb89c82e755963c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2021 FreeHackQuest Team <[email protected]>
"""This file was automatically generated by fhq-server
Version: v0.2.47
Date: 2022-01-01 07:15:35
"""
from freehackquest_libclient_py.freehackquest_client import FreeHackQuestClient
| 33.333333 | 79 | 0.77 |
9109df56e39b2986de46c0b2bc4cedc05e614932
| 5,234 |
py
|
Python
|
exchange_sockets/bitstamp_websocket.py
|
SpiralDevelopment/crypto-hft-data
|
205f01fd555eab4f636ffbb701dfcde53d27becc
|
[
"MIT"
] | 31 |
2020-07-20T14:11:39.000Z
|
2022-03-17T03:18:33.000Z
|
exchange_sockets/bitstamp_websocket.py
|
SpiralDevelopment/crypto-hft-data
|
205f01fd555eab4f636ffbb701dfcde53d27becc
|
[
"MIT"
] | null | null | null |
exchange_sockets/bitstamp_websocket.py
|
SpiralDevelopment/crypto-hft-data
|
205f01fd555eab4f636ffbb701dfcde53d27becc
|
[
"MIT"
] | 11 |
2020-07-20T14:11:52.000Z
|
2022-03-14T04:20:19.000Z
|
from exchange_sockets.exchange_websocket import ExchangeWebSocket
from singletones.custom_logger import MyLogger
import websocket
import threading
from time import sleep
from time import time
import json
import ssl
logger = MyLogger()
| 35.364865 | 108 | 0.488154 |
910a76a4ae610e5e78371c5e387ad8044c415dcd
| 2,509 |
py
|
Python
|
src/data_loading.py
|
katerakelly/pytorch-maml
|
75907aca148ad053dfaf75fc138319f0d89534a8
|
[
"MIT"
] | 565 |
2017-08-29T02:02:30.000Z
|
2022-03-28T13:44:55.000Z
|
src/data_loading.py
|
lolinkun/pytorch-maml
|
75907aca148ad053dfaf75fc138319f0d89534a8
|
[
"MIT"
] | 20 |
2017-10-23T02:19:51.000Z
|
2021-06-02T07:17:28.000Z
|
src/data_loading.py
|
lolinkun/pytorch-maml
|
75907aca148ad053dfaf75fc138319f0d89534a8
|
[
"MIT"
] | 140 |
2017-09-09T09:18:15.000Z
|
2022-03-28T04:15:26.000Z
|
import numpy as np
import random
import torch
from torch.utils.data import DataLoader
from torch.utils.data.sampler import Sampler
import torchvision.transforms as transforms
from dataset import Omniglot, MNIST
'''
Helpers for loading class-balanced few-shot tasks
from datasets
'''
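class ClassBalancedSampler(Sampler):
    '''
    Hedged sketch of a class-balanced sampler (the concrete helpers are not
    shown here): assumes a class-major dataset layout, i.e.
    index = class_idx * num_inst + instance_idx, and yields indices so that
    every consecutive block of num_cl samples holds one instance per class.
    '''

    def __init__(self, num_cl, num_inst, batch_cutoff=None):
        self.num_cl = num_cl
        self.num_inst = num_inst
        self.batch_cutoff = batch_cutoff

    def __iter__(self):
        # Shuffle instances within each class, then interleave the classes
        batches = [[j * self.num_inst + i for i in torch.randperm(self.num_inst).tolist()]
                   for j in range(self.num_cl)]
        batches = [[batches[j][i] for j in range(self.num_cl)]
                   for i in range(self.num_inst)]
        if self.batch_cutoff is not None:
            # Keep only a random subset of the class-balanced batches
            random.shuffle(batches)
            batches = batches[:self.batch_cutoff]
        return iter([idx for batch in batches for idx in batch])

    def __len__(self):
        return self.num_cl * self.num_inst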
| 39.825397 | 121 | 0.697489 |
910af5706d2a9981705d65b7f790c5595e73aa3e
| 1,823 |
py
|
Python
|
DoChaP-db/UnusedScripts/main.py
|
Tal-Shay-Group/DoChaP
|
e721c6742fdff5f771bb947d92fa6cf66831939a
|
[
"MIT"
] | 2 |
2021-05-28T04:59:17.000Z
|
2021-09-03T13:25:40.000Z
|
DoChaP-db/UnusedScripts/main.py
|
Tal-Shay-Group/DoChaP
|
e721c6742fdff5f771bb947d92fa6cf66831939a
|
[
"MIT"
] | null | null | null |
DoChaP-db/UnusedScripts/main.py
|
Tal-Shay-Group/DoChaP
|
e721c6742fdff5f771bb947d92fa6cf66831939a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import sys
import os
sys.path.append(os.getcwd())
from Director import Director
from OrthologsBuilder import *
from SpeciesDB import *
if __name__ == "__main__":
inputDict = {}
for inarg in sys.argv[1:]:
try:
splitArg = inarg.strip("-").split("=")
if splitArg[0] in ("download", "withEns"):
inputDict[splitArg[0]] = splitArg[1]
else:
raise ValueError("Wrong input arguments. only accepts arguments 'download' and 'withEns'")
except AttributeError or IndexError:
raise ValueError("Make sure that input arguments are argumentName=argumentValue")
species = ['M_musculus', 'H_sapiens', 'R_norvegicus', 'D_rerio', 'X_tropicalis']
download = inputDict['download'] == 'True'
withEns = inputDict['withEns'] == 'True'
print("Running DBbuilder with Download {} and withENS {}".format(download, withEns))
print(type(download))
print(type(withEns))
director = Director()
orthologs = OrthologsBuilder(species=species, download=download)
director.setBuilder(orthologs)
director.collectFromSource(download=download)
spl = len(species)
spnum = 1
for sp in species:
print("===========Current Species: {}===========".format(sp))
dbBuild = dbBuilder(sp, download=download, withEns=withEns)
dbBuild.create_tables_db(merged=False)
dbBuild.fill_in_db(merged=False)
print("Filling {} completed!".format(dbBuild.dbName))
if spnum == 1:
dbBuild.create_tables_db(merged=True)
dbBuild.fill_in_db(merged=True)
if spnum == spl:
dbBuild.create_index()
dbBuild.AddOrthology(orthologs.OrthoTable)
spnum += 1
print("Filling {} completed!".format(dbBuild.dbName))
| 37.979167 | 106 | 0.638508 |
910b7a1f1887af22123d521897d29cfa202ce555
| 397 |
py
|
Python
|
tests/constants.py
|
eigenein/sqlitemap
|
25846178dee90cfe45a2bc951309301bc7f3694b
|
[
"MIT"
] | 4 |
2020-04-28T05:48:05.000Z
|
2020-10-05T06:56:13.000Z
|
tests/constants.py
|
eigenein/sqlitemap
|
25846178dee90cfe45a2bc951309301bc7f3694b
|
[
"MIT"
] | 1 |
2019-04-02T20:13:55.000Z
|
2019-04-02T20:45:26.000Z
|
tests/constants.py
|
eigenein/sqlitemap
|
25846178dee90cfe45a2bc951309301bc7f3694b
|
[
"MIT"
] | null | null | null |
# See also: https://stackoverflow.com/questions/3694276/what-are-valid-table-names-in-sqlite
good_table_names = [
'foo',
'123abc',
'123abc.txt',
'123abc-ABC.txt',
'foo""bar',
'',
'_sqlite',
]
# See also: https://stackoverflow.com/questions/3694276/what-are-valid-table-names-in-sqlite
bad_table_names = [
'"',
'"foo"',
'sqlite_',
'sqlite_reserved',
]
| 20.894737 | 92 | 0.622166 |
910ba6c11fb3b85edca95edcb1ac441727f03f60
| 16,258 |
py
|
Python
|
TWLight/settings/base.py
|
amire80/TWLight
|
063a385ea46c61a4889ba88e3fded4183c3a6bd3
|
[
"MIT"
] | null | null | null |
TWLight/settings/base.py
|
amire80/TWLight
|
063a385ea46c61a4889ba88e3fded4183c3a6bd3
|
[
"MIT"
] | 56 |
2021-07-03T12:34:47.000Z
|
2022-03-29T12:20:08.000Z
|
TWLight/settings/base.py
|
amire80/TWLight
|
063a385ea46c61a4889ba88e3fded4183c3a6bd3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Base settings for twlight project.
This is not intended to be used as the live settings file for a project and will
not work as one. You should instead use production.py, local.py, heroku.py, or
another file that you write. These files should live in the settings directory;
start with 'from .base import *'; and proceed to add or override settings as
appropriate to their context. In particular, you will need to set ALLOWED_HOSTS
before your app will run.
If you want to use production settings, you are now done. If not, you will also
need to set the environment variables indicated in the README.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import json
from django.contrib import messages
from django.urls import reverse_lazy
from django.utils.translation import gettext_lazy as _
# Import available locales from Faker, so we can determine what languages we fake in tests.
from faker.config import AVAILABLE_LOCALES as FAKER_AVAILABLE_LOCALES
# We're going to replace Django's default logging config.
import logging.config
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TWLIGHT_HOME = os.path.dirname(
os.path.dirname(os.path.abspath(os.path.join(os.path.abspath(__file__), os.pardir)))
)
TWLIGHT_ENV = os.environ.get("TWLIGHT_ENV")
# An atypical way of setting django languages for TranslateWiki integration:
# https://translatewiki.net/wiki/Thread:Support/_The_following_issue_is_unconfirmed,_still_to_be_investigated._Adding_TheWikipediaLibrary_Card_Platform_TranslateWiki
# Get the language codes from the locale directories, and compare them to the
# languages in Wikimedia CLDR. Use language autonyms from Wikimedia.
# We periodically pull:
# https://raw.githubusercontent.com/wikimedia/language-data/master/data/language-data.json
# into locale/language-data.json
# Get the intersection of available Faker locales and the specified language set.
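# The two helpers below are hedged sketches of what this settings module
# expects (the exact implementations may differ); they rely only on the locale
# directory layout and locale/language-data.json described above.
def get_languages_from_locale_subdirectories(path):
    """Return a LANGUAGES-style list of (code, autonym) from locale/ subdirs."""
    with open(os.path.join(path, "language-data.json")) as language_data_file:
        language_data = json.load(language_data_file)["languages"]
    languages = []
    for entry in sorted(os.listdir(path)):
        if not os.path.isdir(os.path.join(path, entry)):
            continue
        code = entry.replace("_", "-").lower()
        if code in language_data:
            # The last element of each language-data entry is the autonym.
            languages.append((code, language_data[code][-1]))
    return languages


def get_django_faker_languages_intersection(languages):
    """Return the Faker locales whose language part appears in `languages`."""
    codes = {code.split("-")[0] for code, _autonym in languages}
    return sorted(
        locale
        for locale in FAKER_AVAILABLE_LOCALES
        if locale.split("_")[0].lower() in codes
    )
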
# ------------------------------------------------------------------------------
# ------------------------> core django configurations <------------------------
# ------------------------------------------------------------------------------
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = [
"django.contrib.admin",
"django.contrib.admindocs",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"whitenoise.runserver_nostatic", # Not a django app; replaces staticfiles
"django.contrib.staticfiles",
"django.contrib.sites", # required by django.contrib.comments
]
THIRD_PARTY_APPS = [
"annoying",
"crispy_forms",
"reversion",
"dal",
"dal_select2",
"django_comments",
"django_cron",
"django_filters",
"modeltranslation",
# DO NOT CONFUSE THIS with requests, the Python URL library! This is
# django-request, the user analytics package.
"request",
"django_countries",
"rest_framework",
"rest_framework.authtoken",
"django_extensions",
]
TWLIGHT_APPS = [
"TWLight.i18n",
"TWLight.users",
"TWLight.resources",
"TWLight.applications",
"TWLight.emails",
"TWLight.graphs",
"TWLight.comments",
"TWLight.api",
"TWLight.ezproxy",
]
# dal (autocomplete_light) and modeltranslation must go before django.contrib.admin.
INSTALLED_APPS = THIRD_PARTY_APPS + DJANGO_APPS + TWLIGHT_APPS
# CRON CONFIGURATION
# ------------------------------------------------------------------------------
CRON_CLASSES = [
"TWLight.crons.BackupCronJob",
"TWLight.crons.SendCoordinatorRemindersCronJob",
"TWLight.crons.UserRenewalNoticeCronJob",
"TWLight.crons.ProxyWaitlistDisableCronJob",
"TWLight.crons.UserUpdateEligibilityCronJob",
"TWLight.crons.ClearSessions",
]
# REST FRAMEWORK CONFIG
# ------------------------------------------------------------------------------
REST_FRAMEWORK = {
"DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.NamespaceVersioning"
}
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
# WhiteNoise should be loaded before everything but security.
"whitenoise.middleware.WhiteNoiseMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
# LocaleMiddleware must go after Session (and Cache, if used), but before
# Common.
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.contrib.admindocs.middleware.XViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
# The default storage backend relies on sessions.
    # That's why SessionMiddleware must be enabled and appear before
# MessageMiddleware.
"django.contrib.messages.middleware.MessageMiddleware",
]
# DEBUG
# ------------------------------------------------------------------------------
# By setting this an an environment variable, it is easy to switch debug on in
# servers to do a quick test.
# DEBUG SHOULD BE FALSE ON PRODUCTION for security reasons.
DEBUG = bool(os.environ.get("DEBUG", "False").lower() == "true")
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# WMF sysadmins strongly prefer mysql, so use that.
# If you're deploying to Heroku, heroku.py will override this.
DATABASES = {
"default": {
"ENGINE": "django.db.backends.mysql",
"NAME": os.environ.get("DJANGO_DB_NAME", None),
"USER": os.environ.get("DJANGO_DB_USER", None),
"PASSWORD": os.environ.get("DJANGO_DB_PASSWORD", None),
"HOST": os.environ.get("DJANGO_DB_HOST", None),
"PORT": "3306",
# This is critical for handling Unicode data due to stupid properties
# of MySQL; see https://stackoverflow.com/questions/2108824/mysql-incorrect-string-value-error-when-save-unicode-string-in-django .
"OPTIONS": {
"charset": "utf8mb4",
"init_command": "SET sql_mode='STRICT_ALL_TABLES'; SET storage_engine='INNODB';",
},
}
}
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get("SECRET_KEY")
# In production, this list should contain the URL of the server and nothing
# else, for security reasons. For local testing '*' is OK.
ALLOWED_HOSTS = os.environ.get("ALLOWED_HOSTS", "localhost 127.0.0.1 [::1]").split(" ")
# Let Django know about external URLs in case they differ from internal
# Needed to be added for /admin
USE_X_FORWARDED_HOST = True
REQUEST_BASE_URL = os.environ.get("REQUEST_BASE_URL", None)
ROOT_URLCONF = "TWLight.urls"
WSGI_APPLICATION = "TWLight.wsgi.application"
SITE_ID = 1
# Overwrite messages.ERROR to use danger instead, to play nice with bootstrap
MESSAGE_TAGS = {messages.ERROR: "danger"}
# INTERNATIONALIZATION CONFIGURATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = "en" # Sets site default language.
# https://django-modeltranslation.readthedocs.io/en/latest/installation.html#advanced-settings
MODELTRANSLATION_DEFAULT_LANGUAGE = (
LANGUAGE_CODE # sets the modeltranslation default language.
)
LOCALE_PATHS = [
# makemessages looks for locale/ in the top level, not the project level.
os.path.join(os.path.dirname(BASE_DIR), "locale")
]
# We're letting the file-based translation contributions dictate the languages
# available to the system. This keeps our column and index count for db-stored
# translations as low as possible while allowing translatewiki contributions to
# be used without reconfiguring the site.
LANGUAGES = get_languages_from_locale_subdirectories(LOCALE_PATHS[0])
FAKER_LOCALES = get_django_faker_languages_intersection(LANGUAGES)
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [os.path.join(BASE_DIR, "templates")],
"OPTIONS": {
# Reiterating the default so we can add to it later.
"context_processors": (
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.request",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
),
# We cache templates by default.
"loaders": [
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
],
},
}
]
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, "collectedstatic")
STATIC_URL = "/static/"
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# MEDIA FILE CONFIGURATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/1.8/topics/files/
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media")
MEDIA_URL = "/media/"
# ------------------------------------------------------------------------------
# -----------------> third-party and TWLight configurations <-------------------
# ------------------------------------------------------------------------------
CRISPY_TEMPLATE_PACK = "bootstrap3"
# EZPROXY CONFIGURATION
# ------------------------------------------------------------------------------
TWLIGHT_EZPROXY_URL = os.environ.get("TWLIGHT_EZPROXY_URL", None)
TWLIGHT_EZPROXY_SECRET = os.environ.get("TWLIGHT_EZPROXY_SECRET", None)
# OAUTH CONFIGURATION
# ------------------------------------------------------------------------------
LOGIN_URL = reverse_lazy("oauth_login")
LOGIN_REDIRECT_URL = reverse_lazy("users:home")
AUTHENTICATION_BACKENDS = [
"TWLight.users.oauth.OAuthBackend",
"django.contrib.auth.backends.ModelBackend",
]
TWLIGHT_OAUTH_PROVIDER_URL = os.environ.get("TWLIGHT_OAUTH_PROVIDER_URL", None)
TWLIGHT_OAUTH_CONSUMER_KEY = os.environ.get("TWLIGHT_OAUTH_CONSUMER_KEY", None)
TWLIGHT_OAUTH_CONSUMER_SECRET = os.environ.get("TWLIGHT_OAUTH_CONSUMER_SECRET", None)
# API CONFIGURATION
# ------------------------------------------------------------------------------
TWLIGHT_API_PROVIDER_ENDPOINT = os.environ.get("TWLIGHT_API_PROVIDER_ENDPOINT", None)
# COMMENTS CONFIGURATION
# ------------------------------------------------------------------------------
COMMENTS_APP = "TWLight.comments"
# REVERSION CONFIGURATION
# ------------------------------------------------------------------------------
# See https://django-reversion.readthedocs.org/ .
# We are NOT using reversion middleware, because that creates revisions when
# save() is called in the context of some http requests, but not on all database
# saves. This makes it untestable. Instead we decorate the Application.save().
# DJMAIL CONFIGURATION
# ------------------------------------------------------------------------------
DJMAIL_REAL_BACKEND = os.environ.get(
"DJANGO_EMAIL_BACKEND", "django.core.mail.backends.console.EmailBackend"
)
EMAIL_BACKEND = "djmail.backends.async.EmailBackend"
EMAIL_HOST = os.environ.get("DJANGO_EMAIL_HOST", "localhost")
EMAIL_PORT = 25
EMAIL_HOST_USER = ""
EMAIL_HOST_PASSWORD = ""
EMAIL_USE_TLS = False
INSTALLED_APPS += ["djmail"]
# DJANGO_REQUEST CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE += ["request.middleware.RequestMiddleware"]
# The following are set for privacy purposes. Note that, if some amount of
# geographic tracking is desired, there is a REQUEST_ANONYMOUS_IP setting which
# scrubs the last octet of the IP address, which could be used instead of
# REQUEST_LOG_IP. There is not a way to get semi-granular user tracking (such
# as tracking only authenticated vs anonymous users).
REQUEST_LOG_IP = False
REQUEST_LOG_USER = False
# LOGGING CONFIGURATION
# ------------------------------------------------------------------------------
# We're replacing the default logging config to get better control of the
# mail_admins behavior.
LOGGING_CONFIG = None
logging.config.dictConfig(
{
"version": 1,
"disable_existing_loggers": False,
"filters": {
"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"},
"require_debug_true": {"()": "django.utils.log.RequireDebugTrue"},
},
"formatters": {
"django.server": {
"()": "django.utils.log.ServerFormatter",
"format": "[%(server_time)s] %(message)s",
}
},
"handlers": {
"nodebug_console": {
"level": "WARNING",
"filters": ["require_debug_false"],
"class": "logging.StreamHandler",
},
"debug_console": {
"level": "INFO",
"filters": ["require_debug_true"],
"class": "logging.StreamHandler",
},
"django.server": {
"level": "INFO",
"class": "logging.StreamHandler",
"formatter": "django.server",
},
},
"loggers": {
"django": {
"handlers": ["nodebug_console", "debug_console"],
"level": os.environ.get("DJANGO_LOG_LEVEL", "INFO"),
},
"django.server": {
"handlers": ["django.server"],
"level": os.environ.get("DJANGO_LOG_LEVEL", "INFO"),
"propagate": False,
},
"TWLight": {
"handlers": ["nodebug_console", "debug_console"],
"level": os.environ.get("DJANGO_LOG_LEVEL", "INFO"),
},
},
}
)
| 35.969027 | 165 | 0.60807 |
910c31b853b8a837a994aa06e68742ed3449818b
| 19,836 |
py
|
Python
|
modelator_py/util/tla/_optable.py
|
informalsystems/modelator-py
|
d66464096c022799e680e6201590a2ead69be32d
|
[
"Apache-2.0"
] | null | null | null |
modelator_py/util/tla/_optable.py
|
informalsystems/modelator-py
|
d66464096c022799e680e6201590a2ead69be32d
|
[
"Apache-2.0"
] | 3 |
2022-03-30T16:01:49.000Z
|
2022-03-31T13:40:03.000Z
|
modelator_py/util/tla/_optable.py
|
informalsystems/modelator-py
|
d66464096c022799e680e6201590a2ead69be32d
|
[
"Apache-2.0"
] | null | null | null |
"""Table of operators."""
# Copyright 2020 by California Institute of Technology
# Copyright (c) 2008-2013 INRIA and Microsoft Corporation
# All rights reserved. Licensed under 3-clause BSD.
#
# This module is based on the file:
#
# <https://github.com/tlaplus/tlapm/blob/main/src/optable.ml>
#
import pprint
from .ast import Nodes as nodes
# open Builtin
# type fixity =
# | Nonfix
# | Prefix | Postfix
# | Infix of assoc
# and assoc =
# | Left | Non | Right
# and dom =
# (* primitive operators *)
# | Logic | Sets | Modal
# (* user-definable operators *)
# | User
dom = {"Logic", "Sets", "Modal", "User"}
# type prec = int * int
# let withdef (name, prec, fix, als, defn) = (
# name, prec, fix, als, Some defn);;
def withdef(tuple_):
name, prec, fix, als, defn = tuple_
return (name, prec, fix, als, defn)
# let tlaops = [
# Logic,
# List.map withdef [
# '=>', ( 1, 1), Infix(Non()), [], Implies ;
# '<=>', ( 2, 2), Infix(Non()), [ '\\equiv' ], Equiv ;
# '/\\', ( 3, 3), Infix(Left()), [ '\\land' ], Conj ;
# '\\/', ( 3, 3), Infix(Left()), [ '\\lor' ], Disj ;
# '~', ( 4, 4), Prefix, [ '\\neg' ; '\\lnot' ], Neg ;
# '=', ( 5, 5), Infix(Non()), [], Eq ;
# '#', ( 5, 5), Infix(Non()), [ '/=' ], Neq ;
# ] ;
# Sets,
# List.map withdef [
# 'SUBSET', ( 8, 8), Prefix, [], SUBSET ;
# 'UNION', ( 8, 8), Prefix, [], UNION ;
# 'DOMAIN', ( 9, 9), Prefix, [], DOMAIN ;
# '\\subseteq', ( 5, 5), Infix(Non()), [], Subseteq ;
# '\\in', ( 5, 5), Infix(Non()), [], Mem ;
# '\\notin', ( 5, 5), Infix(Non()), [], Notmem ;
# '\\', ( 8, 8), Infix(Non()), [], Setminus ;
# '\\cap', ( 8, 8), Infix(Left()), [ '\\intersect' ], Cap ;
# '\\cup', ( 8, 8), Infix(Left()), [ '\\union' ], Cup ;
# ] ;
# Sets,
# [ '\\X', (10,13), Prefix, [ '\\times' ], None ] ;
# Modal,
# List.map withdef [
# ''', (15,15), Postfix, [], Prime ;
# '~>', ( 2, 2), Infix(Non()), [ '\\leadsto' ], Leadsto ;
# 'ENABLED', ( 4,15), Prefix, [], ENABLED ;
# 'UNCHANGED', ( 4,15), Prefix, [], UNCHANGED ;
# '\\cdot', ( 5,14), Infix(Left()), [], Cdot ;
# '-+->', ( 2, 2), Infix(Non()), [], Actplus ;
# '[]', ( 4,15), Prefix, [], Box true ;
# '<>', ( 4,15), Prefix, [], Diamond ;
# ] ;
# User,
# List.map (fun (name, prec, fix, als) -> (name, prec, fix, als, None)) [
# '^', (14,14), Infix(Non()), [] ;
# '/', (13,13), Infix(Non()), [] ;
# '*', (13,13), Infix(Left()), [] ;
# '-.', (12,12), Prefix, [ '-' ] ;
# '-', (11,11), Infix(Left()), [] ;
# '+', (10,10), Infix(Left()), [] ;
# '^+', (15,15), Postfix, [] ;
# '^*', (15,15), Postfix, [] ;
# '^#', (15,15), Postfix, [] ;
# '<', ( 5, 5), Infix(Non()), [] ;
# '=<', ( 5, 5), Infix(Non()), [ '<=' ; '\\leq' ] ;
# '>', ( 5, 5), Infix(Non()), [] ;
# '>=', ( 5, 5), Infix(Non()), [ '\\geq' ] ;
# '...', ( 9, 9), Infix(Non()), [] ;
# '..', ( 9, 9), Infix(Non()), [] ;
# '|', (10,11), Infix(Left()), [] ;
# '||', (10,11), Infix(Left()), [] ;
# '&&', (13,13), Infix(Left()), [] ;
# '&', (13,13), Infix(Left()), [] ;
# '$$', ( 9,13), Infix(Left()), [] ;
# '$', ( 9,13), Infix(Left()), [] ;
# '??', ( 9,13), Infix(Left()), [] ;
# '%%', (10,11), Infix(Left()), [] ;
# '%', (10,11), Infix(Non()), [ '\\mod' ] ;
# '##', ( 9,13), Infix(Left()), [] ;
# '++', (10,10), Infix(Left()), [] ;
# '--', (11,11), Infix(Left()), [] ;
# '**', (13,13), Infix(Left()), [] ;
# '//', (13,13), Infix(Non()), [] ;
# '^^', (14,14), Infix(Non()), [] ;
# '@@', ( 6, 6), Infix(Left()), [] ;
# '!!', ( 9,13), Infix(Non()), [] ;
# '|-', ( 5, 5), Infix(Non()), [] ;
# '|=', ( 5, 5), Infix(Non()), [] ;
# '-|', ( 5, 5), Infix(Non()), [] ;
# '=|', ( 5, 5), Infix(Non()), [] ;
# '<:', ( 7, 7), Infix(Non()), [] ;
# ':>', ( 7, 7), Infix(Non()), [] ;
# ':=', ( 5, 5), Infix(Non()), [] ;
# '::=', ( 5, 5), Infix(Non()), [] ;
# '(+)', (10,10), Infix(Left()), [ '\\oplus' ] ;
# '(-)', (11,11), Infix(Left()), [ '\\ominus' ] ;
# '(.)', (13,13), Infix(Left()), [ '\\odot' ] ;
# '(/)', (13,13), Infix(Non()), [ '\\oslash' ] ;
# '(\\X)', (13,13), Infix(Left()), [ '\\otimes' ] ;
# '\\uplus', ( 9,13), Infix(Left()), [] ;
# '\\sqcap', ( 9,13), Infix(Left()), [] ;
# '\\sqcup', ( 9,13), Infix(Left()), [] ;
# '\\div', (13,13), Infix(Non()), [] ;
# '\\wr', ( 9,14), Infix(Non()), [] ;
# '\\star', (13,13), Infix(Left()), [] ;
# '\\o', (13,13), Infix(Left()), [ '\\circ' ] ;
# '\\bigcirc', (13,13), Infix(Left()), [] ;
# '\\bullet', (13,13), Infix(Left()), [] ;
# '\\prec', ( 5, 5), Infix(Non()), [] ;
# '\\succ', ( 5, 5), Infix(Non()), [] ;
# '\\preceq', ( 5, 5), Infix(Non()), [] ;
# '\\succeq', ( 5, 5), Infix(Non()), [] ;
# '\\sim', ( 5, 5), Infix(Non()), [] ;
# '\\simeq', ( 5, 5), Infix(Non()), [] ;
# '\\ll', ( 5, 5), Infix(Non()), [] ;
# '\\gg', ( 5, 5), Infix(Non()), [] ;
# '\\asymp', ( 5, 5), Infix(Non()), [] ;
# '\\subset', ( 5, 5), Infix(Non()), [] ;
# '\\supset', ( 5, 5), Infix(Non()), [] ;
# '\\supseteq', ( 5, 5), Infix(Non()), [] ;
# '\\approx', ( 5, 5), Infix(Non()), [] ;
# '\\cong', ( 5, 5), Infix(Non()), [] ;
# '\\sqsubset', ( 5, 5), Infix(Non()), [] ;
# '\\sqsubseteq', ( 5, 5), Infix(Non()), [] ;
# '\\sqsupset', ( 5, 5), Infix(Non()), [] ;
# '\\sqsupseteq', ( 5, 5), Infix(Non()), [] ;
# '\\doteq', ( 5, 5), Infix(Non()), [] ;
# '\\propto', ( 5, 5), Infix(Non()), [] ;
# ] ;
# ]
# type tlaop = {
# name : string ;
# prec : prec ;
# fix : fixity ;
# dom : dom ;
# defn : Builtin.builtin option ;
# }
# let optable =
# let module H = Hashtbl in
# let tab = H.create 109 in
# List.iter begin
# fun (dom, ops) ->
# List.iter begin
# fun (name, prec, fix, als, defn) ->
# let op = { name = name ;
# prec = prec ;
# fix = fix ; dom = dom ;
# defn = defn }
# in
# H.add tab name op ;
# List.iter (fun s -> H.add tab s op) als
# end ops
# end tlaops ;
# tab
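class TLAOP(object):
    """Operator record, mirroring the OCaml `tlaop` type above.

    Hedged sketch: it only stores the operator name, precedence, fixity,
    domain and optional builtin definition used by `_generate_optable`.
    """

    def __init__(self, name, prec, fix, dom, defn):
        self.name = name
        self.prec = prec
        self.fix = fix
        self.dom = dom
        self.defn = defn

    def __repr__(self):
        return "TLAOP({0.name!r}, {0.prec}, {0.dom})".format(self)
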
def _generate_optable():
tlaops = _generate_tlaops()
optable = dict()
for dom, ops in tlaops:
for name, prec, fixity, alternatives, defn in ops:
op = TLAOP(name, prec, fixity, dom, defn)
optable.setdefault(name, list())
optable[name].append(op)
for s in alternatives:
optable.setdefault(s, list())
optable[s].append(op)
return optable
optable = _generate_optable()
# pprint.pprint(optable)
# let nonfix name defn =
# { name = name ; prec = (-1, -1) ;
# fix = Nonfix ; dom = User ; defn = defn }
#
# let lookup name =
# if Hashtbl.mem optable name then
# Hashtbl.find optable name
# else
# nonfix name None
#
# (** Mapping from builtins to standard tlaops *)
# let standard_form b =
# match b with
# | TRUE -> nonfix 'TRUE' (Some TRUE)
# | FALSE -> nonfix 'FALSE' (Some FALSE)
# | Implies -> lookup '=>'
# | Equiv -> lookup '<=>'
# | Conj -> lookup '/\\'
# | Disj -> lookup '\\/'
# | Neg -> lookup '~'
# | Eq -> lookup '='
# | Neq -> lookup '#'
# | Divides ->
# {
# name = '?|';
# prec = (10, 11);
# fix = Infix(Non());
# dom = Logic;
# defn = Some Divides;
# }
#
# | STRING -> nonfix 'STRING' (Some STRING)
# | BOOLEAN -> nonfix 'BOOLEAN' (Some BOOLEAN)
# | SUBSET -> lookup 'SUBSET'
# | UNION -> lookup 'UNION'
# | DOMAIN -> lookup 'DOMAIN'
# | Subseteq -> lookup '\\subseteq'
# | Mem -> lookup '\\in'
# | Notmem -> lookup '\\notin'
# | Setminus -> lookup '\\'
# | Cap -> lookup '\\cap'
# | Cup -> lookup '\\cup'
#
# | Prime -> lookup '''
# | StrongPrime -> lookup '''
# | Leadsto -> lookup '~>'
# | ENABLED -> lookup 'ENABLED'
# | UNCHANGED -> lookup 'UNCHANGED'
# | Cdot -> lookup '\\cdot'
# | Actplus -> lookup '-+->'
# | Box _ -> lookup '[]'
# | Diamond -> lookup '<>'
#
# | Plus -> { (lookup '+') with defn = Some Plus }
# | Minus -> { (lookup '-') with defn = Some Minus }
# | Uminus -> { (lookup '-.') with defn = Some Uminus ; name = '-' }
# | Times -> { (lookup '*') with defn = Some Times }
# | Ratio -> { (lookup '/') with defn = Some Ratio }
# | Quotient -> { (lookup '\\div') with defn = Some Quotient }
# | Remainder -> { (lookup '%') with defn = Some Remainder }
# | Exp -> { (lookup '^') with defn = Some Exp }
# | Lteq -> { (lookup '=<') with defn = Some Lteq }
# | Lt -> { (lookup '<') with defn = Some Lt }
# | Gteq -> { (lookup '>=') with defn = Some Gteq }
# | Gt -> { (lookup '>') with defn = Some Gt }
# | Range -> { (lookup '..') with defn = Some Range }
# | Nat -> nonfix 'Nat' (Some Nat)
# | Int -> nonfix 'Int' (Some Int)
# | Real -> nonfix 'Real' (Some Real)
# | Infinity -> nonfix 'Infinity' (Some Infinity)
#
# | Seq -> nonfix 'Seq' (Some Seq)
# | Len -> nonfix 'Len' (Some Len)
# | BSeq -> nonfix 'BSeq' (Some BSeq)
# | Append -> nonfix 'Append' (Some Append)
# | Cat -> { (lookup '\\o') with defn = Some Cat }
# | Head -> nonfix 'Head' (Some Head)
# | Tail -> nonfix 'Tail' (Some Tail)
# | SubSeq -> nonfix 'SubSeq' (Some SubSeq)
# | SelectSeq -> nonfix 'SelectSeq' (Some SelectSeq)
#
# | OneArg -> { (lookup ':>') with defn = Some OneArg }
# | Extend -> { (lookup '@@') with defn = Some Extend }
# | Print -> nonfix 'Print' (Some Print)
# | PrintT -> nonfix 'PrintT' (Some PrintT)
# | Assert -> nonfix 'Assert' (Some Assert)
# | JavaTime -> nonfix 'JavaTime' (Some JavaTime)
# | TLCGet -> nonfix 'TLCGet' (Some TLCGet)
# | TLCSet -> nonfix 'TLCSet' (Some TLCSet)
# | Permutations -> nonfix 'Permutations' (Some Permutations)
# | SortSeq -> nonfix 'SortSeq' (Some SortSeq)
# | RandomElement -> nonfix 'RandomElement' (Some RandomElement)
# | Any -> nonfix 'Any' (Some Any)
# | ToString -> nonfix 'ToString' (Some ToString)
#
# | Unprimable -> nonfix 'Unprimable' None
# | Irregular -> nonfix 'Irregular' None
# ;;
| 41.497908 | 87 | 0.354658 |
910cebe2f9c8f06e688c3bb7c05c5907ea9954d5
| 40,599 |
py
|
Python
|
DIE/UI/FunctionViewEx.py
|
a1ext/DIE
|
1a3a19f016f44cf611847ce4f0d126b136040cb6
|
[
"MIT"
] | 5 |
2017-05-17T21:53:46.000Z
|
2019-07-12T20:05:20.000Z
|
DIE/UI/FunctionViewEx.py
|
a1ext/DIE
|
1a3a19f016f44cf611847ce4f0d126b136040cb6
|
[
"MIT"
] | null | null | null |
DIE/UI/FunctionViewEx.py
|
a1ext/DIE
|
1a3a19f016f44cf611847ce4f0d126b136040cb6
|
[
"MIT"
] | 1 |
2020-03-15T21:25:14.000Z
|
2020-03-15T21:25:14.000Z
|
import networkx as nx
from awesome.context import ignored
import sark
import idaapi
import idautils
import idc
from idaapi import PluginForm
from sark.qt import QtGui, QtCore, QtWidgets, form_to_widget, use_qt5
if use_qt5:
_QSortFilterProxyModel = QtCore.QSortFilterProxyModel
_MatchRecursive = QtCore.Qt.MatchRecursive
_MatchExactly = QtCore.Qt.MatchExactly
_PositionAtTop = QtWidgets.QAbstractItemView.PositionAtTop
else:
_QSortFilterProxyModel = QtGui.QSortFilterProxyModel
_MatchRecursive = QtCore.Qt.MatchFlag.MatchRecursive
_MatchExactly = QtCore.Qt.MatchFlag.MatchExactly
_PositionAtTop = QtWidgets.QAbstractItemView.ScrollHint.PositionAtTop
import DIE.UI.Die_Icons
import DIE.UI.ValueViewEx
import DIE.UI.ParserView
import DIE.UI.BPView
import DIE.Lib.IDAConnector
import DIE.Lib.DIEDb
import DIE.Lib.BpHandler
import sark.ui
###############################################################################################
# View Delegates.
# Singelton
function_view = None
| 40.885196 | 176 | 0.610828 |
910e142fb045682f0db143a5a746598a72de10d6
| 1,103 |
py
|
Python
|
peerbot/PeerBot.py
|
danerprog/PeerHostedDiscordBot
|
310467d8f123826a20ed92174666beb46fe35d02
|
[
"Apache-2.0"
] | null | null | null |
peerbot/PeerBot.py
|
danerprog/PeerHostedDiscordBot
|
310467d8f123826a20ed92174666beb46fe35d02
|
[
"Apache-2.0"
] | null | null | null |
peerbot/PeerBot.py
|
danerprog/PeerHostedDiscordBot
|
310467d8f123826a20ed92174666beb46fe35d02
|
[
"Apache-2.0"
] | null | null | null |
from peerbot.PeerBotStateMachine import PeerBotStateMachine
from utils.Logger import Logger
import discord
| 34.46875 | 91 | 0.637353 |
910ef698981e05b36f0d36710e6f745c6eeb2055
| 3,017 |
py
|
Python
|
dags/oss_know/oss_know_dags/dags_github/dag_github_init_issues_timeline.py
|
ynang/airflow-jobs-1
|
857e9dfbc2444dc1d23dd2b0463fe89108f01b89
|
[
"Apache-2.0"
] | 4 |
2022-01-24T11:27:39.000Z
|
2022-02-25T11:44:10.000Z
|
dags/oss_know/oss_know_dags/dags_github/dag_github_init_issues_timeline.py
|
linruoma/airflow-jobs-1
|
25d3996c6d11ee2d8f93ff2b73abcf83da986804
|
[
"Apache-2.0"
] | 13 |
2021-12-17T12:14:11.000Z
|
2022-02-24T07:01:08.000Z
|
dags/oss_know/oss_know_dags/dags_github/dag_github_init_issues_timeline.py
|
linruoma/airflow-jobs-1
|
25d3996c6d11ee2d8f93ff2b73abcf83da986804
|
[
"Apache-2.0"
] | 13 |
2022-01-19T09:30:03.000Z
|
2022-03-02T10:45:07.000Z
|
from datetime import datetime
from airflow import DAG
from airflow.operators.python import PythonOperator
# v0.0.1
from oss_know.libs.base_dict.variable_key import NEED_INIT_GITHUB_ISSUES_TIMELINE_REPOS, GITHUB_TOKENS, \
OPENSEARCH_CONN_DATA, PROXY_CONFS
from oss_know.libs.util.proxy import KuaiProxyService, ProxyManager, GithubTokenProxyAccommodator
from oss_know.libs.util.token import TokenManager
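# Hedged stand-ins for the two task callables wired into the DAG below; the
# real logic would build on the oss_know libs imported above.
def scheduler_init_github_issues_timeline():
    # Sketch: prepare tokens/proxies and anything shared by the per-repo tasks.
    print("scheduling github issues timeline init")


def do_init_github_issues_timeline(params):
    # Sketch: fetch and store the issues timeline for a single owner/repo pair.
    print("init github issues timeline for {owner}/{repo}".format(
        owner=params["owner"], repo=params["repo"]))
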
with DAG(
dag_id='github_init_issues_timeline_v1',
schedule_interval=None,
start_date=datetime(2000, 1, 1),
catchup=False,
tags=['github'],
) as dag:
op_scheduler_init_github_issues_timeline = PythonOperator(
task_id='op_scheduler_init_github_issues_timeline',
python_callable=scheduler_init_github_issues_timeline
)
need_do_init_ops = []
from airflow.models import Variable
need_init_github_issues_timeline_repos = Variable.get(NEED_INIT_GITHUB_ISSUES_TIMELINE_REPOS,
deserialize_json=True)
for need_init_github_issues_timeline_repo in need_init_github_issues_timeline_repos:
op_do_init_github_issues_timeline = PythonOperator(
task_id='op_do_init_github_issues_timeline_{owner}_{repo}'.format(
owner=need_init_github_issues_timeline_repo["owner"],
repo=need_init_github_issues_timeline_repo["repo"]),
python_callable=do_init_github_issues_timeline,
op_kwargs={'params': need_init_github_issues_timeline_repo},
)
op_scheduler_init_github_issues_timeline >> op_do_init_github_issues_timeline
| 40.226667 | 111 | 0.705668 |
9111781e785cdbf0e4af0d7fe8d43c637a7447e2
| 126 |
py
|
Python
|
conans/conan.py
|
laundry-96/conan
|
fd938f7220ca042d94c42ec5eb607ee69c6785a3
|
[
"MIT"
] | 2 |
2019-01-09T10:01:29.000Z
|
2019-01-09T10:01:31.000Z
|
conans/conan.py
|
laundry-96/conan
|
fd938f7220ca042d94c42ec5eb607ee69c6785a3
|
[
"MIT"
] | 6 |
2016-03-08T22:06:45.000Z
|
2020-06-02T15:22:19.000Z
|
conans/conan.py
|
laundry-96/conan
|
fd938f7220ca042d94c42ec5eb607ee69c6785a3
|
[
"MIT"
] | 2 |
2019-08-07T18:15:16.000Z
|
2021-08-04T12:33:05.000Z
|
import sys
from conans.client.command import main
if __name__ == '__main__':
    main(sys.argv[1:])
| 10.5 | 38 | 0.642857 |
9111af8dea9204ecc79252d0615a08b9fa56ab3b
| 4,998 |
py
|
Python
|
tests/apps/persons/test_cms_plugins_person.py
|
lunika/richie
|
b0b04d0ffc0b16f2f1b8a8201418b8f86941e45f
|
[
"MIT"
] | null | null | null |
tests/apps/persons/test_cms_plugins_person.py
|
lunika/richie
|
b0b04d0ffc0b16f2f1b8a8201418b8f86941e45f
|
[
"MIT"
] | null | null | null |
tests/apps/persons/test_cms_plugins_person.py
|
lunika/richie
|
b0b04d0ffc0b16f2f1b8a8201418b8f86941e45f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Unit tests for the Person plugin and its model
"""
from django import forms
from django.conf import settings
from django.test import TestCase
from cms.api import add_plugin, create_page
from cmsplugin_plain_text.cms_plugins import PlaintextPlugin
from djangocms_picture.cms_plugins import PicturePlugin
from richie.apps.core.factories import FilerImageFactory, UserFactory
from richie.apps.core.helpers import create_i18n_page
from richie.apps.persons.cms_plugins import PersonPlugin
from richie.apps.persons.factories import PersonFactory
from richie.apps.persons.models import PersonPluginModel
| 36.75 | 91 | 0.62585 |
91124d593f9dcda3e366e95378c8d482f7f013ee
| 9,295 |
py
|
Python
|
mathics/core/subexpression.py
|
Mathics3/mathics-core
|
54dc3c00a42cd893c6430054e125291b6eb55ead
|
[
"Apache-2.0"
] | 90 |
2021-09-11T14:14:00.000Z
|
2022-03-29T02:08:29.000Z
|
mathics/core/subexpression.py
|
Mathics3/mathics-core
|
54dc3c00a42cd893c6430054e125291b6eb55ead
|
[
"Apache-2.0"
] | 187 |
2021-09-13T01:00:41.000Z
|
2022-03-31T11:52:52.000Z
|
mathics/core/subexpression.py
|
Mathics3/mathics-core
|
54dc3c00a42cd893c6430054e125291b6eb55ead
|
[
"Apache-2.0"
] | 10 |
2021-10-05T15:44:26.000Z
|
2022-03-21T12:34:33.000Z
|
# cython: language_level=3
# -*- coding: utf-8 -*-
from mathics.core.expression import Expression
from mathics.core.symbols import Atom, Symbol
from mathics.core.atoms import Integer
from mathics.builtin.base import MessageException
"""
This module provides some infrastructure to deal with SubExpressions.
"""
def _pspec_span_to_tuple(pspec, expr):
"""
This function takes an expression and a Mathics
`Span` Expression and returns a tuple with the positions
of the leaves.
"""
start = 1
stop = None
step = 1
leaves = pspec.leaves
if len(leaves) > 3:
raise MessageException("Part", "span", leaves)
if len(leaves) > 0:
start = leaves[0].get_int_value()
if len(leaves) > 1:
stop = leaves[1].get_int_value()
if stop is None:
if leaves[1].get_name() == "System`All":
stop = None
else:
raise MessageException("Part", "span", pspec)
else:
stop = stop - 1 if stop > 0 else len(expr.leaves) + stop
if len(pspec.leaves) > 2:
step = leaves[2].get_int_value()
if start is None or step is None:
raise MessageException("Part", "span", pspec)
if start == 0 or stop == 0:
# index 0 is undefined
raise MessageException("Part", "span", Integer(0))
if start < 0:
        start = len(expr.leaves) + start
else:
start = start - 1
if stop is None:
stop = 0 if step < 0 else len(expr.leaves) - 1
stop = stop + 1 if step > 0 else stop - 1
return tuple(k for k in range(start, stop, step))
def replace(self, new):
"""
This method replaces the value pointed out by a `new` value.
"""
# First, look for the ancestor that is not an ExpressionPointer,
# keeping the positions of each step:
parent = self.parent
pos = [self.position]
while type(parent) is ExpressionPointer:
position = parent.position
if position is None:
parent = parent.parent
continue
pos.append(parent.position)
parent = parent.parent
# At this point, we hit the expression, and we have
# the path to reach the position
i = pos.pop()
try:
while pos:
if i == 0:
parent = parent._head
else:
parent = parent.elements[i - 1]
i = pos.pop()
except Exception:
raise MessageException("Part", "span", pos)
# Now, we have a pointer to an element in a true `Expression`.
# Now, set it to the new value.
if i == 0:
parent.set_head(new)
else:
parent.set_element(i - 1, new)
class SubExpression(object):
"""
This class represents a Subexpression of an existing Expression.
Assignment to a subexpression results in the change of the original Expression.
"""
def __new__(cls, expr, pos=None):
"""
`expr` can be an `Expression`, a `ExpressionPointer` or
another `SubExpression`
`pos` can be `None`, an integer value or an `Expression` that
indicates a subset of leaves in the original `Expression`.
If `pos` points out to a single whole leaf of `expr`, then
returns an `ExpressionPointer`.
"""
# If pos is a list, take the first element, and
# store the remainder.
if type(pos) in (tuple, list):
pos, rem_pos = pos[0], pos[1:]
if len(rem_pos) == 0:
rem_pos = None
else:
rem_pos = None
# Trivial conversion: if pos is an `Integer`, convert
# to a Python native int
if type(pos) is Integer:
pos = pos.get_int_value()
# pos == `System`All`
elif isinstance(pos, Symbol) and pos.get_name() == "System`All":
pos = None
elif type(pos) is Expression:
if pos.has_form("System`List", None):
tuple_pos = [i.get_int_value() for i in pos.leaves]
if any([i is None for i in tuple_pos]):
raise MessageException("Part", "pspec", pos)
pos = tuple_pos
elif pos.has_form("System`Span", None):
pos = _pspec_span_to_tuple(pos, expr)
else:
raise MessageException("Part", "pspec", pos)
if pos is None or type(pos) is int:
if rem_pos is None:
return ExpressionPointer(expr, pos)
else:
return SubExpression(ExpressionPointer(expr, pos), rem_pos)
elif type(pos) is tuple:
self = super(SubExpression, cls).__new__(cls)
self._headp = ExpressionPointer(expr.head, 0)
self._elementsp = [
SubExpression(ExpressionPointer(expr, k + 1), rem_pos) for k in pos
]
return self
def get_head_name(self):
return self._headp.parent.get_head_name()
def to_expression(self):
return Expression(
self._headp.to_expression(),
*(leaf.to_expression() for leaf in self._elementsp)
)
def replace(self, new):
"""
        Assigns `new` to the subexpression, according to the logic of `mathics.core.walk_parts`
"""
if (new.has_form("List", None) or new.get_head_name() == "System`List") and len(
new.leaves
) == len(self._elementsp):
for leaf, sub_new in zip(self._elementsp, new.leaves):
leaf.replace(sub_new)
else:
for leaf in self._elementsp:
leaf.replace(new)
| 30.276873 | 94 | 0.563636 |
911286f14d1a282acaf40a71af59fdf1cfb8d6e8
| 901 |
py
|
Python
|
pyopenproject/business/services/command/configuration/find.py
|
webu/pyopenproject
|
40b2cb9fe0fa3f89bc0fe2a3be323422d9ecf966
|
[
"MIT"
] | 5 |
2021-02-25T15:54:28.000Z
|
2021-04-22T15:43:36.000Z
|
pyopenproject/business/services/command/configuration/find.py
|
webu/pyopenproject
|
40b2cb9fe0fa3f89bc0fe2a3be323422d9ecf966
|
[
"MIT"
] | 7 |
2021-03-15T16:26:23.000Z
|
2022-03-16T13:45:18.000Z
|
pyopenproject/business/services/command/configuration/find.py
|
webu/pyopenproject
|
40b2cb9fe0fa3f89bc0fe2a3be323422d9ecf966
|
[
"MIT"
] | 6 |
2021-06-18T18:59:11.000Z
|
2022-03-27T04:58:52.000Z
|
from pyopenproject.api_connection.exceptions.request_exception import RequestError
from pyopenproject.api_connection.requests.get_request import GetRequest
from pyopenproject.business.exception.business_error import BusinessError
from pyopenproject.business.services.command.configuration.configuration_command import ConfigurationCommand
from pyopenproject.model.configuration import Configuration
| 39.173913 | 108 | 0.760266 |
9112d9a09ef3e419ea9c838421fb6d27323a5f4c
| 1,960 |
py
|
Python
|
lib/python/treadmill/tests/api/cell_test.py
|
vrautela/treadmill
|
05e47fa8acdf8bad7af78e737efb26ea6488de82
|
[
"Apache-2.0"
] | 1 |
2019-04-14T20:17:07.000Z
|
2019-04-14T20:17:07.000Z
|
lib/python/treadmill/tests/api/cell_test.py
|
vrautela/treadmill
|
05e47fa8acdf8bad7af78e737efb26ea6488de82
|
[
"Apache-2.0"
] | 1 |
2017-09-18T10:36:12.000Z
|
2017-09-18T10:36:12.000Z
|
lib/python/treadmill/tests/api/cell_test.py
|
evreng/treadmill
|
05e47fa8acdf8bad7af78e737efb26ea6488de82
|
[
"Apache-2.0"
] | null | null | null |
"""Cell API tests.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import mock
from treadmill import admin
from treadmill.api import cell
if __name__ == '__main__':
unittest.main()
| 32.131148 | 72 | 0.628061 |
91150271775e1bcf188908a5352023d285ea5e40
| 363 |
py
|
Python
|
src/python_package/__init__.py
|
microsoft/ai-python-package
|
770f5167ebc32b5410739f04c5730e68f84785c9
|
[
"MIT"
] | 3 |
2021-12-11T17:02:56.000Z
|
2022-02-23T19:45:35.000Z
|
src/python_package/__init__.py
|
microsoft/ai-python-package
|
770f5167ebc32b5410739f04c5730e68f84785c9
|
[
"MIT"
] | 5 |
2022-03-24T13:21:21.000Z
|
2022-03-31T13:21:39.000Z
|
src/python_package/__init__.py
|
microsoft/python-package-template
|
770f5167ebc32b5410739f04c5730e68f84785c9
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in project root for information.
# -------------------------------------------------------------
"""Python Package Template"""
from __future__ import annotations
__version__ = "0.0.2"
| 40.333333 | 80 | 0.484848 |
9116473055c5bd072ad59a444dc826781f8a2c35
| 2,387 |
py
|
Python
|
tests/test_integration_partition.py
|
themoodymann/piChain
|
4de9e8da3994901371713b68bc05295fe6676571
|
[
"MIT"
] | 8 |
2018-02-22T08:52:26.000Z
|
2022-02-01T01:28:29.000Z
|
tests/test_integration_partition.py
|
themoodymann/piChain
|
4de9e8da3994901371713b68bc05295fe6676571
|
[
"MIT"
] | 3 |
2018-03-07T18:13:53.000Z
|
2019-12-03T23:42:42.000Z
|
tests/test_integration_partition.py
|
florianmorath/piChain
|
df498021cb7c2df26a7980fb85b795f4a0105faf
|
[
"MIT"
] | 7 |
2018-02-26T12:28:34.000Z
|
2021-01-01T11:33:59.000Z
|
"""Integration test: Test partition of piChain nodes.
Note: run tests with default setting values in config.py.
"""
import time
from tests.util import MultiNodeTest
| 38.5 | 70 | 0.735233 |
9116cf95f3505891c20808a9297cb4047c9dcb7a
| 776 |
py
|
Python
|
sandbox/pdp2/arbitrary_data/zip_files.py
|
projectpai/paipass
|
8b8e70b6808bf026cf957e240c7eed7bfcf4c55d
|
[
"MIT"
] | 3 |
2021-04-17T10:20:26.000Z
|
2022-03-08T07:36:13.000Z
|
sandbox/pdp2/arbitrary_data/zip_files.py
|
projectpai/paipass
|
8b8e70b6808bf026cf957e240c7eed7bfcf4c55d
|
[
"MIT"
] | null | null | null |
sandbox/pdp2/arbitrary_data/zip_files.py
|
projectpai/paipass
|
8b8e70b6808bf026cf957e240c7eed7bfcf4c55d
|
[
"MIT"
] | null | null | null |
import zipfile
import random
RAND_INT_RANGE = (1,100)
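def wrf(fname):
    # Assumed helper ("write random file"): create the text file that will be
    # zipped below, containing a single random integer.
    with open(fname, 'w') as f:
        f.write(str(random.randint(*RAND_INT_RANGE)))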
fnames = []
for i in range(10):
fname = 'file' + str(i) + '.txt'
wrf(fname)
fnames.append(fname)
dirpaths = set()
with zipfile.ZipFile('myzip.zip', 'w', compression=zipfile.ZIP_DEFLATED) as zf:
for fname in fnames:
dirpath = '/dirpath'+str(random.randint(*RAND_INT_RANGE))
# let's not have duplicate dirpaths.
while dirpath in dirpaths:
dirpath = '/dirpath' + str(random.randint(*RAND_INT_RANGE))
zf.write(fname, arcname=dirpath+'/'+fname)
dirpaths.add(dirpath)
print('dirpaths', dirpaths)
print('fnames', fnames)
| 26.758621 | 79 | 0.636598 |
911750f22693957597b2ca1cf0ab39d191230dfc
| 1,497 |
py
|
Python
|
tests/testproject/testproject/tests/test_middleware.py
|
mwesterhof/wagtail_managed404
|
a961271c7fc70accb43ec329da9defe36e3dab3c
|
[
"MIT"
] | 1 |
2021-03-11T10:06:04.000Z
|
2021-03-11T10:06:04.000Z
|
tests/testproject/testproject/tests/test_middleware.py
|
mwesterhof/wagtail_managed404
|
a961271c7fc70accb43ec329da9defe36e3dab3c
|
[
"MIT"
] | null | null | null |
tests/testproject/testproject/tests/test_middleware.py
|
mwesterhof/wagtail_managed404
|
a961271c7fc70accb43ec329da9defe36e3dab3c
|
[
"MIT"
] | null | null | null |
import unittest
from django.test import Client
from wagtail.core.models import Page
from wagtail_managed404.models import PageNotFoundEntry
| 34.022727 | 76 | 0.706079 |
91179c358fa25c39a2ad4c7490688913f5e6e658
| 24 |
py
|
Python
|
src/reversion/version.py
|
maraujop/django-reversion
|
c9e7788ca858d3c75b617a7277ffcd177a19d414
|
[
"BSD-3-Clause"
] | null | null | null |
src/reversion/version.py
|
maraujop/django-reversion
|
c9e7788ca858d3c75b617a7277ffcd177a19d414
|
[
"BSD-3-Clause"
] | null | null | null |
src/reversion/version.py
|
maraujop/django-reversion
|
c9e7788ca858d3c75b617a7277ffcd177a19d414
|
[
"BSD-3-Clause"
] | null | null | null |
__version__ = (1, 8, 5)
| 12 | 23 | 0.583333 |
911891456d9e7cb41632224dd81128e9e0fa9e6b
| 2,776 |
py
|
Python
|
observations/r/bomsoi.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 199 |
2017-07-24T01:34:27.000Z
|
2022-01-29T00:50:55.000Z
|
observations/r/bomsoi.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 46 |
2017-09-05T19:27:20.000Z
|
2019-01-07T09:47:26.000Z
|
observations/r/bomsoi.py
|
hajime9652/observations
|
2c8b1ac31025938cb17762e540f2f592e302d5de
|
[
"Apache-2.0"
] | 45 |
2017-07-26T00:10:44.000Z
|
2022-03-16T20:44:59.000Z
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import numpy as np
import os
import sys
from observations.util import maybe_download_and_extract
def bomsoi(path):
"""Southern Oscillation Index Data
The Southern Oscillation Index (SOI) is the difference in barometric
pressure at sea level between Tahiti and Darwin. Annual SOI and
Australian rainfall data, for the years 1900-2001, are given.
Australia's annual mean rainfall is an area-weighted average of the
total annual precipitation at approximately 370 rainfall stations around
the country.
This data frame contains the following columns:
Year
a numeric vector
Jan
average January SOI values for each year
Feb
average February SOI values for each year
Mar
average March SOI values for each year
Apr
average April SOI values for each year
May
average May SOI values for each year
Jun
average June SOI values for each year
Jul
average July SOI values for each year
Aug
average August SOI values for each year
Sep
average September SOI values for each year
Oct
average October SOI values for each year
Nov
average November SOI values for each year
Dec
average December SOI values for each year
SOI
a numeric vector consisting of average annual SOI values
avrain
a numeric vector consisting of a weighted average annual rainfall at
a large number of Australian sites
NTrain
Northern Territory rain
northRain
north rain
seRain
southeast rain
eastRain
east rain
southRain
south rain
swRain
southwest rain
Australian Bureau of Meteorology web pages:
http://www.bom.gov.au/climate/change/rain02.txt and
http://www.bom.gov.au/climate/current/soihtm1.shtml
Args:
path: str.
Path to directory which either stores file or otherwise file will
be downloaded and extracted there.
Filename is `bomsoi.csv`.
Returns:
Tuple of np.ndarray `x_train` with 106 rows and 21 columns and
dictionary `metadata` of column headers (feature names).
"""
import pandas as pd
path = os.path.expanduser(path)
filename = 'bomsoi.csv'
if not os.path.exists(os.path.join(path, filename)):
url = 'http://dustintran.com/data/r/DAAG/bomsoi.csv'
maybe_download_and_extract(path, url,
save_file_name='bomsoi.csv',
resume=False)
data = pd.read_csv(os.path.join(path, filename), index_col=0,
parse_dates=True)
x_train = data.values
metadata = {'columns': data.columns}
return x_train, metadata
| 22.942149 | 74 | 0.699207 |
9118ae0e8ce4a6964c33407d1f9bb269a5f81229
| 948 |
py
|
Python
|
openpype/hosts/houdini/plugins/publish/validate_bypass.py
|
dangerstudios/OpenPype
|
10ddcc4699137888616eec57cd7fac9648189714
|
[
"MIT"
] | null | null | null |
openpype/hosts/houdini/plugins/publish/validate_bypass.py
|
dangerstudios/OpenPype
|
10ddcc4699137888616eec57cd7fac9648189714
|
[
"MIT"
] | null | null | null |
openpype/hosts/houdini/plugins/publish/validate_bypass.py
|
dangerstudios/OpenPype
|
10ddcc4699137888616eec57cd7fac9648189714
|
[
"MIT"
] | null | null | null |
import pyblish.api
import openpype.api
| 27.085714 | 78 | 0.632911 |
9119b7e105152a68ddb6c7704cd3d58179e633e6
| 4,687 |
py
|
Python
|
gavPrj/dataset_core.py
|
GavinK-ai/cv
|
6dd11b2100c40aca281508c3821c807ef0ee227d
|
[
"MIT"
] | 1 |
2021-11-15T06:16:44.000Z
|
2021-11-15T06:16:44.000Z
|
gavPrj/dataset_core.py
|
JKai96/cv
|
6dd11b2100c40aca281508c3821c807ef0ee227d
|
[
"MIT"
] | null | null | null |
gavPrj/dataset_core.py
|
JKai96/cv
|
6dd11b2100c40aca281508c3821c807ef0ee227d
|
[
"MIT"
] | null | null | null |
import os
import cv2 as cv
import matplotlib.pyplot as plt
import numpy as np
#srcPaths = ('dataset/Screenshot1','dataset/Screenshot2','dataset/Screenshot3', 'dataset/Screenshot4')
#srcPaths = ('all_dataset/s1',
# 'all_dataset/s10',
# 'all_dataset/s11',
# 'all_dataset/s12',
# 'all_dataset/s13',
# 'all_dataset/s14',
# 'all_dataset/s15',
# 'all_dataset/s16',
# 'all_dataset/s17',
# 'all_dataset/s18',
# 'all_dataset/s19',
# 'all_dataset/s2',
# 'all_dataset/s20',
# 'all_dataset/s21',
# 'all_dataset/s22',
# 'all_dataset/s23',
# 'all_dataset/s24',
# 'all_dataset/s25',
# 'all_dataset/s26',
# 'all_dataset/s27',
# 'all_dataset/s28',
# 'all_dataset/s29',
# 'all_dataset/s3',
# 'all_dataset/s30',
# 'all_dataset/s31',
# 'all_dataset/s32',
# 'all_dataset/s33',
# 'all_dataset/s34',
# 'all_dataset/s35',
# 'all_dataset/s36',
# 'all_dataset/s37',
# 'all_dataset/s38',
# 'all_dataset/s39',
# 'all_dataset/s4',
# 'all_dataset/s40',
# 'all_dataset/s41',
# 'all_dataset/s42',
# 'all_dataset/s43',
# 'all_dataset/s44',
# 'all_dataset/s45',
# 'all_dataset/s46',
# 'all_dataset/s47',
# 'all_dataset/s48',
# 'all_dataset/s49',
# 'all_dataset/s5',
# 'all_dataset/s50',
# 'all_dataset/s51',
# 'all_dataset/s52',
# 'all_dataset/s53',
# 'all_dataset/s54',
# 'all_dataset/s55',
# 'all_dataset/s56',
# 'all_dataset/s57',
# 'all_dataset/s58',
# 'all_dataset/s59',
# 'all_dataset/s6',
# 'all_dataset/s60',
# 'all_dataset/s61',
# 'all_dataset/s62',
# 'all_dataset/s63',
# 'all_dataset/s7',
# 'all_dataset/s8',
# 'all_dataset/s9')
srcPaths = ('testdataset/t1','testdataset/t2')
datasetfilename = 'testdataset1.npz'
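def create_dataset(datasetfilename, srcPaths, classNames):
    # Hedged reconstruction of the missing helper: read every image under the
    # source folders, take the label from the letters of the file name (e.g.
    # "gavin01.jpg" -> "gavin"), and store everything in a compressed .npz.
    # Assumes all images share the same size; returns True when at least one
    # image was saved.
    images, labels, labelnames = [], [], []
    for srcPath in srcPaths:
        for fname in sorted(os.listdir(srcPath)):
            img = cv.imread(os.path.join(srcPath, fname))
            if img is None:
                continue
            name = ''.join(c for c in os.path.splitext(fname)[0] if c.isalpha()).lower()
            if name not in classNames:
                continue
            images.append(img)
            labels.append(classNames[name])
            labelnames.append(name)
    if not images:
        return False
    np.savez_compressed(datasetfilename,
                        images=np.array(images),
                        labels=np.array(labels),
                        labelnames=np.array(labelnames))
    return True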
if __name__ == '__main__':
# save a dataset in numpy compressed format
# datasetfilename = 'tiredataset.npz'
classNames = {'afiq':0, 'azureen':1, 'gavin':2, 'goke':3, 'inamul':4, 'jincheng':5, 'mahmuda':6, 'numan':7, 'saseendran':8}
if create_dataset(datasetfilename, srcPaths, classNames):
data = np.load(datasetfilename, allow_pickle=True)
imgList = data['images']
labelList = data['labels']
labelNameList = data['labelnames']
img = imgList[0]
label = labelList[0]
labelNameList = data['labelnames']
imgRGB = img[:, :, ::-1]
plt.imshow(imgRGB)
plt.title(label)
plt.show()
print(imgList.shape)
print(labelList.shape)
# imgList, labelList = create_dataset()
# img = imgList[0]
# label = labelList[0]
# imgRGB = img[:, :, ::-1]
# plt.imshow(imgRGB)
# plt.title(label)
# plt.show()
# img = imgList[1]
# label = labelList[1]
# imgRGB = img[:, :, ::-1]
# plt.imshow(imgRGB)
# plt.title(label)
# plt.show()
# img = imgList[3]
# label = labelList[3]
# imgRGB = img[:, :, ::-1]
# plt.imshow(imgRGB)
# plt.title(label)
# plt.show()
| 26.331461 | 128 | 0.528056 |
911a60720a34ab009d3e5702a34a60c445eb65cc
| 5,827 |
py
|
Python
|
kronos/kronos.py
|
jinified/kronos
|
1f110372a025d28ccc407372320491ee818c893d
|
[
"MIT"
] | null | null | null |
kronos/kronos.py
|
jinified/kronos
|
1f110372a025d28ccc407372320491ee818c893d
|
[
"MIT"
] | null | null | null |
kronos/kronos.py
|
jinified/kronos
|
1f110372a025d28ccc407372320491ee818c893d
|
[
"MIT"
] | null | null | null |
"""
Kronos: A simple scheduler for a graduate training programme
Entities: User, Schedule, Rotation
"""
from operator import itemgetter
from datetime import datetime, timedelta
def getRotationCapacity(rotationId, startDate, endDate, assignments):
""" Calculate number of users assigned to a particular rotation during the specified duration
"""
start = datetime.strptime(startDate, "%d%m%Y")
end = datetime.strptime(endDate, "%d%m%Y")
duration = int((end - start).days / 7.0)
# Weeks involved during the rotation
weeks = [(start + timedelta(weeks=x)).strftime("%W%Y") for x in range(0, duration)]
capacity = sum(itemgetter(*weeks)(assignments[rotationId][0][0]))
return capacity
def score_assignment(
assignments,
solution,
earliestAvailableDate,
core_rotations=["PMO", "PE", "SE", "PM"],
rotation_duration={
"PMO": 12,
"PE": 12,
"SE": 12,
"PM": 12,
"SYS": 12,
"ARC": 12,
"ANA": 12,
},
):
""" Calculate loss function for suggested solution (negative = better)
Parameters:
assignments (dict): global assignment object by rotation
solution (dict): rotation assignment for a user
earliestAvailableDate (date): earliest date where a user can be assigned a rotation
core_rotations (list): rotation that should be completed first
rotation_duration (dict): duration of each rotation
"""
print(solution)
# SOFT CONSTRAINT 1 - Core rotations should be completed in the first 4 rotations if possible
core_first_loss = sum(
[
-3 if x[0] in core_rotations else 0
for x in solution
if int(x[1]) <= len(core_rotations)
]
)
# SOFT CONSTRAINT 2 - External Assignment must be assigned last
external_assignment_loss = (
99 if "EXT" in [x[0] for x in solution] and solution[-1][0] != "EXT" else 0
)
# Calculate timing of each rotation from solution
solution = [
(
x[0],
rotation_duration[x[0]]
+ (sum([rotation_duration[x[0]] for x in solution[:i]]) if i != 0 else 0),
)
for i, x in enumerate(solution)
]
startDate = earliestAvailableDate
schedule = []
for x in solution:
endDate = startDate + timedelta(weeks=x[1]) - timedelta(days=1)
# Make sure the date falls on weekday
if endDate.weekday() >= 5:
endDate -= timedelta(endDate.weekday() - 4)
schedule.append(
(x[0], startDate.strftime("%d%m%Y"), endDate.strftime("%d%m%Y"))
)
startDate += timedelta(weeks=x[1])
spread_first_loss = sum(
[getRotationCapacity(x[0], x[1], x[2], assignments) for x in schedule]
)
loss = core_first_loss + external_assignment_loss + spread_first_loss
return loss
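# --- Hedged usage sketch (added for illustration; not part of the original
# module). Shows the shapes score_assignment expects: `assignments` maps a
# rotation id to [[{week: headcount}], ...] and `solution` is a list of
# (rotation_id, order) tuples. The ids and numbers below are made up.
def _example_score_assignment():
    start = datetime(2018, 1, 1)
    weeks = [(start + timedelta(weeks=w)).strftime("%W%Y") for w in range(52)]
    assignments = {
        rid: [[{week: 0 for week in weeks}], []]
        for rid in ("PMO", "PE", "SE", "PM", "SYS", "ARC", "ANA")
    }
    solution = [("PMO", "1"), ("PE", "2")]
    return score_assignment(assignments, solution, start)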
def schedule2assignments(schedule):
""" Convert schedule object to assignment object
"""
rotations = {}
for userId, userSchedule in schedule.items():
for rotation in userSchedule:
id = rotation["rotationId"]
if id not in rotations:
rotations[id] = [[{}], []]
print(rotations[id][0][0])
startDate, endDate = itemgetter("startDate", "endDate")(rotation)
start = datetime.strptime(startDate, "%d%m%Y")
end = datetime.strptime(endDate, "%d%m%Y")
duration = int((end - start).days / 7.0)
for i in range(duration):
date = (start + timedelta(weeks=i)).strftime("%W%Y")
if date not in rotations[id][0][0]:
rotations[id][0][0][date] = 0
rotations[id][0][0][date] += 1
rotations[id][1].append((userId, startDate, endDate))
sortedDate = sorted(list(rotations[id][0][0].keys()))
if len(rotations[id][0]) < 2:
rotations[id][0].append(sortedDate[0])
rotations[id][0].append(sortedDate[-1])
elif sortedDate[0] < rotations[id][0][1]:
rotations[id][0][1] = sortedDate[0]
elif len(rotations[id][0]) > 2 and sortedDate[-1] > rotations[id][0][2]:
rotations[id][0][2] = sortedDate[-1]
print(rotations)
return rotations
def assignments2schedule(assignments):
""" Convert assignment object to overall schedule
"""
users = {}
for rotationId, rotationInfo in assignments.items():
for userId, userAssignment in rotationInfo[1].items():
if userId not in users:
users[userId] = []
users[userId].append(
{
"rotationId": rotationId,
"startDate": userAssignment[0],
"endDate": userAssignment[1],
}
)
print(users)
return users
def generateUserSchedule(user, assignments, scoring_function):
""" Generate most optimal user schedule
Parameters:
user (object): User
assignments (dict): Time-bounded assignments
scoring_function (function): scoring function to rank possible assignments
Returns:
schedule (list): list of rotations
"""
return [{"rotationId": "PMO", "startDate": "012018"}]
def getOverallSchedule(users):
""" Generate overall schedule from individual user's schedule
Parameters:
users (list): list of Users
Returns:
schedule (dict): overall assignments
"""
return {}
def getConflictingAssignments(schedule):
""" Get list of assignments which exceeded rotation capacity
Parameters:
schedule (dict): overall assignments
Returns:
        conflictingAssignmentsByRotation (dict): overall schedule with conflicting assignments
"""
return {}
if __name__ == "__main__":
pass
| 33.107955 | 97 | 0.60151 |
911aa9326eb51bb9ac375b836bec89f414a26904
| 2,384 |
py
|
Python
|
personal_env/lib/python3.8/site-packages/pylint/lint/utils.py
|
jestinmwilson/personal-website
|
6e47a7f33ed3b1ca5c1d42c89c5380d22992ed74
|
[
"MIT"
] | null | null | null |
personal_env/lib/python3.8/site-packages/pylint/lint/utils.py
|
jestinmwilson/personal-website
|
6e47a7f33ed3b1ca5c1d42c89c5380d22992ed74
|
[
"MIT"
] | null | null | null |
personal_env/lib/python3.8/site-packages/pylint/lint/utils.py
|
jestinmwilson/personal-website
|
6e47a7f33ed3b1ca5c1d42c89c5380d22992ed74
|
[
"MIT"
] | null | null | null |
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
import contextlib
import sys
from pylint.utils import utils
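# --- Hedged addition (for a self-contained snippet): preprocess_options below
# raises ArgumentPreprocessingError, which upstream pylint defines elsewhere in
# this module. A minimal stand-in:
class ArgumentPreprocessingError(Exception):
    """Raised if an error occurs during argument preprocessing."""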
def preprocess_options(args, search_for):
"""look for some options (keys of <search_for>) which have to be processed
before others
values of <search_for> are callback functions to call when the option is
found
"""
i = 0
while i < len(args):
arg = args[i]
if arg.startswith("--"):
try:
option, val = arg[2:].split("=", 1)
except ValueError:
option, val = arg[2:], None
try:
cb, takearg = search_for[option]
except KeyError:
i += 1
else:
del args[i]
if takearg and val is None:
if i >= len(args) or args[i].startswith("-"):
msg = "Option %s expects a value" % option
raise ArgumentPreprocessingError(msg)
val = args[i]
del args[i]
elif not takearg and val is not None:
msg = "Option %s doesn't expects a value" % option
raise ArgumentPreprocessingError(msg)
cb(option, val)
else:
i += 1
| 30.961039 | 81 | 0.557047 |
911ae3a32af48a82692eb10be784caaac6d3d48a
| 4,847 |
py
|
Python
|
mol_dqn/experimental/multi_obj.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 23,901 |
2018-10-04T19:48:53.000Z
|
2022-03-31T21:27:42.000Z
|
mol_dqn/experimental/multi_obj.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 891 |
2018-11-10T06:16:13.000Z
|
2022-03-31T10:42:34.000Z
|
mol_dqn/experimental/multi_obj.py
|
deepneuralmachine/google-research
|
d2ce2cf0f5c004f8d78bfeddf6e88e88f4840231
|
[
"Apache-2.0"
] | 6,047 |
2018-10-12T06:31:02.000Z
|
2022-03-31T13:59:28.000Z
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Generates molecules that satisfy two targets.
Target1: SAS
Target2: QED
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import json
import os
from absl import app
from absl import flags
from rdkit import Chem
from rdkit.Chem import QED
from rdkit.Contrib import SA_Score
from tensorflow.compat.v1 import gfile
from mol_dqn.chemgraph.mcts import deep_q_networks
from mol_dqn.chemgraph.mcts import molecules as molecules_mdp
from mol_dqn.chemgraph.mcts import run_dqn
from mol_dqn.chemgraph.tensorflow import core
flags.DEFINE_float('target_sas', 1, 'The target SAS of the molecule.')
flags.DEFINE_float('target_qed', 0.5, 'The target QED of the molecule.')
flags.DEFINE_float('gamma', 0.999, 'discount')
FLAGS = flags.FLAGS
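# --- Hedged sketch (added for illustration): the original environment class and
# main() of this file are not included in the snippet. A minimal reward
# consistent with the module docstring -- steer molecules toward FLAGS.target_sas
# and FLAGS.target_qed -- might look like this; the class name, the distance
# weighting and the attributes used for discounting are assumptions.
class TargetSASQEDMolecule(molecules_mdp.Molecule):
  """Molecule MDP whose reward is the closeness to the SAS and QED targets."""

  def _reward(self):
    molecule = Chem.MolFromSmiles(self._state)
    if molecule is None:
      return -20.0
    qed_value = QED.qed(molecule)
    sas_value = SA_Score.sascorer.calculateScore(molecule)
    reward = -abs(sas_value - FLAGS.target_sas) - abs(qed_value - FLAGS.target_qed)
    # Discount by the number of remaining steps, as in the other mol_dqn tasks.
    return reward * FLAGS.gamma**(self.max_steps - self.num_steps_taken)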
if __name__ == '__main__':
app.run(main)
| 30.484277 | 78 | 0.704766 |
911b8570dbe4dd13160970c51c2cd287f8cc9dae
| 4,147 |
py
|
Python
|
myuw/test/views/test_rest_search.py
|
uw-it-aca/myuw
|
3fa1fabeb3c09d81a049f7c1a8c94092d612438a
|
[
"Apache-2.0"
] | 18 |
2015-02-04T01:09:11.000Z
|
2021-11-25T03:10:39.000Z
|
myuw/test/views/test_rest_search.py
|
uw-it-aca/myuw
|
3fa1fabeb3c09d81a049f7c1a8c94092d612438a
|
[
"Apache-2.0"
] | 2,323 |
2015-01-15T19:45:10.000Z
|
2022-03-21T19:57:06.000Z
|
myuw/test/views/test_rest_search.py
|
uw-it-aca/myuw
|
3fa1fabeb3c09d81a049f7c1a8c94092d612438a
|
[
"Apache-2.0"
] | 9 |
2015-01-15T19:29:26.000Z
|
2022-02-11T04:51:23.000Z
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
# -*- coding: utf-8 -*-
from django.test.utils import override_settings
from django.urls import reverse
from myuw.test.api import MyuwApiTest
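# --- Hedged sketch (added for illustration): the original test cases of this
# file are not included in the snippet. A minimal test in the style the imports
# suggest; the URL name, URL arguments and the set_user helper are assumptions
# about the myuw test utilities, not the actual test data.
@override_settings(DEBUG=False)
class TestRestSearchView(MyuwApiTest):

    def test_search_page_loads(self):
        self.set_user('javerage')
        url = reverse('myuw_rest_search', args=['sws', 'student'])
        response = self.client.get(url)
        self.assertIn(response.status_code, (200, 302))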
| 39.875 | 77 | 0.613214 |
911c431b68da1378ffaf6b7b804e393825322dec
| 1,770 |
py
|
Python
|
examples/cli-solver/cli_solver.py
|
danagle/boggled
|
13fea4c31b5dff72093c38d1ad368dec9d44f4d0
|
[
"MIT"
] | null | null | null |
examples/cli-solver/cli_solver.py
|
danagle/boggled
|
13fea4c31b5dff72093c38d1ad368dec9d44f4d0
|
[
"MIT"
] | null | null | null |
examples/cli-solver/cli_solver.py
|
danagle/boggled
|
13fea4c31b5dff72093c38d1ad368dec9d44f4d0
|
[
"MIT"
] | null | null | null |
# cli_solver.py
import argparse
import os
from boggled import BoggleBoard, BoggleSolver, BoggleWords
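# --- Hedged sketch (added for illustration): display_board_details and
# solve_board are called in __main__ below but are not included in this
# snippet. Minimal stand-ins based on how their results are used; the exact
# BoggleBoard/BoggleSolver method names are assumptions about the boggled API.
def display_board_details(board):
    # Assumes BoggleBoard prints a readable representation of itself.
    print(board)


def solve_board(board, words):
    # Assumes BoggleSolver exposes solve() and keeps the results on the
    # instance (found / foundWords), as the __main__ block below expects.
    solver = BoggleSolver(board, words)
    solver.solve()
    return solver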
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("letters", type=str,
help="Board letters")
parser.add_argument("dictionary", type=str,
help="The text file containing the dictionary word list.")
parser.add_argument("-m", "--min", type=int,
help="The minimum word size.")
parser.add_argument("-p", "--paths", action="store_true",
help="Include the path followed for each word found.")
args = parser.parse_args()
if os.path.isfile(args.dictionary):
if isinstance(args.min, int):
words = BoggleWords(args.min)
else:
words = BoggleWords()
words.loadFromFile(args.dictionary)
board = BoggleBoard(args.letters)
display_board_details(board)
solved_board = solve_board(board, words)
print('Found:', len(solved_board.found))
if args.paths:
for word in solved_board.found:
print('{} : {}'.format(word, solved_board.found[word]))
else:
print(solved_board.foundWords)
else:
print("Error: Unable to find the dictionary.")
| 30 | 82 | 0.589266 |
911cc6fdfec9f96a292bbbfc6b3b0ac51752840f
| 45,086 |
py
|
Python
|
src/wepy/orchestration/orchestrator.py
|
gitter-badger/wepy-1
|
9bc619aeae178ad5d10f658fae2abfd2c7aeb18a
|
[
"MIT"
] | 35 |
2017-08-22T15:39:06.000Z
|
2022-03-20T15:17:52.000Z
|
src/wepy/orchestration/orchestrator.py
|
gitter-badger/wepy-1
|
9bc619aeae178ad5d10f658fae2abfd2c7aeb18a
|
[
"MIT"
] | 33 |
2017-10-02T22:04:45.000Z
|
2022-03-02T22:19:08.000Z
|
src/wepy/orchestration/orchestrator.py
|
stxinsite/wepy
|
352d4c1316b20e839aae8824eedd66f0f2d0b456
|
[
"MIT"
] | 17 |
2018-07-14T15:33:30.000Z
|
2022-01-18T16:30:55.000Z
|
from copy import copy, deepcopy
import sqlite3
from hashlib import md5
import time
import os
import os.path as osp
from base64 import b64encode, b64decode
from zlib import compress, decompress
import itertools as it
import logging
# instead of pickle we use dill, so we can save dynamically defined
# classes
import dill
from wepy.sim_manager import Manager
from wepy.orchestration.configuration import Configuration
from wepy.orchestration.snapshot import SimApparatus, SimSnapshot
from wepy.util.kv import KV, SQLITE3_INMEMORY_URI, gen_uri
# core methods for serializing python objects, used for snapshots,
# apparatuses, configurations, and the initial walker list
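# --- Hedged sketch (added for illustration): the serialization methods
# themselves are not included in this snippet; like the other methods shown
# here they belong to the orchestrator class, hence the bare `self` argument.
# Implementations consistent with how they are called below -- dill so
# dynamically defined classes can be pickled, md5 for content addressing --
# might look like this; the compression and base64 encoding details of the
# real implementation are assumptions.
def serialize(self, snapshot):
    """Serialize an object to a byte string using dill."""
    return dill.dumps(snapshot, recurse=True)

def deserialize(self, serial_str):
    """Recreate an object from its dill serialization."""
    return dill.loads(serial_str)

def hash_snapshot(self, serial_str):
    """Return the md5 hex digest of a serialized object."""
    return md5(serial_str).hexdigest()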
# defaults getters and setters
def set_default_sim_apparatus(self, sim_apparatus):
# serialize the apparatus and then set it
serial_app = self.serialize(sim_apparatus)
self.metadata_kv['default_sim_apparatus'] = serial_app
def set_default_init_walkers(self, init_walkers):
# serialize the apparatus and then set it
serial_walkers = self.serialize(init_walkers)
self.metadata_kv['default_init_walkers'] = serial_walkers
def set_default_configuration(self, configuration):
# serialize the apparatus and then set it
serial_config = self.serialize(configuration)
config_hash = self.hash_snapshot(serial_config)
self.metadata_kv['default_configuration_hash'] = config_hash
self.configuration_kv[config_hash] = serial_config
def set_default_snapshot(self, snapshot):
snaphash = self.add_snapshot(snapshot)
# then save the hash in the metadata
self.metadata_kv['default_snapshot_hash'] = snaphash
return snaphash
def gen_default_snapshot(self):
# generate the snapshot
sim_start_hash = self.gen_start_snapshot(self.get_default_init_walkers())
# then save the hash in the metadata
self.metadata_kv['default_snapshot_hash'] = sim_start_hash
return sim_start_hash
def get_default_sim_apparatus(self):
return self.deserialize(self.metadata_kv['default_sim_apparatus'])
def get_default_init_walkers(self):
return self.deserialize(self.metadata_kv['default_init_walkers'])
def get_default_configuration(self):
config_hash = self.metadata_kv['default_configuration_hash']
return self.get_configuration(config_hash)
def get_default_configuration_hash(self):
return self.metadata_kv['default_configuration_hash']
def get_default_snapshot(self):
start_hash = self.metadata_kv['default_snapshot_hash']
return self.get_snapshot(start_hash)
def get_default_snapshot_hash(self):
return self.metadata_kv['default_snapshot_hash']
def get_snapshot(self, snapshot_hash):
"""Returns a copy of a snapshot.
Parameters
----------
snapshot_hash :
Returns
-------
"""
return self.deserialize(self.snapshot_kv[snapshot_hash])
def get_configuration(self, config_hash):
"""Returns a copy of a snapshot.
Parameters
----------
config_hash :
Returns
-------
"""
return self.deserialize(self.configuration_kv[config_hash])
def add_snapshot(self, snapshot):
"""
Parameters
----------
snapshot :
Returns
-------
"""
# serialize the snapshot using the protocol for doing so
serialized_snapshot = self.serialize(snapshot)
# get the hash of the snapshot
snaphash = self.hash_snapshot(serialized_snapshot)
# check that the hash is not already in the snapshots
if any([True if snaphash == md5 else False for md5 in self.snapshot_hashes]):
# just skip the rest of the function and return the hash
return snaphash
# save the snapshot in the KV store
self.snapshot_kv[snaphash] = serialized_snapshot
return snaphash
def add_serial_snapshot(self, serial_snapshot):
# get the hash of the snapshot
snaphash = self.hash_snapshot(serial_snapshot)
# check that the hash is not already in the snapshots
if any([True if snaphash == md5 else False for md5 in self.snapshot_hashes]):
# just skip the rest of the function and return the hash
return snaphash
# save the snapshot in the KV store
self.snapshot_kv[snaphash] = serial_snapshot
return snaphash
def gen_start_snapshot(self, init_walkers):
"""
Parameters
----------
init_walkers :
Returns
-------
"""
# make a SimSnapshot object using the initial walkers and
start_snapshot = SimSnapshot(init_walkers, self.get_default_sim_apparatus())
# save the snapshot, and generate its hash
sim_start_md5 = self.add_snapshot(start_snapshot)
return sim_start_md5
def snapshot_registered(self, snapshot):
"""Check whether a snapshot is already in the database, based on the
hash of it.
This serializes the snapshot so may be slow.
Parameters
----------
snapshot : SimSnapshot object
The snapshot object you want to query for.
Returns
-------
"""
# serialize and hash the snapshot
snaphash = self.hash_snapshot(self.serialize(snapshot))
# then check it
return self.snapshot_hash_registered(snaphash)
def snapshot_hash_registered(self, snapshot_hash):
"""Check whether a snapshot hash is already in the database.
Parameters
----------
snapshot_hash : str
The string hash of the snapshot.
Returns
-------
"""
if any([True if snapshot_hash == h else False for h in self.snapshot_hashes]):
return True
else:
return False
def configuration_hash_registered(self, config_hash):
"""Check whether a snapshot hash is already in the database.
Parameters
----------
snapshot_hash : str
The string hash of the snapshot.
Returns
-------
"""
if any([True if config_hash == h else False for h in self.configuration_hashes]):
return True
else:
return False
### run methods
def _add_run_record(self, start_hash, end_hash, configuration_hash, cycle_idx):
params = (start_hash, end_hash, configuration_hash, cycle_idx)
# do it as a transaction
c = self._db.cursor()
# run the insert
c.execute(self.add_run_record_query, params)
def _delete_run_record(self, start_hash, end_hash):
params = (start_hash, end_hash)
cursor = self._db.cursor()
cursor.execute(self.delete_run_record_query, params)
def _update_run_record(self, start_hash, end_hash, new_config_hash, new_last_cycle_idx):
params = (new_config_hash, new_last_cycle_idx, start_hash, end_hash)
# do it as a transaction
c = self._db.cursor()
# run the update
c.execute(self.update_run_record_query, params)
def register_run(self, start_hash, end_hash, config_hash, cycle_idx):
"""
Parameters
----------
start_hash :
end_hash :
config_hash :
cycle_idx : int
The cycle of the simulation run the checkpoint was generated for.
Returns
-------
"""
# check that the hashes are for snapshots in the orchestrator
# if one is not registered raise an error
if not self.snapshot_hash_registered(start_hash):
raise OrchestratorError(
"snapshot start_hash {} is not registered with the orchestrator".format(
start_hash))
if not self.snapshot_hash_registered(end_hash):
raise OrchestratorError(
"snapshot end_hash {} is not registered with the orchestrator".format(
end_hash))
if not self.configuration_hash_registered(config_hash):
raise OrchestratorError(
"config hash {} is not registered with the orchestrator".format(
config_hash))
# save the configuration and get it's id
self._add_run_record(start_hash, end_hash, config_hash, cycle_idx)
def get_run_records(self):
get_run_record_query = """
SELECT *
FROM runs
""".format(fields=', '.join(self.RUN_SELECT_FIELDS))
cursor = self._db.cursor()
cursor.execute(get_run_record_query)
records = cursor.fetchall()
return records
def get_run_record(self, start_hash, end_hash):
get_run_record_query = """
SELECT {fields}
FROM runs
WHERE start_hash=? AND end_hash=?
""".format(fields=', '.join(self.RUN_SELECT_FIELDS))
params = (start_hash, end_hash)
cursor = self._db.cursor()
cursor.execute(get_run_record_query, params)
record = cursor.fetchone()
return record
def run_last_cycle_idx(self, start_hash, end_hash):
record = self.get_run_record(start_hash, end_hash)
last_cycle_idx = record[self.RUN_SELECT_FIELDS.index('last_cycle_idx')]
return last_cycle_idx
def run_configuration(self, start_hash, end_hash):
record = self.get_run_record(start_hash, end_hash)
config_hash = record[self.RUN_SELECT_FIELDS.index('config_hash')]
# get the configuration object and deserialize it
return self.deserialize(self.configuration_kv[config_hash])
def run_configuration_hash(self, start_hash, end_hash):
record = self.get_run_record(start_hash, end_hash)
config_hash = record[self.RUN_SELECT_FIELDS.index('config_hash')]
return config_hash
def run_hashes(self):
return [(rec[0], rec[1]) for rec in self.get_run_records()]
def run_continues(self, start_hash, end_hash):
"""Given a start hash and end hash for a run, find the run that this
continues.
Parameters
----------
start_hash :
end_hash :
Returns
-------
run_id
"""
# loop through the runs in this orchestrator until we find one
# where the start_hash matches the end hash
runs = self.run_hashes()
run_idx = 0
while True:
run_start_hash, run_end_hash = runs[run_idx]
# if the start hash of the queried run is the same as the
# end hash for this run we have found it
if start_hash == run_end_hash:
return (run_start_hash, run_end_hash)
run_idx += 1
# if the index is over the number of runs we quit and
# return None as no match
if run_idx >= len(runs):
return None
def _save_checkpoint(self, checkpoint_snapshot, config_hash,
checkpoint_db_path, cycle_idx,
):
"""
Parameters
----------
checkpoint_snapshot :
config_hash :
checkpoint_db_path :
mode :
(Default value = 'wb')
Returns
-------
"""
# orchestrator wrapper to the db
logging.debug("Opening the checkpoint orch database")
checkpoint_orch = Orchestrator(checkpoint_db_path, mode='r+')
# connection to the db
cursor = checkpoint_orch._db.cursor()
# we replicate the code for adding the snapshot here because
# we want it to occur transactionally the delete and add
# serialize the snapshot using the protocol for doing so
serialized_snapshot = self.serialize(checkpoint_snapshot)
# get the hash of the snapshot
snaphash = self.hash_snapshot(serialized_snapshot)
# the queries for deleting and inserting the new run record
delete_query = """
DELETE FROM runs
WHERE start_hash=?
AND end_hash=?
"""
insert_query = """
INSERT INTO runs (start_hash, end_hash, config_hash, last_cycle_idx)
VALUES (?, ?, ?, ?)
"""
# if there are any runs in the checkpoint orch remove the
# final snapshot
delete_params = None
if len(checkpoint_orch.run_hashes()) > 0:
start_hash, old_checkpoint_hash = checkpoint_orch.run_hashes()[0]
delete_params = (start_hash, old_checkpoint_hash)
else:
start_hash = list(checkpoint_orch.snapshot_kv.keys())[0]
# the config should already be in the orchestrator db
insert_params = (start_hash, snaphash, config_hash, cycle_idx)
# start this whole process as a transaction so we don't get
# something weird in between
logging.debug("Starting transaction for updating run table in checkpoint")
cursor.execute("BEGIN TRANSACTION")
# add the new one, using a special method for setting inside
# of a transaction
logging.debug("setting the new checkpoint snapshot into the KV")
cursor = checkpoint_orch.snapshot_kv.set_in_tx(cursor, snaphash, serialized_snapshot)
logging.debug("finished")
# if we need to delete the old end of the run snapshot and the
# run record for it
if delete_params is not None:
logging.debug("Old run record needs to be removed")
# remove the old run from the run table
logging.debug("Deleting the old run record")
cursor.execute(delete_query, delete_params)
logging.debug("finished")
# register the new run in the run table
logging.debug("Inserting the new run record")
cursor.execute(insert_query, insert_params)
logging.debug("finished")
# end the transaction
logging.debug("Finishing transaction")
cursor.execute("COMMIT")
logging.debug("Transaction committed")
# we do the removal of the old snapshot outside of the
# transaction since it is slow and can cause timeouts to
# occur. Furthermore, it is okay if it is in the checkpoint as
# the run record is what matters as long as the new checkpoint
# is there.
# delete the old snapshot if we need to
if delete_params is not None:
# WARN: occasionally and for unknown reasons we have found
# that the final checkpoint hash is the same as the one
# before. (The case where the last snapshot is on the same
# cycle as a backup is already covered). So as a last
# resort, we check that they don't have the same hash. If
# they do we don't delete it!
if snaphash != old_checkpoint_hash:
logging.debug("Deleting the old snapshot")
del checkpoint_orch.snapshot_kv[old_checkpoint_hash]
logging.debug("finished")
else:
logging.warn("Final snapshot has same hash as the previous checkpoint. Not deleting the previous one.")
checkpoint_orch.close()
logging.debug("closed the checkpoint orch connection")
def run_snapshot_by_time(self, start_hash, run_time, n_steps,
checkpoint_freq=None,
checkpoint_dir=None,
configuration=None,
configuration_hash=None,
checkpoint_mode='x'):
"""For a finished run continue it but resetting all the state of the
resampler and boundary conditions
Parameters
----------
start_hash :
run_time :
n_steps :
checkpoint_freq :
(Default value = None)
checkpoint_dir :
(Default value = None)
configuration :
(Default value = None)
configuration_hash :
(Default value = None)
checkpoint_mode :
(Default value = None)
Returns
-------
"""
# you must have a checkpoint dir if you ask for a checkpoint
# frequency
if checkpoint_freq is not None and checkpoint_dir is None:
raise ValueError("Must provide a directory for the checkpoint file "
"is a frequency is specified")
if configuration_hash is not None and configuration is not None:
raise ValueError("Cannot specify both a hash of an existing configuration"
"and provide a runtime configuration")
        # if no configuration or hash was specified we use the default one
elif (configuration is None) and (configuration_hash is None):
configuration = self.get_default_configuration()
# if a configuration hash was given only then we retrieve that
# configuration since we must pass configurations to the
# checkpoint DB initialization
elif configuration_hash is not None:
configuration = self.configuration_kv[configuration_hash]
# check that the directory for checkpoints exists, and create
# it if it doesn't and isn't already created
if checkpoint_dir is not None:
checkpoint_dir = osp.realpath(checkpoint_dir)
os.makedirs(checkpoint_dir, exist_ok=True)
# if the checkpoint dir is not specified don't create a
# checkpoint db orch
checkpoint_db_path = None
if checkpoint_dir is not None:
logging.debug("Initialization of checkpoint database is requested")
checkpoint_db_path, configuration_hash = self._init_checkpoint_db(start_hash,
configuration,
checkpoint_dir,
mode=checkpoint_mode)
logging.debug("finished initializing checkpoint database")
# get the snapshot and the configuration to use for the sim_manager
start_snapshot = self.get_snapshot(start_hash)
# generate the simulation manager given the snapshot and the
# configuration
sim_manager = self.gen_sim_manager(start_snapshot, configuration)
# handle and process the optional arguments for running simulation
if 'runner' in configuration.apparatus_opts:
runner_opts = configuration.apparatus_opts['runner']
else:
runner_opts = None
# run the init subroutine for the simulation manager
logging.debug("Running sim_manager.init")
sim_manager.init()
# run each cycle manually creating checkpoints when necessary
logging.debug("Starting run loop")
walkers = sim_manager.init_walkers
cycle_idx = 0
start_time = time.time()
while time.time() - start_time < run_time:
logging.debug("Running cycle {}".format(cycle_idx))
# run the cycle
walkers, filters = sim_manager.run_cycle(
walkers,
n_steps,
cycle_idx,
runner_opts=runner_opts,
)
# check to see if a checkpoint is necessary
if (checkpoint_freq is not None):
if (cycle_idx % checkpoint_freq == 0):
logging.debug("Checkpoint is required for this cycle")
# make the checkpoint snapshot
logging.debug("Generating the simulation snapshot")
checkpoint_snapshot = SimSnapshot(walkers, SimApparatus(filters))
# save the checkpoint (however that is implemented)
logging.debug("saving the checkpoint to the database")
self._save_checkpoint(checkpoint_snapshot,
configuration_hash,
checkpoint_db_path,
cycle_idx)
logging.debug("finished saving the checkpoint to the database")
# increase the cycle index for the next cycle
cycle_idx += 1
logging.debug("Finished the run cycle")
# the cycle index was set for the next cycle which didn't run
# so we decrement it
last_cycle_idx = cycle_idx - 1
logging.debug("Running sim_manager.cleanup")
# run the cleanup subroutine
sim_manager.cleanup()
# run the segment given the sim manager and run parameters
end_snapshot = SimSnapshot(walkers, SimApparatus(filters))
logging.debug("Run finished")
# return the things necessary for saving to the checkpoint if
# that is what is wanted later on
return end_snapshot, configuration_hash, checkpoint_db_path, last_cycle_idx
def orchestrate_snapshot_run_by_time(self, snapshot_hash, run_time, n_steps,
checkpoint_freq=None,
checkpoint_dir=None,
orchestrator_path=None,
configuration=None,
# these can reparametrize the paths
# for both the orchestrator produced
# files as well as the configuration
work_dir=None,
config_name=None,
narration=None,
mode=None,
# extra kwargs will be passed to the
# configuration.reparametrize method
**kwargs):
"""
Parameters
----------
snapshot_hash :
run_time :
n_steps :
checkpoint_freq :
(Default value = None)
checkpoint_dir :
(Default value = None)
orchestrator_path :
(Default value = None)
configuration :
(Default value = None)
        # these can reparametrize the paths for both the orchestrator produced
        # files as well as the configuration
        work_dir :
(Default value = None)
config_name :
(Default value = None)
narration :
(Default value = None)
mode :
(Default value = None)
        # extra kwargs will be passed to the configuration.reparametrize method
        **kwargs :
Returns
-------
"""
# for writing the orchestration files we set the default mode
# if mode is not given
if mode is None:
# the orchestrator mode is used for pickling the
# orchestrator and so must be in bytes mode
orch_mode = self.DEFAULT_ORCHESTRATION_MODE
# there are two possible uses for the path reparametrizations:
# the configuration and the orchestrator file paths. If both
# of those are explicitly specified by passing in the whole
# configuration object or both of checkpoint_dir,
# orchestrator_path then those reparametrization kwargs will
# not be used. As this is likely not the intention of the user
# we will raise an error. If there is even one use for them no
# error will be raised.
# first check if any reparametrizations were even requested
parametrizations_requested = (True if work_dir is not None else False,
True if config_name is not None else False,
True if narration is not None else False,
True if mode is not None else False,)
# check if there are any available targets for reparametrization
reparametrization_targets = (True if configuration is None else False,
True if checkpoint_dir is None else False,
True if orchestrator_path is None else False)
# if paramatrizations were requested and there are no targets
# we need to raise an error
if any(parametrizations_requested) and not any(reparametrization_targets):
raise OrchestratorError("Reparametrizations were requested but none are possible,"
" due to all possible targets being already explicitly given")
# if any paths were not given and no defaults for path
# parameters we want to fill in the defaults for them. This
# will also fill in any missing parametrizations with defaults
# we do this by just setting the path parameters if they
# aren't set, then later the parametrization targets will be
# tested for if they have been set or not, and if they haven't
# then these will be used to generate paths for them.
if work_dir is None:
work_dir = self.DEFAULT_WORKDIR
if config_name is None:
config_name = self.DEFAULT_CONFIG_NAME
if narration is None:
narration = self.DEFAULT_NARRATION
if mode is None:
mode = self.DEFAULT_MODE
# if no configuration was specified use the default one
if configuration is None:
configuration = self.get_default_configuration()
# reparametrize the configuration with the given path
# parameters and anything else in kwargs. If they are none
# this will have no effect anyhow
logging.debug("Reparametrizing the configuration")
configuration = configuration.reparametrize(work_dir=work_dir,
config_name=config_name,
narration=narration,
mode=mode,
**kwargs)
# make parametric paths for the checkpoint directory and the
# orchestrator pickle to be made, unless they are explicitly given
if checkpoint_dir is None:
# the checkpoint directory will be in the work dir
logging.debug("checkpoint directory defaulted to the work_dir")
checkpoint_dir = work_dir
logging.debug("In the orchestrate run, calling to run_snapshot by time")
# then actually run the simulation with checkpointing. This
# returns the end snapshot and doesn't write out anything to
# orchestrators other than the checkpointing
(end_snapshot, configuration_hash, checkpoint_db_path, last_cycle_idx) =\
self.run_snapshot_by_time(snapshot_hash, run_time, n_steps,
checkpoint_freq=checkpoint_freq,
checkpoint_dir=checkpoint_dir,
configuration=configuration,
checkpoint_mode=orch_mode)
logging.debug("Finished running snapshot by time")
# if the last cycle in the run was a checkpoint skip this step
# of saving a checkpoint
do_final_checkpoint = True
# make sure the checkpoint_freq is defined before testing it
if checkpoint_freq is not None:
if checkpoint_freq % last_cycle_idx == 0:
logging.debug("Last cycle saved a checkpoint, no need to save one")
do_final_checkpoint = False
if do_final_checkpoint:
logging.debug("Saving a final checkpoint for the end of the run")
# now that it is finished we save the final snapshot to the
# checkpoint file. This is done transactionally using the
# SQLite transaction functionality (either succeeds or doesn't
# happen) that way we don't have worry about data integrity
# loss. Here we also don't have to worry about other processes
# interacting with the checkpoint which makes it isolated.
self._save_checkpoint(end_snapshot, configuration_hash,
checkpoint_db_path, last_cycle_idx)
logging.debug("Finished saving the final checkpoint for the run")
# then return the final orchestrator
logging.debug("Getting a connection to that orch to retun")
checkpoint_orch = Orchestrator(checkpoint_db_path,
mode='r+',
append_only=True)
return checkpoint_orch
def reconcile_orchestrators(host_path, *orchestrator_paths):
"""
Parameters
----------
    host_path :
    *orchestrator_paths :
Returns
-------
"""
if not osp.exists(host_path):
assert len(orchestrator_paths) > 1, \
"If the host path is a new orchestrator, must give at least 2 orchestrators to merge."
# open the host orchestrator at the location which will have all
# of the new things put into it from the other orchestrators. If
# it doesn't already exist it will be created otherwise open
# read-write.
new_orch = Orchestrator(orch_path=host_path,
mode='a',
append_only=True)
# TODO deprecate, if there is no defaults we can't set them since
# the mode is append only, we don't really care about these so
# don't set them, otherwise do some mode logic to figure this out
# and open in write mode and set defaults, then change to append
# only
# # if this is an existing orchestrator copy the default
# # sim_apparatus and init_walkers
# try:
# default_app = new_orch.get_default_sim_apparatus()
# except KeyError:
# # no default apparatus, that is okay
# pass
# else:
# # set it
# new_orch.set_default_sim_apparatus(default_app)
# # same for the initial walkers
# try:
# default_walkers = new_orch.get_default_init_walkers()
# except KeyError:
# # no default apparatus, that is okay
# pass
# else:
# # set it
# new_orch.set_default_sim_apparatus(default_walkers)
for orch_path in orchestrator_paths:
# open it in read-write fail if doesn't exist
orch = Orchestrator(orch_path=orch_path,
mode='r+',
append_only=True)
# add in all snapshots from each orchestrator, by the hash not the
# snapshots themselves, we trust they are correct
for snaphash in orch.snapshot_hashes:
# check that the hash is not already in the snapshots
if any([True if snaphash == md5 else False for md5 in new_orch.snapshot_hashes]):
# skip it and move on
continue
# if it is not copy it over without deserializing
new_orch.snapshot_kv[snaphash] = orch.snapshot_kv[snaphash]
# add in the configurations for the runs from each
# orchestrator, by the hash not the snapshots themselves, we
# trust they are correct
for run_id in orch.run_hashes():
config_hash = orch.run_configuration_hash(*run_id)
# check that the hash is not already in the snapshots
if any([True if config_hash == md5 else False for md5 in new_orch.configuration_hashes]):
# skip it and move on
continue
# if it is not set it
new_orch.configuration_kv[config_hash] = orch.configuration_kv[config_hash]
# concatenate the run table with an SQL union from an attached
# database
attached_table_name = "other"
# query to attach the foreign database
attach_query = """
ATTACH '{}' AS {}
""".format(orch_path, attached_table_name)
# query to update the runs tabel with new unique runs
union_query = """
INSERT INTO runs
SELECT * FROM (
SELECT * FROM {}.runs
EXCEPT
SELECT * FROM runs
)
""".format(attached_table_name)
# query to detach the table
detach_query = """
DETACH {}
""".format(attached_table_name)
# then run the queries
cursor = new_orch._db.cursor()
try:
cursor.execute('BEGIN TRANSACTION')
cursor.execute(attach_query)
cursor.execute(union_query)
cursor.execute('COMMIT')
cursor.execute(detach_query)
except:
cursor.execute('COMMIT')
import pdb; pdb.set_trace()
cursor.execute("SELECT * FROM (SELECT * FROM other.runs EXCEPT SELECT * FROM runs)")
recs = cursor.fetchall()
return new_orch
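# --- Hedged usage sketch (added for illustration; not part of the original
# module): merging two checkpoint databases into a single host orchestrator.
# The file names below are illustrative only.
#
#   new_orch = reconcile_orchestrators("all_runs.orch.sqlite",
#                                      "run1_checkpoint.orch.sqlite",
#                                      "run2_checkpoint.orch.sqlite")
#   print(new_orch.run_hashes())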
| 32.319713 | 125 | 0.595883 |
911d2626da51dec7964f3f20d1a80f93b2a0e8f3
| 2,681 |
py
|
Python
|
src/generate_class_specific_samples.py
|
HesterLim/pytorch-cnn-visualizations
|
59ddf0ef6ea2c9d4d69c1ac6b260cb399867d178
|
[
"MIT"
] | 6,725 |
2017-10-25T08:00:25.000Z
|
2022-03-31T15:25:46.000Z
|
src/generate_class_specific_samples.py
|
woojoo99/pytorch-cnn-visualizations
|
16eddfa055a9c618ba548e9fb4529e2ccbc79c35
|
[
"MIT"
] | 105 |
2017-11-26T11:59:24.000Z
|
2022-01-11T01:37:00.000Z
|
src/generate_class_specific_samples.py
|
woojoo99/pytorch-cnn-visualizations
|
16eddfa055a9c618ba548e9fb4529e2ccbc79c35
|
[
"MIT"
] | 1,419 |
2017-10-25T08:00:27.000Z
|
2022-03-30T08:28:35.000Z
|
"""
Created on Thu Oct 26 14:19:44 2017
@author: Utku Ozbulak - github.com/utkuozbulak
"""
import os
import numpy as np
import torch
from torch.optim import SGD
from torchvision import models
from misc_functions import preprocess_image, recreate_image, save_image
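# --- Hedged sketch (added for illustration): the ClassSpecificImageGeneration
# class used in __main__ below is not included in this snippet. A minimal
# gradient-ascent version consistent with that usage; the iteration count,
# learning rate and output paths are assumptions, not the original code.
class ClassSpecificImageGeneration():
    def __init__(self, model, target_class):
        self.model = model
        self.model.eval()
        self.target_class = target_class
        # Start from random noise in the 0-255 range expected by preprocess_image
        self.created_image = np.uint8(np.random.uniform(0, 255, (224, 224, 3)))
        if not os.path.exists('../generated'):
            os.makedirs('../generated')

    def generate(self, iterations=150):
        for i in range(1, iterations + 1):
            self.processed_image = preprocess_image(self.created_image, False)
            optimizer = SGD([self.processed_image], lr=6)
            output = self.model(self.processed_image)
            # Maximizing the target class score == minimizing its negative
            class_loss = -output[0, self.target_class]
            self.model.zero_grad()
            class_loss.backward()
            optimizer.step()
            self.created_image = recreate_image(self.processed_image)
            if i % 10 == 0:
                save_image(self.created_image,
                           '../generated/c_specific_iteration_' + str(i) + '.png')
        return self.processed_image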
if __name__ == '__main__':
target_class = 130 # Flamingo
pretrained_model = models.alexnet(pretrained=True)
csig = ClassSpecificImageGeneration(pretrained_model, target_class)
csig.generate()
| 34.818182 | 125 | 0.613577 |
911d31b9a8a7937bf3f3cbbfb6a83e53d58e13d7
| 16,673 |
py
|
Python
|
sumo/tools/net/visum_mapDistricts.py
|
iltempe/osmosi
|
c0f54ecdbb7c7b5602d587768617d0dc50f1d75d
|
[
"MIT"
] | null | null | null |
sumo/tools/net/visum_mapDistricts.py
|
iltempe/osmosi
|
c0f54ecdbb7c7b5602d587768617d0dc50f1d75d
|
[
"MIT"
] | null | null | null |
sumo/tools/net/visum_mapDistricts.py
|
iltempe/osmosi
|
c0f54ecdbb7c7b5602d587768617d0dc50f1d75d
|
[
"MIT"
] | 2 |
2017-12-14T16:41:59.000Z
|
2020-10-16T17:51:27.000Z
|
#!/usr/bin/env python
"""
@file visum_mapDistricts.py
@author Daniel Krajzewicz
@author Michael Behrisch
@date 2007-10-25
@version $Id$
This script reads two networks and two lists of matching nodes,
reprojects the first network onto the second and maps its districts onto it,
writing the resulting districts, connections, nodes and edges to XML files.
SUMO, Simulation of Urban MObility; see http://sumo.dlr.de/
Copyright (C) 2008-2017 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import math
from optparse import OptionParser
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import sumolib.net
import netshiftadaptor
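# --- Hedged sketch (added for illustration): computeDistance, writeNodes and
# writeEdges are called later in this script but are not included in the
# snippet. Minimal stand-ins consistent with their call sites; the output file
# names and the attributes used on the net/edge objects are assumptions.
def computeDistance(n1, n2):
    xd = n1._coord[0] - n2._coord[0]
    yd = n1._coord[1] - n2._coord[1]
    return math.sqrt(xd * xd + yd * yd)


def writeNodes(net):
    fd = open("nodes.xml", "w")
    fd.write("<nodes>\n")
    for node in net._nodes:
        fd.write('   <node id="' + node._id + '" x="' + str(node._coord[0]) +
                 '" y="' + str(node._coord[1]) + '"/>\n')
    fd.write("</nodes>\n")
    fd.close()


def writeEdges(net):
    fd = open("edges.xml", "w")
    fd.write("<edges>\n")
    for edge in net._edges:
        fd.write('   <edge id="' + edge._id + '" from="' + edge._from._id +
                 '" to="' + edge._to._id + '"/>\n')
    fd.write("</edges>\n")
    fd.close()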
# initialise
optParser = OptionParser()
optParser.add_option("-v", "--verbose", action="store_true", dest="verbose",
default=False, help="tell me what you are doing")
# i/o
optParser.add_option("-1", "--net1", dest="net1",
help="SUMO network to use (mandatory)", metavar="FILE")
optParser.add_option("-2", "--net2", dest="net2",
help="SUMO network to use (mandatory)", metavar="FILE")
optParser.add_option("-a", "--nodes1", dest="nodes1",
help="The first matching nodes", metavar="NODELIST")
optParser.add_option("-b", "--nodes2", dest="nodes2",
help="The second matching nodes", metavar="NODELIST")
# parse options
(options, args) = optParser.parse_args()
# read networks
if options.verbose:
print("Reading net#1...")
net1 = sumolib.net.readNet(options.net1)
if options.verbose:
print("Reading net#2...")
net2 = sumolib.net.readNet(options.net2)
# reproject the visum net onto the navteq net
adaptor = netshiftadaptor.NetShiftAdaptor(
net1, net2, options.nodes1.split(","), options.nodes2.split(","))
adaptor.reproject(options.verbose)
# build a speed-up grid
xmin = 100000
xmax = -100000
ymin = 100000
ymax = -100000
for n in net1._nodes:
xmin = min(xmin, n._coord[0])
xmax = max(xmax, n._coord[0])
ymin = min(ymin, n._coord[1])
ymax = max(ymax, n._coord[1])
for n in net2._nodes:
xmin = min(xmin, n._coord[0])
xmax = max(xmax, n._coord[0])
ymin = min(ymin, n._coord[1])
ymax = max(ymax, n._coord[1])
xmin = xmin - .1
xmax = xmax + .1
ymin = ymin - .1
ymax = ymax + .1
CELLSIZE = 100
arr1 = []
arr2 = []
for y in range(0, CELLSIZE):
arr1.append([])
arr2.append([])
for x in range(0, CELLSIZE):
arr1[-1].append([])
arr2[-1].append([])
cw = (xmax - xmin) / float(CELLSIZE)
ch = (ymax - ymin) / float(CELLSIZE)
for n in net2._nodes:
cx = (n._coord[0] - xmin) / cw
cy = (n._coord[1] - ymin) / ch
arr1[int(cy)][int(cx)].append(n)
for n in net1._nodes:
cx = (n._coord[0] - xmin) / cw
cy = (n._coord[1] - ymin) / ch
arr2[int(cy)][int(cx)].append(n)
# map
nmap1to2 = {}
nmap2to1 = {}
nodes1 = net2._nodes
nodes2 = net1._nodes
highwayNodes2 = set()
highwaySinks2 = set()
highwaySources2 = set()
urbanNodes2 = set()
for n2 in nodes2:
noIncoming = 0
noOutgoing = 0
for e in n2._outgoing:
if e.getSpeed() > 80. / 3.6 and e.getSpeed() < 99:
highwayNodes2.add(n2)
if e.getSpeed() < 99:
noOutgoing = noOutgoing + 1
for e in n2._incoming:
if e.getSpeed() > 80. / 3.6 and e.getSpeed() < 99:
highwayNodes2.add(n2)
if e.getSpeed() < 99:
noIncoming = noIncoming + 1
if n2 in highwayNodes2:
if noOutgoing == 0:
highwaySinks2.add(n2)
if noIncoming == 0:
highwaySources2.add(n2)
else:
urbanNodes2.add(n2)
print("Found " + str(len(highwaySinks2)) + " highway sinks in net2")
cont = ""
for n in highwaySinks2:
cont = cont + n._id + ", "
print(cont)
cont = ""
print("Found " + str(len(highwaySources2)) + " highway sources in net2")
for n in highwaySources2:
cont = cont + n._id + ", "
print(cont)
fdd = open("dconns.con.xml", "w")
fdd.write("<connections>\n")
highwaySinks1 = set()
highwaySources1 = set()
origDistrictNodes = {}
nnn = {}
for n1 in nodes1:
if n1._id.find('-', 1) < 0:
continue
# if n1._id.find("38208387")<0:
# continue
un1 = None
for e in n1._outgoing:
un1 = e._to
for e in n1._incoming:
un1 = e._from
d = n1._id[:n1._id.find('-', 1)]
if d[0] == '-':
d = d[1:]
if d not in origDistrictNodes:
origDistrictNodes[d] = []
if options.verbose:
print("District: " + d)
isHighwayNode = False
isHighwaySink = False
isHighwaySource = False
noIncoming = 0
noOutgoing = 0
noInConns = 0
noOutConns = 0
for e in un1._outgoing:
if e.getSpeed() > 80. / 3.6 and e.getSpeed() < 99:
isHighwayNode = True
if e.getSpeed() < 99:
noOutgoing = noOutgoing + 1
if e.getSpeed() > 99:
noOutConns = noOutConns + 1
for e in un1._incoming:
if e.getSpeed() > 80. / 3.6 and e.getSpeed() < 99:
isHighwayNode = True
if e.getSpeed() < 99:
noIncoming = noIncoming + 1
if e.getSpeed() > 99:
noInConns = noInConns + 1
if options.verbose:
print("Check", un1._id, noOutgoing, noIncoming)
if isHighwayNode:
if noOutgoing == 0:
highwaySinks1.add(n1)
isHighwaySink = True
if noIncoming == 0:
highwaySources1.add(n1)
isHighwaySource = True
# the next is a hack for bad visum-networks
if noIncoming == 1 and noOutgoing == 1 and noInConns == 1 and noOutConns == 1:
highwaySinks1.add(n1)
isHighwaySink = True
highwaySources1.add(n1)
isHighwaySource = True
best = None
bestDist = -1
check = urbanNodes2
if n1 in highwaySinks1:
check = highwaySinks2
elif n1 in highwaySources1:
check = highwaySources2
elif isHighwayNode:
check = highwayNodes2
for n2 in check:
dist = computeDistance(un1, n2)
if bestDist == -1 or bestDist > dist:
best = n2
bestDist = dist
if best:
nnn[best] = n1
if d not in nmap1to2:
nmap1to2[d] = []
if best not in nmap1to2[d]:
nmap1to2[d].append(best)
if best not in nmap2to1:
nmap2to1[best] = []
if n1 not in nmap2to1[best]:
nmap2to1[best].append(n1)
if options.verbose:
print("a: " + d + "<->" + best._id)
if best not in origDistrictNodes[d]:
origDistrictNodes[d].append(best)
preBest = best
best = None
bestDist = -1
check = []
if n1 in highwaySinks1 or preBest in highwaySinks2:
check = highwaySources2
elif n1 in highwaySources1 or preBest in highwaySources2:
check = highwaySinks2
elif isHighwayNode:
check = highwayNodes2
for n2 in check:
dist = computeDistance(un1, n2)
if (bestDist == -1 or bestDist > dist) and n2 != preBest:
best = n2
bestDist = dist
if best:
nnn[best] = n1
if d not in nmap1to2:
nmap1to2[d] = []
if best not in nmap1to2[d]:
nmap1to2[d].append(best)
if best not in nmap2to1:
nmap2to1[best] = []
if n1 not in nmap2to1[best]:
nmap2to1[best].append(n1)
print("b: " + d + "<->" + best._id)
if best not in origDistrictNodes[d]:
origDistrictNodes[d].append(best)
if options.verbose:
print("Found " + str(len(highwaySinks1)) + " highway sinks in net1")
for n in highwaySinks1:
print(n._id)
print("Found " + str(len(highwaySources1)) + " highway sources in net1")
for n in highwaySources1:
print(n._id)
connectedNodesConnections = {}
for d in nmap1to2:
for n2 in nmap1to2[d]:
if n2 in connectedNodesConnections:
continue
n1i = net1.addNode("i" + n2._id, nnn[n2]._coord)
n1o = net1.addNode("o" + n2._id, nnn[n2]._coord)
haveIncoming = False
incomingLaneNo = 0
for e in n2._incoming:
if e._id[0] != "i" and e._id[0] != "o":
haveIncoming = True
incomingLaneNo = incomingLaneNo + e.getLaneNumber()
haveOutgoing = False
outgoingLaneNo = 0
for e in n2._outgoing:
if e._id[0] != "i" and e._id[0] != "o":
haveOutgoing = True
outgoingLaneNo = outgoingLaneNo + e.getLaneNumber()
if haveIncoming:
e1 = net1.addEdge("o" + n2._id, n2._id, n1o._id, -2)
if haveOutgoing:
net1.addLane(e1, 20, 100.)
else:
for i in range(0, incomingLaneNo):
net1.addLane(e1, 20, 100.)
if len(n2._incoming) == 1:
fdd.write(' <connection from="' + n2._incoming[
0]._id + '" to="' + e1._id + '" lane="' + str(i) + ':' + str(i) + '"/>\n')
if haveOutgoing:
if options.verbose:
print("has outgoing")
e2 = net1.addEdge("i" + n2._id, n1i._id, n2._id, -2)
if haveIncoming:
net1.addLane(e2, 20, 100.)
else:
for i in range(0, outgoingLaneNo):
net1.addLane(e2, 20, 100.)
if len(n2._outgoing) == 1:
fdd.write(' <connection from="' + e2._id + '" to="' +
n2._outgoing[0]._id + '" lane="' + str(i) + ':' + str(i) + '"/>\n')
connectedNodesConnections[n2] = [n1i, n1o]
newDistricts = {}
districtSources = {}
districtSinks = {}
mappedDistrictNodes = {}
connNodes = {}
dRemap = {}
for d in nmap1to2:
newDistricts[d] = []
if len(nmap1to2[d]) == 1:
n = nmap1to2[d][0]
if n in dRemap:
districtSources[d] = districtSources[dRemap[n]]
districtSinks[d] = districtSinks[dRemap[n]]
newDistricts[d] = []
newDistricts[d].append(n._id)
continue
else:
dRemap[n] = d
[ni, no] = connectedNodesConnections[n]
if len(ni._outgoing) > 0:
districtSources[d] = ni._outgoing[0]._id
if len(no._incoming) > 0:
districtSinks[d] = no._incoming[0]._id
fdd.write(' <connection from="' + no._incoming[0]._id + '"/>\n')
else:
incomingLaneNoG = 0
outgoingLaneNoG = 0
for n in nmap1to2[d]:
for e in n._incoming:
if e._id[0] != "i" and e._id[0] != "o":
incomingLaneNoG = incomingLaneNoG + e.getLaneNumber()
for e in n._outgoing:
if e._id[0] != "i" and e._id[0] != "o":
outgoingLaneNoG = outgoingLaneNoG + e.getLaneNumber()
p1 = [0, 0]
p11 = [0, 0]
p12 = [0, 0]
p2 = [0, 0]
for n in nmap1to2[d]:
p1[0] = p1[0] + n._coord[0]
p1[1] = p1[1] + n._coord[1]
p2[0] = p2[0] + nnn[n]._coord[0]
p2[1] = p2[1] + nnn[n]._coord[1]
p2[0] = (p1[0] + p2[0]) / float(len(origDistrictNodes[d]) * 2)
p2[1] = (p1[1] + p2[1]) / float(len(origDistrictNodes[d]) * 2)
dn2i = net1.addNode("cci" + d, p2)
dn2o = net1.addNode("cci" + d, p2)
p11[0] = p1[0] / float(len(origDistrictNodes[d]))
p11[1] = p1[1] / float(len(origDistrictNodes[d]))
dn1o = net1.addNode("co" + d, p11)
e1 = net1.addEdge("co" + d, dn1o._id, dn2o._id, -2)
for i in range(0, incomingLaneNoG):
net1.addLane(e1, 22, 100.)
districtSinks[d] = e1._id
p12[0] = p1[0] / float(len(origDistrictNodes[d]))
p12[1] = p1[1] / float(len(origDistrictNodes[d]))
dn1i = net1.addNode("ci" + d, p12)
e2 = net1.addEdge("ci" + d, dn2i._id, dn1i._id, -2)
for i in range(0, outgoingLaneNoG):
net1.addLane(e2, 21, 100.)
districtSources[d] = e2._id
runningOutLaneNumber = 0
runningInLaneNumber = 0
for n2 in nmap1to2[d]:
[ni, no] = connectedNodesConnections[n2]
print("In: " + ni._id + " " + str(len(ni._incoming)) +
" " + str(len(ni._outgoing)))
print("Out: " + no._id + " " + str(len(no._incoming)) +
" " + str(len(no._outgoing)))
if len(no._incoming) > 0:
incomingLaneNo = 0
for e in n2._incoming:
if e._id[0] != "i" and e._id[0] != "o":
incomingLaneNo = incomingLaneNo + e.getLaneNumber()
e1 = net1.addEdge("o" + d + "#" + n2._id, no._id, dn1o._id, -2)
for i in range(0, incomingLaneNo):
net1.addLane(e1, 19, 100.)
fdd.write(' <connection from="' + "o" + d + "#" + n2._id + '" to="' + dn1o._outgoing[
0]._id + '" lane="' + str(i) + ':' + str(runningOutLaneNumber) + '"/>\n')
runningOutLaneNumber = runningOutLaneNumber + 1
fdd.write(
' <connection from="' + dn1o._outgoing[0]._id + '"/>\n')
if incomingLaneNo == 0:
net1.addLane(e1, 19, 100.)
runningOutLaneNumber = runningOutLaneNumber + 1
if len(ni._outgoing) > 0:
outgoingLaneNo = 0
for e in n2._outgoing:
if e._id[0] != "i" and e._id[0] != "o":
outgoingLaneNo = outgoingLaneNo + e.getLaneNumber()
e2 = net1.addEdge("i" + d + "#" + n2._id, dn1i._id, ni._id, -2)
for i in range(0, outgoingLaneNo):
net1.addLane(e2, 18, 100.)
fdd.write(' <connection from="' + dn1i._incoming[
0]._id + '" to="' + "i" + d + "#" + n2._id + '" lane="' + str(runningInLaneNumber) + ':' + str(i) + '"/>\n')
runningInLaneNumber = runningInLaneNumber + 1
if outgoingLaneNo == 0:
net1.addLane(e2, 18, 100.)
runningInLaneNumber = runningInLaneNumber + 1
fd = open("districts.xml", "w")
fd.write("<tazs>\n")
for d in newDistricts:
fd.write(' <taz id="' + d + '">\n')
if d in districtSources:
fd.write(
' <tazSource id="' + districtSources[d] + '" weight="1"/>\n')
if d in districtSinks:
fd.write(
' <tazSink id="' + districtSinks[d] + '" weight="1"/>\n')
fd.write(' </taz>\n')
fd.write("</tazs>\n")
fd.close()
fdd.write("</connections>\n")
writeNodes(net1)
writeEdges(net1)
| 33.346 | 138 | 0.537156 |
911d404601c245497e0b927e48a8d554d335993b
| 42,222 |
py
|
Python
|
BKPMediaDetector.py
|
bkpifc/BKPMediaDetector
|
51858b45e218e0c4b5ed4d6aac6d751e029d850e
|
[
"Apache-2.0"
] | 5 |
2019-04-03T08:04:06.000Z
|
2019-10-01T12:08:30.000Z
|
BKPMediaDetector.py
|
bkpifc/BKPMediaDetector
|
51858b45e218e0c4b5ed4d6aac6d751e029d850e
|
[
"Apache-2.0"
] | 13 |
2019-04-08T14:24:15.000Z
|
2022-03-11T23:50:32.000Z
|
BKPMediaDetector.py
|
bkpifc/BKPMediaDetector
|
51858b45e218e0c4b5ed4d6aac6d751e029d850e
|
[
"Apache-2.0"
] | 2 |
2019-04-04T11:20:27.000Z
|
2019-04-04T14:51:11.000Z
|
#!/usr/bin/env python3
######
# General Detector
# 06.12.2018 / Last Update: 20.05.2021
# LRB
######
import numpy as np
import os
import sys
import tensorflow as tf
import hashlib
import cv2
import magic
import PySimpleGUI as sg
import csv
import imagehash
import face_recognition
import subprocess
from itertools import groupby
from distutils.version import StrictVersion
from PIL import Image
from datetime import datetime
from time import strftime
from time import gmtime
from multiprocessing import Pool
from Models.Face import detect_face
from pathlib import Path
from openvino.inference_engine import IENetwork, IECore
from AudioAnalysis import audioAnalysis
######
# Worker function to check the input provided via the GUI
#######
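######
# Hedged sketch (added for illustration): the original body of this worker
# function is not included in the snippet. A minimal check consistent with the
# GUI defined in __main__; the exact validation rules are assumptions.
######
def validateInput(gui_input):
    if gui_input[0] == 'Cancel' or not gui_input[1][0] or not gui_input[1][1]:
        sg.Popup('Please specify an input and an output folder.', title='Error')
        sys.exit(1)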
######
# Worker function to update the progress bar
######
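######
# Hedged sketch (added for illustration): the original body of this worker
# function is not included in the snippet. The 12 steps match the progress
# layout defined in __main__; using the one-line meter here is an assumption.
######
def updateProgressMeter(step, message):
    sg.OneLineProgressMeter('BKP Media Detector', step, 12, 'progress', message)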
######
# Worker function to prepare and reshape the input images into a Numpy array
# and to calculate the MD5 hashes of them.
######
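######
# Hedged sketch (added for illustration): the original body of this worker
# function is not included in the snippet. A minimal version consistent with
# how its results are consumed in __main__ (mimetype flags for video/audio/
# octet-stream, an error string, or a decoded image); the exact tuple layout
# for successfully decoded images is an assumption.
######
def load_image_into_numpy_array(image_path):
    try:
        mime = magic.from_file(str(image_path), mime=True)
        if mime.startswith('video'):
            return (str(image_path), "VIDEO")
        if mime.startswith('audio'):
            return (str(image_path), "AUDIO")
        if mime == 'application/octet-stream':
            return (str(image_path), "OCTET")
        image = cv2.imread(str(image_path))
        if image is None:
            return ("Could not decode " + str(image_path) + "\n", "ERROR")
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        with open(str(image_path), 'rb') as f:
            md5_hash = hashlib.md5(f.read()).hexdigest()
        return (image, md5_hash, str(image_path))
    except Exception as exception:
        return (str(image_path) + " caused error: " + str(exception) + "\n", "ERROR")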
######
# Worker function to prepare and reshape the input videos to a Numpy array
# and to calculate the MD5 hashes of them.
# The function analyzes as many frames as indicated in the variable "frames_per_second" (Default = 0.5)
######
######
# Detection within loaded images with Tensorflow framework
# Creation of output file with hashes, detection scores and class
######
######
# Detect and count faces in loaded images
# Prepare and call age/gender detection once done
######
######
# Detection with the OPEN VINO Framework
# Evaluate Age & Gender based on input faces
######
######
# Detection with the OPEN VINO Framework
# Creation of output file with hashes, detection scores and class
######
######
# Worker function to load and encode known faces and to compare them against
# the provided input material
######
######
# Worker function to conduct speech detection in audio files
# for all audio files detected
######
######
# Split the report file to allow seamless integration into XWays Hash Database per category
######
######
#
# Main program function
# First initiates required parameters and variables, then loads the GUI
# After which the image and video load functions are triggered based on the input parameters
# Finally, the detection is executed and results written to the place requested
#
######
# Prevent execution when externally called
if __name__ == '__main__':
######
# Collecting parameters via GUI
######
sg.ChangeLookAndFeel('Dark')
layout = [[sg.Text('General Settings', font=("Helvetica", 13), text_color='sea green')],
[sg.Text('Please specify the folder holding the media data:')],
[sg.Input(), sg.FolderBrowse('Browse', initial_folder='/home/b/Desktop/TestBilder', button_color=('black', 'grey'))], #Path.home() = Initial folder
[sg.Text('Where shall I place the results?')],
[sg.Input(), sg.FolderBrowse('Browse', initial_folder='/home/b/Desktop/TestResults', button_color=('black', 'grey'))], #Path.home()
[sg.Text('TENSORFLOW DETECTORS')],
[sg.Checkbox('Objects/Persons', size=(15, 2)),
sg.Checkbox('Actions'),
sg.Checkbox('IS Logos'),
sg.Checkbox("Face Recognition")],
[sg.Text('OPEN VINO DETECTORS')],
[sg.Checkbox('Objects-fast', size=(15, 2)),
sg.Checkbox('Faces/Age/Gender')],
[sg.Text('Output Format:'), sg.Listbox(values=('Nuix', 'XWays', 'csv'), size=(29, 3))],
[sg.Text('Video Settings', font=("Helvetica", 13), text_color='sea green')],
[sg.Text('# of frames to be analyzed per Minute:', size=(36, 0))],
[sg.Slider(range=(1, 120), orientation='h', size=(29, 20), default_value=30)],
[sg.Text('Max. # of frames to be analyzed per Video:', size=(36, 0))],
[sg.Slider(range=(1, 500), orientation='h', size=(29, 20), default_value=100)],
[sg.Text('Check for & discard similar frames?'),
sg.InputCombo(('Yes', 'No'), default_value='No', size=(10, 2))],
[sg.Text('Face Recognition', font=("Helvetica", 13), text_color='sea green')],
[sg.Text('Specify folder with known faces (if FaceReq selected): ')],
[sg.Input(), sg.FolderBrowse('Browse', initial_folder='/home/b/Desktop/known', button_color=('black', 'grey'))],
[sg.Text('Specify face recognition tolerance (Default: 60%):', size=(48, 0))],
[sg.Slider(range=(0, 100), orientation='h', size=(29, 20), default_value=60)],
[sg.Checkbox('Output detected faces as jpg', size=(25, 2))],
[sg.Text('Audio Settings', font=("Helvetica", 13), text_color='sea green')],
[sg.Text('AUDIO PROCESSING')],
[sg.Checkbox('Speech Detection', size=(15, 2))],
[sg.OK(button_color=('black', 'sea green')), sg.Cancel(button_color=('black', 'grey'))]]
layout_progress = [[sg.Text('Detection in progress')],
[sg.ProgressBar(12, orientation='h', size=(20, 20), key='progressbar')],
[sg.Cancel()]]
# Render the GUI
gui_input = sg.Window('BKP Media Detector').Layout(layout).Read()
error = False
# Validate input
validateInput(gui_input)
# Initiating progress meter
updateProgressMeter(1, 'Initializing variables & parameters...')
startTime = datetime.now()
# Variable to determine minimum GPU Processor requirement & to disable TF log output
# os.environ['TF_MIN_GPU_MULTIPROCESSOR_COUNT'] = '5'
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
# Validating TF version
if StrictVersion(tf.__version__) < StrictVersion('1.9.0'):
raise ImportError('Please upgrade your TensorFlow installation to v1.9.* or later!')
# Defining multiple needed variables based on GUI input & adding TF/OpenVINO directory to path
PATH_TO_INPUT = Path(gui_input[1][0])
TEST_IMAGE_PATHS = Path.iterdir(PATH_TO_INPUT)
number_of_input = 0
for elements in Path.iterdir(PATH_TO_INPUT):
number_of_input += 1
PATH_TO_RESULTS = Path(gui_input[1][1])
PATH_TO_OBJECT_DETECTION_DIR = '/home/b/Programs/tensorflow/models/research' # PLACEHOLDER-tobereplacedWithPathtoDirectory
sys.path.append(PATH_TO_OBJECT_DETECTION_DIR)
REPORT_FORMAT = gui_input[1][8]
frames_per_second = gui_input[1][9] / 60
max_frames_per_video = gui_input[1][10]
video_sensitivity_text = gui_input[1][11]
KNOWN_FACES_PATH = gui_input[1][12]
facereq_tolerance = int(gui_input[1][13])/100
output_detFaces = gui_input[1][14]
if video_sensitivity_text == "Yes":
video_sensitivity = 20
else:
video_sensitivity = 0
# Check which models to apply and load their corresponding label maps
from object_detection.utils import label_map_util
graphlist = []
indexlist = []
MODEL1 = bool(gui_input[1][2])
if MODEL1:
OPEN_IMAGES_GRAPH = str(Path('Models/OpenImages/openimages.pb'))
OPEN_IMAGES_LABELS = str(OPEN_IMAGES_GRAPH)[:-3] + '.pbtxt'
OPEN_IMAGES_INDEX = label_map_util.create_category_index_from_labelmap(OPEN_IMAGES_LABELS)
graphlist.append(OPEN_IMAGES_GRAPH)
indexlist.append(OPEN_IMAGES_INDEX)
MODEL2 = bool(gui_input[1][3])
if MODEL2:
AVA_GRAPH = str(Path('Models/AVA/ava.pb'))
AVA_LABELS = str(AVA_GRAPH)[:-3] + '.pbtxt'
AVA_INDEX = label_map_util.create_category_index_from_labelmap(AVA_LABELS)
graphlist.append(AVA_GRAPH)
indexlist.append(AVA_INDEX)
MODEL3 = bool(gui_input[1][4])
if MODEL3:
SPECIAL_DETECTOR_GRAPH = str(Path('Models/ISLogos/islogos.pb'))
SPECIAL_DETECTOR_LABELS = str(SPECIAL_DETECTOR_GRAPH)[:-3] + '.pbtxt'
SPECIAL_DETECTOR_INDEX = label_map_util.create_category_index_from_labelmap(SPECIAL_DETECTOR_LABELS)
graphlist.append(SPECIAL_DETECTOR_GRAPH)
indexlist.append(SPECIAL_DETECTOR_INDEX)
FACE_RECOGNITION = bool(gui_input[1][5])
OPEN_VINO_vgg19 = bool(gui_input[1][6])
FACE_MODEL = bool(gui_input[1][7])
AUDIO_SPEECH_DETECTION = bool(gui_input[1][15])
# Update the progress indicator
updateProgressMeter(2, 'Process started. Loading ' + str(number_of_input) + ' media files...')
# Create logfile
logfile = open(str(PATH_TO_RESULTS / 'Logfile.txt'), 'w')
logfile.write('***DETECTION LOG***\n')
logfile.write("*" + str(datetime.now()) + ': \tProcess started. Loading images...*\n')
# Create resultsfile
detectionresults_path = PATH_TO_RESULTS / 'Detection_Results.csv'
detectionresults = open(str(detectionresults_path), 'w')
if REPORT_FORMAT[0] == 'Nuix':
detectionresults.write("tag,searchterm\n")
else:
detectionresults.write("name,hash,score,category\n")
detectionresults.flush()
detectionresults.close()
# Initiate needed variables
vidlist = []
audiolist = []
final_images = []
errors = []
# Multiprocess the image load function on all CPU cores available
pool = Pool(maxtasksperchild=100)
processed_images = pool.map(load_image_into_numpy_array, TEST_IMAGE_PATHS, chunksize=10)
pool.close()
# Synchronize after completion
pool.join()
pool.terminate()
# Clean the result for None types (where image conversion failed)
    processed_images = [x for x in processed_images if x is not None]
# Check for the different flags set by mimetype
for processed_image in processed_images:
if str(processed_image[1]) == "VIDEO":
# If present, populate the video list
vidlist.append(processed_image[0])
elif str(processed_image[1]) == "AUDIO":
audiolist.append(processed_image[0])
elif str(processed_image[1]) == "OCTET":
if processed_image[0][-3:] in ["mp4", "mov", "mpg", "avi", "exo", "mkv", "m4v", "ebm"]:
vidlist.append(processed_image[0])
else:
audiolist.append(processed_image[0])
elif str(processed_image[1]) == "ERROR":
errors.append(processed_image[0])
else:
            # Otherwise, add it to the final images list
final_images.append(processed_image)
for error in errors:
logfile.write(error)
logfile.flush()
# Count the number of images before adding the videoframes
number_of_images = len(final_images)
# Update the progress indicator
updateProgressMeter(3, 'Loading ' + str(len(vidlist)) + ' Videos...')
# Multiprocess the video load function on all CPU cores available
pool = Pool(maxtasksperchild=10)
videoframes = pool.map(load_video_into_numpy_array, vidlist, chunksize=2)
pool.close()
# Synchronize after completion
pool.join()
pool.terminate()
number_of_videos = 0
# Clean the result for None types (where video conversion failed)
for video in videoframes:
if type(video) is str:
errors.append(video)
if type(video) is list:
final_images.extend(video)
number_of_videos += 1
for error in errors:
logfile.write(error)
logfile.flush()
# Split the result from the loading function into hashes and image arrays
if len(final_images) != 0:
image_path, hashvalues, image_nps = zip(*final_images)
# Update the progress indicator & logfile
updateProgressMeter(4, 'Starting detection of ' + str(len(final_images)) + ' media files')
logfile.write("*" + str(datetime.now()) + ": \tLoading completed. Detecting...*\n")
# Conduct Face Recognition if needed
if FACE_RECOGNITION:
known_face_counter = faceRecognition(KNOWN_FACES_PATH, image_path, image_nps, hashvalues)
# Conduct OpenVino VGG19 Model if needed
if OPEN_VINO_vgg19:
run_inference_openvino(image_path, image_nps, hashvalues)
# Execute all other detection models
if len(final_images) != 0:
run_inference_for_multiple_images(image_path, image_nps, hashvalues)
# Conduct face/age/gender detection
if FACE_MODEL:
faceDetection(image_path, image_nps, hashvalues)
if AUDIO_SPEECH_DETECTION:
audiofiles_processed = audioSpeechDetection(audiolist)
else:
audiofiles_processed = 0
# Check whether an Xways report needs to be created
if REPORT_FORMAT[0] == 'XWays':
createXWaysReport()
# Write process statistics to logfile
logfile.write("*Results:\t\t\t" + str(PATH_TO_RESULTS / 'Detection_Results.csv*\n'))
logfile.write("*Total Amount of Files:\t\t" + str(number_of_input) + " (of which " + str(number_of_images + number_of_videos + audiofiles_processed) + " were processed.)*\n")
logfile.write("*Processed Images:\t\t" + str(number_of_images) + "*\n")
logfile.write("*Processed Videos: \t\t" + str(number_of_videos) + " (analyzed " + str(frames_per_second * 60) + " frames per minute, up to max. 500) with the check for content-based duplicates set to " + video_sensitivity_text + "\n")
logfile.write("*Processed Audio Files:\t\t" + str(audiofiles_processed) + "*\n")
logfile.write("*Applied models:\n")
for y in range(0, len(graphlist)): logfile.write("\t\t\t\t" + graphlist[y] + "\n")
if OPEN_VINO_vgg19: logfile.write("\t\t\t\tOpenVINO Object Detector\n")
if FACE_MODEL: logfile.write("\t\t\t\tFace-Age-Gender Detector\n")
if FACE_RECOGNITION: logfile.write("\t\t\t\tFace Recognition (Known faces detected: " + str(known_face_counter) + ")\n")
logfile.write("*Processing time:\t\t" + str(datetime.now() - startTime) + "*\n")
logfile.write("*Time per processed file:\t" + str((datetime.now() - startTime) / (number_of_images + number_of_videos + audiofiles_processed)) + "*\n")
logfile.flush()
logfile.close()
# Update progress indicator
sg.OneLineProgressMeter('BKP Media Detector', 12, 12, 'key', 'Detection finished',orientation='h',size=(100, 10))
# Deliver final success pop up to user
sg.Popup('The detection was successful',
'The results are placed here:',
'Path: "{}"'.format(str(PATH_TO_RESULTS)))
| 40.676301 | 238 | 0.613093 |
911e4f54a8e9fbbfd53aa376d04e2f253bbddbd8
| 2,252 |
py
|
Python
|
src/BruteForce.py
|
stevenwalton/Retro-Learner
|
74586c57b5dd5f6e82abaff99344285731f1fc56
|
[
"MIT"
] | null | null | null |
src/BruteForce.py
|
stevenwalton/Retro-Learner
|
74586c57b5dd5f6e82abaff99344285731f1fc56
|
[
"MIT"
] | null | null | null |
src/BruteForce.py
|
stevenwalton/Retro-Learner
|
74586c57b5dd5f6e82abaff99344285731f1fc56
|
[
"MIT"
] | null | null | null |
import time
import retro
import FrameSkip
import TimeLimit
import Brute
| 34.121212 | 98 | 0.525311 |
911fe80423c3725cffb5c649027000c3b8755a5f
| 5,429 |
py
|
Python
|
tutorials/04-advanced/03-super-resolution-onnx/main.py
|
yakhyo/PyTorch-Tutorials
|
163287bc735b09c366dbdfa3989e81acaef6fa1f
|
[
"MIT"
] | 7 |
2021-05-16T14:36:20.000Z
|
2021-12-30T07:07:31.000Z
|
tutorials/04-advanced/03-super-resolution-onnx/main.py
|
yakhyo/PyTorch-Tutorials
|
163287bc735b09c366dbdfa3989e81acaef6fa1f
|
[
"MIT"
] | null | null | null |
tutorials/04-advanced/03-super-resolution-onnx/main.py
|
yakhyo/PyTorch-Tutorials
|
163287bc735b09c366dbdfa3989e81acaef6fa1f
|
[
"MIT"
] | 3 |
2021-05-17T12:11:11.000Z
|
2021-11-25T10:06:14.000Z
|
import io
import numpy as np
import torch.utils.model_zoo as model_zoo
import torch.onnx
import torch.nn as nn
import torch.nn.init as init
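# NOTE: the model definition is not shown in this excerpt. The standard
# SuperResolutionNet from the PyTorch super-resolution example (an efficient
# sub-pixel CNN that upsamples via PixelShuffle) is assumed below so that the
# rest of the script is self-contained; it matches the pretrained checkpoint
# referenced further down.
class SuperResolutionNet(nn.Module):
    def __init__(self, upscale_factor, inplace=False):
        super(SuperResolutionNet, self).__init__()
        self.relu = nn.ReLU(inplace=inplace)
        self.conv1 = nn.Conv2d(1, 64, (5, 5), (1, 1), (2, 2))
        self.conv2 = nn.Conv2d(64, 64, (3, 3), (1, 1), (1, 1))
        self.conv3 = nn.Conv2d(64, 32, (3, 3), (1, 1), (1, 1))
        self.conv4 = nn.Conv2d(32, upscale_factor ** 2, (3, 3), (1, 1), (1, 1))
        self.pixel_shuffle = nn.PixelShuffle(upscale_factor)
        self._initialize_weights()

    def forward(self, x):
        # Feature extraction followed by sub-pixel upsampling of the Y channel
        x = self.relu(self.conv1(x))
        x = self.relu(self.conv2(x))
        x = self.relu(self.conv3(x))
        x = self.pixel_shuffle(self.conv4(x))
        return x

    def _initialize_weights(self):
        init.orthogonal_(self.conv1.weight, init.calculate_gain('relu'))
        init.orthogonal_(self.conv2.weight, init.calculate_gain('relu'))
        init.orthogonal_(self.conv3.weight, init.calculate_gain('relu'))
        init.orthogonal_(self.conv4.weight)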
# ================================================================ #
# Building the Model #
# ================================================================ #
# Creating an instance from SuperResolutionNet
net = SuperResolutionNet(upscale_factor=3)
# ================================================================ #
# Downloading Pretrained Weights #
# ================================================================ #
model_url = 'https://s3.amazonaws.com/pytorch/test_data/export/superres_epoch100-44c6958e.pth'
# Initialize model with the pretrained weights
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net.load_state_dict(model_zoo.load_url(model_url, map_location=device))
net.eval() # Changing to eval mode to save it onnx format
# onnx input shape: x.shape : (batch_size=1, channel=1, H, W)
# The model expects the Y component of the YCbCr of an image as an input so it has one channel
x = torch.randn(1, 1, 224, 224, requires_grad=True)
torch_out = net(x)
# Export the model itself (not its forward-pass output) to ONNX format
torch.onnx.export(net,                     # model being run
x, # model input (or a tuple for multiple inputs)
"super_resolution.onnx", # where to save the model
export_params=True, # store the trained parameter weights inside the model file
opset_version=10, # the ONNX version to export the model to
do_constant_folding=True, # whether to execute constant folding for optimization
input_names=['input'], # the model's input names
output_names=['output'], # the model's output names
dynamic_axes={'input': {0: 'batch_size'}, # variable length axes
'output': {0: 'batch_size'}})
# ================================================================ #
# Loading ONNX model #
# ================================================================ #
import onnx
import onnxruntime
onnx_model = onnx.load("super_resolution.onnx")
onnx.checker.check_model(onnx_model)
ort_session = onnxruntime.InferenceSession("super_resolution.onnx")
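# NOTE: the to_numpy helper is not shown in this excerpt; the standard helper
# from the PyTorch ONNX tutorial is assumed here so the calls below are runnable.
def to_numpy(tensor):
    # Detach from the autograd graph if necessary, then move to CPU and convert
    return tensor.detach().cpu().numpy() if tensor.requires_grad else tensor.cpu().numpy()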
# compute ONNX Runtime output prediction
ort_inputs = {ort_session.get_inputs()[0].name: to_numpy(x)}
ort_outs = ort_session.run(None, ort_inputs)
# compare ONNX Runtime and PyTorch results
np.testing.assert_allclose(to_numpy(torch_out), ort_outs[0], rtol=1e-03, atol=1e-05)
print("Exported model has been tested with ONNXRuntime, and the result looks good!")
# ================================================================ #
# Reading Original Image and Feed it to Model #
# ================================================================ #
from PIL import Image
import torchvision.transforms as transforms
img = Image.open("../../../cat_224x224.jpg")
resize = transforms.Resize([224, 224])
img = resize(img)
# The model expects the Y component of the YCbCr of an image as an input
img_ycbcr = img.convert('YCbCr')
img_y, img_cb, img_cr = img_ycbcr.split()
to_tensor = transforms.ToTensor()
img_y = to_tensor(img_y)
img_y.unsqueeze_(0)
ort_inputs = {ort_session.get_inputs()[0].name: to_numpy(img_y)}
ort_outs = ort_session.run(None, ort_inputs)
img_out_y = ort_outs[0]
img_out_y = Image.fromarray(np.uint8((img_out_y[0] * 255.0).clip(0, 255)[0]), mode='L')
# get the output image follow post-processing step from PyTorch implementation
output = Image.merge(
"YCbCr",
[img_out_y, img_cb.resize(img_out_y.size, Image.BICUBIC), img_cr.resize(img_out_y.size, Image.BICUBIC), ]
).convert("RGB")
# Save the image, we will compare this with the output image from mobile device
output.save("../../../cat_superres_with_ort.jpg")
| 40.514925 | 120 | 0.592374 |
91209eac140dfeb3483e2df389892eaa71a76d66
| 8,963 |
py
|
Python
|
features/steps/section.py
|
revvsales/python-docx-1
|
5b3ff2b828cc30f1567cb1682a8cb399143732d7
|
[
"MIT"
] | 3,031 |
2015-01-02T11:11:24.000Z
|
2022-03-30T00:57:17.000Z
|
features/steps/section.py
|
revvsales/python-docx-1
|
5b3ff2b828cc30f1567cb1682a8cb399143732d7
|
[
"MIT"
] | 934 |
2015-01-06T20:53:56.000Z
|
2022-03-28T10:08:03.000Z
|
features/steps/section.py
|
revvsales/python-docx-1
|
5b3ff2b828cc30f1567cb1682a8cb399143732d7
|
[
"MIT"
] | 901 |
2015-01-07T18:22:07.000Z
|
2022-03-31T18:38:51.000Z
|
# encoding: utf-8
"""
Step implementations for section-related features
"""
from __future__ import absolute_import, print_function, unicode_literals
from behave import given, then, when
from docx import Document
from docx.enum.section import WD_ORIENT, WD_SECTION
from docx.section import Section
from docx.shared import Inches
from helpers import test_docx
# given ====================================================
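# NOTE: the individual step implementations are omitted from this excerpt. A
# representative sketch of the pattern used in such step modules is shown below;
# the fixture name is illustrative only, not taken from the original file.
@given("a Section object as section")
def given_a_Section_object_as_section(context):
    document = Document(test_docx("sct-section-props"))  # hypothetical fixture name
    context.section = document.sections[-1]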
# when =====================================================
# then =====================================================
| 34.340996 | 88 | 0.716278 |
9120d4c7c58950a1c79165874f5716c1d3e76e4c
| 4,421 |
py
|
Python
|
scipy/sparse/csgraph/_laplacian.py
|
seberg/scipy
|
d8081cdd40ed8cbebd5905c0ad6c323c57d5da6e
|
[
"BSD-3-Clause"
] | 1 |
2018-10-04T15:34:14.000Z
|
2018-10-04T15:34:14.000Z
|
scipy/sparse/csgraph/_laplacian.py
|
seberg/scipy
|
d8081cdd40ed8cbebd5905c0ad6c323c57d5da6e
|
[
"BSD-3-Clause"
] | null | null | null |
scipy/sparse/csgraph/_laplacian.py
|
seberg/scipy
|
d8081cdd40ed8cbebd5905c0ad6c323c57d5da6e
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Laplacian of a compressed-sparse graph
"""
# Authors: Aric Hagberg <[email protected]>
# Gael Varoquaux <[email protected]>
# Jake Vanderplas <[email protected]>
# License: BSD
import numpy as np
from scipy.sparse import isspmatrix, coo_matrix
###############################################################################
# Graph laplacian
def laplacian(csgraph, normed=False, return_diag=False):
""" Return the Laplacian matrix of a directed graph.
For non-symmetric graphs the out-degree is used in the computation.
Parameters
----------
csgraph : array_like or sparse matrix, 2 dimensions
compressed-sparse graph, with shape (N, N).
normed : bool, optional
If True, then compute normalized Laplacian.
return_diag : bool, optional
If True, then return diagonal as well as laplacian.
Returns
-------
lap : ndarray
The N x N laplacian matrix of graph.
diag : ndarray
The length-N diagonal of the laplacian matrix.
diag is returned only if return_diag is True.
Notes
-----
The Laplacian matrix of a graph is sometimes referred to as the
"Kirchoff matrix" or the "admittance matrix", and is useful in many
parts of spectral graph theory. In particular, the eigen-decomposition
of the laplacian matrix can give insight into many properties of the graph.
For non-symmetric directed graphs, the laplacian is computed using the
out-degree of each node.
Examples
--------
>>> from scipy.sparse import csgraph
>>> G = np.arange(5) * np.arange(5)[:, np.newaxis]
>>> G
array([[ 0, 0, 0, 0, 0],
[ 0, 1, 2, 3, 4],
[ 0, 2, 4, 6, 8],
[ 0, 3, 6, 9, 12],
[ 0, 4, 8, 12, 16]])
>>> csgraph.laplacian(G, normed=False)
array([[ 0, 0, 0, 0, 0],
[ 0, 9, -2, -3, -4],
[ 0, -2, 16, -6, -8],
[ 0, -3, -6, 21, -12],
[ 0, -4, -8, -12, 24]])
"""
if csgraph.ndim != 2 or csgraph.shape[0] != csgraph.shape[1]:
raise ValueError('csgraph must be a square matrix or array')
    if normed and (np.issubdtype(csgraph.dtype, np.signedinteger)
                   or np.issubdtype(csgraph.dtype, np.unsignedinteger)):
        csgraph = csgraph.astype(np.float64)
if isspmatrix(csgraph):
return _laplacian_sparse(csgraph, normed=normed,
return_diag=return_diag)
else:
return _laplacian_dense(csgraph, normed=normed,
return_diag=return_diag)
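# NOTE: the private helpers are not included in this excerpt. The sketch below
# is an illustrative reconstruction of the dense case only (not the original
# SciPy code): off-diagonal entries are the negated edge weights and the
# diagonal holds the degree, with optional symmetric normalization.
def _laplacian_dense(graph, normed=False, return_diag=False):
    m = np.array(graph, dtype=np.float64)
    np.fill_diagonal(m, 0)             # self-loops do not contribute
    w = m.sum(axis=1)                  # (out-)degree of each node (row sums)
    if normed:
        w = np.sqrt(w)
        w_zeros = (w == 0)
        w[w_zeros] = 1                 # keep isolated nodes well-defined
        m /= w
        m /= w[:, np.newaxis]
        m *= -1
        np.fill_diagonal(m, 1 - w_zeros)
    else:
        m *= -1
        np.fill_diagonal(m, w)
    if return_diag:
        return m, np.diagonal(m).copy()
    return m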
| 32.507353 | 86 | 0.570007 |
9120f5bc8f814b4692efc7406b81c0fe8103d83e
| 7,225 |
py
|
Python
|
samples/barebone/settings.py
|
kuasha/peregrine
|
b3dd92146d26fe9e4ea589868431b590324b47d1
|
[
"MIT"
] | 1 |
2018-10-12T15:12:15.000Z
|
2018-10-12T15:12:15.000Z
|
samples/barebone/settings.py
|
kuasha/peregrine
|
b3dd92146d26fe9e4ea589868431b590324b47d1
|
[
"MIT"
] | null | null | null |
samples/barebone/settings.py
|
kuasha/peregrine
|
b3dd92146d26fe9e4ea589868431b590324b47d1
|
[
"MIT"
] | null | null | null |
import os
import logging
from collections import namedtuple
from Crypto.PublicKey import RSA
from tornado import gen
from tornado import concurrent
from cosmos.rbac.object import *
from cosmos.service import OBSERVER_PROCESSOR
DEBUG = True
DB_HOST = "127.0.0.1"
DB_NAME = "cosmos"
DB_PORT = 27017
DB_USER_NAME = None
DB_USER_PASSWORD = None
LOG_DB_HOST = "127.0.0.1"
LOG_DB_NAME = "cosmos"
LOG_COL_NAME = "log"
LOG_DB_PORT = 27017
LOG_LEVEL = logging.DEBUG
LOG_DB_USER_NAME = None
LOG_DB_USER_PASSWORD = None
STATIC_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "app")
TEMPLATE_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates")
INDEX_HTML_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "app/index.html")
LOGIN_HTML_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates/login.html")
WEB_SERVER_LISTEN_PORT = 8080
DB_CHANGE_PROCESSOR_ENDPOINT_FORMAT = "http://localhost:{0}/handlechange"
#TODO: You MUST change the following values
COOKIE_SECRET = "+8/YqtEUQfiYLUdO2iJ2OyzHHFSADEuKvKYwFqemFas="
HMAC_KEY = "+8/YqtEUQfiYLUdO2iJ2OyzHIFSAKEuKvKYwFqemFas="
facebook_client_id='000000000000000'
facebook_client_secret='00000000000000000000000000000000'
facebook_scope = "email,public_profile,user_friends"
facebook_redirect_uri = None
DEFAULT_LOGIN_NEXT_URI = "/"
"""
# pip install pycrypto for Crypto
# then from python console generate private_pem and public_pem and assign to SERVICE_PRIVATE_KEY and SERVICE_PUBLIC_KEY
import Crypto.PublicKey.RSA as RSA
key = RSA.generate(2048)
private_pem = key.exportKey()
public_pem = key.publickey().exportKey()
"""
# TODO: set both keys below. Private key backup must be kept in a secure place and should never be shared
# If private key is compromised, this service and all other services that trust this will be compromised
# Public key is to share publicly for verification
SERVICE_PRIVATE_KEY = None
SERVICE_PUBLIC_KEY = None
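# Example (illustrative only, not part of the original settings): for throwaway
# local testing a key pair could be generated at startup instead of pasting PEM
# strings; production keys should be generated once, kept secret and configured
# explicitly as described above.
#
#   _service_key = RSA.generate(2048)
#   SERVICE_PRIVATE_KEY = _service_key.exportKey()
#   SERVICE_PUBLIC_KEY = _service_key.publickey().exportKey()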
directory_listing_allowed = True
CONFIGURE_LOG = False
START_WEB_SERVER = True
START_OBJECT_CHANGE_MONITOR = False
GOOGLE_OAUTH2_CLIENT_ID = None
GOOGLE_OAUTH2_CLIENT_SECRET = None
GOOGLE_OAUTH2_REDIRECT_URI = None
GITHUB_CLIENT_ID = None
GITHUB_CLIENT_SECRET = None
GITHUB_OAUTH2_CALLBACK_URI = None
USERS_IDENTITY_COL_NAME = "cosmos.users.identity"
USERS_PROFILE_FB_COL_NAME = "cosmos.users.profile.facebook"
USERS_FB_FRIENDS_COL_NAME = "cosmos.users.facebook.friends"
login_url = "/login/"
OAUTH2_SERVICE_URL = r"/(?P<tenant_id>[^\/]+)/oauth2/(?P<function>[^\/]+)/"
OAUTH2_PRIVATE_KEY_PEM = b'-----BEGIN RSA PRIVATE KEY-----\nMIIEpAIBAAKCAQEAl0RIYISOe+9F8dRkm+XQrdaVsn/d3GjufnBnFARRgceu+E6q\nWLlptI5arhckFyXjDOAUEuMnOwmISfeXHrIIp4BU6RMjqRw6ciaIhI7e3LSn5fQ7\nOwCywUaHlUkyq+zQynfH77lUC95YumyUQzGVfdiwQw8XZZYDo2wAFMKJa8heo38Z\nQ0HT788VrcuSa1f4PY9i/wRHXF+xp/9NWUE7wER8eNJjqKxkm0EUKYuB23vUFLHh\n8PG7DiATUlCCpV5txhHcNXa2iEoOGecdWg8Yk5Qs2Gq9aqacJGcgfFK9DN+2/yLn\nFEj+xMVPhB2ynILoJ9N+lfA3TE6nWVKiuriXBQIDAQABAoIBAQCAX2CVGKnbH+ra\nGofvjg+VGCEexUlBvoN4Jmg0Ip4RZ6dj70690UyWAKGQUO89/dc8nAYtKT2n6qUR\nMN+9GxYhINXun2GKKPyo127QIHeeEmrSynxhzGvnfrWdyesI4QcobJLvLPbYw6/F\nNlR02eWmUXj00B/pBHC+Be/jrlz1bF5Gwbw/RINzEJPOxVfaN2D31lotetx5WnV7\nXrTxR5ONpCnwbK8phH4/vQL3rv+ZJgKVhRM8uqd+auW5Lp57y36JFXb+g5SmkFo3\nq+mB2CfMkyip8zpJGDyyVo8XiI1jKieqaiimZ4zpJZwkClBzYsFmio60f9smMGYB\n+nQCX5iZAoGBAL6WtY9BSL0hIxMIwDh4C87rORMmy8ZW5sl91wdFHmjnqlc2Q2yS\n3uVwK32BvxQCTq6FXNRoqYO0xHSrrupSRTJD5KT9EoxpaGlqi1MSB6U6o7r41bSb\nhNwcjKJ40OSABZ/YzATOwq9+AfgU+pMZD+WNlzesYL+7QIPHyKXdwrPLAoGBAMsu\ntcUadzsZEmaaSW5xtouyZF5tWPadB6VZ0Gney8x6uWQ2+ZGLv0QRIxJP0f4cBTkY\nsPx5pUZuo7oaDzCaRH9cV2VJFBahsGrFqcsexVsKh8CfZEMD1PBptodD1Cialr9M\nL0RdSu+1lmcfRqxOXSlaMSHml/cqfOjfHOj3RaZvAoGAEG2LLtLwwySlElHxx6xJ\nUEekPstcSzdYY0vOihjiGybE3wmVXDl4rwwxI3tYjg/42kAylTiETA771BasWBRJ\nVKDXh4Us4R+A2X1OjxWBxTM9w7MJMK0rEZIAaUzCrL+APJwCUfPEgj35S3n7c0x4\nu0+uFiVsnXo1gGZrHCj2TGsCgYEApm3Ccos1MvFcgzLKB2+ZqWAcmsRS5N7Hjoe9\nEZtvsDSuewoU70VbDDRFWBCN3+mv1Y8GGijCWqjx79S8sIEMro5DADIWBFu5GByE\n8l5oJiTAAeYNyF7xI2RUIQRMWl4WMOgEp6kLYsKJSjryNt2Rrfe02yH5RHpHCrEH\nC0TQhn0CgYB0iyjs20bdGYYWNTMlSYPtf8LVhUktvGYyytA/sepRXUe13T87vjCc\nvD3utXPsuaBVGhloE7Dk5YHJdar4n5UcLITNJnu1TyRM4binlzbU4rByxVjclaSX\nGB0O/DCgCsgNFK+LFKf/N1EhRxwJKy+BLVWCIshsAxNv26u296I9jA==\n-----END RSA PRIVATE KEY-----'
OAUTH2_PUBLIC_KEY_PEM = b'-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAl0RIYISOe+9F8dRkm+XQ\nrdaVsn/d3GjufnBnFARRgceu+E6qWLlptI5arhckFyXjDOAUEuMnOwmISfeXHrII\np4BU6RMjqRw6ciaIhI7e3LSn5fQ7OwCywUaHlUkyq+zQynfH77lUC95YumyUQzGV\nfdiwQw8XZZYDo2wAFMKJa8heo38ZQ0HT788VrcuSa1f4PY9i/wRHXF+xp/9NWUE7\nwER8eNJjqKxkm0EUKYuB23vUFLHh8PG7DiATUlCCpV5txhHcNXa2iEoOGecdWg8Y\nk5Qs2Gq9aqacJGcgfFK9DN+2/yLnFEj+xMVPhB2ynILoJ9N+lfA3TE6nWVKiuriX\nBQIDAQAB\n-----END PUBLIC KEY-----'
OAUTH2_TOKEN_EXPIRY_SECONDS = 600
TENANT_ID = 'cosmosframework.com'
OAUTH2_TRUSTED_REDIRECT_URLS = ['http://localhost:8080/oauth2client/authorize/']
AUTH_PUBLIC_KEY_PEM_URL = r"/(?P<tenant_id>[^\/]+)/auth/key/"
#TODO: You should remove this processor in production environment
observers = [
{
"object_name": "test",
"function": test_observer,
"access": [AccessType.READ, AccessType.INSERT, AccessType.UPDATE, AccessType.DELETE],
"type": OBSERVER_PROCESSOR
}
]
try:
from local_settings import *
except ImportError:
pass
if DB_USER_NAME and DB_USER_PASSWORD:
DATABASE_URI = "mongodb://"+ DB_USER_NAME + ":"+ DB_USER_PASSWORD +"@"+ DB_HOST+":"+str(DB_PORT)+"/"+DB_NAME
else:
DATABASE_URI = "mongodb://"+DB_HOST+":"+str(DB_PORT)
if LOG_DB_USER_NAME and LOG_DB_USER_PASSWORD:
LOG_DATABASE_URI = "mongodb://"+ LOG_DB_USER_NAME + ":"+ LOG_DB_USER_PASSWORD +"@"+ LOG_DB_HOST+":"+str(LOG_DB_PORT)+"/"+LOG_DB_NAME
else:
LOG_DATABASE_URI = "mongodb://"+ LOG_DB_HOST+":"+str(LOG_DB_PORT)
GOOGLE_OAUTH2_SETTINGS = {"key": GOOGLE_OAUTH2_CLIENT_ID, "secret": GOOGLE_OAUTH2_CLIENT_SECRET, "redirect_uri": GOOGLE_OAUTH2_REDIRECT_URI}
GITHUB_OAUTH_SETTINGS = {"client_id": GITHUB_CLIENT_ID, "secret": GITHUB_CLIENT_SECRET, "redirect_uri": GITHUB_OAUTH2_CALLBACK_URI}
| 47.847682 | 1,732 | 0.800969 |
9123424991c5403f506995b73c11800e6232bbeb
| 274 |
py
|
Python
|
zilean/system/zilean_migrator.py
|
A-Hilaly/zilean
|
2b2e87969a0d8064e8b92b07c346a4006f93c795
|
[
"Apache-2.0"
] | null | null | null |
zilean/system/zilean_migrator.py
|
A-Hilaly/zilean
|
2b2e87969a0d8064e8b92b07c346a4006f93c795
|
[
"Apache-2.0"
] | null | null | null |
zilean/system/zilean_migrator.py
|
A-Hilaly/zilean
|
2b2e87969a0d8064e8b92b07c346a4006f93c795
|
[
"Apache-2.0"
] | null | null | null |
from .utils.migrations import (migrate_database_from,
migrate_machine_from,
zilean_rollback_database_backup,
zilean_rollback_machine_backup)
| 34.25 | 63 | 0.569343 |
912495f93184573b9203df22fc8bb27548652827
| 14,605 |
py
|
Python
|
coltran/run.py
|
DionysisChristopoulos/google-research
|
7f59ef421beef32ca16c2a7215be74f7eba01a0f
|
[
"Apache-2.0"
] | 23,901 |
2018-10-04T19:48:53.000Z
|
2022-03-31T21:27:42.000Z
|
coltran/run.py
|
DionysisChristopoulos/google-research
|
7f59ef421beef32ca16c2a7215be74f7eba01a0f
|
[
"Apache-2.0"
] | 891 |
2018-11-10T06:16:13.000Z
|
2022-03-31T10:42:34.000Z
|
coltran/run.py
|
admariner/google-research
|
7cee4b22b925581d912e8d993625c180da2a5a4f
|
[
"Apache-2.0"
] | 6,047 |
2018-10-12T06:31:02.000Z
|
2022-03-31T13:59:28.000Z
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ColTran: Training and Continuous Evaluation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import os
import time
from absl import app
from absl import flags
from absl import logging
from ml_collections import config_flags
import tensorflow as tf
import tensorflow_datasets as tfds
from coltran import datasets
from coltran.models import colorizer
from coltran.models import upsampler
from coltran.utils import train_utils
# pylint: disable=g-direct-tensorflow-import
# pylint: disable=missing-docstring
# pylint: disable=not-callable
# pylint: disable=g-long-lambda
flags.DEFINE_enum('mode', 'train', [
'train', 'eval_train', 'eval_valid', 'eval_test'], 'Operation mode.')
flags.DEFINE_string('logdir', '/tmp/svt', 'Main directory for logs.')
flags.DEFINE_string('master', 'local',
'BNS name of the TensorFlow master to use.')
flags.DEFINE_enum('accelerator_type', 'GPU', ['CPU', 'GPU', 'TPU'],
'Hardware type.')
flags.DEFINE_enum('dataset', 'imagenet', ['imagenet', 'custom'], 'Dataset')
flags.DEFINE_string('data_dir', None, 'Data directory for custom images.')
flags.DEFINE_string('tpu_worker_name', 'tpu_worker', 'Name of the TPU worker.')
flags.DEFINE_string(
'pretrain_dir', None, 'Finetune from a pretrained checkpoint.')
flags.DEFINE_string('summaries_log_dir', 'summaries', 'Summaries parent.')
flags.DEFINE_integer('steps_per_summaries', 100, 'Steps per summaries.')
flags.DEFINE_integer('devices_per_worker', 1, 'Number of devices per worker.')
flags.DEFINE_integer('num_workers', 1, 'Number workers.')
config_flags.DEFINE_config_file(
'config',
default='test_configs/colorizer.py',
help_string='Training configuration file.')
FLAGS = flags.FLAGS
def loss_on_batch(inputs, model, config, training=False):
"""Loss on a batch of inputs."""
logits, aux_output = model.get_logits(
inputs_dict=inputs, train_config=config, training=training)
loss, aux_loss_dict = model.loss(
targets=inputs, logits=logits, train_config=config, training=training,
aux_output=aux_output)
loss_factor = config.get('loss_factor', 1.0)
loss_dict = collections.OrderedDict()
loss_dict['loss'] = loss
total_loss = loss_factor * loss
for aux_key, aux_loss in aux_loss_dict.items():
aux_loss_factor = config.get(f'{aux_key}_loss_factor', 1.0)
loss_dict[aux_key] = aux_loss
total_loss += aux_loss_factor * aux_loss
loss_dict['total_loss'] = total_loss
extra_info = collections.OrderedDict([
('scalar', loss_dict),
])
return total_loss, extra_info
def train_step(config,
model,
optimizer,
metrics,
ema=None,
strategy=None):
"""Training StepFn."""
def step_fn(inputs):
"""Per-Replica StepFn."""
with tf.GradientTape() as tape:
loss, extra = loss_on_batch(inputs, model, config, training=True)
scaled_loss = loss
if strategy:
scaled_loss /= float(strategy.num_replicas_in_sync)
grads = tape.gradient(scaled_loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))
for metric_key, metric in metrics.items():
metric.update_state(extra['scalar'][metric_key])
if ema is not None:
ema.apply(model.trainable_variables)
return loss
return train_utils.step_with_strategy(step_fn, strategy)
###############################################################################
## Train.
###############################################################################
###############################################################################
## Evaluating.
###############################################################################
def evaluate(logdir, subset):
"""Executes the evaluation loop."""
config = FLAGS.config
strategy, batch_size = train_utils.setup_strategy(
config, FLAGS.master,
FLAGS.devices_per_worker, FLAGS.mode, FLAGS.accelerator_type)
model, optimizer, ema = train_utils.with_strategy(
lambda: build(config, batch_size, False), strategy)
metric_keys = ['loss', 'total_loss']
# metric_keys += model.metric_keys
metrics = {}
for metric_key in metric_keys:
func = functools.partial(tf.keras.metrics.Mean, metric_key)
curr_metric = train_utils.with_strategy(func, strategy)
metrics[metric_key] = curr_metric
checkpoints = train_utils.with_strategy(
lambda: train_utils.create_checkpoint(model, optimizer, ema),
strategy)
dataset = train_utils.dataset_with_strategy(input_fn, strategy)
num_examples = config.eval_num_examples
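  # NOTE: the nested evaluation step is not part of this excerpt; a minimal
  # version consistent with how eval_step is built and used below is sketched
  # here for illustration only.
  def step_fn(inputs):
    # Forward pass without training, then accumulate the per-batch scalars
    _, extra = loss_on_batch(inputs, model, config, training=False)
    for metric_key, metric in metrics.items():
      metric.update_state(extra['scalar'][metric_key])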
eval_step = train_utils.step_with_strategy(step_fn, strategy)
ckpt_path = None
wait_max = config.get(
'eval_checkpoint_wait_secs', config.save_checkpoint_secs * 100)
is_ema = True if ema else False
eval_summary_dir = os.path.join(
logdir, 'eval_{}_summaries_pyk_{}'.format(subset, is_ema))
writer = tf.summary.create_file_writer(eval_summary_dir)
while True:
ckpt_path = train_utils.wait_for_checkpoint(logdir, ckpt_path, wait_max)
logging.info(ckpt_path)
if ckpt_path is None:
logging.info('Timed out waiting for checkpoint.')
break
train_utils.with_strategy(
lambda: train_utils.restore(model, checkpoints, logdir, ema),
strategy)
data_iterator = iter(dataset)
num_steps = num_examples // batch_size
for metric_key, metric in metrics.items():
metric.reset_states()
logging.info('Starting evaluation.')
done = False
for i in range(0, num_steps, FLAGS.steps_per_summaries):
start_run = time.time()
for k in range(min(num_steps - i, FLAGS.steps_per_summaries)):
try:
if k % 10 == 0:
logging.info('Step: %d', (i + k + 1))
eval_step(data_iterator)
except (StopIteration, tf.errors.OutOfRangeError):
done = True
break
if done:
break
bits_per_dim = metrics['loss'].result()
logging.info('Bits/Dim: %.3f, Speed: %.3f seconds/step, Step: %d/%d',
bits_per_dim,
(time.time() - start_run) / FLAGS.steps_per_summaries,
i + k + 1, num_steps)
# logging.info('Final Bits/Dim: %.3f', bits_per_dim)
with writer.as_default():
for metric_key, metric in metrics.items():
curr_scalar = metric.result().numpy()
tf.summary.scalar(metric_key, curr_scalar, step=optimizer.iterations)
if __name__ == '__main__':
app.run(main)
| 34.940191 | 135 | 0.692503 |