max_stars_repo_path (stringlengths 4-245) | max_stars_repo_name (stringlengths 7-115) | max_stars_count (int64 101-368k) | id (stringlengths 2-8) | content (stringlengths 6-1.03M)
---|---|---|---|---
testing/MLDB-1703_null_char_import.py | kstepanmpmg/mldb | 665 | 11190996 |
#
# MLDB-1703 null char import
# June 2, 2016
# This file is part of MLDB. Copyright 2016 mldb.ai inc. All rights reserved.
#
import unittest
from mldb import mldb, MldbUnitTest, ResponseException
class Mldb1703(MldbUnitTest):
@classmethod
    def setUpClass(cls):
# this file includes a problematic character
mldb.log(mldb.put("/v1/procedures/importer", {
"type": "import.text",
"params": {
"dataFileUrl": "file://mldb/testing/dataset/MDLB-1703_data.csv",
"outputDataset": "test_case",
"named": "rowName",
"select": "* EXCLUDING(rowName)",
"runOnCreation": True,
"structuredColumnNames": True,
"allowMultiLines": False,
"replaceInvalidCharactersWith": " "
}
}))
def test_select(self):
mldb.query("select name from test_case")
def test_tokenize(self):
mldb.query("select tokenize(name) from test_case")
mldb.run_tests()
|
utils/dbpedia.py | Tijana37/chimera | 125 | 11191009 |
import json
from collections import Counter
from functools import lru_cache
from itertools import chain
from os.path import isfile
import requests
from utils.file_system import makedir
cache = "/tmp/dbpedia/"
makedir(cache)
DBPEDIA = "http://dbpedia.org/"
@lru_cache(maxsize=None)
def normalize_entity(entity: str):
return entity \
.replace("/", "%2F") \
.replace("&", "%26") \
.replace("+", "%2B")
@lru_cache(maxsize=None)
def get_dbpedia_entity(entity: str):
entity = normalize_entity(entity)
cache_ent = cache + entity + ".json"
if isfile(cache_ent):
f = open(cache_ent, "r")
content = json.load(f)
f.close()
return content
r = requests.get(url=DBPEDIA + 'data/' + entity + '.json')
content = r.json()
f = open(cache_ent, "w")
json.dump(content, f)
f.close()
return content
def english_value(entries):
filtered = [e["value"] for e in entries if e["lang"] == "en"]
if len(filtered) > 0:
return filtered[0]
return None
gender_pronouns = {
"male": ["he", "him", "his", "himself"],
"female": ["she", "her", "hers", "herself"],
"inanimate": ["it", "its", "itself"],
"plural": ["they", "them", "theirs"]
}
all_pronouns = set(chain.from_iterable(gender_pronouns.values()))
def pronouns(entity: str):
dbpedia = get_dbpedia_entity(entity)
ent_uri = DBPEDIA + 'resource/' + entity
gender_uri = "http://xmlns.com/foaf/0.1/gender"
abstract_uri = "http://dbpedia.org/ontology/abstract"
if ent_uri not in dbpedia:
# print(dbpedia)
# raise ValueError("No URI - " + entity)
return []
if gender_uri in dbpedia[ent_uri]:
gender = english_value(dbpedia[ent_uri][gender_uri])
if gender is not None:
return gender_pronouns[gender]
if abstract_uri in dbpedia[ent_uri]:
abstract = english_value(dbpedia[ent_uri][abstract_uri])
if abstract is not None:
words = Counter(abstract.lower().split())
gender_by_words = {g: sum([words[w] for w in g_words]) for g, g_words in gender_pronouns.items()}
gender = max(gender_by_words, key=gender_by_words.get)
if gender_by_words[gender] == 0:
gender = "inanimate" # Default
return gender_pronouns[gender]
return []
if __name__ == "__main__":
print("Start")
for e in ['Jalisco', 'Diane_Duane', '23rd_Street_(Manhattan)']:
print(len(get_dbpedia_entity(e)))
print("end")
print(pronouns("United_States"))
print(pronouns("Buzz_Aldrin"))
print(pronouns("Hillary_Clinton"))
print(pronouns("Italy_national_football_team"))
|
examples/article_examples/E_cartoon_plot.py | ATayls/DnaFeaturesViewer | 391 | 11191054 | from matplotlib import rc_context
from A_linear_plot import CustomTranslator
rc_context({"path.sketch": (1.5, 300, 1)}) # scale, length, randomness
class CustomTranslatorVariant(CustomTranslator):
def compute_feature_fontdict(self, feature):
return {"family": "<NAME>"}
translator = CustomTranslatorVariant()
graphic_record = translator.translate_record("plasmid.gb")
cropped_record = graphic_record.crop((0, 1850))
ax, _ = cropped_record.plot(figure_width=2.5, with_ruler=False)
ax.figure.savefig("E_cartoon_plot.png", dpi=300, bbox_inches="tight")
|
projects/perception/slam/full_map_posterior_gmapping/src/map_simulator/src/map_simulator/map_obstacles/__init__.py | makistsantekidis/opendr | 217 | 11191070 |
from .polygonal_obstacle import PolygonalObstacle
__all__ = ['PolygonalObstacle', ]
|
bindings/python/cntk/ops/tests/evaluation_test.py | shyamalschandra/CNTK | 17,702 | 11191107 |
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
"""
Unit tests for evaluation operations (grad and eval)
"""
from __future__ import division
import numpy as np
import pytest
from .ops_test_utils import _test_binary_op, AA, precision, PRECISION_TO_TYPE,\
unittest_helper
from cntk import dropout, combine
import cntk as C
def test_sequence_grad_as_numpy_false(device_id, precision):
from .. import sequence
a = sequence.input_variable(shape=(1,), dtype=PRECISION_TO_TYPE[precision], needs_gradient=True, name='a')
sequence_sum_a_plus_sequence_sum_a = sequence.reduce_sum(a) + sequence.reduce_sum(a)
a_data = [AA([[2]], dtype=PRECISION_TO_TYPE[precision]), AA([[2], [3]], dtype=PRECISION_TO_TYPE[precision]), AA([[2], [3], [4]], dtype=PRECISION_TO_TYPE[precision])]
actual_grad = sequence_sum_a_plus_sequence_sum_a.grad({a: a_data}, [a], as_numpy=False)
test_op = a + 1
result = test_op.eval({a : actual_grad})
assert np.array_equal(result[0], np.asarray([[3.]]))
assert np.array_equal(result[1], np.asarray([[3.], [3.]]))
assert np.array_equal(result[2], np.asarray([[3.], [3.], [3.]]))
def test_grad_with_no_arguments_needing_gradients():
x = C.input_variable(10)
z = dropout(x, .4)
with pytest.raises(ValueError):
_, result = z.grad({x: [np.array([5]*150, "float32").reshape(15, 10)]}, outputs=[z])
def test_eval_not_all_outputs():
x = C.input_variable(1)
x_data = [AA([3], dtype=np.float32)]
y = C.input_variable(1)
y_data = [AA([2], dtype=np.float32)]
plus_func = x + 1
minus_func = y - 1
func = combine([plus_func, minus_func])
result = func.eval({x : x_data}, [plus_func])
assert np.array_equal(result, np.asarray([[4.]]))
result = func.eval({y : y_data}, [minus_func])
assert np.array_equal(result, np.asarray([[1.]]))
def test_grad_custimized_root():
x = C.input_variable(shape=(1,), needs_gradient=True)
y = C.sqrt(x)
y2 = C.log(x)
combine = C.combine([y.output, y2.output])
a = np.asarray([1,4,16], dtype=np.float32).reshape(3,1)
grads = combine.grad({x:a}, grad_root = y.output)
expect_grad = np.asarray([[0.5],[0.25],[0.125]], dtype=np.float32)
assert np.array_equal(grads, expect_grad)
def test_constant_eval():
c = C.Constant(value=1)
c_plus_1 = c + 1
op = C.combine([c_plus_1, c])
result = op.eval({})
assert np.array_equal(result[c_plus_1.output], 2.0)
assert np.array_equal(result[c], 1.0)
def test_input_without_dynamic_axes():
x = C.input_variable(shape=(2,), dynamic_axes=[], needs_gradient=True, name='x')
assert len(x.dynamic_axes) == 0
op = x * .01 + 3.0
grad_result, eval_result = op.grad({x : np.asarray([.6, -.8], dtype=np.float32)}, outputs=[op], wrt=[x])
assert np.allclose(eval_result, [3.006, 2.992])
assert np.allclose(grad_result, [.01, .01])
w = C.parameter(init=np.asarray([[0.5], [-1.5]], dtype=np.float32))
op = C.times(x, w) + 3.0
grad_result, eval_result = op.grad({x : np.asarray([.6, -.8], dtype=np.float32)}, outputs=[op], wrt=[w])
assert np.allclose(eval_result, [4.5])
assert np.allclose(grad_result, [[.6], [-.8]])
def test_grad_after_eval():
x = C.input_variable((C.FreeDimension, 2))
w = C.parameter(init=np.asarray([[2, 5], [1, 3]], dtype=np.float32))
t = C.times(x, w)
x_data = np.asarray([[0.5, 0.2]], np.float32)
t_val = t.eval({x : x_data})
assert np.array_equal(t_val, np.asarray([[[1.2, 3.1]]], dtype=np.float32))
w_grad, t_val = t.grad({x : x_data}, wrt=[w], outputs=[t])
assert np.array_equal(t_val, np.asarray([[[1.2, 3.1]]], dtype=np.float32))
assert np.array_equal(w_grad, np.asarray([[0.5, .5], [.2, .2]], dtype=np.float32))
x_data = np.asarray([[0.5, 0.2], [0.1, .6]], np.float32)
t_val = t.eval({x : x_data})
assert np.allclose(t_val, np.asarray([[[1.2, 3.1], [0.8, 2.3]]], dtype=np.float32))
w_grad, t_val = t.grad({x : x_data}, wrt=[w], outputs=[t])
assert np.allclose(t_val, np.asarray([[[1.2, 3.1], [0.8, 2.3]]], dtype=np.float32))
assert np.array_equal(w_grad, np.asarray([[0.6, .6], [.8, .8]], dtype=np.float32))
def test_validation_before_eval():
w = C.parameter((4,C.InferredDimension))
v = C.parameter((C.InferredDimension,5))
wv = C.times(w,v)
p = C.input((4,1))
wp = C.times(w,p)
q = C.input((1,5))
qv = C.times(q,v)
with pytest.raises(ValueError):
wv.eval()
|
desktop/core/ext-py/nose-1.3.7/functional_tests/test_isolate_plugin.py | kokosing/hue | 5,079 | 11191122 | import os
import sys
import unittest
from nose.plugins.isolate import IsolationPlugin
from nose.plugins import PluginTester
support = os.path.join(os.path.dirname(__file__), 'support')
class TestDiscovery(PluginTester, unittest.TestCase):
activate = '--with-isolation'
args = ['-v']
plugins = [IsolationPlugin()]
suitepath = os.path.join(support, 'ipt')
def runTest(self):
print str(self.output)
for line in self.output:
if not line.strip():
continue
if line.startswith('-'):
break
assert line.strip().endswith('ok'), \
"Failed test: %s" % line.strip()
class TestLoadFromNames(PluginTester, unittest.TestCase):
activate = '--with-isolation'
args = ['-v', 'test1/tests.py', 'test2/tests.py']
plugins = [IsolationPlugin()]
suitepath = None
def setUp(self):
self._dir = os.getcwd()
os.chdir(os.path.join(support, 'ipt'))
super(TestLoadFromNames, self).setUp()
def tearDown(self):
os.chdir(self._dir)
super(TestLoadFromNames, self).tearDown()
def makeSuite(self):
return None
def runTest(self):
print str(self.output)
for line in self.output:
if not line.strip():
continue
if line.startswith('-'):
break
assert line.strip().endswith('ok'), \
"Failed test: %s" % line.strip()
if __name__ == '__main__':
unittest.main()
|
toad/tadpole/base.py | Padfoot-ted/toad | 325 | 11191176 | import seaborn as sns
from .utils import (
get_axes,
tadpole_axes,
FIG_SIZE,
)
class Tadpole:
def __getattr__(self, name):
t = getattr(sns, name)
if callable(t):
return self.wrapsns(t)
return t
def wrapsns(self, f):
@tadpole_axes
def wrapper(*args, figure_size = FIG_SIZE, **kwargs):
kw = kwargs.copy()
if 'ax' not in kw:
kw['ax'] = get_axes(size = figure_size)
try:
return f(*args, **kw)
except:
return f(*args, **kwargs)
return wrapper
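# Minimal usage sketch (illustrative only; assumes a seaborn version that provides
# `histplot` -- any seaborn plotting function resolved through __getattr__ above
# behaves the same way, receiving an axes sized by figure_size):
#
#     tadpole = Tadpole()
#     ax = tadpole.histplot([1, 2, 2, 3], figure_size=(12, 6))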
|
tests/unit/filters/test_xyz_filters.py | bernssolg/pyntcloud-master | 1,142 | 11191182 |
import pytest
from numpy.testing import assert_array_equal
from pyntcloud.filters.xyz import BoundingBoxFilter
@pytest.mark.usefixtures("simple_pyntcloud")
def test_BoundingBoxFilter_default_values(simple_pyntcloud):
"""
Default bounding box values are infinite so all points
should pass the filter.
"""
bbox_filter = BoundingBoxFilter(pyntcloud=simple_pyntcloud)
bbox_filter.extract_info()
result = bbox_filter.compute()
assert all(result)
@pytest.mark.parametrize("bounding_box,expected_result", [
(
{
"min_x": 0.4,
"max_x": 0.6,
"min_y": 0.4,
"max_y": 0.6
},
[False, False, False, True, False, False]
),
(
{
"min_x": 0.4,
},
[False, False, False, True, True, True]
),
(
{
"max_x": 1.,
},
[True, True, True, True, True, False]
)
])
@pytest.mark.usefixtures("simple_pyntcloud")
def test_BoundingBoxFilter_expected_results(simple_pyntcloud, bounding_box, expected_result):
bbox_filter = BoundingBoxFilter(
pyntcloud=simple_pyntcloud,
**bounding_box
)
bbox_filter.extract_info()
result = bbox_filter.compute()
assert_array_equal(result, expected_result)
|
DevOps/resources/Jenkins/scripts/notification/notifier/main.py | jaswinder9051998/Resources | 101 | 11191185 | # Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import requests
import sys
from notifier import parse
message = 'Build {}'.format('FAILED')
headers = {
'Content-Type': 'application/json',
}
payload = {
'message': message,
}
endpoint = ''
def main():
"""Main entry point."""
# Parse given arguments
parser = parse.create_parser()
args = parser.parse_args()
r = requests.post(endpoint,
                      data=json.dumps({**payload, **vars(args)}),
                      headers=headers)
    print(r.status_code)
if __name__ == '__main__':
sys.exit(main())
|
qt__pyqt__pyside__pyqode/pyqt__custom_title_bar__FramelessWindow/FramelessWindow.py | DazEB2/SimplePyScripts | 117 | 11191205 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Created on April 30, 2018
# author: Irony
# site: https://github.com/892768447
# email: <EMAIL>
# file: FramelessWindow
# description:
__Author__ = """By: Irony
QQ: 892768447
Email: <EMAIL>"""
__Copyright__ = 'Copyright (c) 2018 Irony'
__Version__ = 1.0
# SOURCE: https://github.com/892768447/PyQt/blob/f6ff3ee8bf8e7e9dd8d3ba3d39cf5cefa3c91e7b/%E6%97%A0%E8%BE%B9%E6%A1%86%E8%87%AA%E5%AE%9A%E4%B9%89%E6%A0%87%E9%A2%98%E6%A0%8F%E7%AA%97%E5%8F%A3/FramelessWindow.py
from enum import Enum, auto
from PyQt5.QtCore import Qt, pyqtSignal, QPoint
from PyQt5.QtGui import QFont, QEnterEvent, QPainter, QColor, QPen
from PyQt5.QtWidgets import QWidget, QVBoxLayout, QHBoxLayout, QLabel, QSpacerItem, QSizePolicy, QPushButton
# style
STYLE_SHEET = """
/* Title bar */
TitleBar {
background-color: rgb(54, 157, 180);
}
/* Default background shared by the minimize, maximize and close buttons */
#buttonMinimum, #buttonMaximum, #buttonClose {
border: none;
background-color: rgb(54, 157, 180);
}
/* Hover */
#buttonMinimum:hover,#buttonMaximum:hover {
background-color: rgb(48, 141, 162);
}
#buttonClose:hover {
color: white;
background-color: rgb(232, 17, 35);
}
/* Pressed */
#buttonMinimum:pressed,#buttonMaximum:pressed {
background-color: rgb(44, 125, 144);
}
#buttonClose:pressed {
color: white;
background-color: rgb(161, 73, 92);
}
"""
class TitleBar(QWidget):
    # Window minimize signal
    windowMinimumed = pyqtSignal()
    # Window maximize signal
    windowMaximumed = pyqtSignal()
    # Window restore signal
    windowNormaled = pyqtSignal()
    # Window close signal
    windowClosed = pyqtSignal()
    # Window move signal
windowMoved = pyqtSignal(QPoint)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
        # Enable qss background styling
self.setAttribute(Qt.WA_StyledBackground, True)
self._old_pos = None
        # Default icon size
self.iconSize = 20
        # Set a default background color, otherwise it would be transparent because of the parent window
self.setAutoFillBackground(True)
palette = self.palette()
palette.setColor(palette.Window, QColor(240, 240, 240))
self.setPalette(palette)
        # Window icon
self.iconLabel = QLabel()
# self.iconLabel.setScaledContents(True)
        # Window title
self.titleLabel = QLabel()
self.titleLabel.setMargin(2)
        # Use the Webdings font to render the button icons
font = self.font() or QFont()
font.setFamily('Webdings')
self.buttonMinimum = QPushButton('0', clicked=self.windowMinimumed.emit, font=font, objectName='buttonMinimum')
self.buttonMaximum = QPushButton('1', clicked=self.showMaximized, font=font, objectName='buttonMaximum')
self.buttonClose = QPushButton('r', clicked=self.windowClosed.emit, font=font, objectName='buttonClose')
        # Layout
layout = QHBoxLayout(spacing=0)
layout.setContentsMargins(0, 0, 0, 0)
layout.addWidget(self.iconLabel)
layout.addWidget(self.titleLabel)
        # Stretchable spacer in the middle
layout.addSpacerItem(QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum))
self.layout_custom_widget = QHBoxLayout()
self.layout_custom_widget.setContentsMargins(0, 0, 0, 0)
layout.addLayout(self.layout_custom_widget)
layout.addWidget(self.buttonMinimum)
layout.addWidget(self.buttonMaximum)
layout.addWidget(self.buttonClose)
self.setLayout(layout)
        # Initial height
self.setHeight()
def addWidget(self, widget, width=38, height=38):
self.layout_custom_widget.addWidget(widget)
widget.setMinimumSize(width, height)
widget.setMaximumSize(width, height)
def showMaximized(self):
if self.buttonMaximum.text() == '1':
            # Maximize
            self.buttonMaximum.setText('2')
            self.windowMaximumed.emit()
        else:  # Restore
self.buttonMaximum.setText('1')
self.windowNormaled.emit()
def setHeight(self, height=38):
""" Установка высоты строки заголовка """
self.setMinimumHeight(height)
self.setMaximumHeight(height)
        # Set the size of the buttons on the right
self.buttonMinimum.setMinimumSize(height, height)
self.buttonMinimum.setMaximumSize(height, height)
self.buttonMaximum.setMinimumSize(height, height)
self.buttonMaximum.setMaximumSize(height, height)
self.buttonClose.setMinimumSize(height, height)
self.buttonClose.setMaximumSize(height, height)
def setTitle(self, title):
""" Установить заголовок """
self.titleLabel.setText(title)
def setIcon(self, icon):
""" настройки значокa """
self.iconLabel.setPixmap(icon.pixmap(self.iconSize, self.iconSize))
def setIconSize(self, size):
""" Установить размер значка """
self.iconSize = size
def enterEvent(self, event):
self.setCursor(Qt.ArrowCursor)
super().enterEvent(event)
def mouseDoubleClickEvent(self, event):
super().mouseDoubleClickEvent(event)
self.showMaximized()
def mousePressEvent(self, event):
""" Событие клика мыши """
if event.button() == Qt.LeftButton:
self._old_pos = event.pos()
event.accept()
def mouseReleaseEvent(self, event):
""" Событие отказов мыши """
self._old_pos = None
event.accept()
def mouseMoveEvent(self, event):
if event.buttons() == Qt.LeftButton and self._old_pos:
self.windowMoved.emit(self.mapToGlobal(event.pos() - self._old_pos))
event.accept()
# Enumerate the edges and corners used as resize handles
class Direction(Enum):
LEFT = auto()
TOP = auto()
RIGHT = auto()
BOTTOM = auto()
LEFT_TOP = auto()
RIGHT_TOP = auto()
LEFT_BOTTOM = auto()
RIGHT_BOTTOM = auto()
class FramelessWindow(QWidget):
    # Margin width on all four sides
MARGINS = 7
def __init__(self):
super().__init__()
self.setStyleSheet(STYLE_SHEET)
self._old_pos = None
self._direction = None
self._widget = None
        # Transparent background
self.setAttribute(Qt.WA_TranslucentBackground, True)
        # No window frame
self.setWindowFlags(self.windowFlags() | Qt.FramelessWindowHint)
# TODO: AttributeError: 'FramelessWindow' object has no attribute 'setWindowFlag'
# self.setWindowFlag(Qt.FramelessWindowHint)
        # Enable mouse tracking
self.setMouseTracking(True)
        # Layout
        layout = QVBoxLayout(spacing=0)
        # Reserve margins so the frameless window can be resized
        layout.setContentsMargins(self.MARGINS, self.MARGINS, self.MARGINS, self.MARGINS)
        # Title bar
self.titleBar = TitleBar(self)
layout.addWidget(self.titleBar)
self.setLayout(layout)
        # Signal/slot connections
self.titleBar.windowMinimumed.connect(self.showMinimized)
self.titleBar.windowMaximumed.connect(self.showMaximized)
self.titleBar.windowNormaled.connect(self.showNormal)
self.titleBar.windowClosed.connect(self.close)
self.titleBar.windowMoved.connect(self.move)
self.windowTitleChanged.connect(self.titleBar.setTitle)
self.windowIconChanged.connect(self.titleBar.setIcon)
def setTitleBarHeight(self, height=38):
""" Установка высоты строки заголовка """
self.titleBar.setHeight(height)
def setIconSize(self, size):
""" Установка размера значка """
self.titleBar.setIconSize(size)
def setWidget(self, widget):
""" Настройте свои собственные элементы управления """
self._widget = widget
# Установите цвет фона по умолчанию, иначе он будет прозрачным из-за влияния родительского окна
self._widget.setAutoFillBackground(True)
palette = self._widget.palette()
palette.setColor(palette.Window, QColor(240, 240, 240))
self._widget.setPalette(palette)
self._widget.installEventFilter(self)
self.layout().addWidget(self._widget)
def move(self, pos):
if self.windowState() == Qt.WindowMaximized or self.windowState() == Qt.WindowFullScreen:
            # Moving is not allowed while maximized or in full screen
return
super().move(pos)
def showMaximized(self):
""" Чтобы максимизировать, удалите верхнюю, нижнюю, левую и правую границы.
Если вы не удалите его, в пограничной области будут пробелы. """
super().showMaximized()
self.layout().setContentsMargins(0, 0, 0, 0)
def showNormal(self):
""" Восстановить, сохранить верхнюю и нижнюю левую и правую границы,
иначе нет границы, которую нельзя отрегулировать """
super().showNormal()
self.layout().setContentsMargins(self.MARGINS, self.MARGINS, self.MARGINS, self.MARGINS)
def eventFilter(self, obj, event):
""" Фильтр событий, используемый для решения мыши в других элементах
управления и восстановления стандартного стиля мыши """
if isinstance(event, QEnterEvent):
self.setCursor(Qt.ArrowCursor)
return super().eventFilter(obj, event)
def paintEvent(self, event):
""" Поскольку это полностью прозрачное фоновое окно, жесткая для поиска
граница с прозрачностью 1 рисуется в событии перерисовывания, чтобы отрегулировать размер окна. """
super().paintEvent(event)
painter = QPainter(self)
painter.setPen(QPen(QColor(255, 255, 255, 1), 2 * self.MARGINS))
painter.drawRect(self.rect())
def mousePressEvent(self, event):
""" Событие клика мыши """
super().mousePressEvent(event)
if event.button() == Qt.LeftButton:
self._old_pos = event.pos()
def mouseReleaseEvent(self, event):
""" Событие отказов мыши """
super().mouseReleaseEvent(event)
self._old_pos = None
self._direction = None
def mouseMoveEvent(self, event):
""" Событие перемещения мыши """
super().mouseMoveEvent(event)
pos = event.pos()
xPos, yPos = pos.x(), pos.y()
wm, hm = self.width() - self.MARGINS, self.height() - self.MARGINS
if self.isMaximized() or self.isFullScreen():
self._direction = None
self.setCursor(Qt.ArrowCursor)
return
if event.buttons() == Qt.LeftButton and self._old_pos:
self._resizeWidget(pos)
return
if xPos <= self.MARGINS and yPos <= self.MARGINS:
            # Top-left corner
self._direction = Direction.LEFT_TOP
self.setCursor(Qt.SizeFDiagCursor)
elif wm <= xPos <= self.width() and hm <= yPos <= self.height():
            # Bottom-right corner
self._direction = Direction.RIGHT_BOTTOM
self.setCursor(Qt.SizeFDiagCursor)
elif wm <= xPos and yPos <= self.MARGINS:
            # Top-right corner
self._direction = Direction.RIGHT_TOP
self.setCursor(Qt.SizeBDiagCursor)
elif xPos <= self.MARGINS and hm <= yPos:
            # Bottom-left corner
self._direction = Direction.LEFT_BOTTOM
self.setCursor(Qt.SizeBDiagCursor)
elif 0 <= xPos <= self.MARGINS and self.MARGINS <= yPos <= hm:
            # Left edge
self._direction = Direction.LEFT
self.setCursor(Qt.SizeHorCursor)
elif wm <= xPos <= self.width() and self.MARGINS <= yPos <= hm:
            # Right edge
self._direction = Direction.RIGHT
self.setCursor(Qt.SizeHorCursor)
elif self.MARGINS <= xPos <= wm and 0 <= yPos <= self.MARGINS:
            # Top edge
self._direction = Direction.TOP
self.setCursor(Qt.SizeVerCursor)
elif self.MARGINS <= xPos <= wm and hm <= yPos <= self.height():
            # Bottom edge
self._direction = Direction.BOTTOM
self.setCursor(Qt.SizeVerCursor)
else:
            # Default cursor
self.setCursor(Qt.ArrowCursor)
def _resizeWidget(self, pos):
""" Отрегулируйте размер окна """
if self._direction is None:
return
mpos = pos - self._old_pos
xPos, yPos = mpos.x(), mpos.y()
geometry = self.geometry()
x, y, w, h = geometry.x(), geometry.y(), geometry.width(), geometry.height()
        if self._direction == Direction.LEFT_TOP:  # Top-left corner
if w - xPos > self.minimumWidth():
x += xPos
w -= xPos
if h - yPos > self.minimumHeight():
y += yPos
h -= yPos
        elif self._direction == Direction.RIGHT_BOTTOM:  # Bottom-right corner
if w + xPos > self.minimumWidth():
w += xPos
self._old_pos = pos
if h + yPos > self.minimumHeight():
h += yPos
self._old_pos = pos
        elif self._direction == Direction.RIGHT_TOP:  # Top-right corner
if h - yPos > self.minimumHeight():
y += yPos
h -= yPos
if w + xPos > self.minimumWidth():
w += xPos
self._old_pos.setX(pos.x())
        elif self._direction == Direction.LEFT_BOTTOM:  # Bottom-left corner
if w - xPos > self.minimumWidth():
x += xPos
w -= xPos
if h + yPos > self.minimumHeight():
h += yPos
self._old_pos.setY(pos.y())
        elif self._direction == Direction.LEFT:  # Left edge
if w - xPos > self.minimumWidth():
x += xPos
w -= xPos
else:
return
        elif self._direction == Direction.RIGHT:  # Right edge
if w + xPos > self.minimumWidth():
w += xPos
self._old_pos = pos
else:
return
        elif self._direction == Direction.TOP:  # Top edge
if h - yPos > self.minimumHeight():
y += yPos
h -= yPos
else:
return
        elif self._direction == Direction.BOTTOM:  # Bottom edge
if h + yPos > self.minimumHeight():
h += yPos
self._old_pos = pos
else:
return
self.setGeometry(x, y, w, h)
if __name__ == '__main__':
import sys
from PyQt5.QtWidgets import QApplication, QTextEdit
app = QApplication(sys.argv)
w = FramelessWindow()
w.setWindowTitle('Test')
    # Add the custom content widget
w.setWidget(QTextEdit("Hello World!", w))
w.show()
sys.exit(app.exec_())
|
Packs/PerceptionPoint/Integrations/PerceptionPoint/PerceptionPoint.py | diCagri/content | 799 | 11191211 |
import demistomock as demisto
from CommonServerPython import *
''' IMPORTS'''
import requests
import json
from collections import defaultdict
''' INTEGRATION PARAMS '''
URL = 'http://api.perception-point.io/api/v1/{endpoint}' # disable-secrets-detection
INCIDENTS_ENDPOINT = 'scans/incidents/'
RELEASE_ENDPOINT = 'quarantine/release/{id_}'
USER_PARAMS = demisto.params()
SECURED = not USER_PARAMS.get('insecure', False)
PP_TOKEN = USER_PARAMS.get('pp_token', None)
if PP_TOKEN is None:
return_error('Perception Point token is mandatory. '
'Please enter your token or contact PerceptionPoint support for assistance')
try:
API_MAX_LOOPS = int(USER_PARAMS.get('api_loops', 1))
except Exception:
API_MAX_LOOPS = 1
HEADER = {'Authorization': f'Token {PP_TOKEN}'}
''' CONSTANTS '''
RELEASE = 'release'
LIST = 'list'
API_ACTIONS_DICT = {RELEASE: RELEASE_ENDPOINT,
LIST: INCIDENTS_ENDPOINT}
SPAM = 'SPM'
BLOCKED = 'BLK'
MALICIOUS = 'MAL'
API_CURSOR_ARG = '_cursor'
VERBOSE_VERDICT_PARAM = 'verbose_verdict[]'
FETCH_INCIDENTS_TYPE = [{'demisto_param': 'fetch_malicious',
'req_pname': VERBOSE_VERDICT_PARAM,
'req_pval': MALICIOUS},
{'demisto_param': 'fetch_blocked',
'req_pname': VERBOSE_VERDICT_PARAM,
'req_pval': BLOCKED},
{'demisto_param': 'fetch_spam',
'req_pname': VERBOSE_VERDICT_PARAM,
'req_pval': SPAM}]
''' HELPER FUNCTIONS '''
def build_fetch_incident_types(fetch_blocked, fetch_malicious, fetch_spam):
fetch_type_dict = defaultdict(list) # type: ignore
fetch_select = {
'fetch_blocked': fetch_blocked,
'fetch_malicious': fetch_malicious,
'fetch_spam': fetch_spam
}
for darg in FETCH_INCIDENTS_TYPE:
darg_input = fetch_select.get(darg['demisto_param'])
if darg_input:
fetch_type_dict[darg['req_pname']].append(darg.get('req_pval', darg_input))
return dict(fetch_type_dict)
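# For example, following the mapping above, build_fetch_incident_types(True, True, False)
# returns {'verbose_verdict[]': ['MAL', 'BLK']}.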
def create_incident(record):
record.pop('Attachment', None)
record['RawJSON'] = json.dumps(record)
return record
def collect_incidents(params):
list_url = build_request_url(LIST)
api_res = get_pp_api_result(list_url, params)
num_of_results = api_res.get('count')
incidents = [] # type: list
api_loops = 0
while num_of_results and api_loops < API_MAX_LOOPS:
incidents += map(create_incident, api_res.get('results'))
if api_res.get('next'):
api_res = get_pp_api_result(api_res.get('next'), {})
num_of_results = api_res.get('count')
api_loops += 1
return incidents
def report_incidents(incidents_list):
demisto.incidents(incidents_list)
def get_pp_api_result(url, params):
try:
res = requests.get(url=url,
params=params,
headers=HEADER,
verify=SECURED)
res.raise_for_status()
try:
res_content = res.json()
except Exception:
res_content = {}
return res_content
except requests.exceptions.HTTPError as err:
if 400 <= res.status_code < 500:
return_error('Invalid token')
else:
return_error(err)
except Exception as err:
return_error(err)
def build_request_url(api_action):
return URL.format(endpoint=API_ACTIONS_DICT.get(api_action))
def command_fetch_incidents():
try:
fetch_blocked = USER_PARAMS.get('fetch_blocked')
fetch_spam = USER_PARAMS.get('fetch_spam')
fetch_malicious = USER_PARAMS.get('fetch_malicious')
req_args = build_fetch_incident_types(fetch_blocked, fetch_malicious, fetch_spam)
last_run_id = int(demisto.getLastRun().get('scan_id', 0))
req_args[API_CURSOR_ARG] = last_run_id
incidents_list = collect_incidents(req_args)
report_incidents(incidents_list)
if incidents_list:
last_run_id = max(last_run_id, int(incidents_list[-1].get('Scan Id')))
demisto.setLastRun({'scan_id': int(last_run_id)})
except Exception as err:
return_error(f'An error occurred while trying to fetch new incidents. '
f'Please contact PerceptionPoint support for more info. {err}')
def release_email_and_get_message(scan_id_to_release):
try:
release_url = build_request_url(RELEASE).format(id_=scan_id_to_release)
_ = get_pp_api_result(release_url, {})
return f'Email with id {scan_id_to_release} was released Successfully!'
except Exception:
raise
def command_release_email():
try:
scan_id_to_release = demisto.args().get('scan_id')
entry = {
'Type': entryTypes['note'],
'ReadableContentsFormat': formats['markdown']
}
email_release_response = release_email_and_get_message(scan_id_to_release)
entry.update({'Contents': email_release_response,
'ContentsFormat': formats['text'],
'EntryContext': {'PP.Released': scan_id_to_release}}
)
demisto.results(entry)
except Exception as err:
return_error(f'An error occurred while trying to release email. '
f'Please contact PerceptionPoint support for more info\n. {err}')
def test_command():
list_url = build_request_url(LIST)
if get_pp_api_result(list_url, {}):
demisto.results('ok')
''' COMMAND CLASSIFIER'''
try:
handle_proxy()
if demisto.command() == 'test-module':
test_command()
if demisto.command() == 'fetch-incidents':
command_fetch_incidents()
if demisto.command() == 'pp-release-email':
command_release_email()
except Exception as e:
LOG(str(e))
message = f'Unexpected error: {e} \n'
LOG(message)
LOG.print_log()
return_error(message)
|
exoplanet-ml/beam/astrowavenet/prediction_fns.py | ritwik12/exoplanet-ml | 286 | 11191243 |
# Copyright 2018 The Exoplanet ML Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Beam DoFns for making and saving predictions using an AstroWavenet model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os.path
import apache_beam as beam
import numpy as np
import tensorflow as tf
from astrowavenet import astrowavenet_model
from astrowavenet.data import kepler_light_curves
from astrowavenet.util import estimator_util
from tf_util import config_util
from tf_util import example_util
def _get_step_from_checkpoint_path(path):
"""Extracts the global step from a checkpoint path."""
split_path = path.rsplit("model.ckpt-", 1)
if len(split_path) != 2:
raise ValueError("Unrecognized checkpoint path: {}".format(path))
return int(split_path[1])
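# For example, _get_step_from_checkpoint_path("/tmp/model.ckpt-12345") returns 12345.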
class MakePredictionsDoFn(beam.DoFn):
"""Generates predictions for a particular checkpoint."""
def __init__(self, hparams, dataset_overrides):
"""Initializes the DoFn."""
self.hparams = hparams
self.dataset_overrides = dataset_overrides
def process(self, inputs):
checkpoint_path, input_file_pattern = inputs
global_step = _get_step_from_checkpoint_path(checkpoint_path)
# Create the input_fn.
dataset_builder = kepler_light_curves.KeplerLightCurves(
input_file_pattern,
mode=tf.estimator.ModeKeys.PREDICT,
config_overrides=self.dataset_overrides)
tf.logging.info("Dataset config: %s",
config_util.to_json(dataset_builder.config))
input_fn = estimator_util.create_input_fn(dataset_builder)
# Create the estimator.
estimator = estimator_util.create_estimator(astrowavenet_model.AstroWaveNet,
self.hparams)
# Generate predictions.
for predictions in estimator.predict(
input_fn, checkpoint_path=checkpoint_path):
# Add global_step.
predictions["global_step"] = global_step
# Squeeze and un-pad the sequences.
weights = np.squeeze(predictions["seq_weights"])
real_length = len(weights)
while real_length > 0 and weights[real_length - 1] == 0:
real_length -= 1
for name, value in predictions.items():
value = np.squeeze(predictions[name])
if value.shape:
value = value[0:real_length]
predictions[name] = value
yield predictions
class SaveLossesDoFn(beam.DoFn):
"""Writes losses for a particular global step to a csv file."""
def __init__(self, output_dir):
self.output_dir = output_dir
def start_bundle(self):
if not tf.gfile.Exists(self.output_dir):
tf.gfile.MakeDirs(self.output_dir)
def process(self, inputs):
# Unpack the inputs and sort predictions by loss.
global_step, all_predictions = inputs
all_predictions = sorted(all_predictions, key=lambda p: p["mean_loss"])
if not all_predictions:
return
# Write the CSV.
csv_filename = os.path.join(self.output_dir, "{}.csv".format(global_step))
with tf.gfile.Open(csv_filename, "w") as f:
fieldnames = ["example_id", "mean_loss"]
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
for predictions in all_predictions:
writer.writerow({
"example_id": predictions["example_id"],
"mean_loss": predictions["mean_loss"],
})
class SavePredictionsDoFn(beam.DoFn):
"""Writes predictions for a particular example to a TFRecord file."""
def __init__(self, output_dir):
self.output_dir = output_dir
def start_bundle(self):
if not tf.gfile.Exists(self.output_dir):
tf.gfile.MakeDirs(self.output_dir)
def process(self, inputs):
# Unpack the inputs and sort predictions by global step.
example_id, all_predictions = inputs
all_predictions = sorted(all_predictions, key=lambda p: p["global_step"])
if not all_predictions:
return
filename = os.path.join(self.output_dir, "{}.tfrecord".format(example_id))
with tf.python_io.TFRecordWriter(filename) as writer:
for predictions in all_predictions:
ex = tf.train.Example()
for name, value in predictions.items():
if name == "example_id":
continue
if not np.shape(value):
value = [value]
example_util.set_feature(ex, name, value)
writer.write(ex.SerializeToString())
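# A rough wiring sketch (hypothetical names; the real pipeline definition lives
# elsewhere in the repository):
#
#     with beam.Pipeline() as p:
#         _ = (p
#              | beam.Create([(checkpoint_path, input_file_pattern)])
#              | beam.ParDo(MakePredictionsDoFn(hparams, dataset_overrides))
#              | beam.Map(lambda pred: (pred["example_id"], pred))
#              | beam.GroupByKey()
#              | beam.ParDo(SavePredictionsDoFn(output_dir)))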
|
codigo/Live15/determinante_0.py | cassiasamp/live-de-python | 572 | 11191247 | from threading import Thread
from queue import Queue
from time import sleep
q = Queue()
matriz = [[2, 9],
[-1, 6]]
def principal(mat):
sleep(10)
q.put(mat[0][0] * mat[1][1])
def secundaria(mat):
count = 0
while q.empty():
count += 1
print(count)
q.put(mat[1][0] * mat[0][1])
t_p = Thread(target=principal, kwargs={'mat': matriz}, name='principal')
t_p.start()
t_s = Thread(target=secundaria, kwargs={'mat': matriz}, name='secundária')
t_s.start()
t_s.join()
print(q.queue)
val_p = q.queue[0]
val_s = q.queue[1]
print(val_p - val_s)
|
distribution/bin/web-console-dep-lister.py | RomaKoks/druid | 5,813 | 11191294 |
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
# Helper program for listing the deps in the compiled web-console-<VERSION>.js file in druid-console.jar
if len(sys.argv) != 2:
sys.stderr.write('usage: program <web-console js path>\n')
sys.exit(1)
web_console_path = sys.argv[1]
dep_dict = {}
with open(web_console_path, 'r') as web_console_file:
for line in web_console_file.readlines():
match_result = re.match('/\*\*\*/ "\./node_modules/([\@\-a-zA-Z0-9_]+)/.*', line)
if match_result != None:
dependency_name = match_result.group(1)
dep_dict[dependency_name] = True
for dep in dep_dict:
print(dep) |
presidio-image-redactor/presidio_image_redactor/image_pii_verify_engine.py | vtols/presidio | 1,408 | 11191316 | from PIL import Image, ImageChops
from presidio_image_redactor.image_analyzer_engine import ImageAnalyzerEngine
import matplotlib
import io
from matplotlib import pyplot as plt
def fig2img(fig):
"""Convert a Matplotlib figure to a PIL Image and return it."""
buf = io.BytesIO()
fig.savefig(buf)
buf.seek(0)
img = Image.open(buf)
return img
class ImagePiiVerifyEngine:
"""ImagePiiVerifyEngine class only supporting Pii verification currently."""
def __init__(self):
self.analyzer_engine = ImageAnalyzerEngine()
def verify(self, image: Image) -> Image:
"""Annotate image with the detect PII entity.
Please notice, this method duplicates the image, creates a new instance and
manipulate it.
:param image: PIL Image to be processed
:return: the annotated image
"""
image = ImageChops.duplicate(image)
image_x, image_y = image.size
analyzer_engine = ImageAnalyzerEngine()
bboxes = analyzer_engine.analyze(image)
fig, ax = plt.subplots()
image_r = 70
fig.set_size_inches(image_x / image_r, image_y / image_r)
if len(bboxes) == 0:
return image
else:
for box in bboxes:
entity_type = box.entity_type
x0 = box.left
y0 = box.top
x1 = x0 + box.width
y1 = y0 + box.height
rect = matplotlib.patches.Rectangle(
(x0, y0), x1 - x0, y1 - y0, edgecolor="b", facecolor="none"
)
ax.add_patch(rect)
ax.annotate(
entity_type,
xy=(x0 - 3, y0 - 3),
xycoords="data",
bbox=dict(boxstyle="round4,pad=.5", fc="0.9"),
)
ax.imshow(image)
im_from_fig = fig2img(fig)
im_resized = im_from_fig.resize((image_x, image_y))
return im_resized
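# Minimal usage sketch (illustrative only; "scanned_form.png" is a placeholder path):
#
#     from PIL import Image
#     engine = ImagePiiVerifyEngine()
#     annotated = engine.verify(Image.open("scanned_form.png"))
#     annotated.show()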
|
maro/cli/process/utils/default_param.py | yangboz/maro | 598 | 11191319 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
process_setting = {
"redis_info": {
"host": "localhost",
"port": 19999
},
"redis_mode": "MARO", # one of MARO, customized. customized Redis won't exit after maro process clear.
"parallel_level": 1,
"keep_agent_alive": 1, # If 0 (False), agents will exit after 5 minutes of no pending jobs and running jobs.
"check_interval": 60, # seconds
"agent_countdown": 5 # how many times to shutdown agents about finding no job in Redis.
}
|
webs/bilibili/tasks/get_animations_full_data.py | billvsme/videoSpider | 216 | 11191381 | # -*- coding: utf-8 -*-
import requests
import models
from config import sqla
from helpers import random_str
from gevent.pool import Pool
from webs.bilibili import parsers
from sqlalchemy.exc import IntegrityError, InvalidRequestError
bilibili_animation_url = 'http://www.bilibili.com/bangumi/i/'
cookies = {
'sid': ''
}
def create_requests_and_save_datas(bilibili_id):
session = sqla['session']
cookies['sid'] = random_str(8)
r = requests.get(
bilibili_animation_url + str(bilibili_id),
cookies=cookies,
timeout=10
)
if r.status_code != 200:
return
data = parsers.animation.start_parser(r.text)
animation = session.query(models.Animation).filter_by(
bilibili_id=bilibili_id
).one()
genre_class = models.AnimationGenre
for k, v in data.items():
if k == 'genres':
for genre in v:
try:
genre_obj = genre_class(**genre)
session.add(genre_obj)
session.commit()
except (IntegrityError, InvalidRequestError):
session.rollback()
genre_obj = session.query(genre_class).filter_by(
name=genre['name']
).one()
animation.genres.append(genre_obj)
for k, v in data.items():
if k != 'genres':
if type(v) == list:
v = str(v)
setattr(animation, k, v)
animation.is_detail = True
session.commit()
print(','.join(
[bilibili_id, data.get('title')]
))
def task(bilibili_ids, pool_number):
pool = Pool(pool_number)
for bilibili_id in bilibili_ids:
pool.spawn(
create_requests_and_save_datas,
bilibili_id=bilibili_id,
)
pool.join()
|
vumi/transports/vumi_bridge/__init__.py | seidu626/vumi | 199 | 11191404 | from vumi.transports.vumi_bridge.vumi_bridge import GoConversationTransport
__all__ = [
'GoConversationTransport',
]
|
neptune/new/internal/utils/generic_attribute_mapper.py | Raalsky/neptune-client | 254 | 11191426 | #
# Copyright (c) 2021, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from neptune.new.internal.backends.api_model import AttributeType
class NoValue:
pass
VALUE = "value"
LAST_VALUE = "last"
VALUES = "values"
atomic_attribute_types_map = {
AttributeType.FLOAT.value: "floatProperties",
AttributeType.INT.value: "intProperties",
AttributeType.BOOL.value: "boolProperties",
AttributeType.STRING.value: "stringProperties",
AttributeType.DATETIME.value: "datetimeProperties",
AttributeType.RUN_STATE.value: "experimentStateProperties",
AttributeType.NOTEBOOK_REF.value: "notebookRefProperties",
}
value_series_attribute_types_map = {
AttributeType.FLOAT_SERIES.value: "floatSeriesProperties",
AttributeType.STRING_SERIES.value: "stringSeriesProperties",
}
value_set_attribute_types_map = {
AttributeType.STRING_SET.value: "stringSetProperties",
}
# TODO: nicer mapping?
_unmapped_attribute_types_map = {
AttributeType.FILE_SET.value: "fileSetProperties", # TODO: return size?
AttributeType.FILE.value: "fileProperties", # TODO: name? size?
AttributeType.IMAGE_SERIES.value: "imageSeriesProperties", # TODO: return last step?
AttributeType.GIT_REF.value: "gitRefProperties", # TODO: commit? branch?
}
def map_attribute_result_to_value(attribute):
for attribute_map, value_key in [
(atomic_attribute_types_map, VALUE),
(value_series_attribute_types_map, LAST_VALUE),
(value_set_attribute_types_map, VALUES),
]:
source_property = attribute_map.get(attribute.type)
if source_property is not None:
mapped_attribute_entry = getattr(attribute, source_property)
return getattr(mapped_attribute_entry, value_key)
return NoValue
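# For example, an attribute of type FLOAT resolves through "floatProperties" and its
# "value" field, a FLOAT_SERIES attribute through "floatSeriesProperties" and "last",
# and unmapped types (e.g. FILE, GIT_REF) fall through to NoValue.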
|
stethoscope/plugins/sources/jamf/utils.py | HoriaHorvat/stethoscope | 1,865 | 11191460 |
# vim: set fileencoding=utf-8 :
from __future__ import absolute_import, print_function, unicode_literals
import pprint
import logbook
import six.moves
logger = logbook.Logger(__name__)
def _parse_parameter_dict(item_dict):
"""Convert a mapping with 'name' and 'value' keys into a key-value tuple.
>>> _parse_parameter_dict({
... u'id': 65,
... u'name': u'Computer Model',
... u'type': u'String',
... u'value': u'MacBook Pro (Retina, 15-inch, Mid 2015)',
... })
(u'Computer Model', u'MacBook Pro (Retina, 15-inch, Mid 2015)')
>>> _parse_parameter_dict({
... u'value': u'MacBook Pro (Retina, 15-inch, Mid 2015)',
... })
Traceback (most recent call last):
...
KeyError: 'name'
"""
try:
name = item_dict['name']
value = item_dict['value']
# appears there is only the string type at the moment (2015-12-17)
# vtype = item_dict['type']
# not using 'id'
# item_dict['id']
except KeyError:
logger.error("incorrect input for parameter: {!s}".format(pprint.pformat(item_dict)))
raise
return (name, value)
def _parse_parameter_list(dict_list):
"""Create a mapping from the given list of item-describing dictionaries.
>>> returned = _parse_parameter_list([
... {u'id': 68,
... u'name': u'Carbon Black Installed',
... u'type': u'String',
... u'value': u'Installed'},
... {u'id': 65,
... u'name': u'Computer Model',
... u'type': u'String',
... u'value': u'MacBook Pro (Retina, 15-inch, Mid 2015)'},
... ])
>>> returned == {
... u'Computer Model': u'MacBook Pro (Retina, 15-inch, Mid 2015)',
... u'Carbon Black Installed': u'Installed',
... }
True
"""
return dict(six.moves.map(_parse_parameter_dict, dict_list))
|
pytorch_projects/integral_human_pose/_init_paths.py | KGMSFT/integral-human-pose | 472 | 11191498 | import os
import sys
def add_path(path):
if path not in sys.path:
sys.path.insert(0, path)
this_dir = os.path.dirname(__file__)
add_path(os.path.join(this_dir, '..', '..', 'common'))
add_path(os.path.join(this_dir, '..', 'common_pytorch'))
add_path(os.path.join(this_dir, '..', '..'))
add_path(os.path.join(this_dir, '..'))
add_path(os.path.join(this_dir))
print("=================SYS PATH================\n")
for path in sys.path:
print(path)
print("\n=================SYS PATH================")
|
unittest/scripts/py_dev_api_examples/concepts/Setting_the_Current_Schema.py | mueller/mysql-shell | 119 | 11191510 | from mysqlsh import mysqlx
# Direct connect with no client side default schema defined
mySession = mysqlx.get_session('mike:paSSw0rd@localhost')
mySession.set_current_schema("test")
|
tests/test_lang/test_spacy_language.py | nth-attempt/whatlies | 325 | 11191514 |
import pytest
import numpy as np
from spacy.vocab import Vocab
from spacy.language import Language
from whatlies.language import SpacyLanguage
@pytest.fixture()
def color_lang():
vector_data = {
"red": np.array([1.0, 0.0]),
"green": np.array([0.5, 0.5]),
"blue": np.array([0.0, 1.0]),
"purple": np.array([0.0, 1.0]),
}
vocab = Vocab(strings=list(vector_data.keys()))
for word, vector in vector_data.items():
vocab.set_vector(word, vector)
nlp = Language(vocab=vocab)
return SpacyLanguage(nlp)
def test_basic_usage(color_lang):
queries = [
"green is blue and yellow",
"purple is red and blue",
"purple isn't same as red!",
"red and blue is a like a glue!",
]
emb = color_lang[queries]
assert len(emb) == 4
assert emb[queries[0]].name == "green is blue and yellow"
assert emb[queries[0]].vector.shape == (2,)
def test_score_similar_one(color_lang):
scores = color_lang.score_similar("blue", n=2, prob_limit=None, lower=False)
print(scores)
assert all([s[1] == 0 for s in scores])
assert "blue" in [s[0].name for s in scores]
assert "purple" in [s[0].name for s in scores]
def test_single_token_words(color_lang):
# test for issue here: https://github.com/RasaHQ/whatlies/issues/5
assert len(color_lang["red"].vector) > 0
|
packages/pyright-internal/src/tests/samples/typeNarrowingLocalConst1.py | Jasha10/pyright | 3,934 | 11191575 |
# This sample tests the case where a local (constant) variable that
# is assigned a narrowing expression can be used in a type guard condition.
# These are sometimes referred to as "aliased conditional expressions".
from typing import Optional, Union
import random
class A:
a: int
class B:
b: int
def func1(x: Union[A, B]) -> None:
is_a = not not isinstance(x, A)
if not is_a:
reveal_type(x, expected_text="B")
else:
reveal_type(x, expected_text="A")
def func2(x: Union[A, B]) -> None:
is_a = isinstance(x, A)
if random.random() < 0.5:
x = B()
if is_a:
reveal_type(x, expected_text="B | A")
else:
reveal_type(x, expected_text="B | A")
def func3(x: Optional[int]):
is_number = x != None
if is_number:
reveal_type(x, expected_text="int")
else:
reveal_type(x, expected_text="None")
def func4() -> Optional[A]:
return A() if random.random() < 0.5 else None
maybe_a1 = func4()
is_a1 = maybe_a1
if is_a1:
reveal_type(maybe_a1, expected_text="A")
else:
reveal_type(maybe_a1, expected_text="None")
maybe_a2 = func4()
def func5():
global maybe_a2
maybe_a2 = False
is_a2 = maybe_a2
if is_a2:
reveal_type(maybe_a2, expected_text="A | None")
else:
reveal_type(maybe_a2, expected_text="A | None")
def func6(x: Union[A, B]) -> None:
is_a = isinstance(x, A)
for y in range(1):
if is_a:
reveal_type(x, expected_text="A | B")
else:
reveal_type(x, expected_text="A | B")
if random.random() < 0.5:
x = B()
def get_string() -> str:
...
def get_optional_string() -> Optional[str]:
...
def func7(val: Optional[str] = None):
val = get_optional_string()
val_is_none = val is None
if val_is_none:
val = get_string()
reveal_type(val, expected_text="str")
def func8(val: Optional[str] = None):
val = get_optional_string()
val_is_none = val is None
val = get_optional_string()
if val_is_none:
val = get_string()
reveal_type(val, expected_text="str | None")
def func9(var: Optional[str] = None):
if var_not_None := not (var is None):
reveal_type(var, expected_text="str")
reveal_type(var, expected_text="str | None")
if var_not_None:
reveal_type(var, expected_text="str")
if 1 > 1 + 2:
var = None
else:
var = "a" + "b"
if var_not_None:
reveal_type(var, expected_text="Literal['ab'] | None")
|
pwncat/modules/linux/enumerate/system/aslr.py | Mitul16/pwncat | 1,454 | 11191606 |
#!/usr/bin/env python3
from pwncat.db import Fact
from pwncat.platform.linux import Linux
from pwncat.modules.enumerate import EnumerateModule
class ASLRStateData(Fact):
def __init__(self, source, state):
super().__init__(source=source, types=["system.aslr"])
self.state: int = state
""" the value of /proc/sys/kernel/randomize_va_space """
def title(self, session):
if self.state == 0:
return "[green]disabled[/green]"
return "[red]enabled[/red]"
class Module(EnumerateModule):
"""
Determine whether or not ASLR is enabled or disabled.
:return:
"""
PROVIDES = ["system.aslr"]
PLATFORM = [Linux]
def enumerate(self, session):
try:
with session.platform.open(
"/proc/sys/kernel/randomize_va_space", "r"
) as filp:
value = filp.read()
try:
value = int(value)
except ValueError:
value = None
if value is not None:
yield ASLRStateData(self.name, value)
except (FileNotFoundError, PermissionError):
pass
|
conans/test/functional/cross_building/build_helper_test.py | matthiasng/conan | 6,205 | 11191633 |
import textwrap
import unittest
from conans.test.utils.tools import TestClient
class BuildHelperTest(unittest.TestCase):
def test_autotools_helper(self):
client = TestClient()
conanfile = textwrap.dedent("""
from conans import ConanFile, AutoToolsBuildEnvironment
class Pkg(ConanFile):
def build(self):
AutoToolsBuildEnvironment(self)
""")
client.save({"conanfile.py": conanfile,
"host": "",
"build": ""})
client.run("create . pkg/1.0@ --profile:build=build --profile:host=host")
self.assertIn("Configuration (profile_host):", client.out)
self.assertIn("Configuration (profile_build):", client.out)
self.assertIn("pkg/1.0: Calling build()", client.out)
self.assertIn("pkg/1.0: Created package", client.out)
|
tests/unit/viz/test_basemaps.py | manmorjim/cartoframes | 236 | 11191692 | from cartoframes.viz import basemaps
class TestBasemaps(object):
def test_is_defined(self):
"basemaps"
assert basemaps is not None
def test_has_defined_basemaps(self):
"basemaps content"
assert basemaps.positron == 'Positron'
assert basemaps.darkmatter == 'DarkMatter'
assert basemaps.voyager == 'Voyager'
|
notebook/pandas_tz_convert_tz_localize.py | vhn0912/python-snippets | 174 | 11191697 |
import pandas as pd
s = '2018-01-01T12:00+09:00'
print(s)
# 2018-01-01T12:00+09:00
print(type(s))
# <class 'str'>
ts = pd.to_datetime(s)
print(ts)
# 2018-01-01 12:00:00+09:00
print(type(ts))
# <class 'pandas._libs.tslibs.timestamps.Timestamp'>
print(ts.tz)
# pytz.FixedOffset(540)
ts_utc = pd.to_datetime(s, utc=True)
print(ts_utc)
# 2018-01-01 03:00:00+00:00
print(ts_utc.tz)
# UTC
s_without_tz = '2018-01-01T12:00'
ts_naive = pd.to_datetime(s_without_tz)
print(ts_naive)
# 2018-01-01 12:00:00
print(ts_naive.tz)
# None
ts_set_utc = pd.to_datetime(s_without_tz, utc=True)
print(ts_set_utc)
# 2018-01-01 12:00:00+00:00
print(ts_set_utc.tz)
# UTC
print(ts_utc)
# 2018-01-01 03:00:00+00:00
print(ts_utc.tz)
# UTC
ts_jst = ts_utc.tz_convert('Asia/Tokyo')
print(ts_jst)
# 2018-01-01 12:00:00+09:00
print(ts_jst.tz)
# Asia/Tokyo
print(ts_utc.value)
# 1514775600000000000
print(ts_jst.value)
# 1514775600000000000
print(ts_utc == ts_jst)
# True
ts_pst = ts_utc.tz_convert('US/Pacific')
print(ts_pst)
# 2017-12-31 19:00:00-08:00
print(ts_pst.tz)
# US/Pacific
print(ts_utc.tz_convert('America/Los_Angeles'))
# 2017-12-31 19:00:00-08:00
print(ts_utc.tz_convert('America/Vancouver'))
# 2017-12-31 19:00:00-08:00
print(ts_naive)
# 2018-01-01 12:00:00
print(ts_naive.tz)
# None
# print(ts_naive.tz_convert('Asia/Tokyo'))
# TypeError: Cannot convert tz-naive Timestamp, use tz_localize to localize
ts_jst_localize = ts_naive.tz_localize('Asia/Tokyo')
print(ts_jst_localize)
# 2018-01-01 12:00:00+09:00
print(ts_jst_localize.tz)
# Asia/Tokyo
print(ts_naive.tz_localize('US/Pacific'))
# 2018-01-01 12:00:00-08:00
print(ts_naive.tz_localize('Asia/Tokyo') == ts_naive.tz_localize('US/Pacific'))
# False
print(ts_jst)
# 2018-01-01 12:00:00+09:00
print(ts_jst.tz)
# Asia/Tokyo
# print(ts_jst.tz_localize('US/Pacific'))
# TypeError: Cannot localize tz-aware Timestamp, use tz_convert for conversions
print(ts_jst)
# 2018-01-01 12:00:00+09:00
print(ts_jst.tz)
# Asia/Tokyo
print(ts_jst.tz_convert(None))
# 2018-01-01 03:00:00
print(ts_jst.tz_localize(None))
# 2018-01-01 12:00:00
df = pd.DataFrame({'date': ['2018-01-01T12:00',
'2018-01-02T00:00',
'2018-01-03T10:00',
'2018-01-03T19:00'],
'value': ['A', 'B', 'C', 'D']})
print(df)
# date value
# 0 2018-01-01T12:00 A
# 1 2018-01-02T00:00 B
# 2 2018-01-03T10:00 C
# 3 2018-01-03T19:00 D
s_naive = pd.to_datetime(df['date'])
print(s_naive)
# 0 2018-01-01 12:00:00
# 1 2018-01-02 00:00:00
# 2 2018-01-03 10:00:00
# 3 2018-01-03 19:00:00
# Name: date, dtype: datetime64[ns]
print(s_naive[0])
# 2018-01-01 12:00:00
print(type(s_naive[0]))
# <class 'pandas._libs.tslibs.timestamps.Timestamp'>
print(s_naive[0].tz)
# None
s_utc = pd.to_datetime(df['date'], utc=True)
print(s_utc)
# 0 2018-01-01 12:00:00+00:00
# 1 2018-01-02 00:00:00+00:00
# 2 2018-01-03 10:00:00+00:00
# 3 2018-01-03 19:00:00+00:00
# Name: date, dtype: datetime64[ns, UTC]
print(s_utc[0].tz)
# UTC
# print(s_naive.tz_localize('Asia/Tokyo'))
# TypeError: index is not a valid DatetimeIndex or PeriodIndex
# print(s_utc.tz_convert('Asia/Tokyo'))
# TypeError: index is not a valid DatetimeIndex or PeriodIndex
print(s_naive.dt.tz_localize('Asia/Tokyo'))
# 0 2018-01-01 12:00:00+09:00
# 1 2018-01-02 00:00:00+09:00
# 2 2018-01-03 10:00:00+09:00
# 3 2018-01-03 19:00:00+09:00
# Name: date, dtype: datetime64[ns, Asia/Tokyo]
print(s_utc.dt.tz_convert('Asia/Tokyo'))
# 0 2018-01-01 21:00:00+09:00
# 1 2018-01-02 09:00:00+09:00
# 2 2018-01-03 19:00:00+09:00
# 3 2018-01-04 04:00:00+09:00
# Name: date, dtype: datetime64[ns, Asia/Tokyo]
# print(s_naive.dt.tz_convert('Asia/Tokyo'))
# TypeError: Cannot convert tz-naive timestamps, use tz_localize to localize
# print(s_utc.dt.tz_localize('Asia/Tokyo'))
# TypeError: Already tz-aware, use tz_convert to convert.
# print(df['date'].dt.tz_localize('Asia/Tokyo'))
# AttributeError: Can only use .dt accessor with datetimelike values
df['date'] = pd.to_datetime(df['date'])
df_ts = df.set_index('date')
print(df_ts)
# value
# date
# 2018-01-01 12:00:00 A
# 2018-01-02 00:00:00 B
# 2018-01-03 10:00:00 C
# 2018-01-03 19:00:00 D
print(df_ts.index)
# DatetimeIndex(['2018-01-01 12:00:00', '2018-01-02 00:00:00',
# '2018-01-03 10:00:00', '2018-01-03 19:00:00'],
# dtype='datetime64[ns]', name='date', freq=None)
print(type(df_ts.index))
# <class 'pandas.core.indexes.datetimes.DatetimeIndex'>
print(df_ts['2018-01-03'])
# value
# date
# 2018-01-03 10:00:00 C
# 2018-01-03 19:00:00 D
print(df_ts.index.tz_localize('Asia/Tokyo'))
# DatetimeIndex(['2018-01-01 12:00:00+09:00', '2018-01-02 00:00:00+09:00',
# '2018-01-03 10:00:00+09:00', '2018-01-03 19:00:00+09:00'],
# dtype='datetime64[ns, Asia/Tokyo]', name='date', freq=None)
print(df_ts.tz_localize('Asia/Tokyo'))
# value
# date
# 2018-01-01 12:00:00+09:00 A
# 2018-01-02 00:00:00+09:00 B
# 2018-01-03 10:00:00+09:00 C
# 2018-01-03 19:00:00+09:00 D
s_ts = df_ts['value']
print(s_ts)
# date
# 2018-01-01 12:00:00 A
# 2018-01-02 00:00:00 B
# 2018-01-03 10:00:00 C
# 2018-01-03 19:00:00 D
# Name: value, dtype: object
print(s_ts.tz_localize('Asia/Tokyo'))
# date
# 2018-01-01 12:00:00+09:00 A
# 2018-01-02 00:00:00+09:00 B
# 2018-01-03 10:00:00+09:00 C
# 2018-01-03 19:00:00+09:00 D
# Name: value, dtype: object
df = pd.DataFrame({'date': ['2018-01-01T12:00+09:00',
'2018-01-02T00:00+09:00',
'2018-01-03T10:00+09:00',
'2018-01-03T19:00+09:00'],
'value': ['A', 'B', 'C', 'D']})
print(df)
# date value
# 0 2018-01-01T12:00+09:00 A
# 1 2018-01-02T00:00+09:00 B
# 2 2018-01-03T10:00+09:00 C
# 3 2018-01-03T19:00+09:00 D
print(pd.to_datetime(df['date']))
# 0 2018-01-01 12:00:00+09:00
# 1 2018-01-02 00:00:00+09:00
# 2 2018-01-03 10:00:00+09:00
# 3 2018-01-03 19:00:00+09:00
# Name: date, dtype: datetime64[ns, pytz.FixedOffset(540)]
print(pd.to_datetime(df['date'], utc=True))
# 0 2018-01-01 03:00:00+00:00
# 1 2018-01-01 15:00:00+00:00
# 2 2018-01-03 01:00:00+00:00
# 3 2018-01-03 10:00:00+00:00
# Name: date, dtype: datetime64[ns, UTC]
print(pd.to_datetime(df['date']).dt.tz_convert('US/Pacific'))
# 0 2017-12-31 19:00:00-08:00
# 1 2018-01-01 07:00:00-08:00
# 2 2018-01-02 17:00:00-08:00
# 3 2018-01-03 02:00:00-08:00
# Name: date, dtype: datetime64[ns, US/Pacific]
df['date'] = pd.to_datetime(df['date'])
df_ts = df.set_index('date')
print(df_ts)
# value
# date
# 2018-01-01 12:00:00+09:00 A
# 2018-01-02 00:00:00+09:00 B
# 2018-01-03 10:00:00+09:00 C
# 2018-01-03 19:00:00+09:00 D
print(df_ts.index)
# DatetimeIndex(['2018-01-01 12:00:00+09:00', '2018-01-02 00:00:00+09:00',
# '2018-01-03 10:00:00+09:00', '2018-01-03 19:00:00+09:00'],
# dtype='datetime64[ns, pytz.FixedOffset(540)]', name='date', freq=None)
print(df_ts.tz_convert('US/Pacific'))
# value
# date
# 2017-12-31 19:00:00-08:00 A
# 2018-01-01 07:00:00-08:00 B
# 2018-01-02 17:00:00-08:00 C
# 2018-01-03 02:00:00-08:00 D
df = pd.DataFrame({'date': ['2018-01-01T12:00+09:00',
'2018-01-02T00:00+09:00',
'2018-01-03T10:00-05:00',
'2018-01-03T19:00-08:00'],
'value': ['A', 'B', 'C', 'D']})
print(df)
# date value
# 0 2018-01-01T12:00+09:00 A
# 1 2018-01-02T00:00+09:00 B
# 2 2018-01-03T10:00-05:00 C
# 3 2018-01-03T19:00-08:00 D
print(pd.to_datetime(df['date']))
# 0 2018-01-01 12:00:00+09:00
# 1 2018-01-02 00:00:00+09:00
# 2 2018-01-03 10:00:00-05:00
# 3 2018-01-03 19:00:00-08:00
# Name: date, dtype: object
print(type(pd.to_datetime(df['date'])[0]))
# <class 'datetime.datetime'>
print(pd.to_datetime(df['date'])[0].tzinfo)
# tzoffset(None, 32400)
print(pd.to_datetime(df['date'])[2].tzinfo)
# tzoffset(None, -18000)
print(pd.to_datetime(df['date'], utc=True))
# 0 2018-01-01 03:00:00+00:00
# 1 2018-01-01 15:00:00+00:00
# 2 2018-01-03 15:00:00+00:00
# 3 2018-01-04 03:00:00+00:00
# Name: date, dtype: datetime64[ns, UTC]
print(type(pd.to_datetime(df['date'], utc=True)[0]))
# <class 'pandas._libs.tslibs.timestamps.Timestamp'>
# print(pd.to_datetime(df['date']).dt.tz_convert('Asia/Tokyo'))
# ValueError: Tz-aware datetime.datetime cannot be converted to datetime64 unless utc=True
df['date'] = pd.to_datetime(df['date'])
df_dt = df.set_index('date')
print(df_dt)
# value
# date
# 2018-01-01 12:00:00+09:00 A
# 2018-01-02 00:00:00+09:00 B
# 2018-01-03 10:00:00-05:00 C
# 2018-01-03 19:00:00-08:00 D
print(df_dt.index)
# Index([2018-01-01 12:00:00+09:00, 2018-01-02 00:00:00+09:00,
# 2018-01-03 10:00:00-05:00, 2018-01-03 19:00:00-08:00],
# dtype='object', name='date')
# print(df_dt.tz_convert('Asia/Tokyo'))
# TypeError: index is not a valid DatetimeIndex or PeriodIndex
# print(df_dt.tz_localize('Asia/Tokyo'))
# TypeError: index is not a valid DatetimeIndex or PeriodIndex
|
bookwyrm/views/imports/manually_review.py | mouse-reeve/fedireads | 270 | 11191720 | """ verify books we're unsure about """
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.http import require_POST
from bookwyrm import models
from bookwyrm.importers.importer import import_item_task
from bookwyrm.settings import PAGE_LENGTH
# pylint: disable= no-self-use
@method_decorator(login_required, name="dispatch")
class ImportManualReview(View):
"""problems items in an existing import"""
def get(self, request, job_id):
"""status of an import job"""
job = get_object_or_404(models.ImportJob, id=job_id)
if job.user != request.user:
raise PermissionDenied()
items = job.items.order_by("index").filter(
book__isnull=True, book_guess__isnull=False
)
paginated = Paginator(items, PAGE_LENGTH)
page = paginated.get_page(request.GET.get("page"))
data = {
"job": job,
"items": page,
"page_range": paginated.get_elided_page_range(
page.number, on_each_side=2, on_ends=1
),
"complete": True,
}
return TemplateResponse(request, "import/manual_review.html", data)
@login_required
@require_POST
# pylint: disable=unused-argument
def approve_import_item(request, job_id, item_id):
"""we guessed right"""
item = get_object_or_404(
models.ImportItem, id=item_id, job__id=job_id, book_guess__isnull=False
)
item.fail_reason = None
item.book = item.book_guess
item.book_guess = None
item.save()
# the good stuff - actually import the data
import_item_task.delay(item.id)
return redirect("import-review", job_id)
@login_required
@require_POST
# pylint: disable=unused-argument
def delete_import_item(request, job_id, item_id):
"""we guessed right"""
item = get_object_or_404(
models.ImportItem, id=item_id, job__id=job_id, book_guess__isnull=False
)
item.book_guess = None
item.save()
return redirect("import-review", job_id)
|
scripts/result_mdtable_generator.py | Spaskich/NLP-Cube | 488 | 11191724 | # This script reads the results.json file and converts it to a results.md file
import json, collections
def extract_language_from_test_file(file):
# ex: "/home/ubuntu/ud-treebanks-v2.2/UD_Afrikaans-AfriBooms/af_afribooms-ud-test.txt"
parts = file.split("/")
dir = parts[-2].replace("UD_","")
dir = dir[:dir.find("-")]
return dir.replace("_"," ")
all = json.load(open("results.json","r"))
lall = {}
for model, elem in all.items():
language = extract_language_from_test_file(elem["test_file"])
print(language)
if language not in lall:
lall[language] = {}
lall[language][model] = elem
all = collections.OrderedDict(sorted(lall.items()))
header = "|Language|Model|Token|Sentence|UPOS|XPOS|AllTags|Lemmas|UAS|LAS|\n"
header += "|--------|-----|:---:|:------:|:--:|:--:|:-----:|:----:|:-:|:-:|\n"
rows = []
for language, langdict in all.items():
lrow = "|"+language+"|\n"
rows.append(lrow)
langdict = collections.OrderedDict(sorted(langdict.items()))
for model, elem in langdict.items():
row = "| |"
row += model + "|"
row += str(round(elem["metrics"]["Tokens"]["f1"],2)) + "|"
row += str(round(elem["metrics"]["Sentences"]["f1"],2)) + "|"
row += str(round(elem["metrics"]["UPOS"]["f1"],2)) + "|"
row += str(round(elem["metrics"]["XPOS"]["f1"],2)) + "|"
row += str(round(elem["metrics"]["AllTags"]["f1"],2)) + "|"
row += str(round(elem["metrics"]["Lemmas"]["f1"],2)) + "|"
row += str(round(elem["metrics"]["UAS"]["f1"],2)) + "|"
row += str(round(elem["metrics"]["LAS"]["f1"],2)) + "|\n"
rows.append(row)
with open("results.md","w",encoding="utf8") as f:
f.write(header)
for row in rows:
f.write(row) |
neon/data/aeon_shim.py | rsketine/neon | 4,415 | 11191733 | <reponame>rsketine/neon
# ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
import sys
from neon import logger as neon_logger
from neon.data.dataloaderadapter import DataLoaderAdapter
try:
from aeon import DataLoader as AeonLoader
except ImportError:
neon_logger.error('Unable to load Aeon data loading module.')
neon_logger.error('Please follow installation instructions at:')
neon_logger.error('https://github.com/NervanaSystems/aeon')
sys.exit(1)
def AeonDataLoader(config, adapter=True):
if adapter:
return DataLoaderAdapter(AeonLoader(config))
else:
return AeonLoader(config)
|
IQA_pytorch/VIF.py | dingkeyan93/IQA-pytorch | 203 | 11191744 | <filename>IQA_pytorch/VIF.py<gh_stars>100-1000
import torch
from torch import nn
from torch.nn import functional as F
import numpy as np
from torchvision import transforms
from .utils import fspecial_gauss
from .SteerPyrSpace import SteerablePyramidSpace
import math
class VIF(torch.nn.Module):
# Refer to https://live.ece.utexas.edu/research/Quality/VIF.htm
def __init__(self, channels=3, level=4, ori=6, device = torch.device("cuda")):
super(VIF, self).__init__()
self.ori = ori-1
self.level = level
self.channels = channels
self.M=3
self.subbands=[4, 7, 10, 13, 16, 19, 22, 25]
self.sigma_nsq=0.4
self.tol = 1e-12
def corrDn(self, image, filt, step=1, channels=1,start=[0,0],end=[0,0]):
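        # Correlate the image with `filt` (reflect padding), then downsample by
        # `step` over the [start, end) window -- analogous to the corrDn helper
        # in the original MATLAB VIF code, re-expressed here with conv2d.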
filt_ = torch.from_numpy(filt).float().unsqueeze(0).unsqueeze(0).repeat(channels,1,1,1).to(image.device)
p = (filt_.shape[2]-1)//2
image = F.pad(image, (p,p,p,p),'reflect')
img = F.conv2d(image, filt_, stride=1, padding=0, groups = channels)
img = img[:,:,start[0]:end[0]:step,start[1]:end[1]:step]
return img
def vifsub_est_M(self, org, dist):
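        # For each subband, estimate the distortion-channel parameters of the
        # VIF model over local windows: gain g and additive-noise variance vv,
        # assuming dist ~ g * org + noise.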
g_all = []
vv_all = []
for i in range(len(self.subbands)):
sub=self.subbands[i]-1
y=org[sub]
yn=dist[sub]
lev=np.ceil((sub-1)/6)
winsize=int(2**lev+1)
win = np.ones((winsize,winsize))
newsizeX=int(np.floor(y.shape[2]/self.M)*self.M)
newsizeY=int(np.floor(y.shape[3]/self.M)*self.M)
y=y[:,:,:newsizeX,:newsizeY]
yn=yn[:,:,:newsizeX,:newsizeY]
winstart=[int(1*np.floor(self.M/2)),int(1*np.floor(self.M/2))]
winend=[int(y.shape[2]-np.ceil(self.M/2))+1,int(y.shape[3]-np.ceil(self.M/2))+1]
mean_x = self.corrDn(y,win/(winsize**2),step=self.M, channels=self.channels,start=winstart,end=winend)
mean_y = self.corrDn(yn,win/(winsize**2),step=self.M, channels=self.channels,start=winstart,end=winend)
cov_xy = self.corrDn(y*yn, win, step=self.M, channels=self.channels,start=winstart,end=winend) - (winsize**2)*mean_x*mean_y
ss_x = self.corrDn(y**2,win, step=self.M, channels=self.channels,start=winstart,end=winend) - (winsize**2)*mean_x**2
ss_y = self.corrDn(yn**2,win, step=self.M, channels=self.channels,start=winstart,end=winend) - (winsize**2)*mean_y**2
ss_x = F.relu(ss_x)
ss_y = F.relu(ss_y)
g = cov_xy/(ss_x+self.tol)
vv = (ss_y - g*cov_xy)/(winsize**2)
g = g.masked_fill(ss_x < self.tol,0)
vv [ss_x < self.tol] = ss_y [ss_x < self.tol]
ss_x = ss_x.masked_fill(ss_x < self.tol,0)
g = g.masked_fill(ss_y < self.tol,0)
vv = vv.masked_fill(ss_y < self.tol,0)
vv[g<0]=ss_y[g<0]
g = F.relu(g)
vv = vv.masked_fill(vv < self.tol, self.tol)
g_all.append(g)
vv_all.append(vv)
return g_all, vv_all
def refparams_vecgsm(self, org):
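        # Estimate the GSM (Gaussian scale mixture) source-model parameters for
        # each reference subband: the scalar multiplier field ss and the
        # eigenvalues of the MxM block covariance cu.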
ssarr, l_arr, cu_arr = [], [], []
for i in range(len(self.subbands)):
sub=self.subbands[i]-1
y=org[sub]
M = self.M
newsizeX=int(np.floor(y.shape[2]/M)*M)
newsizeY=int(np.floor(y.shape[3]/M)*M)
y=y[:,:,:newsizeX,:newsizeY]
B,C,H,W = y.shape
temp=[]
for j in range(M):
for k in range(M):
temp.append(y[:,:,k:H-(M-k)+1, j:W-(M-j)+1].reshape(B,C,-1))
temp = torch.stack(temp,dim=3)
mcu = torch.mean(temp,dim=2).unsqueeze(2).repeat(1,1,temp.shape[2],1)
cu=torch.matmul((temp-mcu).permute(0,1,3,2),temp-mcu)/temp.shape[2]
temp=[]
for j in range(M):
for k in range(M):
temp.append(y[:,:,k:H+1:M, j:W+1:M].reshape(B,C,-1))
temp = torch.stack(temp,dim=2)
ss=torch.matmul(torch.pinverse(cu),temp)
# ss = torch.matmul(torch.pinverse(cu),temp)
ss=torch.sum(ss*temp,dim=2)/(M*M)
ss=ss.reshape(B,C,H//M,W//M)
v,_ = torch.symeig(cu,eigenvectors=True)
l_arr.append(v)
ssarr.append(ss)
cu_arr.append(cu)
return ssarr, l_arr, cu_arr
def vif(self, x, y):
sp_x = SteerablePyramidSpace(x, height=self.level, order=self.ori, channels=self.channels)[::-1]
sp_y = SteerablePyramidSpace(y, height=self.level, order=self.ori, channels=self.channels)[::-1]
g_all, vv_all = self.vifsub_est_M(sp_y, sp_x)
ss_arr, l_arr, cu_arr = self.refparams_vecgsm(sp_y)
num, den = [], []
for i in range(len(self.subbands)):
sub=self.subbands[i]
g=g_all[i]
vv=vv_all[i]
ss=ss_arr[i]
lamda = l_arr[i]
neigvals=lamda.shape[2]
lev=np.ceil((sub-1)/6)
winsize=2**lev+1
offset=(winsize-1)/2
offset=int(np.ceil(offset/self.M))
_,_,H,W = g.shape
g= g[:,:,offset:H-offset,offset:W-offset]
vv=vv[:,:,offset:H-offset,offset:W-offset]
ss=ss[:,:,offset:H-offset,offset:W-offset]
temp1=0
temp2=0
for j in range(neigvals):
cc = lamda[:,:,j].unsqueeze(2).unsqueeze(3)
temp1=temp1+torch.sum(torch.log2(1+g*g*ss*cc/(vv+self.sigma_nsq)),dim=[2,3])
temp2=temp2+torch.sum(torch.log2(1+ss*cc/(self.sigma_nsq)),dim=[2,3])
num.append(temp1.mean(1))
den.append(temp2.mean(1))
return torch.stack(num,dim=1).sum(1)/(torch.stack(den,dim=1).sum(1)+1e-12)
def forward(self, y, x, as_loss=True):
assert x.shape == y.shape
x = x * 255
y = y * 255
if as_loss:
score = self.vif(x, y)
return 1 - score.mean()
else:
with torch.no_grad():
score = self.vif(x, y)
return score
if __name__ == '__main__':
from PIL import Image
import argparse
from utils import prepare_image
parser = argparse.ArgumentParser()
parser.add_argument('--ref', type=str, default='images/r0.png')
parser.add_argument('--dist', type=str, default='images/r1.png')
args = parser.parse_args()
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
ref = prepare_image(Image.open(args.ref).convert("L"),repeatNum=1).to(device)
dist = prepare_image(Image.open(args.dist).convert("L"),repeatNum=1).to(device)
dist.requires_grad_(True)
model = VIF(channels=1)
score = model(dist, ref, as_loss=False)
print('score: %.4f' % score.item())
# score: 0.1804
|
tests/test_apis/test_utils.py | hongxuenong/mmocr | 2,261 | 11191795 | # Copyright (c) OpenMMLab. All rights reserved.
import copy
import os
import pytest
from mmcv import Config
from mmocr.apis.utils import (disable_text_recog_aug_test,
replace_image_to_tensor)
@pytest.mark.parametrize('cfg_file', [
'../configs/textrecog/sar/sar_r31_parallel_decoder_academic.py',
])
def test_disable_text_recog_aug_test(cfg_file):
tmp_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
config_file = os.path.join(tmp_dir, cfg_file)
cfg = Config.fromfile(config_file)
test = cfg.data.test.datasets[0]
# cfg.data.test.type is 'OCRDataset'
cfg1 = copy.deepcopy(cfg)
test1 = copy.deepcopy(test)
test1.pipeline = cfg1.data.test.pipeline
cfg1.data.test = test1
cfg1 = disable_text_recog_aug_test(cfg1, set_types=['test'])
assert cfg1.data.test.pipeline[1].type != 'MultiRotateAugOCR'
# cfg.data.test.type is 'UniformConcatDataset'
# and cfg.data.test.pipeline is list[dict]
cfg2 = copy.deepcopy(cfg)
test2 = copy.deepcopy(test)
test2.pipeline = cfg2.data.test.pipeline
cfg2.data.test.datasets = [test2]
cfg2 = disable_text_recog_aug_test(cfg2, set_types=['test'])
assert cfg2.data.test.pipeline[1].type != 'MultiRotateAugOCR'
assert cfg2.data.test.datasets[0].pipeline[1].type != 'MultiRotateAugOCR'
# cfg.data.test.type is 'ConcatDataset'
cfg3 = copy.deepcopy(cfg)
test3 = copy.deepcopy(test)
test3.pipeline = cfg3.data.test.pipeline
cfg3.data.test = Config(dict(type='ConcatDataset', datasets=[test3]))
cfg3 = disable_text_recog_aug_test(cfg3, set_types=['test'])
assert cfg3.data.test.datasets[0].pipeline[1].type != 'MultiRotateAugOCR'
# cfg.data.test.type is 'UniformConcatDataset'
# and cfg.data.test.pipeline is list[list[dict]]
cfg4 = copy.deepcopy(cfg)
test4 = copy.deepcopy(test)
test4.pipeline = cfg4.data.test.pipeline
cfg4.data.test.datasets = [[test4], [test]]
cfg4.data.test.pipeline = [
cfg4.data.test.pipeline, cfg4.data.test.pipeline
]
cfg4 = disable_text_recog_aug_test(cfg4, set_types=['test'])
assert cfg4.data.test.datasets[0][0].pipeline[1].type != \
'MultiRotateAugOCR'
# cfg.data.test.type is 'UniformConcatDataset'
# and cfg.data.test.pipeline is None
cfg5 = copy.deepcopy(cfg)
test5 = copy.deepcopy(test)
test5.pipeline = copy.deepcopy(cfg5.data.test.pipeline)
cfg5.data.test.datasets = [test5]
cfg5.data.test.pipeline = None
cfg5 = disable_text_recog_aug_test(cfg5, set_types=['test'])
assert cfg5.data.test.datasets[0].pipeline[1].type != 'MultiRotateAugOCR'
@pytest.mark.parametrize('cfg_file', [
'../configs/textdet/psenet/psenet_r50_fpnf_600e_ctw1500.py',
])
def test_replace_image_to_tensor(cfg_file):
tmp_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
config_file = os.path.join(tmp_dir, cfg_file)
cfg = Config.fromfile(config_file)
test = cfg.data.test.datasets[0]
# cfg.data.test.pipeline is list[dict]
# and cfg.data.test.datasets is list[dict]
cfg1 = copy.deepcopy(cfg)
test1 = copy.deepcopy(test)
test1.pipeline = copy.deepcopy(cfg.data.test.pipeline)
cfg1.data.test.datasets = [test1]
cfg1 = replace_image_to_tensor(cfg1, set_types=['test'])
assert cfg1.data.test.pipeline[1]['transforms'][3][
'type'] == 'DefaultFormatBundle'
assert cfg1.data.test.datasets[0].pipeline[1]['transforms'][3][
'type'] == 'DefaultFormatBundle'
# cfg.data.test.pipeline is list[list[dict]]
# and cfg.data.test.datasets is list[list[dict]]
cfg2 = copy.deepcopy(cfg)
test2 = copy.deepcopy(test)
test2.pipeline = copy.deepcopy(cfg.data.test.pipeline)
cfg2.data.test.datasets = [[test2], [test2]]
cfg2.data.test.pipeline = [
cfg2.data.test.pipeline, cfg2.data.test.pipeline
]
cfg2 = replace_image_to_tensor(cfg2, set_types=['test'])
assert cfg2.data.test.pipeline[0][1]['transforms'][3][
'type'] == 'DefaultFormatBundle'
assert cfg2.data.test.datasets[0][0].pipeline[1]['transforms'][3][
'type'] == 'DefaultFormatBundle'
|
testcases/ch2o_tests/node/Concat.py | vermashresth/chainer-compiler | 116 | 11191796 | # coding: utf-8
import chainer
import chainer.functions as F
class ConcatTuple(chainer.Chain):
def forward(self, x, y):
return F.concat((x, y))
class ConcatList(chainer.Chain):
def forward(self, x, y):
return F.concat([x, y])
# ======================================
from chainer_compiler import ch2o
import numpy as np
if __name__ == '__main__':
v = np.random.rand(7, 4, 2).astype(np.float32)
w = np.random.rand(7, 3, 2).astype(np.float32)
ch2o.generate_testcase(ConcatTuple, [v, w])
ch2o.generate_testcase(ConcatList, [v, w], subname='list')
|
ghidra_9.0/Ghidra/Features/Python/data/jython-2.7.1/Lib/_fsum.py | ChristopherMorrison/ghidra | 577 | 11191868 | #!/usr/bin/env python
from sys import float_info
import math
mant_dig = float_info.mant_dig
etiny = float_info.min_exp - mant_dig
def fsum(iterable):
"""Full precision summation. Compute sum(iterable) without any
intermediate accumulation of error. Based on the 'lsum' function
at http://code.activestate.com/recipes/393090/
"""
tmant, texp = 0, 0
for x in iterable:
mant, exp = math.frexp(x)
mant, exp = int(math.ldexp(mant, mant_dig)), exp - mant_dig
if texp > exp:
tmant <<= texp-exp
texp = exp
else:
mant <<= exp-texp
tmant += mant
# Round tmant * 2**texp to a float. The original recipe
# used float(str(tmant)) * 2.0**texp for this, but that's
# a little unsafe because str -> float conversion can't be
# relied upon to do correct rounding on all platforms.
tail = max(len(bin(abs(tmant)))-2 - mant_dig, etiny - texp)
if tail > 0:
h = 1 << (tail-1)
tmant = tmant // (2*h) + bool(tmant & h and tmant & 3*h-1)
texp += tail
return math.ldexp(tmant, texp)
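# Illustrative check (not part of the original module): this pure-Python fsum
# is expected to agree with math.fsum, while naive left-to-right summation
# accumulates rounding error.
if __name__ == '__main__':
    values = [0.1] * 10
    print(sum(values))   # typically 0.9999999999999999
    print(fsum(values))  # full-precision result
    assert fsum(values) == math.fsum(values)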
|
src/genie/libs/parser/junos/tests/ShowInterfacesDescriptions/cli/equal/golden_output2_expected.py | balmasea/genieparser | 204 | 11191901 | expected_output = {
'interface-information':{
'physical-interface':[
{
'name':'ge-0/0/0',
'admin-status':'up',
'oper-status':'up',
'description':'none/100G/in/hktGCS002_ge-0/0/0 more description'
}
]
}
} |
scripts/timing/core_domain.py | wetgi/lagom | 109 | 11191931 | <gh_stars>100-1000
class SomeOtherThingAsAsingleton:
def work(self):
return 1
class SomeService:
def __init__(self, other: SomeOtherThingAsAsingleton):
self.other = other
def do_it(self):
return self.other.work()
class AThingIMightNeed:
service: SomeService
def __init__(self, service: SomeService):
self.service = service
def do_it(self):
return self.service.do_it()
|
tools/update_feed.py | mubashshirjamal/code | 1,582 | 11191949 | # -*- coding: utf-8 -*-
import json
from datetime import datetime
from vilya.libs.rdstore import rds
from vilya.models.utils import (
CJsonEncoder,
)
from vilya.models.feed import (
get_user_feed as get_user_feed_v2,
get_user_inbox as get_user_inbox_v2,
get_public_feed as get_public_feed_v2,
get_team_feed as get_team_feed_v2,
)
from vilya.models.team import Team
# update feed data from v1 to v2
MAX_ACTIONS_COUNT = 1009 # Happy Number
RDS_USER_INBOX_KEY = 'feed:private:user:v1:%s'
RDS_USER_FEED_KEY = 'feed:public:user:v1:%s'
RDS_PUBLIC_FEED_KEY = 'feed:public:everyone:v1'
RDS_TEAM_FEED_KEY = 'feed:public:team:v1:%s'
class Feed(object):
def __init__(self, db_key):
self.db_key = db_key
def __repr__(self):
return '%s (%s)' % (self.__class__, self.db_key)
@classmethod
def get(cls, db_key):
return cls(db_key=db_key)
def add_action(self, action_data):
data = json.dumps(action_data, cls=CJsonEncoder)
rds.lpush(self.db_key, data)
rds.ltrim(self.db_key, 0, MAX_ACTIONS_COUNT)
def get_actions(self):
data = rds.lrange(self.db_key, 0, MAX_ACTIONS_COUNT)
return [json.loads(d) for d in data]
def get_user_inbox(user):
return Feed.get(db_key=RDS_USER_INBOX_KEY % user)
def get_user_feed(user):
return Feed.get(db_key=RDS_USER_FEED_KEY % user)
def get_public_feed():
return Feed.get(db_key=RDS_PUBLIC_FEED_KEY)
def get_team_feed(team):
return Feed.get(db_key=RDS_TEAM_FEED_KEY % team)
def main():
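    # Replay every v1 feed (public, per-team, per-user inbox and per-user
    # public feeds) into the v2 store, parsing each stored date string back
    # into a datetime along the way.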
public_feed = get_public_feed()
public_feed_v2 = get_public_feed_v2()
feeds = public_feed.get_actions()
for feed in feeds:
date = datetime.strptime(feed['date'], "%Y-%m-%d %H:%M:%S")
feed['date'] = date
public_feed_v2.add_action(feed)
print "updated %s public feeds." % len(feeds)
teams = Team.gets()
for team in teams:
team_feed = get_team_feed(team.id)
team_feed_v2 = get_team_feed_v2(team.id)
feeds = team_feed.get_actions()
for feed in feeds:
date = datetime.strptime(feed['date'], "%Y-%m-%d %H:%M:%S")
feed['date'] = date
team_feed_v2.add_action(feed)
print "updated %s team %s feeds." % (len(feeds), team.name)
user_inbox_keys = rds.keys('feed:private:user:v1:*')
for key in user_inbox_keys:
_, _, _, _, user = key.split(':')
user_feed = get_user_inbox(user)
user_feed_v2 = get_user_inbox_v2(user)
feeds = user_feed.get_actions()
for feed in feeds:
date = datetime.strptime(feed['date'], "%Y-%m-%d %H:%M:%S")
feed['date'] = date
user_feed_v2.add_action(feed)
print "updated %s user %s inbox feeds." % (len(feeds), user)
user_feed_keys = rds.keys('feed:public:user:v1:*')
for key in user_feed_keys:
_, _, _, _, user = key.split(':')
user_feed = get_user_feed(user)
user_feed_v2 = get_user_feed_v2(user)
feeds = user_feed.get_actions()
for feed in feeds:
date = datetime.strptime(feed['date'], "%Y-%m-%d %H:%M:%S")
feed['date'] = date
user_feed_v2.add_action(feed)
print "updated %s user %s feeds." % (len(feeds), user)
if __name__ == "__main__":
main()
|
python/scrape_for_events.py | victorromeo/uTensor | 1,047 | 11191993 | <reponame>victorromeo/uTensor
import numpy as np
import glob
import re
from collections import defaultdict
from pprint import pprint
def mHash_fnv1a(mStr):
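    # 32-bit FNV-1a hash of the string; np.uint32 arithmetic wraps on overflow
    # (hence the seterr call below), presumably mirroring the event-id hash
    # used by the C++ sources this script scrapes.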
np.seterr(over='ignore')
val_32_const = np.uint32(0x811c9dc5)
prime_32_const = np.uint32(0x1000193)
value = val_32_const
for c in mStr:
value = (value ^ np.uint32(ord(c))) * prime_32_const
return value
def get_target_files():
x = glob.glob('**/*.[ch]pp', recursive=True)
return x
def get_event_map():
tgts = get_target_files()
event_names = []
event_map = defaultdict(list)
for f in tgts:
with open(f) as fp:
for line in fp:
m = re.match("\s*DECLARE_\w+\((\w+)\)", line)
if m:
#print(m)
event_names.append(m.group(1))
for evt in event_names:
x = mHash_fnv1a(evt)
event_map[x].append(evt)
pprint(event_map)
return event_map
if __name__ == "__main__":
x = get_event_map()
|
5-minute-coding-interview-bootcamp-basic-algorithms/dfs.py | dapopov-st/python-youtube-code | 262 | 11191998 | <filename>5-minute-coding-interview-bootcamp-basic-algorithms/dfs.py
graph = {"A": set(["B", "C"]),
"B": set(["A", "D", "E"]),
"C": set(["A", "F", "G"]),
"D": set(["B"]),
"E": set(["B"]),
"F": set(["C"]),
'G': set(["C"])}
def dfs(graph, start_node):
    explored, frontier = set(), [start_node]
    while frontier:
        node = frontier.pop()
        if node not in explored:
            explored.add(node)
            print(node)
            frontier.extend(graph[node] - explored)
    return
dfs(graph, "A") |
Py Apple Dynamics V6.5/Py Apple Dynamics V6.5 固件及程序/V6.5 源代码/config.py | ToanTech/py-apple-dynamics | 125 | 11192044 | #=============Wifi设置=============
#do_connect_STA('Toan','Toan123456') #WIFI账号密码
do_connect_AP()
#=============步态参数=============
Ts=1 #周期
faai=0.5 #占空比
pit_max_ang=20 #设定俯仰轴最大限制角度
rol_max_ang=20 #设定滚转轴最大限制角度
xs_max=80 #设定最大x轴移动角度
|
cookietemple/util/dict_util.py | e2jk/cookietemple | 117 | 11192049 | from collections.abc import MutableMapping
from contextlib import suppress
def delete_keys_from_dict(dictionary: MutableMapping, keys: list) -> None:
"""
Deletes all key instances in an arbitrarily nested dictionary inplace
:param dictionary: dictionary of which the keys are deleted
:param keys: list of keys to delete
"""
for key in keys:
with suppress(KeyError):
del dictionary[key]
for value in dictionary.values():
if isinstance(value, MutableMapping):
delete_keys_from_dict(value, keys)
def is_nested_dictionary(dictionary: dict) -> bool:
"""
Determines whether a dictionary is nested or not
:param dictionary: dictionary to examine
:return: True if dictionary is nested, false otherwise
"""
return any(isinstance(_, dict) for _ in dictionary.values())
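# Minimal illustration (not part of the original module): the deletion happens
# in place and applies at every nesting level.
if __name__ == "__main__":
    d = {"a": 1, "b": {"a": 2, "c": 3}}
    delete_keys_from_dict(d, ["a"])
    print(d)  # {'b': {'c': 3}}
    print(is_nested_dictionary(d))  # True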
|
docs/plasmapy_sphinx/utils.py | seanjunheng2/PlasmaPy | 429 | 11192062 | <reponame>seanjunheng2/PlasmaPy
"""
A utility package containing functions and variables to support development of
the core functionality in `plasmapy_sphinx`.
"""
__all__ = [
"default_grouping_info",
"find_mod_objs",
"get_custom_grouping_info",
"package_dir",
"templates_dir",
]
import inspect
import os
from collections import OrderedDict
from importlib import import_module
from sphinx.application import Sphinx
from typing import Any, Dict
package_dir = os.path.abspath(os.path.dirname(__file__))
"""Absolute path to the `plasmapy_sphinx` package directory."""
templates_dir = os.path.join(package_dir, "templates")
"""Absolute path to the `plasmapy_sphinx` templates directory."""
default_grouping_info = OrderedDict(
{
"modules": {"title": "Sub-Packages & Modules"},
"classes": {"title": "Classes"},
"exceptions": {"title": "Exceptions"},
"warnings": {"title": "Warnings"},
"functions": {"title": "Functions"},
"variables": {"title": "Variables & Attributes"},
},
)
"""
Dictionary containing information related to the default object groups used
by the :rst:dir:`automodapi` and :rst:dir:`automodsumm` directives. Can be
extend using the configuration value :confval:`automodapi_custom_groups`.
"""
def get_custom_grouping_info(app: Sphinx):
"""
Retrieve the custom groups dictionary defined by the configuration value
:confval:`automodapi_custom_groups`.
"""
try:
_info = app.config.automodapi_custom_groups
except AttributeError:
_info = {}
return _info
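# Example shape of the ``automodapi_custom_groups`` configuration value, as
# inferred from how the "dunder" key is consumed in ``find_mod_objs`` below
# (the exact schema shown here is an assumption, not taken from the docs):
#
#     automodapi_custom_groups = {
#         "aliases": {
#             "title": "Aliases",
#             "dunder": "__aliases__",
#         },
#     }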
def find_mod_objs(modname: str, app: Sphinx = None) -> Dict[str, Dict[str, Any]]:
"""
Inspect the module ``modname`` for all the contained objects, sort for the
object type (module, function, class, etc.), and return a dictionary containing
object names, fully qualified names, and instances.
Parameters
----------
modname : str
Name of the module (e.g. ``"plasmapy_sphinx.utils'``) to be inspect.
app : `~sphinx.application.Sphinx`
Instance of the `Sphinx` application.
Returns
-------
mod_objs : Dict[str, Dict[str, List[Any]]]
A dictionary containing names, qualified names, and objects instances of all
the objects in ``modname`` sorted by their respective group (module, class,
function, etc.)
The first key of the dictionary represents the object type (modules, classes,
functions, etc.). The second key is either ``"names"`` (list of all object
short names), ``"qualnames"`` (list of all object qualified names), and
``"objs"`` (list of object instances).
Examples
--------
>>> find_mod_objs("plasmapy_sphinx.utils")
{
'functions': {
'names': ['find_mod_objs', 'get_custom_grouping_info'],
'qualnames': [
'plasmapy_sphinx.utils.find_mod_objs',
'plasmapy_sphinx.utils.get_custom_grouping_info',
],
'objs': [
<function plasmapy_sphinx.utils.find_mod_objs>,
<function plasmapy_sphinx.utils.get_custom_grouping_info>,
]
},
'variables': {
'names': ['default_grouping_info', 'package_dir', 'templates_dir'],
'qualnames': [
'plasmapy_sphinx.utils.default_grouping_info',
'plasmapy_sphinx.utils.package_dir',
'plasmapy_sphinx.utils.templates_dir',
],
'objs': [
OrderedDict(...),
"/.../plasmapy_sphinx",
"/.../plasmapy_sphinx/templates",
]
}
}
Notes
-----
If the module contains the ``__all__`` dunder, then the routine groups the
objects specified in the dunder; otherwise, it will search the module's `globals`,
minus any private or special members. The routing will then group the
module objects in the following order...
1. Group any imported modules or packages.
- Regardless of if ``__all__`` is defined, the routine will first search
the module's `globals` for any imported modules or packages.
- Any 3rd party modules are excluded unless specified in ``__all__``.
- Any non-direct sub-modules are excluded unless specified in ``__all__``.
2. Custom groups defined by :confval:`automodapi_custom_groups` are then collected.
3. The remaining objects are grouped into the default groupds defined by
:attr:`default_grouping_info`.
"""
if app is not None:
if isinstance(app, Sphinx):
cgroups_def = get_custom_grouping_info(app)
else:
# assuming dict for testing
cgroups_def = app
cgroups = set(cgroups_def)
else:
cgroups_def = {}
cgroups = set()
mod = import_module(modname)
pkg_name = modname.split(".")[0]
# define what to search
pkg_names = {name for name in mod.__dict__.keys() if not name.startswith("_")}
if hasattr(mod, "__all__"):
no_all = False
names_to_search = set(mod.__all__)
else:
no_all = True
names_to_search = pkg_names
# filter pkg_names
for name in pkg_names.copy():
obj = getattr(mod, name)
if not no_all and name in names_to_search:
continue
ismod = inspect.ismodule(obj)
ispkg = ismod and obj.__package__ == obj.__name__
# remove test folders
if ispkg and obj.__package__.split(".")[-1] == "tests":
pkg_names.remove(name)
continue
# remove 3rd party objects
if ismod and obj.__package__.split(".")[0] != pkg_name:
pkg_names.remove(name)
continue
elif (
not ismod
and hasattr(obj, "__module__")
and obj.__module__.split(".")[0] != pkg_name
):
# Note: this will miss ufuncs like numpy.sqrt since they do not have
# a __module__ property
pkg_names.remove(name)
continue
# remove non direct sub-pkgs and mods of modname
if ismod:
if not obj.__name__.startswith(modname):
pkg_names.remove(name)
continue
else:
nm = obj.__name__[len(modname) :].split(".")
nm.remove("")
if len(nm) != 1:
pkg_names.remove(name)
continue
# find local modules first
names_of_modules = set()
for name in pkg_names.copy():
obj = getattr(mod, name)
if inspect.ismodule(obj):
names_of_modules.add(name)
mod_objs = {"modules": {"names": []}}
if len(names_of_modules) > 0:
names_of_modules = names_of_modules
mod_objs["modules"]["names"] = list(names_of_modules)
names_to_search = names_to_search - names_of_modules
# find and filter custom groups
for name in cgroups:
dunder = cgroups_def[name]["dunder"]
if hasattr(mod, dunder):
custom_names = set(getattr(mod, dunder))
else:
continue
if len(custom_names) > 0:
mod_objs.update({name: {"names": list(custom_names)}})
names_to_search = names_to_search - custom_names
# gather all remaining groups
mod_objs.update(
{
"classes": {"names": []},
"exceptions": {"names": []},
"warnings": {"names": []},
"functions": {"names": []},
"variables": {"names": []},
}
) # type: Dict[str, Dict[str, Any]]
for name in names_to_search:
obj = getattr(mod, name)
if inspect.isroutine(obj):
# is a user-defined or built-in function
mod_objs["functions"]["names"].append(name)
elif inspect.isclass(obj):
if issubclass(obj, Warning):
mod_objs["warnings"]["names"].append(name)
elif issubclass(obj, BaseException):
mod_objs["exceptions"]["names"].append(name)
else:
mod_objs["classes"]["names"].append(name)
else:
mod_objs["variables"]["names"].append(name)
# retrieve and defined qualnames and objs
for obj_type in list(mod_objs):
if len(mod_objs[obj_type]["names"]) == 0:
del mod_objs[obj_type]
continue
mod_objs[obj_type].update({"qualnames": [], "objs": []})
for name in list(mod_objs[obj_type]["names"]):
# Note: The 'qualname' is always constructed with 'name' so when
# something like
#
# def func(...):
# ...
#
# f2 = func
#
# is done, then the 'qualname' ends with 'f2' and not 'func'.
#
obj = getattr(mod, name)
ismod = inspect.ismodule(obj)
# ispkg = ismod and obj.__package__ == obj.__name__
if not ismod and no_all:
# only allow local objects to be collected
# - at this point modules & pkgs should have already been
# filtered for direct sub-modules and pkgs
if not hasattr(obj, "__module__"):
# this would be a locally defined variable like
# plasmapy.__citation__
pass
elif not obj.__module__.startswith(pkg_name):
# object not from package being documented
mod_objs[obj_type]["names"].remove(name)
continue
if ismod:
obj_renamed = obj.__name__.split(".")[-1] != name
elif not hasattr(obj, "__name__"):
obj_renamed = False
else:
obj_renamed = obj.__name__ != name
if ismod and obj_renamed:
qualname = f"{obj.__package__}.{name}"
elif ismod and not obj_renamed:
qualname = obj.__name__
elif obj_renamed or not hasattr(obj, "__module__"):
# can not tell if the object was renamed in modname or in
# obj.__module__, so assumed it happened in modname
qualname = f"{modname}.{name}"
elif obj.__module__.split(".")[0] != pkg_name:
# this will catch scenarios like typing alias definitions where
# __module__ == typing even when defined locally
qualname = f"{modname}.{name}"
else:
qualname = f"{obj.__module__}.{name}"
mod_objs[obj_type]["qualnames"].append(qualname)
mod_objs[obj_type]["objs"].append(obj)
# sort lists
names = sorted(mod_objs[obj_type]["names"].copy())
qualnames = []
objs = []
for name in names:
index = mod_objs[obj_type]["names"].index(name)
qualnames.append(mod_objs[obj_type]["qualnames"][index])
objs.append(mod_objs[obj_type]["objs"][index])
mod_objs[obj_type] = {"names": names, "qualnames": qualnames, "objs": objs}
return mod_objs
|
nalaf/learning/crfsuite.py | ashish-narwal/nalaf | 103 | 11192111 | <reponame>ashish-narwal/nalaf<filename>nalaf/learning/crfsuite.py<gh_stars>100-1000
import warnings
import pycrfsuite
from nalaf.structures.data import Label
class PyCRFSuite:
def __init__(self, model_file=None):
self.model_file = model_file
if self.model_file is None:
self.tagger = None
else:
self.tagger = pycrfsuite.Tagger()
self.tagger.open(self.model_file)
def close(self):
if self.tagger is not None:
self.tagger.close()
def __del__(self):
self.close()
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def annotate(self, corpus, class_id):
"""
:type corpus: nalaf.structures.data.Dataset
:type class_id: str ~ to annotate with
"""
for sentence in corpus.sentences():
labels = self.tagger.tag(pycrfsuite.ItemSequence(token.features for token in sentence))
for token_index in range(len(sentence)):
label = labels[token_index]
try:
sentence[token_index].predicted_labels = [Label(label, self.tagger.marginal(label, token_index))]
except Exception as e:
raise Exception("Exception when assining the predicted labels; likely a Multi-Thread problem", e)
corpus.form_predicted_annotations(class_id)
@staticmethod
def train(data, model_file, params=None):
"""
:type data: nalaf.structures.data.Dataset
:type model_file: str ~ filename (from local file system) to save trained model to. If None, no model is saved.
"""
trainer = pycrfsuite.Trainer()
try:
if params is not None:
trainer.set_params(params)
for sentence in data.sentences():
trainer.append(pycrfsuite.ItemSequence([token.features for token in sentence]),
[token.original_labels[0].value for token in sentence])
# The CRFSuite library handles the "pickling" of the file; saves the model here
trainer.train(model_file)
finally:
trainer.clear()
@staticmethod
def tag(data, model_file, class_id):
warnings.warn('Use non-static `annotate` instead', DeprecationWarning)
"""
:type data: nalaf.structures.data.Dataset
:type model_file: str
"""
tagger = pycrfsuite.Tagger()
try:
tagger.open(model_file)
for sentence in data.sentences():
labels = tagger.tag(pycrfsuite.ItemSequence(token.features for token in sentence))
for token_index in range(len(sentence)):
label = labels[token_index]
sentence[token_index].predicted_labels = [Label(label, tagger.marginal(label, token_index))]
data.form_predicted_annotations(class_id)
finally:
tagger.close()
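# Illustrative workflow (a sketch only; assumes a nalaf Dataset whose tokens
# already carry features and original labels -- the model file name and the
# class id below are placeholders):
#
#     PyCRFSuite.train(train_data, 'crf.model')
#     crf = PyCRFSuite('crf.model')
#     crf.annotate(test_data, class_id='e_1')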
|
zproc/state/server.py | pycampers/zproc | 106 | 11192116 | <gh_stars>100-1000
import os
import struct
import time
from bisect import bisect
from collections import defaultdict
from contextlib import contextmanager
from copy import deepcopy
from typing import Any, Dict, List, Tuple
import zmq
from zproc import serializer
from zproc.consts import Cmds, ServerMeta
from zproc.consts import Msgs
RequestType = Dict[Msgs, Any]
class StateServer:
identity: bytes
namespace: bytes
state_map: Dict[bytes, dict]
state: dict
history: Dict[bytes, Tuple[List[float], List[List[bytes]]]]
pending: Dict[bytes, Tuple[bytes, bytes, bool, float]]
def __init__(
self,
state_router: zmq.Socket,
watch_router: zmq.Socket,
server_meta: ServerMeta,
) -> None:
self.state_router = state_router
self.watch_router = watch_router
self.server_meta = server_meta
self.dispatch_dict = {
Cmds.run_fn_atomically: self.run_fn_atomically,
Cmds.run_dict_method: self.run_dict_method,
Cmds.get_state: self.send_state,
Cmds.set_state: self.set_state,
Cmds.get_server_meta: self.get_server_meta,
Cmds.ping: self.ping,
Cmds.time: self.time,
}
self.state_map = defaultdict(dict)
self.history = defaultdict(lambda: ([], []))
self.pending = {}
def send_state(self, _):
"""reply with state to the current client"""
self.reply(self.state)
def get_server_meta(self, _):
self.reply(self.server_meta)
def ping(self, request):
self.reply((request[Msgs.info], os.getpid()))
def time(self, _):
self.reply(time.time())
def set_state(self, request):
new = request[Msgs.info]
with self.mutate_safely():
self.state_map[self.namespace] = new
self.reply(True)
def run_dict_method(self, request):
"""Execute a method on the state ``dict`` and reply with the result."""
state_method_name, args, kwargs = (
request[Msgs.info],
request[Msgs.args],
request[Msgs.kwargs],
)
# print(method_name, args, kwargs)
with self.mutate_safely():
self.reply(getattr(self.state, state_method_name)(*args, **kwargs))
def run_fn_atomically(self, request):
"""Execute a function, atomically and reply with the result."""
fn = serializer.loads_fn(request[Msgs.info])
args, kwargs = request[Msgs.args], request[Msgs.kwargs]
with self.mutate_safely():
self.reply(fn(self.state, *args, **kwargs))
def recv_request(self):
self.identity, request = self.state_router.recv_multipart()
request = serializer.loads(request)
try:
self.namespace = request[Msgs.namespace]
except KeyError:
pass
else:
self.state = self.state_map[self.namespace]
self.dispatch_dict[request[Msgs.cmd]](request)
def reply(self, response):
# print("server rep:", self.identity, response, time.time())
self.state_router.send_multipart([self.identity, serializer.dumps(response)])
@contextmanager
def mutate_safely(self):
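        # Snapshot the state before the mutation: roll the namespace back on an
        # exception, otherwise append (old, new, timestamp) to this namespace's
        # history and try to resolve any pending watchers.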
old = deepcopy(self.state)
stamp = time.time()
try:
yield
except Exception:
self.state = self.state_map[self.namespace] = old
raise
slot = self.history[self.namespace]
slot[0].append(stamp)
slot[1].append(
[
self.identity,
serializer.dumps((old, self.state, stamp)),
self.state == old,
]
)
self.resolve_pending()
def resolve_watcher(
self,
w_ident: bytes,
s_ident: bytes,
namespace: bytes,
identical_not_okay: bool,
only_after: float,
) -> bool:
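        # Scan this namespace's history for an update newer than `only_after`
        # that did not originate from the watcher's own state socket (s_ident)
        # and, if requested, that actually changed the state; forward it to the
        # watcher and return whether anything was sent.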
timestamps, history = self.history[namespace]
index = bisect(timestamps, only_after) - 1
while True:
index += 1
try:
ident, update, identical = history[index]
except IndexError:
break
if ident == s_ident:
continue
if identical_not_okay and identical:
continue
self.watch_router.send_multipart([w_ident, update, bytes(identical)])
return True
return False
def resolve_pending(self):
pending = self.pending
if not pending:
return
for w_ident in list(pending):
if self.resolve_watcher(w_ident, *pending[w_ident]):
del pending[w_ident]
def recv_watcher(self):
w_ident, s_ident, namespace, identical_okay, only_after = (
self.watch_router.recv_multipart()
)
self.pending[w_ident] = (
s_ident,
namespace,
not identical_okay,
*struct.unpack("d", only_after),
)
def reset_internal_state(self):
self.identity = None
self.namespace = None
self.state = None
def tick(self):
self.resolve_pending()
for sock in zmq.select([self.watch_router, self.state_router], [], [])[0]:
if sock is self.state_router:
self.recv_request()
elif sock is self.watch_router:
self.recv_watcher()
|
mmwave/tracking/__init__.py | gimac/OpenRadar | 275 | 11192122 | from .ekf import *
|
job_report/main.py | DazEB2/SimplePyScripts | 117 | 11192123 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
# p12 to pem:
# C:\Users\ipetrash>openssl pkcs12 -in ipetrash.p12 -out ipetrash.pem -nodes -clcerts
# Enter Import Password:
# MAC verified OK
# OR:
# OpenSSL_example\p12_to_pem.py
PEM_FILE_NAME = 'ipetrash.pem'
if __name__ == '__main__':
from job_report.utils import get_report_persons_info, get_person_info
report_dict = get_report_persons_info(PEM_FILE_NAME)
    # Print all employees, sorted by the amount of overtime worked
from itertools import chain
person_list = set(chain(*report_dict.values()))
    # Check that the sort key works (in principle, this could probably be removed)
assert sorted(person_list, key=lambda x: x.deviation_of_time) == \
sorted(person_list, key=lambda x: x.deviation_of_time.total)
sorted_person_list = sorted(person_list, key=lambda x: x.deviation_of_time, reverse=True)
for i, person in enumerate(sorted_person_list, 1):
print('{:>3}. {} {}'.format(i, person.full_name, person.deviation_of_time))
print()
person = get_person_info(PEM_FILE_NAME, second_name='Петраш', first_name='Илья', report_dict=report_dict)
if person:
print('#{}. {} {}'.format(sorted_person_list.index(person) + 1, person.full_name, person.deviation_of_time))
|
tests/sparseml/tensorflow_v1/optim/test_manager.py | clementpoiret/sparseml | 922 | 11192151 | <reponame>clementpoiret/sparseml
# Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Callable
import pytest
from sparseml.tensorflow_v1.optim import Modifier, ScheduledModifierManager
from sparseml.tensorflow_v1.utils import tf_compat
from tests.sparseml.tensorflow_v1.optim.test_modifier import (
ModifierTest,
ScheduledModifierImpl,
conv_graph_lambda,
mlp_graph_lambda,
)
@pytest.mark.skipif(
os.getenv("NM_ML_SKIP_TENSORFLOW_TESTS", False),
reason="Skipping tensorflow_v1 tests",
)
@pytest.mark.parametrize(
"modifier_lambda",
[lambda: ScheduledModifierManager([ScheduledModifierImpl()])],
scope="function",
)
@pytest.mark.parametrize(
"graph_lambda", [mlp_graph_lambda, conv_graph_lambda], scope="function"
)
@pytest.mark.parametrize("steps_per_epoch", [100], scope="function")
class TestManagerImpl(ModifierTest):
def test_yaml(
self,
modifier_lambda: Callable[[], Modifier],
graph_lambda: Callable[[], tf_compat.Graph],
steps_per_epoch: int,
):
# no yaml tests for manager
return
@pytest.mark.skipif(
os.getenv("NM_ML_SKIP_TENSORFLOW_TESTS", False),
reason="Skipping tensorflow_v1 tests",
)
def test_manager_yaml():
manager = ScheduledModifierManager([ScheduledModifierImpl()])
yaml_str = str(manager)
assert yaml_str
|
experiments/ekfloc.py | VladPodilnyk/Kalman-and-Bayesian-Filters-in-Python | 12,315 | 11192170 | <filename>experiments/ekfloc.py
# -*- coding: utf-8 -*-
"""
Created on Sun May 24 08:39:36 2015
@author: Roger
"""
#x = x x' y y' theta
from math import cos, sin, sqrt, atan2
import numpy as np
from numpy import array, dot
from numpy.linalg import pinv
def print_x(x):
print(x[0, 0], x[1, 0], np.degrees(x[2, 0]))
def control_update(x, u, dt):
""" x is [x, y, hdg], u is [vel, omega] """
v = u[0]
w = u[1]
if w == 0:
# approximate straight line with huge radius
w = 1.e-30
r = v/w # radius
return x + np.array([[-r*sin(x[2]) + r*sin(x[2] + w*dt)],
[ r*cos(x[2]) - r*cos(x[2] + w*dt)],
[w*dt]])
a1 = 0.001
a2 = 0.001
a3 = 0.001
a4 = 0.001
sigma_r = 0.1
sigma_h = a_error = np.radians(1)
sigma_s = 0.00001
def normalize_angle(x, index):
    # wrap the angle at x[index] back into [-pi, pi]
    if x[index] > np.pi:
        x[index] -= 2*np.pi
    if x[index] < -np.pi:
        x[index] += 2*np.pi
def ekfloc_predict(x, P, u, dt):
h = x[2]
v = u[0]
w = u[1]
if w == 0:
# approximate straight line with huge radius
w = 1.e-30
r = v/w # radius
sinh = sin(h)
sinhwdt = sin(h + w*dt)
cosh = cos(h)
coshwdt = cos(h + w*dt)
G = array(
[[1, 0, -r*cosh + r*coshwdt],
[0, 1, -r*sinh + r*sinhwdt],
[0, 0, 1]])
V = array(
[[(-sinh + sinhwdt)/w, v*(sin(h)-sinhwdt)/(w**2) + v*coshwdt*dt/w],
[(cosh - coshwdt)/w, -v*(cosh-coshwdt)/(w**2) + v*sinhwdt*dt/w],
[0, dt]])
# covariance of motion noise in control space
M = array([[a1*v**2 + a2*w**2, 0],
[0, a3*v**2 + a4*w**2]])
x = x + array([[-r*sinh + r*sinhwdt],
[r*cosh - r*coshwdt],
[w*dt]])
P = dot(G, P).dot(G.T) + dot(V, M).dot(V.T)
return x, P
def ekfloc(x, P, u, zs, c, m, dt):
h = x[2]
v = u[0]
w = u[1]
if w == 0:
# approximate straight line with huge radius
w = 1.e-30
r = v/w # radius
sinh = sin(h)
sinhwdt = sin(h + w*dt)
cosh = cos(h)
coshwdt = cos(h + w*dt)
F = array(
[[1, 0, -r*cosh + r*coshwdt],
[0, 1, -r*sinh + r*sinhwdt],
[0, 0, 1]])
V = array(
[[(-sinh + sinhwdt)/w, v*(sin(h)-sinhwdt)/(w**2) + v*coshwdt*dt/w],
[(cosh - coshwdt)/w, -v*(cosh-coshwdt)/(w**2) + v*sinhwdt*dt/w],
[0, dt]])
# covariance of motion noise in control space
M = array([[a1*v**2 + a2*w**2, 0],
[0, a3*v**2 + a4*w**2]])
x = x + array([[-r*sinh + r*sinhwdt],
[r*cosh - r*coshwdt],
[w*dt]])
P = dot(F, P).dot(F.T) + dot(V, M).dot(V.T)
R = np.diag([sigma_r**2, sigma_h**2, sigma_s**2])
for i, z in enumerate(zs):
j = c[i]
q = (m[j][0] - x[0, 0])**2 + (m[j][1] - x[1, 0])**2
z_est = array([sqrt(q),
atan2(m[j][1] - x[1, 0], m[j][0] - x[0, 0]) - x[2, 0],
0])
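        # H: Jacobian of the range-bearing measurement with respect to the
        # state; q is the squared distance from the state estimate to landmark j.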
H = array(
[[-(m[j, 0] - x[0, 0]) / sqrt(q), -(m[j, 1] - x[1, 0]) / sqrt(q), 0],
[ (m[j, 1] - x[1, 0]) / q, -(m[j, 0] - x[0, 0]) / q, -1],
[0, 0, 0]])
S = dot(H, P).dot(H.T) + R
#print('S', S)
K = dot(P, H.T).dot(pinv(S))
y = z - z_est
normalize_angle(y, 1)
y = array([y]).T
#print('y', y)
x = x + dot(K, y)
I = np.eye(P.shape[0])
I_KH = I - dot(K, H)
#print('i', I_KH)
P = dot(I_KH, P).dot(I_KH.T) + dot(K, R).dot(K.T)
return x, P
def ekfloc2(x, P, u, zs, c, m, dt):
h = x[2]
v = u[0]
w = u[1]
if w == 0:
# approximate straight line with huge radius
w = 1.e-30
r = v/w # radius
sinh = sin(h)
sinhwdt = sin(h + w*dt)
cosh = cos(h)
coshwdt = cos(h + w*dt)
F = array(
[[1, 0, -r*cosh + r*coshwdt],
[0, 1, -r*sinh + r*sinhwdt],
[0, 0, 1]])
V = array(
[[(-sinh + sinhwdt)/w, v*(sin(h)-sinhwdt)/(w**2) + v*coshwdt*dt/w],
[(cosh - coshwdt)/w, -v*(cosh-coshwdt)/(w**2) + v*sinhwdt*dt/w],
[0, dt]])
# covariance of motion noise in control space
M = array([[a1*v**2 + a2*w**2, 0],
[0, a3*v**2 + a4*w**2]])
x = x + array([[-r*sinh + r*sinhwdt],
[r*cosh - r*coshwdt],
[w*dt]])
P = dot(F, P).dot(F.T) + dot(V, M).dot(V.T)
R = np.diag([sigma_r**2, sigma_h**2])
for i, z in enumerate(zs):
j = c[i]
q = (m[j][0] - x[0, 0])**2 + (m[j][1] - x[1, 0])**2
z_est = array([sqrt(q),
atan2(m[j][1] - x[1, 0], m[j][0] - x[0, 0]) - x[2, 0]])
H = array(
[[-(m[j, 0] - x[0, 0]) / sqrt(q), -(m[j, 1] - x[1, 0]) / sqrt(q), 0],
[ (m[j, 1] - x[1, 0]) / q, -(m[j, 0] - x[0, 0]) / q, -1]])
S = dot(H, P).dot(H.T) + R
#print('S', S)
K = dot(P, H.T).dot(pinv(S))
y = z - z_est
normalize_angle(y, 1)
y = array([y]).T
#print('y', y)
x = x + dot(K, y)
print('x', x)
I = np.eye(P.shape[0])
I_KH = I - dot(K, H)
P = dot(I_KH, P).dot(I_KH.T) + dot(K, R).dot(K.T)
return x, P
m = array([[5, 5],
[7,6],
[4, 8]])
x = array([[2, 6, .3]]).T
u = array([.5, .01])
P = np.diag([1., 1., 1.])
c = [0, 1, 2]
import matplotlib.pyplot as plt
from numpy.random import randn
from filterpy.common import plot_covariance_ellipse
from filterpy.kalman import KalmanFilter
plt.figure()
plt.plot(m[:, 0], m[:, 1], 'o')
plt.plot(x[0], x[1], 'x', color='b', ms=20)
xp = x.copy()
dt = 0.1
np.random.seed(1234)
for i in range(1000):
xp, _ = ekfloc_predict(xp, P, u, dt)
plt.plot(xp[0], xp[1], 'x', color='g', ms=20)
if i % 10 == 0:
zs = []
for lmark in m:
d = sqrt((lmark[0] - xp[0, 0])**2 + (lmark[1] - xp[1, 0])**2) + randn()*sigma_r
a = atan2(lmark[1] - xp[1, 0], lmark[0] - xp[0, 0]) - xp[2, 0] + randn()*sigma_h
zs.append(np.array([d, a]))
x, P = ekfloc2(x, P, u, zs, c, m, dt*10)
if P[0,0] < 10000:
plot_covariance_ellipse((x[0,0], x[1,0]), P[0:2, 0:2], std=2,
facecolor='g', alpha=0.3)
plt.plot(x[0], x[1], 'x', color='r')
plt.axis('equal')
plt.show()
|
adversarial_mnist.py | goodcq/zhihuzhuanlan | 101 | 11192180 | from __future__ import absolute_import
#from __future__ import print_function
import os
import struct
from array import array
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.regularizers import l2, l1
from keras.constraints import maxnorm
from keras.optimizers import SGD, Adam, RMSprop, Adagrad
from keras.utils import np_utils, generic_utils
import numpy as np
import matplotlib.pyplot as plt
class MNIST(object):
def __init__(self, path='.'):
self.path = path
self.test_img_fname = 't10k-images-idx3-ubyte'
self.test_lbl_fname = 't10k-labels-idx1-ubyte'
self.train_img_fname = 'train-images-idx3-ubyte'
self.train_lbl_fname = 'train-labels-idx1-ubyte'
self.test_images = []
self.test_labels = []
self.train_images = []
self.train_labels = []
def load_testing(self):
ims, labels = self.load(os.path.join(self.path, self.test_img_fname),
os.path.join(self.path, self.test_lbl_fname))
self.test_images = np.array(ims)
self.test_labels = np.array(labels)
return ims, labels
def load_training(self):
ims, labels = self.load(os.path.join(self.path, self.train_img_fname),
os.path.join(self.path, self.train_lbl_fname))
self.train_images = np.array(ims)
self.train_labels = np.array(labels)
np.random.seed(1337)
np.random.shuffle(self.train_images)
np.random.seed(1337)
np.random.shuffle(self.train_labels)
return ims, labels
@classmethod
def load(cls, path_img, path_lbl):
with open(path_lbl, 'rb') as file:
magic, size = struct.unpack(">II", file.read(8))
if magic != 2049:
raise ValueError('Magic number mismatch, expected 2049,'
'got %d' % magic)
labels = array("B", file.read())
with open(path_img, 'rb') as file:
magic, size, rows, cols = struct.unpack(">IIII", file.read(16))
if magic != 2051:
raise ValueError('Magic number mismatch, expected 2051,'
'got %d' % magic)
image_data = array("B", file.read())
images = []
for i in xrange(size):
images.append([0]*rows*cols)
for i in xrange(size):
images[i][:] = image_data[i*rows*cols : (i+1)*rows*cols]
return images, labels
def test(self):
test_img, test_label = self.load_testing()
train_img, train_label = self.load_training()
assert len(test_img) == len(test_label)
assert len(test_img) == 10000
assert len(train_img) == len(train_label)
assert len(train_img) == 60000
print ("Showing num:" , train_label[0])
print (self.display(train_img[0]))
print
return True
@classmethod
def display(cls, img, width=28):
render = ''
for i in range(len(img)):
if i % width == 0: render += '\n'
if img[i] > 200:
render += '1'
else:
render += '0'
return render
def image_generator(img, batch_size):
dataset = np.zeros((64, 1, 28, 28))
for i in range(batch_size):
dataset[i] = img + np.random.uniform(low=-0.1, high=0.1, size=(1, 28, 28))
return dataset
def build_model():
nb_classes = 10
model = Sequential()
model.add(Convolution2D(32, 3, 3, input_shape=(1,28,28)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, 3, 3))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(256, init='normal'))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes, init='normal'))
model.add(Activation('softmax'))
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd)
return model
def gen(X_train, Y_train, X_test, Y_test):
batch_size = 64
nb_classes = 10
nb_epoch = 20
img = X_train[2]
img = img.astype("float32").reshape((1,28,28))
label = Y_train[2]
img /= 255.0
print 'label=' + str(label)
plt.imshow(img.reshape((28,28)),cmap = plt.cm.gray)
plt.show()
model = build_model()
model.load_weights('mnist_cnn')
for iterator in range(200):
ds = image_generator(img, 64)
pred = model.predict(ds, batch_size=64)
pred_label = np.argmax(pred, axis=1)
flag = False
for i in range(64):
if pred_label[i] == label:
choosed_img = ds[i]
flag = True
break
if flag == False:
print 'iter=' + str(iterator) + ", break"
break
else:
img = choosed_img
print 'iter=' + str(iterator) + ", label = " + str(label)
if iterator == 50 or iterator == 100 or iterator == 150:
plt.imshow(img.reshape((28,28)),cmap = plt.cm.gray)
plt.show()
print img
plt.imshow(img.reshape((28,28)),cmap = plt.cm.gray)
plt.show()
# choose the best
def gen2(X_train, Y_train, X_test, Y_test):
batch_size = 64
nb_classes = 10
nb_epoch = 20
img = X_train[2]
img = img.astype("float32").reshape((1,28,28))
label = Y_train[2]
img /= 255.0
print 'label=' + str(label)
model = build_model()
model.load_weights('mnist_cnn')
for iterator in range(1000):
ds = image_generator(img, 64)
pred = model.predict(ds, batch_size=64)
pred_label = np.argmax(pred, axis=1)
flag = False
for i in range(64):
if pred_label[i] != label:
choosed_idx = i
flag = True
break
if flag == False:
print 'iter=' + str(iterator) + ", no change"
img = ds[0]
else:
img = ds[choosed_idx]
print 'iter=' + str(iterator) + ", label = " + str(pred_label[choosed_idx])
break
plt.imshow(img.reshape((28,28)),cmap = plt.cm.gray)
plt.show()
def CNN(X_train, Y_train, X_test, Y_test):
batch_size = 64
nb_classes = 10
nb_epoch = 20
X_train = X_train.reshape(60000, 1, 28, 28)
X_test = X_test.reshape(10000, 1, 28, 28)
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
print(X_train.shape, 'train samples')
print(Y_train.shape, 'train labels')
print(X_test.shape, 'test smaples')
Y_train = np_utils.to_categorical(Y_train, nb_classes)
Y_test = np_utils.to_categorical(Y_test, nb_classes)
model = build_model()
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=30)
score = model.evaluate(X_test, Y_test, batch_size=batch_size)
model.save_weights('mnist_cnn')
print('Test score:', score)
if __name__ == "__main__":
print ('Testing')
mn = MNIST('.')
if mn.test():
print ('Passed')
#CNN(mn.train_images, mn.train_labels, mn.test_images, mn.test_labels)
gen2(mn.train_images, mn.train_labels, mn.test_images, mn.test_labels)
|
tests/unit/cfngin/lookups/test_registry.py | avosper-intellaegis/runway | 134 | 11192185 | """Tests for runway.cfngin.lookups.registry."""
# pylint: disable=no-self-use
# pyright: basic
from __future__ import annotations
from typing import TYPE_CHECKING, Any
import pytest
from runway.cfngin.lookups.handlers.default import DefaultLookup
from runway.cfngin.lookups.registry import (
CFNGIN_LOOKUP_HANDLERS,
register_lookup_handler,
unregister_lookup_handler,
)
if TYPE_CHECKING:
from pytest_mock import MockerFixture
def test_autoloaded_lookup_handlers(mocker: MockerFixture) -> None:
"""Test autoloaded lookup handlers."""
mocker.patch.dict(CFNGIN_LOOKUP_HANDLERS, {})
handlers = [
"ami",
"cfn",
"default",
"dynamodb",
"ecr",
"envvar",
"file",
"hook_data",
"kms",
"output",
"random.string",
"rxref",
"split",
"ssm",
"xref",
]
for handler in handlers:
assert (
handler in CFNGIN_LOOKUP_HANDLERS
), f'Lookup handler: "{handler}" not registered'
assert len(CFNGIN_LOOKUP_HANDLERS) == len(
handlers
), f"expected {len(handlers)} autoloaded handlers but found {len(CFNGIN_LOOKUP_HANDLERS)}"
def test_register_lookup_handler_function() -> None:
"""Test register_lookup_handler function."""
def fake_lookup(**_: Any) -> None:
"""Fake lookup."""
with pytest.raises(TypeError):
register_lookup_handler("test", fake_lookup) # type: ignore
def test_register_lookup_handler_not_subclass() -> None:
"""Test register_lookup_handler no subclass."""
class FakeLookup:
"""Fake lookup."""
with pytest.raises(TypeError):
register_lookup_handler("test", FakeLookup) # type: ignore
def test_register_lookup_handler_str(mocker: MockerFixture) -> None:
"""Test register_lookup_handler from string."""
mocker.patch.dict(CFNGIN_LOOKUP_HANDLERS, {})
register_lookup_handler(
"test", "runway.cfngin.lookups.handlers.default.DefaultLookup"
)
assert "test" in CFNGIN_LOOKUP_HANDLERS
assert CFNGIN_LOOKUP_HANDLERS["test"] == DefaultLookup
def test_unregister_lookup_handler(mocker: MockerFixture) -> None:
"""Test unregister_lookup_handler."""
mocker.patch.dict(CFNGIN_LOOKUP_HANDLERS, {"test": "something"})
assert "test" in CFNGIN_LOOKUP_HANDLERS
unregister_lookup_handler("test")
assert "test" not in CFNGIN_LOOKUP_HANDLERS
|
mobula/config.py | wkcn/MobulaOP | 161 | 11192193 | from .utils import with_metaclass
class DefaultConfig:
TARGET = 'mobula_op'
BUILD_PATH = './'
BUILD_IN_LOCAL_PATH = True
SHOW_BUILDING_COMMAND = False
MAX_BUILDING_WORKER_NUM = 8
DEBUG = False
USING_OPENMP = True
USING_CBLAS = False
HOST_NUM_THREADS = 0 # 0 : auto
USING_HIGH_LEVEL_WARNINGS = False
USING_OPTIMIZATION = True
USING_ASYNC_EXEC = True
GPU_BACKEND = 'cuda'
CXX = 'g++'
NVCC = 'nvcc'
HIPCC = 'hipcc'
CUDA_DIR = '/opt/cuda'
HIP_DIR = '/opt/rocm/hip'
class Config:
def __init__(self):
for name in dir(DefaultConfig):
if not name.startswith('_'):
self.__dict__[name] = getattr(DefaultConfig, name)
def __setattr__(self, name, value):
data = self.__dict__.get(name, None)
if data is None:
raise AttributeError("Config has no attribute '{}'".format(name))
target_type = type(data)
value_type = type(value)
if target_type is not value_type:
raise TypeError('The type of config attribute `{}` is not consistent, target {} vs value {}.'.format(
name, target_type, value_type))
self.__dict__[name] = value
config = Config()
class TempConfig:
def __init__(self, **kwargs):
self.kwargs = kwargs
self.old_config = dict()
def __enter__(self):
for k, v in self.kwargs.items():
if not hasattr(config, k):
raise AttributeError(
"'mobula.config' object has no attribute '{}'".format(k))
self.old_config[k] = getattr(config, k)
setattr(config, k, v)
def __exit__(self, *dummy):
for k, v in self.old_config.items():
setattr(config, k, v)
Config.TempConfig = TempConfig
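# Illustrative sketch (assumption, not part of the original module): TempConfig is a
# context manager that overrides options and restores the previous values on exit.
# The option names used here (DEBUG, USING_OPENMP) come from DefaultConfig above.
def _example_temp_config():
    with config.TempConfig(DEBUG=True, USING_OPENMP=False):
        # overrides are visible only inside the with-block
        assert config.DEBUG and not config.USING_OPENMP
    # previous values are restored automatically on exit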
|
Chapter13/c13_99_get_finStatement_not_working.py | John-ye666/Python-for-Finance-Second-Edition | 236 | 11192208 | """
Name : c13_99_get_finStatement.py
Book : Python for Finance
Publisher: Packt Publishing Ltd.
Author : <NAME>
Date : 2/27/2017
email : <EMAIL>
<EMAIL>
"""
import urllib2
import pandas as pd
url='http://financials.morningstar.com/income-statement/is.html?t=IBM®ion=usa&culture=en-US'
x= pd.read_csv(url, skiprows=10, index_col=0)
|
examples/dummy_plugin/dummy_plugin/secrets.py | psmware-ltd/nautobot | 384 | 11192226 | <reponame>psmware-ltd/nautobot
from django import forms
from nautobot.utilities.forms import BootstrapMixin
from nautobot.extras.secrets import SecretsProvider
class ConstantValueSecretsProvider(SecretsProvider):
"""
Example of a plugin-provided SecretsProvider - this one just uses a user-specified constant value.
Obviously this is insecure and not something you'd want to actually use!
"""
slug = "constant-value"
name = "Constant Value"
class ParametersForm(BootstrapMixin, forms.Form):
"""
User-friendly form for specifying the required parameters of this provider.
"""
constant = forms.CharField(
required=True,
help_text="Constant secret value. <strong>Example Only - DO NOT USE FOR REAL SENSITIVE DATA</strong>",
)
@classmethod
def get_value_for_secret(cls, secret, obj=None, **kwargs):
"""
Return the value defined in the Secret.parameters "constant" key.
A more realistic SecretsProvider would make calls to external APIs, etc. to retrieve a secret from storage.
Args:
secret (nautobot.extras.models.Secret): The secret whose value should be retrieved.
obj (object): The object (Django model or similar) providing context for the secret's parameters.
"""
return secret.rendered_parameters(obj=obj).get("constant")
secrets_providers = [ConstantValueSecretsProvider]
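# Illustrative sketch (assumption, not part of the original plugin): a Secret whose
# provider is "constant-value" and whose parameters are {"constant": "hunter2"} would
# resolve to "hunter2" via ConstantValueSecretsProvider.get_value_for_secret() above.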
|
pymtl/tools/translation/verilator_sim_test.py | belang/pymtl | 206 | 11192318 | #=======================================================================
# verilator_sim_test.py
#=======================================================================
from pymtl import SimulationTool
from verilator_sim import TranslationTool
from pymtl import requires_verilator
from pclib.rtl import Reg
#-----------------------------------------------------------------------
# Test Config
#-----------------------------------------------------------------------
# Skip all tests in module if verilator is not installed
pytestmark = requires_verilator
#-----------------------------------------------------------------------
# Test Function
#-----------------------------------------------------------------------
def reg_test( model ):
vmodel = TranslationTool( model )
vmodel.elaborate()
sim = SimulationTool( vmodel )
sim.reset()
assert vmodel.out == 0
vmodel.in_.value = 10
sim.cycle()
assert vmodel.out == 10
vmodel.in_.value = 12
assert vmodel.out == 10
sim.cycle()
assert vmodel.out == 12
#-----------------------------------------------------------------------
# Run Tests
#-----------------------------------------------------------------------
def test_reg8():
reg_test( Reg(8) )
def test_reg16():
reg_test( Reg(16) )
|
onnx/bin/__init__.py | pchandrasekaran1595/onnx | 12,820 | 11192324 | # SPDX-License-Identifier: Apache-2.0
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
|
tests/blob/test_blob_storage_account.py | Ross1503/azure-storage-python | 348 | 11192330 | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
from azure.storage.blob import (
BlockBlobService,
)
from azure.storage.blob.models import StandardBlobTier, BatchSetBlobTierSubRequest, RehydratePriority
from tests.testcase import (
StorageTestCase,
record,
)
# ------------------------------------------------------------------------------
TEST_BLOB_PREFIX = 'blob'
# ------------------------------------------------------------------------------
class BlobStorageAccountTest(StorageTestCase):
def setUp(self):
super(BlobStorageAccountTest, self).setUp()
self.bs = self._create_storage_service_for_blob_storage_account(BlockBlobService, self.settings)
self.container_name = self.get_resource_name('utcontainer')
if not self.is_playback():
self.bs.create_container(self.container_name)
def tearDown(self):
if not self.is_playback():
try:
self.bs.delete_container(self.container_name)
except:
pass
return super(BlobStorageAccountTest, self).tearDown()
# --Helpers-----------------------------------------------------------------
def _get_blob_reference(self):
return self.get_resource_name(TEST_BLOB_PREFIX)
def _create_blob(self):
blob_name = self._get_blob_reference()
self.bs.create_blob_from_bytes(self.container_name, blob_name, b'')
return blob_name
def assertBlobEqual(self, container_name, blob_name, expected_data):
actual_data = self.bs.get_blob_to_bytes(container_name, blob_name)
self.assertEqual(actual_data.content, expected_data)
# --Tests specific to Blob Storage Accounts (not general purpose)------------
@record
def test_standard_blob_tier_set_tier_api(self):
self.bs.create_container(self.container_name)
tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]
for tier in tiers:
blob_name = self._get_blob_reference()
data = b'hello world'
self.bs.create_blob_from_bytes(self.container_name, blob_name, data)
blob_ref = self.bs.get_blob_properties(self.container_name, blob_name)
self.assertIsNotNone(blob_ref.properties.blob_tier)
self.assertTrue(blob_ref.properties.blob_tier_inferred)
self.assertIsNone(blob_ref.properties.blob_tier_change_time)
blobs = list(self.bs.list_blobs(self.container_name))
# Assert
self.assertIsNotNone(blobs)
self.assertGreaterEqual(len(blobs), 1)
self.assertIsNotNone(blobs[0])
self.assertNamedItemInContainer(blobs, blob_name)
self.assertIsNotNone(blobs[0].properties.blob_tier)
self.assertTrue(blobs[0].properties.blob_tier_inferred)
self.assertIsNone(blobs[0].properties.blob_tier_change_time)
self.bs.set_standard_blob_tier(self.container_name, blob_name, tier)
blob_ref2 = self.bs.get_blob_properties(self.container_name, blob_name)
self.assertEqual(tier, blob_ref2.properties.blob_tier)
self.assertFalse(blob_ref2.properties.blob_tier_inferred)
self.assertIsNotNone(blob_ref2.properties.blob_tier_change_time)
blobs = list(self.bs.list_blobs(self.container_name))
# Assert
self.assertIsNotNone(blobs)
self.assertGreaterEqual(len(blobs), 1)
self.assertIsNotNone(blobs[0])
self.assertNamedItemInContainer(blobs, blob_name)
self.assertEqual(blobs[0].properties.blob_tier, tier)
self.assertFalse(blobs[0].properties.blob_tier_inferred)
self.assertIsNotNone(blobs[0].properties.blob_tier_change_time)
self.bs.delete_blob(self.container_name, blob_name)
def test_empty_batch_set_standard_blob_tier(self):
# Arrange
batch_set_standard_blob_tier_requests = list()
with self.assertRaises(ValueError):
self.bs.batch_set_standard_blob_tier(batch_set_standard_blob_tier_requests)
def test_batch_set_257_standard_blob_tier_for_blobs(self):
# Arrange
batch_set_standard_blob_tier_requests = list()
for i in range(0, 257):
batch_set_standard_blob_tier_requests.append(
BatchSetBlobTierSubRequest(self.container_name, i, StandardBlobTier.Archive))
with self.assertRaises(ValueError):
self.bs.batch_set_standard_blob_tier(batch_set_standard_blob_tier_requests)
@record
def test_set_standard_blob_tier_with_rehydrate_priority(self):
# Arrange
self.bs.create_container(self.container_name)
blob_name = self._create_blob()
blob_tier = StandardBlobTier.Archive
rehydrate_tier = StandardBlobTier.Cool
rehydrate_priority = RehydratePriority.Standard
# Act
self.bs.set_standard_blob_tier(self.container_name, blob_name, blob_tier,
rehydrate_priority=rehydrate_priority)
self.bs.set_standard_blob_tier(self.container_name, blob_name, rehydrate_tier)
blob_ref = self.bs.get_blob_properties(self.container_name, blob_name)
# Assert
self.assertEquals('rehydrate-pending-to-cool', blob_ref.properties.rehydration_status)
@record
def test_batch_set_standard_blob_tier_for_one_blob(self):
# Arrange
batch_set_blob_tier_request = []
self.bs.create_container(self.container_name)
blob_name = self._get_blob_reference()
data = b'hello world'
blob_tier = StandardBlobTier.Cool
self.bs.create_blob_from_bytes(self.container_name, blob_name, data)
sub_request = BatchSetBlobTierSubRequest(self.container_name, blob_name, blob_tier)
batch_set_blob_tier_request.append(sub_request)
# Act
resp = self.bs.batch_set_standard_blob_tier(batch_set_blob_tier_request)
blob_ref = self.bs.get_blob_properties(self.container_name, blob_name)
# Assert
self.assertIsNotNone(resp)
self.assertEquals(len(batch_set_blob_tier_request), len(resp))
self.assertEquals(blob_tier, blob_ref.properties.blob_tier)
for sub_response in resp:
self.assertTrue(sub_response.is_successful)
@record
def test_batch_set_three_blob_tier(self):
# Arrange
self.bs.create_container(self.container_name)
tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]
rehydrate_priority = [RehydratePriority.High, RehydratePriority.Standard, RehydratePriority.High]
blob_names = list()
batch_set_blob_tier_request = []
for i in range(0, len(tiers)):
blob_name = str(i)
data = b'hello world'
self.bs.create_blob_from_bytes(self.container_name, blob_name, data)
sub_request = BatchSetBlobTierSubRequest(self.container_name, blob_name, tiers[i], rehydrate_priority[i])
batch_set_blob_tier_request.append(sub_request)
blob_names.append(blob_name)
# Act
resp = self.bs.batch_set_standard_blob_tier(batch_set_blob_tier_request)
blob_refs = list()
for blob_name in blob_names:
blob_refs.append(self.bs.get_blob_properties(self.container_name, blob_name))
# Assert
self.assertIsNotNone(resp)
self.assertEquals(len(batch_set_blob_tier_request), len(resp))
for i in range(0, len(resp)):
self.assertTrue(resp[i].is_successful)
# make sure the tier for each blob is correct
self.assertEquals(tiers[i], blob_refs[i].properties.blob_tier)
@record
def test_batch_set_nine_standard_blob_tier(self):
# To make sure BatchSubResponse is bounded to a correct sub-request
# Arrange
self.bs.create_container(self.container_name)
tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot,
StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot,
StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]
batch_set_blob_tier_request = []
        # For even indices, create the blob so its set-tier sub-request targets an existing blob
        # For odd indices, the set-tier sub-request targets a blob that does not exist
for i in range(0, len(tiers)):
blob_name = str(i)
            if i % 2 == 0:
data = b'hello world'
self.bs.create_blob_from_bytes(self.container_name, blob_name, data)
sub_request = BatchSetBlobTierSubRequest(self.container_name, blob_name, tiers[i])
batch_set_blob_tier_request.append(sub_request)
# Act
resp = self.bs.batch_set_standard_blob_tier(batch_set_blob_tier_request)
# Assert
self.assertIsNotNone(resp)
self.assertEquals(len(batch_set_blob_tier_request), len(resp))
for i in range(0, len(tiers)):
is_successful = resp[i].is_successful
            # for every even indexed sub-request, the tier should be set successfully
            if i % 2 == 0:
                self.assertEquals(is_successful, True, "sub-request " + str(i) + " should be true")
            # For every odd indexed sub-request, there should be a 404 http status code because the blob does not exist
            else:
                self.assertEquals(is_successful, False, "sub-request " + str(i) + " should be false")
self.assertEquals(404, resp[i].http_response.status)
@record
def test_batch_set_standard_blob_tier_api_with_non_askii_blob_name(self):
# Arrange
self.bs.create_container(self.container_name)
tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]
batch_set_blob_tier_request = []
for tier in tiers:
blob_name = "ööööööööö"
data = b'hello world'
self.bs.create_blob_from_bytes(self.container_name, blob_name, data)
sub_request = BatchSetBlobTierSubRequest(self.container_name, blob_name, tier)
batch_set_blob_tier_request.append(sub_request)
# Act
resp = self.bs.batch_set_standard_blob_tier(batch_set_blob_tier_request)
# Assert
self.assertIsNotNone(resp)
self.assertEquals(len(batch_set_blob_tier_request), len(resp))
for sub_response in resp:
self.assertTrue(sub_response.is_successful)
@record
def test_batch_set_non_existing_blob_tier(self):
# Arrange
self.bs.create_container(self.container_name)
tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]
batch_set_blob_tier_request = []
for tier in tiers:
blob_name = self._get_blob_reference()
sub_request = BatchSetBlobTierSubRequest(self.container_name, blob_name, tier)
batch_set_blob_tier_request.append(sub_request)
# Act
resp = self.bs.batch_set_standard_blob_tier(batch_set_blob_tier_request)
# Assert
self.assertIsNotNone(resp)
self.assertEquals(len(batch_set_blob_tier_request), len(resp))
for sub_response in resp:
self.assertFalse(sub_response.is_successful)
@record
def test_rehydration_status(self):
blob_name = 'rehydration_test_blob_1'
blob_name2 = 'rehydration_test_blob_2'
data = b'hello world'
self.bs.create_blob_from_bytes(self.container_name, blob_name, data)
self.bs.set_standard_blob_tier(self.container_name, blob_name, StandardBlobTier.Archive)
self.bs.set_standard_blob_tier(self.container_name, blob_name, StandardBlobTier.Cool)
blob_ref = self.bs.get_blob_properties(self.container_name, blob_name)
self.assertEqual(StandardBlobTier.Archive, blob_ref.properties.blob_tier)
self.assertEqual("rehydrate-pending-to-cool", blob_ref.properties.rehydration_status)
self.assertFalse(blob_ref.properties.blob_tier_inferred)
blobs = list(self.bs.list_blobs(self.container_name))
self.bs.delete_blob(self.container_name, blob_name)
# Assert
self.assertIsNotNone(blobs)
self.assertGreaterEqual(len(blobs), 1)
self.assertIsNotNone(blobs[0])
self.assertNamedItemInContainer(blobs, blob_name)
self.assertEqual(StandardBlobTier.Archive, blobs[0].properties.blob_tier)
self.assertEqual("rehydrate-pending-to-cool", blobs[0].properties.rehydration_status)
self.assertFalse(blobs[0].properties.blob_tier_inferred)
self.bs.create_blob_from_bytes(self.container_name, blob_name2, data)
self.bs.set_standard_blob_tier(self.container_name, blob_name2, StandardBlobTier.Archive)
self.bs.set_standard_blob_tier(self.container_name, blob_name2, StandardBlobTier.Hot)
blob_ref2 = self.bs.get_blob_properties(self.container_name, blob_name2)
self.assertEqual(StandardBlobTier.Archive, blob_ref2.properties.blob_tier)
self.assertEqual("rehydrate-pending-to-hot", blob_ref2.properties.rehydration_status)
self.assertFalse(blob_ref2.properties.blob_tier_inferred)
blobs = list(self.bs.list_blobs(self.container_name))
# Assert
self.assertIsNotNone(blobs)
self.assertGreaterEqual(len(blobs), 1)
self.assertIsNotNone(blobs[0])
self.assertNamedItemInContainer(blobs, blob_name2)
self.assertEqual(StandardBlobTier.Archive, blobs[0].properties.blob_tier)
self.assertEqual("rehydrate-pending-to-hot", blobs[0].properties.rehydration_status)
self.assertFalse(blobs[0].properties.blob_tier_inferred)
# ------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
|
esmvaltool/cmorizers/obs/cmorize_obs_cowtanway.py | cffbots/ESMValTool | 148 | 11192355 | <reponame>cffbots/ESMValTool
"""ESMValTool CMORizer for CowtanWay.
Tier
Tier 2: other freely-available dataset.
Source
https://www-users.york.ac.uk/~kdc3/papers/coverage2013/series.html
Last access
20200226
Download and processing instructions
Download the following files:
'had4_krig_v1_0_0.nc.gz'
'had4_uah_v1_0_0.nc.gz'
'had4_short_krig_v2_0_0.nc.gz'
'had4_short_uah_v2_0_0.nc.gz'
'ghcn_short_krig_v2_0_0.nc.gz'
'ghcn_short_uah_v2_0_0.nc.gz'
'had4sst4_krig_v2_0_0.nc.gz'
'had4_krig_v2_0_0.nc.gz'
"""
import gzip
import logging
import os
import shutil
import iris
from . import utilities as utils
logger = logging.getLogger(__name__)
def _clean(filepath):
"""Remove unzipped input file."""
if os.path.isfile(filepath):
os.remove(filepath)
logger.info("Removed cached file %s", filepath)
def _extract_variable(short_name, var, vkey, version, cfg, filepath, out_dir):
"""Extract variable."""
raw_var = var.get('raw', short_name)
cube = iris.load_cube(filepath, utils.var_name_constraint(raw_var))
# Fix units
cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name).copy()
cube.convert_units(cmor_info.units)
utils.convert_timeunits(cube, 1950)
# Fix coordinates
utils.fix_coords(cube)
if 'height2m' in cmor_info.dimensions:
utils.add_height2m(cube)
# Fix metadata
attrs = cfg['attributes'].copy()
attrs['mip'] = var['mip']
attrs['version'] = version
baseline = cfg['attributes']['baseline'][vkey]
attrs['baseline'] = baseline
attrs['comment'] = attrs['comment'].format(baseline=baseline)
utils.fix_var_metadata(cube, cmor_info)
utils.set_global_atts(cube, attrs)
# Save variable
utils.save_variable(cube,
short_name,
out_dir,
attrs,
unlimited_dimensions=['time'])
def _unzip(short_name, zip_path, out_dir):
"""Unzip `*.gz` file."""
if not os.path.isfile(zip_path):
logger.debug("Skipping '%s', file '%s' not found", short_name,
zip_path)
return None
logger.info("Found input file '%s'", zip_path)
filename = os.path.basename(zip_path.replace('.gz', ''))
new_path = os.path.join(out_dir, filename)
with gzip.open(zip_path, 'rb') as zip_file:
with open(new_path, 'wb') as new_file:
shutil.copyfileobj(zip_file, new_file)
    logger.info("Successfully extracted file to %s", new_path)
return new_path
def cmorization(in_dir, out_dir, cfg, _):
"""Cmorization func call."""
raw_filepath = os.path.join(in_dir, cfg['filename'])
# Run the cmorization
for (short_name, var) in cfg['variables'].items():
for (vkey, version) in cfg['attributes']['version'].items():
logger.info("CMORizing variable '%s' version '%s'",
short_name, version)
zip_filepath = raw_filepath.format(version=version)
filepath = _unzip(short_name, zip_filepath, out_dir)
if filepath is None:
continue
_extract_variable(short_name, var, vkey, version, cfg,
filepath, out_dir)
_clean(filepath)
|
aws_lambda_powertools/utilities/data_classes/s3_event.py | Sordie/aws-lambda-powertools-python | 1,208 | 11192389 | <gh_stars>1000+
from typing import Dict, Iterator, Optional
from urllib.parse import unquote_plus
from aws_lambda_powertools.utilities.data_classes.common import DictWrapper
class S3Identity(DictWrapper):
@property
def principal_id(self) -> str:
return self["principalId"]
class S3RequestParameters(DictWrapper):
@property
def source_ip_address(self) -> str:
return self["requestParameters"]["sourceIPAddress"]
class S3Bucket(DictWrapper):
@property
def name(self) -> str:
return self["s3"]["bucket"]["name"]
@property
def owner_identity(self) -> S3Identity:
return S3Identity(self["s3"]["bucket"]["ownerIdentity"])
@property
def arn(self) -> str:
return self["s3"]["bucket"]["arn"]
class S3Object(DictWrapper):
@property
def key(self) -> str:
"""Object key"""
return self["s3"]["object"]["key"]
@property
def size(self) -> int:
"""Object byte size"""
return int(self["s3"]["object"]["size"])
@property
def etag(self) -> str:
"""object eTag"""
return self["s3"]["object"]["eTag"]
@property
def version_id(self) -> Optional[str]:
"""Object version if bucket is versioning-enabled, otherwise null"""
return self["s3"]["object"].get("versionId")
@property
def sequencer(self) -> str:
"""A string representation of a hexadecimal value used to determine event sequence,
only used with PUTs and DELETEs
"""
return self["s3"]["object"]["sequencer"]
class S3Message(DictWrapper):
@property
def s3_schema_version(self) -> str:
return self["s3"]["s3SchemaVersion"]
@property
def configuration_id(self) -> str:
"""ID found in the bucket notification configuration"""
return self["s3"]["configurationId"]
@property
def bucket(self) -> S3Bucket:
return S3Bucket(self._data)
@property
def get_object(self) -> S3Object:
"""Get the `object` property as an S3Object"""
# Note: this name conflicts with existing python builtins
return S3Object(self._data)
class S3EventRecordGlacierRestoreEventData(DictWrapper):
@property
def lifecycle_restoration_expiry_time(self) -> str:
"""Time when the object restoration will be expired."""
return self["restoreEventData"]["lifecycleRestorationExpiryTime"]
@property
def lifecycle_restore_storage_class(self) -> str:
"""Source storage class for restore"""
return self["restoreEventData"]["lifecycleRestoreStorageClass"]
class S3EventRecordGlacierEventData(DictWrapper):
@property
def restore_event_data(self) -> S3EventRecordGlacierRestoreEventData:
"""The restoreEventData key contains attributes related to your restore request.
The glacierEventData key is only visible for s3:ObjectRestore:Completed events
"""
return S3EventRecordGlacierRestoreEventData(self._data)
class S3EventRecord(DictWrapper):
@property
def event_version(self) -> str:
"""The eventVersion key value contains a major and minor version in the form <major>.<minor>."""
return self["eventVersion"]
@property
def event_source(self) -> str:
"""The AWS service from which the S3 event originated. For S3, this is aws:s3"""
return self["eventSource"]
@property
def aws_region(self) -> str:
"""aws region eg: us-east-1"""
return self["awsRegion"]
@property
def event_time(self) -> str:
"""The time, in ISO-8601 format, for example, 1970-01-01T00:00:00.000Z, when S3 finished
processing the request"""
return self["eventTime"]
@property
def event_name(self) -> str:
"""Event type"""
return self["eventName"]
@property
def user_identity(self) -> S3Identity:
return S3Identity(self["userIdentity"])
@property
def request_parameters(self) -> S3RequestParameters:
return S3RequestParameters(self._data)
@property
def response_elements(self) -> Dict[str, str]:
"""The responseElements key value is useful if you want to trace a request by following up with AWS Support.
Both x-amz-request-id and x-amz-id-2 help Amazon S3 trace an individual request. These values are the same
as those that Amazon S3 returns in the response to the request that initiates the events, so they can be
used to match the event to the request.
"""
return self["responseElements"]
@property
def s3(self) -> S3Message:
return S3Message(self._data)
@property
def glacier_event_data(self) -> Optional[S3EventRecordGlacierEventData]:
"""The glacierEventData key is only visible for s3:ObjectRestore:Completed events."""
item = self.get("glacierEventData")
return None if item is None else S3EventRecordGlacierEventData(item)
class S3Event(DictWrapper):
"""S3 event notification
Documentation:
-------------
- https://docs.aws.amazon.com/lambda/latest/dg/with-s3.html
- https://docs.aws.amazon.com/AmazonS3/latest/dev/NotificationHowTo.html
- https://docs.aws.amazon.com/AmazonS3/latest/dev/notification-content-structure.html
"""
@property
def records(self) -> Iterator[S3EventRecord]:
for record in self["Records"]:
yield S3EventRecord(record)
@property
def record(self) -> S3EventRecord:
"""Get the first s3 event record"""
return next(self.records)
@property
def bucket_name(self) -> str:
"""Get the bucket name for the first s3 event record"""
return self["Records"][0]["s3"]["bucket"]["name"]
@property
def object_key(self) -> str:
"""Get the object key for the first s3 event record and unquote plus"""
return unquote_plus(self["Records"][0]["s3"]["object"]["key"])
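# Illustrative sketch (assumption, not part of the original module): a minimal Lambda
# handler showing how S3Event wraps the raw event dict. Only properties defined above
# are used; the handler name and print call are placeholders for the example.
def _example_handler(event, context):
    parsed = S3Event(event)
    for record in parsed.records:
        # bucket name and object key come from the wrapper classes defined above
        print(record.s3.bucket.name, record.s3.get_object.key)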
|
tests/m3u/__init__.py | grdorin/mopidy | 6,700 | 11192434 | <reponame>grdorin/mopidy<filename>tests/m3u/__init__.py
def generate_song(i):
return f"dummy:track:song{i}"
|
tests/lib/test_take.py | bogdanvuk/pygears | 120 | 11192436 | <reponame>bogdanvuk/pygears
import pytest
from pygears import Intf, gear
from pygears.lib import decouple
from pygears.lib import take
from pygears.lib.delay import delay_rng
from pygears.lib.verif import directed, drv, verif
from pygears.sim import sim
from pygears.typing import Queue, Tuple, Uint
from pygears.util.test_utils import formal_check, synth_check
T_DIN = Queue[Tuple[Uint[16], Uint[16]]]
T_DIN_SEP = Queue[Uint[16]]
T_QDIN_SEP = Queue[Uint[16], 2]
T_CFG = Uint[16]
T_QDIN = Queue[Tuple[Uint[16], Uint[16]], 2]
def get_dut(dout_delay):
@gear
def decoupled(din):
return din | take | decouple
if dout_delay == 0:
return decoupled
return take
@pytest.mark.parametrize('din_delay', [0, 5])
@pytest.mark.parametrize('dout_delay', [0, 5])
def test_directed(sim_cls, din_delay, dout_delay):
seq = []
tmp = []
for i in range(9):
tmp.append((i, 2))
seq.append(tmp)
tmp = []
for i in range(5):
tmp.append((i, 3))
seq.append(tmp)
dut = get_dut(dout_delay)
directed(
drv(t=T_DIN, seq=seq) | delay_rng(din_delay, din_delay),
f=dut(sim_cls=sim_cls),
ref=[[0, 1], [0, 1, 2]],
delays=[delay_rng(dout_delay, dout_delay)])
sim()
def test_directed_two_inputs(cosim_cls):
verif(drv(t=T_DIN_SEP, seq=[list(range(9)), list(range(5))]),
drv(t=T_CFG, seq=[2, 3]),
f=take(sim_cls=cosim_cls),
ref=take(name='ref_model'))
sim()
@pytest.mark.parametrize('delay', [0, 5])
@pytest.mark.parametrize('dout_delay', [0, 5])
def test_q_directed(sim_cls, delay, dout_delay):
seq1 = [[(j, 2) for j in range(3)] for _ in range(9)]
seq2 = [[(j, 3) for j in range(6)] for _ in range(5)]
seq = [seq1, seq2]
dut = get_dut(dout_delay)
directed(
drv(t=T_QDIN, seq=seq) | delay_rng(delay, delay),
f=dut(sim_cls=sim_cls),
ref=[[list(range(3))] * 2, [list(range(6))] * 3],
delays=[delay_rng(dout_delay, dout_delay)])
sim()
@pytest.mark.parametrize('din_delay', [0, 5])
@pytest.mark.parametrize('cfg_delay', [0, 5])
def test_q_directed_two_inputs(sim_cls, din_delay, cfg_delay):
seq1 = [list(range(3)) for _ in range(9)]
seq2 = [list(range(6)) for _ in range(5)]
seq = [seq1, seq2]
directed(drv(t=T_QDIN_SEP, seq=seq) | delay_rng(din_delay, din_delay),
drv(t=T_CFG, seq=[2, 3]) | delay_rng(cfg_delay, cfg_delay),
f=take(sim_cls=sim_cls),
ref=[[list(range(3))] * 2, [list(range(6))] * 3])
sim()
@formal_check()
def test_take_formal():
take(Intf(T_DIN))
@formal_check()
def test_qtake_formal():
take(Intf(T_QDIN))
@synth_check({'logic luts': 19, 'ffs': 16}, tool='vivado')
def test_take_vivado():
take(Intf(T_DIN))
@synth_check({'logic luts': 16, 'ffs': 15}, tool='yosys')
def test_take_yosys():
take(Intf(T_DIN))
|
mangum/backends/base.py | tasn/mangum | 661 | 11192442 | <reponame>tasn/mangum
from dataclasses import dataclass
from typing import Any
@dataclass
class WebSocketBackend: # pragma: no cover
"""
Base class for implementing WebSocket backends to store API Gateway connections.
Data source backends are required to implement configuration using the `dsn`
connection string setting.
"""
dsn: str
async def __aenter__(self) -> "WebSocketBackend":
"""
Establish the connection to a data source.
"""
raise NotImplementedError()
async def __aexit__(self, *exc_info: Any) -> None:
"""
Closes the connection to a data source.
"""
raise NotImplementedError()
async def save(self, connection_id: str, *, json_scope: str) -> None:
"""
Save the JSON scope for a connection.
"""
raise NotImplementedError()
async def retrieve(self, connection_id: str) -> str:
"""
Retrieve the JSON scope for a connection.
"""
raise NotImplementedError()
async def delete(self, connection_id: str) -> None:
"""
Delete the JSON scope for a connection.
"""
raise NotImplementedError()
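# Illustrative sketch (assumption, not part of the original module): a minimal
# in-memory backend implementing the interface above. A real backend would parse
# `dsn` and talk to an external store; the dict storage here is per-process only.
class _InMemoryWebSocketBackend(WebSocketBackend):
    _store = {}  # shared module-level dict; fine for a sketch, not for production
    async def __aenter__(self) -> "WebSocketBackend":
        return self
    async def __aexit__(self, *exc_info: Any) -> None:
        return None
    async def save(self, connection_id: str, *, json_scope: str) -> None:
        self._store[connection_id] = json_scope
    async def retrieve(self, connection_id: str) -> str:
        return self._store[connection_id]
    async def delete(self, connection_id: str) -> None:
        self._store.pop(connection_id, None)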
|
polymorphic_tree/tests/models.py | avaddon/django-polymorphic-tree | 105 | 11192496 | <reponame>avaddon/django-polymorphic-tree<filename>polymorphic_tree/tests/models.py<gh_stars>100-1000
from django.core.exceptions import ValidationError
from django.db import models
from mptt.exceptions import InvalidMove
from polymorphic.showfields import ShowFieldContent
from polymorphic_tree.models import PolymorphicMPTTModel, PolymorphicTreeForeignKey
class PlainA(models.Model):
field1 = models.CharField(max_length=10)
class PlainB(PlainA):
field2 = models.CharField(max_length=10)
class PlainC(PlainB):
field3 = models.CharField(max_length=10)
class Model2A(ShowFieldContent, PolymorphicMPTTModel):
parent = PolymorphicTreeForeignKey('self', blank=True, null=True, related_name='children', verbose_name='parent',
on_delete=models.CASCADE)
field1 = models.CharField(max_length=10)
class Model2B(Model2A):
field2 = models.CharField(max_length=10)
class Model2C(Model2B):
field3 = models.CharField(max_length=10)
class Model2D(Model2C):
field4 = models.CharField(max_length=10)
class One2OneRelatingModel(PolymorphicMPTTModel):
parent = PolymorphicTreeForeignKey('self', blank=True, null=True, related_name='children', verbose_name='parent',
on_delete=models.CASCADE)
one2one = models.OneToOneField(Model2A,
on_delete=models.CASCADE)
field1 = models.CharField(max_length=10)
class One2OneRelatingModelDerived(One2OneRelatingModel):
field2 = models.CharField(max_length=10)
class Base(ShowFieldContent, PolymorphicMPTTModel):
parent = PolymorphicTreeForeignKey('self', blank=True, null=True, related_name='children', verbose_name='parent',
on_delete=models.CASCADE)
field_b = models.CharField(max_length=10)
class ModelX(Base):
field_x = models.CharField(max_length=10)
class ModelY(Base):
field_y = models.CharField(max_length=10)
class ModelWithCustomParentName(PolymorphicMPTTModel):
"""Model with custom parent name
    A model where the ``PolymorphicTreeForeignKey`` attribute is named ``chief``
    instead of the default ``parent``
Attributes:
chief (ModelWithCustomParentName): parent
field5 (str): test field
"""
chief = PolymorphicTreeForeignKey('self',
blank=True,
null=True,
related_name='subordinate',
verbose_name='Chief',
on_delete=models.CASCADE)
field5 = models.CharField(max_length=10)
class MPTTMeta:
parent_attr = 'chief'
def __str__(self):
return self.field5
class ModelWithValidation(PolymorphicMPTTModel):
"""Model with custom validation
A model with redefined ``clean`` and ``validate_move_to`` methods
``clean`` method always raises ``ValidationError``
``validate_move_to`` always calls ``clean``
Attributes:
parent (ModelWithValidation): parent
field6 (str): test field
"""
parent = PolymorphicTreeForeignKey('self',
blank=True,
null=True,
related_name='children',
on_delete=models.CASCADE)
field6 = models.CharField(max_length=10)
def clean(self):
"""Raise validation error"""
raise ValidationError({
'parent': 'There is something with parent field'
})
def validate_move_to(self, target):
"""Execute ``clean``"""
self.clean()
class ModelWithInvalidMove(PolymorphicMPTTModel):
"""Model with custom validation
    A model with only the ``validate_move_to`` method redefined, which always raises
``InvalidMove``
Attributes:
parent (ModelWithValidation): parent
field7 (str): test field
"""
parent = PolymorphicTreeForeignKey('self',
blank=True,
null=True,
related_name='children',
on_delete=models.CASCADE)
field7 = models.CharField(max_length=10)
def validate_move_to(self, target):
"""Raise ``InvalidMove``"""
raise InvalidMove('Invalid move')
class ModelMustBeChildRoot(PolymorphicMPTTModel):
    """Base model for the must-be-child test; this root variant itself may be a root"""
can_be_root = True
parent = PolymorphicTreeForeignKey('self', blank=True, null=True, related_name='children',
on_delete=models.CASCADE)
field8 = models.CharField(max_length=10)
class ModelMustBeChild(ModelMustBeChildRoot):
can_be_root = False
class ModelRestrictedChildren(Base):
child_types = [
ModelX,
]
|
GuiBuilder/PROJECTS/Demo/__main__.py | lon3wolf/MyPyBuilder | 237 | 11192501 | from GuiBuilder.PROJECTS.Demo.MainGui import Gui
class Main(object):
def __init__(self):
self.app = Gui()
self.app.run()
if __name__ == '__main__':
Main()
|
test/python/tests/test_loop.py | bh107/bohrium | 236 | 11192516 | import util
np_dw_loop_src = """
def do_while(func, niters, *args, **kwargs):
import sys
i = 0
if niters is None:
niters = sys.maxsize
while i < niters:
cond = func(*args, **kwargs)
if cond is not None and not cond:
break
i += 1
"""
class test_loop_fixed:
""" Test loop with fixed number of iterations"""
def init(self):
cmd = np_dw_loop_src + """
def kernel(a, b):
b += a * b
a = M.arange(10);
res = M.ones_like(a)
"""
yield (cmd)
def test_func(self, cmd):
"""Test of the loop function"""
return (cmd + "do_while(kernel, 5, a, res)", cmd + "M.do_while(kernel, 5, a, res)")
class test_loop_cond:
""" Test loop with a condition variable"""
def init(self):
cmd = np_dw_loop_src + """
def kernel(a, b):
b += a * b
return M.sum(b) < 10000
a = M.arange(10);
res = M.ones_like(a)
"""
yield (cmd, 1000000)
yield (cmd, 3)
yield (cmd, None)
def test_func(self, args):
"""Test of the do_while function"""
(cmd, niter) = args
return (cmd + "do_while(kernel, %s, a, res)" % (niter), cmd + "M.do_while(kernel, %s, a, res)" % (niter))
np_dw_loop_slide_src = """
def do_while_i(func, niters, *args, **kwargs):
import sys
i = 0
if niters is None:
niters = sys.maxsize
args += (0,)
while i < niters:
args = args[:-1] + (i,)
cond = func(*args, **kwargs)
if cond is not None and not cond:
break
i += 1
"""
class test_loop_faculty_function_using_sliding_views:
    """Test of sliding two views within the do_while loop.
The calculation results in the triangular numbers."""
def init(self):
cmd1 = np_dw_loop_slide_src + \
"""
iter = %s
def kernel(a,i):
a[i+1] += a[i]
res = M.arange(1,iter+2)
do_while_i(kernel, iter, res)
"""
cmd2 = np_dw_loop_src + \
"""
iter = %s
def kernel(a):
i = get_iterator()
a[i+1] += a[i]
res = M.arange(1,iter+2)
M.do_while(kernel, iter, res)
"""
yield (cmd1, cmd2, 10)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % (niter), cmd2 % (niter))
class test_loop_one_and_two_dimensional_sliding_views:
"""Test of sliding two views with a for loop. One view is one-dimensional, while the other is two-dimensional"""
def init(self):
cmd1 = np_dw_loop_slide_src + \
"""
iter = %s
def kernel(a,b,i):
a[i] += b[i, i]
b = M.ones((20, iter))
b[::2, ::2] += 1
b[1::2, 1::2] += 1
res = M.zeros((iter, 1))
do_while_i(kernel, iter, res, b)
"""
cmd2 = np_dw_loop_src + \
"""
iter = %s
def kernel(a, b):
i = get_iterator()
a[i] += b[i, i]
b = M.ones((20, iter))
b[::2, ::2] += 1
b[1::2, 1::2] += 1
res = M.zeros((iter, 1))
M.do_while(kernel, iter, res, b)
"""
yield (cmd1, cmd2, 5)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % (niter), cmd2 % (niter))
class test_loop_sliding_view_index_switch_negative_positive:
"""Test of a sliding view that goes from a negative to a positive index (and vice versa)"""
def init(self):
cmd1 = np_dw_loop_slide_src + \
"""
iter = %s
def kernel(a,i):
a[i] += 1
def kernel2(a,i):
a[-i] += 1
res = M.zeros(iter)
do_while_i(kernel, iter, res)
do_while_i(kernel2, iter, res)
"""
cmd2 = np_dw_loop_src + \
"""
iter = %s
def kernel(a):
i = get_iterator(-2)
a[i] += 1
def kernel2(a):
i = get_iterator(-2)
a[-i] += 1
res = M.zeros(iter)
M.do_while(kernel, iter, res)
M.do_while(kernel2, iter, res)
"""
yield (cmd1, cmd2, 5)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % (niter), cmd2 % (niter))
class test_loop_sliding_view_negative_index_3d:
"""Test of negative sliding in a 3-dimensional view"""
def init(self):
cmd1 = np_dw_loop_slide_src + \
"""
iter = %s
def kernel(a,i):
a[-i, -i, -i] += 1
res = M.zeros((iter,iter,iter))
do_while_i(kernel, iter, res)
"""
cmd2 = np_dw_loop_src + \
"""
iter = %s
def kernel(a):
i = get_iterator()
a[-i, -i, -i] += 1
res = M.zeros((iter,iter,iter))
M.do_while(kernel, iter, res)
"""
yield (cmd1, cmd2, 3)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % (niter), cmd2 % (niter))
class test_loop_sliding_view_out_of_bounds:
    """Test of error checks when sliding out of bounds"""
def init(self):
cmd1 = np_dw_loop_slide_src + \
"""
def kernel(a,i):
a[i] += 1
res = M.zeros(5)
"""
cmd2 = np_dw_loop_src + \
"""
iter = %s
def kernel_out_of_bounds_overflow(a):
i = get_iterator(1)
a[i] += 1
def kernel_out_of_bounds_underflow(a):
i = get_iterator(2)
a[-i] += 1
def kernel(a):
i = get_iterator()
a[i] += 1
dummy = M.zeros(iter)
res = M.zeros(iter)
failure = False
try:
M.do_while(kernel_out_of_bounds_overflow, len(res), dummy)
failure = True
except M.loop.IteratorOutOfBounds:
pass
try:
M.do_while(kernel_out_of_bounds_underflow, len(res), dummy)
failure = True
except M.loop.IteratorOutOfBounds:
pass
if not failure:
M.do_while(kernel, iter, res)
"""
yield (cmd1, cmd2, 5)
def test_func(self, args):
"""Test exceptions of underflow and overflow"""
(cmd1, cmd2, niter) = args
return (cmd1 + "do_while_i(kernel, %s, res)" % (niter), cmd2 % (niter))
class test_3d_grid:
"""Test iterating through a 3d grid"""
def init(self):
cmd1 = np_dw_loop_slide_src + \
"""
iter = (%s, %s, %s)
res = np.zeros(iter)
counter = np.zeros(1)
for i in range(iter[0]):
for j in range(iter[1]):
for k in range(iter[2]):
counter += 1
res[i,j,k] += counter
"""
cmd2 = np_dw_loop_src + \
"""
iter = (%s, %s, %s)
def kernel(res, counter):
i, j, k = get_grid(*iter)
counter += 1
res[i,j,k] += counter
res = bh.zeros(iter)
counter = bh.zeros(1)
M.do_while(kernel, iter[0]*iter[1]*iter[2], res, counter)
"""
yield (cmd1, cmd2, (4,4,4))
    def test_func(self, args):
        """Test iterating through a 3d grid"""
(cmd1, cmd2, niter) = args
return (cmd1 % niter, cmd2 % niter)
class test_dynamic_vector_broadcast:
"""Test broadcasting a single value to a vector with a shape that
changes between each iteration"""
def init(self):
cmd1 = \
"""
iter = %s
res = M.zeros(iter)
b = M.arange(iter)+1
for i in range(1,iter+1):
res[:-i] += b[i-1]
"""
cmd2 = \
"""
iter = %s
def loop_body(res, b):
i = get_iterator(1)
res[:-i] += b[i-1]
res = M.zeros(iter)
b = M.arange(iter)+1
M.do_while(loop_body, iter, res, b)
"""
yield (cmd1, cmd2, 15)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % niter, cmd2 % niter)
class test_dynamic_tensor_broadcast:
"""Test broadcasting a single value to a tensor with a shape that
changes between each iteration"""
def init(self):
cmd1 = \
"""
iter = %s
res = M.zeros((iter,iter,iter))
b = M.arange(iter)+1
for i in range(1,iter+1):
res[:-i,:-i,:-i] += b[i-1]
"""
cmd2 = \
"""
iter = %s
def loop_body(res, b):
i = get_iterator(1)
res[:-i,:-i,:-i] += b[i-1]
res = M.zeros((iter,iter,iter))
b = M.arange(iter)+1
M.do_while(loop_body, iter, res, b)
"""
yield (cmd1, cmd2, 15)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % niter, cmd2 % niter)
class test_gaussian_elimination:
    """Test of Gaussian elimination on a 10 by 10 matrix (equation system)"""
def init(self):
cmd = "R = bh.random.RandomState(42); S = R.random((10,10), dtype=np.float, bohrium=BH); "
cmd1 = cmd + \
"""
for c in range(1, S.shape[0]):
S[c:, c - 1:] -= (S[c:,c-1:c] / S[c-1:c, c-1:c]) * S[c-1:c,c-1:]
S /= np.diagonal(S)[:, None]
res = S
"""
cmd2 = cmd + \
"""
def loop_body(S):
c = get_iterator(1)
S[c:, c - 1:] -= (S[c:,c-1:c] / S[c-1:c, c-1:c]) * S[c-1:c,c-1:]
M.do_while(loop_body, S.shape[0]-1, S)
S /= np.diagonal(S)[:, None]
res = S
"""
yield (cmd1, cmd2)
def test_func(self, args):
(cmd1, cmd2) = args
return (cmd1, cmd2)
class test_nested_dynamic_view:
"""Test of a nested dynamic view"""
def init(self):
cmd1 = \
"""
iter = %s
res = M.zeros(iter**2)
for i in range(iter):
a = res[i:i+iter]
a[i] += 1
"""
cmd2 = \
"""
iter = %s
res = M.zeros(iter**2)
def loop_body(res):
i = get_iterator()
a = res[i:i+iter]
a[i] += 1
M.do_while(loop_body, iter, res)
"""
yield (cmd1, cmd2, 15)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % niter, cmd2 % niter)
class test_advanced_nested_dynamic_view:
"""Test of three nested views"""
def init(self):
cmd1 = \
"""
a = M.zeros(32)
for i in range(2):
b = a[16*i:16*(i+1)-1:2]
b += 1
c = b[4*i:4*(i+1)-1:2]
c += 1
c[i] += 1
res = a
"""
cmd2 = \
"""
def loop_body(a):
i = get_iterator()
b = a[16*i:16*(i+1)-1:2]
b += 1
c = b[4*i:4*(i+1)-1:2]
c += 1
c[i] += 1
res = M.zeros(32)
M.do_while(loop_body, 2, res)
"""
yield (cmd1, cmd2)
def test_func(self, args):
(cmd1, cmd2) = args
return (cmd1, cmd2)
class test_do_while_convolution:
"""Test of a convolution that takes the average of a 3 by 3 window"""
def init(self):
cmd = "R = bh.random.RandomState(42); S = R.random((10,10), dtype=np.float, bohrium=BH); "
cmd1 = cmd + \
"""
a = M.zeros((12,12))
a[1:-1,1:-1] += S
b = M.zeros((10,10))
for i in range(10):
for j in range(10):
point = b[i:i+1, j:j+1]
window = a[i:i+3, j:j+3]
point += M.mean(window)
res = b
"""
cmd2 = cmd +\
"""
def convolution(a, b):
i, j = get_grid(10,10)
point = b[i, j]
window = a[i:i+3, j:j+3]
point += M.mean(window)
a = M.zeros((12,12))
a[1:-1,1:-1] += S
b = M.zeros((10,10))
M.do_while(convolution, b.shape[0] * b.shape[1], a, b)
res = b
"""
yield (cmd1, cmd2)
def test_func(self, args):
(cmd1, cmd2) = args
return (cmd1, cmd2)
class test_temp_arrays_with_changing_shape:
"""Test of temporary arrays with changing shape"""
def init(self):
cmd1 = \
"""
iter = %s
a = M.ones(iter*5)
res = M.zeros(iter)
for i in range(iter):
b = a[i:iter]
c = a[i+iter:2*iter]
d = a[i+2*iter:3*iter]
e = a[i+3*iter:4*iter]
res[i:] += b+c+d+e
"""
cmd2 = \
"""
iter = %s
def loop_body(a, res):
i = get_iterator()
b = a[i:iter]
c = a[i+iter:2*iter]
d = a[i+2*iter:3*iter]
e = a[i+3*iter:4*iter]
res[i:] += b+c+d+e
a = M.ones(iter*5)
res = M.zeros(iter)
M.do_while(loop_body, iter, a, res)
"""
yield (cmd1, cmd2, 5)
def test_func(self, args):
(cmd1, cmd2, niter) = args
return (cmd1 % niter, cmd2 % niter)
class test_loop_illegal_iterator_mix:
"""Test of mixing iterators within a grid illegally"""
def init(self):
cmd1 = np_dw_loop_slide_src + \
"""
def kernel(a,i):
a[i] += 1
res = M.zeros(5)
"""
cmd2 = np_dw_loop_src + \
"""
iter = %s
def iterator_mix1(a):
i, j = get_grid(5,5)
b = a[i:i+5,j:j+5]
c = b[j, i]
c += 1
def iterator_mix2(a):
i, j = get_grid(5,5)
k, l = get_grid(4,4)
b = a[i:i+5]
c = b[k]
c += 1
def kernel(a):
i = get_iterator()
a[i] += 1
a = bh.zeros((10,10))
dummy = M.zeros(iter)
res = M.zeros(iter)
failure = False
try:
M.do_while(iterator_mix1, 2, a)
failure = True
except M.loop.IteratorIllegalDepth:
pass
try:
M.do_while(iterator_mix2, 2, a)
failure = True
except M.loop.IteratorIllegalDepth:
pass
if not failure:
M.do_while(kernel, iter, res)
"""
yield (cmd1, cmd2, 5)
    def test_func(self, args):
        """Test that illegal iterator mixes raise IteratorIllegalDepth"""
(cmd1, cmd2, niter) = args
return (cmd1 + "do_while_i(kernel, %s, res)" % (niter), cmd2 % (niter))
|
tests/loading/schema/test_host_validation.py | maroux/flex | 160 | 11192526 | import itertools
import pytest
from flex.loading.schema import (
swagger_schema_validator,
)
from flex.loading.schema.host import decompose_hostname
from flex.exceptions import ValidationError
from flex.error_messages import MESSAGES
from tests.utils import (
assert_message_in_errors,
assert_path_not_in_errors,
assert_path_in_errors,
)
from tests.factories import (
RawSchemaFactory,
)
@pytest.mark.parametrize(
'scheme,hostname,port,path',
itertools.product(
('http', ''),
('127.0.0.1', 'example.com', 'www.example.com'),
('8000', ''),
('api', ''),
),
)
def test_hostname_decomposition(scheme, hostname, port, path):
"""
Ensure that the hostname decomposition tool works as expected.
"""
value = hostname
if scheme:
value = "{0}://{1}".format(scheme, value)
if port:
value = "{0}:{1}".format(value, port)
if path:
value = "{0}/{1}".format(value, path)
assert decompose_hostname(value) == (scheme, hostname, port, path)
def test_host_is_not_required():
"""
    Test that the host field is not required for overall schema validation.
"""
raw_schema = RawSchemaFactory()
raw_schema.pop('host', None)
assert 'host' not in raw_schema
try:
swagger_schema_validator(raw_schema)
except ValidationError as err:
errors = err.detail
else:
errors = {}
assert_path_not_in_errors(
'host',
errors,
)
@pytest.mark.parametrize(
'value',
(
'127.0.0.1',
'127.0.0.1:8000',
'example.com',
'example.com:8000',
'www.example.com',
'www.example.com:8000',
),
)
def test_valid_host_values(value):
raw_schema = RawSchemaFactory(host=value)
try:
swagger_schema_validator(raw_schema)
except ValidationError as err:
errors = err.detail
else:
errors = {}
assert_path_not_in_errors(
'host',
errors,
)
@pytest.mark.parametrize(
'value',
(
'127.0.0.1/api',
'127.0.0.1:8000/api',
'example.com/api',
'example.com:8000/api',
'www.example.com/api',
'www.example.com:8000/api',
),
)
def test_invalid_host_value_with_path(value):
raw_schema = RawSchemaFactory(host=value)
with pytest.raises(ValidationError) as err:
swagger_schema_validator(raw_schema)
assert_message_in_errors(
MESSAGES['host']['may_not_include_path'],
err.value.detail,
'host.path',
)
@pytest.mark.parametrize(
'value',
(
'http://127.0.0.1/api',
'http://127.0.0.1:8000/api',
'http://example.com/api',
'http://example.com:8000/api',
'http://www.example.com/api',
'http://www.example.com:8000/api',
),
)
def test_invalid_host_value_with_scheme(value):
raw_schema = RawSchemaFactory(host=value)
with pytest.raises(ValidationError) as err:
swagger_schema_validator(raw_schema)
assert_message_in_errors(
MESSAGES['host']['may_not_include_scheme'],
err.value.detail,
'host.scheme',
)
|
unicorn/tests/regress/deadlock_1.py | clayne/unicorn_pe | 491 | 11192551 | <reponame>clayne/unicorn_pe
#!/usr/bin/python
# From issue #1 of <NAME>
from unicorn import *
import regress
CODE = b"\x90\x91\x92"
class DeadLock(regress.RegressTest):
def runTest(self):
mu = Uc(UC_ARCH_X86, UC_MODE_64)
mu.mem_map(0x100000, 4 * 1024)
mu.mem_write(0x100000, CODE)
with self.assertRaises(UcError):
mu.emu_start(0x100000, 0x1000 + len(CODE))
if __name__ == '__main__':
regress.main()
|
mudpi/extensions/t9602/__init__.py | icyspace/mudpi-core | 163 | 11192563 | """
T9602 Extension
Includes sensor interface for T9602.
Works over I2C on Linux boards (typically
a Raspberry Pi).
"""
from mudpi.extensions import BaseExtension
class Extension(BaseExtension):
namespace = 't9602'
update_interval = 30
|
federated_learning/nvflare/nvflare_example_docker/expr_files/download_dataset.py | tommydino93/tutorials | 535 | 11192602 | <reponame>tommydino93/tutorials<filename>federated_learning/nvflare/nvflare_example_docker/expr_files/download_dataset.py
import os
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from monai.apps.utils import download_and_extract
def download_spleen_dataset(root_dir: str):
"""
This function is used to download Spleen dataset for this example.
If you'd like to download other Decathlon datasets, please check
``monai.apps.datasets.DecathlonDataset`` for more details.
"""
url = "https://msd-for-monai.s3-us-west-2.amazonaws.com/Task09_Spleen.tar"
md5 = "410d4a301da4e5b2f6f86ec3ddba524e"
task = "Task09_Spleen"
dataset_dir = os.path.join(root_dir, task)
tarfile_name = f"{dataset_dir}.tar"
download_and_extract(
url=url, filepath=tarfile_name, output_dir=root_dir, hash_val=md5
)
if __name__ == "__main__":
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument(
"-root_dir", type=str, help="the root path to put downloaded file."
)
args = parser.parse_args()
download_spleen_dataset(root_dir=args.root_dir)
|
dirigible/fts/tests/test_2712_ImportCSV.py | EnoX1/dirigible-spreadsheet | 168 | 11192629 | <filename>dirigible/fts/tests/test_2712_ImportCSV.py<gh_stars>100-1000
# Copyright (c) 2010 Resolver Systems Ltd.
# All Rights Reserved
#
from functionaltest import FunctionalTest, PAGE_LOAD_TIMEOUT
import os
class Test_2712_ImportCSV(FunctionalTest):
def test_can_import_excel_generated_csv_to_cursor_position(self):
file_name = 'excel_generated_csv.csv'
# Harold has a csv file he wants to import into a cloud-based
# python-infused spreadsheet
# * Harold logs in to Dirigible and creates a nice shiny new sheet
self.login_and_create_new_sheet()
# * After weeks of frustration at being unable to get any data into the app,
# it's with great joy that he spots a new button called 'import'
self.wait_for_element_visibility('id=id_import_button', True)
self.assertEquals(
self.selenium.get_attribute('id=id_import_button@alt'),
"Import a file"
)
self.assertEquals(
self.selenium.get_attribute('id=id_import_button@title'),
"Import a file"
)
# With preternatural insightfulness, he guesses the button will import
# to the current cursor position. Accordingly, he gives himself a
# little wriggle room
self.click_on_cell(2, 2)
# * He clicks the import CSV button
self.selenium.click('id=id_import_button')
# * He is presented with a jquery dialog that contains a file input element
self.wait_for_element_visibility('id=id_import_form', True)
self.wait_for_element_visibility('id=id_import_form_file', True)
self.wait_for_element_visibility('id=id_import_form_upload_csv_button', False)
self.wait_for_element_visibility('id=id_import_form_upload_xls_values_button', False)
self.wait_for_element_visibility('id=id_import_form_cancel_button', True)
#Harold chooses a file, but changes his mind and clicks cancel
file_name = os.path.join(
os.path.dirname(__file__), 'test_data', file_name
)
self.set_filename_for_upload(file_name, 'id=id_import_form_file')
self.selenium.click('id=id_import_form_cancel_button')
# the dialog disappears
self.wait_for_element_visibility('id=id_import_form', False)
# harold, keen to get data imported, summons up the strength to try again
self.selenium.click('id=id_import_button')
# the dialog reappears
self.wait_for_element_visibility('id=id_import_form', True)
# his previous file choice doesn't
self.assertEquals(
self.selenium.get_value('id=id_import_form_file'),
''
)
# * He clicks on the browse button * He is presented with a file-open
# dialog, and chooses a suitable csv file
self.set_filename_for_upload(file_name, 'id=id_import_form_file')
# He spots a radio button, which is defaulted to the 'excel' option
self.wait_for_element_visibility(
'css=input[type="radio"][name="csv_encoding"][value="excel"]', True
)
self.wait_for_element_visibility(
'css=input[type="radio"][name="csv_encoding"][value="other"]', True
)
self.assertEquals(
self.selenium.get_value(
'css=input[type="radio"][name="csv_encoding"][value="excel"]'
),
'on'
)
# so he clicks the upload button
self.wait_for_element_visibility('id=id_import_form_upload_csv_button', True)
self.selenium.click('id=id_import_form_upload_csv_button')
# * ...and waits for the page to refresh
self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)
self.wait_for_grid_to_appear()
# * and is mightily pleased when his data appears at the cursor position.
self.wait_for_cell_value(2, 2, 'some text')
self.wait_for_cell_value(3, 2, 'text with quotes "\'""\'"')
        # * In order to check that the cell with a carriage return is imported
# ok, he has to check its value via the console, since the \n is
# converted to something else for display in the cell
#self.wait_for_cell_value(4, 2, 'text with a \ncarriage return')
self.append_usercode('print worksheet.D2.value == "text with a \\ncarriage return"')
self.wait_for_console_content('True')
self.wait_for_cell_value(2, 3, 'some european characters:')
self.wait_for_cell_value(3, 3, u'Herg\xe9')
self.wait_for_cell_value(2, 4, 'some european money:')
self.wait_for_cell_value(3, 4, u'pounds: \xa3')
self.wait_for_cell_value(4, 4, u'euros : \u20ac')
self.wait_for_cell_value(2, 5, 'numbers')
self.wait_for_cell_value(2, 6, '1')
self.wait_for_cell_value(3, 6, '2')
self.wait_for_cell_value(4, 6, '3000000000')
def test_can_import_utf8_csv(self):
# Harold has a kawaii csv file he wants to import into a cloud-based
# python-infused spreadsheet
file_name = 'japanese.csv'
# * He creates a new sheet and clicks the import CSV button
self.login_and_create_new_sheet()
self.selenium.click('id=id_import_button')
# the dialog reappears
self.wait_for_element_visibility('id=id_import_form', True)
# * He clicks on the browse button * He is presented with a file-open
# dialog, and chooses a suitable csv file
file_name = os.path.join(
os.path.dirname(__file__), 'test_data', file_name
)
self.set_filename_for_upload(file_name, 'id=id_import_form_file')
# He spots a radio button, which is defaulted to the 'excel' option
self.wait_for_element_visibility(
'css=input[type="radio"][name="csv_encoding"][value="excel"]', True
)
self.wait_for_element_visibility(
'css=input[type="radio"][name="csv_encoding"][value="other"]', True
)
self.assertEquals(
self.selenium.get_value(
'css=input[type="radio"][name="csv_encoding"][value="excel"]'
),
'on'
)
# so he changes the radio button option to 'other'
self.selenium.check(
'css=input[type="radio"][name="csv_encoding"][value="other"]'
)
# and clicks the upload button
self.wait_for_element_visibility('id=id_import_form_upload_csv_button', True)
self.selenium.click('id=id_import_form_upload_csv_button')
# * ...and waits for the page to refresh
self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)
self.wait_for_grid_to_appear()
# * and is mightily pleased when his kanji appear
self.wait_for_cell_value(1, 1, u'\u65b0\u4e16\u7d00\u30a8\u30f4\u30a1\u30f3\u30b2\u30ea\u30aa\u30f3')
def test_bad_files_are_gracefully_handled(self):
# Harold thinks that if he imports an image file,
# it will appear in his spreadsheet
file_name = os.path.join(
os.path.dirname(__file__), 'test_data', 'import_csv_button.png')
# * He logs in to Dirigible and creates a nice shiny new sheet
self.login_and_create_new_sheet()
sheet_url = self.browser.current_url
# * He clicks the import toolbar button
self.selenium.click('id=id_import_button')
# * He is presented with a jquery dialog that contains a file input element
self.wait_for_element_visibility('id=id_import_form', True)
self.wait_for_element_visibility('id=id_import_form_file', True)
self.wait_for_element_visibility('id=id_import_form_cancel_button', True)
# * He clicks on the browse button
# * He is presented with a file-open dialog, and chooses his image file
self.set_filename_for_upload(file_name, 'id=id_import_form_file')
# He clicks the upload button
self.wait_for_element_visibility('id=id_import_form_upload_csv_button', True)
self.selenium.click('id=id_import_form_upload_csv_button')
# * ...and waits for the page to refresh
self.selenium.wait_for_page_to_load(PAGE_LOAD_TIMEOUT)
# * He is presented with an appropriate error page
self.assertEquals(self.browser.title, "CSV Import Error: Dirigible")
self.assertEquals(
self.get_text("id=id_server_error_title"),
"Could not import CSV file"
)
error_text = self.get_text("id=id_server_error_text")
msg = "Sorry, the file you uploaded was not in a recognised CSV format"
self.assertTrue(msg in error_text)
# * There is a link back to the sheet page, which he follows
self.click_link('id_sheet_link')
# And finds himself back on his sheet page.
self.wait_for_grid_to_appear()
self.assertEquals(self.browser.current_url, sheet_url)
|
arsdk-xml/ARSDKBuildUtils/Utils/Python/Common_GenAutotoolsLibraryDoc.py | 2016-Capstone/PythonController | 114 | 11192643 | '''
Copyright (C) 2014 <NAME>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of Parrot nor the names
of its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
'''
from ARFuncs import *
import re
import tempfile
import os
def Common_WriteValuesToDoxyCfgFile(DFile, **kwargs):
for key, value in kwargs.items():
os.write(DFile, '%(key)s = %(value)s\n' % locals())
def Common_GenAutotoolsLibraryDoc(target, lib, clean=False, subdirsToInclude=[], onlyC=True):
args = dict(locals())
StartDumpArgs(**args)
res = False
# Do not generate doc for external lib
if lib.ext:
ARLog('Documentation will not be generated for external libraries')
return EndDumpArgs(res=True, **args)
OutputDir = ARPathFromHere('Targets/%(target)s/Build/Doc/lib%(lib)s' % locals())
# Clean handle
if clean:
ARDeleteIfExists(OutputDir)
return EndDumpArgs(res=True, **args)
# Create output directory
if not os.path.exists(OutputDir):
os.makedirs(OutputDir)
# If the directory (release) is configured, just call "make doxygen-doc"
BuildDir = ARPathFromHere('Targets/%(target)s/Build/lib%(lib)s' % locals())
Makefile = '%(BuildDir)s/Makefile' % locals()
if os.path.exists(Makefile):
bdir = Chdir(BuildDir)
res = ARExecute(os.environ.get('ARMAKE') + ' doxygen-doc')
bdir.exit()
# If make doxygen-doc failed, or if the Makefile does not exists, run doxygen manually
if not res:
DummyDirForRelativePath = ARPathFromHere('Targets/%(target)s/Build/Dummy' % locals())
if not os.path.exists(DummyDirForRelativePath):
os.makedirs(DummyDirForRelativePath)
ConfigureAc = lib.path + '/Build/configure.ac'
DoxyCfg = lib.path + '/Build/doxygen.cfg'
if not os.path.exists(ConfigureAc):
            ARLog('Unable to generate lib%(lib)s documentation' % locals())
            ARLog('lib%(lib)s does not contain a configure.ac file, and was not previously built' % locals())
return EndDumpArgs(res=False, **args)
# Create Doxygen Extra Args
SRCDIR = lib.path + '/Build'
PROJECT = 'lib%(lib)s' % locals()
# -- Search version in configure.ac file
confacfile = open(ConfigureAc)
VERSION = ''
for line in confacfile.readlines():
match = re.search(r'AC_INIT', line)
if match:
                VERSION = re.sub(r'[A-Z_]*\(\[[^]]*\], \[([^]]*)\].*', r'\1', line).strip()
break
confacfile.close()
        if not VERSION:
ARLog('Unable to read version from configure.ac file')
return EndDumpArgs(res=False, **args)
PERL_PATH = os.popen('which perl').read().strip()
HAVE_DOT = 'NO'
if ARExistsInPath('dot'):
HAVE_DOT = 'YES'
# Create temporary configuration file
DoxyCfgFinalFile, DoxyCfgFinalName = tempfile.mkstemp()
# -- Copy original file in the temporary one
DoxyCfgFile = open(DoxyCfg)
for line in DoxyCfgFile.readlines():
os.write(DoxyCfgFinalFile, line)
DoxyCfgFile.close()
# -- Export needed values
ARSetEnv ('PROJECT', PROJECT)
ARSetEnv ('VERSION', VERSION)
ARSetEnv ('SRCDIR', SRCDIR)
# -- Append needed values
Common_WriteValuesToDoxyCfgFile(DoxyCfgFinalFile,
PERL_PATH=PERL_PATH,
HAVE_DOT=HAVE_DOT,
GENERATE_MAN='NO',
GENERATE_RTF='NO',
GENERATE_XML='NO',
GENERATE_HTMLHELP='NO',
GENERATE_CHI='NO',
GENERATE_HTML='YES',
GENERATE_LATEX='NO',)
# -- Append subdirs
for extraDir in subdirsToInclude:
if os.path.exists(lib.path + '/../' + extraDir):
os.write(DoxyCfgFinalFile, 'INPUT += $(SRCDIR)/../%(extraDir)s\n' % locals())
# -- Append Non-C mode
if not onlyC:
Common_WriteValuesToDoxyCfgFile(DoxyCfgFinalFile, OPTIMIZE_OUTPUT_FOR_C='NO')
# -- Close temporary file
os.close(DoxyCfgFinalFile)
# Call doxygen
bdir = Chdir (DummyDirForRelativePath)
res = ARExecute ('doxygen %(DoxyCfgFinalName)s' % locals())
bdir.exit()
ARDeleteIfExists(DummyDirForRelativePath)
os.remove (DoxyCfgFinalName)
return EndDumpArgs(res, **args)
|
deployment/setup.py | ruxi/logomaker | 125 | 11192652 | from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='logomaker',
version='0.8.0',
description='Package for making Sequence Logos',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering :: Bio-Informatics',
],
keywords='Sequence Logos',
url='http://logomaker.readthedocs.io',
author='<NAME> and <NAME>',
author_email='<EMAIL>',
license='MIT',
packages=['logomaker'],
include_package_data=True,
install_requires=[
'numpy',
'matplotlib>=2.2.2',
'pandas'
],
zip_safe=False) |
dnsgate/help.py | jakeogh/dnsgate | 121 | 11192654 | #!/usr/bin/env python3
# tab-width:4
# pylint: disable=missing-docstring
import os
import sys
import time
from pathlib import Path
from .config import dnsmasq_config_file_line
from .global_vars import CACHE_EXPIRE
from .global_vars import CONFIG_FILE
from .global_vars import CUSTOM_BLACKLIST
from .global_vars import CUSTOM_WHITELIST
from .global_vars import DEFAULT_REMOTE_BLACKLISTS
from .global_vars import DNSMASQ_CONFIG_FILE
from .global_vars import OUTPUT_FILE_PATH
def dnsmasq_install_help(*,
dnsmasq_config_file: Path,
output_file: Path,
):
config_file_line = dnsmasq_config_file_line()
print(' $ cp -vi ' + dnsmasq_config_file.as_posix() + ' ' + dnsmasq_config_file.as_posix() + '.bak.' + str(time.time()), file=sys.stderr,)
    print(' $ grep ' + config_file_line + ' ' + dnsmasq_config_file.as_posix() + ' || { echo ' + config_file_line + ' >> ' + dnsmasq_config_file.as_posix() + ' ; }', file=sys.stderr,)
print(' $ /etc/init.d/dnsmasq restart', file=sys.stderr,)
def hosts_install_help(output_file: Path = OUTPUT_FILE_PATH,):
print(' $ mv -vi /etc/hosts /etc/hosts.default', file=sys.stderr)
print(' $ cat /etc/hosts.default ' + output_file.as_posix() + ' > /etc/hosts', file=sys.stderr)
OUTPUT_FILE_HELP = '(for testing) output file (defaults to ' + OUTPUT_FILE_PATH.as_posix() + ')'
DNSMASQ_CONFIG_HELP = 'dnsmasq config file (defaults to ' + DNSMASQ_CONFIG_FILE.as_posix() + ')'
BACKUP_HELP = 'backup output file before overwriting'
INSTALL_HELP_HELP = 'Help configure dnsmasq or /etc/hosts'
SOURCES_HELP = '''remote blacklist(s) to get rules from. Defaults to:
\b
''' + ' '.join(DEFAULT_REMOTE_BLACKLISTS)
WHITELIST_HELP = '''\b
whitelists(s) defaults to:''' + CUSTOM_WHITELIST.as_posix()
BLOCK_AT_PSL_HELP = 'strips subdomains, for example: analytics.google.com -> google.com' + \
' (must manually whitelist inadvertently blocked domains)'
VERBOSE_HELP = 'print debug information to stderr'
NO_CACHE_HELP = 'do not cache --source files as sha1(url) to ~/.dnsgate/cache/'
CACHE_EXPIRE_HELP = 'seconds until cached remote sources are re-downloaded ' + \
'(defaults to ' + str(CACHE_EXPIRE / 3600) + ' hours)'
DEST_IP_HELP = 'IP to redirect blocked connections to (defaults to ' + \
'127.0.0.1 in hosts mode, specifying this in dnsmasq mode causes ' + \
'lookups to resolve rather than return NXDOMAIN)'
NO_RESTART_DNSMASQ_HELP = 'do not restart the dnsmasq service'
BLACKLIST_HELP = 'Add domain(s) to ' + CUSTOM_BLACKLIST.as_posix()
WHITELIST_HELP = 'Add domain(s) to ' + CUSTOM_WHITELIST.as_posix()
DISABLE_HELP = 'Disable ' + OUTPUT_FILE_PATH.as_posix()
ENABLE_HELP = 'Enable ' + OUTPUT_FILE_PATH.as_posix()
CONFIGURE_HELP = '''Write ''' + CONFIG_FILE.as_posix() + '''
\b
[SOURCES] are the ''' + SOURCES_HELP
GENERATE_HELP = 'Create ' + OUTPUT_FILE_PATH.as_posix()
BLOCKALL_HELP = 'return NXDOMAIN on _ALL_ domains'
|
cherry/debug.py | acse-yl27218/cherry | 160 | 11192661 | <filename>cherry/debug.py<gh_stars>100-1000
#!/usr/bin/env python3
"""
General debugging utilities.
"""
import os
import sys
import logging
import traceback
import pdb
import queue
from logging import handlers
from datetime import datetime
IS_DEBUGGING = False
# Sets up general debugger
logger = logging.getLogger('cherry')
logger.setLevel(logging.INFO)
logger.propagate = False
# Handler for normal printing
fmt = logging.Formatter(fmt='%(message)s', datefmt='')
print_handler = logging.StreamHandler(sys.stdout)
print_handler.setFormatter(fmt)
print_handler.setLevel(logging.INFO)
logger.addHandler(print_handler)
def debug(log_dir='./'):
"""
Enables some debugging utilities for logging and pdb.
Includes:
* Automatically dropping into a post-mortem pdb debugger session
whenever an exception is raised.
* Enables fast DEBUG logging to a logging file via QueueHandler.
* Copies all stdout output to the logging file. (Experimental)
**References**
1. Automatically start the debugger on an exception (Python recipe), <NAME>, 2001,
[Link](http://code.activestate.com/recipes/65287-automatically-start-the-debugger-on-an-exception/)
2. Dealing with handlers that block, Python Documentation, 2019.
[Link](https://docs.python.org/3/howto/logging-cookbook.html#dealing-with-handlers-that-block)
**Arguments**
* **log_dir** (str, *optional*, Default: './') - Location to store the log files.
**Example**
~~~python
ch.debug.debug()
raise Exception('My exception')
-> raise('My exception')
(Pdb)
~~~
"""
global IS_DEBUGGING
if not IS_DEBUGGING:
# Enable debugging logging.
now = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
log_file = os.path.join(log_dir, 'cherry_debug_' + now + '.log')
if not os.path.exists(log_dir):
os.mkdir(log_dir)
# Experimental: forward stdout/print to log_file too
log_file = open(log_file, mode='a', buffering=1, encoding='utf-8')
stdout_write = sys.stdout.write
stderr_write = sys.stderr.write
def custom_stdout_write(*args, **kwargs):
stdout_write(*args, **kwargs)
log_file.write(*args, **kwargs)
def custom_stderr_write(*args, **kwargs):
stderr_write(*args, **kwargs)
log_file.write(*args, **kwargs)
def custom_newline_stdout(*args, **kwargs):
custom_stdout_write(*args, **kwargs)
custom_stdout_write('\n')
global print
print = custom_newline_stdout
sys.stdout.write = custom_stdout_write
sys.stderr.write = custom_stderr_write
# Log to file using queue handler and listener
logger.setLevel(logging.DEBUG)
debug_queue = queue.Queue(-1)
queue_handler = handlers.QueueHandler(debug_queue)
logger.addHandler(queue_handler)
debug_fmt = logging.Formatter(fmt='%(asctime)s - %(name)s - %(levelname)s \n%(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
debug_handler = logging.StreamHandler(log_file)
debug_handler.setFormatter(debug_fmt)
debug_handler.setLevel(logging.DEBUG)
queue_listener = handlers.QueueListener(debug_queue, debug_handler)
queue_listener.start()
logger.debug('Debugging started.')
# Enable automatic post-mortem on Exception.
def info(type, value, tb):
if hasattr(sys, 'ps1') or not sys.stderr.isatty():
sys.__excepthook__(type, value, tb)
else:
traceback.print_exception(type, value, tb)
pdb.pm()
sys.excepthook = info
# Turn debug flag on.
IS_DEBUGGING = True
if __name__ == '__main__':
print('This is from print.')
print('This is from print.')
sys.stdout.write('This is from stdout.')
logger.debug('debug')
logger.info('info')
debug(log_dir='./logs')
debug()
logger.info('info')
logger.debug('debug')
print('This is from print.')
print('This is from print.')
sys.stdout.write('This is from stdout.')
raise Exception('haha')
|
third_party/graphy/graphy/backends/google_chart_api/line_chart_test.py | tingshao/catapult | 2,151 | 11192682 | <filename>third_party/graphy/graphy/backends/google_chart_api/line_chart_test.py
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittest for Graphy and Google Chart API backend."""
from graphy import common
from graphy import graphy_test
from graphy import line_chart
from graphy.backends import google_chart_api
from graphy.backends.google_chart_api import base_encoder_test
# Extend XYChartTest so that we pick up & repeat all the basic tests which
# LineCharts should continue to satisfy
class LineChartTest(base_encoder_test.XYChartTest):
def GetChart(self, *args, **kwargs):
return google_chart_api.LineChart(*args, **kwargs)
def AddToChart(self, chart, points, color=None, label=None):
return chart.AddLine(points, color=color, label=label)
def testChartType(self):
self.assertEqual(self.Param('cht'), 'lc')
def testMarkers(self):
x = common.Marker('x', '0000FF', 5)
o = common.Marker('o', '00FF00', 5)
line = common.Marker('V', 'dddddd', 1)
self.chart.AddLine([1, 2, 3], markers=[(1, x), (2, o), (3, x)])
self.chart.AddLine([4, 5, 6], markers=[(x, line) for x in range(3)])
x = 'x,0000FF,0,%s,5'
o = 'o,00FF00,0,%s,5'
V = 'V,dddddd,1,%s,1'
actual = self.Param('chm')
expected = [m % i for i, m in zip([1, 2, 3, 0, 1, 2], [x, o, x, V, V, V])]
expected = '|'.join(expected)
error_msg = '\n%s\n!=\n%s' % (actual, expected)
self.assertEqual(actual, expected, error_msg)
def testLinePatterns(self):
self.chart.AddLine([1, 2, 3])
self.chart.AddLine([4, 5, 6], pattern=line_chart.LineStyle.DASHED)
self.assertEqual(self.Param('chls'), '1,1,0|1,8,4')
def testMultipleAxisLabels(self):
self.ExpectAxes('', '')
left_axis = self.chart.AddAxis(common.AxisPosition.LEFT,
common.Axis())
left_axis.labels = [10, 20, 30]
left_axis.label_positions = [0, 50, 100]
self.ExpectAxes('0:|10|20|30', '0,0,50,100')
bottom_axis = self.chart.AddAxis(common.AxisPosition.BOTTOM,
common.Axis())
bottom_axis.labels = ['A', 'B', 'c', 'd']
bottom_axis.label_positions = [0, 33, 66, 100]
sub_axis = self.chart.AddAxis(common.AxisPosition.BOTTOM,
common.Axis())
sub_axis.labels = ['CAPS', 'lower']
sub_axis.label_positions = [0, 50]
self.ExpectAxes('0:|10|20|30|1:|A|B|c|d|2:|CAPS|lower',
'0,0,50,100|1,0,33,66,100|2,0,50')
self.chart.AddAxis(common.AxisPosition.RIGHT, left_axis)
self.ExpectAxes('0:|10|20|30|1:|10|20|30|2:|A|B|c|d|3:|CAPS|lower',
'0,0,50,100|1,0,50,100|2,0,33,66,100|3,0,50')
self.assertEqual(self.Param('chxt'), 'y,r,x,x')
def testAxisProperties(self):
self.ExpectAxes('', '')
self.chart.top.labels = ['cow', 'horse', 'monkey']
self.chart.top.label_positions = [3.7, 10, -22.9]
self.ExpectAxes('0:|cow|horse|monkey', '0,3.7,10,-22.9')
self.chart.left.labels = [10, 20, 30]
self.chart.left.label_positions = [0, 50, 100]
self.ExpectAxes('0:|10|20|30|1:|cow|horse|monkey',
'0,0,50,100|1,3.7,10,-22.9')
self.assertEqual(self.Param('chxt'), 'y,t')
sub_axis = self.chart.AddAxis(common.AxisPosition.BOTTOM,
common.Axis())
sub_axis.labels = ['CAPS', 'lower']
sub_axis.label_positions = [0, 50]
self.ExpectAxes('0:|10|20|30|1:|CAPS|lower|2:|cow|horse|monkey',
'0,0,50,100|1,0,50|2,3.7,10,-22.9')
self.assertEqual(self.Param('chxt'), 'y,x,t')
self.chart.bottom.labels = ['A', 'B', 'C']
self.chart.bottom.label_positions = [0, 33, 66]
self.ExpectAxes('0:|10|20|30|1:|A|B|C|2:|CAPS|lower|3:|cow|horse|monkey',
'0,0,50,100|1,0,33,66|2,0,50|3,3.7,10,-22.9')
self.assertEqual(self.Param('chxt'), 'y,x,x,t')
# Extend LineChartTest so that we pick up & repeat all the line tests which
# Sparklines should continue to satisfy
class SparklineTest(LineChartTest):
def GetChart(self, *args, **kwargs):
return google_chart_api.Sparkline(*args, **kwargs)
def testChartType(self):
self.assertEqual(self.Param('cht'), 'lfi')
if __name__ == '__main__':
graphy_test.main()
|
mmdet/datasets/pipelines/pixel_aug_pil.py | Qianna00/InstanceLoc | 120 | 11192687 | <gh_stars>100-1000
import random
import cv2
import mmcv
import numpy as np
import torchvision.transforms as transforms
from PIL import Image, ImageFilter
from ..builder import PIPELINES
class GaussianBlur(object):
"""Gaussian blur augmentation in SimCLR https://arxiv.org/abs/2002.05709"""
def __init__(self, sigma=[.1, 2.]):
self.sigma = sigma
def __call__(self, x):
sigma = random.uniform(self.sigma[0], self.sigma[1])
x = x.filter(ImageFilter.GaussianBlur(radius=sigma))
return x
@PIPELINES.register_module()
class PixelAugPil(object):
""" Apply the same augmentation as the MoCoV2.
"""
def __init__(self, to_rgb=False):
# MoCo v2's aug: similar to SimCLR https://arxiv.org/abs/2002.05709
augmentation = [
transforms.RandomApply(
[
transforms.ColorJitter(0.4, 0.4, 0.4,
0.1) # not strengthened
],
p=0.8),
transforms.RandomGrayscale(p=0.2),
transforms.RandomApply([GaussianBlur([.1, 2.])], p=0.5),
]
self.transforms = transforms.Compose(augmentation)
self.to_rgb = to_rgb
def __call__(self, results):
bgr_img = results['img']
pil_img = Image.fromarray(bgr_img[:, :, ::-1]) # BGR2RGB first
out_pil_img = self.transforms(pil_img)
out_rgb_img = np.array(out_pil_img)
if self.to_rgb:
results['img'] = out_rgb_img
else:
results['img'] = out_rgb_img[:, :, ::-1] # RGB2BGR
return results
|
plato/agent/component/dialogue_policy/dialogue_policy.py | avmi/plato-research-dialogue-system | 899 | 11192688 | <reponame>avmi/plato-research-dialogue-system<filename>plato/agent/component/dialogue_policy/dialogue_policy.py
"""
Copyright (c) 2019-2020 Uber Technologies, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__author__ = "<NAME>"
from plato.agent.component.conversational_module \
import ConversationalModule
from abc import abstractmethod
"""
DialoguePolicy is the abstract parent class of all policies and defines the
interface that each dialogue policy derived class should adhere to.
"""
class DialoguePolicy(ConversationalModule):
def __init__(self):
"""
Initialize the internal structures of the dialogue policy
"""
super(DialoguePolicy, self).__init__()
@abstractmethod
def initialize(self, args):
"""
Initialize internal structures at the beginning of each dialogue
:return: Nothing
"""
pass
@abstractmethod
def restart(self, **kwargs):
"""
Re-initialize relevant parameters / variables at the beginning of each
dialogue.
:return:
"""
pass
@abstractmethod
def next_action(self, state):
"""
Consult the internal model and produce the agent's response, given
the current state
:param state: the current dialogue state
:return:
"""
pass
# From the ConversationalModule interface
def generate_output(self, args=None):
"""
:param args:
:return:
"""
# Unpack args
if isinstance(args, dict):
if 'args' in args:
args = args['args']
else:
raise ValueError('DialoguePolicy: unacceptable input!')
return self.next_action(args)
@abstractmethod
def train(self, dialogues):
"""
Train the dialogue policy's internal model
:param dialogues: the dialogue experience
:return:
"""
pass
@abstractmethod
def save(self, path=None):
"""
Save the internal model to the path provided (or to a default one)
:param path: the path to save the model to
:return:
"""
pass
@abstractmethod
def load(self, path):
"""
Load the model from the path provided
:param path: the path to load the model from
:return:
"""
pass
|
dags/cm360_segmentology_dag.py | Ressmann/starthinker | 138 | 11192690 | ###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
#
# This code generated (see starthinker/scripts for possible source):
# - Command: "python starthinker_ui/manage.py airflow"
#
###########################################################################
'''
--------------------------------------------------------------
Before running this Airflow module...
Install StarThinker in cloud composer ( recommended ):
From Release: pip install starthinker
From Open Source: pip install git+https://github.com/google/starthinker
Or push local code to the cloud composer plugins directory ( if pushing local code changes ):
source install/deploy.sh
4) Composer Menu
l) Install All
--------------------------------------------------------------
If any recipe task has "auth" set to "user" add user credentials:
1. Ensure an RECIPE['setup']['auth']['user'] = [User Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_user", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/deploy_commandline.md#optional-setup-user-credentials
--------------------------------------------------------------
If any recipe task has "auth" set to "service" add service credentials:
1. Ensure an RECIPE['setup']['auth']['service'] = [Service Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_service", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/cloud_service.md
--------------------------------------------------------------
CM360 Segmentology
CM360 funnel analysis using Census data.
- Wait for <b>BigQuery->->->Census_Join</b> to be created.
- Join the <a href='https://groups.google.com/d/forum/starthinker-assets' target='_blank'>StarThinker Assets Group</a> to access the following assets
- Copy <a href='https://datastudio.google.com/c/u/0/reporting/3673497b-f36f-4448-8fb9-3e05ea51842f/' target='_blank'>CM360 Segmentology Sample</a>. Leave the Data Source as is, you will change it in the next step.
- Click Edit Connection, and change to <b>BigQuery->->->Census_Join</b>.
  - Or give these instructions to the client.
--------------------------------------------------------------
This StarThinker DAG can be extended with any additional tasks from the following sources:
- https://google.github.io/starthinker/
- https://github.com/google/starthinker/tree/master/dags
'''
from starthinker.airflow.factory import DAG_Factory
INPUTS = {
'account':'',
'auth_read':'user', # Credentials used for reading data.
'auth_write':'service', # Authorization used for writing data.
'recipe_name':'', # Name of report, not needed if ID used.
'date_range':'LAST_365_DAYS', # Timeframe to run report for.
'recipe_slug':'', # Name of Google BigQuery dataset to create.
'advertisers':[], # Comma delimited list of CM360 advertiser ids.
}
RECIPE = {
'tasks':[
{
'dataset':{
'description':'Create a dataset for bigquery tables.',
'hour':[
4
],
'auth':{'field':{'name':'auth_write','kind':'authentication','order':1,'default':'service','description':'Credentials used for writing data.'}},
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','description':'Place where tables will be created in BigQuery.'}}
}
},
{
'bigquery':{
'auth':{'field':{'name':'auth_write','kind':'authentication','order':1,'default':'service','description':'Credentials used for writing function.'}},
'function':'Pearson Significance Test',
'to':{
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','order':4,'default':'','description':'Name of Google BigQuery dataset to create.'}}
}
}
},
{
'google_api':{
'auth':'user',
'api':'dfareporting',
'version':'v3.4',
'function':'accounts.get',
'kwargs':{
'id':{'field':{'name':'account','kind':'integer','order':5,'default':'','description':'Campaign Manager Account ID'}},
'fields':'id,name'
},
'results':{
'bigquery':{
'auth':{'field':{'name':'auth_write','kind':'authentication','order':1,'default':'service','description':'Credentials used for writing function.'}},
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','order':4,'default':'','description':'Name of Google BigQuery dataset to create.'}},
'table':'CM360_Account'
}
}
}
},
{
'dcm':{
'auth':{'field':{'name':'auth_read','kind':'authentication','order':0,'default':'user','description':'Credentials used for reading data.'}},
'report':{
'filters':{
'advertiser':{
'values':{'field':{'name':'advertisers','kind':'integer_list','order':6,'default':[],'description':'Comma delimited list of CM360 advertiser ids.'}}
}
},
'account':{'field':{'name':'account','kind':'string','order':5,'default':'','description':'Campaign Manager Account ID'}},
'body':{
'name':{'field':{'name':'recipe_name','kind':'string','suffix':' Segmentology','description':'The report name.','default':''}},
'criteria':{
'dateRange':{
'kind':'dfareporting#dateRange',
'relativeDateRange':{'field':{'name':'date_range','kind':'choice','order':3,'default':'LAST_365_DAYS','choices':['LAST_7_DAYS','LAST_14_DAYS','LAST_30_DAYS','LAST_365_DAYS','LAST_60_DAYS','LAST_7_DAYS','LAST_90_DAYS','LAST_24_MONTHS','MONTH_TO_DATE','PREVIOUS_MONTH','PREVIOUS_QUARTER','PREVIOUS_WEEK','PREVIOUS_YEAR','QUARTER_TO_DATE','WEEK_TO_DATE','YEAR_TO_DATE'],'description':'Timeframe to run report for.'}}
},
'dimensions':[
{
'kind':'dfareporting#sortedDimension',
'name':'advertiserId'
},
{
'kind':'dfareporting#sortedDimension',
'name':'advertiser'
},
{
'kind':'dfareporting#sortedDimension',
'name':'zipCode'
}
],
'metricNames':[
'impressions',
'clicks',
'totalConversions'
]
},
'type':'STANDARD',
'delivery':{
'emailOwner':False
},
'format':'CSV'
}
}
}
},
{
'dcm':{
'auth':{'field':{'name':'auth_read','kind':'authentication','order':0,'default':'user','description':'Credentials used for reading data.'}},
'report':{
'account':{'field':{'name':'account','kind':'string','default':''}},
'name':{'field':{'name':'recipe_name','kind':'string','order':3,'suffix':' Segmentology','default':'','description':'Name of report, not needed if ID used.'}}
},
'out':{
'bigquery':{
'auth':{'field':{'name':'auth_write','kind':'authentication','order':1,'default':'service','description':'Authorization used for writing data.'}},
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','order':4,'default':'','description':'Name of Google BigQuery dataset to create.'}},
'table':'CM360_KPI',
'header':True
}
}
}
},
{
'bigquery':{
'auth':{'field':{'name':'auth_write','kind':'authentication','order':1,'default':'service','description':'Authorization used for writing data.'}},
'from':{
'query':'SELECT Id AS Partner_Id, Name AS Partner, Advertiser_Id, Advertiser, Zip_Postal_Code AS Zip, SAFE_DIVIDE(Impressions, SUM(Impressions) OVER(PARTITION BY Advertiser_Id)) AS Impression, SAFE_DIVIDE(Clicks, Impressions) AS Click, SAFE_DIVIDE(Total_Conversions, Impressions) AS Conversion, Impressions AS Impressions FROM `{dataset}.CM360_KPI` CROSS JOIN `{dataset}.CM360_Account` ',
'parameters':{
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','description':'Place where tables will be created in BigQuery.'}}
},
'legacy':False
},
'to':{
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','description':'Place where tables will be written in BigQuery.'}},
'view':'CM360_KPI_Normalized'
}
}
},
{
'census':{
'auth':{'field':{'name':'auth_write','kind':'authentication','order':1,'default':'service','description':'Authorization used for writing data.'}},
'normalize':{
'census_geography':'zip_codes',
'census_year':'2018',
'census_span':'5yr'
},
'to':{
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','order':4,'default':'','description':'Name of Google BigQuery dataset to create.'}},
'type':'view'
}
}
},
{
'census':{
'auth':{'field':{'name':'auth_write','kind':'authentication','order':1,'default':'service','description':'Authorization used for writing data.'}},
'correlate':{
'join':'Zip',
'pass':[
'Partner_Id',
'Partner',
'Advertiser_Id',
'Advertiser'
],
'sum':[
'Impressions'
],
'correlate':[
'Impression',
'Click',
'Conversion'
],
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','order':4,'default':'','description':'Name of Google BigQuery dataset to create.'}},
'table':'CM360_KPI_Normalized',
'significance':80
},
'to':{
'dataset':{'field':{'name':'recipe_slug','kind':'string','suffix':'_Segmentology','order':4,'default':'','description':'Name of Google BigQuery dataset to create.'}},
'type':'view'
}
}
}
]
}
dag_maker = DAG_Factory('cm360_segmentology', RECIPE, INPUTS)
dag = dag_maker.generate()
if __name__ == "__main__":
dag_maker.print_commandline()
|
LeetCode/0437_Path_Sum_III.py | Achyut-sudo/PythonAlgorithms | 144 | 11192700 | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
count = 0
past_sums = {}
def pathSum(self, root: TreeNode, sum: int) -> int:
if root is not None:
self.recurse(root, 0, sum)
return self.count
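    # Prefix-sum technique: past_sums counts how many prefixes of the current
    # root-to-node path have each running sum, so any prefix whose sum equals
    # summed - target marks the start of a downward path that sums to target.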
def recurse(self, node, summed, target):
if node is None:
return False
else:
summed += node.val
if summed == target:
self.count += 1
if summed - target in self.past_sums:
self.count += self.past_sums[summed - target]
if summed in self.past_sums:
self.past_sums[summed] += 1
else:
self.past_sums[summed] = 1
self.recurse(node.left, summed, target)
self.recurse(node.right, summed, target)
self.past_sums[summed] -= 1
|
insights/parsers/tests/test_ls_var_lib_nova_instances.py | lhuett/insights-core | 121 | 11192715 | import doctest
from insights.parsers import ls_var_lib_nova_instances as ls_instances
from insights.tests import context_wrap
LS_VAR_LIB_NOVA_INSTANCES = '''
/var/lib/nova/instances/:
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 .
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 ..
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 11415c6c-a2a5-45f0-a198-724246b96631
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 _base
-rw-r--r--. nova nova system_u:object_r:nova_var_lib_t:s0 compute_nodes
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 locks
/var/lib/nova/instances/11415c6c-a2a5-45f0-a198-724246b96631:
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 .
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 ..
-rw-------. root root system_u:object_r:nova_var_lib_t:s0 console.log
-rw-r--r--. qemu qemu system_u:object_r:svirt_image_t:s0:c92,c808 disk
-rw-r--r--. nova nova system_u:object_r:nova_var_lib_t:s0 disk.info
/var/lib/nova/instances/_base:
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 .
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 ..
-rw-r--r--. qemu qemu system_u:object_r:virt_content_t:s0 572dfdb7e1d9304342cbe1fd5e3da4ff2e55c7a6
/var/lib/nova/instances/locks:
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 .
drwxr-xr-x. nova nova system_u:object_r:nova_var_lib_t:s0 ..
-rw-r--r--. nova nova system_u:object_r:nova_var_lib_t:s0 nova-572dfdb7e1d9304342cbe1fd5e3da4ff2e55c7a6
-rw-r--r--. nova nova system_u:object_r:nova_var_lib_t:s0 nova-storage-registry-lock
'''.strip()
LS_R_VAR_LIB_NOVA_INSTANCES = '''
/var/lib/nova/instances:
total 4
drwxr-xr-x. 5 nova nova 97 Feb 20 2017 .
drwxr-xr-x. 9 nova nova 111 Feb 17 2017 ..
drwxr-xr-x. 2 nova nova 54 Feb 17 2017 _base
-rw-r--r--. 1 nova nova 44 May 26 2017 compute_nodes
drwxr-xr-x. 2 nova nova 54 Feb 17 2017 e560e649-41fd-46a2-a3d2-5f4750ba2bb4
drwxr-xr-x. 2 nova nova 93 Feb 17 2017 locks
/var/lib/nova/instances/_base:
total 18176
drwxr-xr-x. 2 nova nova 54 Feb 17 2017 .
drwxr-xr-x. 5 nova nova 97 Feb 20 2017 ..
-rw-r--r--. 1 qemu qemu 41126400 May 26 2017 faf1184c098da91e90290a920b8fab1ee6e1d4c4
/var/lib/nova/instances/e560e649-41fd-46a2-a3d2-5f4750ba2bb4:
total 2104
drwxr-xr-x. 2 nova nova 54 Feb 17 2017 .
drwxr-xr-x. 5 nova nova 97 Feb 20 2017 ..
-rw-r--r--. 1 qemu qemu 48957 Feb 20 2017 console.log
-rw-r--r--. 1 qemu qemu 2097152 Feb 20 2017 disk
-rw-r--r--. 1 nova nova 79 Feb 17 2017 disk.info
/var/lib/nova/instances/locks:
total 0
drwxr-xr-x. 2 nova nova 93 Feb 17 2017 .
drwxr-xr-x. 5 nova nova 97 Feb 20 2017 ..
-rw-r--r--. 1 nova nova 0 Feb 17 2017 nova-faf1184c098da91e90290a920b8fab1ee6e1d4c4
-rw-r--r--. 1 nova nova 0 Feb 17 2017 nova-storage-registry-lock
'''.strip()
def test_ls_var_lib_nova_instances():
ls_var_lib_nova_instances = ls_instances.LsVarLibNovaInstances(context_wrap(LS_VAR_LIB_NOVA_INSTANCES))
assert ls_var_lib_nova_instances.dirs_of('/var/lib/nova/instances/') == ['.', '..', '11415c6c-a2a5-45f0-a198-724246b96631', '_base', 'locks']
assert ls_var_lib_nova_instances.listings['/var/lib/nova/instances/11415c6c-a2a5-45f0-a198-724246b96631']['entries']['console.log']['se_type'] == 'nova_var_lib_t'
assert ls_var_lib_nova_instances.dir_entry('/var/lib/nova/instances/locks', 'nova-storage-registry-lock') == {'se_type': 'nova_var_lib_t', 'name': 'nova-storage-registry-lock', 'perms': 'rw-r--r--.', 'se_user': 'system_u', 'raw_entry': '-rw-r--r--. nova nova system_u:object_r:nova_var_lib_t:s0 nova-storage-registry-lock', 'se_mls': 's0', 'se_role': 'object_r', 'owner': 'nova', 'group': 'nova', 'type': '-', 'dir': '/var/lib/nova/instances/locks'}
assert ls_var_lib_nova_instances.dir_entry('/var/lib/nova/instances/11415c6c-a2a5-45f0-a198-724246b96631', 'console.log')['owner'] == 'root'
def test_ls_r_var_lib_nova_instances():
ls_r_instances = ls_instances.LsRVarLibNovaInstances(context_wrap(LS_R_VAR_LIB_NOVA_INSTANCES))
assert ls_r_instances.dir_entry('/var/lib/nova/instances/e560e649-41fd-46a2-a3d2-5f4750ba2bb4', 'console.log')['size'] == 48957
def test_ls_var_lib_nova_instances_doc_examples():
failed, total = doctest.testmod(
ls_instances,
globs={'ls_var_lib_nova_instances': ls_instances.LsVarLibNovaInstances(context_wrap(LS_VAR_LIB_NOVA_INSTANCES)),
'ls_r_var_lib_nova_instances': ls_instances.LsRVarLibNovaInstances(context_wrap(LS_R_VAR_LIB_NOVA_INSTANCES))}
)
assert failed == 0
|
DQM/SiStripCommon/python/EventAnalyzer_cfi.py | ckamtsikis/cmssw | 852 | 11192750 | import FWCore.ParameterSet.Config as cms
EventAnalyzer = cms.EDAnalyzer("EventContentAnalyzer")
|
topicnet/embeddings/api.py | machine-intelligence-laboratory/topicnet | 123 | 11192753 | <reponame>machine-intelligence-laboratory/topicnet
import os
import shutil
import ssl
import sys
from tqdm import tqdm
from urllib.request import (
Request,
urlopen,
)
from ..cooking_machine.models import TopicModel
_SERVER_URL = 'http://172.16.31.10:8885'
_ARCHIVE_EXTENSION = '.tar.gz'
def load_model(model_name: str) -> TopicModel:
"""
Load model by model_name.
Run ``get_info()`` to get model information
Parameters
----------
model_name: str
name of model to download
"""
# model_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), model_name)
model_path = os.path.join('.', model_name)
if os.path.isdir(model_path):
print(f'The model "{model_name}" was already downloaded before, loading it...')
return TopicModel.load(model_path)
req = Request(_SERVER_URL + '/' + model_name)
context = ssl._create_unverified_context()
print(f'Downloading the "{model_name}" model...')
save_path = None
try:
# with urlopen(req, data=data, context=context) as answer:
with urlopen(req, context=context) as answer:
total_size = int(answer.headers.get('content-length', 0))
block_size = 1024
save_path = model_path # + answer.getheader('file-extension')
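            # Stream the archive in 1 KiB chunks while updating a tqdm progress
            # bar, so large model archives never have to be held in memory at once.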
t = tqdm(total=total_size, unit='iB', unit_scale=True, file=sys.stdout)
with open(save_path + _ARCHIVE_EXTENSION, 'wb') as f:
while True:
chunk = answer.read(block_size)
if not chunk:
break
t.update(len(chunk))
f.write(chunk)
t.close()
if total_size != 0 and t.n != total_size:
raise RuntimeError(
"Failed to download the model!"
" Some data was lost during network transfer"
)
shutil.unpack_archive(save_path + _ARCHIVE_EXTENSION, save_path)
return TopicModel.load(save_path)
except Exception as exception:
if save_path is not None and os.path.isfile(save_path):
os.remove(save_path)
raise exception
finally:
if save_path is not None and os.path.isfile(save_path + _ARCHIVE_EXTENSION):
os.remove(save_path + _ARCHIVE_EXTENSION)
|
RecoMET/METFilters/python/HcalStripHaloFilter_cfi.py | ckamtsikis/cmssw | 852 | 11192756 | import FWCore.ParameterSet.Config as cms
HcalStripHaloFilter = cms.EDFilter(
"HcalStripHaloFilter",
taggingMode = cms.bool(False),
maxWeightedStripLength = cms.int32(7),
maxEnergyRatio = cms.double(0.15),
minHadEt = cms.double(100.0)
)
|
wbb/core/types/__init__.py | DopeBotz/WilliamButcherBot-1 | 175 | 11192758 | # flake8: noqa
from .InlineQueryResult import (InlineQueryResultAudio,
InlineQueryResultCachedDocument)
|
实践案例/B15-基于深度学习的代码搜索案例/src/3Model/args.py | microsoft/ai-edu | 11,094 | 11192767 | <filename>实践案例/B15-基于深度学习的代码搜索案例/src/3Model/args.py
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
import argparse
def add_common_model_arguments(parser: argparse.ArgumentParser) -> None:
"""Add common arguments like `--dataset_path`, `--model_path`, `--gpu`,
`--load`, `--epoch`, `--batch_size`, `--learning_rate`, `--log_every_iter`,
`--valid_every_epoch`, `--save_every_epoch`, `--comment`."""
parser.add_argument('--dataset_path', help='path to the dataset')
parser.add_argument('--model_path', help='path for saving models and codes',
required=True)
parser.add_argument('--gpu', type=lambda x: list(map(int, x.split(','))),
default=[], nargs='?', const=[-1],
                        help='GPU ids split by ",", no ids to use all GPUs')
parser.add_argument('--load', type=int, default=0,
help='load module training at give epoch')
parser.add_argument('--epoch', type=int, default=200, help='epoch to train')
parser.add_argument('--batch_size', type=int, default=32, help='batch size')
parser.add_argument('--learning_rate', type=float, default=0.001,
help='learning rate')
parser.add_argument('--log_every_iter', type=int, default=100,
help='log loss every numbers of iteration')
parser.add_argument('--valid_every_epoch', type=int, default=10,
help='run validation every numbers of epoch; '
'0 for disabling')
parser.add_argument('--save_every_epoch', type=int, default=10,
help='save model every numbers of epoch; '
'0 for disabling')
parser.add_argument('--comment', default='', help='comment for tensorboard')
def add_server_arguments(parser: argparse.ArgumentParser) -> None:
"""Add server arguments like `--port` and `--host`."""
parser.add_argument('--port', type=int, default=8080,
help='port to serve at')
parser.add_argument('--host', default='0.0.0.0', help='address to serve at')
def add_retrieval_eval_arguments(parser: argparse.ArgumentParser) -> None:
"""Add retrieval evaluation arguments like `--eval_pool_size` and
`--eval_k`."""
parser.add_argument('--eval_pool_size', type=int, default=200,
help='pool size for evaluation')
parser.add_argument('--eval_k', type=int, default=10,
help='k for evaluation')
def add_search_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument('--search_top_n', type=int, default=5,
help='search top-n results for search task')
def add_codenn_train_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument('--embed_size', type=int, default=100,
help='embedding size')
parser.add_argument('--repr_size', type=int, default=100,
help='representation size; for bidirectional rnn, the '
'real value will be doubled')
parser.add_argument('--pool', choices=['max', 'mean', 'sum'], default='mean',
help='pooling method to use')
parser.add_argument('--rnn', choices=['lstm', 'gru', 'rnn'], default='gru',
help='rnn and rnn variants to use')
parser.add_argument('--bidirectional', choices=['true', 'false'],
default='true', help='whether to use bidirectional rnn')
parser.add_argument('--activation', choices=['relu', 'tanh'],
default='relu', help='activation function to use')
parser.add_argument('--margin', type=float, default=0.05,
help='margin to use in the loss function')
parser.add_argument('--name_len', type=int, default=6,
help='length of name sequence')
parser.add_argument('--api_len', type=int, default=30,
help='length of api sequence')
parser.add_argument('--token_len', type=int, default=50,
help='length of tokens')
parser.add_argument('--desc_len', type=int, default=30,
help='length of description sequence')
def add_code_summarizer_train_arguments(parser: argparse.ArgumentParser) \
-> None:
parser.add_argument('--embed_size', type=int, default=800,
help='embedding size')
parser.add_argument('--hidden_size', type=int, default=1000,
help='hidden state size')
def get_codenn_argparser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument('--task',
choices=['train', 'valid', 'test', 'repr',
'search', 'serve'],
default='train',
help="task to run; `train' for training the dataset; "
"`valid'/`test' for evaluating model on "
"corresponding dataset; `repr' for converting "
"whole dataset(`use') to code; `search' for "
"searching in whole dataset, it require `repr' to "
"run first; `serve' for searching as web server, "
"it require `repr' to run first")
add_common_model_arguments(parser)
add_codenn_train_arguments(parser)
add_retrieval_eval_arguments(parser)
add_search_arguments(parser)
add_server_arguments(parser)
return parser
def get_code_summarizer_argparser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser()
parser.add_argument('--task',
choices=['train', 'valid', 'test', 'summarize'],
default='train',
help="task to run; `train' for training the dataset; "
"`valid'/`test' for evaluating model on "
"corresponding dataset; `summarize' for summarize "
"the user input code")
add_common_model_arguments(parser)
add_code_summarizer_train_arguments(parser)
return parser
|
setup.py | carloshanson/etlalchemy | 525 | 11192777 | import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass into pytest")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = ""
def run_tests(self):
import pytest
import shlex
errno = pytest.main(shlex.split(self.pytest_args))
sys.exit(errno)
setup(
name = 'etlalchemy',
packages = ['etlalchemy'],
version = '1.0.6',
description = 'Extract, Transform, Load. Migrate any SQL Database in 4 lines of code',
author = '<NAME>',
author_email='<EMAIL>',
url='https://github.com/seanharr11/etlalchemy',
download_url='https://github.com/seanharr11/etlalchemy/tarball/1.0.6',
keywords=['sql','migration','etl','database'],
install_requires = [
"six>=1.9.0",
"SQLAlchemy>=1.2.1,<1.3",
"sqlalchemy-migrate>=0.9.7",
"SQLAlchemy-Utils>=0.32.0"
],
classifiers=[],
cmdclass={'test': PyTest},
tests_require = ["pytest"],
)
|
i3pystatus/group.py | fkusei/i3pystatus | 413 | 11192790 | from i3pystatus import IntervalModule, Status, Module
from i3pystatus.core import util
from i3pystatus.core.imputil import ClassFinder
class Group(Module, Status):
"""
Module for grouping modules together
    Cycles through groups by means of scrolling
.. code-block:: python
group = Group()
group.register("network",
interface="eth0",
divisor=1024,
start_color='white',
format_up="{bytes_recv}K / {bytes_sent}K"
)
group.register("network",
interface="eth0",
color_up='#FFFFFF',
format_up="{v4}"
)
status.register(group)
"""
on_upscroll = ['cycle_module', 1]
on_downscroll = ['cycle_module', -1]
def __init__(self, *args, **kwargs):
Module.__init__(self, *args, **kwargs)
self.modules = util.ModuleList(self, ClassFinder(Module))
self.active = 0
self.__name__ = 'Group'
def get_active_module(self):
if self.active > len(self.modules):
return
return self.modules[self.active]
def run(self):
activemodule = self.get_active_module()
if not activemodule:
return
self.output = activemodule.output
def register(self, *args, **kwargs):
module = Status.register(self, *args, **kwargs)
if module:
module.on_change = self.run
return module
def cycle_module(self, increment=1):
active = self.active + increment
if active >= len(self.modules):
active = 0
elif active < 0:
active = len(self.modules) - 1
self.active = active
def on_click(self, button, **kwargs):
"""
        Capture scroll up and scroll down to move between groups
        Pass everything else to the module itself
"""
if button in (4, 5):
return super().on_click(button, **kwargs)
else:
activemodule = self.get_active_module()
if not activemodule:
return
return activemodule.on_click(button, **kwargs)
|
setup.py | oscarpicas/loophole | 153 | 11192797 | import sys
from distutils.core import setup
setup(
name = 'loophole',
packages = ['loophole', 'loophole.polar', 'loophole.polar.pb'],
version = '0.5.2',
description = 'Polar devices Python API and CLI.',
author = '<NAME>',
author_email = '<EMAIL>',
url = 'https://github.com/rsc-dev/loophole',
download_url = 'https://github.com/rsc-dev/loophole/releases/tag/0.5.2',
keywords = ['polar', 'api', 'cli', 'reverse', ''],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
install_requires=['protobuf'] + (
['pywinusb'] if "win" in sys.platform else ['pyusb']
)
) |
libs/fuel/tests/test_streams.py | dendisuhubdy/attention-lvcsr | 767 | 11192808 | <reponame>dendisuhubdy/attention-lvcsr
import numpy
from numpy.testing import assert_equal, assert_raises
from fuel.datasets import IterableDataset, IndexableDataset
from fuel.schemes import SequentialExampleScheme, SequentialScheme
from fuel.streams import AbstractDataStream, DataStream
class DummyDataStream(AbstractDataStream):
def reset(self):
pass
def close(self):
pass
def next_epoch(self):
pass
def get_epoch_iterator(self, as_dict=False):
pass
def get_data(self, request=None):
pass
class TestAbstractDataStream(object):
def test_raises_value_error_on_no_scheme_no_produces_examples(self):
stream = DummyDataStream()
assert_raises(ValueError, getattr, stream, 'produces_examples')
def test_raises_value_error_when_setting_produces_examples_if_scheme(self):
stream = DummyDataStream(SequentialExampleScheme(2))
assert_raises(ValueError, setattr, stream, 'produces_examples', True)
class TestDataStream(object):
def setUp(self):
self.dataset = IterableDataset(numpy.eye(2))
def test_sources_setter(self):
stream = DataStream(self.dataset)
stream.sources = ('features',)
assert_equal(stream.sources, ('features',))
def test_no_axis_labels(self):
stream = DataStream(self.dataset)
assert stream.axis_labels is None
def test_axis_labels_on_produces_examples(self):
axis_labels = {'data': ('batch', 'features')}
self.dataset.axis_labels = axis_labels
stream = DataStream(self.dataset)
assert_equal(stream.axis_labels, {'data': ('features',)})
def test_axis_labels_on_produces_batches(self):
dataset = IndexableDataset(numpy.eye(2))
axis_labels = {'data': ('batch', 'features')}
dataset.axis_labels = axis_labels
stream = DataStream(dataset, iteration_scheme=SequentialScheme(2, 2))
assert_equal(stream.axis_labels, axis_labels)
def test_produces_examples(self):
stream = DataStream(self.dataset,
iteration_scheme=SequentialExampleScheme(2))
assert stream.produces_examples
|
src/homework/tests/api/tests_markdown_sanitization.py | denkasyanov/education-backend | 151 | 11192858 | import pytest
pytestmark = [
pytest.mark.django_db,
pytest.mark.usefixtures('purchase'),
]
@pytest.mark.parametrize(('text', 'expected'), [
('<script>Ev1l</script>', '<!-- raw HTML omitted -->'),
('*should be rendered*', '<p><em>should be rendered</em></p>'),
('', '<p><img alt="" src="typicalmacuser.jpg"></p>'),
('<em ev1l="hax0r">test</em>', '<p><!-- raw HTML omitted -->test<!-- raw HTML omitted --></p>'),
('a\nb', '<p>a\nb</p>'),
('<h1><h2><h3><h4><h5>', '<!-- raw HTML omitted -->'),
('# test', '<h1>test</h1>'),
('a<hr>b', '<p>a<!-- raw HTML omitted -->b</p>'),
('> а хули ты?', '<blockquote>\n<p>а хули ты?</p>\n</blockquote>'),
])
def test_markdown_gets_sanitized(api, answer, text, expected):
answer.text = text
answer.save()
got = api.get(f'/api/v2/homework/answers/{answer.slug}/')
assert got['text'].strip() == expected
|
mmflow/models/losses/__init__.py | ArlenCHEN/mmflow | 481 | 11192879 | # Copyright (c) OpenMMLab. All rights reserved.
from .census_loss import census_loss
from .multilevel_bce import (MultiLevelBCE, binary_cross_entropy,
multi_levels_binary_cross_entropy)
from .multilevel_charbonnier_loss import (MultiLevelCharbonnierLoss,
charbonnier_loss)
from .multilevel_epe import MultiLevelEPE, endpoint_error
from .sequence_loss import SequenceLoss, sequence_loss
from .smooth_loss import smooth_1st_loss, smooth_2nd_loss
from .ssim import weighted_ssim
__all__ = [
'endpoint_error', 'sequence_loss', 'binary_cross_entropy', 'SequenceLoss',
'MultiLevelBCE', 'MultiLevelEPE', 'MultiLevelCharbonnierLoss',
'multi_levels_binary_cross_entropy', 'charbonnier_loss', 'weighted_ssim',
'smooth_1st_loss', 'smooth_2nd_loss', 'census_loss'
]
|
tests/unit/resources/security/test_certificate_rabbitmq.py | doziya/hpeOneView | 107 | 11192898 | <reponame>doziya/hpeOneView<filename>tests/unit/resources/security/test_certificate_rabbitmq.py
# -*- coding: utf-8 -*-
###
# (C) Copyright (2012-2017) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from unittest import TestCase
import mock
from hpOneView.connection import connection
from hpOneView.resources.resource import ResourceClient
from hpOneView.resources.security.certificate_rabbitmq import CertificateRabbitMQ
class CertificateRabbitMQTest(TestCase):
def setUp(self):
self.host = '127.0.0.1'
self.connection = connection(self.host)
self._certificate_rabbitmq = CertificateRabbitMQ(self.connection)
@mock.patch.object(ResourceClient, 'create')
def test_generate_called_once_with_defaults(self, mock_create):
information = {
"commonName": "default",
"type": "RabbitMqClientCertV2"
}
self._certificate_rabbitmq.generate(information)
mock_create.assert_called_once_with(information, timeout=-1)
@mock.patch.object(ResourceClient, 'get')
def test_get_by_alias_name_called_once(self, mock_get):
alias_name = 'default'
self._certificate_rabbitmq.get(alias_name)
mock_get.assert_called_once_with(alias_name)
@mock.patch.object(ResourceClient, 'get')
def test_get_key_pair_called_once(self, mock_get):
alias_name = 'default'
self._certificate_rabbitmq.get_key_pair(alias_name)
uri = "/rest/certificates/client/rabbitmq/keypair/" + alias_name
mock_get.assert_called_once_with(uri)
@mock.patch.object(ResourceClient, 'get')
def test_get_keys_called_once(self, mock_get):
alias_name = 'default'
key_format = 'Base64'
self._certificate_rabbitmq.get_keys(alias_name, key_format)
uri = "/rest/certificates/client/rabbitmq/keys/" + alias_name + "?format=" + key_format
mock_get.assert_called_once_with(uri)
|
tests/apps/namespace_package_base/nsapp/apps.py | MikeAmy/django | 5,079 | 11192901 | import os
from django.apps import AppConfig
from django.utils._os import upath
class NSAppConfig(AppConfig):
name = 'nsapp'
path = upath(os.path.dirname(__file__))
|
mixly_arduino/sample/mixpy/海龟画图/py/海龟画图08猜一猜_01猜性别.py | wecake/Mixly_Arduino | 118 | 11192934 | import turtle
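# Guess-the-gender turtle demo (Chinese strings translated for reference):
# mygender is "女" (female); the player is asked "猜一猜我是男生还是女生?"
# ("Guess: am I a boy or a girl?"), and tina writes "你真棒,猜对了!"
# ("Well done, you guessed right!") or "很遗憾,猜错了!" ("Sorry, wrong guess!") in red.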
tina= turtle.Turtle()
mygender = "女"
guess = turtle.textinput("猜一猜","猜一猜我是男生还是女生?")
tina.pencolor("#ff0000")
tina.hideturtle()
if mygender == guess:
tina.write("你真棒,猜对了!",False,align="left",font=("黑体",20,"normal"))
else:
tina.write("很遗憾,猜错了!",False,align="left",font=("黑体",20,"normal"))
|
src/tests/file_upload_feature_test.py | tomgilbertson/script-server-v1 | 833 | 11192943 | import os
import time
import unittest
from features.file_upload_feature import FileUploadFeature
from files.user_file_storage import UserFileStorage
from tests import test_utils
from utils import file_utils
class TestUserFileStorage(unittest.TestCase):
def setUp(self):
test_utils.setup()
self.__storage = UserFileStorage(b'12345678')
self.upload_feature = FileUploadFeature(self.__storage, test_utils.temp_folder)
def tearDown(self):
test_utils.cleanup()
self.__storage._stop_autoclean()
def test_prepare_new_folder(self):
file_path = self.upload_feature.prepare_new_folder('userX')
self.assertTrue(os.path.exists(file_path))
def test_prepare_new_folder_different_users(self):
path1 = self.upload_feature.prepare_new_folder('userX')
path2 = self.upload_feature.prepare_new_folder('userY')
self.assertNotEqual(path1, path2)
def test_prepare_new_folder_twice(self):
file_path1 = self.upload_feature.prepare_new_folder('userX')
time.sleep(0.1)
file_path2 = self.upload_feature.prepare_new_folder('userX')
self.assertNotEqual(file_path1, file_path2)
|
tests/testing.py | WamboJambo/spur.py | 217 | 11192950 | import os
import spur
import spur.ssh
def _int_or_none(val):
return int(val) if val is not None else None
HOSTNAME = os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1")
USERNAME = os.environ["TEST_SSH_USERNAME"]
PASSWORD = os.environ["TEST_SSH_PASSWORD"]
PORT = _int_or_none(os.environ.get("TEST_SSH_PORT", 22))
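# Minimal usage sketch for the helper below (an assumption for illustration: the
# TEST_SSH_* environment variables above point at a reachable SSH server):
#   shell = create_ssh_shell()
#   result = shell.run(["echo", "-n", "hello"])  # result.output == b"hello"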
def create_ssh_shell(missing_host_key=None, shell_type=None):
return spur.SshShell(
hostname=HOSTNAME,
username=USERNAME,
password=PASSWORD,
port=PORT,
missing_host_key=(missing_host_key or spur.ssh.MissingHostKey.accept),
shell_type=shell_type,
)
|
036-ircbot/bot.py | gynvael/stream | 152 | 11192988 | <reponame>gynvael/stream
#!/usr/bin/python3
import sys
import socket
import threading
import os
import time
import random
import string
import telnetlib
import queue
def recvuntil(sock, txt):
d = b""
while d.find(txt) == -1:
try:
dnow = sock.recv(1)
if len(dnow) == 0:
return False
except socket.error as msg:
return False
d += dnow
return d
def recvall(sock, n):
  # Receive exactly n bytes; returns False on error or premature EOF.
  d = b""
  while len(d) != n:
    try:
      dnow = sock.recv(n - len(d))
      if len(dnow) == 0:
        return False
    except socket.error as msg:
      return False
    d += dnow
  return d
# Proxy object for sockets.
class gsocket(object):
def __init__(self, *p):
self._sock = socket.socket(*p)
def __getattr__(self, name):
return getattr(self._sock, name)
def recvall(self, n):
return recvall(self._sock, n)
def recvuntil(self, txt):
return recvuntil(self._sock, txt)
def gen_random_nick(prefix):
return prefix + ''.join(random.choice(string.ascii_letters) for _ in range(4))
def toutf8(x):
return bytes(x, "utf-8")
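# Background thread that reads newline-terminated IRC lines from the socket and puts them on the queue.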
class ReciverThread(threading.Thread):
def __init__(self, s, q):
threading.Thread.__init__(self)
self.s = s
self.q = q
def run(self):
while True:
txt = str(self.s.recvuntil(b"\n").strip(), "utf-8")
self.q.put(txt)
def handle_PING(s, src, cmd, params):
s.sendall(toutf8("PONG " + params + "\r\n"))
print("********************** SENT PONG")
def xhandle_PRIVMSG(s, src, cmd, params):
x = params.split(" ", 2)
if len(x) != 2:
return
chan, msg = x
if msg.startswith(":"):
msg = msg[1:]
if msg == "!rand":
x = random.randint(0, 10000000000000000000000000)
s.sendall(toutf8("PRIVMSG %s :%i\r\n" % (chan, x)))
print("RANDOM SENT")
def xhandle_376(s, src, cmd, params):
s.sendall(toutf8("JOIN #gynvaelstream\r\n"))
handlers = {}
def init_handlers():
for k, v in globals().items():
if k.startswith("handle_"):
handlers[k[7:]] = v
print(handlers)
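# Main loop: connect to the IRC server, register a nick, and dispatch each received line to its handler.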
def go():
global HOST
global PORT
init_handlers()
s = gsocket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))
nick = gen_random_nick("Pacynka_")
s.sendall(toutf8("NICK %s\r\n" % nick))
s.sendall(toutf8("USER %s %s %s :%s\r\n" % (nick, nick, nick, nick)))
msg_queue = queue.Queue()
rt = ReciverThread(s, msg_queue)
rt.daemon = True
rt.start()
while True:
try:
msg = msg_queue.get(timeout=0.1)
msg_split = msg.split(" ", 2)
if len(msg_split) == 3:
src, cmd, params = msg_split
else:
src, cmd, params = "", *msg_split
print("DEBUG: (%s, %s, %s)" % (src, cmd, params))
if cmd in handlers:
print("USING HANDLER: %s" % (str(handlers[cmd])))
handlers[cmd](s, src, cmd, params)
else:
print("HANDLER NOT FOUND")
except queue.Empty:
pass
#t = telnetlib.Telnet()
#t.sock = s
#t.interact()
s.close()
HOST = 'irc.freenode.net'
PORT = 6667
go()
|
recipes/Python/578293_unicode_Command_line_histograms/recipe-578293.py | tdiprima/code | 2,023 | 11193027 | <gh_stars>1000+
import numpy as np
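# Renders data as a single line of unicode block characters, one per histogram bin, scaled to the tallest bin.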
def cli_hist(data,bins=10):
bars = u' ▁▂▃▄▅▆▇█'
n,_ = np.histogram(data,bins=bins)
n2=n*(len(bars)-1)/(max(n))
res = u" ".join( bars[i] for i in n2 )
return res
data = np.random.random(100)
print cli_hist(data)
# ▆ ▄ ▃ ▅ █ ▄ ▅ ▁ ▅ ▇
print cli_hist(data,bins=5)
# ▆ ▅ █ ▄ ▇
|
scripts/nextpnr-timing.py | keadwen/CFU-Playground | 240 | 11193040 | #!/usr/bin/env python3
import json
import argparse
arg = argparse.ArgumentParser()
arg.add_argument('file', help='JSON timing report')
arg.add_argument('--src', default='', help='Source name')
arg.add_argument('--dst', default='', help='Destination name')
arg.add_argument('--results', default=100, type=int, help='Number of paths reported')
arg.add_argument('--tgt-len', default=0.0, type=float, help='List paths that are longer than this value (in ns)')
args = vars(arg.parse_args())
data = dict()
with open(args['file'], 'r') as file:
data = json.load(file)
paths = []
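# Collect (driver, endpoint, delay) tuples that match the --src/--dst substrings and exceed --tgt-len.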
for net in data['detailed_net_timings']:
if args['src'] in net['driver']:
src = net['driver']
for endpoint in net['endpoints']:
if args['dst'] in endpoint['cell']:
dly = endpoint['delay']
tgt = endpoint['cell']
if args['tgt_len'] < dly:
paths.append((src, tgt, dly))
paths.sort(key=lambda tup: tup[2], reverse=True)
for path in paths[:args['results']]:
src, tgt, dly = path
print(f"{src} -> {tgt} : {dly}")
|
venv/Lib/site-packages/rivescript/regexp.py | Hazemcodes/GimmyBot | 154 | 11193057 | # RiveScript-Python
#
# This code is released under the MIT License.
# See the "LICENSE" file for more information.
#
# https://www.rivescript.com/
from __future__ import unicode_literals
import re
"""Common regular expressions used in RiveScript."""
# Common regular expressions.
class RE(object):
equals = re.compile('\s*=\s*')
ws = re.compile('\s+')
space = re.compile('\\\\s')
objend = re.compile('^\s*<\s*object')
weight = re.compile(r'\s*\{weight=(\d+)\}\s*')
inherit = re.compile('\{inherits=(\d+)\}')
wilds_and_optionals = re.compile('[\s\*\#\_\[\]()]+')
nasties = re.compile('[^A-Za-z0-9 ]')
crlf = re.compile('<crlf>')
literal_w = re.compile(r'\\w')
array = re.compile(r'\@(.+?)\b')
reply_array = re.compile(r'\(@([A-Za-z0-9_]+)\)')
ph_array = re.compile(r'\x00@([A-Za-z0-9_]+)\x00')
def_syntax = re.compile(r'^.+(?:\s+.+|)\s*=\s*.+?$')
name_syntax = re.compile(r'[^a-z0-9_\-\s]')
obj_syntax = re.compile(r'[^A-Za-z0-9_\-\s]')
utf8_trig = re.compile(r'[A-Z\\.]')
trig_syntax = re.compile(r'[^a-z0-9(\|)\[\]*_#@{}<>=\s]')
cond_syntax = re.compile(r'^.+?\s*(?:==|eq|!=|ne|<>|<|<=|>|>=)\s*.+?=>.+?$')
utf8_meta = re.compile(r'[\\<>]')
utf8_punct = re.compile(r'[.?,!;:@#$%^&*()]')
cond_split = re.compile(r'\s*=>\s*')
cond_parse = re.compile(r'^(.+?)\s+(==|eq|!=|ne|<>|<|<=|>|>=)\s+(.+?)$')
topic_tag = re.compile(r'\{topic=(.+?)\}')
set_tag = re.compile(r'<set (.+?)=(.+?)>')
bot_tag = re.compile(r'<bot (.+?)>')
get_tag = re.compile(r'<get (.+?)>')
star_tags = re.compile(r'<star(\d+)>')
botstars = re.compile(r'<botstar(\d+)>')
input_tags = re.compile(r'<input([1-9])>')
reply_tags = re.compile(r'<reply([1-9])>')
random_tags = re.compile(r'\{random\}(.+?)\{/random\}')
redir_tag = re.compile(r'\{@(.+?)\}')
tag_search = re.compile(r'<([^<]+?)>')
placeholder = re.compile(r'\x00(\d+)\x00')
zero_star = re.compile(r'^\*$')
optionals = re.compile(r'\[(.+?)\]')
empty_pipe = re.compile(r'\|\s*\||\[\s*\||\|\s*\]|\(\s*\||\|\s*\)') # ||, [|, |], (|, |)
|