max_stars_repo_path (string, 4-245) | max_stars_repo_name (string, 7-115) | max_stars_count (int64, 101-368k) | id (string, 2-8) | content (string, 6-1.03M)
---|---|---|---|---|
src/probflow/utils/initializers.py | chiragnagpal/probflow | 134 | 12665025 | """Initializers.
Functions to initialize posterior distribution variables.
* :func:`.xavier` - Xavier initializer
* :func:`.scale_xavier` - Xavier initializer scaled for scale parameters
* :func:`.pos_xavier` - positive-only initializer
----------
"""
import numpy as np
from probflow.utils.settings import get_backend, get_datatype
def xavier(shape):
"""Xavier initializer"""
scale = np.sqrt(2 / sum(shape))
if get_backend() == "pytorch":
# TODO: use truncated normal for torch
import torch
return torch.randn(shape, dtype=get_datatype()) * scale
else:
import tensorflow as tf
return tf.random.truncated_normal(
shape, mean=0.0, stddev=scale, dtype=get_datatype()
)
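# Note: scale_xavier and pos_xavier below shift the Xavier draws by
# 2 - 2*log10(numel) and +log10(numel) respectively, where numel is the
# number of elements in the tensor, so the starting values depend on the
# tensor's size; this reading is inferred from the code itself, not from
# upstream documentation.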
def scale_xavier(shape):
"""Xavier initializer for scale variables"""
vals = xavier(shape)
if get_backend() == "pytorch":
import torch
numel = torch.prod(torch.Tensor(shape))
return vals + 2 - 2 * torch.log(numel) / np.log(10.0)
else:
import tensorflow as tf
numel = float(tf.reduce_prod(shape))
return vals + 2 - 2 * tf.math.log(numel) / tf.math.log(10.0)
def pos_xavier(shape):
"""Xavier initializer for positive variables"""
vals = xavier(shape)
if get_backend() == "pytorch":
import torch
numel = torch.prod(torch.Tensor(shape))
return vals + torch.log(numel) / np.log(10.0)
else:
import tensorflow as tf
numel = float(tf.reduce_prod(shape))
return vals + tf.math.log(numel) / tf.math.log(10.0)
def full_of(val):
"""Get initializer which returns tensor full of single value"""
import probflow.utils.ops as O
def init(shape):
return val * O.ones(shape)
return init
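# Illustrative usage sketch (not part of the original module); assumes the
# TensorFlow backend is selected in probflow's settings.
if __name__ == "__main__":
    loc_init = xavier([5, 3])          # draws for location parameters
    scale_init = scale_xavier([5, 3])  # shifted draws for scale parameters
    pos_init = pos_xavier([5, 3])      # shifted draws for positive parameters
    ones = full_of(1.0)([5])           # constant-valued tensor of shape [5]
    print(loc_init.shape, scale_init.shape, pos_init.shape, ones.shape)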
|
examples/misc/djangoweb/webpages/models.py | takipsizad/pyjs | 739 | 12665036 | from django.db.models import *
class Page(Model):
name = CharField(max_length=50)
text = TextField()
def __unicode__(self):
return str(self.text)
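# Illustrative usage sketch (not part of the original file); assumes a
# configured Django project with this app's tables created.
#   page = Page(name="home", text="Hello, world")
#   page.save()
#   Page.objects.get(name="home").text  # -> "Hello, world"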
|
tests/test_cases/test_multi_dimension_array/test_cocotb_array.py | lavanyajagan/cocotb | 350 | 12665057 |
import cocotb
from cocotb.triggers import Timer
@cocotb.test()
async def test_in_vect_packed(dut):
test_value = 0x5
dut.in_vect_packed.value = test_value
await Timer(1, "ns")
assert dut.out_vect_packed.value == test_value
@cocotb.test()
async def test_in_vect_unpacked(dut):
test_value = [0x1, 0x0, 0x1]
dut.in_vect_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_vect_unpacked.value == test_value
@cocotb.test()
async def test_in_arr(dut):
test_value = 0x5
dut.in_arr.value = test_value
await Timer(1, "ns")
assert dut.out_arr.value == test_value
@cocotb.test()
async def test_in_2d_vect_packed_packed(dut):
test_value = (0x5 << 6) | (0x5 << 3) | 0x5
dut.in_2d_vect_packed_packed.value = test_value
await Timer(1, "ns")
assert dut.out_2d_vect_packed_packed.value == test_value
@cocotb.test()
async def test_in_2d_vect_packed_unpacked(dut):
test_value = [0x5, 0x5, 0x5]
dut.in_2d_vect_packed_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_2d_vect_packed_unpacked.value == test_value
@cocotb.test()
async def test_in_2d_vect_unpacked_unpacked(dut):
test_value = 3 * [[0x1, 0x0, 0x1]]
dut.in_2d_vect_unpacked_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_2d_vect_unpacked_unpacked.value == test_value
@cocotb.test()
async def test_in_arr_packed(dut):
test_value = 365
dut.in_arr_packed.value = test_value
await Timer(1, "ns")
assert dut.out_arr_packed.value == test_value
@cocotb.test()
async def test_in_arr_unpacked(dut):
test_value = [0x5, 0x5, 0x5]
dut.in_arr_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_arr_unpacked.value == test_value
@cocotb.test()
async def test_in_2d_arr(dut):
test_value = 365
dut.in_2d_arr.value = test_value
await Timer(1, "ns")
assert dut.out_2d_arr.value == test_value
@cocotb.test()
async def test_in_vect_packed_packed_packed(dut):
test_value = 95869805
dut.in_vect_packed_packed_packed.value = test_value
await Timer(1, "ns")
assert dut.out_vect_packed_packed_packed.value == test_value
# Questa is unable to access elements of a logic array if the last dimension is unpacked (gh-2605)
@cocotb.test(
expect_error=IndexError
if cocotb.LANGUAGE == "verilog" and cocotb.SIM_NAME.lower().startswith("modelsim")
else ()
)
async def test_in_vect_packed_packed_unpacked(dut):
test_value = [365, 365, 365]
dut.in_vect_packed_packed_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_vect_packed_packed_unpacked.value == test_value
@cocotb.test()
async def test_in_vect_packed_unpacked_unpacked(dut):
test_value = 3 * [3 * [5]]
dut.in_vect_packed_unpacked_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_vect_packed_unpacked_unpacked.value == test_value
@cocotb.test()
async def test_in_vect_unpacked_unpacked_unpacked(dut):
test_value = 3 * [3 * [[1, 0, 1]]]
dut.in_vect_unpacked_unpacked_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_vect_unpacked_unpacked_unpacked.value == test_value
@cocotb.test()
async def test_in_arr_packed_packed(dut):
test_value = (365 << 18) | (365 << 9) | (365)
dut.in_arr_packed_packed.value = test_value
await Timer(1, "ns")
assert dut.out_arr_packed_packed.value == test_value
# Questa is unable to access elements of a logic array if the last dimension is unpacked (gh-2605)
@cocotb.test(
expect_error=IndexError
if cocotb.LANGUAGE == "verilog" and cocotb.SIM_NAME.lower().startswith("modelsim")
else ()
)
async def test_in_arr_packed_unpacked(dut):
test_value = [365, 365, 365]
dut.in_arr_packed_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_arr_packed_unpacked.value == test_value
@cocotb.test()
async def test_in_arr_unpacked_unpacked(dut):
test_value = 3 * [3 * [5]]
dut.in_arr_unpacked_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_arr_unpacked_unpacked.value == test_value
@cocotb.test()
async def test_in_2d_arr_packed(dut):
test_value = (365 << 18) | (365 << 9) | (365)
dut.in_2d_arr_packed.value = test_value
await Timer(1, "ns")
assert dut.out_2d_arr_packed.value == test_value
# Questa is unable to access elements of a logic array if the last dimension is unpacked (gh-2605)
@cocotb.test(
expect_error=IndexError
if cocotb.LANGUAGE == "verilog" and cocotb.SIM_NAME.lower().startswith("modelsim")
else ()
)
async def test_in_2d_arr_unpacked(dut):
test_value = [365, 365, 365]
dut.in_2d_arr_unpacked.value = test_value
await Timer(1, "ns")
assert dut.out_2d_arr_unpacked.value == test_value
@cocotb.test()
async def test_in_3d_arr(dut):
test_value = (365 << 18) | (365 << 9) | (365)
dut.in_3d_arr.value = test_value
await Timer(1, "ns")
assert dut.out_3d_arr.value == test_value
|
tests/issues/test_issue_020.py | RodrigoDeRosa/related | 190 | 12665064 |
import related
@related.immutable
class ImageOptions(object):
registry = related.URLField()
email = related.StringField()
def test_image_options():
options = ImageOptions(
registry="https://imgur.com/gallery/GAhlfKS", email="<EMAIL>"
)
assert options.registry
assert options.email
|
models/__init__.py | briana-jin-zhang/spatial-segmentation | 733 | 12665082 |
from . import backbone
from .losses import *
from .single_stage_model import *
from .supervised import *
from .partial_completion_mask import *
from .partial_completion_content import *
from .partial_completion_content_cgan import *
|
cryptol-remote-api/python/tests/cryptol/test_EvenMansour.py | GaloisInc/cryptol | 773 | 12665084 | import unittest
from pathlib import Path
import cryptol
from cryptol.single_connection import *
from cryptol.bitvector import BV
class TestEvenMansour(unittest.TestCase):
def test_EvenMansour(self):
connect(verify=False)
load_file(str(Path('tests','cryptol','test-files','examples','contrib','EvenMansour.cry')))
F_10_4 = cry_eval('F:[10][4]')
self.assertTrue(call('is_a_permutation', F_10_4))
Finv_10_4 = cry_eval("F':[10][4]")
digits = [ BV(size=4, value=i) for i in range(0,10) ]
        # ^ the same as: cry_eval('[0..9]:[_][4]')
self.assertTrue(call('is_inverse_permutation', digits, F_10_4, Finv_10_4))
self.assertTrue(check('E_and_D_are_inverses'))
if __name__ == "__main__":
unittest.main()
|
src/django-nonrel/tests/regressiontests/comment_tests/custom_comments/__init__.py | adamjmcgrath/glancydesign | 790 | 12665087 |
from django.core import urlresolvers
from regressiontests.comment_tests.custom_comments.models import CustomComment
from regressiontests.comment_tests.custom_comments.forms import CustomCommentForm
def get_model():
return CustomComment
def get_form():
return CustomCommentForm
def get_form_target():
return urlresolvers.reverse(
"regressiontests.comment_tests.custom_comments.views.custom_submit_comment"
)
def get_flag_url(c):
return urlresolvers.reverse(
"regressiontests.comment_tests.custom_comments.views.custom_flag_comment",
args=(c.id,)
)
def get_delete_url(c):
return urlresolvers.reverse(
"regressiontests.comment_tests.custom_comments.views.custom_delete_comment",
args=(c.id,)
)
def get_approve_url(c):
return urlresolvers.reverse(
"regressiontests.comment_tests.custom_comments.views.custom_approve_comment",
args=(c.id,)
)
|
mayo/session/test.py | deep-fry/mayo | 110 | 12665122 |
from mayo.session.base import SessionBase
class Test(SessionBase):
mode = 'test'
def __init__(self, config):
super().__init__(config)
self.load_checkpoint(self.config.system.checkpoint.load)
def test(self):
todo = list(zip(self.task.names, self.task.predictions))
results = self.run(todo, batch=True)
for names, predictions in results:
self.task.test(names, predictions)
|
tests/data/test_DataBuffer.py | ankitshah009/dcase_util | 122 | 12665130 |
""" Unit tests for FIFOBuffer """
import nose.tools
import numpy
import dcase_util
from dcase_util.containers import MetaDataContainer
from dcase_util.data import DataBuffer
def test_DataBuffer():
buf = DataBuffer(size=2)
nose.tools.eq_(buf.count, 0)
nose.tools.eq_(buf.full, False)
buf.set(key='key1', data=[1, 2, 3], meta='metadata1')
nose.tools.eq_(buf.count, 1)
nose.tools.eq_(buf.full, False)
buf.set(key='key2', data=[2, 3, 4], meta='metadata2')
nose.tools.eq_(buf.count, 2)
nose.tools.eq_(buf.full, True)
item_data, item_meta = buf.get(key='key1')
nose.tools.eq_(item_data, [1, 2, 3])
nose.tools.eq_(item_meta, 'metadata1')
item_data, item_meta = buf.get(key='key2')
nose.tools.eq_(item_data, [2, 3, 4])
nose.tools.eq_(item_meta, 'metadata2')
buf.set(key='key3', data=[3, 4, 5], meta='metadata3')
item_data, item_meta = buf.get(key='key3')
nose.tools.eq_(item_data, [3, 4, 5])
nose.tools.eq_(item_meta, 'metadata3')
nose.tools.eq_(buf.get(key='key4'), (None, None))
nose.tools.eq_(buf.count, 2)
buf.clear()
nose.tools.eq_(buf.count, 0)
def test_log():
with dcase_util.utils.DisableLogger():
DataBuffer(
size=2,
filename='event_roller.cpickle'
).log()
|
tests/test_difference.py | ajhynes7/datatest | 277 | 12665144 |
# -*- coding: utf-8 -*-
import datetime
import decimal
import re
import textwrap
from . import _unittest as unittest
from datatest.differences import (
BaseDifference,
Missing,
Extra,
Invalid,
Deviation,
_make_difference,
NOVALUE,
)
# FOR TESTING: A minimal subclass of BaseDifference.
# BaseDifference itself should not be instantiated
# directly.
class MinimalDifference(BaseDifference):
def __init__(self, *args):
self._args = args
@property
def args(self):
return self._args
class TestBaseDifference(unittest.TestCase):
def test_instantiation(self):
"""BaseDifference should not be instantiated directly.
It should only serve as a superclass for more specific
differences.
"""
# Subclass should instantiate normally:
subclass_instance = MinimalDifference('A')
# Base class should raise error.
regex = "Can't instantiate abstract class BaseDifference"
with self.assertRaisesRegex(TypeError, regex):
base_instance = BaseDifference()
def test_args(self):
"""Args should be tuple of arguments."""
diff = MinimalDifference('A')
self.assertEqual(diff.args, ('A',))
def test_repr(self):
diff = MinimalDifference('A')
self.assertEqual(repr(diff), "MinimalDifference('A')")
diff = MinimalDifference('A', 'B')
self.assertEqual(repr(diff), "MinimalDifference('A', 'B')")
diff = MinimalDifference('A', None)
self.assertEqual(repr(diff), "MinimalDifference('A', None)")
def myfunc(x):
return True
diff = MinimalDifference('A', myfunc)
self.assertEqual(repr(diff), "MinimalDifference('A', myfunc)")
class MyClass(object):
pass
diff = MinimalDifference('A', MyClass)
self.assertEqual(repr(diff), "MinimalDifference('A', MyClass)")
def test_numbers_equal(self):
first = MinimalDifference(1)
second = MinimalDifference(1.0)
self.assertEqual(first, second)
first = MinimalDifference(1)
second = MinimalDifference(2)
self.assertNotEqual(first, second)
def test_string_equal(self):
first = MinimalDifference('A')
second = MinimalDifference('A')
self.assertEqual(first, second)
def test_nan_equal(self):
"""NaN values should test as equal when part of a difference."""
first = MinimalDifference(float('nan'))
second = MinimalDifference(float('nan'))
self.assertEqual(first, second)
# NaNs nested in a tuple should also test as equal.
first = MinimalDifference(('abc', float('nan')))
second = MinimalDifference(('abc', float('nan')))
self.assertEqual(first, second)
# Complex numbers, too.
first = MinimalDifference(float('nan'))
second = MinimalDifference(complex(float('nan')))
self.assertEqual(first, second)
def test_comparing_different_types(self):
diff = MinimalDifference('X')
self.assertNotEqual(diff, Exception('X'))
self.assertNotEqual(diff, None)
self.assertNotEqual(diff, True)
self.assertNotEqual(diff, False)
class TestSubclassRelationship(unittest.TestCase):
def test_subclass(self):
self.assertTrue(issubclass(Extra, BaseDifference))
self.assertTrue(issubclass(Missing, BaseDifference))
self.assertTrue(issubclass(Invalid, BaseDifference))
self.assertTrue(issubclass(Deviation, BaseDifference))
class TestInvalid(unittest.TestCase):
def test_repr(self):
diff = Invalid('foo')
self.assertEqual(repr(diff), "Invalid('foo')")
diff = Invalid('foo', 'bar')
self.assertEqual(repr(diff), "Invalid('foo', expected='bar')")
diff = Invalid('foo', None)
self.assertEqual(repr(diff), "Invalid('foo', expected=None)")
def test_repr_with_callables(self):
def myfunc(x):
return True
class MyClass(object):
pass
diff = Invalid('foo', myfunc)
self.assertEqual(repr(diff), "Invalid('foo', expected=myfunc)")
diff = Invalid('foo', MyClass)
self.assertEqual(repr(diff), "Invalid('foo', expected=MyClass)")
diff = Invalid(myfunc, 'bar')
self.assertEqual(repr(diff), "Invalid(myfunc, expected='bar')")
diff = Invalid(MyClass, 'bar')
self.assertEqual(repr(diff), "Invalid(MyClass, expected='bar')")
def test_same_values(self):
with self.assertRaises(ValueError):
Invalid('foo', 'foo')
def test_equality_error(self):
class BadObj(object):
def __eq__(self, other):
if isinstance(other, BadObj):
return True
raise TypeError('Sudden but inevitable betrayal!')
diff = Invalid(BadObj(), float) # <- Checks for equality on init.
self.assertEqual(diff.invalid, BadObj())
self.assertEqual(diff.expected, float)
class TestDeviation(unittest.TestCase):
def test_instantiation(self):
Deviation(1, 100) # Pass without error.
with self.assertRaises(ValueError):
Deviation(0, 100) # Deviation should not be zero.
def test_nonquantitative(self):
with self.assertRaises(TypeError):
Deviation(set([3]), set([1, 2]))
def test_repr(self):
diff = Deviation(1, 100) # Simple positive.
self.assertEqual(repr(diff), "Deviation(+1, 100)")
diff = Deviation(-1, 100) # Simple negative.
self.assertEqual(repr(diff), "Deviation(-1, 100)")
diff = Deviation(float('nan'), 100) # None reference.
self.assertEqual(repr(diff), "Deviation(float('nan'), 100)")
def test_repr_with_datetime(self):
diff = Deviation(
datetime.timedelta(hours=-1),
datetime.datetime(1989, 2, 24, hour=11, minute=30),
)
expected = \
'Deviation(timedelta(seconds=-3600), datetime(1989, 2, 24, 11, 30))'
self.assertEqual(repr(diff), expected)
def test_repr_with_date(self):
diff = Deviation(
datetime.timedelta(days=1),
datetime.date(1989, 2, 24),
)
expected = 'Deviation(timedelta(days=+1), date(1989, 2, 24))'
self.assertEqual(repr(diff), expected)
def test_zero_and_empty_value_handling(self):
"""Empty values receive special handling."""
# Expected 0 (pass without error).
Deviation(+5, 0)
Deviation(-5, 0)
Deviation(float('nan'), 0)
with self.assertRaises(ValueError):
Deviation(0, 0)
# Expected numeric value (pass without error).
Deviation(+1, 5)
Deviation(-1, 5)
Deviation(float('nan'), 5)
# Expected non-zero, with empty or zero deviation.
with self.assertRaises(ValueError):
Deviation(0, 5)
with self.assertRaises(TypeError):
Deviation(None, 5)
with self.assertRaises(TypeError):
Deviation('', 5)
with self.assertRaises(TypeError):
Deviation(5, None)
with self.assertRaises(TypeError):
Deviation(5, '')
# NaN handling.
Deviation(float('nan'), 0)
Deviation(0, float('nan'))
def test_repr_eval(self):
diff = Deviation(+1, 100)
self.assertEqual(diff, eval(repr(diff)))
diff = Deviation(-1, 100)
self.assertEqual(diff, eval(repr(diff)))
diff = Deviation(float('nan'), 100)
self.assertEqual(diff, eval(repr(diff)))
class TestImmutability(unittest.TestCase):
"""Differences should act like an immutable objects."""
def test_missing(self):
diff = Missing('foo')
with self.assertRaises(AttributeError):
diff.attr = ('bar',)
with self.assertRaises(AttributeError):
diff.new_attribute = 'baz'
def test_extra(self):
diff = Extra('foo')
with self.assertRaises(AttributeError):
diff.attr = ('bar',)
with self.assertRaises(AttributeError):
diff.new_attribute = 'baz'
def test_invalid(self):
diff = Invalid('foo')
with self.assertRaises(AttributeError):
diff.expected = 'bar'
with self.assertRaises(AttributeError):
diff.new_attribute = 'baz'
def test_deviation(self):
diff = Deviation(+1, 100)
with self.assertRaises(AttributeError):
diff.expected = 101
with self.assertRaises(AttributeError):
diff.new_attribute = 202
class TestHashability(unittest.TestCase):
"""Built-in differences should be hashable (in the same way that
tuples are).
"""
def test_hashable(self):
"""Differences with hashable *args should be hashable."""
# Following should all pass without error.
hash(Missing('foo'))
hash(Extra('bar'))
hash(Invalid('baz'))
hash(Invalid('baz', 'qux'))
hash(Deviation(-1, 10))
def test_unhashable_contents(self):
"""The hash behavior of differences should act like tuples do.
When a difference's contents are unhashable, the difference
itself becomes unhashable too.
"""
with self.assertRaises(TypeError):
hash(Missing(['foo']))
with self.assertRaises(TypeError):
hash(Extra(['bar']))
with self.assertRaises(TypeError):
hash(Invalid(['baz']))
with self.assertRaises(TypeError):
hash(Invalid('baz', ['qux']))
class TestMakeDifference(unittest.TestCase):
def test_numeric_vs_numeric(self):
diff = _make_difference(5, 6)
self.assertEqual(diff, Deviation(-1, 6))
def test_decimal_vs_float(self):
diff = _make_difference(decimal.Decimal('5'), 6.0)
self.assertEqual(diff, Invalid(decimal.Decimal('5'), expected=6.0))
def test_datetime_vs_datetime(self):
diff = _make_difference(
datetime.datetime(1989, 2, 24, hour=10, minute=30),
datetime.datetime(1989, 2, 24, hour=11, minute=30),
)
self.assertEqual(
diff,
Deviation(
datetime.timedelta(hours=-1),
datetime.datetime(1989, 2, 24, hour=11, minute=30),
),
)
def test_numeric_vs_none(self):
diff = _make_difference(5, None)
self.assertEqual(diff, Invalid(5, None))
diff = _make_difference(0, None)
self.assertEqual(diff, Invalid(0, None))
def test_none_vs_numeric(self):
diff = _make_difference(None, 6)
self.assertEqual(diff, Invalid(None, 6))
diff = _make_difference(None, 0)
self.assertEqual(diff, Invalid(None, 0))
def test_object_vs_object(self):
"""Non-numeric comparisons return Invalid type."""
diff = _make_difference('a', 'b')
self.assertEqual(diff, Invalid('a', 'b'))
diff = _make_difference(5, 'b')
self.assertEqual(diff, Invalid(5, 'b'))
diff = _make_difference('a', 6)
self.assertEqual(diff, Invalid('a', 6))
diff = _make_difference(float('nan'), 6)
self.assertEqual(diff, Deviation(float('nan'), 6))
diff = _make_difference(5, float('nan'))
self.assertEqual(diff, Deviation(float('nan'), float('nan')))
fn = lambda x: True
diff = _make_difference('a', fn)
self.assertEqual(diff, Invalid('a', fn))
regex = re.compile('^test$')
diff = _make_difference('a', regex)
self.assertEqual(diff, Invalid('a', re.compile('^test$')))
def test_boolean_comparisons(self):
"""Boolean differences should not be treated quantitatively."""
diff = _make_difference(False, True)
self.assertIs(diff.invalid, False)
self.assertIs(diff.expected, True)
diff = _make_difference(True, False)
self.assertIs(diff.invalid, True)
self.assertIs(diff.expected, False)
diff = _make_difference(0, True)
self.assertEqual(diff.invalid, 0)
self.assertIsNot(diff.invalid, False)
self.assertIs(diff.expected, True)
diff = _make_difference(1, False)
self.assertEqual(diff.invalid, 1)
self.assertIsNot(diff.invalid, True)
self.assertIs(diff.expected, False)
diff = _make_difference(False, 1)
self.assertIs(diff.invalid, False)
self.assertEqual(diff.expected, 1)
self.assertIsNot(diff.expected, True)
diff = _make_difference(True, 0)
self.assertIs(diff.invalid, True)
self.assertEqual(diff.expected, 0)
self.assertIsNot(diff.expected, False)
def test_novalue_comparisons(self):
diff = _make_difference('a', NOVALUE)
self.assertEqual(diff, Extra('a'))
diff = _make_difference(5, NOVALUE)
self.assertEqual(diff, Extra(5))
diff = _make_difference(0, NOVALUE)
self.assertEqual(diff, Extra(0))
diff = _make_difference(NOVALUE, 'a')
self.assertEqual(diff, Missing('a'))
diff = _make_difference(NOVALUE, 5)
self.assertEqual(diff, Missing(5))
diff = _make_difference(NOVALUE, 0)
self.assertEqual(diff, Missing(0))
def test_show_expected(self):
"""If requirement is common it should be omitted from Invalid
difference (but not from Deviation differences).
"""
diff = _make_difference('a', 6, show_expected=True)
self.assertEqual(diff, Invalid('a', expected=6))
diff = _make_difference('a', 6, show_expected=False)
self.assertEqual(diff, Invalid('a'))
        # Show expected should not affect Missing, Extra, or Deviation:
diff = _make_difference(NOVALUE, 6, show_expected=True)
self.assertEqual(diff, Missing(6))
diff = _make_difference(NOVALUE, 6, show_expected=False)
self.assertEqual(diff, Missing(6))
diff = _make_difference(6, NOVALUE, show_expected=True)
self.assertEqual(diff, Extra(6))
diff = _make_difference(6, NOVALUE, show_expected=False)
self.assertEqual(diff, Extra(6))
diff = _make_difference(1, 2, show_expected=True)
self.assertEqual(diff, Deviation(-1, 2))
diff = _make_difference(1, 2, show_expected=False)
self.assertEqual(diff, Deviation(-1, 2))
def test_same(self):
with self.assertRaises(ValueError):
diff = _make_difference('a', 'a')
with self.assertRaises(ValueError):
diff = _make_difference(None, None)
# NaN should work though.
_make_difference(float('nan'), float('nan'))
|
mushroom_rl/environments/pybullet_envs/air_hockey/__init__.py | jdsalmonson/mushroom-rl | 344 | 12665147 | from mushroom_rl.environments.pybullet_envs.air_hockey.hit import AirHockeyHit
from mushroom_rl.environments.pybullet_envs.air_hockey.defend import AirHockeyDefend
AirHockeyHit.register()
AirHockeyDefend.register()
|
app/helpers/eventing/helpers.py | DaveCheez/serverless-store-demo | 171 | 12665160 | # Copyright 2018 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A collection of helper functions for streaming events.
"""
import os
import json
import time
from google.cloud import pubsub_v1
publisher = pubsub_v1.PublisherClient()
GCP_PROJECT = os.environ.get('GCP_PROJECT')
def stream_event(topic_name, event_type, event_context):
"""
Helper function for publishing an event.
Parameters:
topic_name (str): The name of the Cloud Pub/Sub topic.
event_type (str): The type of the event.
event_context: The context of the event.
Output:
None.
"""
topic_path = publisher.topic_path(GCP_PROJECT, topic_name)
request = {
'event_type': event_type,
'created_time': str(int(time.time())),
'event_context': event_context
}
data = json.dumps(request).encode()
publisher.publish(topic_path, data)
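# Illustrative usage sketch (not part of the original helpers module); the
# topic name and payload below are hypothetical, and GCP_PROJECT must be set
# in the environment for the publish call to succeed.
if __name__ == "__main__":
    stream_event(
        topic_name="playground",
        event_type="checkout",
        event_context={"order_id": "1234", "amount": 42.0},
    )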
|
tests_obsolete/extension/dataflow_/manager/dataflow_manager.py | akmaru/veriloggen | 232 | 12665168 | from __future__ import absolute_import
from __future__ import print_function
import sys
import os
# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))
from veriloggen import *
import veriloggen.dataflow as dataflow
def mkMain():
m = Module('main')
clk = m.Input('CLK')
rst = m.Input('RST')
df = dataflow.DataflowManager(m, clk, rst)
x = df.Counter()
y = df.Counter()
z = x * 2 - y
z.output('zdata', 'zvalid')
return m
def mkTest(numports=8):
m = Module('test')
# target instance
main = mkMain()
params = m.copy_params(main)
ports = m.copy_sim_ports(main)
clk = ports['CLK']
rst = ports['RST']
uut = m.Instance(main, 'uut',
params=m.connect_params(main),
ports=m.connect_ports(main))
reset_done = m.Reg('reset_done', initval=0)
reset_stmt = []
reset_stmt.append(reset_done(0))
simulation.setup_waveform(m, uut)
simulation.setup_clock(m, clk, hperiod=5)
init = simulation.setup_reset(m, rst, reset_stmt, period=100)
nclk = simulation.next_clock
init.add(
Delay(1000),
reset_done(1),
nclk(clk),
Delay(10000),
Systask('finish'),
)
return m
if __name__ == '__main__':
test = mkTest()
verilog = test.to_verilog('tmp.v')
print(verilog)
# run simulator (Icarus Verilog)
sim = simulation.Simulator(test)
rslt = sim.run() # display=False
#rslt = sim.run(display=True)
print(rslt)
# launch waveform viewer (GTKwave)
# sim.view_waveform() # background=False
# sim.view_waveform(background=True)
|
blockchain-workbench/rest-api-samples/python/swagger_client/api/applications_api.py | chaosmail/blockchain | 738 | 12665173 | # coding: utf-8
"""
Azure Blockchain Workbench REST API
The Azure Blockchain Workbench REST API is a Workbench extensibility point, which allows developers to create and manage blockchain applications, manage users and organizations within a consortium, integrate blockchain applications into services and platforms, perform transactions on a blockchain, and retrieve transactional and contract data from a blockchain. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class ApplicationsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def application_delete(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Deletes the specified blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: Currently not implemented. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_delete(application_id, async=True)
>>> result = thread.get()
:param async bool
:param str application_id: The id of the application. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.application_delete_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.application_delete_with_http_info(application_id, **kwargs) # noqa: E501
return data
def application_delete_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Deletes the specified blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: Currently not implemented. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_delete_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param str application_id: The id of the application. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method application_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `application_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationID'] = params['application_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def application_disable(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Disables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_disable(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.application_disable_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.application_disable_with_http_info(application_id, **kwargs) # noqa: E501
return data
def application_disable_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Disables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_disable_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method application_disable" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `application_disable`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
            '/api/v1/applications/{applicationId}/disable', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def application_enable(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Enables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_enable(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.application_enable_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.application_enable_with_http_info(application_id, **kwargs) # noqa: E501
return data
def application_enable_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Enables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_enable_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method application_enable" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `application_enable`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
            '/api/v1/applications/{applicationId}/enable', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def application_get(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Gets the blockchain application matching a specific application ID. Users who are Workbench administrators get the blockchain application. Non-Workbench administrators get the blockchain application if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_get(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application. (required)
:return: Application
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.application_get_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.application_get_with_http_info(application_id, **kwargs) # noqa: E501
return data
def application_get_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Gets the blockchain application matching a specific application ID. Users who are Workbench administrators get the blockchain application. Non-Workbench administrators get the blockchain application if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.application_get_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application. (required)
:return: Application
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method application_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `application_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Application', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def applications_get(self, **kwargs): # noqa: E501
""" # noqa: E501
Lists all blockchain applications to which a user has access in Workbench. Users who are Workbench administrators get all blockchain applications. Non-Workbench administrators get all blockchain applications for which they have at least one associated application role or an associated smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.applications_get(async=True)
>>> result = thread.get()
:param async bool
:param int top: The maximum number of entries to return in the result set.
:param int skip: The number of entries to skip in the result set.
:param bool enabled: A Boolean for whether to filter the result set to only enabled applications.
:return: ApplicationList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.applications_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.applications_get_with_http_info(**kwargs) # noqa: E501
return data
def applications_get_with_http_info(self, **kwargs): # noqa: E501
""" # noqa: E501
Lists all blockchain applications to which a user has access in Workbench. Users who are Workbench administrators get all blockchain applications. Non-Workbench administrators get all blockchain applications for which they have at least one associated application role or an associated smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.applications_get_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:param int top: The maximum number of entries to return in the result set.
:param int skip: The number of entries to skip in the result set.
:param bool enabled: A Boolean for whether to filter the result set to only enabled applications.
:return: ApplicationList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['top', 'skip', 'enabled'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method applications_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'top' in params:
query_params.append(('top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
if 'enabled' in params:
query_params.append(('enabled', params['enabled'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ApplicationList', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def applications_post(self, file, **kwargs): # noqa: E501
""" # noqa: E501
Creates a new blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.applications_post(file, async=True)
>>> result = thread.get()
:param async bool
:param file file: Upload File (required)
:return: int
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.applications_post_with_http_info(file, **kwargs) # noqa: E501
else:
(data) = self.applications_post_with_http_info(file, **kwargs) # noqa: E501
return data
def applications_post_with_http_info(self, file, **kwargs): # noqa: E501
""" # noqa: E501
Creates a new blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.applications_post_with_http_info(file, async=True)
>>> result = thread.get()
:param async bool
:param file file: Upload File (required)
:return: int
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method applications_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file' is set
if ('file' not in params or
params['file'] is None):
raise ValueError("Missing the required parameter `file` when calling `applications_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
local_var_files['file'] = params['file'] # noqa: E501
body_params = None
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='int', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def contract_code_delete(self, contract_code_id, **kwargs): # noqa: E501
""" # noqa: E501
Deletes the specified blockchain smart contract implementation of a specific blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: not currently implemented # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_code_delete(contract_code_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_code_id: The id of the contract code (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_code_delete_with_http_info(contract_code_id, **kwargs) # noqa: E501
else:
(data) = self.contract_code_delete_with_http_info(contract_code_id, **kwargs) # noqa: E501
return data
def contract_code_delete_with_http_info(self, contract_code_id, **kwargs): # noqa: E501
""" # noqa: E501
Deletes the specified blockchain smart contract implementation of a specific blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: not currently implemented # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_code_delete_with_http_info(contract_code_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_code_id: The id of the contract code (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['contract_code_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method contract_code_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'contract_code_id' is set
if ('contract_code_id' not in params or
params['contract_code_id'] is None):
raise ValueError("Missing the required parameter `contract_code_id` when calling `contract_code_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'contract_code_id' in params:
path_params['contractCodeId'] = params['contract_code_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/contractCode/{contractCodeId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def contract_code_get(self, contract_code_id, **kwargs): # noqa: E501
""" # noqa: E501
Get the blockchain smart contract implementation matching a specific ledger implementation ID. Users who are Workbench administrators get the specified smart contract implementation. Non-Workbench administrators get the smart contract implementation if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_code_get(contract_code_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_code_id: The id of the contract code (required)
:return: FileStreamResult
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_code_get_with_http_info(contract_code_id, **kwargs) # noqa: E501
else:
(data) = self.contract_code_get_with_http_info(contract_code_id, **kwargs) # noqa: E501
return data
def contract_code_get_with_http_info(self, contract_code_id, **kwargs): # noqa: E501
""" # noqa: E501
Get the blockchain smart contract implementation matching a specific ledger implementation ID. Users who are Workbench administrators get the specified smart contract implementation. Non-Workbench administrators get the smart contract implementation if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_code_get_with_http_info(contract_code_id, async=True)
>>> result = thread.get()
:param async bool
:param int contract_code_id: The id of the contract code (required)
:return: FileStreamResult
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['contract_code_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method contract_code_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'contract_code_id' is set
if ('contract_code_id' not in params or
params['contract_code_id'] is None):
raise ValueError("Missing the required parameter `contract_code_id` when calling `contract_code_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'contract_code_id' in params:
path_params['contractCodeId'] = params['contract_code_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/contractCode/{contractCodeId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileStreamResult', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def contract_code_post(self, application_id, file, **kwargs): # noqa: E501
""" # noqa: E501
Uploads one or more smart contracts (ex. .sol or .zip), representing the implementation of the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_code_post(application_id, file, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param file file: Upload File (required)
:param int ledger_id: The index of the ledger
:return: int
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_code_post_with_http_info(application_id, file, **kwargs) # noqa: E501
else:
(data) = self.contract_code_post_with_http_info(application_id, file, **kwargs) # noqa: E501
return data
def contract_code_post_with_http_info(self, application_id, file, **kwargs): # noqa: E501
""" # noqa: E501
Uploads one or more smart contracts (ex. .sol or .zip), representing the implementation of the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_code_post_with_http_info(application_id, file, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param file file: Upload File (required)
:param int ledger_id: The index of the ledger
:return: int
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id', 'file', 'ledger_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method contract_code_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `contract_code_post`") # noqa: E501
# verify the required parameter 'file' is set
if ('file' not in params or
params['file'] is None):
raise ValueError("Missing the required parameter `file` when calling `contract_code_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
if 'ledger_id' in params:
query_params.append(('ledgerId', params['ledger_id'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
if 'file' in params:
local_var_files['file'] = params['file'] # noqa: E501
body_params = None
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationId}/contractCode', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='int', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def contract_codes_get(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
List all blockchain smart contract implementations of the specified blockchain application. Users who are Workbench administrators get all smart contract implementations. Non-Workbench administrators get all smart contract implementations for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_codes_get(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param int ledger_id: The index of the chain type
:param int top: The maximum number of items to return
:param int skip: The number of items to skip before returning
:return: ContractCodeList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.contract_codes_get_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.contract_codes_get_with_http_info(application_id, **kwargs) # noqa: E501
return data
def contract_codes_get_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
List all blockchain smart contract implementations of the specified blockchain application. Users who are Workbench administrators get all smart contract implementations. Non-Workbench administrators get all smart contract implementations for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.contract_codes_get_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param int ledger_id: The index of the chain type
:param int top: The maximum number of items to return
:param int skip: The number of items to skip before returning
:return: ContractCodeList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id', 'ledger_id', 'top', 'skip'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method contract_codes_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `contract_codes_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
if 'ledger_id' in params:
query_params.append(('ledgerId', params['ledger_id'])) # noqa: E501
if 'top' in params:
query_params.append(('top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
            '/api/v1/applications/{applicationId}/contractCode', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ContractCodeList', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def role_assignment_delete(self, application_id, role_assignment_id, **kwargs): # noqa: E501
""" # noqa: E501
Deletes the specified role assignment. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignment_delete(application_id, role_assignment_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param int role_assignment_id: The id of the role assignment (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.role_assignment_delete_with_http_info(application_id, role_assignment_id, **kwargs) # noqa: E501
else:
(data) = self.role_assignment_delete_with_http_info(application_id, role_assignment_id, **kwargs) # noqa: E501
return data
def role_assignment_delete_with_http_info(self, application_id, role_assignment_id, **kwargs): # noqa: E501
""" # noqa: E501
Deletes the specified role assignment. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignment_delete_with_http_info(application_id, role_assignment_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param int role_assignment_id: The id of the role assignment (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id', 'role_assignment_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method role_assignment_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `role_assignment_delete`") # noqa: E501
# verify the required parameter 'role_assignment_id' is set
if ('role_assignment_id' not in params or
params['role_assignment_id'] is None):
raise ValueError("Missing the required parameter `role_assignment_id` when calling `role_assignment_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
if 'role_assignment_id' in params:
path_params['roleAssignmentId'] = params['role_assignment_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationId}/roleAssignments/{roleAssignmentId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def role_assignment_get(self, application_id, role_assignment_id, **kwargs): # noqa: E501
""" # noqa: E501
Get a role assignment of the specified blockchain application matching a specific user role assignment ID. Users who are Workbench administrators get the role assignment. Non-Workbench administrators get the role assignment if they are associated in the application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignment_get(application_id, role_assignment_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the configuration (required)
:param int role_assignment_id: The id of the role assignment (required)
:return: RoleAssignment
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.role_assignment_get_with_http_info(application_id, role_assignment_id, **kwargs) # noqa: E501
else:
(data) = self.role_assignment_get_with_http_info(application_id, role_assignment_id, **kwargs) # noqa: E501
return data
def role_assignment_get_with_http_info(self, application_id, role_assignment_id, **kwargs): # noqa: E501
""" # noqa: E501
Get a role assignment of the specified blockchain application matching a specific user role assignment ID. Users who are Workbench administrators get the role assignment. Non-Workbench administrators get the role assignment if they are associated in the application. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignment_get_with_http_info(application_id, role_assignment_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the configuration (required)
:param int role_assignment_id: The id of the role assignment (required)
:return: RoleAssignment
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id', 'role_assignment_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method role_assignment_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `role_assignment_get`") # noqa: E501
# verify the required parameter 'role_assignment_id' is set
if ('role_assignment_id' not in params or
params['role_assignment_id'] is None):
raise ValueError("Missing the required parameter `role_assignment_id` when calling `role_assignment_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
if 'role_assignment_id' in params:
path_params['roleAssignmentId'] = params['role_assignment_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationId}/roleAssignments/{roleAssignmentId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RoleAssignment', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def role_assignments_get(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
List all role assignments of the specified blockchain application. Users who are Workbench administrators get all role assignments. Non-Workbench administrators get all their role assignments. Roles are specified in the Workbench application configuration and can be retrieved from GET /applications/{applicationID}. Also, user information can be retrieved from GET /users/{userID}. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignments_get(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the configuration (required)
:param int application_role_id: The id of the application role
:param int top: The maximum number of entries to return in the result set.
:param int skip: The number of entries to skip in the result set.
:return: RoleAssignmentList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.role_assignments_get_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.role_assignments_get_with_http_info(application_id, **kwargs) # noqa: E501
return data
def role_assignments_get_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
List all role assignments of the specified blockchain application. Users who are Workbench administrators get all role assignments. Non-Workbench administrators get all their role assignments. Roles are specified in the Workbench application configuration and can be retrieved from GET /applications/{applicationID}. Also, user information can be retrieved from GET /users/{userID}. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignments_get_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the configuration (required)
:param int application_role_id: The id of the application role
:param int top: The maximum number of entries to return in the result set.
:param int skip: The number of entries to skip in the result set.
:return: RoleAssignmentList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id', 'application_role_id', 'top', 'skip'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method role_assignments_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `role_assignments_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
if 'application_role_id' in params:
query_params.append(('applicationRoleId', params['application_role_id'])) # noqa: E501
if 'top' in params:
query_params.append(('top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationId}/roleAssignments', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RoleAssignmentList', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def role_assignments_post(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Creates a user-to-role mapping in the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignments_post(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the configuration. (required)
:param RoleAssignmentInput role_assignment: New user-to-role mapping.
:return: int
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.role_assignments_post_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.role_assignments_post_with_http_info(application_id, **kwargs) # noqa: E501
return data
def role_assignments_post_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
Creates a user-to-role mapping in the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.role_assignments_post_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the configuration. (required)
:param RoleAssignmentInput role_assignment: New user-to-role mapping.
:return: int
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id', 'role_assignment'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method role_assignments_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `role_assignments_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'role_assignment' in params:
body_params = params['role_assignment']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json-patch+json', 'application/json', 'text/json', 'application/*+json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationId}/roleAssignments', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='int', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def workflow_get(self, workflow_id, **kwargs): # noqa: E501
""" # noqa: E501
Get a workflow matching a specific workflow ID. Users who are Workbench administrators get the workflow. Non-Workbench administrators get the workflow if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.workflow_get(workflow_id, async=True)
>>> result = thread.get()
:param async bool
:param int workflow_id: The id of the workflow (required)
:return: Workflow
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.workflow_get_with_http_info(workflow_id, **kwargs) # noqa: E501
else:
(data) = self.workflow_get_with_http_info(workflow_id, **kwargs) # noqa: E501
return data
def workflow_get_with_http_info(self, workflow_id, **kwargs): # noqa: E501
""" # noqa: E501
Get a workflow matching a specific workflow ID. Users who are Workbench administrators get the workflow. Non-Workbench administrators get the workflow if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.workflow_get_with_http_info(workflow_id, async=True)
>>> result = thread.get()
:param async bool
:param int workflow_id: The id of the workflow (required)
:return: Workflow
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['workflow_id'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method workflow_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'workflow_id' is set
if ('workflow_id' not in params or
params['workflow_id'] is None):
raise ValueError("Missing the required parameter `workflow_id` when calling `workflow_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'workflow_id' in params:
path_params['workflowId'] = params['workflow_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/workflows/{workflowId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Workflow', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def workflows_get(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
List all workflows of the specified blockchain application. Users who are Workbench administrators get all workflows. Non-Workbench administrators get all workflows for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.workflows_get(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param int top: The maximum number of items to return
:param int skip: The number of items to skip before returning
:return: WorkflowList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.workflows_get_with_http_info(application_id, **kwargs) # noqa: E501
else:
(data) = self.workflows_get_with_http_info(application_id, **kwargs) # noqa: E501
return data
def workflows_get_with_http_info(self, application_id, **kwargs): # noqa: E501
""" # noqa: E501
List all workflows of the specified blockchain application. Users who are Workbench administrators get all workflows. Non-Workbench administrators get all workflows for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.workflows_get_with_http_info(application_id, async=True)
>>> result = thread.get()
:param async bool
:param int application_id: The id of the application (required)
:param int top: The maximum number of items to return
:param int skip: The number of items to skip before returning
:return: WorkflowList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['application_id', 'top', 'skip'] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method workflows_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'application_id' is set
if ('application_id' not in params or
params['application_id'] is None):
raise ValueError("Missing the required parameter `application_id` when calling `workflows_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'application_id' in params:
path_params['applicationId'] = params['application_id'] # noqa: E501
query_params = []
if 'top' in params:
query_params.append(('top', params['top'])) # noqa: E501
if 'skip' in params:
query_params.append(('skip', params['skip'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v1/applications/{applicationId}/workflows', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='WorkflowList', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
osp/citations/models/citation.py | davidmcclure/open-syllabus-project | 220 | 12665179 | <reponame>davidmcclure/open-syllabus-project<filename>osp/citations/models/citation.py
from osp.common import config
from osp.common.models import BaseModel
from osp.corpus.models import Document
from osp.citations.models import Text
from osp.institutions.models import Institution
from osp.institutions.models import Institution_Document
from osp.fields.models import Subfield
from osp.fields.models import Subfield_Document
from playhouse.postgres_ext import ArrayField
from peewee import ForeignKeyField, CharField
class Citation(BaseModel):
text = ForeignKeyField(Text)
document = ForeignKeyField(Document)
tokens = ArrayField(CharField)
class Meta:
database = config.get_table_db('citation')
indexes = ((('document', 'text'), True),)
@property
def subfield(self):
"""
Get the document's subfield, if any.
Returns: Subfield
"""
return (
Subfield
.select()
.join(Subfield_Document)
.join(Document)
.where(Document.id==self.document)
.order_by(Subfield_Document.offset.asc())
.first()
)
@property
def institution(self):
"""
Get the document's institution, if any.
Returns: Institution
"""
return (
Institution
.select()
.join(Institution_Document)
.join(Document)
.where(Document.id==self.document)
.first()
)
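# Illustrative usage sketch (not part of the original module): reading a few
# citations together with their derived subfield and institution. Assumes the
# peewee database configured above is initialized and populated.
#
# for citation in Citation.select().limit(10):
#     print(citation.text_id, citation.subfield, citation.institution)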
|
kythe/docs/asciidoc.bzl | wcalandro/kythe | 1,168 | 12665193 | load("@bazel_skylib//lib:shell.bzl", "shell")
load("@bazel_skylib//lib:paths.bzl", "paths")
AsciidocInfo = provider(
doc = "Information about the asciidoc-generated files.",
fields = {
"primary_output_path": "Path of the primary output file beneath {resource_dir}.",
"resource_dir": "File for the directory containing all of the generated resources.",
},
)
_toolchain_type = "//tools/build_rules/external_tools:external_tools_toolchain_type"
def _asciidoc_impl(ctx):
resource_dir = ctx.actions.declare_directory(ctx.label.name + ".d")
primary_output = "{name}.html".format(name = ctx.label.name)
# Declared as an output, but not saved as part of the default output group.
# Build with --output_groups=+asciidoc_logfile to retain.
logfile = ctx.actions.declare_file(ctx.label.name + ".logfile")
# Locate the asciidoc binary from the toolchain and construct its args.
asciidoc = ctx.toolchains[_toolchain_type].asciidoc
args = ["--backend", "html", "--no-header-footer"]
for key, value in ctx.attr.attrs.items():
if value:
args.append("--attribute=%s=%s" % (key, value))
else:
args.append("--attribute=%s!" % (key,))
if ctx.attr.example_script:
args.append("--attribute=example_script=" + ctx.file.example_script.path)
args += ["--conf-file=%s" % c.path for c in ctx.files.confs]
args += ["-o", paths.join(resource_dir.path, primary_output)]
args.append(ctx.file.src.path)
# Get the path where all our necessary tools are located so it can be set
# to PATH in our run_shell command.
tool_path = ctx.toolchains[_toolchain_type].path
# Resolve data targets to get input files and runfiles manifests.
data, _, manifests = ctx.resolve_command(tools = ctx.attr.data)
# Run asciidoc and capture stderr to logfile. If it succeeds, look in the
# captured log for error messages and fail if we find any.
ctx.actions.run_shell(
inputs = ([ctx.file.src] +
ctx.files.confs +
([ctx.file.example_script] if ctx.file.example_script else []) +
data),
input_manifests = manifests,
outputs = [resource_dir, logfile],
arguments = args,
command = "\n".join([
"set -e",
"mkdir -p {resource_dir}".format(resource_dir = shell.quote(resource_dir.path)),
# Run asciidoc itself, and fail if it returns nonzero.
"{asciidoc} \"$@\" 2> >(tee -a {logfile} >&2)".format(
logfile = shell.quote(logfile.path),
asciidoc = shell.quote(asciidoc),
),
# The tool succeeded, but now check for error diagnostics.
'if grep -q -e "filter non-zero exit code" -e "no output from filter" {logfile}; then'.format(
logfile = shell.quote(logfile.path),
),
"exit 1",
"fi",
# Move SVGs to the appropriate directory.
"find . -name '*.svg' -maxdepth 1 -exec mv '{{}}' {out}/ \\;".format(out = shell.quote(resource_dir.path)),
]),
env = {"PATH": tool_path},
mnemonic = "RunAsciidoc",
)
return [
DefaultInfo(files = depset([resource_dir])),
OutputGroupInfo(asciidoc_logfile = depset([logfile])),
AsciidocInfo(primary_output_path = primary_output, resource_dir = resource_dir),
]
asciidoc = rule(
implementation = _asciidoc_impl,
toolchains = ["//tools/build_rules/external_tools:external_tools_toolchain_type"],
attrs = {
"src": attr.label(
doc = "asciidoc file to process",
allow_single_file = True,
),
"attrs": attr.string_dict(
doc = "Dict of attributes to pass to asciidoc as --attribute=KEY=VALUE",
),
"confs": attr.label_list(
doc = "`conf-file`s to pass to asciidoc",
allow_files = True,
),
"data": attr.label_list(
doc = "Files/targets used during asciidoc generation. Only needed for tools used in example_script.",
allow_files = True,
),
"example_script": attr.label(
doc = "Script to pass to asciidoc as --attribute=example_script=VALUE.",
allow_single_file = True,
),
},
doc = "Generate asciidoc",
)
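# Illustrative BUILD-file usage (a sketch; the load label, target name, and file
# names are assumptions, not taken from the repository):
#
# load("//kythe/docs:asciidoc.bzl", "asciidoc")
#
# asciidoc(
#     name = "schema",
#     src = "schema.txt",
#     attrs = {"revnumber": "1.0"},
#     confs = ["asciidoc.conf"],
# )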
|
examples/slack/chat.py | q0w/snug | 123 | 12665208 | """queries for the 'chat' method family"""
import snug
from .query import json_post
from .types import Message
@json_post('chat.postMessage', rtype=Message, key='message')
def post_message(channel: str, text: str) -> snug.Query[Message]:
return {'channel': channel, 'text': text}
|
saas/aiops/api/aiops-server/services/tsp_algorithms_service.py | iuskye/SREWorks | 407 | 12665217 | #!/usr/bin/env python
# encoding: utf-8
""" """
__author__ = 'sreworks'
import logging
from services.base_service import BaseService
from models.tsp_algorithms_model import TSPAlgorithmsModel
class TSPAlgorithmsService(BaseService):
def __init__(self):
BaseService.__init__(self)
self.logger = logging.getLogger(__name__)
def get_tsp_algorithm(self, algorithm_id):
tsp_algorithm = TSPAlgorithmsModel.query.filter_by(id=algorithm_id).one_or_none()
if tsp_algorithm:
return tsp_algorithm.to_json()
else:
return None
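# Illustrative usage sketch (assumes an active Flask application/database context;
# the algorithm id is an assumption):
#
# service = TSPAlgorithmsService()
# algorithm_json = service.get_tsp_algorithm(algorithm_id=1)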
|
LA Team Results/FinalPipeline_LM_mean_per_facies.py | jcohut/2016_ml_competition_facies_classification | 182 | 12665236 | <filename>LA Team Results/FinalPipeline_LM_mean_per_facies.py<gh_stars>100-1000
import numpy as np
from sklearn.cluster import FeatureAgglomeration
from sklearn.ensemble import ExtraTreesClassifier, GradientBoostingClassifier, VotingClassifier
from sklearn.feature_selection import SelectFromModel
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline, make_union
from sklearn.preprocessing import FunctionTransformer
# NOTE: Make sure that the class is labeled 'class' in the data file
tpot_data = np.recfromcsv('PATH/TO/DATA/FILE', delimiter='COLUMN_SEPARATOR', dtype=np.float64)
features = np.delete(tpot_data.view(np.float64).reshape(tpot_data.size, -1), tpot_data.dtype.names.index('class'), axis=1)
training_features, testing_features, training_classes, testing_classes = \
train_test_split(features, tpot_data['class'], random_state=42)
exported_pipeline = make_pipeline(
make_union(
make_union(
FeatureAgglomeration(affinity="euclidean", linkage="ward"),
SelectFromModel(estimator=ExtraTreesClassifier(bootstrap=False, class_weight=None, criterion='gini',
max_depth=None, max_features=0.02, max_leaf_nodes=None,
min_impurity_split=1e-07, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=10, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False), threshold=0.06)
),
FunctionTransformer(lambda X: X)
),
GradientBoostingClassifier(learning_rate=0.21, max_features=0.21, n_estimators=500)
)
exported_pipeline.fit(training_features, training_classes)
results = exported_pipeline.predict(testing_features)
|
src/db/migrations/00011.py | agu3rra/InfraBox | 265 | 12665249 | import os
import base64
from Crypto.PublicKey import RSA
from pyinfraboxutils.secrets import encrypt_secret
import psycopg2
import psycopg2.extras
private_key_path = os.environ.get('INFRABOX_RSA_PRIVATE_KEY_PATH', '/var/run/secrets/infrabox.net/rsa/id_rsa')
def decrypt_secret(s):
with open(private_key_path) as f:
key = RSA.importKey(f.read())
s = base64.b64decode(s)
return key.decrypt(s)
def migrate(conn):
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute('''
SELECT id, value
FROM secret
''')
secrets = cur.fetchall()
cur.close()
for s in secrets:
value = decrypt_secret(s['value'])
new_value = encrypt_secret(value)
cur = conn.cursor()
cur.execute('''
UPDATE secret
SET value = %s
WHERE id = %s
''', [new_value, s['id']])
cur.close()
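# Illustrative invocation sketch (connection parameters are assumptions):
#
# conn = psycopg2.connect(dbname='infrabox', user='postgres', host='localhost')
# migrate(conn)
# conn.commit()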
|
src/CVC_solver.py | shushu-qin/deeponet | 140 | 12665274 | <filename>src/CVC_solver.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
def solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt):
"""Solve
u_t + a(x) * u_x = 0
"""
# Case I: Analytical solution for a(x)=1, u(x,0)=V(x) (V,V' periodic)
x = np.linspace(xmin, xmax, Nx)
t = np.linspace(tmin, tmax, Nt)
u = V((x[:, None] - t) % 1)
# Case II: Wendroff for a(x)=1, u(x,0)=f(x), u(0,t)=g(t) (f(0)=g(0))
"""
x = np.linspace(xmin, xmax, Nx)
t = np.linspace(tmin, tmax, Nt)
h = x[1] - x[0]
dt = t[1] - t[0]
lam = dt / h
u = np.zeros([Nx, Nt])
u[:, 0] = f(x)
u[0, :] = g(t)
r = (1 - lam) / (1 + lam)
K = np.eye(Nx - 1, k=0)
K_temp = np.eye(Nx - 1, k=0)
Trans = np.eye(Nx - 1, k=-1)
for _ in range(Nx - 2):
K_temp = (-r) * (Trans @ K_temp)
K += K_temp
D = r * np.eye(Nx - 1, k=0) + np.eye(Nx - 1, k=-1)
for n in range(Nt - 1):
b = np.zeros(Nx - 1)
b[0] = g(n * dt) - r * g((n + 1) * dt)
u[1:, n + 1] = K @ (D @ u[1:, n] + b)
"""
# Case III: Wendroff for a(x)=1+0.1*V(x), u(x,0)=f(x), u(0,t)=g(t) (f(0)=g(0))
"""
x = np.linspace(xmin, xmax, Nx)
t = np.linspace(tmin, tmax, Nt)
h = x[1] - x[0]
dt = t[1] - t[0]
lam = dt / h
v = 1 + 0.1 * V(x)
u = np.zeros([Nx, Nt])
u[:, 0] = f(x)
u[0, :] = g(t)
a = (v[:-1] + v[1:]) / 2
k = (1 - a * lam) / (1 + a * lam)
K = np.eye(Nx - 1, k=0)
K_temp = np.eye(Nx - 1, k=0)
Trans = np.eye(Nx - 1, k=-1)
for _ in range(Nx - 2):
K_temp = (-k[:, None]) * (Trans @ K_temp)
K += K_temp
D = np.diag(k) + np.eye(Nx - 1, k=-1)
for n in range(Nt - 1):
b = np.zeros(Nx - 1)
b[0] = g(n * dt) - k[0] * g((n + 1) * dt)
u[1:, n + 1] = K @ (D @ u[1:, n] + b)
"""
# Case IV: Wendroff for a(x)=1+0.1*(V(x)+V(1-x))/2, u(x,0)=f(x) (f,f' periodic)
"""
x = np.linspace(xmin, xmax, Nx)
t = np.linspace(tmin, tmax, Nt)
h = x[1] - x[0]
dt = t[1] - t[0]
lam = dt / h
v = 1 + 0.1 * (V(x) + V(x)[::-1]) / 2
u = np.zeros([Nx, Nt])
u[:, 0] = f(x)
a = (v[:-1] + v[1:]) / 2
I = np.eye(Nx - 1)
Ir = np.roll(I, 1, axis=0)
D = lam * a[:, None] * (I - Ir)
A = I + Ir + D
B = I + Ir - D
for n in range(Nt - 1):
u[1:, n + 1] = np.linalg.solve(A, B @ u[1:, n])
u[0, :] = u[-1, :]
"""
return x, t, u
def main():
# Case I: Analytical solution for a(x)=1, u(x,0)=V(x) (V,V' periodic)
xmin, xmax = 0, 1
tmin, tmax = 0, 1
V = lambda x: np.sin(2 * np.pi * x)
f = None
g = None
u_true = lambda x, t: V(x - t)
Nx, Nt = 100, 100
x, t, u = solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt)
print(np.max(abs(u - u_true(x[:, None], t))))
print(np.average(abs(u - u_true(x[:, None], t))))
    # Case II: Wendroff for a(x)=1, u(x,0)=f(x), u(0,t)=g(t) (f(0)=g(0))
"""
xmin, xmax = 0, 1
tmin, tmax = 0, 1
V = None
f = lambda x: (2 * np.pi * x) ** 5
g = lambda t: (2 * np.pi * (-t)) ** 5
u_true = lambda x, t: (2 * np.pi * (x - t)) ** 5
Nx, Nt = 100, 100
x, t, u = solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt)
print(np.max(abs(u - u_true(x[:, None], t))))
print(np.average(abs(u - u_true(x[:, None], t))))
"""
# Case III: Wendroff for a(x)=1+0.1*V(x), u(x,0)=f(x), u(0,t)=g(t) (f(0)=g(0))
"""
vel = 1
xmin, xmax = 0, 1
tmin, tmax = 0, 1
V = lambda x: np.ones_like(x) * vel
f = lambda x: np.sin(2 * np.pi * x)
g = lambda t: np.sin(2 * np.pi * (-(1 + 0.1 * vel) * t))
u_true = lambda x, t: np.sin(2 * np.pi * (x - (1 + 0.1 * vel) * t))
Nx, Nt = 100, 100
x, t, u = solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt)
print(np.max(abs(u - u_true(x[:, None], t))))
print(np.average(abs(u - u_true(x[:, None], t))))
"""
# Case IV: Wendroff for a(x)=1+0.1*(V(x)+V(1-x))/2, u(x,0)=f(x) (f,f' periodic)
"""
vel = 1
xmin, xmax = 0, 1
tmin, tmax = 0, 1
V = lambda x: np.ones_like(x) * vel
f = lambda x: np.sin(2 * np.pi * x)
g = lambda t: np.sin(2 * np.pi * (-(1 + 0.1 * vel) * t))
u_true = lambda x, t: np.sin(2 * np.pi * (x - (1 + 0.1 * vel) * t))
Nx, Nt = 100, 100
x, t, u = solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt)
print(np.max(abs(u - u_true(x[:, None], t))))
print(np.average(abs(u - u_true(x[:, None], t))))
"""
# plot
u_true = u_true(x[:, None], t)
error = abs(u - u_true)
axis = plt.subplot(111)
plt.imshow(error, cmap="rainbow", vmin=0)
plt.colorbar()
xlabel = [format(i, ".1f") for i in np.linspace(0, 1, num=11)]
ylabel = [format(i, ".1f") for i in np.linspace(0, 1, num=11)]
axis.set_xticks(range(0, 101, 10))
axis.set_xticklabels(xlabel)
axis.set_yticks(range(0, 101, 10))
axis.set_yticklabels(ylabel)
axis.set_xlabel("t")
axis.set_ylabel("x")
axis.set_title(r"Error", fontdict={"fontsize": 30}, loc="left")
return error
if __name__ == "__main__":
error = main()
|
convfeatures.py | taha-a/image | 161 | 12665279 | <filename>convfeatures.py
import tensorflow as tf
import numpy as np
import argparse
import os
batch_size = 10
files, input_layer, output_layer = [None]*3
def build_prepro_graph(inception_path):
global input_layer, output_layer
with open(inception_path, 'rb') as f:
fileContent = f.read()
graph_def = tf.GraphDef()
graph_def.ParseFromString(fileContent)
tf.import_graph_def(graph_def)
graph = tf.get_default_graph()
input_layer = graph.get_tensor_by_name("import/InputImage:0")
output_layer = graph.get_tensor_by_name(
"import/InceptionV4/Logits/AvgPool_1a/AvgPool:0")
input_file = tf.placeholder(dtype=tf.string, name="InputFile")
image_file = tf.read_file(input_file)
jpg = tf.image.decode_jpeg(image_file, channels=3)
png = tf.image.decode_png(image_file, channels=3)
output_jpg = tf.image.resize_images(jpg, [299, 299]) / 255.0
output_jpg = tf.reshape(
output_jpg, [
1, 299, 299, 3], name="Preprocessed_JPG")
output_png = tf.image.resize_images(png, [299, 299]) / 255.0
output_png = tf.reshape(
output_png, [
1, 299, 299, 3], name="Preprocessed_PNG")
return input_file, output_jpg, output_png
def load_image(sess, io, image):
if image.split('.')[-1] == "png":
return sess.run(io[2], feed_dict={io[0]: image})
return sess.run(io[1], feed_dict={io[0]: image})
def load_next_batch(sess, io, img_path):
for batch_idx in range(0, len(files), batch_size):
batch = files[batch_idx:batch_idx + batch_size]
batch = np.array(
map(lambda x: load_image(sess, io, img_path + x), batch))
batch = batch.reshape((batch_size, 299, 299, 3))
yield batch
def forward_pass(io, img_path):
global output_layer, files
files = sorted(np.array(os.listdir(img_path)))
print "#Images:", len(files)
n_batch = len(files) / batch_size
with tf.Session() as sess:
init = tf.global_variables_initializer()
sess.run(init)
batch_iter = load_next_batch(sess, io, img_path)
for i in xrange(n_batch):
batch = batch_iter.next()
assert batch.shape == (batch_size, 299, 299, 3)
feed_dict = {input_layer: batch}
            if i == 0:
prob = sess.run(
output_layer, feed_dict=feed_dict).reshape(
batch_size, 1536)
else:
prob = np.append(
prob,
sess.run(
output_layer,
feed_dict=feed_dict).reshape(
batch_size,
1536),
axis=0)
if i % 5 == 0:
print "Progress:" + str(((i + 1) / float(n_batch) * 100)) + "%\n"
print "Progress:" + str(((n_batch) / float(n_batch) * 100)) + "%\n"
print
print "Saving Features : features.npy\n"
np.save('Dataset/features', prob)
def get_features(sess, io, img, saveencoder=False):
global output_layer
output_layer = tf.reshape(output_layer, [1,1536], name="Output_Features")
image = load_image(sess, io, img)
feed_dict = {input_layer: image}
prob = sess.run(output_layer, feed_dict=feed_dict)
if saveencoder:
tensors = [n.name for n in sess.graph.as_graph_def().node]
with open("model/Encoder/Encoder_Tensors.txt", 'w') as f:
for t in tensors:
f.write(t + "\n")
saver = tf.train.Saver()
saver.save(sess, "model/Encoder/model.ckpt")
return prob
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument(
"--data_path",
type=str,
help="A valid path to MSCCOCO/flickr30k images(unzipped)",
required=True)
parser.add_argument(
"--inception_path",
type=str,
help="A valid path to inception_v4.pb",
required=True)
args = parser.parse_args()
return args
if __name__ == "__main__":
args=get_arguments()
print "Extracting Features"
io = build_prepro_graph(args.inception_path)
forward_pass(io, args.data_path)
print "done"
|
lib/metric/__init__.py | khanhptnk/bandit-nmt | 149 | 12665280 | from .PertFunction import PertFunction
from .Loss import *
from .Reward import *
from .Bleu import *
|
gdc_client/settings/parser.py | jrouly/gdc-client | 139 | 12665300 | <filename>gdc_client/settings/parser.py
import logging
from functools import partial
from gdc_client.common.config import GDCClientConfigShared
logger = logging.getLogger("gdc-client")
HELP = (
    "Path to an INI-type config file. Shows what the settings will look like"
    " when a custom config file is used"
)
class SettingsResolver(object):
def __init__(self, config_file):
self.config = GDCClientConfigShared(config_file)
def download(self):
logger.info(self.config.to_display_string("download"))
return self.config.to_display_string("download")
def upload(self):
logger.info(self.config.to_display_string("upload"))
return self.config.to_display_string("upload")
def resolve(config_file, args):
resolver = SettingsResolver(config_file)
func = getattr(resolver, args.section)
return func()
def config(parser, config_file=None):
parser.add_argument("--config", help=HELP, metavar="FILE")
choices = parser.add_subparsers(title="Settings to display", dest="section")
choices.required = True
download_choice = choices.add_parser("download", help="Display download settings")
download_choice.add_argument("--config", help=HELP, metavar="FILE")
download_choice.set_defaults(func=partial(resolve, config_file))
upload_choice = choices.add_parser("upload", help="Display upload settings")
upload_choice.add_argument("--config", help=HELP, metavar="FILE")
upload_choice.set_defaults(func=partial(resolve, config_file))
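# Illustrative wiring sketch (the surrounding argparse setup is an assumption and
# normally lives elsewhere in the CLI):
#
# parser = argparse.ArgumentParser(prog='gdc-client settings')
# config(parser, config_file='my-settings.ini')
# args = parser.parse_args(['download'])
# args.func(args)  # logs and returns the resolved download settings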
|
vit/formatter/modified_julian.py | kinifwyne/vit | 179 | 12665313 | from vit.formatter.modified import Modified
class ModifiedJulian(Modified):
def format(self, modified, task):
return self.julian(modified)
|
examples/django_hx_chatserver/example_app/run.py | bliedblad/hendrix | 309 | 12665325 | <gh_stars>100-1000
from hendrix.deploy.base import HendrixDeploy
from hendrix.experience import hey_joe
deployer = HendrixDeploy(options={'wsgi': 'example_app.wsgi.application', 'http_port': 7575})
websocket_service = hey_joe.WebSocketService("127.0.0.1", 9000)
deployer.add_non_tls_websocket_service(websocket_service)
deployer.run()
|
tools/find/tokenizer.py | Schweinepriester/oil | 2,209 | 12665341 | #!/usr/bin/env python2
# Copyright 2019 <NAME>. All rights reserved.
# Copyright 2019 <NAME>. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
"""
tokenizer.py: Tokenizer for find.
"""
_ops = [
('!', 'BANG'),
('(', 'LPAR'),
(')', 'RPAR'),
('-o', 'OR'),
('-a', 'AND'),
(',', 'COMMA'),
(';', 'SEMI'),
('+', 'PLUS'),
('-true', 'TRUE'),
('-false', 'FALSE'),
('-name', 'NAME'),
('-iname', 'INAME'),
('-lname', 'LNAME'),
('-ilname', 'ILNAME'),
('-path', 'PATH'),
('-ipath', 'IPATH'),
('-regex', 'REGEX'),
('-iregex', 'IREGEX'),
('-executable', 'EXECUTABLE'),
('-readable', 'READABLE'),
('-writable', 'WRITABLE'),
('-empty', 'EMPTY'),
('-size', 'SIZE'),
('-type', 'TYPE'),
('-xtype', 'XTYPE'),
('-perm', 'PERM'),
('-group', 'GROUP'),
('-user', 'USER'),
('-gid', 'GID'),
('-uid', 'UID'),
('-nogroup', 'NOGROUP'),
('-nouser', 'NOUSER'),
('-amin', 'AMIN'),
('-anewer', 'ANEWER'),
('-atime', 'ATIME'),
('-cmin', 'CMIN'),
('-cnewer', 'CNEWER'),
('-ctime', 'CTIME'),
('-mmin', 'MMIN'),
# note -newer not -mnewer
('-newer', 'MNEWER'),
('-mtime', 'MTIME'),
('-newerXY', 'NEWERXY'),
('-delete', 'DELETE'),
('-prune', 'PRUNE'),
('-quit', 'QUIT'),
('-print', 'PRINT'),
('-print0', 'PRINT0'),
('-printf', 'PRINTF'),
('-ls', 'LS'),
('-fprint', 'FPRINT'),
('-fprint0', 'FPRINT0'),
('-fprintf', 'FPRINTF'),
('-fls', 'FLS'),
('-exec', 'EXEC'),
('-execdir', 'EXECDIR'),
('-ok', 'OK'),
('-okdir', 'OKDIR'),
]
# start=100 is pgen voodoo, don't touch
opmap = dict((op, i) for i, (op, name) in enumerate(_ops, start=100))
tok_name = dict((i, name) for i, (op, name) in enumerate(_ops, start=100))
tok_name[0] = 'ENDMARKER'
tok_name[1] = 'STRING'
#tok_name[len(tok_name)] = 'N_TOKENS'
tok_name[256] = 'NT_OFFSET'
import sys
this_module = sys.modules[__name__]
for i, name in tok_name.items():
setattr(this_module, name, i)
class TokenDef(object):
def GetTerminalNum(self, label):
""" e.g. NAME -> 1 """
itoken = getattr(this_module, label, None)
assert isinstance(itoken, int), label
assert itoken in tok_name, label
return itoken
def GetOpNum(self, value):
""" e.g '(' -> LPAR """
return opmap[value]
def GetKeywordNum(self, value):
return None
def tokenize(argv):
start = end = (1, 0) # dummy location data
line_text = ''
for a in argv:
#log('tok = %r', a)
typ = opmap.get(a, STRING)
# print (typ, a, start, end, line_text)
yield (typ, a, start, end, line_text)
yield (ENDMARKER, '', start, end, line_text)
def is_terminal(type):
# type (int) -> bool
return type < NT_OFFSET
def is_nonterminal(type):
# type (int) -> bool
return type >= NT_OFFSET
def is_eof(type):
# type (int) -> bool
return type == ENDMARKER
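# Illustrative usage sketch: tokenize a find-style argv and print token names.
#
# for typ, value, _, _, _ in tokenize(['-type', 'f', '-a', '-print']):
#     print tok_name[typ], value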
|
tests/pre_test.py | danielpalstra/mealie | 1,927 | 12665379 | <filename>tests/pre_test.py
from mealie.core.config import determine_sqlite_path, settings
DB_URL = determine_sqlite_path(path=True, suffix="test")
DB_URL.unlink(missing_ok=True)
if settings.DB_ENGINE != "postgres":
# Monkeypatch Database Testing
settings.DB_URL = determine_sqlite_path(path=False, suffix="test")
|
scripts/nfs-roles-management/internal/model/share_mount_model.py | AlfiyaRF/cloud-pipeline | 126 | 12665412 | # Copyright 2017-2020 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ShareMountModel(object):
def __init__(self):
self.identifier = None
self.region_id = None
self.mount_root = None
self.mount_type = None
self.mount_options = None
@classmethod
def load(cls, json):
instance = ShareMountModel()
if not json:
return None
instance.identifier = json['id']
instance.region_id = json['regionId']
instance.mount_root = json['mountRoot']
instance.mount_type = json['mountType']
instance.mount_options = json['mountOptions'] if 'mountOptions' in json else None
return instance
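# Illustrative usage sketch (payload keys mirror those read in load(); the values
# are assumptions):
#
# share = ShareMountModel.load({'id': 1, 'regionId': 2, 'mountRoot': '/exports',
#                               'mountType': 'NFS', 'mountOptions': 'rw'})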
|
bert-distillation-multimetric/sigopt_clients/sigopt_experiment_client.py | meghanaravikumar/sigopt-examples | 213 | 12665424 | <reponame>meghanaravikumar/sigopt-examples
import logging
class SigOptExperiment:
def __init__(self, connection):
self.connection = connection
def initialize_random_experiment(self, experiment_name, project_name, parameters_list, metrics_list, observation_budget,
metadata, parallel_bandwidth=1):
return self.initialize_experiment(experiment_name, project_name, parameters_list, list(), list(), metrics_list,
observation_budget, metadata, "random", parallel_bandwidth)
def initialize_bayesian_experiment(self, experiment_name, project_name, parameters_list, metrics_list, observation_budget, metadata, parallel_bandwidth):
return self.initialize_experiment(experiment_name, project_name, parameters_list, list(), list(), metrics_list,
observation_budget,
metadata, "offline", parallel_bandwidth)
def initialize_experiment(self, experiment_name, project_name, parameters_list, conditionals_list,
linear_constraints_list, metrics_list,
observation_budget, metadata, experiment_type, parallel_bandwidth=1):
experiment = self.connection.experiments().create(
name=experiment_name,
project=project_name,
# Define which parameters you would like to tune
parameters=parameters_list,
linear_constraints=linear_constraints_list,
conditionals=conditionals_list,
metrics=metrics_list,
parallel_bandwidth=parallel_bandwidth,
# Define an Observation Budget for your experiment
observation_budget=observation_budget,
metadata=metadata,
type=experiment_type
)
logging.info("Created experiment: https://sigopt.com/experiment/%s", experiment.id)
return experiment
def get_initialized_experiment(self, experiment_id):
return self.connection.experiments(experiment_id).fetch()
def get_suggestions(self, experiment):
return self.connection.experiments(experiment.id).suggestions().create()
def get_suggestions_meatadata(self, experiment, metadata_dict):
return self.connection.experiments(experiment.id).suggestions().create(metadata=metadata_dict)
def get_best_suggestions(self, experiment):
return self.connection.experiments(experiment.id).best_assignments().fetch()
def update_suggestion(self, experiment_id, suggestion_id, metadata_dict):
return self.connection.experiments(experiment_id).suggestions(suggestion_id).update(
metadata=metadata_dict
)
def update_experiment(self, experiment, suggestion, evaluated_value):
observation = self.connection.experiments(experiment.id).observations().create(suggestion=suggestion.id, value=evaluated_value)
return self.connection.experiments(experiment.id).fetch(), observation
def update_experiment_multimetric_metadata(self, experiment, suggestion, evaluated_value, metadata_dict, failed=False):
logging.info("updating experiment %s with metadata %s", experiment.id, str(metadata_dict))
self.connection.experiments(experiment.id).observations().create(suggestion=suggestion.id,
values=evaluated_value,
failed=failed,
metadata=metadata_dict)
return self.connection.experiments(experiment.id).fetch()
def update_experiment_multimetric(self, experiment, suggestion, evaluated_values, failed=False):
self.connection.experiments(experiment.id).observations().create(suggestion=suggestion.id,
values=evaluated_values,
failed=failed)
return self.connection.experiments(experiment.id).fetch()
def create_experiment_metadata(self, experiment, metadata_dict):
self.connection.experiments(experiment.id).observations().create(metadata=metadata_dict)
return self.connection.experiments(experiment.id).fetch()
def create_observation_metadata(self, experiment, observation, metadata_dict):
updated_observation = self.connection.experiments(experiment.id).observations(observation.id).update(metadata=metadata_dict)
return self.connection.experiments(experiment.id).fetch(), updated_observation
def get_all_experiments(self):
return self.connection.experiments().fetch()
def get_all_observations(self, experiment):
return self.connection.experiments(experiment.id).observations().fetch()
def archive_experiment(self, experiment):
logging.info("archiving experiment with id: %s", experiment.id)
self.connection.experiments(experiment.id).delete()
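# Illustrative optimization-loop sketch (not part of the original client). The
# API token, parameter/metric definitions, and evaluate() are assumptions.
#
# from sigopt import Connection
#
# client = SigOptExperiment(Connection(client_token='SIGOPT_API_TOKEN'))
# experiment = client.initialize_bayesian_experiment(
#     'distillation-metrics', 'bert-distillation', parameters_list, metrics_list,
#     observation_budget=30, metadata={}, parallel_bandwidth=1)
# for _ in range(experiment.observation_budget):
#     suggestion = client.get_suggestions(experiment)
#     value = evaluate(suggestion.assignments)
#     experiment, _ = client.update_experiment(experiment, suggestion, value)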
|
ppgan/faceutils/face_enhancement/face_enhance.py | pcwuyu/PaddleGAN | 6,852 | 12665456 | <filename>ppgan/faceutils/face_enhancement/face_enhance.py
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn as nn
import math
import cv2
import numpy as np
from ppgan.utils.download import get_path_from_url
from ppgan.models.generators import GPEN
from ppgan.faceutils.face_detection.detection.blazeface.utils import *
GPEN_weights = 'https://paddlegan.bj.bcebos.com/models/GPEN-512.pdparams'
class FaceEnhancement(object):
def __init__(self,
path_to_enhance=None,
size = 512,
batch_size=1
):
super(FaceEnhancement, self).__init__()
# Initialise the face detector
if path_to_enhance is None:
model_weights_path = get_path_from_url(GPEN_weights)
model_weights = paddle.load(model_weights_path)
else:
model_weights = paddle.load(path_to_enhance)
self.face_enhance = GPEN(size=512, style_dim=512, n_mlp=8)
self.face_enhance.load_dict(model_weights)
self.face_enhance.eval()
self.size = size
self.mask = np.zeros((512, 512), np.float32)
cv2.rectangle(self.mask, (26, 26), (486, 486), (1, 1, 1), -1, cv2.LINE_AA)
self.mask = cv2.GaussianBlur(self.mask, (101, 101), 11)
self.mask = cv2.GaussianBlur(self.mask, (101, 101), 11)
self.mask = paddle.tile(paddle.to_tensor(self.mask).unsqueeze(0).unsqueeze(-1), repeat_times=[batch_size,1,1,3]).numpy()
def enhance_from_image(self, img):
if isinstance(img, np.ndarray):
img, _ = resize_and_crop_image(img, 512)
img = paddle.to_tensor(img).transpose([2, 0, 1])
else:
assert img.shape == [3, 512, 512]
return self.enhance_from_batch(img.unsqueeze(0))[0]
def enhance_from_batch(self, img):
if isinstance(img, np.ndarray):
img_ori, _ = resize_and_crop_batch(img, 512)
img = paddle.to_tensor(img_ori).transpose([0, 3, 1, 2])
else:
assert img.shape[1:] == [3, 512, 512]
img_ori = img.transpose([0, 2, 3, 1]).numpy()
img_t = (img/255. - 0.5) / 0.5
with paddle.no_grad():
out, __ = self.face_enhance(img_t)
image_tensor = out * 0.5 + 0.5
image_tensor = image_tensor.transpose([0, 2, 3, 1]) # RGB
image_numpy = paddle.clip(image_tensor, 0, 1) * 255.0
out = image_numpy.astype(np.uint8).cpu().numpy()
return out * self.mask + (1-self.mask) * img_ori
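# Illustrative usage sketch (not part of the original module; the image path and
# RGB channel order are assumptions):
#
# enhancer = FaceEnhancement(batch_size=1)
# face = cv2.imread('face.png')[:, :, ::-1]  # BGR -> RGB, assuming RGB input is expected
# restored = enhancer.enhance_from_image(face)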
|
open/core/betterself/models/supplement.py | lawrendran/open | 105 | 12665459 | <reponame>lawrendran/open
from django.db.models import CharField, ManyToManyField, BooleanField
from open.core.betterself.constants import BetterSelfResourceConstants
from open.core.betterself.models.ingredient_composition import IngredientComposition
from open.utilities.models import BaseModelWithUserGeneratedContent
class Supplement(BaseModelWithUserGeneratedContent):
"""
    Could be a stack like BCAA (which would have four ingredient compositions)
    or something as simple as caffeine.
"""
RESOURCE_NAME = BetterSelfResourceConstants.SUPPLEMENTS
name = CharField(max_length=300)
ingredient_compositions = ManyToManyField(IngredientComposition, blank=True)
is_taken_with_food = BooleanField(default=None, blank=True, null=True)
class Meta:
unique_together = ("user", "name")
ordering = ["user", "name"]
verbose_name = "Supplement"
verbose_name_plural = "Supplements"
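# Illustrative usage sketch (a `user` instance is assumed; the `user` field is
# implied by unique_together and assumed to come from BaseModelWithUserGeneratedContent):
#
# supplement = Supplement.objects.create(user=user, name="Caffeine", is_taken_with_food=False)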
|
examples/classifiers/CeleryTasks.py | jashanmeet-collab/mango | 123 | 12665461 | """
This is the file running on all the workers.
They will run the classifier with the desired hyperparameters
and return the results.
"""
# from __future__ import absolute_import, unicode_literals
# from celery import Celery
from __future__ import absolute_import, unicode_literals
from .celery import app
#whether sklearn_xgboost models should be enabled on Celery
include_sklearn_xgboost = True
#whether prophet model should be enabled on Celery
include_prophet = True
# app = Celery('Mango',
# broker='amqp://',
# backend='rpc://')
#
# # Optional configuration
# app.conf.update(
# result_expires=3600,
# broker_heartbeat = 0
# )
if include_sklearn_xgboost:
"""
All the Classifier Functions from sklearn
"""
import sklearn
"""
All the Classifier Functions from xgboost
"""
import xgboost
from sklearn.model_selection import cross_val_score
import numpy as np
from importlib import import_module
#Global variables to identify dataset is loaded by the worker
X = None
Y = None
worker_dataset_name = None
#global variables to identify classifier loaded by the worker
clf_fxn = None
worker_clf_name = None
num_counter = 0
# load the dataset for the classifier
def get_data_loader(dataset_name):
global worker_dataset_name
module = import_module('sklearn.datasets')
data_loader = getattr(module,dataset_name)
worker_dataset_name = dataset_name
return data_loader
# load the classifier as passed to the worker
def get_clf(clf_name):
global worker_clf_name
worker_clf_name = clf_name
for module in sklearn.__all__:
try:
module = import_module(f'sklearn.{module}')
try:
for clf in module.__all__:
if clf ==clf_name:
clf_function = getattr(module,clf_name)
return clf_function
except:
pass
except:
pass
for module in xgboost.__all__:
try:
if module ==clf_name:
module = import_module(f'xgboost')
clf_function = getattr(module,clf_name)
return clf_function
except Exception as e:
print(e)
@app.task
def run_clf_celery(clf_name,
dataset_name,
hyper_par=None):
global X, Y, clf_fxn, worker_clf_name, worker_dataset_name, num_counter
num_counter = num_counter+1
#print('Worked is called:',num_counter)
#load dataset if not done already
if worker_dataset_name!=dataset_name:
data_loader = get_data_loader(dataset_name)
X,Y= data_loader(return_X_y=True)
#load classifier if not done already
if worker_clf_name!=clf_name:
clf_fxn = get_clf(clf_name)
#Assign the hyper parameters to the classifier
if hyper_par!=None:
clf = clf_fxn(**hyper_par)
else:
clf = clf_fxn()
accuracy = cross_val_score(clf, X, Y, cv=3, scoring='accuracy').mean()
#print('accuracy is:',accuracy)
return accuracy
# If include_prophet is set to true
if include_prophet:
"""
Enabling the functionality of running prophet on PJME
"""
import numpy as np
from .prophet import Prophet
from .xgboosttree import Xgboosttree
from sklearn.model_selection import cross_val_score
from sklearn.metrics import mean_squared_error
import os
data_path = os.path.abspath('.')+'/classifiers/data/'
model = Xgboosttree()
#X_train, y_train = model.load_train_dataset("data/PJME/train_data")
X_train, y_train = model.load_train_dataset(data_path+"PJME/train_data")
X_validate, y_validate = model.load_train_dataset(data_path+"PJME/validate_data")
@app.task
def run_prophet(hyper_par):
global X_train, y_train,X_validate,y_validate
clf = Prophet(**hyper_par)
clf.fit(X_train, y_train.ravel())
y_pred = clf.predict(X_validate)
mse = mean_squared_error(y_validate, y_pred)
mse = mse/10e5
result = (-1.0) * mse
return result
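# Hypothetical client-side sketch (not part of the original file): dispatching the
# sklearn task from a separate process; the classifier name, dataset loader and
# broker setup are assumptions made for illustration.
if __name__ == '__main__':
    async_result = run_clf_celery.delay(
        'DecisionTreeClassifier',        # any classifier exposed by sklearn/xgboost
        'load_iris',                     # any sklearn.datasets loader
        hyper_par={'max_depth': 3},
    )
    print(async_result.get(timeout=60))  # mean 3-fold cross-validation accuracy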
|
cartoframes/viz/widget_list.py | CartoDB/cartoframes | 236 | 12665474 | from .widget import Widget
from .styles.utils import prop
class WidgetList:
"""WidgetList
Args:
widgets (list, Widget): The list of widgets for a layer.
default_widget (Widget, optional): The widget to be used by default.
"""
def __init__(self, widgets=None, default_widget=None):
self._widgets = self._init_widgets(widgets, default_widget)
def _init_widgets(self, widgets, default_widget):
if isinstance(widgets, list):
widget_list = []
for widget in widgets:
if isinstance(widget, dict):
widget_list.append(Widget(widget))
elif isinstance(widget, Widget):
if widget._type == 'default' and default_widget:
widget._type = default_widget._type
widget._prop = default_widget._prop
widget._value = default_widget._value
widget_list.append(widget)
return widget_list
if isinstance(widgets, dict):
return [Widget(widgets)]
else:
return []
def get_widgets_info(self):
widgets_info = []
for widget in self._widgets:
if widget:
widgets_info.append(widget.get_info())
return widgets_info
def get_variables(self):
output = {}
for widget in self._widgets:
if widget._variable_name:
output[widget._variable_name] = prop(widget._value) if widget.has_bridge() else widget._value
return output
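# Hypothetical usage sketch (not from the original module): the widget dict keys
# shown here are assumptions about what the Widget class accepts.
if __name__ == '__main__':
    widgets = WidgetList(widgets=[
        {'type': 'formula', 'value': 'count()', 'title': 'Total features'},
        {'type': 'histogram', 'value': 'price', 'title': 'Price'},
    ])
    print(widgets.get_widgets_info())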
|
create/models.py | Orad/webvirtmgr | 1,633 | 12665478 |
from django.db import models
class Flavor(models.Model):
label = models.CharField(max_length=12)
memory = models.IntegerField()
vcpu = models.IntegerField()
disk = models.IntegerField()
def __unicode__(self):
        return self.label
|
testing/slides/examples/test_marks.py | ramosmaria/school2021 | 252 | 12665513 | import pytest
import time
@pytest.mark.slow
def test_slow():
time.sleep(2)
assert 1 + 1 == 2
def test_fast():
    assert 1 + 1 == 2
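# Typical invocations for the marker above (assumes `slow` is registered as a
# custom marker in the project's pytest configuration):
#   pytest -m slow          # run only the tests marked slow
#   pytest -m "not slow"    # skip the slow tests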
|
inferno/extensions/initializers/__init__.py | 0h-n0/inferno | 204 | 12665516 | from .base import *
from .presets import *
|
IOMC/EventVertexGenerators/python/VtxSmearedEarly2p2TeVCollision_cfi.py | ckamtsikis/cmssw | 852 | 12665520 | import FWCore.ParameterSet.Config as cms
from IOMC.EventVertexGenerators.VtxSmearedParameters_cfi import Early2p2TeVCollisionVtxSmearingParameters,VtxSmearedCommon
VtxSmeared = cms.EDProducer("BetafuncEvtVtxGenerator",
Early2p2TeVCollisionVtxSmearingParameters,
VtxSmearedCommon
)
|
sk_dsp_comm/test/sandbox.py | chiranthsiddappa/scikit-dsp-comm | 139 | 12665542 | import numpy as np
from sk_dsp_comm import fec_conv
from sk_dsp_comm import digitalcom as dc
np.random.seed(100)
cc = fec_conv.FecConv()
print(cc.Nstates)
import matplotlib.pyplot as plt
import numpy as np
from sk_dsp_comm import fec_conv as fc
SNRdB = np.arange(2,12,.1)
Pb_uc = fc.conv_Pb_bound(1/2,5,[1,4,12,32,80,192,448,1024],SNRdB,2)
Pb_s = fc.conv_Pb_bound(1/2,5,[1,4,12,32,80,192,448,1024],SNRdB,1)
plt.figure(figsize=(5,5))
plt.semilogy(SNRdB,Pb_uc)
plt.semilogy(SNRdB,Pb_s)
plt.axis([2,12,1e-7,1e0])
plt.xlabel(r'$E_b/N_0$ (dB)')
plt.ylabel(r'Symbol Error Probability')
#plt.legend(('Uncoded BPSK','R=1/2, K=5, Soft'),loc='best')
plt.grid();
plt.show()
|
aries_cloudagent/protocols/present_proof/v2_0/models/__init__.py | kuraakhilesh8230/aries-cloudagent-python | 247 | 12665543 |
"""Package-wide data and code."""
from os import environ
UNENCRYPTED_TAGS = environ.get("EXCH_UNENCRYPTED_TAGS", "False").upper() == "TRUE"
|
mysqloperator/init_main.py | sjmudd/mysql-operator | 206 | 12665554 | # Copyright (c) 2020, 2021, Oracle and/or its affiliates.
#
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
#
import subprocess
import mysqlsh
import sys
import os
import logging
import shutil
from typing import cast
from .controller import utils, k8sobject
from .controller.innodbcluster.cluster_api import MySQLPod
k8sobject.g_component = "initconf"
k8sobject.g_host = os.getenv("HOSTNAME")
mysql = mysqlsh.mysql
def init_conf(datadir, pod, cluster, logger):
"""
Initialize MySQL configuration files and init scripts, which must be mounted
in /mnt/mycnfdata.
The source config files must be mounted in /mnt/initconf.
    The config files are then symlinked to /etc to be used by mysqld in the rest
of the script. The main container should directly mount them in their final
locations.
Init scripts are executed by the mysql container entrypoint when it's
initializing for the 1st time.
"""
server_id = pod.index + cluster.parsed_spec.baseServerId
report_host = f'{os.getenv("MY_POD_NAME")}.{cluster.name}-instances.{cluster.namespace}.svc.cluster.local'
logger.info(
f"Setting up configurations for {pod.name} server_id={server_id} report_host={report_host}")
srcdir = "/mnt/initconf/"
destdir = "/mnt/mycnfdata/"
os.makedirs(destdir + "my.cnf.d", exist_ok=True)
os.makedirs(destdir + "docker-entrypoint-initdb.d", exist_ok=True)
with open(srcdir + "my.cnf.in") as f:
data = f.read()
data = data.replace("@@SERVER_ID@@", str(server_id))
data = data.replace("@@HOSTNAME@@", str(report_host))
data = data.replace("@@DATADIR@@", datadir)
with open(destdir + "my.cnf", "w+") as mycnf:
mycnf.write(data)
for f in os.listdir(srcdir):
if f.startswith("initdb-"):
shutil.copy(os.path.join(srcdir, f), destdir +
"docker-entrypoint-initdb.d")
if f.endswith(".sh"):
os.chmod(os.path.join(
destdir + "docker-entrypoint-initdb.d", f), 0o555)
elif f.endswith(".cnf"):
shutil.copy(os.path.join(srcdir, f), destdir + "my.cnf.d")
if os.path.exists("/etc/my.cnf"):
logger.info("Replacing /etc/my.cnf, old contents were:")
logger.info(open("/etc/my.cnf").read())
os.remove("/etc/my.cnf")
os.symlink(destdir + "my.cnf", "/etc/my.cnf")
if os.path.exists("/etc/my.cnf.d"):
os.rmdir("/etc/my.cnf.d")
os.symlink(destdir + "my.cnf.d", "/etc/my.cnf.d")
logger.info(f"Configuration done")
def main(argv):
datadir = argv[1] if len(argv) > 1 else "/var/lib/mysql"
mysqlsh.globals.shell.options.useWizards = False
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s - [%(levelname)s] [%(name)s] %(message)s',
datefmt="%Y-%m-%dT%H:%M:%S")
logger = logging.getLogger("initmysql")
name = cast(str, os.getenv("MY_POD_NAME"))
namespace = cast(str, os.getenv("MY_POD_NAMESPACE"))
utils.log_banner(__file__, logger)
logger.info(f"Configuring mysql pod {namespace}/{name}, datadir={datadir}")
logger.debug(f"Initial contents of {datadir}:")
subprocess.run(["ls", "-l", datadir])
logger.debug("Initial contents of /mnt:")
subprocess.run(["ls", "-lR", "/mnt"])
try:
pod = MySQLPod.read(name, namespace)
cluster = pod.get_cluster()
init_conf(datadir, pod, cluster, logger)
except Exception as e:
import traceback
traceback.print_exc()
logger.critical(f"Unhandled exception while bootstrapping MySQL: {e}")
# TODO post event to the Pod and the Cluster object if this is the seed
return 1
# TODO support for restoring from clone snapshot or MEB goes in here
return 0
|
tests/slack_sdk/oauth/installation_store/test_interaface.py | timgates42/python-slack-sdk | 2,486 | 12665556 | import unittest
from slack_sdk.oauth.installation_store import InstallationStore
from slack_sdk.oauth.installation_store.async_installation_store import (
AsyncInstallationStore,
)
class TestInterface(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_sync(self):
store = InstallationStore()
self.assertIsNotNone(store)
def test_async(self):
store = AsyncInstallationStore()
self.assertIsNotNone(store)
|
python/ray/workflow/examples/comparisons/google_cloud_workflows/concat_array_workflow.py | linyiyue/ray | 21,382 | 12665566 | from typing import List
from ray import workflow
@workflow.step
def iterate(array: List[str], result: str, i: int) -> str:
if i >= len(array):
return result
return iterate.step(array, result + array[i], i + 1)
if __name__ == "__main__":
workflow.init()
print(iterate.step(["foo", "ba", "r"], "", 0).run())
|
src/python/grapl_analyzerlib/tests/test_query_gen.py | msilvey/grapl | 313 | 12665580 |
import unittest
import pytest
import hypothesis
from grapl_analyzerlib.prelude import GraphClient
from grapl_analyzerlib.nodes.lens import LensView, LensQuery
@pytest.mark.integration_test
class TestQueryGen(unittest.TestCase):
@hypothesis.settings(deadline=None)
@hypothesis.given(
lens_name=hypothesis.strategies.text(max_size=100),
)
def test_weird_chars_in_lens_name(self, lens_name: str) -> None:
"""
Roundabout way to ensure some basic properties of filter generation.
"""
client = GraphClient()
lens = LensView.get_or_create(
gclient=client,
lens_name=lens_name,
lens_type="engagement",
)
requery_lens = LensQuery().with_lens_name(lens_name).query_first(client)
assert requery_lens.get_lens_name() == lens_name
|
ch-poetry-nlg/data_loader.py | shinoyuki222/torch-light | 310 | 12665675 |
import numpy as np
import torch
from torch.autograd import Variable
import const
class DataLoader(object):
def __init__(self, src_sents, max_len, batch_size, cuda=True):
self.cuda = cuda
self.sents_size = len(src_sents)
self._step = 0
self._stop_step = self.sents_size // batch_size
self._batch_size = batch_size
self._max_len = max_len
self.gen_data(src_sents)
def gen_data(self, src_sents):
src_sents = np.asarray(src_sents)
self._src_sents = src_sents[:, :-1]
self._label = src_sents[:, 1:]
def _shuffle(self):
indices = np.arange(self._src_sents.shape[0])
np.random.shuffle(indices)
self._src_sents = self._src_sents[indices]
self._label = self._label[indices]
def __iter__(self):
return self
def __next__(self):
def to_longest(insts):
inst_data_tensor = Variable(torch.from_numpy(insts))
if self.cuda:
inst_data_tensor = inst_data_tensor.cuda()
return inst_data_tensor
if self._step == self._stop_step:
self._step = 0
raise StopIteration()
_start = self._step*self._batch_size
_bsz = self._batch_size
self._step += 1
data = to_longest(self._src_sents[_start: _start+_bsz])
label = to_longest(self._label[_start: _start+_bsz])
return data, label.contiguous().view(-1)
if __name__ == "__main__":
data = torch.load("data/ch_pro_nlg.pt")
_data = DataLoader(
data['train'],
data["max_word_len"],
64)
d = {v: k for k, v in data['dict']['src'].items()}
print([d[w] for s in _data._src_sents for w in s])
print([d[w] for s in _data._label for w in s])
|
tests/core/test_task_composite.py | ai-fast-track/mantisshrimp | 580 | 12665681 | # from icevision.all import *
# first_task = tasks.Task("first")
# second_task = tasks.Task("second")
# record = BaseRecord(
# (
# FilepathRecordComponent(),
# InstancesLabelsRecordComponent(task=first_task),
# BBoxesRecordComponent(task=first_task),
# InstancesLabelsRecordComponent(task=second_task),
# BBoxesRecordComponent(task=second_task),
# )
# )
# record.builder_template()
# [
# "record.set_img_size(<ImgSize>)",
# "record.set_filepath(<Union[str, Path]>)",
# "record.first.add_labels_names(<Sequence[Hashable]>)",
# "record.first.add_bboxes(<Sequence[BBox]>)",
# "record.second.add_labels_names(<Sequence[Hashable]>)",
# "record.second.add_bboxes(<Sequence[BBox]>)",
# ]
|
alipay/aop/api/domain/TradeComplainQueryResponse.py | antopen/alipay-sdk-python-all | 213 | 12665685 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class TradeComplainQueryResponse(object):
def __init__(self):
self._complain_event_id = None
self._complain_reason = None
self._content = None
self._gmt_create = None
self._gmt_finished = None
self._gmt_modified = None
self._images = None
self._leaf_category_name = None
self._merchant_order_no = None
self._phone_no = None
self._status = None
self._target_id = None
self._target_type = None
self._trade_no = None
@property
def complain_event_id(self):
return self._complain_event_id
@complain_event_id.setter
def complain_event_id(self, value):
self._complain_event_id = value
@property
def complain_reason(self):
return self._complain_reason
@complain_reason.setter
def complain_reason(self, value):
self._complain_reason = value
@property
def content(self):
return self._content
@content.setter
def content(self, value):
self._content = value
@property
def gmt_create(self):
return self._gmt_create
@gmt_create.setter
def gmt_create(self, value):
self._gmt_create = value
@property
def gmt_finished(self):
return self._gmt_finished
@gmt_finished.setter
def gmt_finished(self, value):
self._gmt_finished = value
@property
def gmt_modified(self):
return self._gmt_modified
@gmt_modified.setter
def gmt_modified(self, value):
self._gmt_modified = value
@property
def images(self):
return self._images
@images.setter
def images(self, value):
if isinstance(value, list):
self._images = list()
for i in value:
self._images.append(i)
@property
def leaf_category_name(self):
return self._leaf_category_name
@leaf_category_name.setter
def leaf_category_name(self, value):
self._leaf_category_name = value
@property
def merchant_order_no(self):
return self._merchant_order_no
@merchant_order_no.setter
def merchant_order_no(self, value):
self._merchant_order_no = value
@property
def phone_no(self):
return self._phone_no
@phone_no.setter
def phone_no(self, value):
self._phone_no = value
@property
def status(self):
return self._status
@status.setter
def status(self, value):
self._status = value
@property
def target_id(self):
return self._target_id
@target_id.setter
def target_id(self, value):
self._target_id = value
@property
def target_type(self):
return self._target_type
@target_type.setter
def target_type(self, value):
self._target_type = value
@property
def trade_no(self):
return self._trade_no
@trade_no.setter
def trade_no(self, value):
self._trade_no = value
def to_alipay_dict(self):
params = dict()
if self.complain_event_id:
if hasattr(self.complain_event_id, 'to_alipay_dict'):
params['complain_event_id'] = self.complain_event_id.to_alipay_dict()
else:
params['complain_event_id'] = self.complain_event_id
if self.complain_reason:
if hasattr(self.complain_reason, 'to_alipay_dict'):
params['complain_reason'] = self.complain_reason.to_alipay_dict()
else:
params['complain_reason'] = self.complain_reason
if self.content:
if hasattr(self.content, 'to_alipay_dict'):
params['content'] = self.content.to_alipay_dict()
else:
params['content'] = self.content
if self.gmt_create:
if hasattr(self.gmt_create, 'to_alipay_dict'):
params['gmt_create'] = self.gmt_create.to_alipay_dict()
else:
params['gmt_create'] = self.gmt_create
if self.gmt_finished:
if hasattr(self.gmt_finished, 'to_alipay_dict'):
params['gmt_finished'] = self.gmt_finished.to_alipay_dict()
else:
params['gmt_finished'] = self.gmt_finished
if self.gmt_modified:
if hasattr(self.gmt_modified, 'to_alipay_dict'):
params['gmt_modified'] = self.gmt_modified.to_alipay_dict()
else:
params['gmt_modified'] = self.gmt_modified
if self.images:
if isinstance(self.images, list):
for i in range(0, len(self.images)):
element = self.images[i]
if hasattr(element, 'to_alipay_dict'):
self.images[i] = element.to_alipay_dict()
if hasattr(self.images, 'to_alipay_dict'):
params['images'] = self.images.to_alipay_dict()
else:
params['images'] = self.images
if self.leaf_category_name:
if hasattr(self.leaf_category_name, 'to_alipay_dict'):
params['leaf_category_name'] = self.leaf_category_name.to_alipay_dict()
else:
params['leaf_category_name'] = self.leaf_category_name
if self.merchant_order_no:
if hasattr(self.merchant_order_no, 'to_alipay_dict'):
params['merchant_order_no'] = self.merchant_order_no.to_alipay_dict()
else:
params['merchant_order_no'] = self.merchant_order_no
if self.phone_no:
if hasattr(self.phone_no, 'to_alipay_dict'):
params['phone_no'] = self.phone_no.to_alipay_dict()
else:
params['phone_no'] = self.phone_no
if self.status:
if hasattr(self.status, 'to_alipay_dict'):
params['status'] = self.status.to_alipay_dict()
else:
params['status'] = self.status
if self.target_id:
if hasattr(self.target_id, 'to_alipay_dict'):
params['target_id'] = self.target_id.to_alipay_dict()
else:
params['target_id'] = self.target_id
if self.target_type:
if hasattr(self.target_type, 'to_alipay_dict'):
params['target_type'] = self.target_type.to_alipay_dict()
else:
params['target_type'] = self.target_type
if self.trade_no:
if hasattr(self.trade_no, 'to_alipay_dict'):
params['trade_no'] = self.trade_no.to_alipay_dict()
else:
params['trade_no'] = self.trade_no
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = TradeComplainQueryResponse()
if 'complain_event_id' in d:
o.complain_event_id = d['complain_event_id']
if 'complain_reason' in d:
o.complain_reason = d['complain_reason']
if 'content' in d:
o.content = d['content']
if 'gmt_create' in d:
o.gmt_create = d['gmt_create']
if 'gmt_finished' in d:
o.gmt_finished = d['gmt_finished']
if 'gmt_modified' in d:
o.gmt_modified = d['gmt_modified']
if 'images' in d:
o.images = d['images']
if 'leaf_category_name' in d:
o.leaf_category_name = d['leaf_category_name']
if 'merchant_order_no' in d:
o.merchant_order_no = d['merchant_order_no']
if 'phone_no' in d:
o.phone_no = d['phone_no']
if 'status' in d:
o.status = d['status']
if 'target_id' in d:
o.target_id = d['target_id']
if 'target_type' in d:
o.target_type = d['target_type']
if 'trade_no' in d:
o.trade_no = d['trade_no']
return o
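# Hypothetical usage sketch (not part of the SDK): round-tripping a response dict;
# the field values below are made up for illustration.
if __name__ == '__main__':
    payload = {'complain_event_id': '2088000000000001', 'status': 'FINISHED', 'images': ['img_1']}
    resp = TradeComplainQueryResponse.from_alipay_dict(payload)
    print(resp.to_alipay_dict())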
|
api/brands/urls.py | gaybro8777/osf.io | 628 | 12665704 |
from django.conf.urls import url
from api.brands import views
app_name = 'osf'
urlpatterns = [
url(r'^$', views.BrandList.as_view(), name=views.BrandList.view_name),
url(r'^(?P<brand_id>\w+)/$', views.BrandDetail.as_view(), name=views.BrandDetail.view_name),
]
|
bot.py | sevazhidkov/tweets-search-bot | 2,071 | 12665726 |
import os
import json
import requests
import telebot
import re
from telebot import types
LOKLAK_API_URL = "http://loklak.org/api/search.json?q={query}"
bot = telebot.TeleBot(os.environ['TELEGRAM_BOT_TOKEN'])
user_results = {}
def get_tweet_rating(tweet):
"""
    Function that counts a tweet's rating based on favourites and retweets
"""
return (tweet['retweet_count'] * 2) + tweet['favourites_count']
def tweet_answer(tweet, tweets_left):
"""
    Function that makes the text answer from a tweet object
"""
answer = '"{message}" - {author} \n\n{link}\n\n{more} more tweets.'.format(
message=tweet['text'],
author=tweet['screen_name'],
link=tweet['link'],
more=tweets_left
)
return answer
@bot.message_handler(commands=['start', 'help'])
def description(message):
bot.reply_to(message,
"loklak.org bot - simple Telegram bot for searching tweets.\n"
"Just send a message with your query and bot will process it, "
"using loklag.org API. \n"
"If you want to contribute, project is open source: "
"https://github.com/sevazhidkov/tweets-search-bot\n"
"You can search a particular user's entire tweets by enter \"/user:USERNAME\""
)
@bot.message_handler(commands=['next-tweet', 'next_tweet'])
def next_tweet(message):
user_id = message.from_user.id
if user_id in user_results and user_results[user_id]:
tweet = user_results[user_id].pop()
bot.reply_to(message, tweet_answer(tweet, len(user_results[user_id])))
else:
bot.reply_to(message, "You haven't searched anything.")
@bot.message_handler(regexp="/user:.+")
def user_search(message):
query_msg = message.text
baseURL = "http://loklak.org/api/search.json?q=from:"
base_infoURL = "http://loklak.org/api/user.json?screen_name="
pattern = re.compile("/user:(.+)")
mtch = pattern.match(query_msg)
if mtch:
username = mtch.group(1)
raw = requests.get(baseURL + username)
info_raw = requests.get(base_infoURL + username)
try:
tweets = json.loads(raw.text)['statuses']
info = json.loads(info_raw.text)['user']
time_zone = info['time_zone']
profile_image = info['profile_image_url']
friends_num = info['friends_count']
except ValueError:
return
if tweets:
tweets.sort(key=get_tweet_rating)
tweet = tweets.pop()
user_results[message.from_user.id] = tweets
            # show a button on top of input
markup = types.ReplyKeyboardMarkup(row_width=1)
markup.add('/next-tweet')
full_text = ""
full_text += "Username:" + username + "\n"
full_text += "Profile Picture:" + profile_image + "\n"
full_text += "Friends Number:" + str(friends_num) + "\n"
full_text += tweet_answer(tweet, len(tweets))
bot.reply_to(message, full_text, reply_markup=markup)
else:
bot.reply_to(message, "Error in find a user, make sure you are in a correct format. \"user:USERNAME\"")
else:
bot.reply_to(message, "Error in format, make sure you are in a correct format.")
@bot.message_handler(func=lambda m: True)
def search(message):
query_msg = message.text
result = requests.get(LOKLAK_API_URL.format(query=query_msg))
try:
tweets = json.loads(result.text)['statuses']
except ValueError:
return
if tweets:
# Find the best tweet for this search query,
# by using sorting
tweets.sort(key=get_tweet_rating)
tweet = tweets.pop()
user_results[message.from_user.id] = tweets
        # show a button on top of input
markup = types.ReplyKeyboardMarkup(row_width=2)
markup.add('/next-tweet')
bot.reply_to(message, tweet_answer(tweet, len(tweets)), reply_markup=markup)
else:
        # Delete words from the message until a result is available
#Strategy: keep removing the smallest word in a sentence
words = query_msg.split()
if(len(words) > 1):
words.sort(key = len)
del words[0]
reconstructed = ""
for word in words:
reconstructed += word + " "
message.text = reconstructed
search(message)
else:
bot.reply_to(message, '404 Not found')
bot.polling()
|
libraries/botbuilder-schema/botbuilder/schema/teams/additional_properties.py | Fl4v/botbuilder-python | 388 | 12665752 |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
class ContentType:
O365_CONNECTOR_CARD = "application/vnd.microsoft.teams.card.o365connector"
FILE_CONSENT_CARD = "application/vnd.microsoft.teams.card.file.consent"
FILE_DOWNLOAD_INFO = "application/vnd.microsoft.teams.file.download.info"
FILE_INFO_CARD = "application/vnd.microsoft.teams.card.file.info"
class Type:
O365_CONNECTOR_CARD_VIEWACTION = "ViewAction"
O365_CONNECTOR_CARD_OPEN_URI = "OpenUri"
O365_CONNECTOR_CARD_HTTP_POST = "HttpPOST"
O365_CONNECTOR_CARD_ACTION_CARD = "ActionCard"
O365_CONNECTOR_CARD_TEXT_INPUT = "TextInput"
O365_CONNECTOR_CARD_DATE_INPUT = "DateInput"
O365_CONNECTOR_CARD_MULTICHOICE_INPUT = "MultichoiceInput"
|
learning_tensorflow/1.py | drpreetyrai/ChatBotCourse | 5,087 | 12665771 |
import tensorflow as tf
sess = tf.Session()
a = tf.placeholder("float")
b = tf.placeholder("float")
c = tf.constant(6.0)
d = tf.mul(a, b)
y = tf.mul(d, c)
print sess.run(y, feed_dict={a: 3, b: 3})
A = [[1.1,2.3],[3.4,4.1]]
Y = tf.matrix_inverse(A)
print sess.run(Y)
sess.close()
|
routeros_api/api_communicator/encoding_decorator.py | davidc/RouterOS-api | 183 | 12665795 | class EncodingApiCommunicator(object):
def __init__(self, inner):
self.inner = inner
def call(self, path, command, arguments=None, queries=None,
additional_queries=()):
path = path.encode()
command = command.encode()
arguments = self.transform_dictionary(arguments or {})
queries = self.transform_dictionary(queries or {})
promise = self.inner.call(
path, command, arguments, queries, additional_queries)
return self.decorate_promise(promise)
def transform_dictionary(self, dictionary):
return dict(self.transform_item(item) for item in dictionary.items())
def transform_item(self, item):
key, value = item
return (key.encode(), value)
def decorate_promise(self, promise):
return EncodedPromiseDecorator(promise)
class EncodedPromiseDecorator(object):
def __init__(self, inner):
self.inner = inner
def get(self):
response = self.inner.get()
return response.map(self.transform_row)
def __iter__(self):
return map(self.transform_row, self.inner)
def transform_row(self, row):
return dict(self.transform_item(item) for item in row.items())
def transform_item(self, item):
key, value = item
return (key.decode(), value)
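# Hypothetical usage sketch (not from the library): a stand-in inner communicator is
# used only to show that keys are encoded on the way in and decoded on the way out.
if __name__ == '__main__':
    class _FakePromise(object):
        def __iter__(self):
            return iter([{b'name': b'ether1'}])
    class _FakeInner(object):
        def call(self, path, command, arguments, queries, additional_queries):
            return _FakePromise()
    api = EncodingApiCommunicator(_FakeInner())
    rows = list(api.call('/interface/', 'print'))
    print(rows)  # [{'name': b'ether1'}] - keys decoded to str, values left untouched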
|
ice/core/utils.py | i2y/ice | 313 | 12665802 | from collections import Sequence
from .constants import LPARA, RPARA
def issequence(obj):
return isinstance(obj, Sequence)
def issequence_except_str(obj):
if isinstance(obj, str):
return False
return isinstance(obj, Sequence)
def is_tuple_or_list(obj):
return type(obj) in {tuple, list}
def emit_sexp(sexpr):
ol = []
stack = [sexpr]
while len(stack) > 0:
sexpr = stack.pop()
if is_tuple_or_list(sexpr):
stack.append(RPARA)
rsexpr = []
for sub in sexpr:
rsexpr.insert(0, sub)
stack.extend(rsexpr)
stack.append(LPARA)
else:
ol.append(sexpr)
retval = ''
oldsitem = ''
for item in ol:
sitem = repr(item)
if sitem[0] == "'" and sitem[-1] == "'":
sitem = sitem.replace('"', "\\\"")
sitem = '"' + sitem[1:-1] + '"'
if not ((sitem == ')') or (oldsitem == '(')):
oldsitem = sitem
sitem = ' ' + sitem
else:
oldsitem = sitem
retval += sitem
return retval[1:] |
eeauditor/auditors/aws/AWS_License_Manager_Auditor.py | kbhagi/ElectricEye | 442 | 12665807 |
#This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import boto3
import datetime
import os
from check_register import CheckRegister
registry = CheckRegister()
# import boto3 clients
licensemanager = boto3.client("license-manager")
@registry.register_check("license-manager")
def license_manager_hard_count_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[LicenseManager.1] License Manager license configurations should be configured to enforce a hard limit"""
try:
# TODO: need to catch the case that License Manager is not setup
response = licensemanager.list_license_configurations()
lmCheck = str(response["LicenseConfigurations"])
if lmCheck == "[]":
pass
else:
myLiscMgrConfigs = response["LicenseConfigurations"]
for lmconfigs in myLiscMgrConfigs:
liscConfigArn = str(lmconfigs["LicenseConfigurationArn"])
# ISO Time
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
try:
response = licensemanager.get_license_configuration(
LicenseConfigurationArn=liscConfigArn
)
liscConfigId = str(response["LicenseConfigurationId"])
liscConfigName = str(response["Name"])
hardLimitCheck = str(response["LicenseCountHardLimit"])
if hardLimitCheck == "False":
finding = {
"SchemaVersion": "2018-10-08",
"Id": liscConfigArn + "/license-manager-enforce-hard-limit-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": liscConfigArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices"
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "LOW"},
"Confidence": 99,
"Title": "[LicenseManager.1] License Manager license configurations should be configured to enforce a hard limit",
"Description": "License Manager license configuration "
+ liscConfigName
+ " does not enforce a hard limit. Enforcing a hard limit prevents new instances from being created that if you have already provisioned all available licenses. Refer to the remediation instructions to remediate this behavior",
"Remediation": {
"Recommendation": {
"Text": "For information on hard limits refer to the License Configuration Parameters and Rules section of the AWS License Manager User Guide",
"Url": "https://docs.aws.amazon.com/license-manager/latest/userguide/config-overview.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsLicenseManagerLicenseConfiguration",
"Id": liscConfigArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"Other": {
"licenseConfigurationId": liscConfigId,
"licenseConfigurationName": liscConfigName,
}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF ID.AM-2",
"NIST SP 800-53 CM-8",
"NIST SP 800-53 PM-5",
"AICPA TSC CC3.2",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.8.1.1",
"ISO 27001:2013 A.8.1.2",
"ISO 27001:2013 A.12.5.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
else:
finding = {
"SchemaVersion": "2018-10-08",
"Id": liscConfigArn + "/license-manager-enforce-hard-limit-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": liscConfigArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices"
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[LicenseManager.1] License Manager license configurations should be configured to enforce a hard limit",
"Description": "License Manager license configuration "
+ liscConfigName
+ " enforces a hard limit.",
"Remediation": {
"Recommendation": {
"Text": "For information on hard limits refer to the License Configuration Parameters and Rules section of the AWS License Manager User Guide",
"Url": "https://docs.aws.amazon.com/license-manager/latest/userguide/config-overview.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsLicenseManagerLicenseConfiguration",
"Id": liscConfigArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"Other": {
"licenseConfigurationId": liscConfigId,
"licenseConfigurationName": liscConfigName,
}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF ID.AM-2",
"NIST SP 800-53 CM-8",
"NIST SP 800-53 PM-5",
"AICPA TSC CC3.2",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.8.1.1",
"ISO 27001:2013 A.8.1.2",
"ISO 27001:2013 A.12.5.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
except Exception as e:
print(e)
except Exception as e:
print(e)
@registry.register_check("license-manager")
def license_manager_disassociation_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[LicenseManager.2] License Manager license configurations should disassociate hosts when license in scope is not found"""
try:
# TODO: need to catch the case that License Manager is not setup
response = licensemanager.list_license_configurations()
lmCheck = str(response["LicenseConfigurations"])
if lmCheck == "[]":
pass
else:
myLiscMgrConfigs = response["LicenseConfigurations"]
for lmconfigs in myLiscMgrConfigs:
liscConfigArn = str(lmconfigs["LicenseConfigurationArn"])
# ISO Time
iso8601Time = (
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
)
try:
response = licensemanager.get_license_configuration(
LicenseConfigurationArn=liscConfigArn
)
liscConfigId = str(response["LicenseConfigurationId"])
liscConfigName = str(response["Name"])
disassocCheck = str(response["DisassociateWhenNotFound"])
if disassocCheck == "False":
finding = {
"SchemaVersion": "2018-10-08",
"Id": liscConfigArn + "/license-manager-disassociation-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": liscConfigArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices"
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "LOW"},
"Confidence": 99,
"Title": "[LicenseManager.2] License Manager license configurations should disassociate hosts when license in scope is not found",
"Description": "License Manager license configuration "
+ liscConfigName
+ " does not enforce automatic disassociation. Refer to the remediation instructions to remediate this behavior.",
"Remediation": {
"Recommendation": {
"Text": "For information on disassociation refer to the Disassociating license configurations and AMIs section of the AWS License Manager User Guide",
"Url": "https://docs.aws.amazon.com/license-manager/latest/userguide/license-rules.html#ami-disassociation",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsLicenseManagerLicenseConfiguration",
"Id": liscConfigArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"Other": {
"LicenseConfigurationId": liscConfigId,
"LicenseConfigurationName": liscConfigName,
}
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF ID.AM-2",
"NIST SP 800-53 CM-8",
"NIST SP 800-53 PM-5",
"AICPA TSC CC3.2",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.8.1.1",
"ISO 27001:2013 A.8.1.2",
"ISO 27001:2013 A.12.5.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
else:
finding = {
"SchemaVersion": "2018-10-08",
"Id": liscConfigArn + "/license-manager-disassociation-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": liscConfigArn,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/AWS Security Best Practices"
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[LicenseManager.2] License Manager license configurations should disassociate hosts when license in scope is not found",
"Description": "License Manager license configuration "
+ liscConfigName
+ " enforces automatic disassociation.",
"Remediation": {
"Recommendation": {
"Text": "For information on disassociation refer to the Disassociating license configurations and AMIs section of the AWS License Manager User Guide",
"Url": "https://docs.aws.amazon.com/license-manager/latest/userguide/license-rules.html#ami-disassociation",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsLicenseManagerLicenseConfiguration",
"Id": liscConfigArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"Other": {
"LicenseConfigurationId": liscConfigId,
"LicenseConfigurationName": liscConfigName,
}
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF ID.AM-2",
"NIST SP 800-53 CM-8",
"NIST SP 800-53 PM-5",
"AICPA TSC CC3.2",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.8.1.1",
"ISO 27001:2013 A.8.1.2",
"ISO 27001:2013 A.12.5.1"
]
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
except Exception as e:
print(e)
except Exception as e:
print(e) |
vut/lib/python3.8/site-packages/pipenv/vendor/passa/internals/specifiers.py | dan-mutua/djangowk1 | 6,263 | 12665842 |
# -*- coding=utf-8 -*-
from __future__ import absolute_import, unicode_literals
import itertools
import operator
from packaging.specifiers import SpecifierSet, Specifier
from vistir.misc import dedup
def _tuplize_version(version):
return tuple(int(x) for x in version.split("."))
def _format_version(version):
return ".".join(str(i) for i in version)
# Prefer [x,y) ranges.
REPLACE_RANGES = {">": ">=", "<=": "<"}
def _format_pyspec(specifier):
if isinstance(specifier, str):
if not any(op in specifier for op in Specifier._operators.keys()):
specifier = "=={0}".format(specifier)
specifier = Specifier(specifier)
if specifier.operator == "==" and specifier.version.endswith(".*"):
specifier = Specifier("=={0}".format(specifier.version[:-2]))
try:
op = REPLACE_RANGES[specifier.operator]
except KeyError:
return specifier
version = specifier.version.replace(".*", "")
curr_tuple = _tuplize_version(version)
try:
next_tuple = (curr_tuple[0], curr_tuple[1] + 1)
except IndexError:
next_tuple = (curr_tuple[0], 1)
specifier = Specifier("{0}{1}".format(op, _format_version(next_tuple)))
return specifier
def _get_specs(specset):
if isinstance(specset, Specifier):
specset = str(specset)
if isinstance(specset, str):
specset = SpecifierSet(specset.replace(".*", ""))
return [
(spec._spec[0], _tuplize_version(spec._spec[1]))
for spec in getattr(specset, "_specs", [])
]
def _group_by_op(specs):
specs = [_get_specs(x) for x in list(specs)]
flattened = [(op, version) for spec in specs for op, version in spec]
specs = sorted(flattened, key=operator.itemgetter(1))
grouping = itertools.groupby(specs, key=operator.itemgetter(0))
return grouping
def cleanup_pyspecs(specs, joiner="or"):
specs = {_format_pyspec(spec) for spec in specs}
# for != operator we want to group by version
# if all are consecutive, join as a list
results = set()
for op, versions in _group_by_op(specs):
versions = [version[1] for version in versions]
versions = sorted(dedup(versions))
# if we are doing an or operation, we need to use the min for >=
# this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6
# if we do an AND operation we need to use MAX to be more selective
if op in (">", ">="):
if joiner == "or":
results.add((op, _format_version(min(versions))))
else:
results.add((op, _format_version(max(versions))))
# we use inverse logic here so we will take the max value if we are
# using OR but the min value if we are using AND
elif op in ("<=", "<"):
if joiner == "or":
results.add((op, _format_version(max(versions))))
else:
results.add((op, _format_version(min(versions))))
# leave these the same no matter what operator we use
elif op in ("!=", "==", "~="):
version_list = sorted(
"{0}".format(_format_version(version))
for version in versions
)
version = ", ".join(version_list)
if len(version_list) == 1:
results.add((op, version))
elif op == "!=":
results.add(("not in", version))
elif op == "==":
results.add(("in", version))
else:
specifier = SpecifierSet(",".join(sorted(
"{0}".format(op, v) for v in version_list
)))._specs
for s in specifier:
results &= (specifier._spec[0], specifier._spec[1])
else:
if len(version) == 1:
results.add((op, version))
else:
specifier = SpecifierSet("{0}".format(version))._specs
                for s in specifier:
                    results.add((s._spec[0], s._spec[1]))
return results
def pyspec_from_markers(marker):
if marker._markers[0][0] != 'python_version':
return
op = marker._markers[0][1].value
version = marker._markers[0][2].value
specset = set()
if op == "in":
specset.update(
Specifier("=={0}".format(v.strip()))
for v in version.split(",")
)
elif op == "not in":
specset.update(
Specifier("!={0}".format(v.strip()))
for v in version.split(",")
)
else:
specset.add(Specifier("".join([op, version])))
if specset:
return specset
return None
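# Hypothetical usage sketch (not part of the vendored module): collapsing a few
# python_version specifiers; the input strings and the "or" joiner are illustrative.
if __name__ == "__main__":
    merged = cleanup_pyspecs([">=2.7", ">=3.4", "<4.0"], joiner="or")
    print(sorted(merged))  # e.g. [('<', '4.0'), ('>=', '2.7')]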
|
model/AO_model/AO_model.py | ishine/Looking-to-Listen-at-the-Cocktail-Party | 145 | 12665871 |
from keras import optimizers
from keras.layers import Input, Dense, Convolution2D, Bidirectional,TimeDistributed
from keras.layers import Flatten, BatchNormalization, Reshape
from keras.layers.core import Activation
from keras.models import Model, load_model
from keras.layers.recurrent import LSTM
from keras.initializers import he_normal,glorot_uniform
def AO_model(people_num=2):
model_input = Input(shape=(298, 257, 2))
print('0:', model_input.shape)
conv1 = Convolution2D(96, kernel_size=(1, 7), strides=(1, 1), padding='same', dilation_rate=(1, 1), name='conv1')(model_input)
conv1 = BatchNormalization()(conv1)
conv1 = Activation('relu')(conv1)
print('1:', conv1.shape)
conv2 = Convolution2D(96, kernel_size=(7, 1), strides=(1, 1), padding='same', dilation_rate=(1, 1), name='conv2')(conv1)
conv2 = BatchNormalization()(conv2)
conv2 = Activation('relu')(conv2)
print('2:', conv2.shape)
conv3 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(1, 1), name='conv3')(conv2)
conv3 = BatchNormalization()(conv3)
conv3 = Activation('relu')(conv3)
print('3:', conv3.shape)
conv4 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(2, 1), name='conv4')(conv3)
conv4 = BatchNormalization()(conv4)
conv4 = Activation('relu')(conv4)
print('4:', conv4.shape)
conv5 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(4, 1), name='conv5')(conv4)
conv5 = BatchNormalization()(conv5)
conv5 = Activation('relu')(conv5)
print('5:', conv5.shape)
conv6 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(8, 1), name='conv6')(conv5)
conv6 = BatchNormalization()(conv6)
conv6 = Activation('relu')(conv6)
print('6:', conv6.shape)
conv7 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(16, 1), name='conv7')(conv6)
conv7 = BatchNormalization()(conv7)
conv7 = Activation('relu')(conv7)
print('7:', conv7.shape)
conv8 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(32, 1), name='conv8')(conv7)
conv8 = BatchNormalization()(conv8)
conv8 = Activation('relu')(conv8)
print('8:', conv8.shape)
conv9 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(1, 1), name='conv9')(conv8)
conv9 = BatchNormalization()(conv9)
conv9 = Activation('relu')(conv9)
print('9:', conv9.shape)
conv10 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(2, 2), name='conv10')(conv9)
conv10 = BatchNormalization()(conv10)
conv10 = Activation('relu')(conv10)
print('10:', conv10.shape)
conv11 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(4, 4), name='conv11')(conv10)
conv11 = BatchNormalization()(conv11)
conv11 = Activation('relu')(conv11)
print('11:', conv11.shape)
conv12 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(8, 8), name='conv12')(conv11)
conv12 = BatchNormalization()(conv12)
conv12 = Activation('relu')(conv12)
print('12:', conv12.shape)
conv13 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(16, 16), name='conv13')(conv12)
conv13 = BatchNormalization()(conv13)
conv13 = Activation('relu')(conv13)
print('13:', conv13.shape)
conv14 = Convolution2D(96, kernel_size=(5, 5), strides=(1, 1), padding='same', dilation_rate=(32, 32), name='conv14')(conv13)
conv14 = BatchNormalization()(conv14)
conv14 = Activation('relu')(conv14)
print('14:', conv14.shape)
conv15 = Convolution2D(8, kernel_size=(1, 1), strides=(1, 1), padding='same', dilation_rate=(1, 1), name='conv15')(conv14)
conv15 = BatchNormalization()(conv15)
conv15 = Activation('relu')(conv15)
print('15:', conv15.shape)
AVfusion = TimeDistributed(Flatten())(conv15)
print('AVfusion:', AVfusion.shape)
lstm = Bidirectional(LSTM(400,input_shape=(298,8*257),return_sequences=True),merge_mode='sum')(AVfusion)
print('lstm:', lstm.shape)
fc1 = Dense(600, name="fc1", activation='relu', kernel_initializer=he_normal(seed=27))(lstm)
print('fc1:', fc1.shape)
fc2 = Dense(600, name="fc2", activation='relu', kernel_initializer=he_normal(seed=42))(fc1)
print('fc2:', fc2.shape)
fc3 = Dense(600, name="fc3", activation='relu', kernel_initializer=he_normal(seed=65))(fc2)
print('fc3:', fc3.shape)
complex_mask = Dense(257 * 2 * people_num, name="complex_mask", kernel_initializer=glorot_uniform(seed=87))(fc3)
print('complex_mask:', complex_mask.shape)
complex_mask_out = Reshape((298, 257, 2, people_num))(complex_mask)
print('complex_mask_out:', complex_mask_out.shape)
# --------------------------- AO end ---------------------------
AO_model = Model(inputs=model_input, outputs=complex_mask_out)
return AO_model
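# Hypothetical usage sketch (not from the original repo): build the two-speaker
# audio-only model and inspect its layers; assumes a working Keras backend.
if __name__ == '__main__':
    model = AO_model(people_num=2)
    model.summary()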
|
doc/examples/sparsevid.py | samgal/scikit-video | 615 | 12665953 |
import matplotlib.pyplot as plt
import numpy as np
import scipy
import sklearn.linear_model
from matplotlib import gridspec
from sklearn.feature_extraction import image
import skvideo.datasets
import skvideo.io
try:
xrange
except NameError:
xrange = range
np.random.seed(0)
# use greedy K-SVD algorithm with OMP
def code_step(X, D):
model = sklearn.linear_model.OrthogonalMatchingPursuit(
n_nonzero_coefs=5, fit_intercept=False, normalize=False
)
#C = sklearn.
model.fit(D.T, X.T)
return model.coef_
def dict_step(X, C, D):
unused_indices = []
for k in xrange(D.shape[0]):
usedidx = np.abs(C[:, k])>0
if np.sum(usedidx) <= 1:
print("Skipping filter #%d" % (k,))
unused_indices.append(k)
continue
selectNotK = np.arange(D.shape[0]) != k
used_coef = C[usedidx, :][:, selectNotK]
E_kR = X[usedidx, :].T - np.dot(used_coef, D[selectNotK, :]).T
U, S, V = scipy.sparse.linalg.svds(E_kR, k=1)
# choose sign based on largest dot product
choicepos = np.dot(D[k,:], U[:, 0])
choiceneg = np.dot(D[k,:], -U[:, 0])
if choicepos > choiceneg:
D[k, :] = U[:, 0]
C[usedidx, k] = S[0] * V[0, :]
else:
D[k, :] = -U[:, 0]
C[usedidx, k] = -S[0] * V[0, :]
# re-randomize filters that were not used
for i in unused_indices:
D[i, :] = np.random.normal(size=D.shape[1])
D[i, :] /= np.sqrt(np.dot(D[i,:], D[i,:]))
return D
def plot_weights(basis):
n_filters, n_channels, height, width = basis.shape
ncols = 10
nrows = 10
fig = plt.figure()
gs = gridspec.GridSpec(nrows, ncols)
rown = 0
coln = 0
for filter in xrange(n_filters):
ax = fig.add_subplot(gs[rown, coln])
mi = np.min(basis[filter, 0, :, :])
ma = np.max(basis[filter, 0, :, :])
ma = np.max((np.abs(mi), np.abs(ma)))
mi = -ma
ax.imshow(basis[filter, 0, :, :], vmin=mi, vmax=ma, cmap='Greys_r', interpolation='none')
ax.xaxis.set_major_locator(plt.NullLocator())
ax.yaxis.set_major_locator(plt.NullLocator())
coln += 1
if coln >= ncols:
coln = 0
rown += 1
gs.tight_layout(fig, pad=0, h_pad=0, w_pad=0)
fig.canvas.draw()
buf, sz = fig.canvas.print_to_buffer()
data = np.fromstring(buf, dtype=np.uint8).reshape(sz[1], sz[0], -1)[:, :, :3]
plt.close()
return data
# a 10 fps video encoded using x264
writer = skvideo.io.FFmpegWriter("sparsity.mp4",
inputdict={
"-r": "10"
},
outputdict={
'-vcodec': 'libx264', '-b': '30000000'
})
# open the first frame of bigbuckbunny
filename = skvideo.datasets.bigbuckbunny()
vidframe = skvideo.io.vread(filename, outputdict={"-pix_fmt": "gray"})[0, :, :, 0]
# initialize D
D = np.random.normal(size=(100, 7*7))
for i in range(D.shape[0]):
D[i, :] /= np.sqrt(np.dot(D[i,:], D[i,:]))
X = image.extract_patches_2d(vidframe, (7, 7))
X = X.reshape(X.shape[0], -1).astype(np.float)
# subsample about 10000 patches
X = X[np.random.permutation(X.shape[0])[:10000]]
for i in range(200):
print("Iteration %d / %d" % (i, 200))
C = code_step(X, D)
D = dict_step(X, C, D)
frame = plot_weights(D.reshape(100, 1, 7, 7))
writer.writeFrame(frame)
writer.close()
|
pony/orm/tests/test_relations_one2one4.py | ProgHaj/pony | 2,628 | 12665977 |
from __future__ import absolute_import, print_function, division
import unittest
from pony.orm.core import *
from pony.orm.tests.testutils import *
from pony.orm.tests import setup_database, teardown_database
db = Database()
class Person(db.Entity):
name = Required(unicode)
passport = Optional("Passport")
class Passport(db.Entity):
code = Required(unicode)
person = Required("Person")
class TestOneToOne4(unittest.TestCase):
def setUp(self):
setup_database(db)
with db_session:
p1 = Person(id=1, name='John')
Passport(id=1, code='123', person=p1)
def tearDown(self):
teardown_database(db)
@raises_exception(ConstraintError, 'Cannot unlink Passport[1] from previous Person[1] object, because Passport.person attribute is required')
@db_session
def test1(self):
p2 = Person(id=2, name='Mike')
pas2 = Passport(id=2, code='456', person=p2)
commit()
p1 = Person.get(name='John')
pas2.person = p1
if __name__ == '__main__':
unittest.main()
|
USPTO/rank-diff-wln/nntrain.py | wengong-jin/nips17-rexgen | 113 | 12666008 | import tensorflow as tf
from utils.nn import linearND, linear
from mol_graph import atom_fdim as adim, bond_fdim as bdim, max_nb, smiles2graph
from models import *
import math, sys, random
from optparse import OptionParser
import threading
parser = OptionParser()
parser.add_option("-t", "--train", dest="train_path")
parser.add_option("-p", "--cand", dest="cand_path", default=None)
parser.add_option("-b", "--batch", dest="batch_size", default=4)
parser.add_option("-c", "--ncore", dest="core_size", default=10)
parser.add_option("-a", "--ncand", dest="cand_size", default=500)
parser.add_option("-m", "--save_dir", dest="save_path")
parser.add_option("-w", "--hidden", dest="hidden_size", default=100)
parser.add_option("-d", "--depth", dest="depth", default=1)
parser.add_option("-n", "--max_norm", dest="max_norm", default=50.0)
opts,args = parser.parse_args()
hidden_size = int(opts.hidden_size)
depth = int(opts.depth)
core_size = int(opts.core_size)
cutoff = int(opts.cand_size)
max_norm = float(opts.max_norm)
batch_size = int(opts.batch_size)
session = tf.Session()
_input_atom = tf.placeholder(tf.float32, [None, None, adim])
_input_bond = tf.placeholder(tf.float32, [None, None, bdim])
_atom_graph = tf.placeholder(tf.int32, [None, None, max_nb, 2])
_bond_graph = tf.placeholder(tf.int32, [None, None, max_nb, 2])
_num_nbs = tf.placeholder(tf.int32, [None, None])
_label = tf.placeholder(tf.int32, [None])
_src_holder = [_input_atom, _input_bond, _atom_graph, _bond_graph, _num_nbs, _label]
q = tf.FIFOQueue(100, [tf.float32, tf.float32, tf.int32, tf.int32, tf.int32, tf.int32])
enqueue = q.enqueue(_src_holder)
input_atom, input_bond, atom_graph, bond_graph, num_nbs, label = q.dequeue()
input_atom.set_shape([None, None, adim])
input_bond.set_shape([None, None, bdim])
atom_graph.set_shape([None, None, max_nb, 2])
bond_graph.set_shape([None, None, max_nb, 2])
num_nbs.set_shape([None, None])
label.set_shape([None])
graph_inputs = (input_atom, input_bond, atom_graph, bond_graph, num_nbs)
with tf.variable_scope("mol_encoder"):
fp_all_atoms = rcnn_wl_only(graph_inputs, hidden_size=hidden_size, depth=depth)
reactant = fp_all_atoms[0:1,:]
candidates = fp_all_atoms[1:,:]
candidates = candidates - reactant
candidates = tf.concat(0, [reactant, candidates])
with tf.variable_scope("diff_encoder"):
reaction_fp = wl_diff_net(graph_inputs, candidates, hidden_size=hidden_size, depth=1)
reaction_fp = reaction_fp[1:]
reaction_fp = tf.nn.relu(linear(reaction_fp, hidden_size, "rex_hidden"))
score = tf.squeeze(linear(reaction_fp, 1, "score"), [1])
loss = tf.nn.softmax_cross_entropy_with_logits(score, label)
pred = tf.argmax(score, 0)
_lr = tf.placeholder(tf.float32, [])
optimizer = tf.train.AdamOptimizer(learning_rate=_lr)
tvs = tf.trainable_variables()
param_norm = tf.global_norm(tvs)
grads_and_vars = optimizer.compute_gradients(loss, tvs)
grads, var = zip(*grads_and_vars)
grad_norm = tf.global_norm(grads)
new_grads, _ = tf.clip_by_global_norm(grads, max_norm)
accum_grads = [tf.Variable(tf.zeros(v.get_shape().as_list()), trainable=False) for v in tvs]
zero_ops = [v.assign(tf.zeros(v.get_shape().as_list())) for v in accum_grads]
accum_ops = [accum_grads[i].assign_add(grad) for i, grad in enumerate(new_grads)]
grads_and_vars = zip(accum_grads, var)
backprop = optimizer.apply_gradients(grads_and_vars)
tf.global_variables_initializer().run(session=session)
size_func = lambda v: reduce(lambda x, y: x*y, v.get_shape().as_list())
n = sum(size_func(v) for v in tf.trainable_variables())
print "Model size: %dK" % (n/1000,)
def count(s):
c = 0
for i in xrange(len(s)):
if s[i] == ':':
c += 1
return c
def read_data(coord):
data = []
train_f = open(opts.train_path, 'r')
cand_f = open(opts.cand_path, 'r')
for line in train_f:
r,e = line.strip("\r\n ").split()
cand = cand_f.readline()
cbonds = []
for b in e.split(';'):
x,y = b.split('-')
x,y = int(x)-1,int(y)-1
cbonds.append((x,y))
sbonds = set(cbonds)
for b in cand.strip("\r\n ").split():
x,y = b.split('-')
x,y = int(x)-1,int(y)-1
if (x,y) not in sbonds:
cbonds.append((x,y))
data.append((r,cbonds))
random.shuffle(data)
data_len = len(data)
it = 0
while True:
reaction, cand_bonds = data[it]
cand_bonds = cand_bonds[:core_size]
it = (it + 1) % data_len
r,_,p = reaction.split('>')
n = count(r)
if n <= 2 or n > 100: continue
src_tuple,_ = smiles2graph(r, p, cand_bonds, cutoff=cutoff)
feed_map = {x:y for x,y in zip(_src_holder, src_tuple)}
session.run(enqueue, feed_dict=feed_map)
coord.request_stop()
coord = tf.train.Coordinator()
t = threading.Thread(target=read_data, args=(coord,))
t.start()
saver = tf.train.Saver()
it, sum_acc, sum_err, sum_gnorm = 0, 0.0, 0.0, 0.0
lr = 0.001
try:
while not coord.should_stop():
it += batch_size
session.run(zero_ops)
for i in xrange(batch_size):
ans = session.run(accum_ops + [pred])
if ans[-1] != 0:
sum_err += 1.0
_, pnorm, gnorm = session.run([backprop, param_norm, grad_norm], feed_dict={_lr:lr})
sum_gnorm += gnorm
if it % 200 == 0 and it > 0:
print "Training Error: %.4f, Param Norm: %.2f, Grad Norm: %.2f" % (sum_err / 200, pnorm, sum_gnorm / 200 * batch_size)
sys.stdout.flush()
sum_err, sum_gnorm = 0.0, 0.0
if it % 40000 == 0 and it > 0:
saver.save(session, opts.save_path + "/model.ckpt-%d" % it)
lr *= 0.9
print "Learning Rate: %.6f" % lr
except Exception as e:
print e
coord.request_stop(e)
finally:
saver.save(session, opts.save_path + "/model.final")
coord.request_stop()
coord.join([t])
|
examples/core/data_readers/tum.py | mli0603/lietorch | 360 | 12666030 |
import numpy as np
import torch
import csv
import os
import cv2
import math
import random
import json
import pickle
import os.path as osp
from lietorch import SE3
from .stream import RGBDStream
from .rgbd_utils import loadtum
intrinsics_dict = {
'freiburg1': [517.3, 516.5, 318.6, 255.3],
'freiburg2': [520.9, 521.0, 325.1, 249.7],
'freiburg3': [535.4, 539.2, 320.1, 247.6],
}
distortion_dict = {
'freiburg1': [0.2624, -0.9531, -0.0054, 0.0026, 1.1633],
'freiburg2': [0.2312, -0.7849, -0.0033, -0.0001, 0.9172],
'freiburg3': [0, 0, 0, 0, 0],
}
def as_intrinsics_matrix(intrinsics):
K = np.eye(3)
K[0,0] = intrinsics[0]
K[1,1] = intrinsics[1]
K[0,2] = intrinsics[2]
K[1,2] = intrinsics[3]
return K
class TUMStream(RGBDStream):
def __init__(self, datapath, **kwargs):
super(TUMStream, self).__init__(datapath=datapath, **kwargs)
def _build_dataset_index(self):
""" build list of images, poses, depths, and intrinsics """
images, depths, poses, intrinsics = loadtum(self.datapath, self.frame_rate)
intrinsic, _ = TUMStream.calib_read(self.datapath)
intrinsics = np.tile(intrinsic[None], (len(images), 1))
# set first pose to identity
poses = SE3(torch.as_tensor(poses))
poses = poses[[0]].inv() * poses
poses = poses.data.cpu().numpy()
self.images = images
self.poses = poses
self.depths = depths
self.intrinsics = intrinsics
@staticmethod
def calib_read(datapath):
if 'freiburg1' in datapath:
intrinsic = intrinsics_dict['freiburg1']
d_coef = distortion_dict['freiburg1']
elif 'freiburg2' in datapath:
intrinsic = intrinsics_dict['freiburg2']
d_coef = distortion_dict['freiburg2']
elif 'freiburg3' in datapath:
intrinsic = intrinsics_dict['freiburg3']
d_coef = distortion_dict['freiburg3']
return np.array(intrinsic), np.array(d_coef)
@staticmethod
def image_read(image_file):
intrinsics, d_coef = TUMStream.calib_read(image_file)
K = as_intrinsics_matrix(intrinsics)
image = cv2.imread(image_file)
return cv2.undistort(image, K, d_coef)
@staticmethod
def depth_read(depth_file):
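        # TUM RGB-D depth PNGs store depth with a scale factor of 5000 (1 unit = 0.2 mm); divide to get meters.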
depth = cv2.imread(depth_file, cv2.IMREAD_ANYDEPTH)
return depth.astype(np.float32) / 5000.0
|
components/aws/sagemaker/tests/unit_tests/tests/common/test_boto3_manager.py | Strasser-Pablo/pipelines | 2,860 | 12666042 | <gh_stars>1000+
import unittest
import os
from boto3.session import Session
from unittest.mock import patch, MagicMock, ANY
from common.boto3_manager import Boto3Manager
class Boto3ManagerTestCase(unittest.TestCase):
def test_assume_default_boto3_session(self):
returned_session = Boto3Manager._get_boto3_session("us-east-1")
assert isinstance(returned_session, Session)
assert returned_session.region_name == "us-east-1"
@patch("common.boto3_manager.DeferredRefreshableCredentials", MagicMock())
@patch("common.boto3_manager.AssumeRoleCredentialFetcher", MagicMock())
def test_assume_role_boto3_session(self):
returned_session = Boto3Manager._get_boto3_session(
"us-east-1", role_arn="abc123"
)
assert isinstance(returned_session, Session)
assert returned_session.region_name == "us-east-1"
# Bury into the internals to ensure our provider was registered correctly
our_provider = returned_session._session._components.get_component(
"credential_provider"
).providers[0]
assert isinstance(our_provider, Boto3Manager.AssumeRoleProvider)
def test_assumed_sagemaker_client(self):
Boto3Manager._get_boto3_session = MagicMock()
mock_sm_client = MagicMock()
# Mock the client("SageMaker", ...) return value
Boto3Manager._get_boto3_session.return_value.client.return_value = (
mock_sm_client
)
client = Boto3Manager.get_sagemaker_client(
"v1.0.0", "us-east-1", assume_role_arn="abc123"
)
assert client == mock_sm_client
Boto3Manager._get_boto3_session.assert_called_once_with("us-east-1", "abc123")
Boto3Manager._get_boto3_session.return_value.client.assert_called_once_with(
"sagemaker", endpoint_url=None, config=ANY, region_name="us-east-1"
)
|
dragonfly/nn/syn_nn_functions.py | hase1128/dragonfly | 675 | 12666060 | <reponame>hase1128/dragonfly<gh_stars>100-1000
"""
Implements various synthetic functions on NN architectures.
-- <EMAIL>
"""
# pylint: disable=invalid-name
import numpy as np
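# Most *_signal helpers below peak at 1.0 when their statistic equals bias_val and decay exponentially away from it.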
def _get_vals_wo_None(iter_of_vals):
""" Returns a list of values without Nones. """
return [x for x in iter_of_vals if x is not None]
def _num_units_signal(num_units_vals, bias_val, decay):
""" Signal on the number of units. """
num_units_vals = np.array(_get_vals_wo_None(num_units_vals))
return np.exp(-decay * abs(num_units_vals.mean() - bias_val))
def _degree_signal(in_degrees, out_degrees, bias_val, decay):
""" Signal on the degrees. """
avg_degree = (in_degrees.mean() + out_degrees.mean())/2.0
return np.exp(-decay * abs(avg_degree - bias_val))
def _get_ip_op_distance_signal(ip_op_dist, bias_val, decay):
""" Signal on distance from input to output. """
return np.exp(-decay * abs(ip_op_dist - bias_val))
def _get_layer_degree_signal(degree_of_layer, bias_val, decay):
""" A signal based on the degree of a layer. """
return np.exp(-decay * abs(degree_of_layer - bias_val))
def _get_num_layers_signal(num_layers, bias_val, decay):
""" A signal based on the number of layers. """
return np.exp(-decay * abs(num_layers - bias_val))
def _get_num_edges_signal(num_edges, bias_val, decay):
""" A signal based on the total number of edges. """
return np.exp(-decay * abs(num_edges - bias_val))
def _get_stride_signal(strides, bias_val, decay):
""" A signal using the strides. """
strides = np.array(_get_vals_wo_None(strides))
return np.exp(-decay * abs(strides.mean() - bias_val))
def _get_conv_signal(layer_labels):
""" A signal using the convolutional layers. """
conv_layers = [ll for ll in layer_labels if \
ll.startswith('conv') or ll.startswith('res')]
conv_filter_vals = np.array([float(ll[-1]) for ll in conv_layers])
return (conv_filter_vals == 3).sum() / float(len(conv_filter_vals) + 1)
def _get_sigmoid_signal(layer_labels):
""" A function using the sigmoid layer fraction as the signal. """
internal_layers = [ll for ll in layer_labels if ll not in ['ip', 'op', 'linear']]
good_layers = [ll in ['logistic', 'relu'] for ll in internal_layers]
return sum(good_layers) / float(len(internal_layers) + 1)
def syn_func1_common(nn):
""" A synthetic function on NN architectures. """
return _num_units_signal(nn.num_units_in_each_layer, 1000, 0.002) + \
_degree_signal(nn.get_in_degrees(), nn.get_out_degrees(), 5, 0.4) + \
_get_ip_op_distance_signal(nn.get_distances_from_ip()[nn.get_op_layer_idx()],
10, 0.2) + \
_get_layer_degree_signal(nn.get_in_degrees()[nn.get_op_layer_idx()], 3, 0.5) + \
_get_layer_degree_signal(nn.get_out_degrees()[nn.get_ip_layer_idx()], 4, 0.5) + \
_get_num_layers_signal(nn.num_layers, 30, 0.1) + \
_get_num_edges_signal(nn.conn_mat.sum(), 100, 0.05)
def cnn_syn_func1(nn):
""" A synthetic function for CNNs. """
return syn_func1_common(nn) + \
_num_units_signal(nn.num_units_in_each_layer, 500, 0.001) + \
_get_num_layers_signal(nn.num_layers, 50, 0.3) + \
_get_stride_signal(nn.strides, 1.5, 3.0) + \
_get_conv_signal(nn.layer_labels)
def mlp_syn_func1(nn):
""" A synthetic function for MLPs. """
return syn_func1_common(nn) + \
_get_num_edges_signal(nn.conn_mat.sum(), 50, 0.1) + \
_num_units_signal(nn.num_units_in_each_layer, 2000, 0.001) + \
_get_sigmoid_signal(nn.layer_labels)
|
trove/common/policies/configuration_parameters.py | sapcc/trove | 244 | 12666065 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from trove.common.policies.base import PATH_DATASTORE, PATH_VERSIONS
rules = [
policy.DocumentedRuleDefault(
name='configuration-parameter:index',
check_str='rule:admin_or_owner',
        description='List all parameters bound to a datastore version.',
operations=[
{
'path': PATH_DATASTORE + '/versions/{version}/parameters',
'method': 'GET'
}
]),
policy.DocumentedRuleDefault(
name='configuration-parameter:show',
check_str='rule:admin_or_owner',
        description='Get a parameter of a datastore version.',
operations=[
{
'path': (PATH_DATASTORE +
'/versions/{version}/parameters/{param}'),
'method': 'GET'
}
]),
policy.DocumentedRuleDefault(
name='configuration-parameter:index_by_version',
check_str='rule:admin_or_owner',
        description='List all parameters bound to a datastore version by '
                    'the id of the version (datastore is not provided).',
operations=[
{
'path': PATH_VERSIONS + '/{version}/paramters',
'method': 'GET'
}
]),
policy.DocumentedRuleDefault(
name='configuration-parameter:show_by_version',
check_str='rule:admin_or_owner',
        description='Get a parameter of a datastore version by its name and '
                    'the id of the version (datastore is not provided).',
operations=[
{
'path': PATH_VERSIONS + '/{version}/paramters/{param}',
'method': 'GET'
}
])
]
def list_rules():
return rules
|
src/python/emane/events/eventservice.py | weston-nrl/emane | 114 | 12666092 | #
# Copyright (c) 2013-2015,2017 - Adjacent Link LLC, Bridgewater,
# New Jersey
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Adjacent Link LLC nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from . import event_pb2
from ..ota import otaheader_pb2
from . import EventServiceException
import os
import socket
import threading
import fcntl
import struct
import select
import time
import uuid
import sys
def get_ip_address(ifname):
# http://code.activestate.com/recipes/439094/
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15].encode() if sys.version_info >= (3,0) else ifname[:15])
)[20:24])
def init_multicast_socket(group,port,device):
try:
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
except socket.error as msg :
if sys.version_info >= (3,3):
raise EventServiceException("event socket failure %s" % str(msg),True)
else:
raise EventServiceException("event socket failure %s %s" % (str(msg[0]), msg[1]),True)
try:
sock.setsockopt(socket.IPPROTO_IP,socket.IP_MULTICAST_TTL,32)
except socket.error as msg :
if sys.version_info >= (3,3):
raise EventServiceException("event socket option failure %s" % str(msg),True)
else:
raise EventServiceException("event socket option failure %s %s" % (str(msg[0]), msg[1]),True)
try:
sock.setsockopt(socket.IPPROTO_IP,socket.IP_MULTICAST_LOOP,1)
except socket.error as msg :
if sys.version_info >= (3,3):
raise EventServiceException("event socket option failure %s" % str(msg),True)
else:
raise EventServiceException("event socket option failure %s %s" % (str(msg[0]), msg[1]),True)
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except socket.error as msg :
if sys.version_info >= (3,3):
raise EventServiceException("event socket option failure %s" % str(msg),True)
else:
raise EventServiceException("event socket option failure %s %s" % (str(msg[0]), msg[1]),True)
try:
sock.bind((group,port))
except socket.error as msg:
if sys.version_info >= (3,3):
raise EventServiceException("bind failure %s" % str(msg),True)
else:
raise EventServiceException("bind failure %s %s" % (str(msg[0]), msg[1]),True)
try:
if device:
devAddress = socket.inet_aton(get_ip_address(device))
else:
devAddress = socket.inet_aton("0.0.0.0")
sock.setsockopt(socket.SOL_IP,
socket.IP_ADD_MEMBERSHIP,
socket.inet_aton(group) +
devAddress)
sock.setsockopt(socket.SOL_IP,
socket.IP_MULTICAST_IF,
devAddress)
except socket.error as msg:
if sys.version_info >= (3,3):
raise EventServiceException("mulicast add membership failure %s" % str(msg),True)
else:
raise EventServiceException("mulicast add membership failure %s %s" % (str(msg[0]), msg[1]),True)
except IOError:
raise EventServiceException("unknown device %s" % device,True)
return sock
class EventService:
def __init__(self,eventchannel,otachannel = None):
(self._multicastGroup,self._port,_) = eventchannel
self._defaultHandler = None
self._handlers = {}
self._socket = None
self._readFd,self._writeFd = os.pipe()
self._uuid = uuid.uuid4()
self._sequenceNumber = 0
self._socketOTA = None
self._socket = init_multicast_socket(*eventchannel)
if otachannel:
self._socketOTA = init_multicast_socket(*otachannel)
self._lock = threading.Lock()
def breakloop(self):
os.write(self._writeFd,"\n".encode())
def loop(self,default=None):
buffer = ""
running = True
while running:
rdfds = [self._socket,self._readFd]
if self._socketOTA:
rdfds.append(self._socketOTA)
try:
readable,_,_ = select.select(rdfds,[],[])
except select.error:
continue
for fd in readable:
if fd is self._socket:
data,_ = self._socket.recvfrom(65535)
if not len(data):
running = False
break
(length,) = struct.unpack_from("!H",data)
if length == len(data) - 2:
event = event_pb2.Event()
event.ParseFromString(data[2:])
for serialization in event.data.serializations:
self._lock.acquire()
try:
if serialization.eventId in self._handlers:
self._handlers[serialization.eventId](serialization.nemId,
serialization.eventId,
serialization.data,
uuid.UUID(bytes=event.uuid),
event.sequenceNumber)
elif default:
default(serialization.nemId,
serialization.eventId,
serialization.data,
uuid.UUID(bytes=event.uuid),
event.sequenceNumber)
finally:
self._lock.release()
elif fd is self._readFd:
running = False
break
elif fd is self._socketOTA:
data,_ = self._socketOTA.recvfrom(65535)
if not len(data):
running = False
break
(headerLength,) = struct.unpack_from("!H",data)
otaHeader = otaheader_pb2.OTAHeader()
otaHeader.ParseFromString(data[2:headerLength+2])
eventData = event_pb2.Event.Data()
# currently we only process attached events that
# are fully contained in the first part (fragment)
# of a one-part (no fragmentation) or multi-part
# (fragmented) OTA message
#
# Notes for below logic:
# 2 + headerLength = 2 byte header length field
# + header length
#
# 9 = OTA PartInfo header length. Where PartInfo
# is used to support fragmentation.
if otaHeader.HasField("payloadInfo") and \
len(data) >= 2 + headerLength + 9 + otaHeader.payloadInfo.eventLength:
eventData.ParseFromString(data[2+headerLength + 9:2 + headerLength + 9 + otaHeader.payloadInfo.eventLength])
for serialization in eventData.serializations:
self._lock.acquire()
try:
if serialization.eventId in self._handlers:
self._handlers[serialization.eventId](serialization.nemId,
serialization.eventId,
serialization.data,
uuid.UUID(bytes=otaHeader.uuid),
otaHeader.sequence)
elif default:
default(serialization.nemId,
serialization.eventId,
serialization.data,
uuid.UUID(bytes=otaHeader.uuid),
otaHeader.sequence)
finally:
self._lock.release()
def nextEvent(self):
events = []
eventId = 0
running = True
while running:
try:
rdfds = [self._socket,self._readFd]
if self._socketOTA:
rdfds.append(self._socketOTA)
readable,_,_ = select.select(rdfds,[],[])
except select.error:
continue
for fd in readable:
if fd is self._socket:
data,_ = self._socket.recvfrom(65535)
if not len(data):
running = False
break
(length,) = struct.unpack_from("!H",data)
if length == len(data) - 2:
event = event_pb2.Event()
event.ParseFromString(data[2:])
for serialization in event.data.serializations:
events.append((serialization.nemId,
serialization.eventId,
serialization.data))
return (uuid.UUID(bytes=event.uuid),
event.sequenceNumber,
tuple(events))
elif fd is self._readFd:
running = False
break
elif fd is self._socketOTA:
data,_ = self._socketOTA.recvfrom(65535)
if not len(data):
running = False
break
(headerLength,) = struct.unpack_from("!H",data)
otaHeader = otaheader_pb2.OTAHeader()
otaHeader.ParseFromString(data[2:headerLength+2])
eventData = event_pb2.Event.Data()
eventData.ParseFromString(data[2+headerLength:2 + headerLength +otaHeader.eventLength])
for serialization in eventData.serializations:
events.append((serialization.nemId,
serialization.eventId,
serialization.data))
return (uuid.UUID(bytes=otaHeader.uuid),
otaHeader.sequenceNumber,
tuple(events))
return (None, None, tuple(events))
def subscribe(self,eventId,callback):
self._lock.acquire()
if callback:
self._handlers[eventId] = callback
self._lock.release()
def unsubscribe(self,eventId):
self._lock.acquire()
if eventId in self._handlers:
del self._handlers[eventId]
self._lock.release()
def publish(self,nemId,event):
self._sequenceNumber += 1
msg = event_pb2.Event()
msg.uuid = self._uuid.bytes
msg.sequenceNumber = self._sequenceNumber
serialization = msg.data.serializations.add()
serialization.nemId = nemId
serialization.eventId = event.IDENTIFIER
serialization.data = event.serialize()
buf = msg.SerializeToString()
self._socket.sendto(struct.pack("!H",len(buf)) + buf,
(self._multicastGroup,self._port))
|
dspn/plot-state-progress.py | rishabh1694/dspn-mod | 102 | 12666110 | import argparse
import itertools
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.colors as colors
import numpy as np
import os
import data
from PIL import Image
parser = argparse.ArgumentParser()
parser.add_argument("n", type=int, nargs="*")
parser.add_argument("--keep", type=int, nargs="*")
args = parser.parse_args()
matplotlib.rc("text", usetex=True)
params = {"text.latex.preamble": [r"\usepackage{bm,amsmath,mathtools,amssymb}"]}
plt.rcParams.update(params)
base_path = "clevr/images/val"
val_images = sorted(os.listdir(base_path))
def take(iterable, n):
l = []
for _ in range(n):
l.append(next(iterable))
return l
def load_file(path):
with open(path) as fd:
for f in fd:
tokens = iter(f.strip().split(" "))
take(tokens, 1)
if "detect" in path:
score = float(take(tokens, 1)[0])
if score < 0.5:
continue
else:
score = 1.0
coord = take(tokens, 3)
material = np.argmax(take(tokens, 2))
color = np.argmax(take(tokens, 8))
shape = np.argmax(take(tokens, 3))
size = np.argmax(take(tokens, 2))
access = lambda x, i: data.CLASSES[x][i]
yield (
"({:.2f}, {:.2f}, {:.2f})".format(*map(lambda x: 3 * float(x), coord)),
access("size", size),
access("color", color),
access("material", material),
access("shape", shape),
)
indices_to_use = args.keep
indices_to_use.append(-2)
indices_to_use.append(-1)
plt.figure(figsize=(12, 4))
for j, index in enumerate(args.n):
progress = []
path = "out/clevr-state/{}-clevr-state-1-{}/{}/{}.txt"
for i in range(31):
points = list(
load_file(path.format("dspn", "30", "detections", f"{index}-step{i}"))
)
progress.append(points)
progress.append(list(load_file(path.format("base", "10", "groundtruths", index))))
progress.append(list(load_file(path.format("base", "10", "detections", index))))
img = Image.open(os.path.join(base_path, val_images[int(index)]))
img = img.resize((128, 128), Image.LANCZOS)
plt.imshow(img)
plt.xticks([])
plt.yticks([])
plt.savefig(f"img-{j}.pdf", bbox_inches="tight")
matrix = []
for i, progress_n in enumerate(indices_to_use):
column = []
step = progress[progress_n]
if progress_n == -2:
header = r"True $\bm{Y}$"
elif progress_n == -1:
header = r"Baseline"
else:
header = r"$\hat{\bm{Y}}^{(" + str(progress_n) + ")}$"
column.append(header)
for object in sorted(
step, key=lambda x: [float(x.strip()) for x in x[0][1:-1].split(",")]
):
column.append(object[0])
column.append(" ".join(object[1:]))
matrix.append(column)
# transpose
matrix = itertools.zip_longest(*matrix, fillvalue="")
# mark mismatched entries
contains_words = lambda row: "small" in row[-2] or "large" in row[-2]
# make an attribute red if it isn't correct
matrix = [
[
" ".join(
(
r"\textcolor{red}{" + attribute + "}"
if attribute != correct_attribute
else attribute
)
for attribute, correct_attribute in zip(
state.split(" "), row[-2].split(" ")
)
)
for state in row
]
if contains_words(row)
else row
for row in matrix
]
matrix = [" & ".join(row) for row in matrix]
# format into table
template = r"""
\includegraphics[width=0.22\linewidth]{{img-{}}}
\begin{{tabular}}{}
\toprule
{}\\
\midrule
{}\\
\bottomrule
\end{{tabular}}
"""
table = template.format(
j, "{" + "c" * len(indices_to_use) + "}", matrix[0], "\\\\\n".join(matrix[1:])
)
print(table)
|
libact/query_strategies/tests/test_density_weighted_meta.py | afedyukova/libact | 788 | 12666119 | <reponame>afedyukova/libact
import unittest
import os
import numpy as np
from numpy.testing import assert_array_equal
from sklearn.cluster import KMeans
from sklearn.metrics.pairwise import cosine_similarity
from libact.base.dataset import Dataset, import_libsvm_sparse
from libact.models import LogisticRegression
from libact.query_strategies import UncertaintySampling
from libact.labelers import IdealLabeler
from ..density_weighted_meta import DensityWeightedMeta
from .utils import run_qs
class DensityWeightedMetaTestCase(unittest.TestCase):
def setUp(self):
dataset_filepath = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'datasets/heart_scale')
self.X, self.y = import_libsvm_sparse(
dataset_filepath).format_sklearn()
self.quota = 10
def test_density_weighted_meta_uncertainty_lc(self):
trn_ds = Dataset(self.X[:20], np.concatenate([self.y[:6], [None] * 14]))
base_qs = UncertaintySampling(
trn_ds, method='lc',
model=LogisticRegression(solver='liblinear', multi_class="ovr"))
similarity_metric = cosine_similarity
clustering_method = KMeans(n_clusters=3, random_state=1126)
qs = DensityWeightedMeta(
dataset=trn_ds, base_query_strategy=base_qs,
similarity_metric=similarity_metric,
clustering_method=clustering_method,
beta=1.0, random_state=1126)
model = LogisticRegression(solver='liblinear', multi_class="ovr")
qseq = run_qs(trn_ds, qs, self.y, self.quota)
assert_array_equal(qseq, np.array([13, 18, 9, 12, 8, 16, 10, 19, 15, 17]))
if __name__ == '__main__':
unittest.main()
|
tests/test_authentication.py | joshlk/many_requests | 398 | 12666120 | <filename>tests/test_authentication.py
from asks import BasicAuth
from unittest import TestCase
import pytest
from asks.response_objects import Response
from many_requests.many_requests_ import ManyRequests
from many_requests.common import BadResponse
from .mock_server import web_server
@pytest.mark.usefixtures("web_server")
class TestManyRequestAuth(TestCase):
def test_basic_auth(self):
auths = [
BasicAuth(auth_info=(("username", "password"))), # ok
BasicAuth(auth_info=(("username", "bad_password"))) # bad
]
url = 'http://0.0.0.0:8080/basic_auth'
responses = ManyRequests(10, 2, retries=2, retry_sleep=0)(method='GET', url=url, auth=auths)
assert len(responses) == 2
ok_res = responses[0]
assert isinstance(ok_res, Response)
assert ok_res.url == url
bad_res = responses[1]
assert isinstance(bad_res, BadResponse)
assert bad_res.response.status_code == 401
assert bad_res.response.url == url
|
python/tvm/driver/tvmc/pass_config.py | XiaoSong9905/tvm | 4,640 | 12666121 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
TVMC PassContext Interface
"""
import importlib
import tvm
from tvm.driver.tvmc import TVMCException
def load_function(full_name):
"""Dynamic loading a function by the full name.
Parameters
----------
full_name: str
The name of a PackedFunc or a string of the form "path.to.module.func"
that indicates the module that can be imported.
    Be aware of the lookup order: the name is first resolved as a TVM global
    function; if that fails, it is imported via "importlib.import_module".
Returns
-------
func: function or PackedFunc
        The loaded function.
"""
global_func = tvm.get_global_func(full_name, allow_missing=True)
if global_func is not None:
return global_func
# split full name "path.to.module.func" into two parts ["path.to.module", "func"]
module_name, func_name = full_name.rsplit(".", 1)
# import module and find the function
module = importlib.import_module(module_name)
if hasattr(module, func_name):
return getattr(module, func_name)
raise TVMCException(f"No function '{func_name}' found in module '{module_name}'.")
def get_pass_config_value(name, value, config_type):
"""Get a PassContext configuration value, based on its config data type.
Parameters
----------
name: str
config identifier name.
value: str
value assigned to the config, provided via command line.
config_type: str
data type defined to the config, as string.
Returns
-------
parsed_value: bool, int or str
a representation of the input value, converted to the type
specified by config_type.
"""
parsed_value = None
if config_type == "IntImm":
# "Bool" configurations in the PassContext are recognized as
# IntImm, so deal with this case here
mapping_values = {
"false": False,
"true": True,
}
if value.isdigit():
parsed_value = int(value)
else:
# if not an int, accept only values on the mapping table, case insensitive
parsed_value = mapping_values.get(value.lower(), None)
if parsed_value is None:
raise TVMCException(f"Invalid value '{value}' for configuration '{name}'.")
elif config_type == "runtime.String":
parsed_value = value
elif config_type == "Array":
if name == "tir.add_lower_pass":
pass_list = value.split(",")
if len(pass_list) % 2 != 0:
raise TVMCException(
f"The configuration of '{name}' must be of the form "
"'tir.add_lower_pass=opt_level1,pass1,opt_evel2,pass2'"
)
parsed_value = []
for i in range(0, len(pass_list), 2):
level, pass_func = pass_list[i].strip(), pass_list[i + 1].strip()
try:
level = int(level)
except ValueError:
raise TVMCException(f"Only integer is allow for configuration '{name}'.")
# TODO (@leeexyz) We should parse configurations of each tir Pass.
# For now, we only use the defaults. Currently, There are four config nodes:
# `tir.transform.LoopPartitionConfig`
# `tir.transform.UnrollLoopConfig`
# `tir.transform.HoistIfThenElseConfig`
# `tir.transform.InjectDoubleBufferConfig`
# loading pass func and calling it to get the Pass
pass_func = load_function(pass_func)()
parsed_value.append((level, pass_func))
else:
raise TVMCException(f"Unsupported configuration '{name}' for '{config_type}' type.")
else:
        # no need to raise here because we already checked before calling this function
pass
return parsed_value
def parse_configs(input_configs):
"""Parse configuration values set via command line.
Parameters
----------
input_configs: list of str
list of configurations provided via command line.
Returns
-------
pass_context_configs: dict
a dict containing key-value configs to be used in the PassContext.
"""
if not input_configs:
return {}
all_configs = tvm.ir.transform.PassContext.list_configs()
supported_config_types = ("IntImm", "runtime.String", "Array")
supported_configs = [
name for name in all_configs.keys() if all_configs[name]["type"] in supported_config_types
]
pass_context_configs = {}
for config in input_configs:
if not config:
raise TVMCException(
f"Invalid format for configuration '{config}', use <config>=<value>"
)
# Each config is expected to be provided as "name=value"
try:
name, value = config.split("=")
name = name.strip()
value = value.strip()
except ValueError:
raise TVMCException(
f"Invalid format for configuration '{config}', use <config>=<value>"
)
if name not in all_configs:
raise TVMCException(
f"Configuration '{name}' is not defined in TVM. "
f"These are the existing configurations: {', '.join(all_configs)}"
)
if name not in supported_configs:
raise TVMCException(
f"Configuration '{name}' uses a data type not supported by TVMC. "
f"The following configurations are supported: {', '.join(supported_configs)}"
)
config_type = all_configs[name]["type"]
parsed_value = get_pass_config_value(name, value, config_type)
if config_type == "Array" and name in pass_context_configs:
# merge configs if the configuration exists
pass_context_configs[name].extend(parsed_value)
else:
pass_context_configs[name] = parsed_value
return pass_context_configs
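# Illustrative example (assuming "tir.disable_vectorize" is a registered IntImm config):
#   parse_configs(["tir.disable_vectorize=1"]) -> {"tir.disable_vectorize": 1}
# The returned dict can be passed as the `config` argument of tvm.transform.PassContext.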
|
pygears/lib/cat_util.py | bogdanvuk/pygears | 120 | 12666129 | from pygears.typing import Queue, typeof
def din_data_cat(intfs, order=None):
if order is None:
order = range(len(intfs))
data = []
for o in order:
intf = intfs[o]
if intf['modport'] == 'producer':
continue
if issubclass(intf['type'], Queue):
if intf['type'][0].width > 0:
data.append(f'{intf["name"]}_s.data')
else:
if intf['type'].width > 0:
data.append(f'{intf["name"]}_s')
return f'{{ {", ".join(reversed(data))} }}'
def din_data_cat_value(data):
dout = []
for d in data:
if isinstance(d, Queue):
dout.append(d.data)
else:
dout.append(d)
return tuple(dout)
|
linter.py | TheBossProSniper/electric-windows | 210 | 12666130 | import json, os, sys
os.chdir(r'C:\Users\xtrem\Desktop\Electric\Electric Packages\packages')
for f in os.listdir():
data = ''
try:
with open(f, 'r') as file:
data = json.load(file)
except:
continue
linted = {
'display-name': data['display-name'],
'package-name': data['package-name'],
}
# Change Based On Version
# Not Portable
if 'portable' in list(data.keys()):
linted['portable'] = {'latest-version': data['portable']['latest-version']}
for version in list(data['portable'].keys()):
if version not in ['latest-version', 'auto-update', 'package-name', 'display-name']:
linted['portable'][version] = {'url': data['portable'][version]['url']}
if 'checksum' in list(data['portable'][version].keys()):
linted['portable'][version]['checksum'] = data['portable'][version]['checksum']
if 'file-type' in list(data['portable'][version].keys()):
linted['portable'][version]['file-type'] = data['portable'][version]['file-type']
if 'pre-install' in list(data['portable'][version].keys()):
linted['portable'][version]['pre-install'] = data['portable'][version]['pre-install']
if 'post-install' in list(data['portable'][version].keys()):
linted['portable'][version]['post-install'] = data['portable'][version]['post-install']
for key in list(data['portable'][version].keys()):
if key not in list(linted['portable'][version].keys()):
linted['portable'][version][key] = data['portable'][version][key]
print(json.dumps(linted, indent=4))
# TODO: Handle Portable Section data['portable'] |
onnx_tf/handlers/backend/max.py | malisit/onnx-tensorflow | 1,110 | 12666152 | <filename>onnx_tf/handlers/backend/max.py
import tensorflow as tf
from onnx_tf.common import exception
from onnx_tf.common import data_type
from onnx_tf.common import sys_config
from onnx_tf.handlers.backend_handler import BackendHandler
from onnx_tf.handlers.handler import onnx_op
@onnx_op("Max")
class Max(BackendHandler):
supported_types = [
tf.bfloat16, tf.float16, tf.float32, tf.float64, tf.int32, tf.int64
]
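  # Some integer dtypes are cast to a wider supported type before computing the
  # element-wise maximum, and the result is cast back to the original dtype.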
cast_map = {
tf.uint8: tf.int32,
tf.uint16: tf.int32,
tf.uint32: tf.int64,
tf.int8: tf.int32,
tf.int16: tf.int32
}
cast_map[tf.uint64] = tf.int64 if sys_config.auto_cast else None
@classmethod
def args_check(cls, node, **kwargs):
dtype = kwargs["tensor_dict"][node.inputs[0]].dtype
if dtype in cls.cast_map and cls.cast_map[dtype] is None:
exception.DTYPE_NOT_CAST_EXCEPT(
"Max input " + node.inputs[0] + " with data type '" +
data_type.tf_to_np_str(dtype) + "'",
data_type.tf_to_np_str_list(cls.supported_types))
@classmethod
def _common(cls, node, **kwargs):
values = [kwargs["tensor_dict"][inp] for inp in node.inputs]
dtype = values[0].dtype
if dtype in cls.cast_map:
values = [tf.cast(v, cls.cast_map[v.dtype]) for v in values]
result = values[0]
for i in range(1, len(values)):
result = tf.maximum(result, values[i])
return [tf.cast(result, dtype) if dtype in cls.cast_map else result]
@classmethod
def version_1(cls, node, **kwargs):
return cls._common(node, **kwargs)
@classmethod
def version_6(cls, node, **kwargs):
return cls._common(node, **kwargs)
@classmethod
def version_8(cls, node, **kwargs):
return cls._common(node, **kwargs)
@classmethod
def version_12(cls, node, **kwargs):
return cls._common(node, **kwargs)
@classmethod
def version_13(cls, node, **kwargs):
return cls._common(node, **kwargs)
|
ctc_decoder/bk_tree.py | TenaciousC22/CTCDecoder | 705 | 12666179 | from typing import List
import editdistance as ed
class BKTree:
"""Burkhard Keller tree: used to find strings within tolerance (w.r.t. edit distance metric)
to given query string."""
def __init__(self, txt_list: List[str]) -> None:
"""Pass list of texts (words) which are inserted into the tree."""
self.root = None
for txt in txt_list:
self._insert(self.root, txt)
def query(self, txt: str, tolerance: int) -> List[str]:
"""Query strings within given tolerance (w.r.t. edit distance metric)."""
return self._query(self.root, txt, tolerance)
def _insert(self, node, txt):
# insert root node
if node is None:
self.root = (txt, {})
return
# insert all other nodes
d = ed.eval(node[0], txt)
if d in node[1]:
self._insert(node[1][d], txt)
else:
node[1][d] = (txt, {})
def _query(self, node, txt, tolerance):
# handle empty root node
if node is None:
return []
# distance between query and current node
d = ed.eval(node[0], txt)
# add current node to result if within tolerance
res = []
if d <= tolerance:
res.append(node[0])
# iterate over children
for (edge, child) in node[1].items():
if d - tolerance <= edge <= d + tolerance:
res += self._query(child, txt, tolerance)
return res
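# Illustrative usage sketch (names are hypothetical):
#   tree = BKTree(["hello", "hallo", "world"])
#   tree.query("hallo", tolerance=1)  # -> ["hello", "hallo"]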
|
kraino/core/keras_extensions.py | mateuszmalinowski/visual_turing_test-tutorial | 151 | 12666185 | <reponame>mateuszmalinowski/visual_turing_test-tutorial
"""
Additional theano/keras functions.
Author: <NAME>
Email: <EMAIL>
"""
#import marshal
import numpy
#import types
from keras.layers.convolutional import Convolution1D
from keras.layers.convolutional import MaxPooling1D
from keras.layers.core import Lambda
from keras.layers.core import MaskedLayer
from keras.layers.core import TimeDistributedMerge
from keras import backend as K
## functions ##
def time_distributed_nonzero_max_pooling(x):
"""
Computes maximum along the first (time) dimension.
It ignores the mask m.
In:
x - input; a 3D tensor
mask_value - value to mask out, if None then no masking;
by default 0.0,
"""
import theano.tensor as T
mask_value=0.0
x = T.switch(T.eq(x, mask_value), -numpy.inf, x)
masked_max_x = x.max(axis=1)
# replace infinities with mask_value
masked_max_x = T.switch(T.eq(masked_max_x, -numpy.inf), 0, masked_max_x)
return masked_max_x
def time_distributed_masked_ave(x, m):
"""
Computes average along the first (time) dimension.
In:
x - input; a 3D tensor
m - mask
"""
tmp = K.sum(x, axis=1)
nonzeros = K.sum(m, axis=-1)
return tmp / K.expand_dims(K.cast(nonzeros, tmp.dtype))
def time_distributed_masked_max(x, m):
"""
Computes max along the first (time) dimension.
In:
x - input; a 3D tensor
m - mask
m_value - value for masking
"""
# place infinities where mask is off
m_value = 0.0
tmp = K.switch(K.equal(m, 0.0), -numpy.inf, 0.0)
x_with_inf = x + K.expand_dims(tmp)
x_max = K.max(x_with_inf, axis=1)
r = K.switch(K.equal(x_max, -numpy.inf), m_value, x_max)
return r
## classes ##
# Transforms existing layers to masked layers
class MaskedTimeDistributedMerge(MaskedLayer, TimeDistributedMerge):
pass
class MaskedConvolution1D(MaskedLayer, Convolution1D):
pass
class MaskedMaxPooling1D(MaskedLayer, MaxPooling1D):
pass
# auxiliary mask-aware layers
class DropMask(MaskedLayer):
"""
Removes a mask from the layer.
"""
def get_output_mask(self, train=False):
return None
class LambdaWithMask(MaskedLayer, Lambda):
"""
Lambda function that takes a two argument function, and returns
a value returned by the function applied to the output of the previous layer
and the mask.
That is: LambdaWithMask(f) = f(previous, mask)
"""
def get_output(self, train=False):
#func = marshal.loads(self.function)
#func = types.FunctionType(func, globals())
func = self.function
if hasattr(self, 'previous'):
return func(self.previous.get_output(train),
self.previous.get_output_mask(train))
else:
return func(self.input, self.get_output_mask(train))
|
nikola/data/themes/base/messages/messages_ia.py | asmeurer/nikola | 1,901 | 12666194 | <filename>nikola/data/themes/base/messages/messages_ia.py
# -*- encoding:utf-8 -*-
"""Autogenerated file, do not edit. Submit translations on Transifex."""
MESSAGES = {
"%d min remaining to read": "%dminutas de lectura remanente",
"(active)": "(active)",
"Also available in:": "Anque disponibile in:",
"Archive": "Archivo",
"Atom feed": "Fluxo Atom",
"Authors": "Authores",
"Categories": "Categorias",
"Comments": "Commentos",
"LANGUAGE": "Interlingua",
"Languages:": "Linguas:",
"More posts about %s": "Plure entratas super %s",
"Newer posts": "Entratas plus recente",
"Next post": "Entrata successive",
"Next": "Successive",
"No posts found.": "Nulle entrata esseva trovate.",
"Nothing found.": "Nihil esseva trovate.",
"Older posts": "Entratas plus vetule",
"Original site": "Sito original",
"Posted:": "Publicate:",
"Posts about %s": "Entratas super %s",
"Posts by %s": "Entratas per %s",
"Posts for year %s": "Entratas del anno %s",
"Posts for {month_day_year}": "Entratas de {month_day_year}",
"Posts for {month_year}": "Entratas de {month_year}",
"Previous post": "Entrata precedente",
"Previous": "Precendente",
"Publication date": "Data de publication",
"RSS feed": "Fluxo RSS",
"Read in English": "Lege in interlingua",
"Read more": "Lege plus",
"Skip to main content": "Salta al contento principal",
"Source": "Sorgente",
"Subcategories:": "Subcategorias:",
"Tags and Categories": "Etiquettas e categorias",
"Tags": "Etiquettas",
"Toggle navigation": "Commuta navigation",
"Uncategorized": "Sin categoria",
"Up": "In alto",
"Updates": "Actualisationes",
"Write your page here.": "Scribe tu pagina hic.",
"Write your post here.": "Scribe tu entrata hic.",
"old posts, page %d": "Vetule entratas, pagina %d",
"page %d": "pagina %d",
"updated": "actualisate",
}
|
cnsenti/__init__.py | edddyeddy/cnsenti | 140 | 12666213 | from cnsenti.emotion import Emotion
from cnsenti.sentiment import Sentiment |
qinhaifang/src/evalTools/script/evaluateScene.py | SpaceNetChallenge/BuildingFootprintDetectors | 161 | 12666226 | <reponame>SpaceNetChallenge/BuildingFootprintDetectors
from spaceNet import evalTools as eT
from spaceNet import geoTools as gT
import numpy as np
import sys
import multiprocessing
import time
if __name__ == "__main__":
# load Truth and Test File Locations
if len(sys.argv) > 1:
truth_fp = sys.argv[1]
test_fp = sys.argv[2]
else:
test_fp = '/data/building_extraction/SpaceNet/data/predict_pixelGeoJson/3band_013022223132_Public_img2052_predict.geojson'
truth_fp = '/data/building_extraction/SpaceNet/data/predict_pixelGeoJson/3band_013022223132_Public_img2052_predict.geojson'
# check for cores available
if len(sys.argv) > 3:
max_cpu = int(sys.argv[3])
else:
max_cpu = multiprocessing.cpu_count()
parallel=False
# initialize scene counts
true_pos_counts = []
false_pos_counts = []
false_neg_counts = []
t0 = time.time()
# Start Ingest Of Truth and Test Case
sol_polys = gT.importgeojson(truth_fp, removeNoBuildings=True)
prop_polys = gT.importgeojson(test_fp)
#print('sol_polys{}'.format(sol_polys))
#print('prop_polys{}'.format(prop_polys))
t1 = time.time()
total = t1 - t0
print('time of ingest: ', total)
# Speed up search by preprocessing ImageId and polygonIds
test_image_ids = set([item['ImageId'] for item in prop_polys if item['ImageId'] > 0])
prop_polysIdList = np.asarray([item['ImageId'] for item in prop_polys if item["ImageId"] > 0 and \
item['BuildingId']!=-1])
#print('prop_polysIdLIST{}'.format(prop_polysIdList))
prop_polysPoly = np.asarray([item['poly'] for item in prop_polys if item["ImageId"] > 0 and \
item['BuildingId']!=-1])
#print('prop_polyspoly{}'.format(prop_polysPoly))
sol_polysIdsList = np.asarray([item['ImageId'] for item in sol_polys if item["ImageId"] > 0 and \
item['BuildingId']!=-1])
sol_polysPoly = np.asarray([item['poly'] for item in sol_polys if item["ImageId"] > 0 and \
item['BuildingId']!=-1])
bad_count = 0
F1ScoreList = []
cpu_count = min(multiprocessing.cpu_count(), max_cpu)
#print('{}'.format(max_cpu))
p = multiprocessing.Pool(processes=cpu_count)
ResultList = []
eval_function_input_list = eT.create_eval_function_input((test_image_ids,
(prop_polysIdList, prop_polysPoly),
(sol_polysIdsList, sol_polysPoly)))
# evalFunctionInput = creatEevalFunctionInput((test_image_ids,
# (prop_polysIdList, prop_polysPoly),
# (sol_polysIdsList, sol_polysPoly)))
# Calculate Values
t3 = time.time()
print('time For DataCreation {}s'.format(t3-t1))
#result_list = p.map(eT.evalfunction, eval_function_input_list)
if parallel==False:
result_list = []
for eval_input in eval_function_input_list:
print('eval_input={}'.format(eval_input))
result_list.append(eT.evalfunction(eval_input))
else:
result_list = p.map(eT.evalfunction, eval_function_input_list)
result_sum = np.sum(result_list, axis=0)
true_pos_total = result_sum[1]
false_pos_total = result_sum[2]
false_neg_total = result_sum[3]
print('True_Pos_Total', true_pos_total)
print('False_Pos_Total', false_pos_total)
print('False_Neg_Total', false_neg_total)
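    # Aggregate precision, recall and F1 over all evaluated images.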
precision = float(true_pos_total) / (float(true_pos_total) + float(false_pos_total))
recall = float(true_pos_total) / (float(true_pos_total) + float(false_neg_total))
F1ScoreTotal = 2.0 * precision*recall / (precision + recall)
print('F1Total', F1ScoreTotal)
t2 = time.time()
total = t2-t0
print('time of evaluation: {}'.format(t2-t1))
print('time of evaluation {}s/imageId'.format((t2-t1)/len(result_list)))
print('Total Time {}s'.format(total))
print(result_list)
print(np.mean(result_list))
|
proximal/lin_ops/warp.py | kyleaj/ProxImaL | 101 | 12666279 | from .lin_op import LinOp
import numpy as np
import cv2
from proximal.halide.halide import Halide
from proximal.utils.utils import Impl
class warp(LinOp):
"""Warp using a homography.
"""
def __init__(self, arg, H, implem=None):
self.H = H.copy()
# Compute inverse
self.Hinv = np.zeros(H.shape)
if len(H.shape) > 2:
for j in range(self.H.shape[2]):
self.Hinv[:, :, j] = np.linalg.pinv(H[:, :, j])
else:
self.Hinv = np.linalg.pinv(H)
# Check for the shape
if len(H.shape) < 2 or len(H.shape) > 3:
raise Exception(
'Error, warp supports only up to 4d inputs (expects first 3 to be image).')
# Has to have third dimension
#if len(arg.shape) != 3:
# raise Exception('Images must have third dimension')
shape = arg.shape
if len(H.shape) == 3:
shape += (H.shape[2],)
# Temp array for halide
self.tmpfwd = np.zeros((shape[0], shape[1],
shape[2] if (len(shape) > 2) else 1,
H.shape[2] if (len(H.shape) > 2) else 1),
dtype=np.float32, order='F')
self.tmpadj = np.zeros((shape[0], shape[1], shape[2] if (
len(shape) > 2) else 1), dtype=np.float32, order='F')
super(warp, self).__init__([arg], shape, implem)
def forward(self, inputs, outputs):
"""The forward operator.
Reads from inputs and writes to outputs.
"""
if self.implementation == Impl['halide']:
# Halide implementation
Halide('A_warp').A_warp(inputs[0], self.H, self.tmpfwd) # Call
np.copyto(outputs[0], np.reshape(self.tmpfwd, self.shape))
else:
# CV2 version
inimg = inputs[0]
if len(self.H.shape) == 2:
warpedInput = cv2.warpPerspective(np.asfortranarray(inimg), self.H,
inimg.shape[1::-1], flags=cv2.INTER_LINEAR | cv2.WARP_INVERSE_MAP,
borderMode=cv2.BORDER_CONSTANT, borderValue=0.)
# Necessary due to array layout in opencv
np.copyto(outputs[0], warpedInput)
else:
for j in range(self.H.shape[2]):
warpedInput = cv2.warpPerspective(np.asfortranarray(inimg),
self.H[:, :, j], inimg.shape[1::-1],
flags=cv2.INTER_LINEAR | cv2.WARP_INVERSE_MAP,
borderMode=cv2.BORDER_CONSTANT,
borderValue=0.)
# Necessary due to array layout in opencv
np.copyto(outputs[0][:, :, :, j], warpedInput)
def adjoint(self, inputs, outputs):
"""The adjoint operator.
Reads from inputs and writes to outputs.
"""
if self.implementation == Impl['halide']:
# Halide implementation
Halide('At_warp').At_warp(inputs[0], self.Hinv, self.tmpadj) # Call
if outputs[0].ndim == 2:
np.copyto(outputs[0], self.tmpadj[..., 0])
else:
np.copyto(outputs[0], self.tmpadj)
else:
# CV2 version
inimg = inputs[0]
if len(self.H.shape) == 2:
# + cv2.WARP_INVERSE_MAP
warpedInput = cv2.warpPerspective(np.asfortranarray(inimg), self.H,
inimg.shape[1::-1], flags=cv2.INTER_LINEAR,
borderMode=cv2.BORDER_CONSTANT, borderValue=0.)
np.copyto(outputs[0], warpedInput)
else:
outputs[0][:] = 0.0
for j in range(self.H.shape[2]):
warpedInput = cv2.warpPerspective(np.asfortranarray(inimg[:, :, :, j]),
self.H, inimg.shape[1::-1],
flags=cv2.INTER_LINEAR,
borderMode=cv2.BORDER_CONSTANT,
borderValue=0.)
# Necessary due to array layout in opencv
outputs[0] += warpedInput
# TODO what is the spectral norm of a warp?
|
h2o-py/tests/testdir_munging/pyunit_isax.py | ahmedengu/h2o-3 | 6,098 | 12666317 | <gh_stars>1000+
from __future__ import print_function
import sys
sys.path.insert(1,"../../")
import h2o
from tests import pyunit_utils
def isax():
df = h2o.create_frame(rows=1,cols=256,real_fraction=1.0,missing_fraction=0.0,seed=123)
df2 = df.cumsum(axis=1)
res = df2.isax(num_words=10,max_cardinality=10)
res.show()
answer = "0^10_0^10_0^10_0^10_5^10_7^10_8^10_9^10_9^10_8^10"
    assert answer == res[0,0], "expected isax index to be " + answer + " but got " + res[0,0] + " instead."
h2o.remove(df)
h2o.remove(df2)
h2o.remove(res)
if __name__ == "__main__":
pyunit_utils.standalone_test(isax)
else:
isax()
|
test/scrapers/test_hitrust_matches.py | HanselD/aws-allowlister | 180 | 12666324 | import unittest
from aws_allowlister.database.compliance_data import ComplianceData
from aws_allowlister.database.database import connect_db
compliance_data = ComplianceData()
db_session = connect_db()
class HitrustQATestCase(unittest.TestCase):
def test_gh_51_HITRUST_compliant_services(self):
results = compliance_data.get_compliant_services(
db_session=db_session, compliance_standard="HITRUST"
)
expected_results = [
"athena",
"kendra",
"guardduty",
"sagemaker",
"states"
]
# print(len(results))
for expected_result in expected_results:
# print(f"{expected_result}: in {expected_result in results}")
self.assertTrue(expected_result in results)
|
src/DataJoin/data_join/example_id_appender.py | huangwei19/9nfl | 103 | 12666326 | # Copyright 2020 The 9nFL Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import threading
class AppendExamplesManager(object):
def __init__(self, queue, partition_id):
self._lock = threading.Lock()
self._queue = queue
self._partition_id = partition_id
self._send_example_finished = False
self._next_example_index = 0
def get_next_example_index(self):
with self._lock:
return self._next_example_index
def append_batch_examples_into_queue(self, batch_examples):
with self._lock:
assert batch_examples, "batch_examples is None"
assert batch_examples.partition_id == self._partition_id, \
"the partition id of example batch mismatch with " \
"partition id of examples appending into queue : {} != {}".format(
self._partition_id, batch_examples.partition_id
)
self._next_example_index += len(batch_examples.example_id) - 1
self._queue.put(batch_examples)
return True
def finish_send_examples(self):
with self._lock:
self._send_example_finished = True
def is_send_example_finished(self):
with self._lock:
return self._send_example_finished
def need_append_into_queue(self):
with self._lock:
if not self._queue.empty():
return True
return False
|
zeus/modules/operators/quant/__init__.py | shaido987/vega | 240 | 12666361 | import zeus
if zeus.is_tf_backend():
from .tensorflow_quant import *
elif zeus.is_torch_backend():
from .pytorch_quant import *
|
data/test/python/9b5bb2aa0435ae771160122adef80202b6062e82test_nsobjectcontroller.py | harshp8l/deep-learning-lang-detection | 132 | 12666363 | from AppKit import *
from PyObjCTools.TestSupport import *
class TestNSObjectController (TestCase):
def testMethods(self):
self.assertResultIsBOOL(NSObjectController.automaticallyPreparesContent)
self.assertArgIsBOOL(NSObjectController.setAutomaticallyPreparesContent_, 0)
self.assertResultIsBOOL(NSObjectController.isEditable)
self.assertArgIsBOOL(NSObjectController.setEditable_, 0)
self.assertResultIsBOOL(NSObjectController.canAdd)
self.assertResultIsBOOL(NSObjectController.canRemove)
self.assertResultIsBOOL(NSObjectController.validateUserInterfaceItem_)
self.assertResultIsBOOL(NSObjectController.fetchWithRequest_merge_error_)
self.assertArgIsBOOL(NSObjectController.fetchWithRequest_merge_error_, 1)
self.assertArgIsOut(NSObjectController.fetchWithRequest_merge_error_, 2)
self.assertResultIsBOOL(NSObjectController.usesLazyFetching)
self.assertArgIsBOOL(NSObjectController.setUsesLazyFetching_, 0)
if __name__ == "__main__":
main()
|
Python/CombinationSumTest.py | TonnyL/Windary | 205 | 12666382 | <reponame>TonnyL/Windary
from unittest import TestCase
from CombinationSum import CombinationSum
class TestCombinationSum(TestCase):
def test_combinationSum(self):
cs = CombinationSum()
list0 = cs.combinationSum([2, 3, 6, 7], 7)
self.assertTrue(len(list0) == 2)
self.assertTrue(list0.__contains__([7]))
self.assertTrue(list0.__contains__([2, 2, 3]))
self.assertEqual(cs.combinationSum([1], 3), [[1, 1, 1]])
list1 = cs.combinationSum([1, 2], 4)
self.assertTrue(len(list1))
self.assertTrue(list1.__contains__([1, 1, 1, 1]))
self.assertTrue(list1.__contains__([1, 1, 2]))
self.assertTrue(list1.__contains__([2, 2]))
|
runtime/stdlib/api.py | cheery/lever | 136 | 12666423 | <reponame>cheery/lever
from rpython.translator.platform import platform
import pathobj
from space import *
import ffi
import json, os
class ApiConfig:
def __init__(self):
self.headers_dir = None
conf = ApiConfig()
def init(lever_path):
conf.headers_dir = pathobj.concat(lever_path, pathobj.parse(u"headers"))
class Api(Object):
def __init__(self, constants, types, variables, dependencies, decorator):
self.cache = {}
self.typecache = {}
self.constants = constants
self.types = types
self.variables = variables
self.dependencies = dependencies
self.cycle_catch = {}
self.decorator = decorator
def getitem(self, name):
if not isinstance(name, String):
raise OldError(u"API.getitem requires a string")
name = name
if name.string in self.cache:
return self.cache[name.string]
self.cache[name.string] = result = self.lookup(name)
return result
def lookup(self, name):
if self.constants.contains(name):
return self.constants.getitem(name)
if self.variables.contains(name):
decl = self.variables.getitem(name)
cname = decl.getitem(String(u"name"))
if not isinstance(cname, String):
raise OldError(u"incorrect name record")
if not isinstance(name, String):
raise OldError(u"incorrect name record")
ctype = decl.getitem(String(u"type"))
return ffi.Wrap(cname.string, self.build_ctype(name.string, ctype))
return self.lookup_type(name)
def lookup_type(self, name):
if isinstance(name, String):
if name.string in self.typecache:
return self.typecache[name.string]
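            # A trailing '*' in a type name denotes a pointer to the named base type.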
if name.string.endswith(u'*'):
ctype = ffi.Pointer(self.lookup_type(String(name.string[:-1])))
self.typecache[name.string] = ctype
return ctype
if self.types.contains(name):
decl = self.types.getitem(name)
ctype = self.build_ctype(name.string, decl)
self.typecache[name.string] = ctype
return ctype
if name.string in ffi.systemv.types:
return ffi.systemv.types[name.string]
if name.string == u'void':
return null
if u"." in name.string and self.dependencies is not None:
namespace, name = name.string.split(u".", 1)
return self.dependencies.getitem(String(namespace)).getattr(name)
raise unwind(LKeyError(self, name))
else:
return self.build_ctype(u"<unnamed>", name)
def build_ctype(self, name, decl):
if isinstance(decl, String):
return self.lookup_type(decl)
if self.decorator is not None:
return ffi.to_type(self.decorator.call([self, String(name), decl]))
else:
return self.build_ctype_raw(name, decl)
def build_ctype_raw(self, name, decl):
which = decl.getitem(String(u"type"))
if isinstance(which, String) and which.string == u"cfunc":
restype = decl.getitem(String(u'restype'))
argtypes_list = decl.getitem(String(u'argtypes'))
if not isinstance(argtypes_list, List):
raise OldError(u"incorrect function record")
restype = self.lookup_type(restype)
argtypes = []
for argtype in argtypes_list.contents:
argtypes.append(self.lookup_type(argtype))
return ffi.CFunc(restype, argtypes)
if isinstance(which, String) and which.string == u"union":
if decl in self.cycle_catch:
return self.cycle_catch[decl]
fields = decl.getitem(String(u"fields"))
self.cycle_catch[decl] = ctype = ffi.Union(None, name)
ctype.declare(self.parse_fields(name, fields))
return ctype
if isinstance(which, String) and which.string == u"struct":
if decl in self.cycle_catch:
return self.cycle_catch[decl]
fields = decl.getitem(String(u"fields"))
self.cycle_catch[decl] = ctype = ffi.Struct(None, name)
ctype.declare(self.parse_fields(name, fields))
return ctype
if isinstance(which, String) and which.string == u"opaque":
return ffi.Struct(None, name)
if isinstance(which, String) and which.string == u"array":
ctype = self.lookup_type(decl.getitem(String(u'ctype')))
length = decl.getitem(String(u"length"))
if length is null:
return ffi.Array(ctype)
elif isinstance(length, Integer):
return ffi.Array(ctype, length.value)
else:
raise OldError(name + u": incorrect length value: %s" % length.repr())
if isinstance(which, String) and which.string == u"pointer":
to_obj = decl.getitem(String(u'to'))
# A little hack to name common opaque pointers.
if isinstance(to_obj, Dict):
to = self.build_ctype(name, to_obj)
else:
to = self.lookup_type(to_obj)
return ffi.Pointer(to)
if isinstance(which, String) and which.string == u"enum":
ctype = self.lookup_type(decl.getitem(String(u'ctype')))
constants = decl.getitem(String(u"constants"))
if not isinstance(constants, Dict):
raise unwind(LTypeError(name + u": expected constant table to be dictionary"))
table = {}
for name_, const in constants.data.iteritems():
if not isinstance(name_, String):
raise unwind(LTypeError(name + u": expected constants table key to be string"))
if not isinstance(const, Integer):
raise unwind(LTypeError(name + u": expected constants table value to be integer"))
table[name_.string] = const.value
return ffi.Bitmask(ffi.to_type(ctype), table, multichoice=False)
if isinstance(which, String) and which.string == u"bitmask":
ctype = self.lookup_type(decl.getitem(String(u'ctype')))
constants = decl.getitem(String(u"constants"))
if not isinstance(constants, Dict):
raise unwind(LTypeError(name + u": expected constant table to be dictionary"))
table = {}
for name_, const in constants.data.iteritems():
if not isinstance(name_, String):
raise unwind(LTypeError(name + u": expected constants table key to be string"))
if not isinstance(const, Integer):
raise unwind(LTypeError(name + u": expected constants table value to be integer"))
table[name_.string] = const.value
return ffi.Bitmask(ffi.to_type(ctype), table, multichoice=True)
raise OldError(name + u": no ctype builder for " + which.repr())
def parse_fields(self, name, fields_list):
if not isinstance(fields_list, List):
raise OldError(name + u": ctype fields, expected list")
fields = []
for field in fields_list.contents:
field_name = field.getitem(Integer(0))
if not isinstance(field_name, String):
raise OldError(name + u": first column should be the name")
ctype = self.lookup_type(field.getitem(Integer(1)))
fields.append((field_name.string, ctype))
return fields
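    # Illustrative note (added for clarity, not part of the original module): the decl values
    # handled by build_ctype_raw above mirror the entries of the JSON header files loaded by
    # read_file. Judging from the getitem lookups, entries look roughly like:
    #
    #   {"type": "cfunc", "restype": "int", "argtypes": ["int", "int"]}
    #   {"type": "struct", "fields": [["x", "int"], ["y", "int"]]}
    #   {"type": "pointer", "to": {"type": "opaque"}}
    #
    # The concrete type names ("int") are placeholders, not names guaranteed by this module.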
# I'm not sure how this method should be called.
@Api.method(u"build_type", signature(Api, String, Object))
def api_build_type(api, name, obj):
return api.build_ctype_raw(name.string, obj)
@Api.method(u"lookup_type", signature(Api, Object))
def api_lookup_type(api, obj):
return api.lookup_type(obj)
class FuncLibrary(Object):
def __init__(self, api, func):
self.func = func
self.api = api
self.namespace = {}
def getattr(self, name):
if name in self.namespace:
return self.namespace[name]
try:
c = self.api.getitem(String(name))
except Unwinder, uw:
if isinstance(uw.exception, LKeyError):
return Object.getattr(self, name)
raise uw
if isinstance(c, ffi.Wrap):
cname = c.cname
ctype = c.ctype
else:
return c
res = self.func.call([String(cname)])
if isinstance(res, ffi.Mem):
return ffi.Mem(ctype, res.pointer, 1)
elif res is null:
raise unwind(LAttributeError(self, name))
else:
raise unwind(LTypeError(
u"expected api(%s) is memory object, got %s" % (
cname, res.repr())))
@FuncLibrary.instantiator
@signature(Object, Object)
def _(api, func):
return FuncLibrary(api, func)
module = Module(u'api', {
u"so_ext": from_cstring(platform.so_ext),
u"funclibrary": FuncLibrary.interface,
}, frozen=True)
def builtin(name, deco):
def _builtin_(fn):
module.setattr_force(name, Builtin(deco(fn)))
return fn
return _builtin_
@builtin(u"open", signature(String, Object, Object, Object, optional=3))
def open(path, func, dependencies, decorator):
print "api.open will be soon removed in favor to api.open_nobind"
print "Fix code using api.open(...) to use api.library(...)"
return library(path, func, dependencies, decorator)
@builtin(u"library", signature(String, Object, Object, Object, optional=3))
def library(path, func, dependencies, decorator):
path = path.string
if path.endswith(u".so") or path.endswith(u".json") or path.endswith(u".dll"):
path = path.rsplit(u'.', 1)[0]
json_path = pathobj.parse(path + u".json")
so_path = path + u"." + platform.so_ext.decode('utf-8')
api = read_file(json_path, dependencies, decorator)
if func is not None:
return FuncLibrary(api, func)
return ffi.Library.interface.call([String(so_path), api])
@builtin(u"open_nobind", signature(pathobj.Path, Object, Object, optional=2))
def open_nobind(path, dependencies, decorator):
print "api.open_nobind will be removed in favor to api.read_file"
print "Fix code using api.open_nobind(...) to use api.read_file(...)"
return read_file(path, dependencies, decorator)
@builtin(u"read_file", signature(pathobj.Path, Object, Object, optional=2))
def read_file(path, dependencies, decorator):
basename = path.getattr(u"basename")
if isinstance(basename, String):
if not basename.string.endswith(u".json"):
dirname = path.getattr(u"dirname")
path = pathobj.concat(dirname,
pathobj.parse(basename.string + u".json"))
path = pathobj.concat(conf.headers_dir, path)
try:
apispec = json.read_file(path)
except OSError as error:
raise OldError(u"[Errno %d]: %s\n" % (error.errno, pathobj.stringify(path)))
return read_object(apispec, dependencies, decorator)
@builtin(u"read_object", signature(Object, Object, Object, optional=2))
def read_object(apispec, dependencies, decorator):
return Api(
apispec.getitem(String(u"constants")),
apispec.getitem(String(u"types")),
apispec.getitem(String(u"variables")),
dependencies,
decorator)
|
tests/test_provider_banzaicloud_k8s.py | mjuenema/python-terrascript | 507 | 12666448 | <reponame>mjuenema/python-terrascript
# tests/test_provider_banzaicloud_k8s.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:19:55 UTC)
def test_provider_import():
import terrascript.provider.banzaicloud.k8s
def test_resource_import():
from terrascript.resource.banzaicloud.k8s import k8s_manifest
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.banzaicloud.k8s
#
# t = terrascript.provider.banzaicloud.k8s.k8s()
# s = str(t)
#
# assert 'https://github.com/banzaicloud/terraform-provider-k8s' in s
# assert '0.9.1' in s
|
tests/test_service_catalog/test_api/test_service/test_service_put.py | LaudateCorpus1/squest | 112 | 12666479 | <filename>tests/test_service_catalog/test_api/test_service/test_service_put.py
from rest_framework import status
from rest_framework.reverse import reverse
from tests.test_service_catalog.base_test_request import BaseTestRequest
from tests.utils import check_data_in_dict
class TestApiServicePut(BaseTestRequest):
def setUp(self):
super(TestApiServicePut, self).setUp()
self.put_data = {
'name': "My new name",
'description': "My new description",
'billing_group_id': self.service_test.billing_group_id,
'billing_group_is_shown': self.service_test.billing_group_is_shown,
'billing_group_is_selectable': self.service_test.billing_group_is_selectable,
'billing_groups_are_restricted': self.service_test.billing_groups_are_restricted,
'enabled': self.service_test.enabled
}
self.kwargs = {
'pk': self.service_test.id
}
self.get_service_details_url = reverse('api_service_details', kwargs=self.kwargs)
def test_admin_put_service(self):
response = self.client.put(self.get_service_details_url, data=self.put_data, content_type="application/json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
check_data_in_dict(self, [self.put_data], [response.data])
def test_admin_cannot_put_on_service_not_full(self):
self.put_data.pop('name')
response = self.client.put(self.get_service_details_url, data=self.put_data, content_type="application/json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_customer_cannot_put_service(self):
self.client.force_login(user=self.standard_user)
response = self.client.put(self.get_service_details_url, data=self.put_data, content_type="application/json")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_cannot_put_service_when_logout(self):
self.client.logout()
response = self.client.put(self.get_service_details_url, data=self.put_data, content_type="application/json")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
pytorch_tools/segmentation_models/__init__.py | YevheniiSemendiak/pytorch-tools | 155 | 12666481 | <reponame>YevheniiSemendiak/pytorch-tools<filename>pytorch_tools/segmentation_models/__init__.py<gh_stars>100-1000
from .unet import Unet
from .linknet import Linknet
from .deeplabv3_plus import DeepLabV3
from .segm_fpn import SegmentationFPN
from .segm_bifpn import SegmentationBiFPN
from .hrnet import HRNet
|
infer_tools_tree/metric/dijstra.py | gvieralopez/Coronary-Artery-Tracking-via-3D-CNN-Classification | 107 | 12666545 | <reponame>gvieralopez/Coronary-Artery-Tracking-via-3D-CNN-Classification<gh_stars>100-1000
# -*- coding: UTF-8 -*-
# @Time : 04/08/2020 15:38
# @Author : QYD
# @FileName: dijstra.py
# @Software: PyCharm
import heapq
import numpy as np
import copy
def get_distance(p1, p2):
return np.linalg.norm(np.array(p1) - np.array(p2))
class QueueElement:
def __init__(self, dis, connection: list):
self.dis = dis
self.connection = connection
def __lt__(self, other):
return self.dis < other.dis
def correspondence(ref, infer):
"""
:param ref: 参考点
:param infer:
:return:
"""
refer_size = len(ref)
infer_size = len(infer)
if infer == [] or ref == []:
print("data error")
Done = 0
OnFront = 1
NotVisited = 2
    nodeStatus = np.ones((refer_size, infer_size)) * NotVisited  # array recording the visit status of each node
    distanceMap = np.ones((refer_size, infer_size)) * float("inf")  # accumulated distance array
    prevPointer = np.zeros((refer_size, infer_size))  # used to backtrack the matched points
#
q = []
dist = get_distance(ref[0], infer[0])
priorityQueue = QueueElement(dist, [0, 0])
    heapq.heappush(q, priorityQueue)  # build the priority queue
nodeStatus[0][0] = OnFront
distanceMap[0][0] = dist
while q and q[0].dis < distanceMap[-1][-1]:
queueElem = copy.deepcopy(q[0].connection)
dist = q[0].dis
heapq.heappop(q)
        while q and nodeStatus[queueElem[0]][queueElem[1]] == Done:  # queue not empty and this node already processed
queueElem = copy.deepcopy(q[0].connection)
dist = q[0].dis
heapq.heappop(q)
if nodeStatus[queueElem[0]][queueElem[1]] == Done:
break
if dist > distanceMap[-1][-1]:
break
nodeStatus[queueElem[0]][queueElem[1]] = Done
distanceMap[queueElem[0]][queueElem[1]] = dist
if queueElem[1] < infer_size - 1:
newDist = dist + get_distance(ref[queueElem[0]], infer[queueElem[1] + 1])
if nodeStatus[queueElem[0]][queueElem[1] + 1] == Done:
continue
elif nodeStatus[queueElem[0]][queueElem[1] + 1] == OnFront:
if newDist >= distanceMap[queueElem[0]][queueElem[1] + 1]:
continue
nodeStatus[queueElem[0]][queueElem[1] + 1] = OnFront
distanceMap[queueElem[0]][queueElem[1] + 1] = newDist
prevPointer[queueElem[0]][queueElem[1] + 1] = 2
heapq.heappush(q, QueueElement(newDist, [queueElem[0], queueElem[1] + 1]))
if queueElem[0] < refer_size - 1:
newDist = dist + get_distance(ref[queueElem[0] + 1], infer[queueElem[1]])
if nodeStatus[queueElem[0] + 1][queueElem[1]] == Done:
continue
elif nodeStatus[queueElem[0] + 1][queueElem[1]] == OnFront:
if newDist >= distanceMap[queueElem[0] + 1][queueElem[1]]:
continue
nodeStatus[queueElem[0] + 1][queueElem[1]] = OnFront
distanceMap[queueElem[0] + 1][queueElem[1]] = newDist
prevPointer[queueElem[0] + 1][queueElem[1]] = 1
heapq.heappush(q, QueueElement(newDist, [queueElem[0] + 1, queueElem[1]]))
revPath = []
revPath.append([refer_size - 1, infer_size - 1])
while revPath[-1][0] or revPath[-1][1]:
pointer = prevPointer[revPath[-1][0]][revPath[-1][1]]
if pointer == 1:
revPath.append([revPath[-1][0] - 1, revPath[-1][1]])
elif pointer == 2:
revPath.append([revPath[-1][0], revPath[-1][1] - 1])
else:
raise ValueError
revPath.reverse()
return revPath
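# Minimal usage sketch (added for illustration, not part of the original module): the point
# coordinates below are hypothetical; any two sequences of same-dimensional points work.
if __name__ == "__main__":
    ref_points = [[0.0, 0.0], [1.0, 0.0], [2.0, 0.0]]    # reference centerline points (made up)
    infer_points = [[0.1, 0.0], [1.1, 0.1], [2.0, 0.1]]  # inferred centerline points (made up)
    matched = correspondence(ref_points, infer_points)
    # each entry is a [reference_index, inferred_index] pair along the minimum-cost matching path
    print(matched)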
|
zerver/migrations/0332_realmuserdefault.py | Pulkit007/zulip | 17,004 | 12666589 | <reponame>Pulkit007/zulip
# Generated by Django 3.2.2 on 2021-05-31 16:49
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("zerver", "0331_scheduledmessagenotificationemail"),
]
operations = [
migrations.CreateModel(
name="RealmUserDefault",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("enter_sends", models.BooleanField(null=True, default=False)),
("left_side_userlist", models.BooleanField(default=False)),
("default_language", models.CharField(default="en", max_length=50)),
("default_view", models.TextField(default="recent_topics")),
("dense_mode", models.BooleanField(default=True)),
("fluid_layout_width", models.BooleanField(default=False)),
("high_contrast_mode", models.BooleanField(default=False)),
("translate_emoticons", models.BooleanField(default=False)),
("twenty_four_hour_time", models.BooleanField(default=False)),
("starred_message_counts", models.BooleanField(default=True)),
("color_scheme", models.PositiveSmallIntegerField(default=1)),
("demote_inactive_streams", models.PositiveSmallIntegerField(default=1)),
(
"emojiset",
models.CharField(
choices=[
("google", "Google modern"),
("google-blob", "Google classic"),
("twitter", "Twitter"),
("text", "Plain text"),
],
default="google-blob",
max_length=20,
),
),
("enable_stream_desktop_notifications", models.BooleanField(default=False)),
("enable_stream_email_notifications", models.BooleanField(default=False)),
("enable_stream_push_notifications", models.BooleanField(default=False)),
("enable_stream_audible_notifications", models.BooleanField(default=False)),
("notification_sound", models.CharField(default="zulip", max_length=20)),
("wildcard_mentions_notify", models.BooleanField(default=True)),
("enable_desktop_notifications", models.BooleanField(default=True)),
("pm_content_in_desktop_notifications", models.BooleanField(default=True)),
("enable_sounds", models.BooleanField(default=True)),
("enable_offline_email_notifications", models.BooleanField(default=True)),
("message_content_in_email_notifications", models.BooleanField(default=True)),
("enable_offline_push_notifications", models.BooleanField(default=True)),
("enable_online_push_notifications", models.BooleanField(default=True)),
("desktop_icon_count_display", models.PositiveSmallIntegerField(default=1)),
("enable_digest_emails", models.BooleanField(default=True)),
("enable_login_emails", models.BooleanField(default=True)),
("enable_marketing_emails", models.BooleanField(default=True)),
("realm_name_in_notifications", models.BooleanField(default=False)),
("presence_enabled", models.BooleanField(default=True)),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
),
),
],
options={
"abstract": False,
},
),
]
|
uxy/uxy_ls.py | sustrik/uxy | 735 | 12666624 | <reponame>sustrik/uxy
# Copyright (c) 2019 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import argparse
import grp
import pwd
import re
import sys
from uxy import base
def _linux(args, uxy_args):
parser = argparse.ArgumentParser("__main__.py ls", add_help=False)
parser.add_argument("--author", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-b", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--escape", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-C", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--color", nargs="?", default=argparse.SUPPRESS)
parser.add_argument("-D", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-f", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--format", nargs="?", default=argparse.SUPPRESS)
parser.add_argument("--full-time", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-g", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-h", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--human-readable", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--si", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-G", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--no-group", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-i", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--inode", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-k", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--kibibytes", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-l", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-m", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-N", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--literal", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-o", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-q", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--hide-control-chars", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-Q", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--quote-name", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--quoting-style", nargs=1, default=argparse.SUPPRESS)
parser.add_argument("-s", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--time", nargs=1, default=argparse.SUPPRESS)
parser.add_argument("--time-style", nargs=1, default=argparse.SUPPRESS)
parser.add_argument("-T", nargs=1, default=argparse.SUPPRESS)
parser.add_argument("--tabsize", nargs=1, default=argparse.SUPPRESS)
parser.add_argument("-w", nargs=1, default=argparse.SUPPRESS)
parser.add_argument("--width", nargs=1, default=argparse.SUPPRESS)
parser.add_argument("-x", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-Z", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--context", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("-1", action="store_true", default=argparse.SUPPRESS)
parser.add_argument("--help", action="store_true", default=argparse.SUPPRESS)
base.check_args(args, parser)
if uxy_args.long:
fmtargs = ['-lnNisZ', '--time-style=full-iso']
regexp = re.compile(r'\s*([^\s]*)\s+([^\s]*)\s+(.)([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+(.*)')
fmt = base.Format("INODE BLOCKS TYPE PERMISSIONS LINKS OWNER GROUP CONTEXT SIZE TIME NAME")
owner_col = 6
group_col = 7
else:
fmtargs = ['-lnN', '--time-style=full-iso']
regexp = re.compile(r'\s*(.)([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+(.*)')
fmt = base.Format("TYPE PERMISSIONS LINKS OWNER GROUP SIZE TIME NAME")
owner_col = 4
group_col = 5
resolve_ids = True
if "-n" in args[1:] or "--numeric-uid-gid" in args[1:]:
resolve_ids = False
proc = base.launch(uxy_args, ['ls'] + fmtargs + args[1:])
base.writeline(fmt.render())
path = ""
for ln in proc:
if ln.startswith('total'):
continue
if ln == "":
# When running with -R this is the name of the directory.
ln = proc.readline()
if ln.endswith(":"):
path = ln[:-1] + "/"
continue
m = regexp.match(ln)
if not m:
continue
fields = []
for i in range(1, regexp.groups - 3):
field = m.group(i)
# In general, __main__.py is not supposed to supplant the functionality provided
# by the wrapped tool. However, there's little option here: User names
# can contain spaces (e.g. when provided by LDAP), but ls tool doesn't
      # escape spaces in the names even when run with the -b parameter.
if resolve_ids:
try:
if i == owner_col:
field = pwd.getpwuid(int(field)).pw_name
elif i == group_col:
field = grp.getgrgid(int(field)).gr_name
except (KeyError, ValueError):
pass
fields.append(base.encode_field(field))
# Convert to actual ISO8601 format.
time = "%sT%s%s:%s" % (
m.group(regexp.groups - 3),
m.group(regexp.groups - 2),
m.group(regexp.groups - 1)[:-2],
m.group(regexp.groups - 1)[-2:])
fields.append(base.encode_field(time))
fields.append(base.encode_field(path + m.group(regexp.groups)))
base.writeline(fmt.render(fields))
return proc.wait()
def _bsd(args, uxy_args):
fmtargs = ['-l']
# -rw-r--r-- 1 501 20 1025 May 31 07:11:49 2019 LICENSE
regexp = re.compile(r'\s*(.)([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*\s+[^\s]*\s+[^\s]*\s+[^\s]*)\s+(.*)')
fmt = base.Format("TYPE PERMISSIONS LINKS OWNER GROUP SIZE TIME NAME")
proc = base.launch(uxy_args, ['ls'] + fmtargs + args[1:])
base.writeline(fmt.render())
path = ""
for ln in proc:
if ln.startswith('total'):
continue
if ln == "":
# When running with -R this is the name of the directory.
ln = proc.readline()
if ln.endswith(":"):
path = ln[:-1] + "/"
continue
m = regexp.match(ln)
if not m:
continue
fields = []
for i in range(1, regexp.groups + 1):
fields.append(base.encode_field(m.group(i)))
base.writeline(fmt.render(fields))
return proc.wait()
def ls(args, uxy_args):
if uxy_args.platform.startswith("linux"):
return _linux(args, uxy_args)
else:
return _bsd(args, uxy_args)
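# Illustrative note (added for clarity, not part of the original module): the entry point takes
# the raw argument vector plus the parsed uxy options object; a call is assumed to look roughly
# like
#
#   rc = ls(["ls", "-l", "/tmp"], uxy_args)   # uxy_args.platform e.g. "linux...", uxy_args.long True/False
#
# and on Linux the command emits a UXY header line such as
#   TYPE PERMISSIONS LINKS OWNER GROUP SIZE TIME NAME
# followed by one encoded row per file.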
|
backend/src/baserow/contrib/database/api/formula/urls.py | ashishdhngr/baserow | 839 | 12666654 | from django.urls import re_path
from baserow.contrib.database.api.formula.views import TypeFormulaView
app_name = "baserow.contrib.database.api.export"
urlpatterns = [
re_path(
r"(?P<field_id>[0-9]+)/type/$",
TypeFormulaView.as_view(),
name="type_formula",
),
]
|
tests/replication_tests/test_lr3opt.py | juicetinliu/VeRyPy | 156 | 12666704 | # -*- coding: utf-8 -*-
import unittest
from collections import namedtuple
from os import path
from random import shuffle
import numpy as np
from replicationbase import ReplicationBase, REPRO_QUALITY_LEVELS
from classic_heuristics.lr3opt import lr3opt_init, _check_lr3opt_move, _init_with_random
from cvrp_ops import calculate_objective
def _random_init_lr3opt(pts,D,d,C,L,st,times):
best_sol = None
best_f = float('inf')
for t in range(times):
sol = lr3opt_init(D, d, C, L, initialization_algorithm=_init_with_random)
sol_f = calculate_objective(sol, D)
        if sol_f < best_f:
            best_sol = sol
            best_f = sol_f
return best_sol
def _random_init_lr3opt_once(pts,D,d,C,L,st):
return lr3opt_init(D, d, C, L, initialization_algorithm=_init_with_random)
LiteratureResult = namedtuple('LiteratureResult', 'obj_f cpu_time')
class TestLR3OPTAlgorithm(unittest.TestCase):
def setUp(self):
pass
def test_penalty_calculation_fig1_example(self):
D = np.array([
[ 0, 19, 39, 51, 66, 59, 42, 22, 30, 40, 54, 68, 73, 62, 41],
[19, 0, 21, 37, 54, 52, 36, 21, 36, 45, 58, 69, 77, 74, 54],
[39, 21, 0, 21, 37, 40, 30, 27, 39, 46, 59, 65, 76, 81, 63],
[51, 37, 21, 0, 17, 20, 19, 31, 34, 37, 48, 49, 61, 75, 60],
[66, 54, 37, 17, 0, 16, 28, 45, 43, 43, 50, 45, 59, 79, 67],
[59, 52, 40, 20, 16, 0, 17, 37, 30, 28, 34, 30, 43, 63, 53],
[42, 36, 30, 19, 28, 17, 0, 19, 15, 18, 30, 34, 45, 55, 41],
[22, 21, 27, 31, 45, 37, 19, 0, 15, 24, 37, 48, 56, 55, 35],
[30, 36, 39, 34, 43, 30, 15, 15, 0, 10, 22, 34, 41, 42, 26],
[40, 45, 46, 37, 43, 28, 18, 24, 10, 0, 13, 25, 32, 37, 24],
[54, 58, 59, 48, 50, 34, 30, 37, 22, 13, 0, 16, 19, 28, 24],
[68, 69, 65, 49, 45, 30, 34, 48, 34, 25, 16, 0, 13, 40, 40],
[73, 77, 76, 61, 59, 43, 45, 56, 41, 32, 19, 13, 0, 32, 39],
[62, 74, 81, 75, 79, 63, 55, 55, 42, 37, 28, 40, 32, 0, 20],
[41, 54, 63, 60, 67, 53, 41, 35, 26, 24, 24, 40, 39, 20, 0]])
C = 140
#d = [30,15,15,15,15,15,15, #route 3
# 10,20,20,20,30,30,20] #route 1
#sol = [0,1,2,3,4,5,6,7,0,8,9,10,11,12,13,14,0]
self.assertAlmostEqual( -10, _check_lr3opt_move(D, C, None, 60, 0,
[[0,4],[3,1],[2,5]],
[6,7,8,9,9,10], #end_p
[6,7,0,8,8,9], #end_n
[105,15,0,10,10,140], #cum_d
None,
8, None, [2.0, None]))
class TestStewartGoldenReplications(ReplicationBase):
def setUp(self):
self.algorithms = [
("lr3opt_det", lambda pts,D,d,C,L,st:\
lr3opt_init(D, d, C, L)),
("lr3opt_ran", _random_init_lr3opt_once)]
self.problem_names = [
"00-CW64_n31_k8c.vrp",
"05-E051-k5.vrp",
"06-E076-k15s.vrp",
"07-E076-k10s.vrp",
"08-E076-k8s.vrp",
"09-E076-k7s.vrp",
"10-E101-k14s.vrp",
"11-E101-k8.vrp"]
self.targets = [(1212,521,1058,847,751,692,1117,829), #det
(1212,521,1058,847,751,692,1117,829)] #rnd
self.problem_path = path.join("Classic", "GilletMiller1974")
def test_deterministic_LR3OPT_with_GilletMiller1974_instances(self):
avgq, sdq, minq, maxq = self.solve_problems(
"lr3opt_det", require_K = False,
round_f_func = np.int,
cost_compare = False)
self.assertTrue( abs(avgq) < REPRO_QUALITY_LEVELS.D_AVG, "Average quality not replicated (%.2f)"%avgq)
self.assertTrue( abs(sdq) < REPRO_QUALITY_LEVELS.B_SD, "There is too much variation between instances")
#TestStewartGoldenReplications.test_stochastic_LR3OPT_with_GilletMiller1974_instances
def test_stochastic_LR3OPT_with_GilletMiller1974_instances(self):
repeats_per_problem = zip(list(range(8)), [10, 10, 7, 8*3, 10, 10*2, 3, 6*2])
bestqs = [float('inf')]*8
for i, repeats in repeats_per_problem:
for repeat in range(repeats):
problem_name = self.problem_names[i]
print "Repeat %d of %d for %s"%(repeat+1,repeats,problem_name)
avgq, sdq, minq, maxq = self.solve_problems(
"lr3opt_ran", instance_idx = i, require_K = False,
round_f_func = np.int,
#round_D_func = np.around,
cost_compare = False)
if avgq<bestqs[i]:
bestqs[i] = avgq
# check the average gap of [10, 10, 7, 8, 10, 10, 3, 6] repeats
avgq = np.average(bestqs)
sdq = np.std(bestqs)
# Usually this assertion succeeds, but because it is stochastic, it
# is possible that by chance some of the results are (much) worse.
# Then, it is best to try again on bump the level up to B.
self.assertTrue( abs(avgq) < REPRO_QUALITY_LEVELS.A_AVG, "Average quality not replicated (%.2f)"%avgq)
self.assertTrue( abs(sdq) < REPRO_QUALITY_LEVELS.A_SD, "There is too much variation between instances")
if __name__ == '__main__':
unittest.main() |
kivymd/uix/templates/rotatewidget/__init__.py | marvelous-benji/KivyMD | 1,111 | 12666725 | from .rotatewidget import RotateWidget
|
appscan/getJson.py | eiri/java-sdk-core | 686 | 12666744 | <reponame>eiri/java-sdk-core
import json
import sys
def main():
data = load_data()
printFields(data)
def load_data():
data = ""
filename = sys.argv[1]
with open(filename, "r") as read:
data = json.load(read)
if isinstance(data, list):
data = data[0]
return data
def printFields(data):
fields = sys.argv[2:]
for i in fields:
print(data[i])
main()
|
nodejs/node_modules/speaker/deps/mpg123/scripts/tag_lyrics.py | qcuong98/KT-AI-Hackathon | 323 | 12666780 | <gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# needs mutagen
# grabbed from: http://code.activestate.com/recipes/577138-embed-lyrics-into-mp3-files-using-mutagen-uslt-tag/
# simplified to only work on one file and get lyrics from stdin
# I suspect this is public domain code. Just a usage example of the mutagen lib.
import os
import sys
import codecs
from mutagen.mp3 import MP3
from mutagen.id3 import ID3NoHeaderError
from mutagen.id3 import ID3, USLT
TEXT_ENCODING = 'utf8'
TEXT_LANG = 'XXX'
TEXT_DESC = ''
# get workdir from first arg or use current dir
if (len(sys.argv) > 1):
fname = sys.argv[1]
print "fname=" + fname
else:
print 'Give me at least a file name to work on, plus the lyrics from stdin'
print 'Optionally, you can provide the language (3 lowercase letters) of the lyrics and a description'
sys.exit()
if (len(sys.argv) > 2):
TEXT_LANG = sys.argv[2]
if (len(sys.argv) > 3):
TEXT_DESC = sys.argv[3]
print "reading lyrics from standard input ..."
lyrics = sys.stdin.read().strip()
# try to find the right encoding
for enc in ('utf8','iso-8859-1','iso-8859-15','cp1252','cp1251','latin1'):
try:
lyrics = lyrics.decode(enc)
TEXT_DESC = TEXT_DESC.decode(enc)
print enc,
break
except:
pass
print "Adding lyrics to " + fname
print "Language: " + TEXT_LANG
print "Description: " + TEXT_DESC
# create ID3 tag if not exists
try:
tags = ID3(fname)
except ID3NoHeaderError:
print "Adding ID3 header;",
tags = ID3()
# remove old unsychronized lyrics
if len(tags.getall(u"USLT::'"+TEXT_LANG+"'")) != 0:
print "Removing Lyrics."
tags.delall(u"USLT::'"+TEXT_LANG+"'")
#tags.save(fname) # hm, why?
#tags.add(USLT(encoding=3, lang=u'eng', desc=u'desc', text=lyrics))
# apparently the description is important when more than one
# USLT frames are present
#tags[u"USLT::'eng'"] = (USLT(encoding=3, lang=u'eng', desc=u'desc', text=lyrics))
tags[u"USLT::'"+TEXT_LANG+"'"] = (USLT(encoding=3, lang=TEXT_LANG, desc=TEXT_DESC, text=lyrics))
print 'Added USLT frame to', fname
tags.save(fname)
print 'Done'
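# Example invocation (illustrative; file name, language code and description are placeholders):
#
#   cat lyrics.txt | python tag_lyrics.py song.mp3 eng "studio version"
#
# The lyrics are read from stdin and embedded into song.mp3 as a USLT frame.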
|
tests/test_08b_provider_gencode.py | tilschaef/genomepy | 112 | 12666805 | import pytest
import genomepy
from tests import linux, travis
@pytest.mark.skipif(travis and linux, reason="FTP does not work on Travis-Linux")
def test_gencodeprovider(gencode):
assert gencode.name == "GENCODE"
assert gencode.taxid_fields == ["taxonomy_id"]
@pytest.mark.skipif(travis and linux, reason="FTP does not work on Travis-Linux")
def test_genome_info_tuple(gencode):
t = gencode._genome_info_tuple("GRCh37")
assert isinstance(t, tuple)
assert t[0:4] == ("GRCh37", "GCA_000001405.1", 9606, True)
@pytest.mark.skipif(travis and linux, reason="FTP does not work on Travis-Linux")
def test_genomes(gencode):
assert gencode.genomes["GRCh37"]["other_info"] == "GENCODE annotation + UCSC genome"
assert gencode.genomes["GRCh38"]["assembly_accession"] == "GCA_000001405.15"
@pytest.mark.skipif(travis and linux, reason="FTP does not work on Travis-Linux")
def test_get_genome_download_link(gencode):
link = gencode.get_genome_download_link("GRCh37", mask="soft")
assert link in [
"http://hgdownload.soe.ucsc.edu/goldenPath/hg19/bigZips/chromFa.tar.gz",
"http://hgdownload.soe.ucsc.edu/goldenPath/hg19/bigZips/hg19.fa.masked.gz",
]
@pytest.mark.skipif(travis and linux, reason="FTP does not work on Travis-Linux")
def test_get_annotation_download_links(gencode):
# default annotation filing system
genome = "GRCm39"
annots = gencode.get_annotation_download_links(genome)
expected = [ # release numbers removed
"ftp.ebi.ac.uk/pub/databases/gencode/Gencode_mouse/release_M",
"/gencode.vM",
".annotation.gtf.gz",
]
assert all([exp in annots[0] for exp in expected])
# GRCh37, the one with the unique filing system.
genome = "GRCh37"
annots = gencode.get_annotation_download_links(genome)
expected = [ # release numbers removed
"ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/release_",
"/GRCh37_mapping/gencode.v",
"lift37.annotation.gtf.gz",
]
assert all([exp in annots[0] for exp in expected])
@pytest.mark.skipif(travis and linux, reason="FTP does not work on Travis-Linux")
def test_download_annotation(gencode):
gencode.download_annotation("GRCm39") # smallest gencode annotation (0.8 GB)
def test_get_gencode2ucsc():
genomes = {
"test1": {"species": "Homo sapiens"},
"test2": {"species": "Mus Musculus"},
"test3": {"species": "whatever"},
}
gencode2ucsc = genomepy.providers.gencode.get_gencode2ucsc(genomes)
assert gencode2ucsc["test1"] == "hg1"
assert gencode2ucsc["test2"] == "mm2"
assert gencode2ucsc["test3"] == "mm3"
def test_get_releases():
listing = [
"/path/to/release_22",
"/path/to/mouse/release_M44",
"/path/to/mouse/release_M33",
"/path/to/release_01", # too old
"/path/to/something/else",
]
specie = "human"
releases = genomepy.providers.gencode.get_releases(listing, specie)
assert releases == ["44", "33", "22"]
specie = "mouse"
releases = genomepy.providers.gencode.get_releases(listing, specie)
assert releases == ["M44", "M33", "M22"]
def test_add_grch37():
release = 42
genomes = {
"GRCh11": {},
"GRCh22": {"annotations": [f"ftp/to/release_{release}/gtf"]},
}
genomes = genomepy.providers.gencode.add_grch37(genomes, "")
expected = (
f"/Gencode_human/release_{release}/GRCh37_mapping/"
f"gencode.v{release}lift37.annotation.gtf.gz"
)
assert genomes["GRCh22"]["annotations"] == [f"ftp/to/release_{release}/gtf"]
assert genomes["GRCh37"]["annotations"] == [expected]
|
DeepAlignmentNetwork/menpofit/visualize/__init__.py | chiawei-liu/DeepAlignmentNetwork | 220 | 12666821 | from .textutils import print_progress, statistics_table
from .base import (view_image_multiple_landmarks,
plot_cumulative_error_distribution)
|
tools/inference_synthesis.py | yuzhd/Text2Scene | 109 | 12666857 | <reponame>yuzhd/Text2Scene
#!/usr/bin/env python
import _init_paths
import os, sys, cv2, json
import math, PIL, cairo
import numpy as np
import pickle, random
import os.path as osp
from time import time
from copy import deepcopy
from glob import glob
import matplotlib.pyplot as plt
from composites_utils import *
from composites_config import get_config
from datasets.composites_coco import composites_coco
from datasets.composites_loader import sequence_loader, synthesis_loader
from modules.synthesis_model import SynthesisModel
from modules.synthesis_trainer import SynthesisTrainer
import torch, torchtext
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
def test_synthesis_model(config):
testdb = composites_coco(config, 'test', '2017')
trainer = SynthesisTrainer(config)
# we use the official validation set as test set
trainer.sample_for_eval(testdb)
if __name__ == '__main__':
cv2.setNumThreads(0)
config, unparsed = get_config()
np.random.seed(config.seed)
random.seed(config.seed)
torch.manual_seed(config.seed)
if(config.cuda):
torch.cuda.manual_seed_all(config.seed)
prepare_directories(config)
test_synthesis_model(config)
|
lib/model/MLP.py | desmondblue/image-reconstruction-web-app | 8,045 | 12666864 | <reponame>desmondblue/image-reconstruction-web-app
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import torch
import torch.nn as nn
import torch.nn.functional as F
class MLP(nn.Module):
def __init__(self,
filter_channels,
merge_layer=0,
res_layers=[],
norm='group',
last_op=None):
super(MLP, self).__init__()
self.filters = nn.ModuleList()
self.norms = nn.ModuleList()
self.merge_layer = merge_layer if merge_layer > 0 else len(filter_channels) // 2
self.res_layers = res_layers
self.norm = norm
self.last_op = last_op
for l in range(0, len(filter_channels)-1):
if l in self.res_layers:
self.filters.append(nn.Conv1d(
filter_channels[l] + filter_channels[0],
filter_channels[l+1],
1))
else:
self.filters.append(nn.Conv1d(
filter_channels[l],
filter_channels[l+1],
1))
if l != len(filter_channels)-2:
if norm == 'group':
self.norms.append(nn.GroupNorm(32, filter_channels[l+1]))
elif norm == 'batch':
self.norms.append(nn.BatchNorm1d(filter_channels[l+1]))
def forward(self, feature):
'''
feature may include multiple view inputs
args:
feature: [B, C_in, N]
return:
[B, C_out, N] prediction
'''
y = feature
tmpy = feature
phi = None
for i, f in enumerate(self.filters):
y = f(
y if i not in self.res_layers
else torch.cat([y, tmpy], 1)
)
if i != len(self.filters)-1:
if self.norm not in ['batch', 'group']:
y = F.leaky_relu(y)
else:
y = F.leaky_relu(self.norms[i](y))
if i == self.merge_layer:
phi = y.clone()
if self.last_op is not None:
y = self.last_op(y)
return y, phi
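# Minimal usage sketch (added for illustration, not part of the original module): the channel
# sizes, batch size and point count below are arbitrary placeholders, not the configuration
# used by the surrounding project.
if __name__ == "__main__":
    mlp = MLP(filter_channels=[256, 512, 256, 1], res_layers=[1], norm='group',
              last_op=nn.Sigmoid())
    feats = torch.randn(2, 256, 1000)  # [B, C_in, N] per-point feature vectors
    pred, phi = mlp(feats)             # pred: [B, 1, N] sigmoid output, phi: intermediate features
    print(pred.shape, phi.shape)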
|
BPNN/BPNN_Regression/TensorFlow_BPNN_Regression.py | Jojoxiao/Machine-Learning-for-Beginner-by-Python3 | 397 | 12666866 | #-*- coding:utf-8 -*-
# &Author AnFany
# Works for multi-dimensional outputs
from BPNN_DATA_Reg import model_data as R_data
import numpy as np
import tensorflow as tf
'''Part 1: data preparation'''
train_x_data = R_data[0]  # training inputs
train_y_data = R_data[1]  # training outputs
predict_x_data = R_data[2]  # test inputs
predict_y_data = R_data[3]  # test outputs
'''Part 2: build the training function with TensorFlow'''
# create the activation function
def activate(input_layer, weights, biases, actfunc):
layer = tf.add(tf.matmul(input_layer, weights), biases)
if actfunc == 'relu':
return tf.nn.relu(layer)
elif actfunc == 'tanh':
return tf.nn.tanh(layer)
elif actfunc == 'sigmoid':
return tf.nn.sigmoid(layer)
# The weight initialization scheme depends strongly on the activation function
# sigmoid: xavier   tanh: xavier   relu: he
# build the training function
def Ten_train(xdata, ydata, prexdata, hiddenlayers=3, hiddennodes=100, \
learn_rate=0.05, itertimes=100000, batch_size=200, activate_func='sigmoid', break_error=0.0043):
    # start building the neural network
    Input_Dimen = len(xdata[0])
    Unit_Layers = [Input_Dimen] + [hiddennodes] * hiddenlayers + [len(ydata[0])]  # input dimension, hidden layer sizes, output dimension
    # create placeholders
    x_data = tf.placeholder(shape=[None, Input_Dimen], dtype=tf.float32)
    y_target = tf.placeholder(shape=[None, len(ydata[0])], dtype=tf.float32)
    # dynamically named variables
VAR_NAME = locals()
for jj in range(hiddenlayers + 1):
VAR_NAME['weight%s' % jj] = tf.Variable(np.random.rand(Unit_Layers[jj], Unit_Layers[jj + 1]), dtype=tf.float32,\
name='weight%s' % jj) / np.sqrt(Unit_Layers[jj]) # sigmoid tanh
        # VAR_NAME['weight%s'%jj] = tf.Variable(np.random.rand(Unit_Layers[jj], Unit_Layers[jj + 1]), dtype=tf.float32, name='weight%s' % jj) / np.sqrt(Unit_Layers[jj] / 2)  # relu
VAR_NAME['bias%s' % jj] = tf.Variable(tf.random_normal([Unit_Layers[jj + 1]], stddev=10, name='bias%s' % jj),
dtype=tf.float32)
if jj == 0:
VAR_NAME['ooutda%s' % jj] = activate(x_data, eval('weight%s' % jj), eval('bias%s' % jj), actfunc=activate_func)
else:
VAR_NAME['ooutda%s' % jj] = activate(eval('ooutda%s' % (jj - 1)), eval('weight%s' % jj), \
eval('bias%s' % jj), actfunc=activate_func)
    # mean squared error
    loss = tf.reduce_mean(tf.reduce_sum(tf.square(y_target - eval('ooutda%s' % (hiddenlayers))), reduction_indices=[1]))
    # optimization method
    my_opt = tf.train.AdamOptimizer(learn_rate)
    train_step = my_opt.minimize(loss)
    # initialization
    init = tf.global_variables_initializer()
    loss_vec = []  # training error
with tf.Session() as sess:
saver = tf.train.Saver()
sess.run(init)
for i in range(itertimes):
rand_index = np.random.choice(len(xdata), size=batch_size, replace=False)
rand_x = xdata[rand_index]
rand_y = ydata[rand_index]
sess.run(train_step, feed_dict={x_data: rand_x, y_target: rand_y})
temp_loss = sess.run(loss, feed_dict={x_data: xdata, y_target: ydata})
loss_vec.append(temp_loss)
            # monitor how training is going based on the reported error
            if (i + 1) % 25 == 0:
                print('Generation: ' + str(i + 1) + '. Normalized error: Loss = ' + str(temp_loss))
            # early-exit check
            if temp_loss < break_error:  # value chosen empirically; since stochastic descent is used, the error may fluctuate early on
                break
        # compute the output for the test data
pre_in_data0 = np.array(prexdata, dtype=np.float32)
for ipre in range(hiddenlayers + 1):
VAR_NAME['pre_in_data%s' % (ipre + 1)] = activate(eval('pre_in_data%s' % ipre), eval('weight%s' % ipre).eval(),\
eval('bias%s' % ipre).eval(), actfunc=activate_func)
        # compute the output for the training data
train_in_data0 = np.array(xdata, dtype=np.float32)
for ipre in range(hiddenlayers + 1):
VAR_NAME['train_in_data%s' % (ipre + 1)] = activate(eval('train_in_data%s' % ipre), eval('weight%s' % ipre).eval(),\
eval('bias%s' % ipre).eval(), actfunc=activate_func)
return eval('train_in_data%s'%(hiddenlayers+1)).eval(), eval('pre_in_data%s'%(hiddenlayers+1)).eval(), loss_vec
'''Part 3: result-display functions'''
import matplotlib.pyplot as plt
from pylab import mpl  # display Chinese characters in plots
mpl.rcParams['font.sans-serif'] = ['FangSong']  # set the Chinese font FangSong
# plot the comparison figures
def figure(real, net, le='Training', real_line='ko-', net_line='r.-', width=3):
    length = len(real[0])
    # plot a comparison figure for every output dimension
    for iwe in range(length):
        plt.subplot(length, 1, iwe+1)
        plt.plot(list(range(len(real.T[iwe]))), real.T[iwe], real_line, linewidth=width)
        plt.plot(list(range(len(net.T[iwe]))), net.T[iwe], net_line, linewidth=width - 1)
        plt.legend(['%s real values' % le, 'Network outputs'])
        if length == 1:
            plt.title('%s result comparison' % le)
        else:
            if iwe == 0:
                plt.title('%s results: dimension %s comparison' % (le, iwe))
            else:
                plt.title('Dimension %s comparison' % iwe)
plt.show()
# plot the cost-function curve
def costfig(errlist, le='Cost function curve'):
    plt.plot(list(range(len(errlist))), errlist, linewidth=3)
    plt.title(le)
    plt.xlabel('Iterations')
    plt.ylabel('Cost function value')
plt.show()
# There is a lot of training data, so randomly select a fixed number of samples for display to keep the plots readable
def select(datax, datay, count=200):
sign = list(range(len(datax)))
selectr_sign = np.random.choice(sign, count, replace=False)
return datax[selectr_sign], datay[selectr_sign]
# rescale the output data back to the range of the original data
def trans(ydata, minumber=R_data[4][0], maxumber=R_data[4][1]):
return ydata * (maxumber - minumber) + minumber
if __name__ == '__main__':
    # training
    tfrelu = Ten_train(train_x_data, train_y_data, predict_x_data)
    # rescale the real data back to the original range
    train_y_data_tran = trans(train_y_data)
    predict_y_data_tran = trans(predict_y_data)
    # rescale the network predictions back to the original range
    train_output = trans(tfrelu[0])
    predict_output = trans(tfrelu[1])
    # too much data clutters the plots, so randomly pick a subset of samples
    random_train_x_data = select(train_output, train_y_data_tran, 200)
    random_predict_x_data = select(predict_output, predict_y_data_tran, 100)
    figure(random_train_x_data[1], random_train_x_data[0], le='Training')
    figure(random_predict_x_data[1], random_predict_x_data[0], le='Prediction')
costfig(tfrelu[2])
|