Dataset schema (one row per source file; nullable columns marked with ⌀):
hexsha: string (40)
size: int64 (5 to 2.06M)
ext: string (10 classes)
lang: string (1 class)
max_stars_repo_path: string (3 to 248)
max_stars_repo_name: string (5 to 125)
max_stars_repo_head_hexsha: string (40 to 78)
max_stars_repo_licenses: list (1 to 10)
max_stars_count: int64 (1 to 191k) ⌀
max_stars_repo_stars_event_min_datetime: string (24) ⌀
max_stars_repo_stars_event_max_datetime: string (24) ⌀
max_issues_repo_path: string (3 to 248)
max_issues_repo_name: string (5 to 125)
max_issues_repo_head_hexsha: string (40 to 78)
max_issues_repo_licenses: list (1 to 10)
max_issues_count: int64 (1 to 67k) ⌀
max_issues_repo_issues_event_min_datetime: string (24) ⌀
max_issues_repo_issues_event_max_datetime: string (24) ⌀
max_forks_repo_path: string (3 to 248)
max_forks_repo_name: string (5 to 125)
max_forks_repo_head_hexsha: string (40 to 78)
max_forks_repo_licenses: list (1 to 10)
max_forks_count: int64 (1 to 105k) ⌀
max_forks_repo_forks_event_min_datetime: string (24) ⌀
max_forks_repo_forks_event_max_datetime: string (24) ⌀
content: string (5 to 2.06M)
avg_line_length: float64 (1 to 1.02M)
max_line_length: int64 (3 to 1.03M)
alphanum_fraction: float64 (0 to 1)
count_classes: int64 (0 to 1.6M)
score_classes: float64 (0 to 1)
count_generators: int64 (0 to 651k)
score_generators: float64 (0 to 1)
count_decorators: int64 (0 to 990k)
score_decorators: float64 (0 to 1)
count_async_functions: int64 (0 to 235k)
score_async_functions: float64 (0 to 1)
count_documentation: int64 (0 to 1.04M)
score_documentation: float64 (0 to 1)

8297797069048f1e64c87757d3ccf7043bd8704b | 3,690 | py | Python | src/tests/dao_test/guild_roles_dao_test.py | Veloxization/likahbot | ["MIT"]

import unittest
import os
from dao.guild_roles_dao import GuildRolesDAO
from dao.guild_role_categories_dao import GuildRoleCategoriesDAO
class TestGuildRolesDAO(unittest.TestCase):
def setUp(self):
self.db_addr = "database/test_db.db"
os.popen(f"sqlite3 {self.db_addr} < database/schema.sql")
self.guild_roles_dao = GuildRolesDAO(self.db_addr)
self.guild_role_categories_dao = GuildRoleCategoriesDAO(self.db_addr)
self.guild_role_categories_dao.add_guild_role_category(1234, "TEST")
self.guild_role_categories_dao.add_guild_role_category(2345, "TEST")
self.category_id1 = self.guild_role_categories_dao.get_all_guild_role_categories(1234)[0]["id"]
self.category_id2 = self.guild_role_categories_dao.get_all_guild_role_categories(2345)[0]["id"]
def tearDown(self):
self.guild_roles_dao.clear_guild_roles_table()
self.guild_role_categories_dao.clear_guild_role_categories_table()
def test_guild_role_is_added_correctly(self):
roles = self.guild_roles_dao.get_all_guild_roles(1234)
self.assertEqual(len(roles), 0)
self.guild_roles_dao.add_guild_role(9876, self.category_id1)
roles = self.guild_roles_dao.get_all_guild_roles(1234)
self.assertEqual(len(roles), 1)
def test_guild_role_is_removed_correctly(self):
self.guild_role_categories_dao.add_guild_role_category(1234, "TEST2")
cat_id = self.guild_role_categories_dao.get_all_guild_role_categories(1234)[1]["id"]
self.guild_roles_dao.add_guild_role(9876, self.category_id1)
self.guild_roles_dao.add_guild_role(9876, cat_id)
roles = self.guild_roles_dao.get_all_guild_roles(1234)
self.assertEqual(len(roles), 2)
self.guild_roles_dao.remove_guild_role_from_category(9876, self.category_id1)
roles = self.guild_roles_dao.get_all_guild_roles(1234)
self.assertEqual(len(roles), 1)
def test_all_guild_roles_are_removed_correctly(self):
self.guild_roles_dao.add_guild_role(9876, self.category_id1)
self.guild_roles_dao.add_guild_role(8765, self.category_id2)
roles1 = self.guild_roles_dao.get_all_guild_roles(1234)
roles2 = self.guild_roles_dao.get_all_guild_roles(2345)
self.assertEqual(len(roles1), 1)
self.assertEqual(len(roles2), 1)
self.guild_roles_dao.delete_guild_roles(1234)
roles1 = self.guild_roles_dao.get_all_guild_roles(1234)
roles2 = self.guild_roles_dao.get_all_guild_roles(2345)
self.assertEqual(len(roles1), 0)
self.assertEqual(len(roles2), 1)
def test_guild_roles_of_type_are_returned_correctly(self):
self.guild_role_categories_dao.add_guild_role_category(1234, "TEST2")
cat_id = self.guild_role_categories_dao.get_all_guild_role_categories(1234)[1]["id"]
self.guild_roles_dao.add_guild_role(9876, self.category_id1)
self.guild_roles_dao.add_guild_role(8765, self.category_id1)
self.guild_roles_dao.add_guild_role(7654, cat_id)
roles = self.guild_roles_dao.get_guild_roles_of_type("TEST", 1234)
self.assertEqual(len(roles), 2)
roles = self.guild_roles_dao.get_guild_roles_of_type("TEST2", 1234)
self.assertEqual(len(roles), 1)
def test_guild_role_is_returned_correctly_with_id(self):
self.guild_roles_dao.add_guild_role(9876, self.category_id1)
self.guild_roles_dao.add_guild_role(8765, self.category_id2)
role = self.guild_roles_dao.get_guild_roles_by_role_id(9876)[0]
self.assertEqual(role["role_id"], 9876)
self.assertEqual(role["guild_id"], 1234)
self.assertEqual(role["category"], "TEST")

82995e877d2337617c9148dbf6692f9969d5a1fd | 1,115 | py | Python | qcic.py | milkllc/qcic | ["MIT"]

import picamera
import datetime
import os
delcount = 2
def check_fs():
global delcount
st = os.statvfs('/')
pct = 100 - st.f_bavail * 100.0 / st.f_blocks
print pct, "percent full"
if pct > 90:
# less than 10% left, delete a few minutes
files = os.listdir('.')
files.sort()
for i in range(0, delcount):
print "deleting", files[i]
os.remove(files[i])
delcount += 1 # keep increasing until we get under 90%
else:
delcount = 2
with picamera.PiCamera() as camera:
try:
check_fs()
tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
print "recording", tstamp
camera.start_recording(tstamp + '.h264')
camera.wait_recording(60)
while True:
check_fs()
tstamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S%f')
print "recording", tstamp
camera.split_recording(tstamp + '.h264')
camera.wait_recording(60)
except KeyboardInterrupt:
print "quitting"
camera.stop_recording()

8299b31e5521abb8d2f8dfff1c5df4a520607b18 | 164 | py | Python | exercises/allergies/allergies.py | akashsara/python | ["MIT"]

class Allergies(object):
    # standard Exercism allergy scoring: each allergen is one bit of the score
    ALLERGENS = ["eggs", "peanuts", "shellfish", "strawberries",
                 "tomatoes", "chocolate", "pollen", "cats"]
    def __init__(self, score):
        self.score = score
    def allergic_to(self, item):
        return bool(self.score >> self.ALLERGENS.index(item) & 1)
    @property
    def lst(self):
        return [a for i, a in enumerate(self.ALLERGENS)
                if self.score >> i & 1]

8299ba8eed08b051c1bd7e22979a2992369a89ff | 4,398 | py | Python | forge/mock_handle.py | ujjwalsh/pyforge | ["BSD-3-Clause"]

from .handle import ForgeHandle
class MockHandle(ForgeHandle):
def __init__(self, forge, mock, behave_as_instance=True):
super(MockHandle, self).__init__(forge)
self.mock = mock
self.behaves_as_instance = behave_as_instance
self._attributes = {}
self._is_hashable = False
self._is_setattr_enabled_in_replay = False
def is_hashable(self):
return self._is_hashable
def enable_hashing(self):
self._is_hashable = True
def disable_hashing(self):
self._is_hashable = False
def enable_setattr_during_replay(self):
self._is_setattr_enabled_in_replay = True
def disable_setattr_during_replay(self):
self._is_setattr_enabled_in_replay = False
def is_setattr_enabled_in_replay(self):
return self._is_setattr_enabled_in_replay
def has_attribute(self, attr):
return False
def get_attribute(self, attr):
if self.forge.attributes.has_attribute(self.mock, attr):
return self.forge.attributes.get_attribute(self.mock, attr)
if self.has_nonmethod_class_member(attr):
return self.get_nonmethod_class_member(attr)
if self.has_method(attr):
return self.get_method(attr)
raise AttributeError("%s has no attribute %r" % (self.mock, attr))
def set_attribute(self, attr, value, caller_info):
if self.forge.is_recording() or self.is_setattr_enabled_in_replay():
self._set_attribute(attr, value)
else:
self._set_attribute_during_replay(attr, value, caller_info)
def expect_setattr(self, attr, value):
return self.forge.queue.push_setattr(self.mock, attr, value, caller_info=self.forge.debug.get_caller_info())
def _set_attribute_during_replay(self, attr, value, caller_info):
self.forge.queue.pop_matching_setattr(self.mock, attr, value, caller_info)
self._set_attribute(attr, value)
def _set_attribute(self, attr, value):
self.forge.attributes.set_attribute(self.mock, attr, value)
def has_method(self, attr):
return self.forge.stubs.has_initialized_method_stub(self.mock, attr) or self._has_method(attr)
def _has_method(self, name):
raise NotImplementedError()
def has_nonmethod_class_member(self, name):
raise NotImplementedError()
def get_nonmethod_class_member(self, name):
raise NotImplementedError()
def get_method(self, name):
returned = self.forge.stubs.get_initialized_method_stub_or_none(self.mock, name)
if returned is None:
real_method = self._get_real_method(name)
if not self.forge.is_recording():
self._check_unrecorded_method_getting(name)
returned = self._construct_stub(name, real_method)
self._bind_if_needed(name, returned)
self.forge.stubs.add_initialized_method_stub(self.mock, name, returned)
self._set_method_description(returned, name)
elif self.forge.is_replaying() and not returned.__forge__.has_recorded_calls():
self._check_getting_method_stub_without_recorded_calls(name, returned)
return returned
def _set_method_description(self, method, name):
method.__forge__.set_description("%s.%s" % (
self.describe(), name
))
def _construct_stub(self, name, real_method):
return self.forge.create_method_stub(real_method)
def _check_unrecorded_method_getting(self, name):
raise NotImplementedError()
def _check_getting_method_stub_without_recorded_calls(self, name, stub):
raise NotImplementedError()
def _get_real_method(self, name):
raise NotImplementedError()
def handle_special_method_call(self, name, args, kwargs, caller_info):
self._check_special_method_call(name, args, kwargs)
return self.get_method(name).__forge__.handle_call(args, kwargs, caller_info)
def _check_special_method_call(self, name, args, kwargs):
raise NotImplementedError()
def is_callable(self):
raise NotImplementedError()
def _bind_if_needed(self, name, method_stub):
bind_needed, bind_target = self._is_binding_needed(name, method_stub)
if bind_needed:
method_stub.__forge__.bind(bind_target)
def _is_binding_needed(self, name, method_stub):
raise NotImplementedError()

829d63942c82469cfa51d90a39b4e86d506709d | 4,599 | py | Python | RecoBTag/PerformanceDB/python/measure/Pool_mistag110118.py | ckamtsikis/cmssw | ["Apache-2.0"]

import FWCore.ParameterSet.Config as cms
from CondCore.DBCommon.CondDBCommon_cfi import *
PoolDBESSourceMistag110118 = cms.ESSource("PoolDBESSource",
CondDBCommon,
toGet = cms.VPSet(
#
# working points
#
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJBPLtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPLtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJBPLwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPLwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJBPMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJBPMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJBPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJBPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJBPTwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJPLtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPLtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJPLwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPLwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJPMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJPMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGJPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGJPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGJPTwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGSSVHEMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHEMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGSSVHEMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHEMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGSSVHPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGSSVHPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGSSVHPTwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGTCHELtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHELtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGTCHELwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHELwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGTCHEMtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHEMtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGTCHEMwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHEMwp_v5_offline')
),
cms.PSet(
record = cms.string('PerformancePayloadRecord'),
tag = cms.string('BTagMISTAGTCHPTtable_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHPTtable_v5_offline')
),
cms.PSet(
record = cms.string('PerformanceWPRecord'),
tag = cms.string('BTagMISTAGTCHPTwp_v5_offline'),
label = cms.untracked.string('BTagMISTAGTCHPTwp_v5_offline')
),
))
PoolDBESSourceMistag110118.connect = 'frontier://FrontierProd/CMS_COND_31X_PHYSICSTOOLS'
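
# Note (added): the twenty-two PSet entries above differ only in the tagger
# name inside the tag string, so the same toGet list could be generated
# programmatically; an illustrative sketch, not part of the original config:
mistagTaggers = ['JBPL', 'JBPM', 'JBPT', 'JPL', 'JPM', 'JPT',
                 'SSVHEM', 'SSVHPT', 'TCHEL', 'TCHEM', 'TCHPT']
generatedToGet = cms.VPSet()
for name in mistagTaggers:
    for kind, record in (('table', 'PerformancePayloadRecord'),
                         ('wp', 'PerformanceWPRecord')):
        tag = 'BTagMISTAG%s%s_v5_offline' % (name, kind)
        generatedToGet.append(cms.PSet(record=cms.string(record),
                                       tag=cms.string(tag),
                                       label=cms.untracked.string(tag)))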

829b4d9d2ba83ae6309dbbbee76b950d8044a7f9 | 7,423 | py | Python | src/ScheduleEvaluation.py | franTarkenton/replication_health_check | ["Apache-2.0"]

'''
Created on Nov 22, 2018
@author: kjnether
methods that evaluate the given schedule
'''
import logging
import FMEUtil.FMEServerApiData
import re
class EvaluateSchedule(object):
def __init__(self, schedulesData):
self.logger = logging.getLogger(__name__)
if not isinstance(schedulesData, FMEUtil.FMEServerApiData.Schedules):
msg = 'arg schedulesData should be type FMEUtil.FMEServerAp' + \
'iData.Schedules instead its a type {0}'
msg = msg.format(type(schedulesData))
raise ValueError(msg)
self.schedule = schedulesData
def getDisabled(self):
'''
:return: a list of schedules that are currently disabled
'''
disableList = []
for schedule in self.schedule:
if not schedule.isEnabled():
fmw = schedule.getFMWName()
repo = schedule.getRepository()
schedName = schedule.getScheduleName()
disableList.append([schedName, repo, fmw])
# sort by the fmw name
disableList.sort(key=lambda x: x[0])
return disableList
def compareRepositorySchedule(self, workspacesData):
'''
        identifies FMWs in the workspaces (workspacesData) that are not
        associated with a schedule.
:param workspacesData: a fmeserver data api workspaces object that
is to be compared against the schedule data
:type workspacesData: FMEUtil.FMEServerApiData.Workspaces
'''
notScheduled = []
for workspace in workspacesData:
repoName = workspace.getRepositoryName()
workspaceName = workspace.getWorkspaceName()
scheduleName = self.schedule.getFMWRepositorySchedule(
repositoryName=repoName, fmwName=workspaceName)
if scheduleName is None:
notScheduled.append(workspaceName)
notScheduled.sort()
return notScheduled
def getEmbeddedData(self):
'''
        identifies datasets that probably should be sourcing info from the
        staging area but instead are sourcing from some other location
        '''
        searchRegex = re.compile(r'^\$\(FME_MF_\w*\).*$')
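        # e.g. this matches published-parameter values like
        # "$(FME_MF_DIR)staging_file.csv" (illustrative value, not from the data)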
schedEmbeds = []
self.schedule.reset()
for schedule in self.schedule:
pubparams = schedule.getPublishedParameters()
schedName = schedule.getScheduleName()
for pubparam in pubparams:
paramName = pubparam.getName()
paramValue = pubparam.getValue()
self.logger.debug("paramName: %s", paramName)
self.logger.debug("paramValue: %s", paramValue)
if isinstance(paramValue, list):
paramValue = ', '.join(paramValue)
self.logger.info("list param as string: %s", paramValue)
if searchRegex.match(paramValue):
schedEmbeds.append([schedName, paramName, paramValue])
schedEmbeds.sort(key=lambda x: x[0])
return schedEmbeds
def getNonProdSchedules(self):
'''
iterates through the schedules returning a list of lists, where
the inner list contains the:
- FMW Name
- Value that DEST_DB_ENV_KEY is set to. Returns None if the parameter
doesn't exist at all.
'''
filterList = ['OTHR', 'PRD', 'DBCPRD', 'OTHER']
filteredScheds = self.getSchedsFilterByDestDbEnvKey(filterList,
includeNull=True)
nonProdList = []
for schedule in filteredScheds:
scheduleName = schedule.getScheduleName()
fmw = schedule.getFMWName()
if fmw.upper() != 'APP_KIRK__FGDB.FMW':
pubparams = schedule.getPublishedParameters()
destDbEnvKey = pubparams.getDestDbEnvKey()
nonProdList.append([scheduleName, destDbEnvKey])
nonProdList.sort(key=lambda x: x[0])
return nonProdList
def getSchedsFilterByDestDbEnvKey(self, envKeysToExclude,
includeNull=False):
'''
returns a filtered list based on the parameters identified, does
not include KIRK jobs
:param envKeysToExclude: Schedules that are configured with these
values will be excluded from the list
:type envKeysToExclude: list of strings
:param includeNull: whether replication scripts that do not have
a DEST_DB_ENV_KEY defined for them should be
included in the replication.
:type includeNull:
'''
envKeysToExcludeUC = [element.upper() for element in
envKeysToExclude]
filterList = []
self.schedule.reset()
for schedule in self.schedule:
scheduleName = schedule.getScheduleName()
fmw = schedule.getFMWName()
if fmw.upper() != 'APP_KIRK__FGDB.FMW':
pubparams = schedule.getPublishedParameters()
destDbEnvKey = pubparams.getDestDbEnvKey()
if destDbEnvKey is None and includeNull:
filterList.append(schedule)
elif isinstance(destDbEnvKey, list):
if len(destDbEnvKey) == 1:
destDbEnvKey = destDbEnvKey[0]
elif len(destDbEnvKey) == 0:
destDbEnvKey = ''
else:
msg = 'The schedule {0} is configured with ' + \
"multiple DEST_DB_ENV_KEYS, uncertain " + \
"which key to use. The fmw associated " + \
'with the job is {1}, the number of ' + \
'values in the list is {2} the value for' + \
' DEST_DB_ENV_KEY\'s is {3}'
msg = msg.format(scheduleName, fmw,
len(destDbEnvKey), destDbEnvKey)
# logging this as a warning for now, will catch this
# case later when we get to evaluating schedules
# that are replicating to non prod
self.logger.warning(msg)
self.logger.debug(
f"destDbEnvKey: -{destDbEnvKey}- {scheduleName}")
if (destDbEnvKey is not None) and destDbEnvKey.upper() \
not in envKeysToExcludeUC:
self.logger.debug(f"adding the key: {destDbEnvKey}")
filterList.append(schedule)
return filterList
def getAllBCGWDestinations(self):
'''
retrieves all the BCGW destinations, to retrieve these they MUST
have the DEST_DB_ENV_KEY defined for them
'''
filterList = ['OTHR', 'OTHER']
filteredSchedules = self.getSchedsFilterByDestDbEnvKey(
envKeysToExclude=filterList)
return filteredSchedules

829c52d86cde3835b9fe8363fe095b5e95155b81 | 3,319 | py | Python | podcastista/ListenNowTab.py | andrsd/podcastista | ["MIT"]

from PyQt5 import QtWidgets, QtCore
from podcastista.ShowEpisodeWidget import ShowEpisodeWidget
from podcastista.FlowLayout import FlowLayout
class FillThread(QtCore.QThread):
""" Worker thread for loading up episodes """
def __init__(self, spotify, shows):
super().__init__()
self._spotify = spotify
self._shows = shows
def run(self):
for item in self._shows['items']:
show = item['show']
show['episodes'] = []
show_episodes = self._spotify.show_episodes(show['id'], limit=20)
for episode in show_episodes['items']:
display = True
if ('resume_point' in episode and
episode['resume_point']['fully_played']):
display = False
if display:
show['episodes'].append(episode)
@property
def shows(self):
return self._shows
class ListenNowTab(QtWidgets.QWidget):
"""
Tab on the main window with the list of shows
"""
def __init__(self, parent):
super().__init__()
self._main_window = parent
# empty widget
self._empty_widget = QtWidgets.QWidget()
empty_layout = QtWidgets.QVBoxLayout()
nothing = QtWidgets.QLabel("No items")
nothing.setSizePolicy(
QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Fixed)
nothing.setContentsMargins(40, 20, 40, 20)
nothing.setStyleSheet("""
font-size: 14px;
""")
nothing.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignTop)
empty_layout.addWidget(nothing)
empty_layout.addStretch(1)
self._empty_widget.setLayout(empty_layout)
# list of items
self._layout = FlowLayout()
widget = QtWidgets.QWidget()
widget.setLayout(self._layout)
widget.setSizePolicy(
QtWidgets.QSizePolicy.MinimumExpanding,
QtWidgets.QSizePolicy.MinimumExpanding)
self._list = QtWidgets.QScrollArea()
self._list.setFrameShape(QtWidgets.QFrame.NoFrame)
self._list.setWidgetResizable(True)
self._list.setWidget(widget)
self._stacked_layout = QtWidgets.QStackedLayout(self)
self._stacked_layout.addWidget(self._empty_widget)
self._stacked_layout.addWidget(self._list)
def clear(self):
self._stacked_layout.setCurrentWidget(self._empty_widget)
while self._layout.count() > 0:
item = self._layout.takeAt(0)
if item.widget() is not None:
item.widget().deleteLater()
def fill(self):
if self._main_window.spotify is None:
return
shows = self._main_window.spotify.current_user_saved_shows()
self._filler = FillThread(self._main_window.spotify, shows)
self._filler.finished.connect(self.onFillFinished)
self._filler.start()
def onFillFinished(self):
for item in self._filler.shows['items']:
show = item['show']
if len(show['episodes']) > 0:
w = ShowEpisodeWidget(show, self._main_window)
self._layout.addWidget(w)
if self._layout.count() > 0:
self._stacked_layout.setCurrentWidget(self._list)

829d0c9553bb774075d15e5e3d5751bc89e20c32 | 866 | py | Python | ggpy/cruft/prolog_pyparser.py | hobson/ggpy | ["MIT"]

import pyparsing as pp
#relationship will refer to 'track' in all of your examples
relationship = pp.Word(pp.alphas).setResultsName('relationship')
number = pp.Word(pp.nums + '.')
variable = pp.Word(pp.alphas)
# an argument to a relationship can be either a number or a variable
argument = number | variable
# arguments are a delimited list of 'argument' surrounded by parenthesis
arguments= (pp.Suppress('(') + pp.delimitedList(argument) +
pp.Suppress(')')).setResultsName('arguments')
# a fact is composed of a relationship and its arguments
# (I'm aware it's actually more complicated than this
# it's just a simplifying assumption)
fact = (relationship + arguments).setResultsName('facts', listAllMatches=True)
# a sentence is a fact plus a period
sentence = fact + pp.Suppress('.')
# self explanatory
prolog_sentences = pp.OneOrMore(sentence)
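
# Note (added): a minimal usage sketch for the grammar above; the sample
# program text is made up for illustration.
if __name__ == '__main__':
    results = prolog_sentences.parseString("track(1, 2.5, x). track(2, 7, y).")
    for parsed_fact in results['facts']:
        print("%s %s" % (parsed_fact['relationship'],
                         list(parsed_fact['arguments'])))
    # expected to print something like:
    #   track ['1', '2.5', 'x']
    #   track ['2', '7', 'y']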

829d9d6e41067c52d752f4bdf77ffcbc9b8f2f17 | 4,496 | py | Python | Imaging/Core/Testing/Python/ReslicePermutations.py | inviCRO/VTK | ["BSD-3-Clause"]

#!/usr/bin/env python
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# this script tests vtkImageReslice with various axes permutations,
# in order to cover a nasty set of "if" statements that check
# the intersections of the raster lines with the input bounding box.
# Image pipeline
reader = vtk.vtkImageReader()
reader.ReleaseDataFlagOff()
reader.SetDataByteOrderToLittleEndian()
reader.SetDataExtent(0,63,0,63,1,93)
reader.SetDataSpacing(3.2,3.2,1.5)
reader.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter")
reader.SetDataMask(0x7fff)
transform = vtk.vtkTransform()
# rotate about the center of the image
transform.Translate(+100.8,+100.8,+69.0)
transform.RotateWXYZ(10,1,1,0)
transform.Translate(-100.8,-100.8,-69.0)
reslice1 = vtk.vtkImageReslice()
reslice1.SetInputConnection(reader.GetOutputPort())
reslice1.SetResliceAxesDirectionCosines([1,0,0,0,1,0,0,0,1])
reslice1.SetResliceTransform(transform)
reslice1.SetOutputSpacing(3.2,3.2,3.2)
reslice1.SetOutputExtent(0,74,0,74,0,0)
reslice2 = vtk.vtkImageReslice()
reslice2.SetInputConnection(reader.GetOutputPort())
reslice2.SetResliceAxesDirectionCosines([0,1,0,0,0,1,1,0,0])
reslice2.SetResliceTransform(transform)
reslice2.SetOutputSpacing(3.2,3.2,3.2)
reslice2.SetOutputExtent(0,74,0,74,0,0)
reslice3 = vtk.vtkImageReslice()
reslice3.SetInputConnection(reader.GetOutputPort())
reslice3.SetResliceAxesDirectionCosines([0,0,1,1,0,0,0,1,0])
reslice3.SetResliceTransform(transform)
reslice3.SetOutputSpacing(3.2,3.2,3.2)
reslice3.SetOutputExtent(0,74,0,74,0,0)
reslice4 = vtk.vtkImageReslice()
reslice4.SetInputConnection(reader.GetOutputPort())
reslice4.SetResliceAxesDirectionCosines([-1,0,0,0,-1,0,0,0,-1])
reslice4.SetResliceTransform(transform)
reslice4.SetOutputSpacing(3.2,3.2,3.2)
reslice4.SetOutputExtent(0,74,0,74,0,0)
reslice5 = vtk.vtkImageReslice()
reslice5.SetInputConnection(reader.GetOutputPort())
reslice5.SetResliceAxesDirectionCosines([0,-1,0,0,0,-1,-1,0,0])
reslice5.SetResliceTransform(transform)
reslice5.SetOutputSpacing(3.2,3.2,3.2)
reslice5.SetOutputExtent(0,74,0,74,0,0)
reslice6 = vtk.vtkImageReslice()
reslice6.SetInputConnection(reader.GetOutputPort())
reslice6.SetResliceAxesDirectionCosines([0,0,-1,-1,0,0,0,-1,0])
reslice6.SetResliceTransform(transform)
reslice6.SetOutputSpacing(3.2,3.2,3.2)
reslice6.SetOutputExtent(0,74,0,74,0,0)
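# Note (added): the six blocks above differ only in their direction cosines;
# an equivalent loop-based construction would look like this (illustrative
# sketch, unused by the rest of the test):
permutations = [
    [1,0,0,0,1,0,0,0,1], [0,1,0,0,0,1,1,0,0], [0,0,1,1,0,0,0,1,0],
    [-1,0,0,0,-1,0,0,0,-1], [0,-1,0,0,0,-1,-1,0,0], [0,0,-1,-1,0,0,0,-1,0]]
reslicers = []
for axes in permutations:
    r = vtk.vtkImageReslice()
    r.SetInputConnection(reader.GetOutputPort())
    r.SetResliceAxesDirectionCosines(axes)
    r.SetResliceTransform(transform)
    r.SetOutputSpacing(3.2,3.2,3.2)
    r.SetOutputExtent(0,74,0,74,0,0)
    reslicers.append(r)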
mapper1 = vtk.vtkImageMapper()
mapper1.SetInputConnection(reslice1.GetOutputPort())
mapper1.SetColorWindow(2000)
mapper1.SetColorLevel(1000)
mapper1.SetZSlice(0)
mapper2 = vtk.vtkImageMapper()
mapper2.SetInputConnection(reslice2.GetOutputPort())
mapper2.SetColorWindow(2000)
mapper2.SetColorLevel(1000)
mapper2.SetZSlice(0)
mapper3 = vtk.vtkImageMapper()
mapper3.SetInputConnection(reslice3.GetOutputPort())
mapper3.SetColorWindow(2000)
mapper3.SetColorLevel(1000)
mapper3.SetZSlice(0)
mapper4 = vtk.vtkImageMapper()
mapper4.SetInputConnection(reslice4.GetOutputPort())
mapper4.SetColorWindow(2000)
mapper4.SetColorLevel(1000)
mapper4.SetZSlice(0)
mapper5 = vtk.vtkImageMapper()
mapper5.SetInputConnection(reslice5.GetOutputPort())
mapper5.SetColorWindow(2000)
mapper5.SetColorLevel(1000)
mapper5.SetZSlice(0)
mapper6 = vtk.vtkImageMapper()
mapper6.SetInputConnection(reslice6.GetOutputPort())
mapper6.SetColorWindow(2000)
mapper6.SetColorLevel(1000)
mapper6.SetZSlice(0)
actor1 = vtk.vtkActor2D()
actor1.SetMapper(mapper1)
actor2 = vtk.vtkActor2D()
actor2.SetMapper(mapper2)
actor3 = vtk.vtkActor2D()
actor3.SetMapper(mapper3)
actor4 = vtk.vtkActor2D()
actor4.SetMapper(mapper4)
actor5 = vtk.vtkActor2D()
actor5.SetMapper(mapper5)
actor6 = vtk.vtkActor2D()
actor6.SetMapper(mapper6)
imager1 = vtk.vtkRenderer()
imager1.AddActor2D(actor1)
imager1.SetViewport(0.0,0.0,0.3333,0.5)
imager2 = vtk.vtkRenderer()
imager2.AddActor2D(actor2)
imager2.SetViewport(0.0,0.5,0.3333,1.0)
imager3 = vtk.vtkRenderer()
imager3.AddActor2D(actor3)
imager3.SetViewport(0.3333,0.0,0.6667,0.5)
imager4 = vtk.vtkRenderer()
imager4.AddActor2D(actor4)
imager4.SetViewport(0.3333,0.5,0.6667,1.0)
imager5 = vtk.vtkRenderer()
imager5.AddActor2D(actor5)
imager5.SetViewport(0.6667,0.0,1.0,0.5)
imager6 = vtk.vtkRenderer()
imager6.AddActor2D(actor6)
imager6.SetViewport(0.6667,0.5,1.0,1.0)
imgWin = vtk.vtkRenderWindow()
imgWin.AddRenderer(imager1)
imgWin.AddRenderer(imager2)
imgWin.AddRenderer(imager3)
imgWin.AddRenderer(imager4)
imgWin.AddRenderer(imager5)
imgWin.AddRenderer(imager6)
imgWin.SetSize(225,150)
imgWin.Render()
# --- end of script --

829dd3506bffa917743930aa6c0983eab6866732 | 2,916 | py | Python | neuronlp2/nn/utils.py | ntunlp/ptrnet-depparser | ["MIT"]

import collections
from itertools import repeat
import torch
import torch.nn as nn
import torch.nn.utils.rnn as rnn_utils
def _ntuple(n):
def parse(x):
        if isinstance(x, collections.abc.Iterable):
return x
return tuple(repeat(x, n))
return parse
_single = _ntuple(1)
_pair = _ntuple(2)
_triple = _ntuple(3)
_quadruple = _ntuple(4)
def prepare_rnn_seq(rnn_input, lengths, hx=None, masks=None, batch_first=False):
'''
Args:
rnn_input: [seq_len, batch, input_size]: tensor containing the features of the input sequence.
        lengths: [batch]: tensor containing the lengths of the input sequence
hx: [num_layers * num_directions, batch, hidden_size]: tensor containing the initial hidden state for each element in the batch.
masks: [seq_len, batch]: tensor containing the mask for each element in the batch.
batch_first: If True, then the input and output tensors are provided as [batch, seq_len, feature].
Returns:
'''
def check_decreasing(lengths):
lens, order = torch.sort(lengths, dim=0, descending=True)
if torch.ne(lens, lengths).sum() == 0:
return None
else:
_, rev_order = torch.sort(order)
return lens, order, rev_order
check_res = check_decreasing(lengths)
if check_res is None:
lens = lengths
rev_order = None
else:
lens, order, rev_order = check_res
batch_dim = 0 if batch_first else 1
rnn_input = rnn_input.index_select(batch_dim, order)
if hx is not None:
# hack lstm
if isinstance(hx, tuple):
hx, cx = hx
hx = hx.index_select(1, order)
cx = cx.index_select(1, order)
hx = (hx, cx)
else:
hx = hx.index_select(1, order)
lens = lens.tolist()
seq = rnn_utils.pack_padded_sequence(rnn_input, lens, batch_first=batch_first)
if masks is not None:
if batch_first:
masks = masks[:, :lens[0]]
else:
masks = masks[:lens[0]]
return seq, hx, rev_order, masks
def recover_rnn_seq(seq, rev_order, hx=None, batch_first=False):
output, _ = rnn_utils.pad_packed_sequence(seq, batch_first=batch_first)
if rev_order is not None:
batch_dim = 0 if batch_first else 1
output = output.index_select(batch_dim, rev_order)
if hx is not None:
# hack lstm
if isinstance(hx, tuple):
hx, cx = hx
hx = hx.index_select(1, rev_order)
cx = cx.index_select(1, rev_order)
hx = (hx, cx)
else:
hx = hx.index_select(1, rev_order)
return output, hx
def freeze_embedding(embedding):
assert isinstance(embedding, nn.Embedding), "input should be an Embedding module."
embedding.weight.detach_()
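
# Note (added): a minimal usage sketch for prepare_rnn_seq/recover_rnn_seq;
# the tensor sizes are made up for illustration.
if __name__ == '__main__':
    rnn = nn.LSTM(input_size=4, hidden_size=8)
    rnn_input = torch.randn(7, 3, 4)       # [seq_len, batch, input_size]
    lengths = torch.tensor([5, 7, 3])      # deliberately not sorted
    seq, hx, rev_order, _ = prepare_rnn_seq(rnn_input, lengths)
    packed_output, hn = rnn(seq, hx)
    output, hn = recover_rnn_seq(packed_output, rev_order, hx=hn)
    print(output.shape)                    # torch.Size([7, 3, 8])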

829dd5cc20b5aa7c14726c3c740aa687c0a9650d | 194 | py | Python | Data_Analyst/Step_2_Intermediate_Python_and_Pandas/2_Data_Analysis_with_Pandas_Intermediate/3_Introduction_to_Pandas/7_Selecting_a_row/script.py | ustutz/dataquest | ["MIT"]

import pandas as pandas_Pandas_Module
class Script:
@staticmethod
def main():
food_info = pandas_Pandas_Module.read_csv("../food_info.csv")
print(str(food_info.dtypes))
Script.main()

829fa892ed939a93b224c00b60d5719ddb4dc7e0 | 2,176 | py | Python | examples/fire.py | pombreda/py-lepton | ["MIT"]

#############################################################################
#
# Copyright (c) 2008 by Casey Duncan and contributors
# All Rights Reserved.
#
# This software is subject to the provisions of the MIT License
# A copy of the license should accompany this distribution.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#
#############################################################################
"""Fire simulation using point sprites"""
__version__ = '$Id$'
import os
from pyglet import image
from pyglet.gl import *
from lepton import Particle, ParticleGroup, default_system
from lepton.renderer import PointRenderer
from lepton.texturizer import SpriteTexturizer, create_point_texture
from lepton.emitter import StaticEmitter
from lepton.domain import Line
from lepton.controller import Gravity, Lifetime, Movement, Fader, ColorBlender
win = pyglet.window.Window(resizable=True, visible=False)
win.clear()
glEnable(GL_BLEND)
glShadeModel(GL_SMOOTH)
glBlendFunc(GL_SRC_ALPHA,GL_ONE)
glDisable(GL_DEPTH_TEST)
flame = StaticEmitter(
rate=500,
template=Particle(
position=(300,25,0),
velocity=(0,0,0),
color=(1,1,1,1),
),
position=Line((win.width/2 - 85, -15, 0), (win.width/2 + 85, -15, 0)),
deviation=Particle(position=(10,0,0), velocity=(7,50,0), age=0.75)
)
default_system.add_global_controller(
Lifetime(6),
Gravity((0,20,0)),
Movement(),
ColorBlender(
[(0, (0,0,0.5,0)),
(0.5, (0,0,0.5,0.2)),
(0.75, (0,0.5,1,0.6)),
(1.5, (1,1,0,0.2)),
(2.7, (0.9,0.2,0,0.4)),
(3.2, (0.6,0.1,0.05,0.2)),
(4.0, (0.8,0.8,0.8,0.1)),
(6.0, (0.8,0.8,0.8,0)), ]
),
)
group = ParticleGroup(controllers=[flame],
renderer=PointRenderer(64, SpriteTexturizer(create_point_texture(64, 5))))
win.set_visible(True)
pyglet.clock.schedule_interval(default_system.update, (1.0/30.0))
pyglet.clock.set_fps_limit(None)
@win.event
def on_draw():
win.clear()
glLoadIdentity()
default_system.draw()
if __name__ == '__main__':
default_system.run_ahead(2, 30)
pyglet.app.run()

829fbfa6185a88b37d0e4fc7be2c4271027f431b | 3,810 | py | Python | landspout/cli.py | gmr/landspout | ["BSD-3-Clause"]

# coding=utf-8
"""
Command Line Interface
======================
"""
import argparse
import logging
import os
from os import path
import sys
from landspout import core, __version__
LOGGER = logging.getLogger('landspout')
LOGGING_FORMAT = '[%(asctime)-15s] %(levelname)-8s %(name)-15s: %(message)s'
def exit_application(message=None, code=0):
"""Exit the application displaying the message to info or error based upon
the exit code
:param str message: The exit message
:param int code: The exit code (default: 0)
"""
log_method = LOGGER.error if code else LOGGER.info
log_method(message.strip())
sys.exit(code)
def parse_cli_arguments():
"""Return the base argument parser for CLI applications.
:return: :class:`~argparse.ArgumentParser`
"""
parser = argparse.ArgumentParser(
'landspout', 'Static website generation tool',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
conflict_handler='resolve')
parser.add_argument('-s', '--source', metavar='SOURCE',
help='Source content directory',
default='content')
parser.add_argument('-d', '--destination', metavar='DEST',
help='Destination directory for built content',
default='build')
parser.add_argument('-t', '--templates', metavar='TEMPLATE DIR',
help='Template directory',
default='templates')
parser.add_argument('-b', '--base-uri-path', action='store', default='/')
parser.add_argument('--whitespace', action='store',
choices=['all', 'single', 'oneline'],
default='all',
help='Compress whitespace')
parser.add_argument('-n', '--namespace', type=argparse.FileType('r'),
help='Load a JSON file of values to inject into the '
'default rendering namespace.')
parser.add_argument('-i', '--interval', type=int, default=3,
help='Interval in seconds between file '
'checks while watching or serving')
parser.add_argument('--port', type=int, default=8080,
help='The port to listen on when serving')
parser.add_argument('--debug', action='store_true',
help='Extra verbose debug logging')
parser.add_argument('-v', '--version', action='version',
version='%(prog)s {}'.format(__version__),
help='output version information, then exit')
parser.add_argument('command', nargs='?',
choices=['build', 'watch', 'serve'],
help='The command to run', default='build')
return parser.parse_args()
def validate_paths(args):
"""Ensure all of the configured paths actually exist."""
if not path.exists(args.destination):
LOGGER.warning('Destination path "%s" does not exist, creating',
args.destination)
os.makedirs(path.normpath(args.destination))
for file_path in [args.source, args.templates]:
if not path.exists(file_path):
exit_application('Path {} does not exist'.format(file_path), 1)
def main():
"""Application entry point"""
args = parse_cli_arguments()
log_level = logging.DEBUG if args.debug else logging.INFO
logging.basicConfig(level=log_level, format=LOGGING_FORMAT)
LOGGER.info('Landspout v%s [%s]', __version__, args.command)
validate_paths(args)
landspout = core.Landspout(args)
if args.command == 'build':
landspout.build()
elif args.command == 'watch':
landspout.watch()
elif args.command == 'serve':
landspout.serve()
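
# Note (added): main() is presumably exposed as a console-script entry point
# by the package metadata (not shown here); a direct-run guard would be:
if __name__ == '__main__':
    main()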

82a12ebdf14809677818644038ba067ccbd91713 | 474 | py | Python | examples/test_cross.py | rballester/ttpy | ["MIT"]

import sys
sys.path.append('../')
import numpy as np
import tt
d = 30
n = 2 ** d
b = 1E3
h = b / (n + 1)
#x = np.arange(n)
#x = np.reshape(x, [2] * d, order = 'F')
#x = tt.tensor(x, 1e-12)
x = tt.xfun(2, d)
e = tt.ones(2, d)
x = x + e
x = x * h
sf = lambda x : np.sin(x) / x #Should be rank 2
y = tt.multifuncrs([x], sf, 1e-6, ['y0', tt.ones(2, d)])
#y1 = tt.tensor(sf(x.full()), 1e-8)
print "pi / 2 ~ ", tt.dot(y, tt.ones(2, d)) * h
#print (y - y1).norm() / y.norm()
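# Note (added): tt.dot(y, tt.ones(2, d)) * h is a Riemann sum, so the printed
# value approximates the integral of sin(x)/x over (0, b], which tends to
# pi/2 ~ 1.5708 as b grows (here b = 1e3).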

82a1dcab7cd90d7023343f02b2320478208cc588 | 26,434 | py | Python | phone2board.py | brandjamie/phone2board | ["MIT"]

import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.auth
import tornado.escape
import os.path
import logging
import sys
import urllib.parse
import json
from uuid import uuid4
from tornado.options import define, options
define("port", default=8000, help="run on the given port", type=int)
#to do -
# check character set of inputs (not vital as 'block' added to each user).
# scores?
#------------------------------------------------------------------------------Main app code-------------------------------------------
class Status (object):
currentStatus = "waitingforstart"
currentLoginStatus = "open"
currentTime = 90
currentQuestion = False
currentQuestionType = False
clientcallbacks = []
    users = {} # users is a dictionary - names are keys, each item is a dictionary of score and (if necessary), current question and correct or not
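    # e.g. after a question round an entry might look like (illustrative):
    #   users["alice"] = {'qnum': 0, 'level': 0, 'complete': "false",
    #                     'Score': 0, 'answer': "42", 'answerordinal': 3,
    #                     'mark': "correct", 'block': "false",
    #                     'finished': "false"}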
globalcallbacks = []
controlcallbacks = []
answercounter = 0
quiztype = ""
def registerclient(self, callback):
print('register client---------------------------------------------------------')
if (callback not in self.clientcallbacks):
self.clientcallbacks.append(callback)
def registerglobal(self, callback):
print('register global----------------------------------------------------------')
if (callback not in self.globalcallbacks):
self.globalcallbacks.append(callback)
def registercontrol(self, callback):
print('register control----------------------------------------------------------')
if (callback not in self.controlcallbacks):
self.controlcallbacks.append(callback)
    def adduser(self, name):
        # the user record is the same whichever game state we are in
        self.users[tornado.escape.native_str(name)]={'qnum':0,'level':0,'complete':"false",'Score':0,'answerordinal':10000, 'block':"false",'finished':"false"}
        self.notifyAddUser()
def removeuser(self, name):
print('removing user')
# self.users.remove(tornado.escape.native_str(name))
delname = tornado.escape.native_str(name)
if (delname in self.users):
del self.users[delname]
def setQuestion(self, question):
print('setquestion')
questtype = "open"
        # json.dumps escapes any quotes that the question text may contain
        jsonstring = json.dumps({"type": "question", "question": question})
self.clearAnswers()
self.currentQuestion = question
self.currentQuestionType = questtype
self.setStatus("waitingforanswer")
self.setLoginStatus("closed")
self.notifyGlobal(jsonstring)
jsonstring ='{"type":"question","status":"waitingforanswer","loginstatus":"closed"}'
self.notifyControl(jsonstring)
jsonstring ='{"type":"questionasked","qtype":"'+questtype+'"}'
self.notifyClient(jsonstring)
# print ("what the hell")
# self.notifyAnswer()
# could be named better as is doing the marking
def setControlAnswer(self, answer):
print('set control answer')
answers = answer.split('/')
print(len(answers))
for user in self.users.keys():
if ('answer' in self.users[user]):
if (self.users[user]['answer']in answers):
self.users[user]['mark']="correct"
else:
self.users[user]['mark']="incorrect"
self.notifyGlobalAnswer()
self.notifyUserAllAnswers()
def setCorrectFromControl(self, user):
if (self.users[user]):
self.users[user]['mark']="correct"
            print("does it work")
print(self.users[user]['mark'])
self.notifyGlobalAnswer()
self.notifyUserAnswerCorrect(user)
def setIncorrectFromControl(self, user):
if (self.users[user]):
self.users[user]['mark']="incorrect"
print(self.users[user]['mark'])
self.notifyGlobalAnswer()
self.notifyUserAnswerIncorrect(user)
def setBlockFromControl(self, user):
if (self.users[user]):
self.users[user]['block']="true"
self.notifyGlobalAnswer()
def setUnblockFromControl(self, user):
if (self.users[user]):
self.users[user]['block']="false"
self.notifyGlobalAnswer()
def toggleLoginStatus(self):
if (self.getLoginStatus()=="closed"):
self.setLoginStatus("open")
else:
self.setLoginStatus("closed")
self.notifyControlLoginStatus()
def toggleStatus(self):
if (self.getStatus()=="waitingforanswer"):
self.setStatus("answersclosed")
else:
self.setStatus("waitingforanswer")
self.notifyControlStatus()
def resetGame(self):
jsonstring = '{"type":"reset"}'
print("what the hell")
self.notifyClient(jsonstring)
def setAnswer(self, answer, user):
print('getting answer')
print (answer)
print (user)
self.users[user]['answer'] = answer
self.users[user]['answerordinal']=self.answercounter
self.users[user]['mark']="notmarked"
self.answercounter=self.answercounter + 1
self.notifyAnswer()
def setClientResult(self, level, qnum, finished, user):
print ('gotten result')
print (level)
print (qnum)
print (user)
print (finished)
self.users[user]['level']=int(level)
self.users[user]['qnum']=int(qnum)
self.users[user]['finished']=finished
self.notifyAnswer()
def clearAnswers(self):
self.answercounter = 0
for user in self.users.keys():
if ('answer' in self.users[user]):
del self.users[user]['answer']
self.users[user]['answerordinal']=10000
self.users[user]['mark']="notmarked"
def setStatus(self, status):
self.currentStatus = status
def setQuizType(self, quiztype):
self.quiztype = quiztype
def setLoginStatus(self, status):
self.currentLoginStatus = status
def setTime(self, time):
print("SETTING TIMER________________")
self.currentTime = time
self.notifyGlobalTimeChange(time)
self.notifyUserTimeChange(time)
def notifyAddUser(self):
print("notify add user")
jsonstring = '{"type":"users","users":['
print (self.users)
for c in self.users.keys():
jsonstring = jsonstring+'"'+c+'",'
jsonstring = jsonstring[:-1]
jsonstring = jsonstring+']}'
self.notifyGlobal(jsonstring)
self.notifyControlAnswer()
def notifyAnswer(self):
print ("notify answer")
self.notifyGlobalAnswer()
self.notifyControlAnswer()
def notifyGlobalAnswer(self):
print ("notify gloabla answer")
jsonstring = '{"type":"answers","answers":['
answerarray = self.makeAnswerArrayString()
jsonstring = jsonstring+answerarray
jsonstring = jsonstring+']}'
self.notifyGlobal(jsonstring)
def notifyUserAnswerCorrect(self, markedusername):
jsonstring = '{"type":"mark","mark":"correct","markeduser":"'
jsonstring = jsonstring+markedusername+'"}'
self.notifyClient(jsonstring)
def notifyUserAnswerIncorrect(self, markedusername):
jsonstring = '{"type":"mark","mark":"incorrect","markeduser":"'
jsonstring = jsonstring+markedusername+'"}'
self.notifyClient(jsonstring)
def notifyUserTimeChange(self, time):
print ("notify user time")
jsonstring = '{"type":"time","time":'
jsonstring = jsonstring+time
jsonstring = jsonstring+'}'
self.notifyClient(jsonstring)
def notifyGlobalTimeChange(self, time):
print ("notify gloabl time")
jsonstring = '{"type":"time","time":'
jsonstring = jsonstring+time
jsonstring = jsonstring+'}'
self.notifyGlobal(jsonstring)
def notifyUserAllAnswers(self):
print ("notify all users")
jsonstring = '{"type":"alluseranswers","answers":['
answerarray = self.makeAnswerArrayString()
jsonstring = jsonstring+answerarray
jsonstring = jsonstring+']}'
self.notifyClient(jsonstring)
def notifyControlAnswer(self):
print ("notify contorl answer")
jsonstring = '{"type":"answers","answers":['
controlanswerarray = self.makeControlArrayString()
jsonstring = jsonstring+controlanswerarray
jsonstring = jsonstring+']'
# jsonstring = jsonstring+ ',"status":"'
# jsonstring = jsonstring+self.application.status.getstatus()+'"'
jsonstring = jsonstring+'}'
self.notifyControl(jsonstring)
def notifyControlLoginStatus(self):
print(self.getLoginStatus())
jsonstring = '{"type":"loginstatus","loginstatus":"'
jsonstring = jsonstring+self.getLoginStatus()
jsonstring = jsonstring + '"}'
self.notifyControl(jsonstring)
def notifyControlStatus(self):
print(self.getStatus())
jsonstring = '{"type":"status","status":"'
jsonstring = jsonstring+self.getStatus()
jsonstring = jsonstring + '"}'
self.notifyControl(jsonstring)
def makeAnswerArrayString (self):
if self.quiztype == "multiq":
sortedlist = self.getMultiqSortedUserList()
else:
sortedlist = self.getSortedUserList()
jsonstring = ""
#for c in self.users.keys():
#self.application.quiztype
for c in sortedlist:
if self.quiztype == "multiq":
jsonstring = jsonstring+'['
jsonstring = jsonstring+'"'+c[0]+'",'
jsonstring = jsonstring+'"no answer",'
jsonstring = jsonstring+'"'+str(c[1]['answerordinal'])+'",'
jsonstring = jsonstring+'"'+str(c[1]['level'])+'",'
jsonstring = jsonstring+'"'+str(c[1]['block'])+'",'
jsonstring = jsonstring+'"'+str(c[1]['qnum'])+'",'
jsonstring = jsonstring+'"'+str(c[1]['finished'])+'"],'
else:
if ('answer' in c[1]):
jsonstring = jsonstring+'['
jsonstring = jsonstring+'"'+c[0]+'",'
jsonstring = jsonstring+'"'+c[1]['answer']+'",'
jsonstring = jsonstring+'"'+str(c[1]['answerordinal'])+'",'
jsonstring = jsonstring+'"'+c[1]['mark']+'",'
jsonstring = jsonstring+'"'+str(c[1]['block'])+'"],'
jsonstring = jsonstring[:-1]
return jsonstring
def getSortedUserList (self):
print("-------------------------------------")
listfromusers = self.users.items()
print(listfromusers)
sortedlist = sorted(listfromusers, key=lambda usered: usered[1]['answerordinal'])
print(sortedlist)
return sortedlist
def getMultiqSortedUserList (self):
listfromusers = self.users.items()
sortedlist = sorted(listfromusers, key=lambda usered: (usered[1]['level'], usered[1]['qnum'],usered[1]['answerordinal']), reverse = True)
print(sortedlist)
return sortedlist
def makeControlArrayString (self):
jsonstring = ""
if self.quiztype == "multiq":
jsonstring = self.makeMultiqControlArrayString()
else:
sortedlist = self.getSortedUserList()
for c in sortedlist:
jsonstring = jsonstring+'['
jsonstring = jsonstring+'"'+c[0]+'",'
if ('answer' in c[1]):
jsonstring = jsonstring+'"'+c[1]['answer']+'",'
jsonstring = jsonstring+'"'+str(c[1]['answerordinal'])+'",'
jsonstring = jsonstring+'"'+c[1]['mark']+'",'
jsonstring = jsonstring+'"'+str(c[1]['block'])+'"],'
else:
jsonstring = jsonstring+'"noanswer",'
jsonstring = jsonstring+'"'+str(c[1]['answerordinal'])+'",'
jsonstring = jsonstring+'"nomark",'
jsonstring = jsonstring+'"'+str(c[1]['block'])+'"],'
jsonstring = jsonstring[:-1]
return jsonstring
def makeMultiqControlArrayString (self):
jsonstring = ""
sortedlist = self.getSortedUserList()
for c in sortedlist:
jsonstring = jsonstring+'['
jsonstring = jsonstring+'"'+c[0]+'",'
if ('answer' in c[1]):
jsonstring = jsonstring+'"'+c[1]['answer']+'",'
jsonstring = jsonstring+'"'+str(c[1]['answerordinal'])+'",'
jsonstring = jsonstring+'"'+c[1]['mark']+'",'
jsonstring = jsonstring+'"'+str(c[1]['block'])+'"],'
else:
jsonstring = jsonstring+'"noanswer",'
jsonstring = jsonstring+'"'+str(c[1]['answerordinal'])+'",'
jsonstring = jsonstring+'"'+str(c[1]['level'])+'",'
jsonstring = jsonstring+'"'+str(c[1]['block'])+'",'
jsonstring = jsonstring+'"'+str(c[1]['qnum'])+'"],'
jsonstring = jsonstring[:-1]
print (jsonstring)
print ("make controll array string")
return jsonstring
def notifyGlobal(self, message):
for c in self.globalcallbacks:
print('globalcallbacks')
print(message)
print(c)
c(message)
self.globalcallbacks=[]
def notifyControl(self, message):
for c in self.controlcallbacks:
print('controlcallbacks')
print(message)
print(c)
c(message)
self.controlcallbacks=[]
def notifyClient(self, message):
for c in self.clientcallbacks:
print('controlcallbacks')
print(message)
print(c)
c(message)
self.clientcallbacks=[]
def getUsers(self):
return self.users.keys()
def getStatus(self):
return self.currentStatus
def getTime(self):
return self.currentTime
def getLoginStatus(self):
return self.currentLoginStatus
def getQuestion(self):
return self.currentQuestion
def getQuizType(self):
        return self.quiztype
def getQuestionType(self):
return self.currentQuestionType
#----------------------------------------------------------status handlers-------------------------
# these handle the async hooks from the pages and send messages back to the pages
# a lot of shared code here - I'm sure this could be better!
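# Note (added): the three handlers below differ only in which register* method
# they call; a shared base class along these lines could remove the
# duplication (illustrative sketch, not wired into the application):
class StatusHandlerBase(tornado.web.RequestHandler):
    registername = None  # subclasses would set e.g. 'registerclient'
    @tornado.web.asynchronous
    def get(self):
        getattr(self.application.status, self.registername)(self.on_message)
    def on_message(self, message):
        self.write(message)
        self.finish()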
class ClientStatusHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
@tornado.gen.engine
def get(self):
print("register client")
self.application.status.registerclient(self.on_message)
def on_message(self, message):
print("client message sent")
print(message)
self.write(message)
self.finish()
class GlobalStatusHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
@tornado.gen.engine
def get(self):
        print("register global")
self.application.status.registerglobal(self.on_message)
def on_message(self, message):
print("global message sent")
print(message)
self.write(message)
self.finish()
class ControlStatusHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
@tornado.gen.engine
def get(self):
        print("registered control")
self.application.status.registercontrol(self.on_message)
def on_message(self, message):
print("control message sent")
print(message)
self.write(message)
self.finish()
# message handlers - receive messages from the pages (currently only control and client)
class ControlMessageHandler(tornado.web.RequestHandler):
def get(self):
messagetype = self.get_argument("type")
if messagetype=="question":
question = urllib.parse.unquote(self.get_argument("question"))
self.application.status.setQuestion(question)
if messagetype=="time":
time = urllib.parse.unquote(self.get_argument("time"))
self.application.status.setTime(time)
if messagetype=="controlanswer":
answer = urllib.parse.unquote(self.get_argument("answer"))
self.application.status.setControlAnswer(answer)
if messagetype=="markcorrect":
name = urllib.parse.unquote(self.get_argument("id"))
self.application.status.setCorrectFromControl(name)
if messagetype=="markincorrect":
name = urllib.parse.unquote(self.get_argument("id"))
self.application.status.setIncorrectFromControl(name)
if messagetype=="block":
name = urllib.parse.unquote(self.get_argument("id"))
self.application.status.setBlockFromControl(name)
if messagetype=="unblock":
name = urllib.parse.unquote(self.get_argument("id"))
self.application.status.setUnblockFromControl(name)
if messagetype=="toggleloginstatus":
self.application.status.toggleLoginStatus()
if messagetype=="togglestatus":
self.application.status.toggleStatus()
if messagetype=="resetgame":
            self.application.status.resetGame()
self.finish()
class ClientMessageHandler(tornado.web.RequestHandler):
def get(self):
messagetype = self.get_argument("type")
if messagetype=="answer":
currentstatus = self.application.status.getStatus()
if (currentstatus=="waitingforanswer"):
answer = urllib.parse.unquote(self.get_argument("answer"))
user = tornado.escape.native_str(self.get_secure_cookie("username"))
self.application.status.setAnswer(answer,user)
if messagetype=="clientmarked":
currentstatus = self.application.status.getStatus()
if (currentstatus=="waitingforanswer"):
user = tornado.escape.native_str(self.get_secure_cookie("username"))
level = self.get_argument("level");
qnum = self.get_argument("qnum");
finished = self.get_argument("finished");
self.application.status.setClientResult(level, qnum, finished, user);
self.finish()
class GlobalMessageHandler(tornado.web.RequestHandler):
def get(self):
messagetype = self.get_argument("type")
if messagetype=="requestanswers":
self.application.status.notifyAnswer()
self.finish()
# - template handlers ------------- pages that are actually called by the browser.
class ClientPageHandler(tornado.web.RequestHandler):
def get_current_user(self):
return self.get_secure_cookie("username")
def get(self):
session = uuid4()
class LoginHandler(ClientPageHandler):
def get(self):
#print (self.application.gamefile)
#print (self.application.gamefile["quiztype"])
if self.application.status.getLoginStatus()=="open":
self.render('login.html')
elif self.get_secure_cookie("username"):
print(self.application.status.getStatus())
self.redirect("/")
else:
print(self.application.status.getStatus())
self.render('gamestarted.html')
def post(self):
# if client already has a username set, remove it from the list before creating a new username
if self.get_secure_cookie("username"):
self.application.status.removeuser(self.current_user)
# create new user
self.set_secure_cookie("username",self.get_argument("username"),expires_days=1)
self.redirect("/")
class ClientWelcome(ClientPageHandler):
@tornado.web.authenticated
def get(self):
session = uuid4()
self.application.status.adduser(self.current_user)
currentstatus = self.application.status.getStatus()
currenttime = self.application.status.getTime()
questionarray = self.application.questionarray
currentquestiontype = self.application.status.getQuestionType()
clientpage = self.application.quiztypes[self.application.quiztype]['client_page']
self.render(clientpage,session=session,user=self.current_user, status=currentstatus, questiontype=currentquestiontype,time=currenttime, levels = questionarray)
class ControlPageHandler(tornado.web.RequestHandler):
def get(self):
# users = self.application.status.getUsers()
# userstring = "','".join(str(thisuser) for thisuser in users)
controlstring = self.application.status.makeControlArrayString()
currentstatus = self.application.status.getStatus()
currentloginstatus = self.application.status.getLoginStatus()
currenttime = self.application.status.getTime()
quiztype = "'" + self.application.quiztype + "'"
questionarray = self.application.questionarray
answerarray = self.application.answerarray
page = self.application.quiztypes[self.application.quiztype]["control_page"]
self.render(page,teams="["+str(controlstring)+"]", status=currentstatus, loginstatus=currentloginstatus, time=currenttime, quiztype = quiztype, questionarray = questionarray, answerarray = answerarray)
class GlobalPageHandler(tornado.web.RequestHandler):
def get(self):
users = self.application.status.getUsers()
userstring = '","'.join(str(thisuser) for thisuser in users)
currentstatus = self.application.status.getStatus()
currentquestion = self.application.status.getQuestion()
currentanswers = self.application.status.makeAnswerArrayString()
currenttime = self.application.status.getTime()
globalpage = self.application.quiztypes[self.application.quiztype]["global_page"]
# should add extra [ ] for current answers string (as in teams) - currently done in javascript
self.render(globalpage,teams='["'+str(userstring)+'"]', status=currentstatus, question=currentquestion, answers=currentanswers,time=currenttime)
class Application(tornado.web.Application):
def __init__(self):
self.status = Status()
# self.gametype = "default"
print('init')
handlers = [
(r'/',ClientWelcome),
(r'/control',ControlPageHandler),
(r'/global',GlobalPageHandler),
(r'/login',LoginHandler),
(r'/clientstatus',ClientStatusHandler),
(r'/globalstatus',GlobalStatusHandler),
(r'/controlstatus',ControlStatusHandler),
(r'/controlmessage',ControlMessageHandler),
(r'/clientmessage',ClientMessageHandler),
(r'/globalmessage',GlobalMessageHandler),
]
settings = {
'template_path':'./templates',
'static_path':'./static',
'cookie_secret':'123456',
'login_url':'/login',
        'xsrf_cookies':True,
'debug':True,
}
## states which pages should be served for each type of quiz.
self.quiztypes = {
'default':{"client_page":"default_client.html",
"global_page":"default_global.html",
"control_page":"default_control.html"},
'fixed_answers':{"client_page":"default_client.html",
"global_page":"default_global.html",
"control_page":"default_control.html"},
'open_answers':{"client_page":"default_client.html",
"global_page":"default_global.html",
"control_page":"default_control.html"},
'fixed_timed':{"client_page":"timed_client.html",
"global_page":"timed_global.html",
"control_page":"timed_control.html"},
'open_timed':{"client_page":"timed_client.html",
"global_page":"timed_global.html",
"control_page":"timed_control.html"},
'multiq':{"client_page":"multiq_client.html",
"global_page":"multiq_global.html",
"control_page":"multiq_control.html"}
}
tornado.web.Application.__init__(self, handlers,**settings)
if __name__ == '__main__':
# tornado.options.parse_command_line()
def set_defaults():
app.quiztype = "default"
app.notes = "Open ended questions can be entered in control pages. Answers can be marked individualy or by entering an answer in the control page."
app.questionarray = "{}"
app.answerarray = "{}"
app = Application()
if len(sys.argv) > 1:
try:
with open(sys.argv[1]) as json_data:
app.gamefile = json.load(json_data)
json_data.close()
app.quiztype = app.gamefile["quiztype"]
if "notes" in app.gamefile:
app.notes = app.gamefile["notes"]
if "questionarray" in app.gamefile:
app.questionarray = app.gamefile["questionarray"]
else:
app.questionarray = "{}"
if "answerarray" in app.gamefile:
app.answerarray = app.gamefile["answerarray"]
else:
app.answerarray = "{}"
    except Exception:
print("not a valid json file, using defaults")
set_defaults()
else:
print("no file given - using defaults")
set_defaults()
app.status.setQuizType(app.quiztype)
http_server = tornado.httpserver.HTTPServer(app)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
| 36.970629 | 209 | 0.591208 | 23,916 | 0.904744 | 0 | 0 | 1,146 | 0.043353 | 0 | 0 | 5,823 | 0.220284 |
82a2aae9ea64aaa7fb4b9cb2856b242dd76d5578 | 239 | py | Python | scripts/plotRUC.py | akrherz/radcomp | d44459f72891c6e1a92b61488e08422383b000d1 | [
"Apache-2.0"
]
| 3 | 2015-04-18T22:23:27.000Z | 2016-05-12T11:24:32.000Z | scripts/plotRUC.py | akrherz/radcomp | d44459f72891c6e1a92b61488e08422383b000d1 | [
"Apache-2.0"
]
| 4 | 2016-09-30T15:04:46.000Z | 2022-03-05T13:32:40.000Z | scripts/plotRUC.py | akrherz/radcomp | d44459f72891c6e1a92b61488e08422383b000d1 | [
"Apache-2.0"
]
| 4 | 2015-04-18T22:23:57.000Z | 2017-05-07T15:23:37.000Z | import matplotlib.pyplot as plt
import netCDF4
import numpy
nc = netCDF4.Dataset("data/ructemps.nc")
data = nc.variables["tmpc"][17, :, :]
nc.close()
(fig, ax) = plt.subplots(1, 1)
ax.imshow(numpy.flipud(data))
fig.savefig("test.png")
| 17.071429 | 40 | 0.698745 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 34 | 0.142259 |
82a4a9f7dd1ed9b3be8582ffaccf49c75f0cf8a6 | 3,031 | py | Python | tools/draw_cal_lr_ablation.py | twangnh/Calibration_mrcnn | e5f3076cefbe35297a403a753bb57e11503db818 | [
"Apache-2.0"
]
| 87 | 2020-07-24T01:28:39.000Z | 2021-08-29T08:40:18.000Z | tools/draw_cal_lr_ablation.py | twangnh/Calibration_mrcnn | e5f3076cefbe35297a403a753bb57e11503db818 | [
"Apache-2.0"
]
| 3 | 2020-09-27T12:59:28.000Z | 2022-01-06T13:14:08.000Z | tools/draw_cal_lr_ablation.py | twangnh/Calibration_mrcnn | e5f3076cefbe35297a403a753bb57e11503db818 | [
"Apache-2.0"
]
| 20 | 2020-09-05T04:37:19.000Z | 2021-12-13T02:25:48.000Z |
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import math
from matplotlib.ticker import FormatStrFormatter
from matplotlib import scale as mscale
from matplotlib import transforms as mtransforms
# z = [0,0.1,0.3,0.9,1,2,5]
z = [7.8, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1230]
# thick = [20,40,20,60,37,32,21]
# thick=[15.4, 18.2, 18.7, 19.2, 19.4, 19.5, 19.9, 20.1, 20.4, 20.5, 20.6, 20.7, 20.8, 20.7, 20.7, 20.6, 20.6, 20.6, 20.5, 20.5, 19.8]
mrcnn=[17.7, 19.8, 20.0, 19.9, 20.2, 19.5, 19.1, 19.1]
x_ticks = [0.001, 0.002, 0.004, 0.008, 0.01, 0.02, 0.04, 0.08]
# plt.plot([1.0],[44.8], 'D', color = 'black')
# plt.plot([0],[35.9], 'D', color = 'red')
# plt.plot([1.0],[56.8], 'D', color = 'black')
fig = plt.figure(figsize=(8,5))
ax1 = fig.add_subplot(111)
matplotlib.rcParams.update({'font.size': 20})
ax1.plot(x_ticks, mrcnn, linestyle='dashed', marker='o', linewidth=2, c='k', label='mrcnn-r50-ag')
# ax1.plot(z, htc, marker='o', linewidth=2, c='g', label='htc')
# ax1.plot([1e-4],[15.4], 'D', color = 'green')
# ax1.plot([1230],[19.8], 'D', color = 'red')
plt.xlabel('calibration lr', size=16)
plt.ylabel('bAP', size=16)
# plt.gca().set_xscale('custom')
ax1.set_xscale('log')
ax1.set_xticks(x_ticks)
# from matplotlib.ticker import ScalarFormatter
# ax1.xaxis.set_major_formatter(ScalarFormatter())
# plt.legend(['calibration lr'], loc='best')
plt.minorticks_off()
plt.grid()
plt.savefig('calibration_lr.eps', format='eps', dpi=1000)
plt.show()
# import numpy as np
# import matplotlib.pyplot as plt
# from scipy.interpolate import interp1d
# y1=[35.9, 43.4, 46.1, 49.3, 50.3, 51.3, 51.4, 49.9, 49.5, 48.5, 44.8]
# y2=[40.5, 48.2, 53.9 , 56.9, 57.8, 59.2, 58.3, 57.9, 57.5, 57.2, 56.8]
# y3=[61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5, 61.5]
# x = np.linspace(0, 1, num=11, endpoint=True)
#
# f1 = interp1d(x, y1, kind='cubic')
# f2 = interp1d(x, y2, kind='cubic')
# f3 = interp1d(x, y3, kind='cubic')
# xnew = np.linspace(0, 1, num=101, endpoint=True)
# plt.plot(xnew, f3(xnew), '--', color='fuchsia')
# plt.plot(xnew, f1(xnew), '--', color='blue')
# plt.plot(xnew, f2(xnew), '--', color='green')
#
# plt.plot([0],[40.5], 'D', color = 'red')
# plt.plot([1.0],[44.8], 'D', color = 'black')
# plt.plot([0],[35.9], 'D', color = 'red')
# plt.plot([1.0],[56.8], 'D', color = 'black')
# plt.plot(x, y3, 'o', color = 'fuchsia')
# plt.plot(x, y1, 'o', color = 'blue')
# plt.plot(x, y2, 'o', color = 'green')
# plt.plot([0],[40.5], 'D', color = 'red')
# plt.plot([1.0],[44.8], 'D', color = 'black')
# plt.plot([0],[35.9], 'D', color = 'red')
# plt.plot([1.0],[56.8], 'D', color = 'black')
# plt.legend(['teacher','0.25x', '0.5x', 'full-feature-imitation', 'only GT supervison'], loc='best')
# plt.xlabel('Thresholding factor')
# plt.ylabel('mAP')
# plt.title('Resulting mAPs of varying thresholding factors')
# #plt.legend(['0.5x'])
# # plt.savefig('varying_thresh.eps', format='eps', dpi=1000)
# plt.show()
| 35.244186 | 134 | 0.61069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,203 | 0.726823 |
82a4b552433b963daf6809d4d3f789619df85472 | 432 | py | Python | discord bot.py | salihdursun1/dc-bot | f5223f83134d6f8938d6bcf572613e80eb4ef33c | [
"Unlicense"
]
| null | null | null | discord bot.py | salihdursun1/dc-bot | f5223f83134d6f8938d6bcf572613e80eb4ef33c | [
"Unlicense"
]
| null | null | null | discord bot.py | salihdursun1/dc-bot | f5223f83134d6f8938d6bcf572613e80eb4ef33c | [
"Unlicense"
]
| null | null | null | import discord
from discord.ext.commands import Bot
TOKEN = "<discordtoken>"
bot = Bot(command_prefix="!")
@bot.event
async def on_ready():
print("Bot Hazır " + str(bot.user))
@bot.event
async def on_message(message):
    if message.author == bot.user:
return
if message.content == "selam":
await message.channel.send("selam naber")
bot.run(TOKEN) | 18 | 50 | 0.638889 | 0 | 0 | 0 | 0 | 261 | 0.602771 | 237 | 0.547344 | 52 | 0.120092 |
82a4daed7ce221589ab2b1a7f5ba42efc8b6ae34 | 653 | py | Python | Lesson08/problem/problem_optional_pandas.py | AlexMazonowicz/PythonFundamentals | 5451f61d3b4e7cd285dea442795c25baa5072ef9 | [
"MIT"
]
| 2 | 2020-02-27T01:33:43.000Z | 2021-03-29T13:11:54.000Z | Lesson08/problem/problem_optional_pandas.py | AlexMazonowicz/PythonFundamentals | 5451f61d3b4e7cd285dea442795c25baa5072ef9 | [
"MIT"
]
| null | null | null | Lesson08/problem/problem_optional_pandas.py | AlexMazonowicz/PythonFundamentals | 5451f61d3b4e7cd285dea442795c25baa5072ef9 | [
"MIT"
]
| 6 | 2019-03-18T04:49:11.000Z | 2022-03-22T04:03:19.000Z | import pandas as pd
# Global variable to set the base path to our dataset folder
base_url = '../dataset/'
def update_mailing_list_pandas(filename):
"""
Your docstring documentation starts here.
For more information on how to proper document your function, please refer to the official PEP8:
https://www.python.org/dev/peps/pep-0008/#documentation-strings.
"""
    df = pd.read_csv(base_url + filename)  # Read your csv file with pandas
    # Filter only the rows with the `active` flag set, then return the number of rows.
    # (This assumes the flag column is literally named 'active' and holds truthy values.)
    return len(df[df['active'] == 1])
# Calling the function to test your code
print(update_mailing_list_pandas('mailing_list.csv'))
| 29.681818 | 104 | 0.70291 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 497 | 0.761103 |
82a57dff7d64fdf50fbba80937d52605a8fc479c | 7,357 | py | Python | example_problems/tutorial/euler_dir/services/is_eulerian_server.py | romeorizzi/TALight | 2b694cb487f41dd0d36d7aa39f5c9c5a21bfc18e | [
"MIT"
]
| 4 | 2021-06-27T13:27:24.000Z | 2022-03-24T10:46:28.000Z | example_problems/tutorial/euler_dir/services/is_eulerian_server.py | romeorizzi/TALight | 2b694cb487f41dd0d36d7aa39f5c9c5a21bfc18e | [
"MIT"
]
| 1 | 2021-01-23T06:50:31.000Z | 2021-03-17T15:35:18.000Z | example_problems/tutorial/euler_dir/services/is_eulerian_server.py | romeorizzi/TALight | 2b694cb487f41dd0d36d7aa39f5c9c5a21bfc18e | [
"MIT"
]
| 5 | 2021-04-01T15:21:57.000Z | 2022-01-29T15:07:38.000Z | #!/usr/bin/env python3
# "This service will check your statement that a directed graph you provide us admits an eulerian walk (of the specified type)""
from os import EX_TEMPFAIL
from sys import stderr, exit
import collections
from multilanguage import Env, Lang, TALcolors
from TALinputs import TALinput
from euler_dir_lib import *
# METADATA OF THIS TAL_SERVICE:
args_list = [
('walk_type',str),
('feedback',str),
('eulerian',bool),
('MAXN',int),
('MAXM',int),
]
ENV =Env(args_list)
TAc =TALcolors(ENV)
LANG=Lang(ENV, TAc, lambda fstring: eval(f"f'{fstring}'"))
MAXN = ENV['MAXN']
MAXM = ENV['MAXM']
# START CODING YOUR SERVICE:
print(f"#? waiting for your directed graph.\nFormat: each line two numbers separated by space. On the first line the number of nodes (an integer n in the interval [1,{MAXN}]) and the number of arcs (an integer m in the interval [1,{MAXM}]). Then follow m lines, one for each arc, each with two numbers in the interval [0,n). These specify the tail node and the head node of the arc, in this order.\nAny line beggining with the '#' character is ignored.\nIf you prefer, you can use the 'TA_send_txt_file.py' util here to send us the lines of a file. Just plug in the util at the 'rtal connect' command like you do with any other bot and let the util feed in the file for you rather than acting by copy and paste yourself.")
n, m = TALinput(int, 2, TAc=TAc)
if n < 1:
TAc.print(LANG.render_feedback("n-LB", f"# ERRORE: il numero di nodi del grafo deve essere almeno 1. Invece il primo dei numeri che hai inserito è n={n}."), "red")
exit(0)
if m < 0:
TAc.print(LANG.render_feedback("m-LB", f"# ERRORE: il numero di archi del grafo non può essere negativo. Invece il secondo dei numeri che hai inserito è m={m}."), "red")
exit(0)
if n > MAXN:
TAc.print(LANG.render_feedback("n-UB", f"# ERRORE: il numero di nodi del grafo non può eccedere {ENV['MAXN']}. Invece il primo dei numeri che hai inserito è n={n}>{ENV['MAXN']}."), "red")
exit(0)
if m > MAXM:
TAc.print(LANG.render_feedback("m-UB", f"# ERRORE: il numero di archi del grafo non può eccedere {ENV['MAXM']}. Invece il secondo dei numeri che hai inserito è n={n}>{ENV['MAXM']}."), "red")
exit(0)
g = Graph(int(n))
adj = [ [] for _ in range(n)]
for i in range(m):
head, tail = TALinput(int, 2, TAc=TAc)
if tail >= n or head >= n or tail < 0 or head < 0:
TAc.print(LANG.render_feedback("n-at-least-1", f"# ERRORE: entrambi gli estremi di un arco devono essere nodi del grafo, ossia numeri interi ricompresi nell'intervallo [0,{ENV['MAXN']}."), "red")
exit(0)
g.addEdge(int(head),int(tail))
adj[int(head)].append(int(tail))
eul = ENV['eulerian']
if eul == 1:
if ENV['walk_type'] == "closed":
answer1 = g.isEulerianCycle()
if answer1 == eul:
TAc.OK()
if answer1 == True:
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"green")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
printCircuit(adj)
exit(0)
else:
TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian cycle!"),"red")
exit(0)
else:
TAc.NO()
exit(0)
if ENV['walk_type'] == "open":
answer1 = g.isEulerianWalk()
answer2 = g.isEulerianCycle()
        if answer1 == eul and answer2 == False and answer1 == True:
TAc.OK()
if answer1 == True:
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"green")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
printCircuit(adj)
exit(0)
else:
TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian walk!"),"red")
exit(0)
else:
TAc.NO()
exit(0)
if ENV['walk_type'] == "any":
answer1 = g.isEulerianCycle()
answer2 = g.isEulerianWalk()
if answer1 == eul or answer2 == eul:
TAc.OK()
if answer1 == eul:
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"green")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
printCircuit(adj)
exit(0)
if answer2 == eul:
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"green")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"green")
g.printEulerTour()
exit(0)
else:
TAc.print(LANG.render_feedback("not-eulerian", f"Il grafo NON contiene alcun eulerian walk/cycle!"),"red")
exit(0)
if eul == 0:
if ENV['walk_type'] == "closed":
answer1 = g.isEulerianCycle()
if answer1 == eul:
TAc.OK()
else:
TAc.NO()
if answer1 == True:
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"red")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
printCircuit(adj)
exit(0)
exit(0)
if ENV['walk_type'] == "open":
answer1 = g.isEulerianWalk()
answer2 = g.isEulerianCycle()
if answer1 == eul:
TAc.OK()
else:
TAc.NO()
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"red")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
printCircuit(adj)
exit(0)
if ENV['walk_type'] == "any":
answer1 = g.isEulerianCycle()
answer2 = g.isEulerianWalk()
if answer1 == True or answer2 == True:
TAc.NO()
if answer1 == True:
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian cycle!"),"red")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
printCircuit(adj)
exit(0)
if answer2 == True:
TAc.print(LANG.render_feedback("eulerian", f"Il grafo ammette un eulerian walk!"),"red")
if ENV['feedback'] == "with_YES_certificate":
TAc.print(LANG.render_feedback("here-is-the-certificate", f"Eccone uno:"),"red")
g.printEulerTour()
exit(0)
else:
TAc.OK()
exit(0)
| 43.532544 | 722 | 0.578904 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,077 | 0.417844 |
82a59289b498d6c0a5800f00f50c27c1b22e3ddd | 1,047 | py | Python | get_vocab.py | Amir-Mehrpanah/hgraph2graph | 6d37153afe09f7684381ce56e8366675e22833e9 | [
"MIT"
]
| 182 | 2019-11-15T15:59:31.000Z | 2022-03-31T09:17:40.000Z | get_vocab.py | Amir-Mehrpanah/hgraph2graph | 6d37153afe09f7684381ce56e8366675e22833e9 | [
"MIT"
]
| 30 | 2020-03-03T16:35:52.000Z | 2021-12-16T04:06:57.000Z | get_vocab.py | Amir-Mehrpanah/hgraph2graph | 6d37153afe09f7684381ce56e8366675e22833e9 | [
"MIT"
]
| 60 | 2019-11-15T05:06:11.000Z | 2022-03-31T16:43:12.000Z | import sys
import argparse
from hgraph import *
from rdkit import Chem
from multiprocessing import Pool
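# Assumed invocation (the script reads whitespace-separated SMILES pairs from
# stdin and prints the motif vocabulary to stdout; file names are illustrative):
#   python get_vocab.py --ncpu 8 < data/train_pairs.txt > vocab.txt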
def process(data):
vocab = set()
for line in data:
s = line.strip("\r\n ")
hmol = MolGraph(s)
for node,attr in hmol.mol_tree.nodes(data=True):
smiles = attr['smiles']
vocab.add( attr['label'] )
for i,s in attr['inter_label']:
vocab.add( (smiles, s) )
return vocab
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--ncpu', type=int, default=1)
args = parser.parse_args()
data = [mol for line in sys.stdin for mol in line.split()[:2]]
data = list(set(data))
batch_size = len(data) // args.ncpu + 1
batches = [data[i : i + batch_size] for i in range(0, len(data), batch_size)]
pool = Pool(args.ncpu)
vocab_list = pool.map(process, batches)
vocab = [(x,y) for vocab in vocab_list for x,y in vocab]
vocab = list(set(vocab))
for x,y in sorted(vocab):
print(x, y)
| 27.552632 | 81 | 0.603629 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 53 | 0.050621 |
82a5daea9d746a5e0fd1a18fd73ba8a7a242e08f | 612 | py | Python | web_app/cornwall/views.py | blackradley/heathmynd | 4495f8fadef9d3a36a7d5b49fae2b61cceb158bc | [
"MIT"
]
| null | null | null | web_app/cornwall/views.py | blackradley/heathmynd | 4495f8fadef9d3a36a7d5b49fae2b61cceb158bc | [
"MIT"
]
| 4 | 2018-11-06T16:15:10.000Z | 2018-11-07T12:03:09.000Z | web_app/cornwall/views.py | blackradley/heathmynd | 4495f8fadef9d3a36a7d5b49fae2b61cceb158bc | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
""" test """
from __future__ import unicode_literals
from django.template.loader import get_template
from django.contrib import messages
# Create your views here.
from django.http import HttpResponse
def index(request):
""" index """
template = get_template('cornwall/index.html')
messages.set_level(request, messages.DEBUG)
list(messages.get_messages(request))# clear out the previous messages
messages.add_message(request, messages.INFO, 'Hello world.')
context = {'nbar': 'cornwall'}
html = template.render(context, request)
return HttpResponse(html)
| 32.210526 | 73 | 0.730392 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 157 | 0.256536 |
82a91b76040314d727ba1163f259b5cbea984d08 | 838 | py | Python | vshare/user_/urls.py | jeyrce/vshare | 269fe05a4dc36f6fbf831ddf5057af95312b75ca | [
"Apache-2.0"
]
| 4 | 2019-11-30T06:07:14.000Z | 2020-10-27T08:48:23.000Z | vshare/user_/urls.py | jeeyshe/vshare | 269fe05a4dc36f6fbf831ddf5057af95312b75ca | [
"Apache-2.0"
]
| null | null | null | vshare/user_/urls.py | jeeyshe/vshare | 269fe05a4dc36f6fbf831ddf5057af95312b75ca | [
"Apache-2.0"
]
| null | null | null | # coding = utf-8
# env = python3.5.2
# author = lujianxin
# time = 201x-xx-xx
# purpose= - - -
from django.urls import re_path
from . import views
urlpatterns = [
    # path mappings under this module
re_path(r'usercenter$', views.UserCenter.as_view()),
re_path(r'details/(\d+)$', views.UserDetails.as_view()),
re_path(r'login$', views.Login.as_view()),
re_path(r'regist$', views.Regist.as_view()),
re_path(r'logout$', views.Logout.as_view()),
re_path(r'securecenter$', views.SecureCenter.as_view()),
re_path(r'write_article$', views.WriteArticle.as_view()),
re_path(r'change_art/(\d+)$', views.ChangeArt.as_view()),
re_path(r'cpwd$', views.ModifyPwd.as_view()),
re_path(r'findpwd$', views.FindPwd.as_view()),
re_path(r'cpwdsafe$', views.ModifyPwdSafe.as_view()),
]
if __name__ == '__main__':
pass
| 27.032258 | 61 | 0.656325 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 280 | 0.327103 |
82a930b9747975fe0452c3e4307e6fa5f2321ccf | 1,825 | py | Python | Day_3/task2.py | DjaffDjaff/AdventOfCode | cf4f60dc71e349a44f4b5d07dbf4aa8555a4a37a | [
"MIT"
]
| 2 | 2021-12-03T23:14:28.000Z | 2021-12-03T23:16:54.000Z | Day_3/task2.py | DjaffDjaff/AdventOfCode | cf4f60dc71e349a44f4b5d07dbf4aa8555a4a37a | [
"MIT"
]
| null | null | null | Day_3/task2.py | DjaffDjaff/AdventOfCode | cf4f60dc71e349a44f4b5d07dbf4aa8555a4a37a | [
"MIT"
]
| null | null | null | import math
oxygen_rating = 0
co2_rating = 0
length = 0
n_bits = 12
common = [0] * n_bits
anti = [0] * n_bits
numbers = []
def new_bitmap(old_list):
new_list = [0] * n_bits
for num in old_list:
for j, bit in enumerate(num):
new_list[j] += bit
return new_list
with open("data.txt", "r") as f:
lines = f.readlines()
length = len(lines)
for line in lines:
bitmap = list(line.strip("\n"))
bitmap = [int(bit) for bit in bitmap]
numbers.append(bitmap)
#print(bitmap)
for j, bit in enumerate(bitmap):
common[j] += bit
# Let's find oxygen generator rating first
numbers_copy = [number for number in numbers]
for i in range(n_bits):
# Update common
common = new_bitmap(numbers)
# if more 1s in bit i
if common[i] >= len(numbers)/2:
most_c = 1
else:
most_c = 0
#print(f"In round {i+1}, most common: {most_c}")
numbers[:] = [number for number in numbers if (number[i] == most_c)]
#print(numbers)
if len(numbers) < 2:
break
oxygen_rating = int("".join(str(bit) for bit in numbers[0]), 2)
print("O2:",oxygen_rating)
for i in range(n_bits):
# Update common
common = new_bitmap(numbers_copy)
# if more 1s in bit i
if common[i] >= len(numbers_copy)/2:
most_c = 1
else:
most_c = 0
#print(f"In round {i+1}, most common: {most_c}")
numbers_copy[:] = [number for number in numbers_copy if (number[i] != most_c)]
#print(numbers_copy)
if len(numbers_copy) < 2:
break
co2_rating = int("".join(str(bit) for bit in numbers_copy[0]), 2)
print("CO2:", co2_rating)
print("Answer: ", oxygen_rating*co2_rating)
| 23.701299 | 83 | 0.566575 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 312 | 0.170959 |
82a9ed6ace49d5ef752eef71a6cddc94ed97513e | 7,838 | py | Python | polyjuice/filters_and_selectors/perplex_filter.py | shwang/polyjuice | 5f9a3a23d95e4a3877cc048cbcef01f071dc6353 | [
"BSD-3-Clause"
]
| 38 | 2021-05-25T02:18:40.000Z | 2022-03-25T12:09:58.000Z | polyjuice/filters_and_selectors/perplex_filter.py | shwang/polyjuice | 5f9a3a23d95e4a3877cc048cbcef01f071dc6353 | [
"BSD-3-Clause"
]
| 7 | 2021-06-03T04:08:55.000Z | 2021-12-06T06:53:05.000Z | polyjuice/filters_and_selectors/perplex_filter.py | shwang/polyjuice | 5f9a3a23d95e4a3877cc048cbcef01f071dc6353 | [
"BSD-3-Clause"
]
| 5 | 2021-11-12T21:43:59.000Z | 2022-03-22T21:51:08.000Z | import math
import numpy as np
from munch import Munch
from transformers import GPT2LMHeadModel, GPT2TokenizerFast
import torch
from copy import deepcopy
#########################################################################
### compute perplexity
#########################################################################
def _add_special_tokens(text, tokenizer):
return tokenizer.bos_token + text + tokenizer.eos_token
def _tokens_log_prob(texts, model, tokenizer, batch_size=128, is_cuda=True):
outputs = []
for i in range(0, len(texts), batch_size):
batch = texts[i : i + batch_size]
outputs.extend(_tokens_log_prob_for_batch(batch, model, tokenizer, is_cuda=is_cuda))
return outputs
def _tokens_log_prob_for_batch(texts, model, tokenizer, is_cuda=True):
device = "cuda" if is_cuda else "cpu"
outputs = []
texts = [_add_special_tokens(text, tokenizer) for text in deepcopy(texts)]
#encoding = tokenizer.batch_encode_plus(texts, return_tensors='pt')
encoding = tokenizer.batch_encode_plus(texts, return_tensors='pt', truncation=True, padding=True)
with torch.no_grad():
ids = encoding["input_ids"].to(device)
attention_mask = encoding["attention_mask"].to(device)
#nopad_mask = ids != tokenizer.pad_token_id
nopad_mask = ids != tokenizer.pad_token_id
logits = model(ids, attention_mask=attention_mask)[0]
for sent_index in range(len(texts)):
sent_nopad_mask = nopad_mask[sent_index]
sent_tokens = [tok
for i, tok in enumerate(encoding.tokens(sent_index))
if sent_nopad_mask[i] and i != 0]
sent_ids = ids[sent_index, sent_nopad_mask][1:]
sent_logits = logits[sent_index, sent_nopad_mask][:-1, :]
sent_logits[:, tokenizer.pad_token_id] = float("-inf")
sent_ids_scores = sent_logits.gather(1, sent_ids.unsqueeze(1)).squeeze(1)
sent_log_probs = sent_ids_scores - sent_logits.logsumexp(1)
#sent_log_probs = cast(torch.DoubleTensor, sent_log_probs)
#sent_ids = cast(torch.LongTensor, sent_ids)
output = (sent_log_probs.cpu().numpy(), sent_ids.cpu().numpy(), sent_tokens)
outputs.append(output)
return outputs
def load_perplex_scorer(model_id = 'gpt2', is_cuda=True):
model = GPT2LMHeadModel.from_pretrained(model_id)
tokenizer = GPT2TokenizerFast.from_pretrained(model_id, use_fast=True, add_special_tokens=False)
device = "cuda" if is_cuda else "cpu"
tokenizer.add_special_tokens({"additional_special_tokens": ["<|pad|>"]})
tokenizer.pad_token = "<|pad|>"
model.resize_token_embeddings(len(tokenizer))
model.eval()
model.to(device)
return Munch(model=model, tokenizer=tokenizer)
def reduce_perplex_prob(log_probs, log=False, reduce="prod"):
tlen = log_probs.shape[0]
if reduce == "prod":
score = log_probs.sum()
elif reduce == "mean":
score = log_probs.logsumexp(0) - math.log(tlen)
elif reduce == "gmean":
score = log_probs.mean(0)
elif reduce == "hmean":
score = log_probs.neg().logsumexp(0).neg() + math.log(tlen)
else:
raise ValueError("Unrecognized scoring strategy: %s" % reduce)
if not log:
score = score.exp()
return score.item()
def normalize_score(log_score, slen, alpha=0.8):
    # From "Elephant in the Room: An Evaluation Framework for Assessing Adversarial Examples in NLP"
return log_score/math.pow((5+slen)/6, alpha)
def compute_sent_perplexity(
sentences, perplex_scorer, log=True, reduce="prod", is_normalize=False, is_cuda=True):
"""Compute the sentence perplexity. For filtering.
    Args:
        sentences (list[str]): sentences to score.
        perplex_scorer (Munch): model/tokenizer pair from load_perplex_scorer.
        log (bool, optional): if True, return log-scores. Defaults to True.
        reduce (str, optional): token-level reduction, one of "prod", "mean",
            "gmean", "hmean". Defaults to "prod".
        is_normalize (bool, optional): length-normalize the score. Defaults to False.
    Returns:
        list[float]: one perplexity score per sentence.
"""
scores = []
model, tokenizer = perplex_scorer.model, perplex_scorer.tokenizer
outputs = _tokens_log_prob(sentences, model, tokenizer, is_cuda=is_cuda)
for sent_log_prob, sent_ids, sent_tokens in outputs:
score = reduce_perplex_prob(sent_log_prob, reduce=reduce, log=log)
if is_normalize:
score = normalize_score(score, len(sent_tokens))
scores.append(score)
return scores
def filter_by_sent_perplexity(sentences, perplex_scorer, thred=20, is_cuda=True):
scores = compute_sent_perplexity(
sentences, perplex_scorer, log=True, reduce="prod", is_normalize=False, is_cuda=is_cuda)
idxes = np.where(np.array(scores) <= thred)[0]
    filtered = [sentences[i] for i in idxes]
    return filtered
def compute_phrase_perplexity(
sentence_phrase_tuples, perplex_scorer,
log=True, reduce="prod", is_normalize=False, is_cuda=True):
scores = []
sentence_phrase_tuples = sentence_phrase_tuples if type(sentence_phrase_tuples) != tuple else [sentence_phrase_tuples]
if len(sentence_phrase_tuples) == 0:
return scores
model, tokenizer = perplex_scorer.model, perplex_scorer.tokenizer
outputs = _tokens_log_prob([s[0] for s in sentence_phrase_tuples], model, tokenizer, is_cuda=is_cuda)
for idx, (sentence, phrase) in enumerate(sentence_phrase_tuples):
log_probs_all = outputs[idx][0]
full_len = len(outputs[idx][1]) - 1
if phrase:
prefix_len = len(tokenizer(sentence.split(phrase)[0].strip())["input_ids"])
else:
prefix_len = 0
phrase_len = len(tokenizer(phrase)["input_ids"])
prefix_idx, phrase_idx = [0, prefix_len], [prefix_len, prefix_len+phrase_len]
log_probs = log_probs_all[phrase_idx[0]:phrase_idx[1]]
#print(sentence.split(phrase)[0].strip(), perplex_scorer.tokenizer(sentence.split(phrase)[0].strip()))
#print(sentence, phrase, phrase_idx)
full_sent_score = reduce_perplex_prob(log_probs_all, log=log, reduce=reduce)
phrase_score = reduce_perplex_prob(log_probs, log=log, reduce=reduce)
if is_normalize:
full_sent_score = normalize_score(full_sent_score, full_len)
phrase_score = normalize_score(phrase_score, phrase_len)
scores.append((full_sent_score, phrase_score))
return scores
def compute_delta_perplexity(edit_ops, perplex_scorer, is_normalize=False, is_cuda=True):
"""This is to compute the perplexity
Args:
edit_ops ([type]): [description]
perplex_scorer ([type]): [description]
is_normalize (bool, optional): [description]. Defaults to False.
Returns:
[type]: [description]
"""
tuples = []
#print(metadata.primary.acore.doc.text)
#print(metadata.primary.bcore.doc.text)
edit_ops = [o for o in edit_ops if o.op != "equal"]
for op in edit_ops:
aphrase, bphrase = (op.fromz_full, op.toz_full) if \
op.op == "insert" or op.op == "delete" else (op.fromz_core, op.toz_core)
asent, bsent = aphrase.doc, bphrase.doc
tuples += [(asent.text, aphrase.text), (bsent.text, bphrase.text)]
#print(tuples)
scores = compute_phrase_perplexity(tuples, perplex_scorer,
is_normalize=is_normalize, is_cuda=is_cuda)
#print(scores)
paired_scores = []
for i in range(len(edit_ops)):
# because of negative, it's i - i+1; lower the better.
#print(scores[2*i])
#print(scores[2*i+1])
paired_scores.append(Munch(
pr_sent=scores[2*i][0]-scores[2*i+1][0],
pr_phrase=scores[2*i][1]-scores[2*i+1][1]))
paired_scores = sorted(paired_scores, key=lambda x: (
max(x.pr_sent, x.pr_phrase)), reverse=True) # use the most ungrammar part as the
return paired_scores[0]
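# A minimal usage sketch (not part of the original module; the sentences below
# are illustrative). It loads the GPT-2 scorer and prints one score per sentence.
if __name__ == "__main__":
    use_cuda = torch.cuda.is_available()
    scorer = load_perplex_scorer(model_id="gpt2", is_cuda=use_cuda)
    examples = ["The movie was great.", "Movie the was great."]
    example_scores = compute_sent_perplexity(
        examples, scorer, log=True, reduce="prod", is_normalize=True, is_cuda=use_cuda)
    for sent, score in zip(examples, example_scores):
        print(f"{score:.2f}\t{sent}")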
| 43.787709 | 122 | 0.666114 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,749 | 0.223144 |
82ab0f9e283b82fa75f97cebd66085d095f1ab43 | 2,030 | py | Python | Python/example_controllers/visual_perception/flow.py | ricklentz/tdw | da40eec151acae20b28d6486defb4358d96adb0e | [
"BSD-2-Clause"
]
| null | null | null | Python/example_controllers/visual_perception/flow.py | ricklentz/tdw | da40eec151acae20b28d6486defb4358d96adb0e | [
"BSD-2-Clause"
]
| null | null | null | Python/example_controllers/visual_perception/flow.py | ricklentz/tdw | da40eec151acae20b28d6486defb4358d96adb0e | [
"BSD-2-Clause"
]
| null | null | null | from tdw.controller import Controller
from tdw.tdw_utils import TDWUtils
from tdw.add_ons.image_capture import ImageCapture
from tdw.backend.paths import EXAMPLE_CONTROLLER_OUTPUT_PATH
"""
Get the _flow pass.
"""
c = Controller()
object_id_0 = c.get_unique_id()
object_id_1 = c.get_unique_id()
object_id_2 = c.get_unique_id()
object_id_3 = c.get_unique_id()
object_names = {object_id_0: "small_table_green_marble",
object_id_1: "rh10",
object_id_2: "jug01",
object_id_3: "jug05"}
output_directory = EXAMPLE_CONTROLLER_OUTPUT_PATH.joinpath("flow")
# Enable image capture for the _flow pass.
print(f"Images will be saved to: {output_directory}")
capture = ImageCapture(path=output_directory, pass_masks=["_flow"], avatar_ids=["a"])
c.add_ons.append(capture)
commands = [TDWUtils.create_empty_room(12, 12),
c.get_add_object(object_names[object_id_0],
object_id=object_id_0),
c.get_add_object(object_names[object_id_1],
position={"x": 0.7, "y": 0, "z": 0.4},
rotation={"x": 0, "y": 30, "z": 0},
object_id=object_id_1),
c.get_add_object(model_name=object_names[object_id_2],
position={"x": -0.3, "y": 0.9, "z": 0.2},
object_id=object_id_2),
c.get_add_object(object_names[object_id_3],
position={"x": 0.3, "y": 0.9, "z": -0.2},
object_id=object_id_3),
{"$type": "apply_force_to_object",
"id": object_id_1,
"force": {"x": 0, "y": 5, "z": -200}}]
commands.extend(TDWUtils.create_avatar(position={"x": 2.478, "y": 1.602, "z": 1.412},
look_at={"x": 0, "y": 0.2, "z": 0},
avatar_id="a"))
c.communicate(commands)
for i in range(3):
c.communicate([])
c.communicate({"$type": "terminate"})
| 39.803922 | 85 | 0.565025 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 302 | 0.148768 |
82ac7d1720a0d22103d819e764e895c0a4bca209 | 2,844 | py | Python | main.py | pepetox/gae-angular-materialize | c6aee16dcc2eba75a254d783661e3115e492faa8 | [
"MIT"
]
| 1 | 2015-10-18T13:48:23.000Z | 2015-10-18T13:48:23.000Z | main.py | pepetox/gae-angular-materialize | c6aee16dcc2eba75a254d783661e3115e492faa8 | [
"MIT"
]
| null | null | null | main.py | pepetox/gae-angular-materialize | c6aee16dcc2eba75a254d783661e3115e492faa8 | [
"MIT"
]
| null | null | null | # Copyright 2013 Google, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import modelCourse as model
import webapp2
from google.appengine.api import users
def AsDict(course):
return {
'key': course.key.urlsafe(),
'author': course.author.email(),
'name': course.name,
'description': course.description,
'lang': course.lang,
'date': course.date.strftime("%B %d, %Y")
}
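# For reference, one serialized course (all values illustrative) looks like:
# {"key": "ag5...", "author": "[email protected]", "name": "Python 101",
#  "description": "Intro course", "lang": "en", "date": "January 01, 2015"}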
class RestHandler(webapp2.RequestHandler):
def dispatch(self):
# time.sleep(1)
        user = users.get_current_user()
        if user and user.email() in ('[email protected]', '[email protected]'):
super(RestHandler, self).dispatch()
else:
self.abort(402)
def SendJson(self, r):
self.response.headers['content-type'] = 'text/plain'
self.response.write(json.dumps(r))
class QueryHandler(RestHandler):
def get(self):
courses = model.All()
r = [AsDict(course) for course in courses]
self.SendJson(r)
class UpdateHandler(RestHandler):
def post(self):
r = json.loads(self.request.body)
guest = model.Update(r['key'], r['name'], r['description'], r['lang'])
r = AsDict(guest)
self.SendJson(r)
class InsertHandler(RestHandler):
def post(self):
r = json.loads(self.request.body)
course = model.Insert(r['name'], r['description'], r['lang'])
r = AsDict(course)
self.SendJson(r)
class DeleteHandler(RestHandler):
def post(self):
r = json.loads(self.request.body)
model.Delete(r['key'])
class GetUser(RestHandler):
def get(self):
user = users.get_current_user()
if user:
email = user.email()
url = users.create_logout_url(self.request.uri)
url_linktext = 'Logout'
else:
email = ''
url = users.create_login_url(self.request.uri)
url_linktext = 'Login'
r = {'user': email, 'url': url, 'url_linktext': url_linktext}
self.SendJson(r)
APP = webapp2.WSGIApplication([
('/rest/query', QueryHandler),
('/rest/insert', InsertHandler),
('/rest/delete', DeleteHandler),
('/rest/update', UpdateHandler),
('/rest/user', GetUser),
], debug=True)
| 26.830189 | 128 | 0.619902 | 1,640 | 0.576653 | 0 | 0 | 0 | 0 | 0 | 0 | 872 | 0.30661 |
82aec0d620a3d2b504e341e4b1d842730a0ba06a | 586 | py | Python | config.py | laundmo/counter-generator | 52b96ede55ea0d961c414102762c6430275d9fb9 | [
"MIT"
]
| null | null | null | config.py | laundmo/counter-generator | 52b96ede55ea0d961c414102762c6430275d9fb9 | [
"MIT"
]
| 4 | 2021-02-27T07:56:25.000Z | 2021-02-27T08:00:10.000Z | config.py | laundmo/counter-generator | 52b96ede55ea0d961c414102762c6430275d9fb9 | [
"MIT"
]
| null | null | null | from sys import platform
try:
from yaml import CSafeLoader as Loader # use the C loader when possible
except ImportError:
from yaml import SafeLoader as Loader
import yaml
with open("config.yml") as f:
config = yaml.load(f, Loader=Loader) # load the config yaml
if platform in ("linux", "linux2", "win32"):
import PySimpleGUI
elif (
platform == "darwin"
):  # Have to use web/remi on MacOS as the normal tkinter version causes an OS error
# TODO: Test on MacOS with tkinter possibly figure out how to get it working.
import PySimpleGUIWeb as PySimpleGUI
| 30.842105 | 83 | 0.721843 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 252 | 0.430034 |
82affa262e4e61eb46885268e69de57c9213002a | 25,609 | py | Python | pysnmp/CISCO-IETF-PW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CISCO-IETF-PW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CISCO-IETF-PW-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-IETF-PW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-IETF-PW-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:43:40 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion")
CpwVcType, CpwGroupID, CpwVcIndexType, CpwOperStatus, CpwVcIDType = mibBuilder.importSymbols("CISCO-IETF-PW-TC-MIB", "CpwVcType", "CpwGroupID", "CpwVcIndexType", "CpwOperStatus", "CpwVcIDType")
ciscoExperiment, = mibBuilder.importSymbols("CISCO-SMI", "ciscoExperiment")
InetAddress, InetAddressType = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Counter32, MibIdentifier, experimental, ModuleIdentity, Unsigned32, NotificationType, IpAddress, TimeTicks, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, ObjectIdentity, Counter64, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "experimental", "ModuleIdentity", "Unsigned32", "NotificationType", "IpAddress", "TimeTicks", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Gauge32", "ObjectIdentity", "Counter64", "Integer32")
TruthValue, TimeStamp, StorageType, RowStatus, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TimeStamp", "StorageType", "RowStatus", "TextualConvention", "DisplayString")
cpwVcMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 10, 106))
cpwVcMIB.setRevisions(('2004-03-17 12:00', '2003-02-26 12:00', '2002-05-26 12:00', '2002-01-30 12:00', '2001-11-07 12:00', '2001-07-11 12:00',))
if mibBuilder.loadTexts: cpwVcMIB.setLastUpdated('200403171200Z')
if mibBuilder.loadTexts: cpwVcMIB.setOrganization('Cisco Systems, Inc.')
cpwVcObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 1))
cpwVcNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 2))
cpwVcConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3))
cpwVcIndexNext = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcIndexNext.setStatus('current')
cpwVcTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2), )
if mibBuilder.loadTexts: cpwVcTable.setStatus('current')
cpwVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcEntry.setStatus('current')
cpwVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 1), CpwVcIndexType())
if mibBuilder.loadTexts: cpwVcIndex.setStatus('current')
cpwVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 2), CpwVcType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcType.setStatus('current')
cpwVcOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("manual", 1), ("maintenanceProtocol", 2), ("other", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcOwner.setStatus('current')
cpwVcPsnType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("mpls", 1), ("l2tp", 2), ("ip", 3), ("mplsOverIp", 4), ("gre", 5), ("other", 6)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPsnType.setStatus('current')
cpwVcSetUpPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcSetUpPriority.setStatus('current')
cpwVcHoldingPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcHoldingPriority.setStatus('current')
cpwVcInboundMode = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("loose", 1), ("strict", 2))).clone('loose')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcInboundMode.setStatus('current')
cpwVcPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 8), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPeerAddrType.setStatus('current')
cpwVcPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 9), InetAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcPeerAddr.setStatus('current')
cpwVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 10), CpwVcIDType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcID.setStatus('current')
cpwVcLocalGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 11), CpwGroupID()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalGroupID.setStatus('current')
cpwVcControlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 12), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcControlWord.setStatus('current')
cpwVcLocalIfMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 13), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalIfMtu.setStatus('current')
cpwVcLocalIfString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 14), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcLocalIfString.setStatus('current')
cpwVcRemoteGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 15), CpwGroupID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteGroupID.setStatus('current')
cpwVcRemoteControlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("noControlWord", 1), ("withControlWord", 2), ("notYetKnown", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcRemoteControlWord.setStatus('current')
cpwVcRemoteIfMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 17), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteIfMtu.setStatus('current')
cpwVcRemoteIfString = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 18), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcRemoteIfString.setStatus('current')
cpwVcOutboundVcLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 19), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcOutboundVcLabel.setStatus('current')
cpwVcInboundVcLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 20), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcInboundVcLabel.setStatus('current')
cpwVcName = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 21), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcName.setStatus('current')
cpwVcDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 22), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcDescr.setStatus('current')
cpwVcCreateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 23), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcCreateTime.setStatus('current')
cpwVcUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 24), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcUpTime.setStatus('current')
cpwVcAdminStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("testing", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcAdminStatus.setStatus('current')
cpwVcOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 26), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcOperStatus.setStatus('current')
cpwVcInboundOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 27), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcInboundOperStatus.setStatus('current')
cpwVcOutboundOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 28), CpwOperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcOutboundOperStatus.setStatus('current')
cpwVcTimeElapsed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 29), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 900))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcTimeElapsed.setStatus('current')
cpwVcValidIntervals = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 30), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 96))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcValidIntervals.setStatus('current')
cpwVcRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 31), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcRowStatus.setStatus('current')
cpwVcStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 2, 1, 32), StorageType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cpwVcStorageType.setStatus('current')
cpwVcPerfCurrentTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3), )
if mibBuilder.loadTexts: cpwVcPerfCurrentTable.setStatus('current')
cpwVcPerfCurrentEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcPerfCurrentEntry.setStatus('current')
cpwVcPerfCurrentInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentInHCPackets.setStatus('current')
cpwVcPerfCurrentInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentInHCBytes.setStatus('current')
cpwVcPerfCurrentOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentOutHCPackets.setStatus('current')
cpwVcPerfCurrentOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 3, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfCurrentOutHCBytes.setStatus('current')
cpwVcPerfIntervalTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4), )
if mibBuilder.loadTexts: cpwVcPerfIntervalTable.setStatus('current')
cpwVcPerfIntervalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"), (0, "CISCO-IETF-PW-MIB", "cpwVcPerfIntervalNumber"))
if mibBuilder.loadTexts: cpwVcPerfIntervalEntry.setStatus('current')
cpwVcPerfIntervalNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 96)))
if mibBuilder.loadTexts: cpwVcPerfIntervalNumber.setStatus('current')
cpwVcPerfIntervalValidData = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 2), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalValidData.setStatus('current')
cpwVcPerfIntervalTimeElapsed = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalTimeElapsed.setStatus('current')
cpwVcPerfIntervalInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalInHCPackets.setStatus('current')
cpwVcPerfIntervalInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalInHCBytes.setStatus('current')
cpwVcPerfIntervalOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalOutHCPackets.setStatus('current')
cpwVcPerfIntervalOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 4, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfIntervalOutHCBytes.setStatus('current')
cpwVcPerfTotalTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5), )
if mibBuilder.loadTexts: cpwVcPerfTotalTable.setStatus('current')
cpwVcPerfTotalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIndex"))
if mibBuilder.loadTexts: cpwVcPerfTotalEntry.setStatus('current')
cpwVcPerfTotalInHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalInHCPackets.setStatus('current')
cpwVcPerfTotalInHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalInHCBytes.setStatus('current')
cpwVcPerfTotalOutHCPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalOutHCPackets.setStatus('current')
cpwVcPerfTotalOutHCBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalOutHCBytes.setStatus('current')
cpwVcPerfTotalDiscontinuityTime = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 5, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalDiscontinuityTime.setStatus('current')
cpwVcPerfTotalErrorPackets = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPerfTotalErrorPackets.setStatus('current')
cpwVcIdMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7), )
if mibBuilder.loadTexts: cpwVcIdMappingTable.setStatus('current')
cpwVcIdMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcType"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcID"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingPeerAddrType"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingPeerAddr"), (0, "CISCO-IETF-PW-MIB", "cpwVcIdMappingVcIndex"))
if mibBuilder.loadTexts: cpwVcIdMappingEntry.setStatus('current')
cpwVcIdMappingVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 1), CpwVcType())
if mibBuilder.loadTexts: cpwVcIdMappingVcType.setStatus('current')
cpwVcIdMappingVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 2), CpwVcIDType())
if mibBuilder.loadTexts: cpwVcIdMappingVcID.setStatus('current')
cpwVcIdMappingPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 3), InetAddressType())
if mibBuilder.loadTexts: cpwVcIdMappingPeerAddrType.setStatus('current')
cpwVcIdMappingPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 4), InetAddress())
if mibBuilder.loadTexts: cpwVcIdMappingPeerAddr.setStatus('current')
cpwVcIdMappingVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 7, 1, 5), CpwVcIndexType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcIdMappingVcIndex.setStatus('current')
cpwVcPeerMappingTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8), )
if mibBuilder.loadTexts: cpwVcPeerMappingTable.setStatus('current')
cpwVcPeerMappingEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1), ).setIndexNames((0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingPeerAddrType"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingPeerAddr"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcType"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcID"), (0, "CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcIndex"))
if mibBuilder.loadTexts: cpwVcPeerMappingEntry.setStatus('current')
cpwVcPeerMappingPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 1), InetAddressType())
if mibBuilder.loadTexts: cpwVcPeerMappingPeerAddrType.setStatus('current')
cpwVcPeerMappingPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 2), InetAddress())
if mibBuilder.loadTexts: cpwVcPeerMappingPeerAddr.setStatus('current')
cpwVcPeerMappingVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 3), CpwVcType())
if mibBuilder.loadTexts: cpwVcPeerMappingVcType.setStatus('current')
cpwVcPeerMappingVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 4), CpwVcIDType())
if mibBuilder.loadTexts: cpwVcPeerMappingVcID.setStatus('current')
cpwVcPeerMappingVcIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 8, 1, 5), CpwVcIndexType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cpwVcPeerMappingVcIndex.setStatus('current')
cpwVcUpDownNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 9), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpwVcUpDownNotifEnable.setStatus('current')
cpwVcNotifRate = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 106, 1, 10), Unsigned32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cpwVcNotifRate.setStatus('current')
cpwVcDown = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 106, 2, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"))
if mibBuilder.loadTexts: cpwVcDown.setStatus('current')
cpwVcUp = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 106, 2, 2)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"))
if mibBuilder.loadTexts: cpwVcUp.setStatus('current')
cpwVcGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1))
cpwVcCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 2))
cpwModuleCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 2, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcGroup"), ("CISCO-IETF-PW-MIB", "cpwVcPeformanceGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cpwModuleCompliance = cpwModuleCompliance.setStatus('current')
cpwVcGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 1)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcIndexNext"), ("CISCO-IETF-PW-MIB", "cpwVcType"), ("CISCO-IETF-PW-MIB", "cpwVcOwner"), ("CISCO-IETF-PW-MIB", "cpwVcPsnType"), ("CISCO-IETF-PW-MIB", "cpwVcSetUpPriority"), ("CISCO-IETF-PW-MIB", "cpwVcHoldingPriority"), ("CISCO-IETF-PW-MIB", "cpwVcInboundMode"), ("CISCO-IETF-PW-MIB", "cpwVcPeerAddrType"), ("CISCO-IETF-PW-MIB", "cpwVcPeerAddr"), ("CISCO-IETF-PW-MIB", "cpwVcID"), ("CISCO-IETF-PW-MIB", "cpwVcLocalGroupID"), ("CISCO-IETF-PW-MIB", "cpwVcControlWord"), ("CISCO-IETF-PW-MIB", "cpwVcLocalIfMtu"), ("CISCO-IETF-PW-MIB", "cpwVcLocalIfString"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteGroupID"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteControlWord"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteIfMtu"), ("CISCO-IETF-PW-MIB", "cpwVcRemoteIfString"), ("CISCO-IETF-PW-MIB", "cpwVcOutboundVcLabel"), ("CISCO-IETF-PW-MIB", "cpwVcInboundVcLabel"), ("CISCO-IETF-PW-MIB", "cpwVcName"), ("CISCO-IETF-PW-MIB", "cpwVcDescr"), ("CISCO-IETF-PW-MIB", "cpwVcCreateTime"), ("CISCO-IETF-PW-MIB", "cpwVcUpTime"), ("CISCO-IETF-PW-MIB", "cpwVcAdminStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcOutboundOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcInboundOperStatus"), ("CISCO-IETF-PW-MIB", "cpwVcTimeElapsed"), ("CISCO-IETF-PW-MIB", "cpwVcValidIntervals"), ("CISCO-IETF-PW-MIB", "cpwVcRowStatus"), ("CISCO-IETF-PW-MIB", "cpwVcStorageType"), ("CISCO-IETF-PW-MIB", "cpwVcUpDownNotifEnable"), ("CISCO-IETF-PW-MIB", "cpwVcNotifRate"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cpwVcGroup = cpwVcGroup.setStatus('current')
cpwVcPeformanceGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 2)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfCurrentOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalValidData"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalTimeElapsed"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfIntervalOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalInHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalInHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalOutHCPackets"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalOutHCBytes"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalDiscontinuityTime"), ("CISCO-IETF-PW-MIB", "cpwVcPerfTotalErrorPackets"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cpwVcPeformanceGroup = cpwVcPeformanceGroup.setStatus('current')
cpwVcMappingTablesGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 3)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcIdMappingVcIndex"), ("CISCO-IETF-PW-MIB", "cpwVcPeerMappingVcIndex"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cpwVcMappingTablesGroup = cpwVcMappingTablesGroup.setStatus('current')
cpwVcNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 10, 106, 3, 1, 4)).setObjects(("CISCO-IETF-PW-MIB", "cpwVcUp"), ("CISCO-IETF-PW-MIB", "cpwVcDown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cpwVcNotificationsGroup = cpwVcNotificationsGroup.setStatus('current')
mibBuilder.exportSymbols("CISCO-IETF-PW-MIB", cpwVcDown=cpwVcDown, cpwVcIdMappingVcType=cpwVcIdMappingVcType, cpwVcControlWord=cpwVcControlWord, cpwVcPerfIntervalValidData=cpwVcPerfIntervalValidData, cpwVcSetUpPriority=cpwVcSetUpPriority, cpwVcPsnType=cpwVcPsnType, cpwVcStorageType=cpwVcStorageType, cpwVcPeerMappingVcID=cpwVcPeerMappingVcID, cpwVcPeerMappingTable=cpwVcPeerMappingTable, cpwVcPerfTotalInHCBytes=cpwVcPerfTotalInHCBytes, PYSNMP_MODULE_ID=cpwVcMIB, cpwVcPerfIntervalTimeElapsed=cpwVcPerfIntervalTimeElapsed, cpwVcIdMappingPeerAddrType=cpwVcIdMappingPeerAddrType, cpwVcPeerAddrType=cpwVcPeerAddrType, cpwVcHoldingPriority=cpwVcHoldingPriority, cpwVcPerfTotalInHCPackets=cpwVcPerfTotalInHCPackets, cpwVcIndexNext=cpwVcIndexNext, cpwVcIdMappingTable=cpwVcIdMappingTable, cpwVcMappingTablesGroup=cpwVcMappingTablesGroup, cpwVcPeformanceGroup=cpwVcPeformanceGroup, cpwVcEntry=cpwVcEntry, cpwVcPeerAddr=cpwVcPeerAddr, cpwVcInboundVcLabel=cpwVcInboundVcLabel, cpwVcPerfTotalOutHCBytes=cpwVcPerfTotalOutHCBytes, cpwVcMIB=cpwVcMIB, cpwVcValidIntervals=cpwVcValidIntervals, cpwVcOwner=cpwVcOwner, cpwVcRemoteGroupID=cpwVcRemoteGroupID, cpwVcPerfIntervalTable=cpwVcPerfIntervalTable, cpwVcPeerMappingPeerAddr=cpwVcPeerMappingPeerAddr, cpwVcConformance=cpwVcConformance, cpwVcPerfIntervalOutHCPackets=cpwVcPerfIntervalOutHCPackets, cpwVcInboundOperStatus=cpwVcInboundOperStatus, cpwVcPerfCurrentTable=cpwVcPerfCurrentTable, cpwVcPerfTotalDiscontinuityTime=cpwVcPerfTotalDiscontinuityTime, cpwVcOutboundVcLabel=cpwVcOutboundVcLabel, cpwVcUp=cpwVcUp, cpwVcIdMappingVcID=cpwVcIdMappingVcID, cpwVcLocalIfString=cpwVcLocalIfString, cpwVcUpTime=cpwVcUpTime, cpwVcPeerMappingPeerAddrType=cpwVcPeerMappingPeerAddrType, cpwVcType=cpwVcType, cpwVcPeerMappingVcType=cpwVcPeerMappingVcType, cpwVcPerfIntervalEntry=cpwVcPerfIntervalEntry, cpwVcPerfIntervalNumber=cpwVcPerfIntervalNumber, cpwVcName=cpwVcName, cpwVcPerfIntervalOutHCBytes=cpwVcPerfIntervalOutHCBytes, cpwVcRemoteIfMtu=cpwVcRemoteIfMtu, cpwVcIdMappingPeerAddr=cpwVcIdMappingPeerAddr, cpwVcID=cpwVcID, cpwVcPerfIntervalInHCPackets=cpwVcPerfIntervalInHCPackets, cpwVcPerfTotalEntry=cpwVcPerfTotalEntry, cpwVcNotificationsGroup=cpwVcNotificationsGroup, cpwVcCreateTime=cpwVcCreateTime, cpwVcNotifRate=cpwVcNotifRate, cpwVcPerfCurrentInHCBytes=cpwVcPerfCurrentInHCBytes, cpwVcRemoteControlWord=cpwVcRemoteControlWord, cpwVcLocalIfMtu=cpwVcLocalIfMtu, cpwVcNotifications=cpwVcNotifications, cpwVcInboundMode=cpwVcInboundMode, cpwVcRemoteIfString=cpwVcRemoteIfString, cpwVcGroup=cpwVcGroup, cpwVcPerfTotalTable=cpwVcPerfTotalTable, cpwVcPerfTotalOutHCPackets=cpwVcPerfTotalOutHCPackets, cpwVcPeerMappingEntry=cpwVcPeerMappingEntry, cpwVcTable=cpwVcTable, cpwVcGroups=cpwVcGroups, cpwVcPerfIntervalInHCBytes=cpwVcPerfIntervalInHCBytes, cpwModuleCompliance=cpwModuleCompliance, cpwVcPerfCurrentOutHCPackets=cpwVcPerfCurrentOutHCPackets, cpwVcObjects=cpwVcObjects, cpwVcPeerMappingVcIndex=cpwVcPeerMappingVcIndex, cpwVcCompliances=cpwVcCompliances, cpwVcLocalGroupID=cpwVcLocalGroupID, cpwVcTimeElapsed=cpwVcTimeElapsed, cpwVcIndex=cpwVcIndex, cpwVcRowStatus=cpwVcRowStatus, cpwVcPerfTotalErrorPackets=cpwVcPerfTotalErrorPackets, cpwVcIdMappingEntry=cpwVcIdMappingEntry, cpwVcDescr=cpwVcDescr, cpwVcPerfCurrentEntry=cpwVcPerfCurrentEntry, cpwVcPerfCurrentInHCPackets=cpwVcPerfCurrentInHCPackets, cpwVcIdMappingVcIndex=cpwVcIdMappingVcIndex, cpwVcOperStatus=cpwVcOperStatus, cpwVcOutboundOperStatus=cpwVcOutboundOperStatus, cpwVcAdminStatus=cpwVcAdminStatus, cpwVcUpDownNotifEnable=cpwVcUpDownNotifEnable, 
cpwVcPerfCurrentOutHCBytes=cpwVcPerfCurrentOutHCBytes)
| 130.658163 | 3,605 | 0.7545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,790 | 0.226092 |
82b3cb2854e832088e3570125c2b7f5602582762 | 200 | py | Python | configs/sem_fpn/onaho_fpn.py | xiong-jie-y/mmsegmentation | 91159e2e5b9ac258440d714a40e0df6083aafee4 | ["Apache-2.0"] | 1 | 2021-09-20T22:48:16.000Z | 2021-09-20T22:48:16.000Z | configs/sem_fpn/onaho_fpn.py | xiong-jie-y/mmsegmentation | 91159e2e5b9ac258440d714a40e0df6083aafee4 | ["Apache-2.0"] | null | null | null | configs/sem_fpn/onaho_fpn.py | xiong-jie-y/mmsegmentation | 91159e2e5b9ac258440d714a40e0df6083aafee4 | ["Apache-2.0"]
| null | null | null | _base_ = [
'../_base_/models/fpn_r50.py', '../_base_/datasets/onaho.py',
'../_base_/default_runtime.py', '../_base_/schedules/schedule_160k.py'
]
model = dict(decode_head=dict(num_classes=2))
| 33.333333 | 74 | 0.685 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 126 | 0.63 |
82b3f2c4319142d22b0411ed2f4d9af5e7d4070c | 1,955 | py | Python | config.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | ["Apache-2.0"] | null | null | null | config.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | ["Apache-2.0"] | null | null | null | config.py | dhkim2810/MaskedDatasetCondensation | f52144e9cd68e46b4ebdbcaf96829edb732b79ae | ["Apache-2.0"]
| null | null | null | def get_default_convnet_setting():
net_width, net_depth, net_act, net_norm, net_pooling = 128, 3, 'relu', 'instancenorm', 'avgpooling'
return net_width, net_depth, net_act, net_norm, net_pooling
def get_loops(ipc):
# Get the two hyper-parameters of outer-loop and inner-loop.
# The following values are empirically good.
if ipc == 1:
outer_loop, inner_loop = 1, 1
elif ipc == 10:
outer_loop, inner_loop = 10, 50
elif ipc == 20:
outer_loop, inner_loop = 20, 25
elif ipc == 30:
outer_loop, inner_loop = 30, 20
elif ipc == 40:
outer_loop, inner_loop = 40, 15
elif ipc == 50:
outer_loop, inner_loop = 50, 10
else:
outer_loop, inner_loop = 0, 0
exit('loop hyper-parameters are not defined for %d ipc'%ipc)
return outer_loop, inner_loop
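
# Illustrative outer/inner loop values produced by get_loops above (a sketch,
# not part of the original training code):
#   get_loops(1)  -> (1, 1)
#   get_loops(10) -> (10, 50)
#   get_loops(50) -> (50, 10)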
def get_eval_pool(eval_mode, model, model_eval):
if eval_mode == 'M': # multiple architectures
model_eval_pool = ['MLP', 'ConvNet', 'LeNet', 'AlexNet', 'VGG11', 'ResNet18']
elif eval_mode == 'W': # ablation study on network width
model_eval_pool = ['ConvNetW32', 'ConvNetW64', 'ConvNetW128', 'ConvNetW256']
elif eval_mode == 'D': # ablation study on network depth
model_eval_pool = ['ConvNetD1', 'ConvNetD2', 'ConvNetD3', 'ConvNetD4']
elif eval_mode == 'A': # ablation study on network activation function
model_eval_pool = ['ConvNetAS', 'ConvNetAR', 'ConvNetAL']
elif eval_mode == 'P': # ablation study on network pooling layer
model_eval_pool = ['ConvNetNP', 'ConvNetMP', 'ConvNetAP']
elif eval_mode == 'N': # ablation study on network normalization layer
model_eval_pool = ['ConvNetNN', 'ConvNetBN', 'ConvNetLN', 'ConvNetIN', 'ConvNetGN']
elif eval_mode == 'S': # itself
model_eval_pool = [model[:model.index('BN')]] if 'BN' in model else [model]
else:
model_eval_pool = [model_eval]
return model_eval_pool | 45.465116 | 103 | 0.653708 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 710 | 0.363171 |
82b41b0e9cd71d4a56a4ea2a15f286f90fd054f6 | 4,324 | py | Python | jgem/dataset/__init__.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | ["MIT"] | null | null | null | jgem/dataset/__init__.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | ["MIT"] | null | null | null | jgem/dataset/__init__.py | kensugino/JUGEMu | 3ebf19c96e41f1c90d63d772fd4c9c5cc3d6886f | ["MIT"]
| null | null | null | """
Expression Dataset for analysis of matrix (RNASeq/microarray) data with annotations
"""
import pandas as PD
import numpy as N
from matplotlib import pylab as P
from collections import OrderedDict
from ast import literal_eval
# from ..plot.matrix import matshow_clustered
class ExpressionSet(object):
def __init__(self, eData, gData=None, sData=None):
"""
eData: expression data (gene x samples) header: MultiIndex (samplename, group)
        gData: gene annotation (gene x gene annotations)
        sData: sample annotation (sample x sample annotations)
"""
self.eData = eData
self.gData = gData
self.sData = sData
def read(self, eFile, gFile=None, sFile=None):
pass
def write(self, eFile, gFile=None, sFile=None):
self.eData.to_csv(eFile, tupleize_cols=False, sep="\t")
if gFile is not None:
self.gData.to_csv(gFile, tupleize_cols=False, sep="\t")
if sFile is not None:
self.sData.to_csv(sFile, tupleize_cols=False, sep="\t")
def find(self, field, pat):
pass
def read_bioinfo3_data(fname):
""" read bioinfo3.table.dataset type of data """
fobj = open(fname)
groups = OrderedDict()
cnt = 0
for line in fobj:
cnt += 1
if line[:2]=='#%':
if line.startswith('#%groups:'):
gname, members = line[len('#%groups:'):].split('=')
gname = gname.strip()
members = members.strip().split(',')
groups[gname] = members
datafields = line.strip().split('=')[1].strip().split(',')
elif line.startswith('#%fields'):
fields = line.strip().split('=')[1].strip().split(',')
elif not line.strip():
continue # empty line
else:
break
df = PD.read_table(fname, skiprows=cnt-1)
f2g = {}
for g,m in groups.items():
for f in m:
f2g[f] = g
df.columns = PD.MultiIndex.from_tuples([(x, f2g.get(x,'')) for x in df.columns], names=['samplename','group'])
e = ExpressionSet(df)
return e
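
# An illustrative header for the bioinfo3 format parsed above (group and
# sample names are hypothetical; data rows follow as a tab-separated table):
#   #%groups: ctrl = s1,s2
#   #%groups: treat = s3,s4
#   #%fields = gene,s1,s2,s3,s4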
def read_multiindex_data(fname, tupleize=True, index_names = ['samplename','group']):
""" read dataset table with MultiIndex in the header """
if not tupleize:
df = PD.read_table(fname, header=range(len(index_names)), index_col=[0], tupleize_cols=False)
e = ExpressionSet(df)
return e
df = PD.read_table(fname, index_col=0)
df.columns = PD.MultiIndex.from_tuples(df.columns.map(literal_eval).tolist(), names=index_names)
e = ExpressionSet(df)
return e
def read_grouped_table(fname, groupfn=lambda x: '_'.join(x.split('_')[:-1])):
""" Read dataset whose group is encoded in the colname. Column 0 is index. """
df = PD.read_table(fname)
f2g = {x:groupfn(x) for x in df.columns}
df.columns = PD.MultiIndex.from_tuples([(x, f2g[x]) for x in df.columns], names=['samplename','group'])
e = ExpressionSet(df)
return e
def concatenate(dic):
""" dic: dict of DataFrames
merge all using index and outer join
"""
keys = list(dic)
d = dic[keys[0]].merge(dic[keys[1]], left_index=True, right_index=True, how='outer', suffixes=('.'+keys[0],'.'+keys[1]))
for k in keys[2:]:
d = d.merge(dic[k], left_index=True, right_index=True, how='outer', suffixes=('','.'+k))
return d
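
# A minimal sketch of concatenate (frame contents are illustrative):
#   a = PD.DataFrame({'x': [1, 2]}, index=['g1', 'g2'])
#   b = PD.DataFrame({'x': [3]}, index=['g1'])
#   concatenate({'A': a, 'B': b})
#   # -> one frame outer-joined on the index, columns suffixed '.A' and '.B'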
def calc_mergesortkey(dic, pos_neg_flds):
conc = concatenate(dic)
selected = ~N.isnan(conc[pos_neg_flds])
pos = conc[pos_neg_flds]>0
neg = conc[pos_neg_flds]<=0
num_pos = pos.sum(axis=1)
num_neg = neg.sum(axis=1)
pos_neg_mix = -1*(num_neg==0) + 1*(num_pos==0) # pos(-1), mix(0), neg(1)
#num_hit = num_pos - num_neg
num_hit = num_pos + num_neg
n = len(pos_neg_flds)
#position = (N.arange(1,n+1)*pos + N.arange(-1,-n-1,-1)*neg).sum(axis=1)
position = (N.arange(1,n+1)*pos + N.arange(-n,0)*neg).sum(axis=1)
strength = (conc[pos_neg_flds]*pos).sum(axis=1) + (conc[pos_neg_flds]*neg).sum(axis=1)
#msk = PD.Series(list(zip(pos_neg_mix, num_hit, position, strength)), index=conc.index)
#msk.sort()
conc['mergesortkey'] = list(zip(pos_neg_mix, num_hit, position, strength))
    conc.sort_values('mergesortkey', inplace=True)  # DataFrame.sort was removed in pandas 0.20+
return conc
| 35.442623 | 124 | 0.612165 | 821 | 0.18987 | 0 | 0 | 0 | 0 | 0 | 0 | 1,048 | 0.242368 |
82b4601eafecafbb6f782f6379a8c342a3e18c6c | 8,377 | py | Python | tests/test_sql.py | YPlan/django-perf-rec | e4face96502fda64c198e6e9951da91b0857eeec | ["MIT"] | 148 | 2016-09-19T13:53:34.000Z | 2018-06-27T11:48:00.000Z | tests/test_sql.py | YPlan/django-perf-rec | e4face96502fda64c198e6e9951da91b0857eeec | ["MIT"] | 36 | 2016-09-19T14:19:05.000Z | 2018-07-12T16:33:12.000Z | tests/test_sql.py | YPlan/django-perf-rec | e4face96502fda64c198e6e9951da91b0857eeec | ["MIT"]
| 8 | 2016-09-29T12:13:07.000Z | 2018-07-11T07:53:33.000Z | from __future__ import annotations
from django_perf_rec.sql import sql_fingerprint
def test_empty():
assert sql_fingerprint("") == ""
assert sql_fingerprint("\n\n \n") == ""
def test_select():
assert sql_fingerprint("SELECT `f1`, `f2` FROM `b`") == "SELECT ... FROM `b`"
def test_select_show_columns(settings):
assert (
sql_fingerprint("SELECT `f1`, `f2` FROM `b`", hide_columns=False)
== "SELECT `f1`, `f2` FROM `b`"
)
def test_select_limit(settings):
assert (
sql_fingerprint("SELECT `f1`, `f2` FROM `b` LIMIT 12", hide_columns=False)
== "SELECT `f1`, `f2` FROM `b` LIMIT #"
)
def test_select_coalesce_show_columns(settings):
assert (
sql_fingerprint(
(
"SELECT `table`.`f1`, COALESCE(table.f2->>'a', table.f2->>'b', "
+ "'default') FROM `table`"
),
hide_columns=False,
)
== "SELECT `table`.`f1`, COALESCE(table.f2->>#, table.f2->>#, #) FROM `table`"
)
def test_select_where():
assert (
sql_fingerprint(
"SELECT DISTINCT `table`.`field` FROM `table` WHERE `table`.`id` = 1"
)
== "SELECT DISTINCT `table`.`field` FROM `table` WHERE `table`.`id` = #"
)
def test_select_where_show_columns(settings):
assert (
sql_fingerprint(
"SELECT DISTINCT `table`.`field` FROM `table` WHERE `table`.`id` = 1",
hide_columns=False,
)
== "SELECT DISTINCT `table`.`field` FROM `table` WHERE `table`.`id` = #"
)
def test_select_comment():
assert (
sql_fingerprint("SELECT /* comment */ `f1`, `f2` FROM `b`")
== "SELECT /* comment */ ... FROM `b`"
)
def test_select_comment_show_columns(settings):
assert (
sql_fingerprint("SELECT /* comment */ `f1`, `f2` FROM `b`", hide_columns=False)
== "SELECT /* comment */ `f1`, `f2` FROM `b`"
)
def test_select_join():
assert (
sql_fingerprint(
"SELECT f1, f2 FROM a INNER JOIN b ON (a.b_id = b.id) WHERE a.f2 = 1"
)
== "SELECT ... FROM a INNER JOIN b ON (a.b_id = b.id) WHERE a.f2 = #"
)
def test_select_join_show_columns(settings):
assert (
sql_fingerprint(
"SELECT f1, f2 FROM a INNER JOIN b ON (a.b_id = b.id) WHERE a.f2 = 1",
hide_columns=False,
)
== "SELECT f1, f2 FROM a INNER JOIN b ON (a.b_id = b.id) WHERE a.f2 = #"
)
def test_select_order_by():
assert (
sql_fingerprint("SELECT f1, f2 FROM a ORDER BY f3")
== "SELECT ... FROM a ORDER BY f3"
)
def test_select_order_by_limit():
assert (
sql_fingerprint("SELECT f1, f2 FROM a ORDER BY f3 LIMIT 12")
== "SELECT ... FROM a ORDER BY f3 LIMIT #"
)
def test_select_order_by_show_columns(settings):
assert (
sql_fingerprint("SELECT f1, f2 FROM a ORDER BY f3", hide_columns=False)
== "SELECT f1, f2 FROM a ORDER BY f3"
)
def test_select_order_by_multiple():
assert (
sql_fingerprint("SELECT f1, f2 FROM a ORDER BY f3, f4")
== "SELECT ... FROM a ORDER BY f3, f4"
)
def test_select_group_by():
assert (
sql_fingerprint("SELECT f1, f2 FROM a GROUP BY f1")
== "SELECT ... FROM a GROUP BY f1"
)
def test_select_group_by_show_columns(settings):
assert (
sql_fingerprint("SELECT f1, f2 FROM a GROUP BY f1", hide_columns=False)
== "SELECT f1, f2 FROM a GROUP BY f1"
)
def test_select_group_by_multiple():
assert (
sql_fingerprint("SELECT f1, f2 FROM a GROUP BY f1, f2")
== "SELECT ... FROM a GROUP BY f1, f2"
)
def test_select_group_by_having():
assert (
sql_fingerprint("SELECT f1, f2 FROM a GROUP BY f1 HAVING f1 > 21")
== "SELECT ... FROM a GROUP BY f1 HAVING f1 > #"
)
def test_select_group_by_having_show_columns(settings):
assert (
sql_fingerprint(
"SELECT f1, f2 FROM a GROUP BY f1 HAVING f1 > 21", hide_columns=False
)
== "SELECT f1, f2 FROM a GROUP BY f1 HAVING f1 > #"
)
def test_select_group_by_having_multiple():
assert (
sql_fingerprint("SELECT f1, f2 FROM a GROUP BY f1 HAVING f1 > 21, f2 < 42")
== "SELECT ... FROM a GROUP BY f1 HAVING f1 > #, f2 < #"
)
def test_insert():
assert (
sql_fingerprint("INSERT INTO `table` (`f1`, `f2`) VALUES ('v1', 2)")
== "INSERT INTO `table` (...) VALUES (...)"
)
def test_insert_show_columns(settings):
assert (
sql_fingerprint(
"INSERT INTO `table` (`f1`, `f2`) VALUES ('v1', 2)", hide_columns=False
)
== "INSERT INTO `table` (`f1`, `f2`) VALUES (#, #)"
)
def test_update():
assert (
sql_fingerprint("UPDATE `table` SET `foo` = 'bar' WHERE `table`.`id` = 1")
== "UPDATE `table` SET ... WHERE `table`.`id` = #"
)
def test_update_no_where():
assert (
sql_fingerprint("UPDATE `table` SET `foo` = 'bar'") == "UPDATE `table` SET ..."
)
def test_declare_cursor():
assert (
sql_fingerprint(
'DECLARE "_django_curs_140239496394496_1300" NO SCROLL CURSOR WITHOUT'
)
== 'DECLARE "_django_curs_#" NO SCROLL CURSOR WITHOUT'
)
def test_savepoint():
assert sql_fingerprint("SAVEPOINT `s140323809662784_x54`") == "SAVEPOINT `#`"
def test_rollback_to_savepoint():
assert (
sql_fingerprint("ROLLBACK TO SAVEPOINT `s140323809662784_x54`")
== "ROLLBACK TO SAVEPOINT `#`"
)
def test_release_savepoint():
assert (
sql_fingerprint("RELEASE SAVEPOINT `s140699855320896_x17`")
== "RELEASE SAVEPOINT `#`"
)
def test_null_value():
assert (
sql_fingerprint(
"SELECT `f1`, `f2` FROM `b` WHERE `b`.`name` IS NULL", hide_columns=False
)
== "SELECT `f1`, `f2` FROM `b` WHERE `b`.`name` IS #"
)
def test_strip_duplicate_whitespaces():
assert (
sql_fingerprint(
"SELECT `f1`, `f2` FROM `b` WHERE `b`.`f1` IS NULL LIMIT 12 "
)
== "SELECT ... FROM `b` WHERE `b`.`f1` IS # LIMIT #"
)
def test_strip_duplicate_whitespaces_recursive():
assert (
sql_fingerprint(
"SELECT `f1`, `f2`, ( COALESCE(b.f3->>'en', b.f3->>'fr', '')) "
"FROM `b` WHERE (`b`.`f1` IS NULL OR ( EXISTS COUNT(1) )) LIMIT 12 ",
hide_columns=False,
)
== "SELECT `f1`, `f2`, (COALESCE(b.f3->>#, b.f3->>#, #)) "
"FROM `b` WHERE (`b`.`f1` IS # OR (EXISTS COUNT(#))) LIMIT #"
)
def test_strip_newlines():
assert (
sql_fingerprint("SELECT `f1`, `f2`\nFROM `b`\n LIMIT 12\n\n")
== "SELECT ... FROM `b` LIMIT #"
)
def test_strip_raw_query():
assert (
sql_fingerprint(
"""
SELECT 'f1'
, 'f2'
, 'f3'
FROM "table_a" WHERE "table_a"."f1" = 1 OR (
"table_a"."type" = 'A' AND
EXISTS (
SELECT "table_b"."id"
FROM "table_b"
WHERE "table_b"."id" = 1
) = true)
"""
)
== (
'SELECT ... FROM "table_a" WHERE "table_a"."f1" = # OR '
+ '("table_a"."type" = # AND EXISTS (SELECT "table_b"."id" FROM '
+ '"table_b" WHERE "table_b"."id" = # ) = true)'
)
)
def test_in_single_value():
assert (
sql_fingerprint("SELECT `f1`, `f2` FROM `b` WHERE `x` IN (1)")
== "SELECT ... FROM `b` WHERE `x` IN (...)"
)
def test_in_multiple_values():
assert (
sql_fingerprint("SELECT `f1`, `f2` FROM `b` WHERE `x` IN (1, 2, 3)")
== "SELECT ... FROM `b` WHERE `x` IN (...)"
)
def test_in_multiple_clauses():
assert (
sql_fingerprint(
"SELECT `f1`, `f2` FROM `b` WHERE `x` IN (1, 2, 3) AND `y` IN (4, 5, 6)"
)
== "SELECT ... FROM `b` WHERE `x` IN (...) AND `y` IN (...)"
)
def test_in_multiple_values_and_clause():
assert (
sql_fingerprint(
"SELECT `f1`, `f2` FROM `b` WHERE `x` IN (1, 2, 3) AND (`y` = 1 OR `y` = 2)"
)
== "SELECT ... FROM `b` WHERE `x` IN (...) AND (`y` = # OR `y` = #)"
)
def test_in_subquery():
assert (
sql_fingerprint("SELECT `f1`, `f2` FROM `b` WHERE `x` IN (SELECT 1)")
== "SELECT ... FROM `b` WHERE `x` IN (SELECT #)"
)
| 26.178125 | 88 | 0.549481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,997 | 0.47714 |
82b549e4607fd2be9e74cf5b94bf6e0c4162ac8a | 1,198 | py | Python | src/user_auth_api/serializers.py | Adstefnum/mockexams | af5681b034334be9c5aaf807161ca80a8a1b9948 | ["BSD-3-Clause"] | null | null | null | src/user_auth_api/serializers.py | Adstefnum/mockexams | af5681b034334be9c5aaf807161ca80a8a1b9948 | ["BSD-3-Clause"] | null | null | null | src/user_auth_api/serializers.py | Adstefnum/mockexams | af5681b034334be9c5aaf807161ca80a8a1b9948 | ["BSD-3-Clause"]
| null | null | null | from rest_framework import serializers
from user_auth_api.models import User
# User Serializer
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = [
'user_name',
'email',
'current_jamb_score',
'phone_num',
'last_name',
'first_name',
'is_staff',
'is_superuser',
'uuid',
'is_active',
'last_login',
'date_joined',
]
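
# A minimal usage sketch (the user instance is illustrative):
#   serializer = UserSerializer(User.objects.first())
#   serializer.data  # -> dict with the fields listed above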
# Register Serializer
class RegisterSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = [
'user_name',
'email',
'password',
'current_jamb_score',
'phone_num',
'last_name',
'first_name',
'uuid',
]
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = User.objects.create_user(
validated_data['user_name'],
validated_data['email'],validated_data['current_jamb_score'],
validated_data['phone_num'],validated_data['password'],
validated_data['last_name'],validated_data['first_name']
)
return user | 22.603774 | 73 | 0.576795 | 1,078 | 0.899833 | 0 | 0 | 0 | 0 | 0 | 0 | 368 | 0.307179 |
82b57b3ca054137769bfb034aa43dd12bdcde046 | 9,653 | py | Python | cenv_script/cenv_script.py | technic/cenv_script | 6c3a9047faec4723f61ad5795f0d8019c0de03ec | ["MIT"] | null | null | null | cenv_script/cenv_script.py | technic/cenv_script | 6c3a9047faec4723f61ad5795f0d8019c0de03ec | ["MIT"] | null | null | null | cenv_script/cenv_script.py | technic/cenv_script | 6c3a9047faec4723f61ad5795f0d8019c0de03ec | ["MIT"]
| null | null | null | """Main module."""
import json
import os
import re
import shutil
import subprocess
import sys
from pathlib import Path
from typing import List, Optional, Tuple
import yaml
ENV_FILE = "environment.yml"
class CondaEnvException(Exception):
pass
def find_environment_file():
p = Path(os.getcwd()).resolve()
while True:
env_file = p / ENV_FILE
if env_file.is_file():
return env_file
if p.parents:
p = p.parent
continue
raise CondaEnvException(
"environment.yml file not find in '%s' or in any of parent directories"
% os.getcwd()
)
def get_conda():
if sys.platform.startswith("win"):
return "conda.bat"
return "conda"
def print_args(args):
def escape(arg):
if arg.find(" ") > -1:
return '"%s"' % arg
return arg
print(">>>", " ".join(map(escape, args)))
def in_directory(file_name, dir_name):
return os.path.realpath(file_name).startswith(os.path.realpath(dir_name) + os.sep)
class CondaEnv:
def __init__(self):
super().__init__()
self._conda = get_conda()
self._env_file = find_environment_file()
with open(self._env_file) as f:
self._data = yaml.safe_load(f)
data = subprocess.check_output([self._conda, "info", "--json"])
data = json.loads(data)
active_name = data["active_prefix_name"]
active_prefix = data["active_prefix"]
if active_name != self._data["name"]:
raise CondaEnvException(
f"Active environment is {active_name} but {ENV_FILE} points to {self._data['name']}"
)
if "prefix" in self._data and active_prefix != self._data["prefix"]:
raise CondaEnvException(
f"Active environment is located in {active_prefix} but {ENV_FILE} points to {self._data['prefix']}"
)
python_exe = shutil.which("python")
if not python_exe:
raise CondaEnvException("Python not found in path")
# The following check is quite strict, but I think it is better to keep it. See comments below.
if not in_directory(python_exe, active_prefix):
raise CondaEnvException(
f"Python '{python_exe}' is not in conda prefix '{active_prefix}'"
)
@staticmethod
def pip_cmd(args):
return [
# disabled due to: https://github.com/conda/conda/issues/9572
# "run", "-n", self._data["name"], "python",
# This can lead to installing into the wrong place, but checks in the __init__ should help
os.path.realpath(shutil.which("python")),
"-m",
"pip",
] + args
def _exec_pip(self, args):
args = self.pip_cmd(args)
# return self._exec_conda(args)
print_args(args)
exit_code = subprocess.call(args)
print("-" * 80)
print("python -m pip finished with exit code: %d" % exit_code)
return exit_code
def _exec_conda(self, args):
args = [self._conda] + args
print_args(args)
exit_code = subprocess.call(args)
print("-" * 80)
print("conda finished with exit code: %d" % exit_code)
return exit_code
@staticmethod
def parse_pkg(pkg_spec: str):
m = re.match(r"^(git|hg|svn|bzr)\+.*|^[\w-]+", pkg_spec)
if m:
return m.group(0)
raise CondaEnvException("Failed to parse package specification '%s'" % pkg_spec)
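
    # Illustrative behavior of parse_pkg (the specs below are hypothetical):
    #   parse_pkg("numpy")                     -> "numpy"
    #   parse_pkg("pandas>=1.0")               -> "pandas"
    #   parse_pkg("git+https://host/pkg.git")  -> "git+https://host/pkg.git"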
def _spec_add_package(self, deps: List[str], package: str) -> bool:
"""Add given package to a deps list if it is not already there
:param deps: list of current dependencies
:param package: package spec that should be added
:return: True when deps list was mutated, False overwise
"""
name = self.parse_pkg(package)
for i, pkg in enumerate(deps):
if not isinstance(pkg, str):
continue
pkg = pkg.strip()
n = self.parse_pkg(pkg)
if n == name:
if pkg != package:
print(f"Updating spec from {pkg} to {package} ...")
deps[i] = package
break
print(f"Same package spec already found: {pkg}")
return False
else:
print(f"Adding package spec {package} to dependencies ...")
deps.append(package)
return True
def install(self, package: str):
package = package.strip()
deps = self._get_deps()
if not self._spec_add_package(deps, package):
return
exit_code = self._exec_conda(["install", "-n", self._data["name"], package])
if exit_code != 0:
raise CondaEnvException("Bad conda exitcode: %d" % exit_code)
name = self.parse_pkg(package)
if not self.check_installed(name):
raise CondaEnvException(f"Package {name} was not installed")
print("Verified that package has been installed")
self._write_env_file()
def check_installed(self, name):
data = subprocess.check_output(
[self._conda, "env", "export", "-n", self._data["name"]]
)
data = yaml.safe_load(data.decode("utf-8"))
names = set(
self.parse_pkg(x)
for x in data.get("dependencies", [])
if isinstance(x, str)
)
return name in names
def pip_install(self, package: str):
package = package.strip()
deps = self._get_pip_deps()
if not self._spec_add_package(deps, package):
return
exit_code = self._exec_pip(["install", package])
if exit_code != 0:
raise CondaEnvException("Bad conda+pip exitcode: %d" % exit_code)
name = self.parse_pkg(package)
if not self.check_pip_installed(name):
raise CondaEnvException(
f"Package {name} was not installed (not found in pip freeze)"
)
print("Verified that package has been installed")
self._write_env_file()
def check_pip_installed(self, name):
data = subprocess.check_output(self.pip_cmd(["freeze"]))
names = set(
self.parse_pkg(l.strip()) for l in data.decode("utf-8").split("\n") if l
)
return name in names
    def _spec_rm_package(
        self, deps: List[str], package: str
    ) -> Tuple[Optional[str], List[str]]:
"""Remove package from the deps list if it is present
:param deps: current list of packages
:param package: spec containing a package name that should be removed
:return: tuple
- package name if it was found or none
- new list of packages
"""
name = self.parse_pkg(package)
new_deps = []
to_remove = 0
for pkg in deps:
if not isinstance(pkg, str):
continue
n = self.parse_pkg(pkg)
if n == name:
to_remove += 1
continue
new_deps.append(pkg)
if to_remove == 0:
return None, new_deps
if to_remove > 1:
print("Warning: more than one spec matched")
return name, new_deps
def remove(self, package: str):
package = package.strip()
name, new_deps = self._spec_rm_package(self._get_deps(), package)
self._set_deps(new_deps)
if name is None:
print("Specified package '%s' not found" % self.parse_pkg(package))
return
exit_code = self._exec_conda(["remove", "-n", self._data["name"], name])
if exit_code != 0:
raise CondaEnvException("Bad conda exitcode: %d" % exit_code)
if self.check_installed(name):
raise CondaEnvException(f"Package {name} was not removed")
self._write_env_file()
def pip_remove(self, package: str):
package = package.strip()
name, new_deps = self._spec_rm_package(self._get_pip_deps(), package)
self._set_pip_deps(new_deps)
if name is None:
print(
"Specified package '%s' not found in pip section"
% self.parse_pkg(package)
)
return
exit_code = self._exec_pip(["uninstall", name])
if exit_code != 0:
raise CondaEnvException("Bad conda exitcode: %d" % exit_code)
if self.check_pip_installed(name):
raise CondaEnvException(
f"Package {name} was not removed (found in pip freeze)"
)
self._write_env_file()
def _write_env_file(self):
with open(self._env_file, "w") as f:
yaml.dump(self._data, f, sort_keys=False)
print("Updated %s" % ENV_FILE)
def _get_deps(self):
if "dependencies" not in self._data:
self._data["dependencies"] = []
return self._data["dependencies"]
def _set_deps(self, value):
self._data["dependencies"] = value
def _get_pip_deps(self):
for item in self._get_deps():
if isinstance(item, dict) and "pip" in item:
return item["pip"]
self._data["dependencies"].append({"pip": []})
return self._data["dependencies"][-1]["pip"]
def _set_pip_deps(self, value):
for item in self._get_deps():
if isinstance(item, dict) and "pip" in item:
item["pip"] = value
return
self._data["dependencies"].append({"pip": []})
self._data["dependencies"][-1]["pip"] = value
| 33.171821 | 115 | 0.574536 | 8,649 | 0.895991 | 0 | 0 | 640 | 0.066301 | 0 | 0 | 2,452 | 0.254014 |
82b593a5d04b8635ad9d0bfca619ad7a94f582c9 | 2,671 | py | Python | cv_utils/cv_util_node.py | OAkyildiz/cibr_img_processing | 69f3293db80e9c0ae57369eaf2885b94adb330df | ["MIT"] | null | null | null | cv_utils/cv_util_node.py | OAkyildiz/cibr_img_processing | 69f3293db80e9c0ae57369eaf2885b94adb330df | ["MIT"] | null | null | null | cv_utils/cv_util_node.py | OAkyildiz/cibr_img_processing | 69f3293db80e9c0ae57369eaf2885b94adb330df | ["MIT"]
| null | null | null | import sys
import rospy
import types
#from std_msgs.msg import String
from sensor_msgs.msg import Image
from cibr_img_processing.msg import Ints
from cv_bridge import CvBridge, CvBridgeError
#make int msgs
#TODO: get the img size from camera_info topics
class CVUtilNode: # abstract this, it can easily work with other cv_utils and be an image bbm_node
def __init__(self, util, name="cv_util_node", pub_topic=False):
#self.obj_pub = rospy.Publisher("image_topic_2", ***)
self.bridge = CvBridge()
self.util=util
self.name=name
rospy.init_node(self.name, anonymous=True)
self.rate=rospy.Rate(30)
self.image_sub = rospy.Subscriber("image_topic", Image, self.callback)
self.result_pub = rospy.Publisher("results", Ints, queue_size=10) #always publish data
self.result_msgs = [-1,-1,-1] #make int msgs
        self.pubs = []
self.subs=[]
if pub_topic:
self.image_pub = rospy.Publisher(pub_topic,Image, queue_size=10)
pass #do stuff with img.pub
def callback(self,data):
try:
self.util.hook(self.bridge.imgmsg_to_cv2(data, "bgr8"))
except CvBridgeError as e:
print(e)
def data_pub(self):
self.result_pub.publish(self.util.results) #try catch
    def img_pub(self, cv_image): # to handle converting from OpenCV to ROS
try:
self.image_pub.publish(self.bridge.cv2_to_imgmsg(cv_image, "bgr8"))
except CvBridgeError as e:
print(e)
def run(self):
self.util.init_windows()
while not rospy.is_shutdown():
try:
if self.util.loop(): break
if not -1 in self.util.results and self.util._publish:
self.data_pub()
self.util._publish = 0
# if self.util._publish:
# for pub in self.pubs:
# pub.publish
#self.rate.sleep()
            except KeyboardInterrupt:
                self.util.shutdown()
                break
        self.util.shutdown()
    # adds a publisher to the publisher list
    def attach_pub(self, topic, msg_type):
        self.pubs.append(rospy.Publisher(topic, msg_type, queue_size=1))
# TODO:attach structs of publisher and message template instead
# so it is iterable together
#pubs.pub=... pubs.msg=type()
    def attach_sub(self, topic, msg_type, cb_handle):
        self.subs.append(rospy.Subscriber(topic, msg_type, cb_handle))
def attach_controls(self, fun_handle):
# bind the method to instance
self.util.external_ops=types.MethodType(fun_handle,self.util)
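
# A minimal usage sketch (assumes a `MyUtil` object exposing the hook/loop/
# init_windows/shutdown/results interface that CVUtilNode calls; the class
# name and topic are hypothetical):
#   if __name__ == '__main__':
#       node = CVUtilNode(MyUtil(), name="my_cv_node", pub_topic="processed")
#       node.run()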
| 33.810127 | 98 | 0.622613 | 2,415 | 0.904156 | 0 | 0 | 0 | 0 | 0 | 0 | 649 | 0.24298 |
82b8f3579fbf367d54a1259558d837656079d6f8 | 448 | py | Python | pokepay/request/get_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | ["MIT"] | null | null | null | pokepay/request/get_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | ["MIT"] | null | null | null | pokepay/request/get_shop.py | pokepay/pokepay-partner-python-sdk | 7437370dc1cd0bde38959713015074315291b1e1 | ["MIT"]
| 1 | 2022-01-28T03:00:12.000Z | 2022-01-28T03:00:12.000Z | # DO NOT EDIT: File is generated by code generator.
from pokepay_partner_python_sdk.pokepay.request.request import PokepayRequest
from pokepay_partner_python_sdk.pokepay.response.shop_with_accounts import ShopWithAccounts
class GetShop(PokepayRequest):
def __init__(self, shop_id):
self.path = "/shops" + "/" + shop_id
self.method = "GET"
self.body_params = {}
self.response_class = ShopWithAccounts
| 32 | 91 | 0.725446 | 222 | 0.495536 | 0 | 0 | 0 | 0 | 0 | 0 | 67 | 0.149554 |
82b9e4c2e702d4c81505c6425db3c75c45108c10 | 2,191 | py | Python | clearml/backend_interface/setupuploadmixin.py | arielleoren/clearml | 01f0be9895272c483129bab784a43cbd002022a7 | ["Apache-2.0"] | 2,097 | 2019-06-11T14:36:25.000Z | 2020-12-21T03:52:59.000Z | clearml/backend_interface/setupuploadmixin.py | arielleoren/clearml | 01f0be9895272c483129bab784a43cbd002022a7 | ["Apache-2.0"] | 347 | 2020-12-23T22:38:48.000Z | 2022-03-31T20:01:06.000Z | clearml/backend_interface/setupuploadmixin.py | arielleoren/clearml | 01f0be9895272c483129bab784a43cbd002022a7 | ["Apache-2.0"]
| 256 | 2019-06-11T14:36:28.000Z | 2020-12-18T08:32:47.000Z | from abc import abstractproperty
from ..backend_config.bucket_config import S3BucketConfig
from ..storage.helper import StorageHelper
class SetupUploadMixin(object):
log = abstractproperty()
storage_uri = abstractproperty()
def setup_upload(
self, bucket_name, host=None, access_key=None, secret_key=None, region=None, multipart=True, https=True, verify=True):
"""
Setup upload options (currently only S3 is supported)
:param bucket_name: AWS bucket name
:type bucket_name: str
:param host: Hostname. Only required in case a Non-AWS S3 solution such as a local Minio server is used)
:type host: str
:param access_key: AWS access key. If not provided, we'll attempt to obtain the key from the
configuration file (bucket-specific, than global)
:type access_key: str
:param secret_key: AWS secret key. If not provided, we'll attempt to obtain the secret from the
configuration file (bucket-specific, than global)
:type secret_key: str
:param multipart: Server supports multipart. Only required when using a Non-AWS S3 solution that doesn't support
multipart.
:type multipart: bool
:param https: Server supports HTTPS. Only required when using a Non-AWS S3 solution that only supports HTTPS.
:type https: bool
:param region: Bucket region. Required if the bucket doesn't reside in the default region (us-east-1)
:type region: str
:param verify: Whether or not to verify SSL certificates. Only required when using a Non-AWS S3 solution that only supports HTTPS with self-signed certificate.
:type verify: bool
"""
self._bucket_config = S3BucketConfig(
bucket=bucket_name,
host=host,
key=access_key,
secret=secret_key,
multipart=multipart,
secure=https,
region=region,
verify=verify
)
self.storage_uri = ('s3://%(host)s/%(bucket_name)s' if host else 's3://%(bucket_name)s') % locals()
StorageHelper.add_configuration(self._bucket_config, log=self.log)
| 45.645833 | 167 | 0.665906 | 2,053 | 0.937015 | 0 | 0 | 0 | 0 | 0 | 0 | 1,381 | 0.630306 |
82ba0e0fc40394fedf62fac1ec2c951372c86121 | 2,872 | py | Python | tests/test_parser.py | szymon6927/parcels-parser | c2cee7a75edfbb0abba0fc4ea99c7a84e24e3749 | ["MIT"] | null | null | null | tests/test_parser.py | szymon6927/parcels-parser | c2cee7a75edfbb0abba0fc4ea99c7a84e24e3749 | ["MIT"] | null | null | null | tests/test_parser.py | szymon6927/parcels-parser | c2cee7a75edfbb0abba0fc4ea99c7a84e24e3749 | ["MIT"]
| null | null | null | import os
import unittest
import pandas as pd
from application.ParcelsParser import ParcelsParser
class TestPracelsParser(unittest.TestCase):
def setUp(self):
self.parser = ParcelsParser("./test_cadastral_parcels.tsv", "cadastral_parcel_identifier")
    def test_if_file_exist(self):
        file_path = self.parser.get_file()
        self.assertTrue(os.path.isfile(file_path))
    def test_if_file_doesnt_exist(self):
        self.parser.set_file("./test_cadastral_parcels_wrong.tsv")
        file_path = self.parser.get_file()
        self.assertFalse(os.path.isfile(file_path))
def test_if_column_exist(self):
dirpath = os.path.dirname(os.path.abspath(__file__))
filepath = os.path.join(dirpath, self.parser.get_file())
df = pd.read_csv(filepath, sep='\t')
        self.assertIn(self.parser.get_column_name(), df.columns)
def test_get_identifiers_data(self):
dirpath = os.path.dirname(os.path.abspath(__file__))
filepath = os.path.join(dirpath, self.parser.get_file())
self.parser.set_file(filepath)
self.parser.get_identifiers_data()
data = self.parser.get_data()
        self.assertEqual(7, len(data))
def test_province_county_commune(self):
segment = "301304"
province_code, county_code, commune_code = self.parser.get_province_county_commune(segment)
self.assertEqual(province_code, "30")
self.assertEqual(county_code, "13")
self.assertEqual(commune_code, "4")
def test_extract_data(self):
dirpath = os.path.dirname(os.path.abspath(__file__))
filepath = os.path.join(dirpath, self.parser.get_file())
df = pd.read_csv(filepath, sep='\t')
self.parser.set_file(filepath)
self.parser.get_identifiers_data()
self.parser.extract_data()
result = self.parser.get_result()
province_code_list = df['province_code'].astype(str).tolist()
county_code_list = df['county_code'].astype(str).tolist()
commune_code_list = df['commune_code'].astype(str).tolist()
commune_type_list = df['commune_type'].astype(str).tolist()
district_number_list = df['district_number'].astype(str).tolist()
parcel_number_list = df['parcel_number'].astype(str).tolist()
for i, item in enumerate(result):
self.assertEqual(item['province_code'], province_code_list[i])
self.assertEqual(item['county_code'], county_code_list[i])
self.assertEqual(item['commune_code'], commune_code_list[i])
self.assertEqual(item['commune_type'], commune_type_list[i])
self.assertEqual(item['district_number'], district_number_list[i])
self.assertEqual(item['parcel_number'], parcel_number_list[i])
if __name__ == '__main__':
unittest.main()
| 37.789474 | 99 | 0.683496 | 2,722 | 0.947772 | 0 | 0 | 0 | 0 | 0 | 0 | 308 | 0.107242 |
82badbb757028140899a1d3ea355a9a115e4d31b | 726 | py | Python | dataStructures/complete.py | KarlParkinson/practice | 6bbbd4a8e320732523d83297c1021f52601a20d8 | ["MIT"] | null | null | null | dataStructures/complete.py | KarlParkinson/practice | 6bbbd4a8e320732523d83297c1021f52601a20d8 | ["MIT"] | null | null | null | dataStructures/complete.py | KarlParkinson/practice | 6bbbd4a8e320732523d83297c1021f52601a20d8 | ["MIT"]
| null | null | null | import binTree
import queue
def complete(tree):
q = queue.Queue()
nonFull = False
q.enqueue(tree)
while (not q.isEmpty()):
t = q.dequeue()
if (t.getLeftChild()):
if (nonFull):
return False
q.enqueue(t.getLeftChild())
if (t.getLeftChild() == None):
nonFull = True
if (t.getRightChild()):
if (nonFull):
return False
q.enqueue(t.getRightChild())
if (t.getRightChild() == None):
nonFull = True
return True
t = binTree.BinaryTree(1)
t.insertLeft(2)
t.insertRight(3)
t.getRightChild().insertLeft(5)
t.getRightChild().insertRight(6)
print complete(t)
| 21.352941 | 40 | 0.541322 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
82bbb29af0b1433647177912df15449203606a08 | 3,322 | py | Python | sd_maskrcnn/sd_maskrcnn/gop/src/eval_bnd.py | marctuscher/cv_pipeline | b641423e72ea292139a5e35a411e30c1e21c7070 | ["MIT"] | 1 | 2021-03-28T17:46:45.000Z | 2021-03-28T17:46:45.000Z | sd-maskrcnn/sd_maskrcnn/gop/src/eval_bnd.py | jayef0/cv_pipeline | dc3b79062174f583a3a90ac8deea918c498c0dd5 | ["MIT"] | null | null | null | sd-maskrcnn/sd_maskrcnn/gop/src/eval_bnd.py | jayef0/cv_pipeline | dc3b79062174f583a3a90ac8deea918c498c0dd5 | ["MIT"]
| null | null | null | # -*- encoding: utf-8
"""
Copyright (c) 2014, Philipp Krähenbühl
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Stanford University nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY Philipp Krähenbühl ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL Philipp Krähenbühl BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from .gop import *
import numpy as np
from .util import *
LATEX_OUTPUT=True
for bnd in ['st','sf','mssf','ds']:
# Load the dataset
over_segs,segmentations,boxes = loadVOCAndOverSeg( "test", detector=bnd, year="2012" )
has_box = [len(b)>0 for b in boxes]
boxes = [np.vstack(b).astype(np.int32) if len(b)>0 else np.zeros((0,4),dtype=np.int32) for b in boxes]
# Generate the proposals
s = []
s.append( (50,5,0.7) ) # ~250 props
s.append( (100,5,0.75) ) # ~450 props
s.append( (180,5,0.8) ) # ~650 props
s.append( (200,7,0.85) ) # ~1100 props
s.append( (250,10,0.9) ) # ~2200 props
s.append( (290,20,0.9) ) # ~4400 props
for N_S,N_T,iou in s:
prop_settings = setupBaseline( N_S, N_T, iou )
bo,b_bo,pool_s,box_pool_s = dataset.proposeAndEvaluate( over_segs, segmentations, boxes, proposals.Proposal( prop_settings ) )
if LATEX_OUTPUT:
print(( "Baseline %s ($%d$,$%d$) & %d & %0.3f & %0.3f & %0.3f & %0.3f & \\\\"%(bnd, N_S,N_T,np.mean(pool_s),np.mean(bo[:,0]),np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]), np.mean(bo[:,0]>=0.5), np.mean(bo[:,0]>=0.7) ) ))
else:
print(( "ABO ", np.mean(bo[:,0]) ))
print(( "cover ", np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]) ))
print(( "recall ", np.mean(bo[:,0]>=0.5), "\t", np.mean(bo[:,0]>=0.6), "\t", np.mean(bo[:,0]>=0.7), "\t", np.mean(bo[:,0]>=0.8), "\t", np.mean(bo[:,0]>=0.9), "\t", np.mean(bo[:,0]>=1) ))
print(( "# props ", np.mean(pool_s) ))
print(( "box ABO ", np.mean(b_bo) ))
print(( "box recall ", np.mean(b_bo>=0.5), "\t", np.mean(b_bo>=0.6), "\t", np.mean(b_bo>=0.7), "\t", np.mean(b_bo>=0.8), "\t", np.mean(b_bo>=0.9), "\t", np.mean(b_bo>=1) ))
print(( "# box ", np.mean(box_pool_s[~np.isnan(box_pool_s)]) ))
| 53.580645 | 219 | 0.654425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,948 | 0.585337 |
82bc8b7d1c31f1a7b50154e6eb1646fd9530ca29 | 1,473 | py | Python | ctr_prediction/datasets/Amazon/AmazonElectronics_x1/convert_amazonelectronics_x1.py | jimzhu/OpenCTR-benchmarks | e8e723cd7a0ef5ddd40e735b85ce7669955a3a99 | ["Apache-2.0"] | 59 | 2021-10-31T13:59:37.000Z | 2022-03-31T12:05:55.000Z | ctr_prediction/datasets/Amazon/AmazonElectronics_x1/convert_amazonelectronics_x1.py | jimzhu/OpenCTR-benchmarks | e8e723cd7a0ef5ddd40e735b85ce7669955a3a99 | ["Apache-2.0"] | 5 | 2021-12-06T12:11:21.000Z | 2022-03-18T06:21:13.000Z | ctr_prediction/datasets/Amazon/AmazonElectronics_x1/convert_amazonelectronics_x1.py | jimzhu/OpenCTR-benchmarks | e8e723cd7a0ef5ddd40e735b85ce7669955a3a99 | ["Apache-2.0"]
| 17 | 2021-10-21T10:44:09.000Z | 2022-03-24T11:35:09.000Z | import pickle
import pandas as pd
# cat aa ab ac > dataset.pkl from https://github.com/zhougr1993/DeepInterestNetwork
with open('dataset.pkl', 'rb') as f:
train_set = pickle.load(f, encoding='bytes')
test_set = pickle.load(f, encoding='bytes')
cate_list = pickle.load(f, encoding='bytes')
user_count, item_count, cate_count = pickle.load(f, encoding='bytes')
train_data = []
for sample in train_set:
user_id = sample[0]
item_id = sample[2]
item_history = "^".join([str(i) for i in sample[1]])
label = sample[3]
cate_id = cate_list[item_id]
cate_history = "^".join([str(i) for i in cate_list[sample[1]]])
train_data.append([label, user_id, item_id, cate_id, item_history, cate_history])
train_df = pd.DataFrame(train_data, columns=['label', 'user_id', 'item_id', 'cate_id', 'item_history', 'cate_history'])
train_df.to_csv("train.csv", index=False)
test_data = []
for sample in test_set:
user_id = sample[0]
item_pair = sample[2]
item_history = "^".join([str(i) for i in sample[1]])
cate_history = "^".join([str(i) for i in cate_list[sample[1]]])
test_data.append([1, user_id, item_pair[0], cate_list[item_pair[0]], item_history, cate_history])
test_data.append([0, user_id, item_pair[1], cate_list[item_pair[1]], item_history, cate_history])
test_df = pd.DataFrame(test_data, columns=['label', 'user_id', 'item_id', 'cate_id', 'item_history', 'cate_history'])
test_df.to_csv("test.csv", index=False)
| 42.085714 | 119 | 0.692464 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 285 | 0.193483 |
82bea645f31e2de3666e262ad0a20085ef770deb | 656 | py | Python | email_extras/admin.py | maqmigh/django-email-extras | c991b59fa53f9a5324ea7d9f3cc65bc1a9aa8e42 | ["BSD-2-Clause"] | 33 | 2015-03-17T12:08:05.000Z | 2021-12-17T23:06:26.000Z | email_extras/admin.py | maqmigh/django-email-extras | c991b59fa53f9a5324ea7d9f3cc65bc1a9aa8e42 | ["BSD-2-Clause"] | 26 | 2015-10-09T01:01:00.000Z | 2021-02-09T11:11:52.000Z | email_extras/admin.py | maqmigh/django-email-extras | c991b59fa53f9a5324ea7d9f3cc65bc1a9aa8e42 | ["BSD-2-Clause"]
| 29 | 2015-02-25T07:51:12.000Z | 2022-02-27T07:05:40.000Z |
from email_extras.settings import USE_GNUPG
if USE_GNUPG:
from django.contrib import admin
from email_extras.models import Key, Address
from email_extras.forms import KeyForm
class KeyAdmin(admin.ModelAdmin):
form = KeyForm
list_display = ('__str__', 'email_addresses')
readonly_fields = ('fingerprint', )
class AddressAdmin(admin.ModelAdmin):
list_display = ('__str__', 'key')
readonly_fields = ('key', )
def has_add_permission(self, request):
return False
admin.site.register(Key, KeyAdmin)
admin.site.register(Address, AddressAdmin)
| 26.24 | 54 | 0.652439 | 350 | 0.533537 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.088415 |
82c010e02b691e4b2aad5f24f459cf89f58d643c | 6,265 | py | Python | Tableau-Supported/Python/insert_data_with_expressions.py | TableauKyle/hyper-api-samples | 37c21c988122c6dbfb662d9ec72d90c4cd30e4cc | ["MIT"] | 73 | 2020-04-29T15:41:55.000Z | 2022-03-12T04:55:24.000Z | Tableau-Supported/Python/insert_data_with_expressions.py | TableauKyle/hyper-api-samples | 37c21c988122c6dbfb662d9ec72d90c4cd30e4cc | ["MIT"] | 32 | 2020-06-10T00:47:20.000Z | 2022-03-28T11:19:00.000Z | Tableau-Supported/Python/insert_data_with_expressions.py | TableauKyle/hyper-api-samples | 37c21c988122c6dbfb662d9ec72d90c4cd30e4cc | ["MIT"]
| 54 | 2020-05-01T20:01:51.000Z | 2022-03-28T11:11:00.000Z | # -----------------------------------------------------------------------------
#
# This file is the copyrighted property of Tableau Software and is protected
# by registered patents and other applicable U.S. and international laws and
# regulations.
#
# You may adapt this file and modify it to fit into your context and use it
# as a template to start your own projects.
#
# -----------------------------------------------------------------------------
import shutil
from pathlib import Path
from tableauhyperapi import HyperProcess, Telemetry, \
Connection, CreateMode, \
NOT_NULLABLE, NULLABLE, SqlType, TableDefinition, \
Inserter, \
escape_name, escape_string_literal, \
TableName, Name, \
HyperException
# The table is called "Extract" and will be created in the "Extract" schema.
# This has historically been the default table name and schema for extracts created by Tableau
extract_table = TableDefinition(
table_name=TableName("Extract", "Extract"),
columns=[
TableDefinition.Column(name='Order ID', type=SqlType.int(), nullability=NOT_NULLABLE),
TableDefinition.Column(name='Ship Timestamp', type=SqlType.timestamp(), nullability=NOT_NULLABLE),
TableDefinition.Column(name='Ship Mode', type=SqlType.text(), nullability=NOT_NULLABLE),
TableDefinition.Column(name='Ship Priority', type=SqlType.int(), nullability=NOT_NULLABLE)
]
)
def run_insert_data_with_expressions():
"""
An example of how to push down computations to Hyper during insertion with expressions.
"""
print("EXAMPLE - Push down computations to Hyper during insertion with expressions")
path_to_database = Path("orders.hyper")
# Starts the Hyper Process with telemetry enabled to send data to Tableau.
# To opt out, simply set telemetry=Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU.
with HyperProcess(telemetry=Telemetry.SEND_USAGE_DATA_TO_TABLEAU) as hyper:
# Creates new Hyper file "orders.hyper".
# Replaces file with CreateMode.CREATE_AND_REPLACE if it already exists.
with Connection(endpoint=hyper.endpoint,
database=path_to_database,
create_mode=CreateMode.CREATE_AND_REPLACE) as connection:
connection.catalog.create_schema(schema=extract_table.table_name.schema_name)
connection.catalog.create_table(table_definition=extract_table)
# Hyper API's Inserter allows users to transform data during insertion.
# To make use of data transformation during insertion, the inserter requires the following inputs
# 1. The connection to the Hyper instance containing the table.
# 2. The table name or table defintion into which data is inserted.
# 3. List of Inserter.ColumnMapping.
# This list informs the inserter how each column in the target table is tranformed.
# The list must contain all the columns into which data is inserted.
# "Inserter.ColumnMapping" maps a valid SQL expression (if any) to a column in the target table.
# For example Inserter.ColumnMapping('target_column_name', f'{escape_name("colA")}*{escape_name("colB")}')
# The column "target_column" contains the product of "colA" and "colB" after successful insertion.
# SQL expression string is optional in Inserter.ColumnMapping.
# For a column without any transformation only the column name is required.
# For example Inserter.ColumnMapping('no_data_transformation_column')
# 4. The Column Definition of all input values provided to the Inserter
# Inserter definition contains the column definition for the values that are inserted
inserter_definition = [
TableDefinition.Column(name='Order ID', type=SqlType.int(), nullability=NOT_NULLABLE),
TableDefinition.Column(name='Ship Timestamp Text', type=SqlType.text(), nullability=NOT_NULLABLE),
TableDefinition.Column(name='Ship Mode', type=SqlType.text(), nullability=NOT_NULLABLE),
TableDefinition.Column(name='Ship Priority Text', type=SqlType.text(), nullability=NOT_NULLABLE)]
# Column 'Order Id' is inserted into "Extract"."Extract" as-is
# Column 'Ship Timestamp' in "Extract"."Extract" of timestamp type is computed from Column 'Ship Timestamp Text' of text type using 'to_timestamp()'
# Column 'Ship Mode' is inserted into "Extract"."Extract" as-is
            # Column 'Ship Priority' in "Extract"."Extract" of integer type is computed from Column 'Ship Priority Text' of text type using a 'CASE' statement
shipPriorityAsIntCaseExpression = f'CASE {escape_name("Ship Priority Text")} ' \
f'WHEN {escape_string_literal("Urgent")} THEN 1 ' \
f'WHEN {escape_string_literal("Medium")} THEN 2 ' \
f'WHEN {escape_string_literal("Low")} THEN 3 END'
column_mappings = [
'Order ID',
Inserter.ColumnMapping(
'Ship Timestamp', f'to_timestamp({escape_name("Ship Timestamp Text")}, {escape_string_literal("YYYY-MM-DD HH24:MI:SS")})'),
'Ship Mode',
Inserter.ColumnMapping('Ship Priority', shipPriorityAsIntCaseExpression)
]
# Data to be inserted
data_to_insert = [
[399, '2012-09-13 10:00:00', 'Express Class', 'Urgent'],
[530, '2012-07-12 14:00:00', 'Standard Class', 'Low']
]
# Insert data into "Extract"."Extract" table with expressions
with Inserter(connection, extract_table, column_mappings, inserter_definition=inserter_definition) as inserter:
inserter.add_rows(rows=data_to_insert)
inserter.execute()
print("The data was added to the table.")
print("The connection to the Hyper file has been closed.")
print("The Hyper process has been shut down.")
if __name__ == '__main__':
try:
run_insert_data_with_expressions()
except HyperException as ex:
print(ex)
exit(1)
| 53.547009 | 160 | 0.653312 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,473 | 0.55435 |
82c029ca3481da78e9c1db45150fc5d81b30aeac | 2,234 | py | Python | dumplogs/bin.py | xinhuagu/dumplogs | 5580ff5fe4b054ab9a007e1a023b01fa71917f80 | [
"BSD-3-Clause"
]
| 1 | 2021-05-02T11:51:45.000Z | 2021-05-02T11:51:45.000Z | dumplogs/bin.py | xinhuagu/dumplogs | 5580ff5fe4b054ab9a007e1a023b01fa71917f80 | [
"BSD-3-Clause"
]
| null | null | null | dumplogs/bin.py | xinhuagu/dumplogs | 5580ff5fe4b054ab9a007e1a023b01fa71917f80 | [
"BSD-3-Clause"
]
| null | null | null | import boto3
import argparse
import os,sys
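# Hypothetical CLI usage:
#   python bin.py --profile my-profile -o ./dumps /aws/lambda/my-function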
def main(argv=None):
argv = (argv or sys.argv)[1:]
parser = argparse.ArgumentParser(description='dump all aws log streams into files')
parser.add_argument("--profile",
dest="aws_profile",
type=str,
default=os.environ.get('AWS_PROFILE', None),
help="aws profile")
parser.add_argument("-o", "--output",
type=str,
dest='output',
default=".",
help="output folder")
parser.add_argument('group_name',help='aws loggroup name')
options,args = parser.parse_known_args(argv)
    """
    main logic
    """
    aws_profile = options.aws_profile
    group_name = options.group_name
    output_folder = options.output
    # Honour the requested profile when building the client; fall back to the
    # default credential chain when no profile is given.
    session = boto3.Session(profile_name=aws_profile) if aws_profile else boto3.Session()
    client = session.client('logs')
stream_list=[]
stream_response = client.describe_log_streams(
logGroupName=group_name,
orderBy='LastEventTime',
limit=50,
)
while True:
stream_name_arr = stream_response['logStreams']
for stream_elm in stream_name_arr:
stream_name = stream_elm['logStreamName']
stream_list.append(stream_name)
if "nextToken" in stream_response:
next_token = stream_response['nextToken']
stream_response = client.describe_log_streams(
logGroupName=group_name,
orderBy='LastEventTime',
nextToken=next_token,
limit=50,
)
else:
break
print("loggroup {} has total {} streams".format(group_name,len(stream_list)))
for s_name in stream_list:
file_name=s_name.replace("[$LATEST]", "").replace("/","-")
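        # NOTE: get_log_events returns a single page (at most 1 MB / 10,000
        # events); very long streams would need nextForwardToken pagination.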
stream_content= client.get_log_events(
logGroupName=group_name,
logStreamName=s_name,
)
print("{} ==> {}".format(s_name,file_name))
completeName = os.path.join(output_folder, file_name)
with open(completeName, "w") as text_file:
text_file.write("{}".format(stream_content))
print("Done.")
| 29.394737 | 87 | 0.581021 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 347 | 0.155327 |
82c2685c2ffd7e5c7861dd6a5e7721b4f4a54e32 | 5,239 | py | Python | ch5/gaussian_mixture.py | susantamoh84/HandsOn-Unsupervised-Learning-with-Python | 056953d0462923a674faf0a23b27239bc9f69975 | [
"MIT"
]
| 25 | 2018-09-03T11:12:49.000Z | 2022-03-13T01:42:57.000Z | Chapter05/gaussian_mixture.py | AIRob/HandsOn-Unsupervised-Learning-with-Python | 1dbe9b3fdf5255f610e0c9c52a82935baa6a4a3e | [
"MIT"
]
| null | null | null | Chapter05/gaussian_mixture.py | AIRob/HandsOn-Unsupervised-Learning-with-Python | 1dbe9b3fdf5255f610e0c9c52a82935baa6a4a3e | [
"MIT"
]
| 35 | 2018-09-15T11:06:12.000Z | 2021-12-08T04:28:55.000Z | import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.datasets import make_blobs
from sklearn.mixture import GaussianMixture
from sklearn.cluster import KMeans
from matplotlib.patches import Ellipse
# For reproducibility
np.random.seed(1000)
nb_samples = 300
nb_centers = 2
if __name__ == '__main__':
# Create the dataset
X, Y = make_blobs(n_samples=nb_samples, n_features=2, center_box=[-1, 1], centers=nb_centers,
cluster_std=[1.0, 0.6], random_state=1000)
# Show the dataset
sns.set()
fig, ax = plt.subplots(figsize=(15, 9))
ax.scatter(X[:, 0], X[:, 1], s=120)
ax.set_xlabel(r'$x_0$', fontsize=14)
ax.set_ylabel(r'$x_1$', fontsize=14)
plt.show()
# Train the model
gm = GaussianMixture(n_components=2, random_state=1000)
gm.fit(X)
Y_pred = gm.fit_predict(X)
print('Means: \n{}'.format(gm.means_))
print('Covariance matrices: \n{}'.format(gm.covariances_))
print('Weights: \n{}'.format(gm.weights_))
m1 = gm.means_[0]
m2 = gm.means_[1]
c1 = gm.covariances_[0]
c2 = gm.covariances_[1]
we1 = 1 + gm.weights_[0]
we2 = 1 + gm.weights_[1]
# Eigendecompose the covariances
w1, v1 = np.linalg.eigh(c1)
w2, v2 = np.linalg.eigh(c2)
nv1 = v1 / np.linalg.norm(v1)
nv2 = v2 / np.linalg.norm(v2)
print('Eigenvalues 1: \n{}'.format(w1))
print('Eigenvectors 1: \n{}'.format(nv1))
print('Eigenvalues 2: \n{}'.format(w2))
print('Eigenvectors 2: \n{}'.format(nv2))
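    # Angles (in degrees) between each principal eigenvector and the x-axis,
    # used below to orient the confidence ellipses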
a1 = np.arccos(np.dot(nv1[:, 1], [1.0, 0.0]) / np.linalg.norm(nv1[:, 1])) * 180.0 / np.pi
a2 = np.arccos(np.dot(nv2[:, 1], [1.0, 0.0]) / np.linalg.norm(nv2[:, 1])) * 180.0 / np.pi
# Perform K-Means clustering
km = KMeans(n_clusters=2, random_state=1000)
km.fit(X)
Y_pred_km = km.predict(X)
# Show the comparison of the results
fig, ax = plt.subplots(1, 2, figsize=(22, 9), sharey=True)
ax[0].scatter(X[Y_pred == 0, 0], X[Y_pred == 0, 1], s=80, marker='o', label='Gaussian 1')
ax[0].scatter(X[Y_pred == 1, 0], X[Y_pred == 1, 1], s=80, marker='d', label='Gaussian 2')
g1 = Ellipse(xy=m1, width=w1[1] * 3, height=w1[0] * 3, fill=False, linestyle='dashed', angle=a1, color='black',
linewidth=1)
g1_1 = Ellipse(xy=m1, width=w1[1] * 2, height=w1[0] * 2, fill=False, linestyle='dashed', angle=a1, color='black',
linewidth=2)
g1_2 = Ellipse(xy=m1, width=w1[1] * 1.4, height=w1[0] * 1.4, fill=False, linestyle='dashed', angle=a1,
color='black', linewidth=3)
g2 = Ellipse(xy=m2, width=w2[1] * 3, height=w2[0] * 3, fill=False, linestyle='dashed', angle=a2, color='black',
linewidth=1)
g2_1 = Ellipse(xy=m2, width=w2[1] * 2, height=w2[0] * 2, fill=False, linestyle='dashed', angle=a2, color='black',
linewidth=2)
g2_2 = Ellipse(xy=m2, width=w2[1] * 1.4, height=w2[0] * 1.4, fill=False, linestyle='dashed', angle=a2,
color='black', linewidth=3)
ax[0].set_xlabel(r'$x_0$', fontsize=16)
ax[0].set_ylabel(r'$x_1$', fontsize=16)
ax[0].add_artist(g1)
ax[0].add_artist(g1_1)
ax[0].add_artist(g1_2)
ax[0].add_artist(g2)
ax[0].add_artist(g2_1)
ax[0].add_artist(g2_2)
ax[0].set_title('Gaussian Mixture', fontsize=16)
ax[0].legend(fontsize=16)
ax[1].scatter(X[Y_pred_km == 0, 0], X[Y_pred_km == 0, 1], s=80, marker='o', label='Cluster 1')
ax[1].scatter(X[Y_pred_km == 1, 0], X[Y_pred_km == 1, 1], s=80, marker='d', label='Cluster 2')
ax[1].set_xlabel(r'$x_0$', fontsize=16)
ax[1].set_title('K-Means', fontsize=16)
ax[1].legend(fontsize=16)
# Predict the probability of some sample points
print('P([0, -2]=G1) = {:.3f} and P([0, -2]=G2) = {:.3f}'.format(*list(gm.predict_proba([[0.0, -2.0]]).squeeze())))
print('P([1, -1]=G1) = {:.3f} and P([1, -1]=G2) = {:.3f}'.format(*list(gm.predict_proba([[1.0, -1.0]]).squeeze())))
print('P([1, 0]=G1) = {:.3f} and P([1, 0]=G2) = {:.3f}'.format(*list(gm.predict_proba([[1.0, 0.0]]).squeeze())))
plt.show()
# Compute AICs, BICs, and log-likelihood
n_max_components = 20
aics = []
bics = []
log_likelihoods = []
for n in range(1, n_max_components + 1):
gm = GaussianMixture(n_components=n, random_state=1000)
gm.fit(X)
aics.append(gm.aic(X))
bics.append(gm.bic(X))
log_likelihoods.append(gm.score(X) * nb_samples)
# Show the results
fig, ax = plt.subplots(1, 3, figsize=(20, 6))
ax[0].plot(range(1, n_max_components + 1), aics)
ax[0].set_xticks(range(1, n_max_components + 1))
ax[0].set_xlabel('Number of Gaussians', fontsize=14)
ax[0].set_title('AIC', fontsize=14)
ax[1].plot(range(1, n_max_components + 1), bics)
ax[1].set_xticks(range(1, n_max_components + 1))
ax[1].set_xlabel('Number of Gaussians', fontsize=14)
ax[1].set_title('BIC', fontsize=14)
ax[2].plot(range(1, n_max_components + 1), log_likelihoods)
ax[2].set_xticks(range(1, n_max_components + 1))
ax[2].set_xlabel('Number of Gaussians', fontsize=14)
ax[2].set_title('Log-likelihood', fontsize=14)
plt.show()
| 32.339506 | 119 | 0.604314 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 883 | 0.168544 |
82c29ca8b328d9cb75ca5d391549720bbf654d8a | 5,771 | py | Python | shipyard2/shipyard2/rules/images/merge_image.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
]
| 3 | 2016-01-04T06:28:52.000Z | 2020-09-20T13:18:40.000Z | shipyard2/shipyard2/rules/images/merge_image.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
]
| null | null | null | shipyard2/shipyard2/rules/images/merge_image.py | clchiou/garage | 446ff34f86cdbd114b09b643da44988cf5d027a3 | [
"MIT"
]
| null | null | null | __all__ = [
'DEFAULT_FILTERS',
'DEFAULT_XAR_FILTERS',
'merge_image',
]
import contextlib
import logging
import tempfile
from pathlib import Path
from g1 import scripts
from g1.containers import models
from g1.containers import scripts as ctr_scripts
from . import utils
LOG = logging.getLogger(__name__)
DEFAULT_FILTERS = (
# Do not leak any source codes to the application image.
# Keep drydock path in sync with //bases:build.
('exclude', '/home/plumber/drydock'),
('exclude', '/home/plumber/.gradle'),
('exclude', '/home/plumber/.gsutil'),
('exclude', '/home/plumber/.python_history'),
('exclude', '/home/plumber/.vpython_cipd_cache'),
('exclude', '/home/plumber/.vpython-root'),
('exclude', '/home/plumber/.wget-hsts'),
('exclude', '/root/.cache'),
('exclude', '/usr/src'),
# Include only relevant files under /etc.
('include', '/etc/'),
# We use distro java at the moment.
('include', '/etc/alternatives/'),
('include', '/etc/alternatives/java'),
('include', '/etc/java*'),
('include', '/etc/java*/**'),
('include', '/etc/group'),
('include', '/etc/group-'),
('include', '/etc/gshadow'),
('include', '/etc/gshadow-'),
('include', '/etc/inputrc'),
('include', '/etc/ld.so.cache'),
('include', '/etc/passwd'),
('include', '/etc/passwd-'),
('include', '/etc/shadow'),
('include', '/etc/shadow-'),
('include', '/etc/ssl/'),
('include', '/etc/ssl/**'),
('include', '/etc/subgid'),
('include', '/etc/subgid-'),
('include', '/etc/subuid'),
('include', '/etc/subuid-'),
('include', '/etc/sudoers.d/'),
('include', '/etc/sudoers.d/**'),
('exclude', '/etc/**'),
# Exclude distro binaries from application image (note that base
# image includes a base set of distro binaries).
('exclude', '/bin'),
('exclude', '/sbin'),
# We use distro java at the moment.
('include', '/usr/bin/'),
('include', '/usr/bin/java'),
('exclude', '/usr/bin/**'),
('exclude', '/usr/bin'),
('exclude', '/usr/sbin'),
# Exclude headers.
('exclude', '/usr/include'),
('exclude', '/usr/local/include'),
# Exclude distro systemd files.
('exclude', '/lib/systemd'),
('exclude', '/usr/lib/systemd'),
# In general, don't exclude distro libraries since we might depend
# on them, except these libraries.
('exclude', '/usr/lib/apt'),
('exclude', '/usr/lib/gcc'),
('exclude', '/usr/lib/git-core'),
('exclude', '/usr/lib/python*'),
('exclude', '/usr/lib/**/*perl*'),
# Exclude these to save more space.
('exclude', '/usr/share/**'),
('exclude', '/var/**'),
)
# For XAR images, we only include a few selected directories, and
# exclude everything else.
#
# To support Python, we include our code under /usr/local in the XAR
# image (like our pod image). An alternative is to use venv to install
# our codebase, but this seems to be too much effort; so we do not take
# this approach for now.
#
# We explicitly remove CPython binaries from /usr/local/bin so that the
# `env` command will not (and should not) resolve to them.
#
# We do not include /usr/bin/java (symlink to /etc/alternatives) for
# now. If you want to use Java, you have to directly invoke it under
# /usr/lib/jvm/...
DEFAULT_XAR_FILTERS = (
('include', '/usr/'),
('include', '/usr/lib/'),
('exclude', '/usr/lib/**/*perl*'),
('include', '/usr/lib/jvm/'),
('include', '/usr/lib/jvm/**'),
('include', '/usr/lib/x86_64-linux-gnu/'),
('include', '/usr/lib/x86_64-linux-gnu/**'),
('include', '/usr/local/'),
('include', '/usr/local/bin/'),
('exclude', '/usr/local/bin/python*'),
('include', '/usr/local/bin/*'),
('include', '/usr/local/lib/'),
('include', '/usr/local/lib/**'),
('exclude', '**'),
)
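# rsync applies filter rules in order, so the trailing ('exclude', '**') drops
# everything that was not explicitly included above.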
@scripts.using_sudo()
def merge_image(
*,
name,
version,
builder_images,
default_filters,
filters,
output,
):
rootfs_paths = [
ctr_scripts.ctr_get_image_rootfs_path(image)
for image in builder_images
]
rootfs_paths.append(
ctr_scripts.ctr_get_image_rootfs_path(
models.PodConfig.Image(
name=utils.get_builder_name(name),
version=version,
)
)
)
filter_rules = _get_filter_rules(default_filters, filters)
with contextlib.ExitStack() as stack:
tempdir_path = stack.enter_context(
tempfile.TemporaryDirectory(dir=output.parent)
)
output_rootfs_path = Path(tempdir_path) / 'rootfs'
stack.callback(scripts.rm, output_rootfs_path, recursive=True)
LOG.info('generate application image under: %s', output_rootfs_path)
# NOTE: Do NOT overlay-mount these rootfs (and then rsync from
# the overlay) because the overlay does not include base and
# base-builder, and thus some tombstone files may not be copied
# correctly (I don't know why but rsync complains about this).
# For now our workaround is to rsync each rootfs sequentially.
for rootfs_path in rootfs_paths:
utils.rsync(rootfs_path, output_rootfs_path, filter_rules)
ctr_scripts.ctr_build_image(name, version, output_rootfs_path, output)
def _get_filter_rules(default_filters, filters):
return [
# Log which files are included/excluded due to filter rules.
'--debug=FILTER2',
# Add filters before default_filters so that the former may
# override the latter. I have a feeling that this "override"
# thing could be brittle, but let's leave this here for now.
*('--%s=%s' % pair for pair in filters),
*('--%s=%s' % pair for pair in default_filters),
]
| 34.35119 | 78 | 0.612199 | 0 | 0 | 0 | 0 | 1,445 | 0.25039 | 0 | 0 | 3,431 | 0.594524 |
82c30affdd6735cd19f09c9fa98712ebb317fd91 | 289 | py | Python | python3/best_time_stock1.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
]
| 1 | 2020-10-08T09:17:40.000Z | 2020-10-08T09:17:40.000Z | python3/best_time_stock1.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
]
| null | null | null | python3/best_time_stock1.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
]
| null | null | null | """
Space : O(1)
Time : O(n)
"""
class Solution:
def maxProfit(self, prices: List[int]) -> int:
start, dp = 10**10, 0
for i in prices:
print(start)
start = min(start, i)
dp = max(dp, i-start)
return dp
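# Example: Solution().maxProfit([7, 1, 5, 3, 6, 4]) == 5 (buy at 1, sell at 6)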
| 19.266667 | 50 | 0.439446 | 242 | 0.83737 | 0 | 0 | 0 | 0 | 0 | 0 | 37 | 0.128028 |
82c33d6f16c0ad3e4c5059353c658ad5302c575d | 175 | py | Python | environments/assets/gym_collectball/__init__.py | GPaolo/SERENE | 83bc38a37ad8f1be9695d2483fd463428d4dae23 | [
"MIT"
]
| 3 | 2021-04-19T21:55:00.000Z | 2021-12-20T15:26:12.000Z | environments/assets/gym_collectball/__init__.py | GPaolo/SERENE | 83bc38a37ad8f1be9695d2483fd463428d4dae23 | [
"MIT"
]
| null | null | null | environments/assets/gym_collectball/__init__.py | GPaolo/SERENE | 83bc38a37ad8f1be9695d2483fd463428d4dae23 | [
"MIT"
]
| null | null | null | # Created by Giuseppe Paolo
# Date: 27/08/2020
from gym.envs.registration import register
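# Importing this package runs the registration below; the environment can then
# (hypothetically) be created with gym.make('CollectBall-v0').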
register(
id='CollectBall-v0',
entry_point='gym_collectball.envs:CollectBall'
) | 21.875 | 48 | 0.771429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 96 | 0.548571 |
82c36eb8e029351535cbcf82344721060c30bebf | 3,534 | py | Python | foreverbull/foreverbull.py | quantfamily/foreverbull-python | 4f8144b6d964e9c0d1209f0421dc960b82a15400 | [
"Apache-2.0"
]
| null | null | null | foreverbull/foreverbull.py | quantfamily/foreverbull-python | 4f8144b6d964e9c0d1209f0421dc960b82a15400 | [
"Apache-2.0"
]
| 9 | 2021-11-24T10:45:27.000Z | 2022-02-26T19:12:47.000Z | foreverbull/foreverbull.py | quantfamily/foreverbull-python | 4f8144b6d964e9c0d1209f0421dc960b82a15400 | [
"Apache-2.0"
]
| null | null | null | import logging
import threading
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Queue
from foreverbull.worker.worker import WorkerHandler
from foreverbull_core.models.finance import EndOfDay
from foreverbull_core.models.socket import Request
from foreverbull_core.models.worker import Instance
from foreverbull_core.socket.client import ContextClient, SocketClient
from foreverbull_core.socket.exceptions import SocketClosed, SocketTimeout
from foreverbull_core.socket.router import MessageRouter
class Foreverbull(threading.Thread):
_worker_routes = {}
def __init__(self, socket: SocketClient = None, executors: int = 1):
self.socket = socket
self.running = False
self.logger = logging.getLogger(__name__)
self._worker_requests = Queue()
self._worker_responses = Queue()
self._workers: list[WorkerHandler] = []
self.executors = executors
self._routes = MessageRouter()
self._routes.add_route(self.stop, "backtest_completed")
self._routes.add_route(self._configure, "configure", Instance)
self._routes.add_route(self._stock_data, "stock_data", EndOfDay)
self._request_thread: ThreadPoolExecutor = ThreadPoolExecutor(max_workers=5)
threading.Thread.__init__(self)
@staticmethod
def on(msg_type):
def decorator(t):
Foreverbull._worker_routes[msg_type] = t
return t
return decorator
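    # Hypothetical handler registration using the decorator above:
    #   @Foreverbull.on("stock_data")
    #   def handle_stock_data(data): ...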
def run(self):
self.running = True
self.logger.info("Starting instance")
while self.running:
try:
context_socket = self.socket.new_context()
request = context_socket.recv()
self._request_thread.submit(self._process_request, context_socket, request)
except (SocketClosed, SocketTimeout):
self.logger.info("main socket closed, exiting")
return
self.socket.close()
self.logger.info("exiting")
def _process_request(self, socket: ContextClient, request: Request):
try:
            self.logger.debug(f"received task: {request.task}")
response = self._routes(request)
socket.send(response)
self.logger.debug(f"reply sent for task: {response.task}")
socket.close()
except (SocketTimeout, SocketClosed) as exc:
self.logger.warning(f"Unable to process context socket: {exc}")
except Exception as exc:
            self.logger.error("unknown exception when processing context socket")
self.logger.exception(exc)
def stop(self):
self.logger.info("Stopping instance")
self.running = False
for worker in self._workers:
worker.stop()
self._workers = []
def _configure(self, instance_configuration: Instance):
for _ in range(self.executors):
w = WorkerHandler(instance_configuration, **self._worker_routes)
self._workers.append(w)
return
def _stock_data(self, message: EndOfDay):
for worker in self._workers:
if worker.locked():
continue
if worker.acquire():
break
else:
raise Exception("workers are not initialized")
try:
worker.process(message)
except Exception as exc:
self.logger.error("Error processing to worker")
self.logger.exception(exc)
worker.release()
| 36.061224 | 91 | 0.649689 | 3,003 | 0.849745 | 0 | 0 | 161 | 0.045557 | 0 | 0 | 338 | 0.095642 |
82c418de34320061d50470074e4e4e6e0fe9752b | 704 | py | Python | scopus/tests/test_AffiliationSearch.py | crew102/scopus | d8791c162cef4c2f830d983b435333d9d8eaf472 | [
"MIT"
]
| null | null | null | scopus/tests/test_AffiliationSearch.py | crew102/scopus | d8791c162cef4c2f830d983b435333d9d8eaf472 | [
"MIT"
]
| null | null | null | scopus/tests/test_AffiliationSearch.py | crew102/scopus | d8791c162cef4c2f830d983b435333d9d8eaf472 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `AffiliationSearch` module."""
from collections import namedtuple
from nose.tools import assert_equal, assert_true
import scopus
s = scopus.AffiliationSearch('af-id(60021784)', refresh=True)
def test_affiliations():
received = s.affiliations
assert_true(isinstance(received, list))
order = 'eid name variant documents city country parent'
Affiliation = namedtuple('Affiliation', order)
expected = [Affiliation(eid='10-s2.0-60021784', name='New York University',
variant='', documents='101148', city='New York',
country='United States', parent='0')]
assert_equal(received, expected)
| 29.333333 | 79 | 0.691761 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 242 | 0.34375 |
82c5022208b58d4f46a1d7ce39f5bdeb44953f3f | 566 | py | Python | MechOS/simple_messages/int.py | PierceATronics/MechOS | 8eeb68b65b8c20b642db52baad1379fd0847b362 | [
"MIT"
]
| null | null | null | MechOS/simple_messages/int.py | PierceATronics/MechOS | 8eeb68b65b8c20b642db52baad1379fd0847b362 | [
"MIT"
]
| null | null | null | MechOS/simple_messages/int.py | PierceATronics/MechOS | 8eeb68b65b8c20b642db52baad1379fd0847b362 | [
"MIT"
]
| null | null | null | '''
Simple 4-byte integer message type for the MechOS simple_messages package.
'''
import struct
class Int:
    '''
    Message type that packs/unpacks a single 4-byte signed integer.
    '''
def __init__(self):
        '''
        Define the struct format ('i' = 4-byte signed int) and the message size.
        '''
#construct the message format
self.message_constructor = 'i'
#number of bytes for this message
self.size = 4
def _pack(self, message):
        '''
        Pack an integer message into its 4-byte encoded form.
        '''
encoded_message = struct.pack(self.message_constructor, message)
return(encoded_message)
def _unpack(self, encoded_message):
        '''
        Unpack a 4-byte encoded message back into a Python integer.
        '''
message = struct.unpack(self.message_constructor, encoded_message)[0]
return(message)
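# Hypothetical round trip:
#   codec = Int(); assert codec._unpack(codec._pack(42)) == 42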
| 20.962963 | 77 | 0.556537 | 542 | 0.957597 | 0 | 0 | 0 | 0 | 0 | 0 | 128 | 0.226148 |
82c56d7c16636bc69a537283da6c0edaf26dd821 | 377 | py | Python | Curso Python/PythonExercicios/ex017.py | marcos-saba/Cursos | 1c063392867e9ed86d141dad8861a2a35488b1c6 | [
"MIT"
]
| null | null | null | Curso Python/PythonExercicios/ex017.py | marcos-saba/Cursos | 1c063392867e9ed86d141dad8861a2a35488b1c6 | [
"MIT"
]
| null | null | null | Curso Python/PythonExercicios/ex017.py | marcos-saba/Cursos | 1c063392867e9ed86d141dad8861a2a35488b1c6 | [
"MIT"
]
| null | null | null | #from math import hypot
import math
print('='*5, 'Right triangle calculation', '='*5)
cat_op = float(input('Enter the length of the opposite leg: '))
cat_adj = float(input('Enter the length of the adjacent leg: '))
hip = math.hypot(cat_op, cat_adj)
print(f'The hypotenuse of the right triangle with legs {cat_op:.2f} and {cat_adj:.2f} measures {hip:.2f}.')
| 47.125 | 121 | 0.729443 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 264 | 0.6875 |
82c5f5ed054e4540c225e7fd44668ed1c842c358 | 312 | py | Python | exercicios/ex074.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
]
| null | null | null | exercicios/ex074.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
]
| null | null | null | exercicios/ex074.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
]
| null | null | null | from random import randint
numeros = (randint(0, 10), randint(0, 10), randint(0, 10), randint(0, 10), randint(0, 10))
print('The five numbers are: ', end='')
for n in numeros:  # display the drawn numbers
    print(n, end=' ')
print(f'\nThe LARGEST number is {max(numeros)}')
print(f'The SMALLEST number is {min(numeros)}')
| 39 | 90 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 132 | 0.413793 |
82c61ef5a2ffb92917f588c48559df6bc3be2564 | 10,832 | py | Python | libs3/maxwellccs.py | tmpbci/LJ | 4c40e2ddf862f94dcfeb3cc48c41aad44a3a8d34 | [
"CNRI-Python"
]
| 7 | 2019-03-20T00:09:14.000Z | 2022-03-06T23:18:20.000Z | libs3/maxwellccs.py | tmpbci/LJ | 4c40e2ddf862f94dcfeb3cc48c41aad44a3a8d34 | [
"CNRI-Python"
]
| null | null | null | libs3/maxwellccs.py | tmpbci/LJ | 4c40e2ddf862f94dcfeb3cc48c41aad44a3a8d34 | [
"CNRI-Python"
]
| null | null | null | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Maxwell Macros
v0.7.0
by Sam Neurohack
from /team/laser
Launchpad sets a "current path"
"""
from OSC3 import OSCServer, OSCClient, OSCMessage
import time
import numpy as np
from scipy import signal
import rtmidi
from rtmidi.midiutil import open_midiinput
from threading import Thread
from rtmidi.midiconstants import (CHANNEL_PRESSURE, CONTROLLER_CHANGE, NOTE_ON, NOTE_OFF,
PITCH_BEND, POLY_PRESSURE, PROGRAM_CHANGE)
import os, json
import midi3
if os.uname()[1]=='raspberrypi':
pass
port = 8090
ip = "127.0.0.1"
mididest = 'Session 1'
djdest = 'Port'
midichannel = 1
computerIP = ['127.0.0.1','192.168.2.95','192.168.2.52','127.0.0.1',
'127.0.0.1','127.0.0.1','127.0.0.1','127.0.0.1']
computer = 0
# store current value for computer 1
cc1 =[0]*140
current = {
"patch": 0,
"prefixLeft": "/osc/left/X",
"prefixRight": "/osc/right/X",
"suffix": "/amp",
"path": "/osc/left/X/curvetype",
"pathLeft": "/osc/left/X/curvetype",
"pathRight": "/osc/left/X/curvetype",
"previousmacro": -1,
"LeftCurveType": 0,
"lfo": 1,
"rotator": 1,
"translator": 1
}
specificvalues = {
# Sine: 0-32, Tri: 33-64, Square: 65-96, Line: 96-127
"curvetype": {"sin": 0, "saw": 33, "squ": 95, "lin": 127},
"freqlimit": {"1": 0, "4": 26, "16": 52, "32": 80, "127": 127},
"amptype": {"constant": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
"phasemodtype": {"linear": 0,"sin": 90},
"phaseoffsettype": {"manual": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
"ampoffsettype": { "manual": 0, "lfo1": 33, "lfo2": 95, "lfo3": 127},
"inversion": {"off": 0, "on": 127},
"colortype": {"solid": 0, "lfo": 127},
"modtype": {"sin": 0,"linear": 127},
"switch": {"off": 0,"on": 127},
"operation": {"+": 0, "-": 50, "*": 127}
}
#
# Maxwell CCs
#
def FindCC(FunctionName):
for Maxfunction in range(len(maxwell['ccs'])):
if FunctionName == maxwell['ccs'][Maxfunction]['Function']:
#print(FunctionName, "is CC", Maxfunction)
return Maxfunction
def LoadCC():
global maxwell
print("Loading Maxwell CCs Functions...")
if os.path.exists('maxwell.json'):
#print('File maxwell.json exits')
f=open("maxwell.json","r")
else:
if os.path.exists('../maxwell.json'):
#print('File ../maxwell.json exits')
f=open("../maxwell.json","r")
s = f.read()
maxwell = json.loads(s)
print(len(maxwell['ccs']),"Functions")
print("Loaded.")
# /cc cc number value
def cc(ccnumber, value, dest=mididest):
#print('Output CC',[CONTROLLER_CHANGE+midichannel-1, ccnumber, value], dest)
midi3.MidiMsg([CONTROLLER_CHANGE+midichannel-1,ccnumber,value], dest)
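# e.g. cc(74, 64) sends controller #74 with value 64 on the configured MIDI channel.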
def NoteOn(note,velocity, dest=mididest):
    midi3.NoteOn(note,velocity, dest)
def NoteOff(note, dest=mididest):
midi3.NoteOn(note, mididest)
def Send(oscaddress,oscargs=''):
oscmsg = OSCMessage()
oscmsg.setAddress(oscaddress)
oscmsg.append(oscargs)
osclient = OSCClient()
osclient.connect((ip, port))
print("sending OSC message : ", oscmsg, "to", ip, ":",port)
try:
osclient.sendto(oscmsg, (ip, port))
oscmsg.clearData()
return True
except:
print ('Connection to', ip, 'refused : died ?')
return False
def ssawtooth(samples,freq,phase):
    t = np.linspace(0+phase, 1+phase, samples)
    samparray = np.zeros(samples)
    for ww in range(samples):
        samparray[ww] = signal.sawtooth(2 * np.pi * freq * t[ww])
    return samparray
def ssquare(samples,freq,phase):
    t = np.linspace(0+phase, 1+phase, samples)
    samparray = np.zeros(samples)
    for ww in range(samples):
        samparray[ww] = signal.square(2 * np.pi * freq * t[ww])
    return samparray
def ssine(samples,freq,phase):
    t = np.linspace(0+phase, 1+phase, samples)
    samparray = np.zeros(samples)
    for ww in range(samples):
        samparray[ww] = np.sin(2 * np.pi * freq * t[ww])
    return samparray
def MixerLeft(value):
if value == 127:
Send("/mixer/value", 0)
def MixerRight(value):
if value == 127:
Send("/mixer/value", 127)
def MixerTempo(tempo):
for counter in range(127):
Send("/mixer/value", counter)
# Jog sends 127 to the left and 1 to the right
# increase or decrease current CC defined in current path
def jogLeft(value):
path = current["pathLeft"]
print("jog : path =",path, "CC :", FindCC(path), "value", value)
MaxwellCC = FindCC(current["pathLeft"])
if value == 127:
# decrease CC
if cc1[MaxwellCC] > 0:
cc1[MaxwellCC] -= 1
else:
if cc1[MaxwellCC] < 127:
cc1[MaxwellCC] += 1
#print("sending", cc1[MaxwellCC], "to CC", MaxwellCC )
cc(MaxwellCC, cc1[MaxwellCC] , dest ='to Maxwell 1')
#RotarySpecifics(MaxwellCC, path[path.rfind("/")+1:len(path)], value)
# Jog sends 127 to the left and 1 to the right
# increase or decrease current CC defined in current path
def jogRight(value):
path = current["pathRight"]
print("jog : path =",path, "CC :", FindCC(path), "value", value)
MaxwellCC = FindCC(current["pathRight"])
if value == 127:
# decrease CC
if cc1[MaxwellCC] > 0:
cc1[MaxwellCC] -= 1
else:
if cc1[MaxwellCC] < 127:
cc1[MaxwellCC] += 1
#print("sending", cc1[MaxwellCC], "to CC", MaxwellCC )
cc(MaxwellCC, cc1[MaxwellCC] , dest ='to Maxwell 1')
#RotarySpecifics(MaxwellCC, path[path.rfind("/")+1:len(path)], value)
# Parameter change : to left 127 / to right 0 or 1
def RotarySpecifics( MaxwellCC, specificsname, value):
global maxwell
print("Maxwell CC :",MaxwellCC)
print("Current :",maxwell['ccs'][MaxwellCC]['init'])
print("Specifics :",specificvalues[specificsname])
print("midi value :", value)
elements = list(enumerate(specificvalues[specificsname]))
print(elements)
nextype = maxwell['ccs'][MaxwellCC]['init']
for count,ele in elements:
if ele == maxwell['ccs'][MaxwellCC]['init']:
if count > 0 and value == 127:
nextype = elements[count-1][1]
if count < len(elements)-1 and value < 2:
#print("next is :",elements[count+1][1])
nextype = elements[count+1][1]
print("result :", nextype, "new value :", specificvalues[specificsname][nextype], "Maxwell CC", MaxwellCC)
maxwell['ccs'][MaxwellCC]['init'] = nextype
cc(MaxwellCC, specificvalues[specificsname][nextype], dest ='to Maxwell 1')
# Change type : trig only with midi value 127 on a CC event
def ButtonSpecifics127( MaxwellCC, specificsname, value):
global maxwell
print("Maxwell CC :",MaxwellCC)
print("Current :",maxwell['ccs'][MaxwellCC]['init'])
print("Specifics :",specificvalues[specificsname])
print("midi value :", value)
elements = list(enumerate(specificvalues[specificsname]))
print(elements)
nextype = maxwell['ccs'][MaxwellCC]['init']
for count,ele in elements:
if ele == maxwell['ccs'][MaxwellCC]['init']:
if count >0 and value == 127:
nextype = elements[count-1][1]
if count < len(elements)-1 and value < 2:
#print("next is :",elements[count+1][1])
nextype = elements[count+1][1]
print("result :", nextype, "new value :", specificvalues[specificsname][nextype], "Maxwell CC", MaxwellCC)
maxwell['ccs'][MaxwellCC]['init'] = nextype
cc(MaxwellCC, specificvalues[specificsname][nextype], dest ='to Maxwell 1')
# Left cue button 127 = on 0 = off
def PrevPatch(value):
global current
print('PrevPatch function')
if value == 127 and current['patch'] - 1 > -1:
cc(9, 127, dest=djdest)
time.sleep(0.1)
current['patch'] -= 1
print("Current patch is now :",current['patch'])
midi3.NoteOn(current['patch'], 127, 'to Maxwell 1')
cc(9, 0, dest=djdest)
# Right cue button 127 = on 0 = off
def NextPatch(value):
global current
print('NextPatch function', current["patch"])
if value == 127 and current["patch"] + 1 < 41:
cc(3, 127, dest = djdest)
current["patch"] += 1
#ModeNote(current["patch"], 127, 'to Maxwell 1')
midi3.NoteOn(current["patch"], 127, 'to Maxwell 1')
print("Current patch is now :",current["patch"])
time.sleep(0.1)
cc(3, 0, dest = djdest)
# increase/decrease a CC
def changeCC(value, path):
global current
#path = current["pathLeft"]
MaxwellCC = FindCC(path)
cc1[MaxwellCC] += value
print("Change Left CC : path =",path, "CC :", FindCC(path), "is now ", cc1[MaxwellCC])
cc(MaxwellCC, cc1[MaxwellCC] , dest ='to Maxwell 1')
def PlusTenLeft(value):
value = 10
changeCC(value, current["pathLeft"])
def MinusTenLeft(value):
value = -10
changeCC(value, current["pathLeft"])
def PlusOneLeft(value):
value = 1
changeCC(value, current["pathLeft"])
def MinusOneLeft(value):
value = -1
changeCC(value, current["pathLeft"])
def PlusTenRight(value):
value = 10
changeCC(value, current["pathRight"])
def MinusTenRight(value):
value = -10
changeCC(value, current["pathRight"])
def PlusOneRight(value):
value = 1
changeCC(value, current["pathRight"])
def MinusOneRight(value):
value = -1
changeCC(value, current["pathRight"])
def ChangeCurveLeft(value):
MaxwellCC = FindCC(current["prefixLeft"] + '/curvetype')
RotarySpecifics(MaxwellCC, "curvetype", value)
def ChangeFreqLimitLeft(value):
    MaxwellCC = FindCC(current["prefixLeft"] + '/freqlimit')
    RotarySpecifics(MaxwellCC, "freqlimit", value)
def ChangeATypeLeft(value):
    MaxwellCC = FindCC(current["prefixLeft"] + '/amptype')
    RotarySpecifics(MaxwellCC, "amptype", value)
def ChangePMTypeLeft(value):
    MaxwellCC = FindCC(current["prefixLeft"] + '/phasemodtype')
    RotarySpecifics(MaxwellCC, "phasemodtype", value)
def ChangePOTypeLeft(value):
    MaxwellCC = FindCC(current["prefixLeft"] + '/phaseoffsettype')
    RotarySpecifics(MaxwellCC, "phaseoffsettype", value)
def ChangeAOTypeLeft(value):
    MaxwellCC = FindCC(current["prefixLeft"] + '/ampoffsettype')
    RotarySpecifics(MaxwellCC, "ampoffsettype", value)
def ChangeCurveRight(value):
MaxwellCC = FindCC(current["prefixRight"] + '/curvetype')
RotarySpecifics(MaxwellCC, "curvetype", value)
def ChangeCurveLFO(value):
    MaxwellCC = FindCC('/lfo/'+ str(current["lfo"]) +'/curvetype')
    RotarySpecifics(MaxwellCC, "curvetype", value)
def ChangeCurveRot(value):
    MaxwellCC = FindCC('/rotator/'+ str(current["rotator"]) +'/curvetype')
    RotarySpecifics(MaxwellCC, "curvetype", value)
def ChangeCurveTrans(value):
    MaxwellCC = FindCC('/translator/'+ str(current["translator"]) +'/curvetype')
    RotarySpecifics(MaxwellCC, "curvetype", value)
| 26.745679 | 110 | 0.625185 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,326 | 0.307053 |
82c72df17c47f59db7183dbcc92de68aef849d6a | 11,660 | py | Python | functions_alignComp.py | lauvegar/VLBI_spectral_properties_Bfield | 6d07b6b0549ba266d2c56adcf664219a500e75e8 | [
"MIT"
]
| 1 | 2020-03-14T14:55:17.000Z | 2020-03-14T14:55:17.000Z | functions_alignComp.py | lauvegar/VLBI_spectral_properties_Bfield | 6d07b6b0549ba266d2c56adcf664219a500e75e8 | [
"MIT"
]
| null | null | null | functions_alignComp.py | lauvegar/VLBI_spectral_properties_Bfield | 6d07b6b0549ba266d2c56adcf664219a500e75e8 | [
"MIT"
]
| 1 | 2021-01-29T14:08:16.000Z | 2021-01-29T14:08:16.000Z | import numpy as np
import matplotlib.pyplot as plt
from pylab import *
#import pyspeckit as ps
from scipy import io
from scipy import stats
from scipy.optimize import leastsq
#from lmfit import minimize, Parameters, Parameter, report_fit
#from lmfit.models import GaussianModel
import scipy.optimize as optimization
import matplotlib.ticker as ticker
import cmath as math
import pickle
import iminuit
import astropy.io.fits as pf
import os,glob
#import string,math,sys,fileinput,glob,time
#load modules
#from pylab import *
import subprocess as sub
import re
#from plot_components import get_ellipse_coords, ellipse_axis
import urllib2
from astropy import units as u
#from astropy.coordinates import SkyCoord
#FUNCTION TO READ THE HEADER AND TAKE IMPORTANT PARAMETERS AS
#cell
#BMAJ, BMIN, BPA
#date, freq and epoch
def find_nearest(array,value):
index = (np.abs(array-value)).argmin()
return array[index], index
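# e.g. find_nearest(np.array([1., 5., 9.]), 6.) -> (5.0, 1)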
def atoi(text):
return int(text) if text.isdigit() else text
def natural_keys(text):
'''
alist.sort(key=natural_keys) sorts in human order
http://nedbatchelder.com/blog/200712/human_sorting.html
(See Toothy's implementation in the comments)
'''
return [ atoi(c) for c in re.split('(\d+)', text) ]
def get_ellipse_coords(a=0.0, b=0.0, x=0.0, y=0.0, angle=0.0, k=2):
""" Draws an ellipse using (360*k + 1) discrete points; based on pseudo code
given at http://en.wikipedia.org/wiki/Ellipse
k = 1 means 361 points (degree by degree)
a = major axis distance,
b = minor axis distance,
x = offset along the x-axis
y = offset along the y-axis
angle = clockwise rotation [in degrees] of the ellipse;
* angle=0 : the ellipse is aligned with the positive x-axis
* angle=30 : rotated 30 degrees clockwise from positive x-axis
"""
pts = np.zeros((360*k+1, 2))
beta = -angle * np.pi/180.0
sin_beta = np.sin(beta)
cos_beta = np.cos(beta)
alpha = np.radians(np.r_[0.:360.:1j*(360*k+1)])
sin_alpha = np.sin(alpha)
cos_alpha = np.cos(alpha)
pts[:, 0] = x + (a * cos_alpha * cos_beta - b * sin_alpha * sin_beta)
pts[:, 1] = y + (a * cos_alpha * sin_beta + b * sin_alpha * cos_beta)
return pts
def ellipse_axis(x, y,s):
x1=x-s
x2=x+s
if x1<x2:
xaxis=np.linspace(x1,x2,50)
else:
xaxis=np.linspace(x2,x1,50)
y1=y-s
y2=y+s
if y1<y2:
yaxis=np.linspace(y1,y2,50)
else:
yaxis=np.linspace(y2,y1,50)
return xaxis,yaxis
def ellipse_axis_lines(x,y,size):
pts_arr=[]
pt_arr=[]
x_el_arr=[]
x_elH_arr=[]
y_el_arr=[]
y_elH_arr=[]
for i in xrange(0,len(x)):
n = len(x[i])
pts, pt = [], []
x_el, y_el = [], []
x_elH, y_elH = [], []
for k in xrange(0,n):
pts.append(get_ellipse_coords(a=size[i][k], b=size[i][k], x=x[i][k],y=y[i][k], angle=0))
pt.append(get_ellipse_coords(a=0.01, b=0.01, x=x[i][k],y=y[i][k], angle=0))
#lines axis ellipses
x_el.append(ellipse_axis(x=float(x[i][k]),y=float(y[i][k]),s=float(size[i][k]))[0])
y_el.append(ellipse_axis(x=x[i][k],y=y[i][k],s=size[i][k])[1])
x_elH.append(np.linspace(x[i][k],x[i][k],50))
y_elH.append(np.linspace(y[i][k],y[i][k],50))
pts_arr.append(pts)
pt_arr.append(pt)
x_el_arr.append(x_el)
y_el_arr.append(y_el)
x_elH_arr.append(x_elH)
y_elH_arr.append(y_elH)
return pts_arr,pt_arr,x_el_arr,y_el_arr,x_elH_arr,y_elH_arr
def read_modfile(file1,beam,errors):
nfiles = len(file1)
r_arr = []
errr_arr = [] #np.array([0.]*nfiles)
psi_arr = []
errpsi_arr = []
size_arr = []
errsize_arr = []
flux_arr = []
errflux_arr = []
ntot=0
for k in xrange (0,nfiles):
with open(file1[k]) as myfile:
count = sum(1 for line in myfile if line.rstrip('\n'))
count = count-4
#n = len(rms[k])
n = count
split_f=[]
c=[]
r=np.array([0.]*n)
errr=np.array([0.]*n)
psi=np.array([0.]*n)
errpsi=np.array([0.]*n)
size=np.array([0.]*n)
errsize=np.array([0.]*n)
tb=np.array([0.]*n)
errtb=np.array([0.]*n)
flux=np.array([0.]*n)
fluxpeak = np.array([0.]*n)
rms = np.array([0.]*n)
errflux=np.array([0.]*n)
lim_resol=np.array([0.]*n)
errlim_resol=np.array([0.]*n)
temp=file1[k]
temp_file=open(temp,mode='r')
temp_file.readline()
temp_file.readline()
temp_file.readline()
temp_file.readline()
for i in xrange(0,n):
split_f = temp_file.readline().split()
flux[i] = (float(split_f[0][:-1]))
r[i] = (float(split_f[1][:-1]))
psi[i] = (float(split_f[2][:-1])*np.pi/180.)
size[i] = (float(split_f[3][:-1])/2.)
#tb[i] = (float(split_f[7]))
if errors == True:
temp_file2=open('pos_errors.dat',mode='r')
temp_file2.readline()
temp_file2.readline()
for i in xrange(0,ntot):
temp_file2.readline()
for i in xrange(0,n):
split_f = temp_file2.readline().split()
fluxpeak[i] = (float(split_f[2][:-1]))
rms[i] = (float(split_f[1][:-1]))
for i in xrange(0,n):
errflux[i] = rms[i]
snr = fluxpeak[i]/rms[i]#[k][i] #change to flux_peak
dlim = 4/np.pi*np.sqrt(np.pi*np.log(2)*beam[k]*np.log((snr)/(snr-1.))) #np.log((snr+1.)/(snr))) 4/np.pi*beam
if size[i] > beam[k]:
ddec=np.sqrt(size[i]**2-beam[k]**2)
else:
ddec=0.
y=[dlim,ddec]
dg=np.max(y)
err_size = rms[i]*dlim/fluxpeak[i]
err_r = err_size/2.
if r[i] > 0.:
err_psi = np.real(math.atan(err_r*180./(np.pi*r[i])))
else:
err_psi = 1./5*beam[k]
if err_size < 2./5.*beam[k]:
errsize[i] = 2./5.*beam[k]
else:
errsize[i] = (err_size)
                if err_r < 1./5*beam[k]:
                    errr[i] = 1./5*beam[k]
if errr[i] < 1./2.*size[i]:
errr[i] = 1./2.*size[i]
else:
errr[i] = (err_r)
errpsi[i] = (err_psi)
elif errors == 'Done':
print 'done'
else:
for i in xrange(0,n):
errflux[i] = 0.1*flux[i]
errr[i] = 1./5.*beam[k]
errpsi[i] = 0.
errsize[i] = 2./5*beam[k]
r_arr.append(r)
errr_arr.append(errr)
psi_arr.append(psi)
errpsi_arr.append(errpsi)
size_arr.append(size)
errsize_arr.append(errsize)
flux_arr.append(flux)
errflux_arr.append(errflux)
ntot = n + ntot + 1
return r_arr,errr_arr,psi_arr,errpsi_arr,size_arr,errsize_arr,tb,flux_arr,errflux_arr
def x_y(r,errr,psi,errpsi,errors):
n = len(r)
x,errx = np.array([0.]*n),np.array([0.]*n)
y,erry = np.array([0.]*n),np.array([0.]*n)
x_arr, errx_arr = [], []
y_arr, erry_arr = [], []
for i in xrange (0,n):
x=r[i]*np.sin(psi[i])
y=r[i]*np.cos(psi[i])
if errors == True:
errx=np.sqrt((errr[i]*np.cos(psi[i]))**2+(r[i]*np.sin(psi[i])*errpsi[i])**2)
erry=np.sqrt((errr[i]*np.sin(psi[i]))**2+(r[i]*np.cos(psi[i])*errpsi[i])**2)
else:
errx = errr[i]
erry = errr[i]
x_arr.append(x)
errx_arr.append(errx)
y_arr.append(y)
erry_arr.append(erry)
x_arr = np.asarray(x_arr)
errx_arr = np.asarray(errx_arr)
y_arr = np.asarray(y_arr)
erry_arr = np.asarray(erry_arr)
return x_arr,errx_arr,y_arr,erry_arr
def r_psi(x,errx,y,erry):
    n = len(x)
r,errr = np.array([0.]*n),np.array([0.]*n)
psi,errpsi = np.array([0.]*n),np.array([0.]*n)
r_arr, errr_arr = [], []
psi_arr, errpsi_arr = [], []
for i in xrange (0,n):
r=np.sqrt(x[i]**2+y[i]**2)
        psi=np.arctan(y[i]/x[i])
#errr=np.sqrt((1/(2*r)*2*x[i]*errx[i])**2+(1/(2*r)*2*y[i]*erry[i])**2)
#errpsi=np.sqrt(((y[i]/([x[i]**2+y[i])**2])*errx[i])**2+((x[i]/([x[i]**2+y[i])**2])*erry[i])**2)
r_arr.append(r)
#errr_arr.append(errr)
psi_arr.append(psi)
#errpsi_arr.append(errpsi)
return r_arr,psi_arr
def selectComponent(realDAT,realDAT2, first_contour, pts_arr,x_el_arr,x_elH_arr,y_elH_arr,y_el_arr,ext,freq1,freq2,x,y,numComp,orientation):
levels = first_contour[0]*np.array([-1., 1., 1.41,2.,2.83,4.,5.66,8.,11.3,16.,
22.6,32.,45.3,64.,90.5,128.,181.,256.,362.,512.,
724.,1020.,1450.,2050.])
plt.figure(10)
plt.subplot(121)
cset = plt.contour(realDAT, levels, inline=1,
colors=['grey'],
extent=ext, aspect=1.0
)
for j in xrange(0,len(x_el_arr[0])):
plt.plot(pts_arr[0][j][:,0], pts_arr[0][j][:,1], color='blue',linewidth=4)
plt.plot(x_el_arr[0][j], y_elH_arr[0][j], color='blue',linewidth=4)
plt.plot(x_elH_arr[0][j], y_el_arr[0][j], color='blue',linewidth=4)
plt.xlim(ext[0],ext[1])
plt.ylim(ext[2],ext[3])
plt.axis('scaled')
plt.xlabel('Right Ascension [pixels]')
plt.ylabel('Relative Declination [pixels]')
plt.title(str('%1.3f' %(freq1))+' GHz')
levels = first_contour[1]*np.array([-1., 1., 1.41,2.,2.83,4.,5.66,8.,11.3,16.,
22.6,32.,45.3,64.,90.5,128.,181.,256.,362.,512.,
724.,1020.,1450.,2050.])
#plt.figure(2)
plt.subplot(122)
cset = plt.contour(realDAT2, levels, inline=1,
colors=['grey'],
extent=ext, aspect=1.0
)
for j in xrange(0,len(x_el_arr[1])):
plt.plot(pts_arr[1][j][:,0], pts_arr[1][j][:,1], color='blue',linewidth=4)
plt.plot(x_el_arr[1][j], y_elH_arr[1][j], color='blue',linewidth=4)
plt.plot(x_elH_arr[1][j], y_el_arr[1][j], color='blue',linewidth=4)
plt.xlim(ext[0],ext[1])
plt.ylim(ext[2],ext[3])
plt.axis('scaled')
plt.xlabel('Right Ascension [pixels]')
plt.title(str('%1.3f' %(freq2))+' GHz')
param = ginput(4*numComp,0)
near_comp1 = []
near_comp2 = []
a = 0
if orientation == 'h':
for i in xrange(0,numComp):
x_c = float(param[1+a][0])
near_comp1.append(int(find_nearest(x[0],x_c)[1]))
x_c = float(param[3+a][0])
near_comp2.append(int(find_nearest(x[1],x_c)[1]))
a = a + 4
if orientation == 'v':
for i in xrange(0,numComp):
y_c = float(param[1+a][1])
near_comp1.append(int(find_nearest(y[0],y_c)[1]))
y_c = float(param[3+a][1])
near_comp2.append(int(find_nearest(y[1],y_c)[1]))
a = a + 4
plt.show()
return near_comp1, near_comp2
def CoreShiftCalculation(indexes,x,y,errx,erry,numComp):
#indexes[0] low freq, indexes[1] high frequency
#shift high freq - low freq
if numComp == 1:
RaShift = x[1][indexes[1][0]]-x[0][indexes[0][0]]
DecShift = y[1][indexes[1][0]]-y[0][indexes[0][0]]
errRaShift = np.sqrt((errx[1][indexes[1][0]])**2+(errx[0][indexes[0][0]])**2)
errDecShift = np.sqrt((erry[1][indexes[1][0]])**2+(erry[0][indexes[0][0]])**2)
if numComp > 1:
#calculate all the Ra and Dec shifts and do an average
RaShiftArr = np.asarray([0.]*numComp)
DecShiftArr = np.asarray([0.]*numComp)
for i in xrange(0,numComp):
RaShiftArr[i] = x[1][indexes[1][i]]-x[0][indexes[0][i]]
DecShiftArr[i] = y[1][indexes[1][i]]-y[0][indexes[0][i]]
RaShift = np.sum(RaShiftArr)/len(RaShiftArr)
DecShift = np.sum(DecShiftArr)/len(DecShiftArr)
if numComp < 4:
#not enough values to do a proper dispersion, I consider the values' error as more reliable
errRaShiftArr = np.asarray([0.]*numComp)
errDecShiftArr = np.asarray([0.]*numComp)
for i in xrange(0,numComp):
#no square root because I need to square them later in the sum, so i avoid unnecessary calculations
errRaShiftArr[i] = (errx[1][indexes[1][i]])**2+(errx[0][indexes[0][i]])**2
errDecShiftArr[i] = (erry[1][indexes[1][i]])**2+(erry[0][indexes[0][i]])**2
errRaShift = np.sqrt(np.sum(errRaShiftArr))/numComp
errDecShift = np.sqrt(np.sum(errDecShiftArr))/numComp
else:
#statistical error
errRaShift = np.sqrt(np.sum((RaShiftArr-RaShift)**2))/(np.sqrt(numComp-1))
errDecShift = np.sqrt(np.sum((DecShiftArr-DecShift)**2))/(np.sqrt(numComp-1))
return RaShift, DecShift, errRaShift, errDecShift
| 29.004975 | 140 | 0.613036 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,029 | 0.174014 |
82c74e30b862d202367459727b08bf47fdb074f4 | 1,762 | py | Python | osbuild/dist.py | dnarvaez/osbuild | 08031487481ba23597f19cb3e106628e5c9d440d | [
"Apache-2.0"
]
| null | null | null | osbuild/dist.py | dnarvaez/osbuild | 08031487481ba23597f19cb3e106628e5c9d440d | [
"Apache-2.0"
]
| 1 | 2016-11-13T01:04:18.000Z | 2016-11-13T01:04:18.000Z | osbuild/dist.py | dnarvaez/osbuild | 08031487481ba23597f19cb3e106628e5c9d440d | [
"Apache-2.0"
]
| 2 | 2015-01-06T20:57:55.000Z | 2015-11-15T20:14:09.000Z | # Copyright 2013 Daniel Narvaez
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
from distutils.sysconfig import parse_makefile
from osbuild import config
from osbuild import command
_dist_builders = {}
def dist_one(module_name):
for module in config.load_modules():
if module.name == module_name:
return _dist_module(module)
return False
def dist():
shutil.rmtree(config.get_dist_dir(), ignore_errors=True)
modules = config.load_modules()
for module in modules:
if not _dist_module(module):
return False
return True
def _dist_module(module):
if not module.dist:
return True
print("* Creating %s distribution" % module.name)
return _dist_builders[module.build_system](module)
def _autotools_dist_builder(module):
source_dir = module.get_source_dir()
os.chdir(source_dir)
command.run(["make", "distcheck"])
makefile = parse_makefile(os.path.join(source_dir, "Makefile"))
tarball = "%s-%s.tar.xz" % (module.name, makefile["VERSION"])
shutil.move(os.path.join(source_dir, tarball),
os.path.join(config.get_dist_dir(), tarball))
return True
_dist_builders['autotools'] = _autotools_dist_builder
| 25.536232 | 74 | 0.715096 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 654 | 0.371169 |
82c7e82524f111efe667928715ea87dcc4155b43 | 1,194 | py | Python | neural_net/game_status.py | Ipgnosis/tic_tac_toe | e1519b702531965cc647ff37c1c46d72f4b3b24e | [
"BSD-3-Clause"
]
| null | null | null | neural_net/game_status.py | Ipgnosis/tic_tac_toe | e1519b702531965cc647ff37c1c46d72f4b3b24e | [
"BSD-3-Clause"
]
| 4 | 2021-03-25T19:52:40.000Z | 2021-12-12T17:57:11.000Z | neural_net/game_status.py | Ipgnosis/tic_tac_toe | e1519b702531965cc647ff37c1c46d72f4b3b24e | [
"BSD-3-Clause"
]
| null | null | null | # node to capture and communicate game status
# written by Russell on 5/18
class Game_state():
node_weight = 1
# node_bias = 1 # not going to use this for now, but may need it later
    def __init__(self, node_list):
        self.node_list = node_list
        # per-instance list of recorded positions; a mutable class attribute
        # would be shared across every Game_state object
        self.list_of_moves = []
def num_moves(self):
moves = 0
for i in range(len(self.node_list)):
if self.node_list[i].cell_contains() != "":
moves += 1
return moves
def moves_list(self):
#if len(self.list_of_moves) < self.num_moves():
for i in range(len(self.node_list)):
if self.node_list[i].move != "" and self.node_list[i].position not in self.list_of_moves:
self.list_of_moves.append(self.node_list[i].position)
ret_val = self.list_of_moves
#print('list of moves: type =', type(self.list_of_moves))
return ret_val
def next_up(self):
        if self.num_moves() % 2 == 0:  # an even move count (including 0) means X is up
return "X"
else:
return "O"
def game_prop_remaining(self):
moves = self.num_moves()
return 1 - moves / 9
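# game_prop_remaining: 1.0 for an empty board, 0.0 once all 9 cells are played.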
| 24.367347 | 101 | 0.569514 | 1,115 | 0.933836 | 0 | 0 | 0 | 0 | 0 | 0 | 257 | 0.215243 |
82c885deedbc0d14255bfcc8dfea36b0a64e58d5 | 13,340 | py | Python | alphatrading/system/db_methods/method_sqlite3.py | LoannData/Q26_AlphaTrading | b8e6983e59f942352150f76541d880143cca4478 | [
"MIT"
]
| null | null | null | alphatrading/system/db_methods/method_sqlite3.py | LoannData/Q26_AlphaTrading | b8e6983e59f942352150f76541d880143cca4478 | [
"MIT"
]
| null | null | null | alphatrading/system/db_methods/method_sqlite3.py | LoannData/Q26_AlphaTrading | b8e6983e59f942352150f76541d880143cca4478 | [
"MIT"
]
| null | null | null | """
"""
import sqlite3
import numpy as np
import math
class SQL:
def __init__(self):
self.path = None
self.connexion = None
self.cursor = None
self.verbose = False
def to_SQL_type(self, type_, mode = "format"):
"""
        Function allowing to convert an element expressed in Python syntax into
        its SQL equivalent.
        Parameters:
            - type_ [str/list]: Value to convert, committed as a string
                 (or as a list of values when mode = "list")
            - mode [str]: "format" for data types, "logic" for logical keywords,
                 "operator" for comparison operators, "list" for value lists
        Returns:
            - [str]: The parameter converted to SQL syntax if it is handled by
                 the method. The input variable otherwise.
"""
if type(type_) == list and mode == "list":
sql_list = "("
for element in type_:
sql_list += "'"+str(element)+"'"+","
sql_list = sql_list[:-1]
sql_list += ")"
return sql_list
if mode == "format":
if type_ == "str":
return "text"
elif type_ == "int":
return "integer"
elif type_ == "float":
return "real"
else:
return type_
elif mode == "logic":
if type_ == "all":
return "ALL"
elif type_ == "any":
return "ANY"
elif type_ == "and":
return "AND"
elif type_ == "or":
return "OR"
elif type_ == "not":
return "NOT"
elif type_ == "in":
return "IN"
elif type_ == "is" or type_ == "==":
return "IS"
else:
return type_
elif mode == "operator":
if type_ == "==":
return "="
elif type_ == "!=":
return "<>"
else:
return type_
else:
return type_
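    # Examples: to_SQL_type("str") -> "text"; to_SQL_type("==", mode="operator")
    # -> "="; to_SQL_type(["a", "b"], mode="list") -> "('a','b')"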
def create_database(self, path):
"""
Function allowing to create a database.
Parameter:
- path [str]: Path and name of the database. Note: The folder should exist.
Returns:
None
"""
if not path[-3:] == ".db":
path += ".db"
self.path = path
self.connexion = sqlite3.connect(path)
self.cursor = self.connexion.cursor()
return
def connect_database(self, path):
"""
Function allowing to connect to an existing database
Parameter:
- path [str]: Path and name of the database. Note: The folder should exist.
Returns:
None
"""
self.create_database(path)
def execute(self,
action = None,
object = None,
argument = None):
"""
        Function that executes a command following the SQL query
        structure.
"""
command = action+" "+object+" "+argument
if self.verbose:
print (command)
iterator = self.cursor.execute(command)
return iterator
#=====================================================================================#
# LISTING FUNCTIONS
#=====================================================================================#
def get_table_list(self):
"""
Function returning the list of tables in the database
Parameters:
None
Returns:
- [list(str)]: ["table_name1", "table_name2", ...]
"""
action = "SELECT"
object = "name FROM sqlite_master"
argument = "WHERE type='table'"
iterator = self.execute(action = action,
object = object,
argument = argument)
table_list = [x[0] for x in iterator.fetchall()]
return table_list
def get_id_list(self, table):
"""
        Function that retrieves the list of ids of the elements within
        a table. If the table doesn't contain any elements, it returns
        the list [0].
        Parameters:
            - table [str]: Table name
        Returns:
            - [list(int)]: List of ids of the elements in the table
in the order they have been added
"""
action = "SELECT"
object = "id"
argument = "FROM "+table
iterator = self.execute(action = action,
object = object,
argument = argument)
id_list = [x[0] for x in iterator.fetchall()]
if len(id_list) == 0 :
return [0]
return id_list
#=====================================================================================#
# CREATION & INSERTION FUNCTIONS
#=====================================================================================#
def create_table(self,
name,
structure):
"""
Function allowing to create a table in the already existing database
Parameters:
- name [str]: Name of the table
- structure [dict]: Structure of the table. Keys corresponds to the name of the columns while
associated values corresponds to the anounced type of the data.
Returns:
None
"""
action = "CREATE"
object = "TABLE"+" "+name
argument = "("
argument += "id"+" "+"integer"+", "
for key in structure.keys():
argument += key+" "+self.to_SQL_type(structure[key], mode = "format")+", "
argument = argument[:-2]
argument += ")"
self.execute(action = action,
object = object,
argument = argument)
return
def insert(self,
table,
value):
"""
Function allowing to insert an element in an existing table
of the connected database
Parameters:
- table [str] : Name of the table
- value [list] : List of the attributes of the element to be
inserted
Returns:
None
"""
        # Replace non-finite numbers (infinities, NaN) by string markers so
        # that they can be stored in the database
        for i in range(len(value)):
            val = value[i]
            if not type(val) == str:
                if np.isinf(val) or math.isinf(val):
                    if val > 1e32:
                        value[i] = "Inf"
                    elif val < -1e32:
                        value[i] = "-Inf"
                    else:
                        value[i] = "+-Inf"
                elif np.isnan(val):
                    value[i] = "NaN"
last_id = self.get_id_list(table)[-1]
value = [last_id+1]+value
action = "INSERT INTO"
object = table
argument = "VALUES ("
for element in value:
if type(element) == str:
element = element.replace("'", '"')
element = "'"+element+"'"
else:
element = str(element)
argument += element+","
argument = argument[:-1]
argument += ")"
self.execute(action = action,
object = object,
argument = argument)
self.connexion.commit()
return
def delete(self,
table,
where_ = None):
"""
Function allowing to delete an element from a table in the database.
Parameters:
- table [str]: Name of the table
- where_ [list(dict, str, list)]: List of conditions defining elements to be deleted. The structure of this
variable follows the scheme below:
[{
"object" : #Define the attribute name of an element,
"operator": #Define an operator defined in python syntax but provided inside a string
"value" : #A value which close the conditional statement
},
logic_operator [str] (it may be : "and", "or", "not"...)
...
The sequence of conditions has to follow logical rules otherwise it will probably raise an error.
]
"""
action = "DELETE FROM"+" "
object = table
argument = ""
if where_ is not None:
argument += "WHERE"+" "
for condition in where_:
if type(condition) == dict:
sub_object = condition["object"]
operator = self.to_SQL_type(condition["operator"], mode = "operator")
sub_value = condition["value"]
if type(sub_value) == str:
sub_value = "'"+sub_value+"'"
else:
sub_value = str(sub_value)
argument += sub_object+operator+sub_value+" "
if type(condition) == str:
argument += self.to_SQL_type(condition, mode = "logic")+" "
if type(condition) == list:
argument += self.to_SQL_type(condition, mode="list")+" "
self.execute(action = action,
object = object,
argument = argument)
self.connexion.commit()
return
def drop_table(self,
table):
"""
Function allowing to drop a table from the database
Parameters:
- table [str]: Table name
Returns:
None
"""
action = "DROP"
object = "TABLE"
argument = table
self.execute(action = action,
object = object,
argument = argument)
self.connexion.commit()
return
#=====================================================================================#
# QUERY FUNCTIONS
#=====================================================================================#
def select(self, #https://www.w3schools.com/sql/sql_select.asp
distinct = False, #https://www.w3schools.com/sql/sql_distinct.asp
columns = ["*"], #column1, column2 ...
table = None,
where_ = None, #https://www.w3schools.com/sql/sql_where.asp
orderby_ = None, #https://www.w3schools.com/sql/sql_orderby.asp
ordering = "ASC" # "DESC"
):
action = "SELECT"
if distinct:
action += " "+"DISTINCT"
object = ""
for col in columns:
object += col+", "
object = object[:-2]
if "*" in columns:
object = "*"+" "
object += "FROM"+" "+table
argument = ""
if where_ is not None:
argument += "WHERE"+" "
for condition in where_:
if type(condition) == dict:
sub_object = condition["object"]
operator = self.to_SQL_type(condition["operator"], mode = "operator")
sub_value = condition["value"]
if type(sub_value) == str:
sub_value = "'"+sub_value+"'"
else:
sub_value = str(sub_value)
argument += sub_object+operator+sub_value+" "
if type(condition) == str:
argument += self.to_SQL_type(condition, mode = "logic")+" "
if type(condition) == list:
argument += self.to_SQL_type(condition, mode="list")+" "
if orderby_ is not None:
argument += "ORDER BY"+" "
for col in orderby_:
argument += col+", "
argument = argument[:-2]
argument += " "+ordering
iterator = self.execute(action = action,
object = object,
argument = argument)
result_list = [x for x in iterator.fetchall()]
return result_list
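# Illustrative usage (column/table names are hypothetical):
#   rows = db.select(columns=["id", "label"],
#                    table="measurements",
#                    where_=[{"object": "time", "operator": ">", "value": 0.0}],
#                    orderby_=["id"], ordering="DESC")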
| 32.378641 | 139 | 0.422414 | 13,273 | 0.994754 | 0 | 0 | 0 | 0 | 0 | 0 | 5,400 | 0.404707 |
82c9034910103390615809d1175c2317626103b0 | 4,705 | py | Python | pysport/horseracing/lattice_calibration.py | notbanker/pysport | fbeb1f1efa493aa26ffb58156b86ce2aee3482bf | [
"MIT"
]
| null | null | null | pysport/horseracing/lattice_calibration.py | notbanker/pysport | fbeb1f1efa493aa26ffb58156b86ce2aee3482bf | [
"MIT"
]
| null | null | null | pysport/horseracing/lattice_calibration.py | notbanker/pysport | fbeb1f1efa493aa26ffb58156b86ce2aee3482bf | [
"MIT"
]
| null | null | null | from .lattice import skew_normal_density, center_density,\
state_prices_from_offsets, densities_and_coefs_from_offsets, winner_of_many,\
expected_payoff, densities_from_offsets, implicit_state_prices, densitiesPlot
import pandas as pd # todo: get rid of this dependency
import numpy as np
def dividend_implied_racing_ability( dividends ):
return dividend_implied_ability( dividends=dividends, density=racing_density( loc=0.0 ) )
def racing_ability_implied_dividends( ability ):
return ability_implied_dividends( ability, density=racing_density( loc=0.0 ) )
RACING_L = 500
RACING_UNIT = 0.1
RACING_SCALE = 1.0
RACING_A = 1.0
def make_nan_2000( x ) :
""" Longshots """
if pd.isnull( x ):
return 2000.
else:
return x
def normalize( p ):
""" Naive renormalization of probabilities """
S = sum( p )
return [ pr/S for pr in p ]
def prices_from_dividends( dividends ):
""" Risk neutral probabilities using naive renormalization """
return normalize( [ 1. / make_nan_2000(x) for x in dividends ] )
def dividends_from_prices( prices ):
""" Australian style dividends """
return [ 1./d for d in normalize( prices ) ]
def normalize_dividends( dividends ):
return dividends_from_prices( prices_from_dividends( dividends ))
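# Worked example: dividends [2.0, 4.0, 4.0] give raw prices [0.5, 0.25, 0.25],
# which already sum to one, so normalize_dividends returns [2.0, 4.0, 4.0]
# unchanged; for [2.0, 3.0] the prices 0.5 and 0.333... are first rescaled to
# sum to one and then inverted back into dividends ([1.666..., 2.5]).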
def racing_density( loc ):
""" A rough and ready distribution of performance distributions for one round """
density = skew_normal_density( L=RACING_L, unit=RACING_UNIT, loc=0, scale=RACING_SCALE, a=RACING_A )
return center_density( density )
def dividend_implied_ability( dividends, density ):
""" Infer risk-neutral implied_ability from Australian style dividends
:param dividends: [ 7.6, 12.0, ... ]
:return: [ float ] Implied ability
"""
state_prices = prices_from_dividends( dividends )
implied_offsets_guess = [ 0 for _ in state_prices]
    L = len( density )//2
    offset_samples = list( range( -L//4, L//4 ))[::-1]
ability = implied_ability( prices = state_prices, density = density, \
offset_samples = offset_samples, implied_offsets_guess = implied_offsets_guess, nIter = 3)
return ability
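# Illustrative round trip (dividends are made up): the abilities returned by
# dividend_implied_racing_ability([2.0, 4.0, 4.0]) should map back to roughly
# the same normalized dividends via racing_ability_implied_dividends(ability).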
def ability_implied_dividends( ability, density ):
""" Return betfair style prices
:param ability:
:return: [ 7.6, 12.3, ... ]
"""
state_prices = state_prices_from_offsets( density=density, offsets = ability)
return [ 1./sp for sp in state_prices ]
def implied_ability( prices, density, offset_samples = None, implied_offsets_guess = None, nIter = 3, verbose = False, visualize = False):
""" Finds location translations of a fixed density so as to replicate given state prices for winning """
L = len( density )
if offset_samples is None:
        offset_samples = list( range( -L//4, L//4 ))[::-1]     # offset_samples should be descending TODO: add check for this
else:
_assert_descending( offset_samples )
if implied_offsets_guess is None:
implied_offsets_guess = range( len(prices) )
# First guess at densities
densities, coefs = densities_and_coefs_from_offsets( density, implied_offsets_guess )
densityAllGuess, multiplicityAllGuess = winner_of_many( densities )
densityAll = densityAllGuess.copy()
multiplicityAll = multiplicityAllGuess.copy()
guess_prices = [ np.sum( expected_payoff( density, densityAll, multiplicityAll, cdf = None, cdfAll = None)) for density in densities]
    for _ in range( nIter ):
if visualize:
# temporary hack to check progress of optimization
densitiesPlot( [ densityAll] + densities , unit=0.1 )
implied_prices = implicit_state_prices( density=density, densityAll=densityAll, multiplicityAll = multiplicityAll, offsets=offset_samples )
implied_offsets = np.interp( prices, implied_prices, offset_samples )
densities = densities_from_offsets( density, implied_offsets )
densityAll, multiplicityAll = winner_of_many( densities )
guess_prices = [ np.sum(expected_payoff(density, densityAll, multiplicityAll, cdf = None, cdfAll = None)) for density in densities ]
approx_prices = [ np.round( pri, 3 ) for pri in prices]
approx_guesses = [ np.round( pri, 3 ) for pri in guess_prices]
if verbose:
            print( list(zip( approx_prices, approx_guesses ))[:5] )
return implied_offsets
def _assert_descending( xs ):
for d in np.diff( xs ):
if d>0:
raise ValueError("Not descending") | 42.772727 | 160 | 0.671413 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 788 | 0.167481 |
82ca9321fb77ad0b8c97cc3c98eb832716ddecc4 | 4,832 | py | Python | var/spack/repos/builtin/packages/autoconf/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
]
| 2,360 | 2017-11-06T08:47:01.000Z | 2022-03-31T14:45:33.000Z | var/spack/repos/builtin/packages/autoconf/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
]
| 13,838 | 2017-11-04T07:49:45.000Z | 2022-03-31T23:38:39.000Z | var/spack/repos/builtin/packages/autoconf/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
]
| 1,793 | 2017-11-04T07:45:50.000Z | 2022-03-30T14:31:53.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
class Autoconf(AutotoolsPackage, GNUMirrorPackage):
"""Autoconf -- system configuration part of autotools"""
homepage = 'https://www.gnu.org/software/autoconf/'
gnu_mirror_path = 'autoconf/autoconf-2.69.tar.gz'
version('2.71', sha256='431075ad0bf529ef13cb41e9042c542381103e80015686222b8a9d4abef42a1c')
version('2.70', sha256='f05f410fda74323ada4bdc4610db37f8dbd556602ba65bc843edb4d4d4a1b2b7')
version('2.69', sha256='954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd4c2969',
preferred=True)
version('2.62', sha256='83aa747e6443def0ebd1882509c53f5a2133f502ddefa21b3de141c433914bdd')
version('2.59', sha256='9cd05c73c5fcb1f5ccae53dd6cac36bb8cb9c7b3e97ffae5a7c05c72594c88d8')
# https://savannah.gnu.org/support/?110396
patch('https://git.savannah.gnu.org/cgit/autoconf.git/patch/?id=05972f49ee632cd98057a3caf82ebfb9574846da',
sha256='eaa3f69d927a853313a0b06e2117c51adab6377a2278549b05abc5df93643e16',
when='@2.70')
# Apply long-time released and already in-use upstream patches to fix test cases:
# tests/foreign.at (Libtool): Be tolerant of 'quote' replacing the older `quote'
patch('http://mirrors.mit.edu/gentoo-portage/sys-devel/autoconf/files/autoconf-2.69-fix-libtool-test.patch',
sha256='7793209b33013dc0f81208718c68440c5aae80e7a1c4b8d336e382525af791a7',
when='@2.69')
# Fix bin/autoscan.in for current perl releases (reported already in January 2013)
patch('http://mirrors.mit.edu/gentoo-portage/sys-devel/autoconf/files/autoconf-2.69-perl-5.26.patch',
sha256='35c449281546376449766f92d49fc121ca50e330e60fefcfc9be2af3253082c2',
when='@2.62:2.69 ^[email protected]:')
# Fix bin/autoheader.in for current perl relases not having "." in @INC:
patch('http://mirrors.mit.edu/gentoo-portage/sys-devel/autoconf/files/autoconf-2.69-perl-5.26-2.patch',
sha256='a49dd5bac3b62daa0ff688ab4d508d71dbd2f4f8d7e2a02321926346161bf3ee',
when='@2.62:2.69 ^[email protected]:')
# Note: m4 is not a pure build-time dependency of autoconf. m4 is
# needed when autoconf runs, not only when autoconf is built.
depends_on('[email protected]:', type=('build', 'run'))
depends_on('perl', type=('build', 'run'))
build_directory = 'spack-build'
tags = ['build-tools']
executables = [
'^autoconf$', '^autoheader$', '^autom4te$', '^autoreconf$',
'^autoscan$', '^autoupdate$', '^ifnames$'
]
@classmethod
def determine_version(cls, exe):
output = Executable(exe)('--version', output=str, error=str)
match = re.search(r'\(GNU Autoconf\)\s+(\S+)', output)
return match.group(1) if match else None
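    # Example: '--version' output such as 'autoconf (GNU Autoconf) 2.69' yields '2.69'.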
def patch(self):
# The full perl shebang might be too long; we have to fix this here
# because autom4te is called during the build
patched_file = 'bin/autom4te.in'
# We save and restore the modification timestamp of the file to prevent
# regeneration of the respective man page:
with keep_modification_time(patched_file):
filter_file('^#! @PERL@ -w',
'#! /usr/bin/env perl',
patched_file)
if self.version == Version('2.62'):
# skip help2man for patched autoheader.in and autoscan.in
touch('man/autoheader.1')
touch('man/autoscan.1')
# make installcheck would execute the testsuite a 2nd time, skip it
def installcheck(self):
pass
@run_after('install')
def filter_sbang(self):
# We have to do this after install because otherwise the install
# target will try to rebuild the binaries (filter_file updates the
# timestamps)
# Revert sbang, so Spack's sbang hook can fix it up
filter_file('^#! /usr/bin/env perl',
'#! {0} -w'.format(self.spec['perl'].command.path),
self.prefix.bin.autom4te,
backup=False)
def _make_executable(self, name):
return Executable(join_path(self.prefix.bin, name))
def setup_dependent_package(self, module, dependent_spec):
# Autoconf is very likely to be a build dependency,
# so we add the tools it provides to the dependent module
executables = ['autoconf',
'autoheader',
'autom4te',
'autoreconf',
'autoscan',
'autoupdate',
'ifnames']
for name in executables:
setattr(module, name, self._make_executable(name))
| 45.158879 | 112 | 0.657078 | 4,621 | 0.956333 | 0 | 0 | 707 | 0.146316 | 0 | 0 | 2,942 | 0.608858 |
82cb0803d2457f595d667a7981bfa23935775448 | 1,096 | py | Python | src/wallet/web/schemas/categories.py | clayman-micro/wallet | b78f650aed7d57167db81a0530fd78dbc12d527e | [
"MIT"
]
| 2 | 2015-10-18T15:36:37.000Z | 2015-10-19T04:57:00.000Z | src/wallet/web/schemas/categories.py | clayman74/wallet | b78f650aed7d57167db81a0530fd78dbc12d527e | [
"MIT"
]
| 7 | 2021-06-26T16:51:13.000Z | 2021-11-29T19:05:00.000Z | src/wallet/web/schemas/categories.py | clayman-micro/wallet | b78f650aed7d57167db81a0530fd78dbc12d527e | [
"MIT"
]
| null | null | null | from aiohttp_micro.web.handlers.openapi import PayloadSchema, ResponseSchema
from marshmallow import fields, post_load, Schema
from wallet.core.entities.categories import CategoryFilters
from wallet.web.schemas.abc import CollectionFiltersSchema
class CategorySchema(Schema):
key = fields.Int(required=True, data_key="id", description="Category id")
name = fields.Str(required=True, description="Category name")
class CategoriesResponseSchema(ResponseSchema):
"""Categories list."""
categories = fields.List(fields.Nested(CategorySchema), required=True, description="Categories")
class CategoriesFilterSchema(CollectionFiltersSchema):
"""Filter categories list."""
@post_load
def make_payload(self, data, **kwargs):
return CategoryFilters(user=self.context["user"])
class ManageCategoryPayloadSchema(PayloadSchema):
"""Add new category."""
name = fields.Str(required=True, description="Category name")
class CategoryResponseSchema(ResponseSchema):
"""Get category info."""
category = fields.Nested(CategorySchema, required=True)
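# Illustrative serialized shape: CategorySchema dumps an entity with key=1,
# name="Groceries" as {"id": 1, "name": "Groceries"} -- "key" is exposed
# under "id" via data_key.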
| 29.621622 | 100 | 0.762774 | 834 | 0.760949 | 0 | 0 | 112 | 0.10219 | 0 | 0 | 163 | 0.148723 |
82cb1f7a824b2011c270ad30649e677322c356f9 | 127 | py | Python | scons_gbd_docs/Gbd/Docs/SConscript.py | ASoftTech/Scons.Gbd.Docs | 4d9fb7585d9565f57306774efb4342fe9b8822f2 | [
"MIT"
]
| null | null | null | scons_gbd_docs/Gbd/Docs/SConscript.py | ASoftTech/Scons.Gbd.Docs | 4d9fb7585d9565f57306774efb4342fe9b8822f2 | [
"MIT"
]
| null | null | null | scons_gbd_docs/Gbd/Docs/SConscript.py | ASoftTech/Scons.Gbd.Docs | 4d9fb7585d9565f57306774efb4342fe9b8822f2 | [
"MIT"
]
| null | null | null | SConscript('Mkdocs/Common/SConscript.py')
SConscript('Pandoc/Common/SConscript.py')
SConscript('Doxygen/Common/SConscript.py')
| 31.75 | 42 | 0.811024 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 88 | 0.692913 |
82cb4d12dfd598eacff3048f5dbbafb527f62c06 | 11,563 | py | Python | seg/segmentor/tools/module_runner.py | Frank-Abagnal/HRFormer | d7d362770de8648f8e0a379a71cee25f42954503 | [
"MIT"
]
| 254 | 2021-08-13T10:05:22.000Z | 2022-03-25T09:21:45.000Z | seg/segmentor/tools/module_runner.py | Sense-X/HRFormer | 1245b88b5824fbd8cdb358b5ee909a4e537a2ef5 | [
"MIT"
]
| 17 | 2021-09-08T01:40:49.000Z | 2022-03-23T10:53:47.000Z | seg/segmentor/tools/module_runner.py | Sense-X/HRFormer | 1245b88b5824fbd8cdb358b5ee909a4e537a2ef5 | [
"MIT"
]
| 48 | 2021-08-13T14:06:58.000Z | 2022-03-30T02:41:26.000Z | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: Donny You([email protected])
# Some methods used by main methods.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
from collections import OrderedDict
import torch
import torch.nn as nn
from torch.nn.parallel.scatter_gather import gather as torch_gather
from lib.extensions.parallel.data_parallel import DataParallelModel
from lib.utils.tools.logger import Logger as Log
from lib.utils.distributed import get_rank, is_distributed
class ModuleRunner(object):
def __init__(self, configer):
self.configer = configer
self._init()
def _init(self):
self.configer.add(['iters'], 0)
self.configer.add(['last_iters'], 0)
self.configer.add(['epoch'], 0)
self.configer.add(['last_epoch'], 0)
self.configer.add(['max_performance'], 0.0)
self.configer.add(['performance'], 0.0)
self.configer.add(['min_val_loss'], 9999.0)
self.configer.add(['val_loss'], 9999.0)
if not self.configer.exists('network', 'bn_type'):
self.configer.add(['network', 'bn_type'], 'torchbn')
# if self.configer.get('phase') == 'train':
# assert len(self.configer.get('gpu')) > 1 or self.configer.get('network', 'bn_type') == 'torchbn'
Log.info('BN Type is {}.'.format(self.configer.get('network', 'bn_type')))
def to_device(self, *params, force_list=False):
if is_distributed():
device = torch.device('cuda:{}'.format(get_rank()))
else:
device = torch.device('cpu' if self.configer.get('gpu') is None else 'cuda')
return_list = list()
for i in range(len(params)):
return_list.append(params[i].to(device))
if force_list:
return return_list
else:
return return_list[0] if len(params) == 1 else return_list
def _make_parallel(self, net):
if is_distributed():
local_rank = get_rank()
return torch.nn.parallel.DistributedDataParallel(
net,
device_ids=[local_rank],
output_device=local_rank,
find_unused_parameters=True
)
if len(self.configer.get('gpu')) == 1:
self.configer.update(['network', 'gathered'], True)
return DataParallelModel(net, gather_=self.configer.get('network', 'gathered'))
def load_net(self, net):
net = self.to_device(net)
net = self._make_parallel(net)
if not is_distributed():
net = net.to(torch.device('cpu' if self.configer.get('gpu') is None else 'cuda'))
net.float()
if self.configer.get('network', 'resume') is not None:
Log.info('Loading checkpoint from {}...'.format(self.configer.get('network', 'resume')))
resume_dict = torch.load(self.configer.get('network', 'resume'))
if 'state_dict' in resume_dict:
checkpoint_dict = resume_dict['state_dict']
elif 'model' in resume_dict:
checkpoint_dict = resume_dict['model']
elif isinstance(resume_dict, OrderedDict):
checkpoint_dict = resume_dict
else:
raise RuntimeError(
'No state_dict found in checkpoint file {}'.format(self.configer.get('network', 'resume')))
if list(checkpoint_dict.keys())[0].startswith('module.'):
checkpoint_dict = {k[7:]: v for k, v in checkpoint_dict.items()}
# load state_dict
if hasattr(net, 'module'):
self.load_state_dict(net.module, checkpoint_dict, self.configer.get('network', 'resume_strict'))
else:
self.load_state_dict(net, checkpoint_dict, self.configer.get('network', 'resume_strict'))
if self.configer.get('network', 'resume_continue'):
self.configer.resume(resume_dict['config_dict'])
return net
@staticmethod
def load_state_dict(module, state_dict, strict=False):
"""Load state_dict to a module.
This method is modified from :meth:`torch.nn.Module.load_state_dict`.
Default value for ``strict`` is set to ``False`` and the message for
param mismatch will be shown even if strict is False.
Args:
module (Module): Module that receives the state_dict.
state_dict (OrderedDict): Weights.
strict (bool): whether to strictly enforce that the keys
in :attr:`state_dict` match the keys returned by this module's
:meth:`~torch.nn.Module.state_dict` function. Default: ``False``.
"""
unexpected_keys = []
own_state = module.state_dict()
for name, param in state_dict.items():
if name not in own_state:
unexpected_keys.append(name)
continue
if isinstance(param, torch.nn.Parameter):
# backwards compatibility for serialized parameters
param = param.data
try:
own_state[name].copy_(param)
except Exception:
Log.warn('While copying the parameter named {}, '
'whose dimensions in the model are {} and '
'whose dimensions in the checkpoint are {}.'
.format(name, own_state[name].size(),
param.size()))
missing_keys = set(own_state.keys()) - set(state_dict.keys())
err_msg = []
if unexpected_keys:
err_msg.append('unexpected key in source state_dict: {}\n'.format(', '.join(unexpected_keys)))
if missing_keys:
            # missing keys are only warned about (when strict=False) so that
            # models can be fine-tuned from checkpoints with missing keys.
err_msg.append('missing keys in source state_dict: {}\n'.format(', '.join(missing_keys)))
err_msg = '\n'.join(err_msg)
if err_msg:
if strict:
raise RuntimeError(err_msg)
else:
Log.warn(err_msg)
def save_net(self, net, save_mode='iters'):
if is_distributed() and get_rank() != 0:
return
state = {
'config_dict': self.configer.to_dict(),
'state_dict': net.state_dict(),
}
if self.configer.get('checkpoints', 'checkpoints_root') is None:
checkpoints_dir = os.path.join(self.configer.get('project_dir'),
self.configer.get('checkpoints', 'checkpoints_dir'))
else:
checkpoints_dir = os.path.join(self.configer.get('checkpoints', 'checkpoints_root'),
self.configer.get('checkpoints', 'checkpoints_dir'))
if not os.path.exists(checkpoints_dir):
os.makedirs(checkpoints_dir)
latest_name = '{}_latest.pth'.format(self.configer.get('checkpoints', 'checkpoints_name'))
torch.save(state, os.path.join(checkpoints_dir, latest_name))
if save_mode == 'performance':
if self.configer.get('performance') > self.configer.get('max_performance'):
latest_name = '{}_max_performance.pth'.format(self.configer.get('checkpoints', 'checkpoints_name'))
torch.save(state, os.path.join(checkpoints_dir, latest_name))
self.configer.update(['max_performance'], self.configer.get('performance'))
elif save_mode == 'val_loss':
if self.configer.get('val_loss') < self.configer.get('min_val_loss'):
latest_name = '{}_min_loss.pth'.format(self.configer.get('checkpoints', 'checkpoints_name'))
torch.save(state, os.path.join(checkpoints_dir, latest_name))
self.configer.update(['min_val_loss'], self.configer.get('val_loss'))
elif save_mode == 'iters':
if self.configer.get('iters') - self.configer.get('last_iters') >= \
self.configer.get('checkpoints', 'save_iters'):
latest_name = '{}_iters{}.pth'.format(self.configer.get('checkpoints', 'checkpoints_name'),
self.configer.get('iters'))
torch.save(state, os.path.join(checkpoints_dir, latest_name))
self.configer.update(['last_iters'], self.configer.get('iters'))
elif save_mode == 'epoch':
if self.configer.get('epoch') - self.configer.get('last_epoch') >= \
self.configer.get('checkpoints', 'save_epoch'):
latest_name = '{}_epoch{}.pth'.format(self.configer.get('checkpoints', 'checkpoints_name'),
self.configer.get('epoch'))
torch.save(state, os.path.join(checkpoints_dir, latest_name))
self.configer.update(['last_epoch'], self.configer.get('epoch'))
else:
Log.error('Metric: {} is invalid.'.format(save_mode))
exit(1)
def freeze_bn(self, net, syncbn=False):
for m in net.modules():
if isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
m.eval()
if syncbn:
from lib.extensions import BatchNorm2d, BatchNorm1d
if isinstance(m, BatchNorm2d) or isinstance(m, BatchNorm1d):
m.eval()
def clip_grad(self, model, max_grad=10.):
"""Computes a gradient clipping coefficient based on gradient norm."""
total_norm = 0
for p in model.parameters():
if p.requires_grad:
modulenorm = p.grad.data.norm()
total_norm += modulenorm ** 2
total_norm = math.sqrt(total_norm)
norm = max_grad / max(total_norm, max_grad)
for p in model.parameters():
if p.requires_grad:
p.grad.mul_(norm)
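    # Worked example: with max_grad=10 and a total gradient norm of 20,
    # norm = 10 / max(20, 10) = 0.5 and every gradient is halved; if the total
    # norm is already below max_grad, norm = 1 and the gradients are unchanged.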
def gather(self, outputs, target_device=None, dim=0):
r"""
Gathers tensors from different GPUs on a specified device
(-1 means the CPU).
"""
if not self.configer.get('network', 'gathered'):
if target_device is None:
target_device = list(range(torch.cuda.device_count()))[0]
return torch_gather(outputs, target_device, dim=dim)
else:
return outputs
def get_lr(self, optimizer):
return [param_group['lr'] for param_group in optimizer.param_groups]
def warm_lr(self, iters, scheduler, optimizer, backbone_list=(0, )):
"""Sets the learning rate
# Adapted from PyTorch Imagenet example:
# https://github.com/pytorch/examples/blob/master/imagenet/main.py
"""
if not self.configer.exists('lr', 'is_warm') or not self.configer.get('lr', 'is_warm'):
return
warm_iters = self.configer.get('lr', 'warm')['warm_iters']
if iters < warm_iters:
if self.configer.get('lr', 'warm')['freeze_backbone']:
for backbone_index in backbone_list:
optimizer.param_groups[backbone_index]['lr'] = 0.0
else:
lr_ratio = (self.configer.get('iters') + 1) / warm_iters
base_lr_list = scheduler.get_lr()
for backbone_index in backbone_list:
optimizer.param_groups[backbone_index]['lr'] = base_lr_list[backbone_index] * (lr_ratio ** 4)
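    # Example: with warm_iters=1000, at iteration 499 lr_ratio = 500/1000 = 0.5,
    # so the backbone learning rate is base_lr * 0.5**4 = base_lr/16, ramping
    # quartically up to the full rate by the end of warm-up.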
| 41.894928 | 115 | 0.585488 | 10,985 | 0.950013 | 0 | 0 | 2,153 | 0.186197 | 0 | 0 | 2,952 | 0.255297 |
82cc626afaea4df2938aee10cb59917cc59cdc28 | 1,861 | py | Python | scripts/si_figs.py | gbirzu/density-dependent_dispersal_growth | edd1207f57b63e2827af385d4e868306ff308746 | [
"MIT"
]
| null | null | null | scripts/si_figs.py | gbirzu/density-dependent_dispersal_growth | edd1207f57b63e2827af385d4e868306ff308746 | [
"MIT"
]
| null | null | null | scripts/si_figs.py | gbirzu/density-dependent_dispersal_growth | edd1207f57b63e2827af385d4e868306ff308746 | [
"MIT"
]
| null | null | null | import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import pickle
import scipy.stats as stats
data_path = '../data/het_average.dat'
output_dir = '../figures/'
# Configure matplotlib environment
helvetica_scale_factor = 0.92  # rescale Helvetica to match other fonts at the same nominal size
mpl.rcParams['font.size'] = 10 * helvetica_scale_factor
mpl.rcParams['font.family'] = 'sans-serif'
mpl.rcParams['font.sans-serif'] = 'Helvetica Neue'
mpl.rcParams['axes.titlesize'] = 12 * helvetica_scale_factor
single_col_width = 3.43 # = 8.7 cm
double_col_width = 7.01 # = 17.8 cm
def plot_het_comparison(het_averages):
time = het_averages['time']
het_global = het_averages['global']
het_local = het_averages['local']
fig = plt.figure(figsize=(single_col_width, single_col_width))
ax = fig.add_subplot(111)
ax.set_xlabel('time, t', fontweight='bold')
ax.set_ylabel('heterozygosity, H', fontweight='bold')
ax.set_yscale('log')
ax.plot(time, het_global, ls='-', lw=2, c='k')
ax.plot(time, het_local, ls='', marker='o', markevery=5, markersize=5, markeredgecolor='r', markerfacecolor='none')
plt.tight_layout()
plt.savefig(output_dir + 'het_comparison.pdf')
def fit_Ne(het_averages, averaging='global'):
time = het_averages['time']
het = het_averages[averaging]
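    # Under neutral drift heterozygosity decays roughly as H(t) = H(0) * exp(-t/Ne),
    # so the slope of log(H) against t is -1/Ne and Ne = 1/|slope|.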
slope, intercept, rvalue, pvalue, stderr = stats.linregress(time, np.log(het))
return 1 / abs(slope)
if __name__ == '__main__':
with open(data_path, 'rb') as f_in:
het_averages = pickle.load(f_in)
plot_het_comparison(het_averages)
ne_global = fit_Ne(het_averages, averaging='global')
ne_local = fit_Ne(het_averages, averaging='local')
print('Ne (global averaging): ', ne_global)
print('Ne (local averaging): ', ne_local)
print('Ne difference: ', 100 * (ne_global - ne_local) / ne_global, '%')
| 33.836364 | 119 | 0.703923 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 443 | 0.238044 |
82cd32f83dde9f87b3ac04ec47ec6fefab6101d7 | 7,532 | py | Python | language.py | sanine-a/dream-atlas | cd44c43ec6cf5e7a95ae231ba7174a6891d93474 | [
"MIT"
]
| null | null | null | language.py | sanine-a/dream-atlas | cd44c43ec6cf5e7a95ae231ba7174a6891d93474 | [
"MIT"
]
| null | null | null | language.py | sanine-a/dream-atlas | cd44c43ec6cf5e7a95ae231ba7174a6891d93474 | [
"MIT"
]
| null | null | null | from random import random, choice, seed, shuffle, randint
from math import ceil
import copy
target = [ 2, 2, 3, 1, 4, 5 ]
consonants_base = [ 'p', 't', 'k', 'm', 'n' ]
vowels = [ [ 'a', 'i', 'u' ],
[ 'a', 'i', 'u', 'e', 'o' ],
[ 'a', 'A', 'i', 'I', 'u', 'U', 'e', 'E', 'o', 'O' ] ]
consonants_extra = [ 'b', 'd', 'j', 's', 'z', 'y', 'q', 'G', '?', 'N', 'r', 'f', 'v', 'T', 'D', 'S', 'Z', 'x', 'h', 'w', 'l', 'C' ]
sibilants = [ ['s',], [ 's', 'S' ], ['s', 'S', 'f'] ]
liquids = [ ['r'], ['l'], ['r','l'], ['w','y'], ['r','l','w','y'] ]
orthography1 = { 'name':'nordic', 'j':'dz', 'y':'j', 'T':'th', 'D':'ð', 'S':'sh', 'Z':'zh', 'N':'ng', '?':"'", 'G':'q', 'C':'ch', 'A':'å', 'E':'ë', 'I':'ï', 'O':'ö', 'U':'ü' }
orthography2 = { 'name':'czech', 'T':'th', 'D':'th', 'S':'š', 'Z':'ž', 'C':'č', 'G':'q', 'N':'ng', '?':'-', 'A':'á', 'E':'ě', 'I':'ý', 'O':'ó', 'U':'ú' }
orthography3 = { 'name':'french', 'T':'th', 'D':'th', 'S':'ch', 'G':'gh', 'C':'tc', '?':"'", 'N':'ng', 'Z':'z', 'k':'c', 'A':'â', 'E':'ê', 'I':'î', 'O':'ô', 'U':'û' }
orthography4 = { 'name':'mexica', 'k':'c', 'G':'gh', 'N':'ng', 'T':'th', 'D':'th', 'S':'x', 'C':'ch', '?':"'", 'Z':'zh', 'A':'á', 'E':'é', 'I':'í', 'O':'ó', 'U':'ú' }
orthographies = ( orthography1, orthography2, orthography3, orthography4 )
syllables = ( [ 'CV', ],
[ 'CV', 'V' ],
[ 'CV', 'CVC' ],
[ 'CV', 'CVC', 'V' ],
[ 'CVC', ],
[ 'CVC', 'CRVC', 'CV', 'CRV' ],
[ 'CVC', 'CRVC', 'CVRC', 'CV', 'CRV' ], [ 'CVC', 'CRVC', 'CVCC', 'CRVCC', 'CV', 'CRV' ],
[ 'CVC', 'CRVC', 'CVRC', 'CVCC', 'CRVCC', 'CV', 'CRV' ],
[ 'CV', 'CVC', 'SCV', 'SCVC' ],
[ 'CVC', 'CVCC', 'SVC', 'SVCC', 'CV', 'SCV' ],
[ 'CVC', 'CVCC', 'CRVC', 'SCVC', 'SCRVC', 'CV', 'CRV', 'SCV', 'SCRV' ] )
government = [ 'Republic of ', 'Kingdom of ', 'Confederacy of ', 'Satrapy of ','Empire of ' ]
class morpheme:
def __init__(self,morpheme,prefix):
self.morpheme = morpheme
self.prefix = prefix
def elem(obj, items):
for item in items:
if item == obj:
return True
return False
def biased_choice(items, bias=2):
i = int( random()**bias * len(items) )
return items[i]
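# With the default bias=2 the uniform draw is squared, skewing selection toward
# early indices: for 10 items, index 0 is chosen whenever random() < sqrt(0.1),
# i.e. about 32% of the time instead of 10%.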
class language:
def __init__(self):
# get phonemes
self.phonemes = {}
self.phonemes['V'] = choice(vowels)
shuffle(self.phonemes['V'])
self.phonemes['R'] = choice(liquids)
self.phonemes['S'] = choice(sibilants)
more_consonants = []
for i in range(0, int(random()*len(consonants_extra))):
c = choice(consonants_extra)
if elem(c,more_consonants):
break
else:
more_consonants.append(c)
#shuffle(more_consonants)
self.phonemes['C'] = consonants_base + more_consonants
shuffle(self.phonemes['C'])
#get syllables, orthography, and word length
self.syllables = choice(syllables)
self.orthography = choice(orthographies)
self.orthography[';'] = '' # skip syllable separators
self.wordtarget = biased_choice(target,5)
# basic morphemes & words
if random() >= 0.3:
self.prefix = False
else:
self.prefix = True
self.the = self.syllable()
self.of = self.syllable()
self.landm = []
for i in range(randint(3,6)):
self.landm.append(self.shortword())
self.waterm = []
for i in range(randint(3,6)):
self.waterm.append(self.shortword())
self.citym = []
for i in range(randint(3,6)):
self.citym.append(self.shortword())
def derive(self):
derived = copy.deepcopy(self)
if random() > 0.7:
shuffle(derived.syllables)
lm = 0
wm = 0
cm = 0
the = False
of = False
if random() > 0.5:
for i in range(randint(1,4)):
c = choice(derived.phonemes['C'])
if not elem(c,consonants_base):
derived.phonemes['C'].remove(c)
if elem(c,derived.the):
the = True
if elem(c,derived.of):
of = True
for m in derived.landm:
if elem(c,m):
derived.landm.remove(m)
lm += 1
for m in derived.waterm:
if elem(c,m):
derived.waterm.remove(m)
wm += 1
for m in derived.citym:
if elem(c,m):
derived.citym.remove(m)
cm += 1
if random() > 0.5:
for i in range(randint(1,4)):
index = randint(5,len(derived.phonemes['C']))
derived.phonemes['C'].insert(index,choice(consonants_extra))
if the:
derived.the = derived.syllable()
if of:
derived.of = derived.syllable()
for i in range(lm):
derived.landm.append(derived.shortword())
for i in range(wm):
derived.waterm.append(derived.shortword())
for i in range(cm):
derived.citym.append(derived.shortword())
return derived
def orthographic(self,string):
outstring = ""
for c in string:
try:
outstring += self.orthography[c]
except KeyError:
outstring += c
return outstring
def syllable(self):
syl = ""
stype = biased_choice(self.syllables)
for letter in stype:
try:
syl = syl+biased_choice(self.phonemes[letter])
except KeyError:
break
return syl+';'
def word(self,short=False):
w = ""
N = randint(ceil(.5*self.wordtarget),ceil(1.5*self.wordtarget))
if short and N >= 2:
N -= 1
for i in range(N):
w = w+self.syllable()
return w
def shortword(self):
sw = ""
for i in range(randint(1,ceil(self.wordtarget))):
sw += self.syllable()
return sw
def gen_name(self,morph):
if random() < 0.1:
return self.word() + ' ' + self.of + ' ' + self.word()
if random() < 0.1:
if self.prefix:
return self.word() + ' ' + self.the
else:
return self.the + ' ' + self.word()
m = ''
if random() > 0.5:
m = choice(morph)
w = self.word(bool(m))
if self.prefix:
return m + w
else:
return w + m
def cityname(self):
return self.gen_name(self.citym)
def landname(self):
return self.gen_name(self.landm)
def watername(self):
return self.gen_name(self.waterm)
def countryname(self):
if random() > 0.7:
return choice(government) + self.orthographic(self.landname()).title()
else:
return self.orthographic(self.landname()).title()
'''
lang1 = language()
for j in range(10):
print('Language '+str(j+1))
for i in range(5):
word = lang1.cityname()
print(lang1.orthographic(word).title())
lang1 = lang1.derive()
print(' ')
'''
| 34.392694 | 175 | 0.454461 | 5,084 | 0.672843 | 0 | 0 | 0 | 0 | 0 | 0 | 1,362 | 0.180254 |
82cfea168601da39ca8ee801205fdee39d24a8a0 | 446 | py | Python | week/templatetags/sidebar_data.py | uno-isqa-8950/fitgirl-inc | 2656e7340e85ab8cbeb0de19dcbc81030b9b5b81 | [
"MIT"
]
| 6 | 2018-09-11T15:30:10.000Z | 2020-01-14T17:29:07.000Z | week/templatetags/sidebar_data.py | uno-isqa-8950/fitgirl-inc | 2656e7340e85ab8cbeb0de19dcbc81030b9b5b81 | [
"MIT"
]
| 722 | 2018-08-29T17:27:38.000Z | 2022-03-11T23:28:33.000Z | week/templatetags/sidebar_data.py | uno-isqa-8950/fitgirl-inc | 2656e7340e85ab8cbeb0de19dcbc81030b9b5b81 | [
"MIT"
]
| 13 | 2018-08-29T07:42:01.000Z | 2019-04-21T22:34:30.000Z | from django import template
from week.models import SidebarContentPage,SidebarImagePage
register = template.Library()
@register.inclusion_tag('week/announcement.html')
def sidebar():
sidebar_data = SidebarContentPage.objects.get()
return {'sidebar_data':sidebar_data}
@register.inclusion_tag('week/advertisement.html')
def sidebarimage():
sidebar_image = SidebarImagePage.objects.get()
return {'sidebar_image':sidebar_image} | 26.235294 | 59 | 0.784753 | 0 | 0 | 0 | 0 | 323 | 0.724215 | 0 | 0 | 78 | 0.174888 |
82d236c6e0b9c063b565077e0441849e2549c37e | 1,097 | py | Python | tests/functional/Hydro/AcousticWave/CSPH_mod_package.py | jmikeowen/Spheral | 3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
]
| 22 | 2018-07-31T21:38:22.000Z | 2020-06-29T08:58:33.000Z | tests/Hydro/AcousticWave/CSPH_mod_package.py | markguozhiming/spheral | bbb982102e61edb8a1d00cf780bfa571835e1b61 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
]
| 41 | 2020-09-28T23:14:27.000Z | 2022-03-28T17:01:33.000Z | tests/Hydro/AcousticWave/CSPH_mod_package.py | markguozhiming/spheral | bbb982102e61edb8a1d00cf780bfa571835e1b61 | [
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
]
| 7 | 2019-12-01T07:00:06.000Z | 2020-09-15T21:12:39.000Z | #-------------------------------------------------------------------------------
# A mock physics package to mess around with the CRKSPH corrections.
#-------------------------------------------------------------------------------
from Spheral1d import *
class CRKSPH_mod_package(Physics):
def __init__(self):
Physics.__init__(self)
return
def evaluateDerivatives(self, t, dt, db, state, derivs):
return
def dt(self, db, state, derivs, t):
return pair_double_string(1e100, "No vote")
def registerState(self, dt, state):
return
def registerDerivatives(self, db, derivs):
return
def label(self):
return "CRKSPH_mod_package"
def initialize(self, t, dt, db, state, derivs):
# Grab the CRKSPH arrays.
A0_fl = state.scalarFields(HydroFieldNames.A0_CRKSPH)
A_fl = state.scalarFields(HydroFieldNames.A_CRKSPH)
B_fl = state.vectorFields(HydroFieldNames.B_CRKSPH)
A0 = A0_fl[0]
A = A_fl[0]
B = B_fl[0]
print "A", A.internalValues()
return
| 26.756098 | 80 | 0.539654 | 839 | 0.764813 | 0 | 0 | 0 | 0 | 0 | 0 | 285 | 0.259799 |
82d391d63340bb25ffc76c9865651669de389703 | 8,452 | py | Python | fbm-scraper.py | cbdelavenne/fb-messenger-media-scraper | ff4ed228f3520f208e048e34ae24d7576b0089bc | [
"MIT"
]
| 8 | 2019-11-23T17:45:11.000Z | 2021-05-27T10:41:47.000Z | fbm-scraper.py | cbdelavenne/fb-messenger-media-scraper | ff4ed228f3520f208e048e34ae24d7576b0089bc | [
"MIT"
]
| 10 | 2019-11-23T17:41:22.000Z | 2022-01-03T11:10:50.000Z | fbm-scraper.py | cbdelavenne/fb-messenger-media-scraper | ff4ed228f3520f208e048e34ae24d7576b0089bc | [
"MIT"
]
| 4 | 2020-03-21T23:24:40.000Z | 2022-02-20T10:40:38.000Z | import os
import requests
import time
import uuid
import configparser
import datetime
import fbchat
import re
from fbchat import Client, ImageAttachment
from fbchat import FBchatException
from pathlib import Path
politeness_index = 0.5 # ;)
epoch = datetime.datetime(1970, 1, 1)
# Hack to get the login to work, see: https://github.com/fbchat-dev/fbchat/issues/615#issuecomment-716089816
fbchat._state.FB_DTSG_REGEX = re.compile(r'"name":"fb_dtsg","value":"(.*?)"')
def download_file_from_url(url, target_path):
"""
Download image from a given URL to a specified target path.
:param url: URL of file to download
:param target_path: Local target path to save the file
:type url: str
:type target_path: str
"""
if url is not None:
r = requests.get(url)
with open(target_path, 'wb') as f:
print('\tDownloading image to {path}'.format(path=target_path))
f.write(r.content)
def convert_date_to_epoch(date, as_int=True):
"""
Convert a given date string to epoch (int in milliseconds)
:param date: Date string (preferred format %Y-%m-%d)
:param as_int: Return unix timestamp as an integer value, instead of a float
:type date: str
    :type as_int: bool
    :return: int or float, or None if the date cannot be parsed
"""
try:
dt = datetime.datetime.strptime(date, '%Y-%m-%d')
res = ((dt - epoch).total_seconds() * 1000.0) # convert to milliseconds
return int(res) if as_int else res
except ValueError:
return None
def convert_epoch_to_datetime(timestamp, dt_format='%Y-%m-%d_%H.%M.%S'):
"""
Convert epoch (unix time in ms) to a datetime string
:param timestamp: Unix time in ms
:param dt_format: Format of datetime string
:type timestamp: str
:type dt_format: str
    :return: str
"""
s = int(timestamp) / 1000.0
dt_str = datetime.datetime.fromtimestamp(s).strftime(dt_format)
return dt_str
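# Worked example: convert_date_to_epoch('2020-01-01') -> 1577836800000 ms,
# treating the date as UTC-like (naive datetime minus the 1970 epoch). Note
# that convert_epoch_to_datetime uses datetime.fromtimestamp, which applies
# the local timezone, so the round trip only matches exactly on a UTC machine.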
if __name__ == '__main__':
config_path = Path('.') / 'config.ini'
if os.path.exists(config_path) is False:
raise Exception("Please create config.ini under this script's current directory")
# Load config file
config = configparser.ConfigParser()
config.read(config_path)
download_path = config.get('Download', 'path')
if os.path.exists(download_path) is False:
raise Exception("The path specified in download_path does not exist ({path}). Please specify a valid path in "
"config.ini".format(path=download_path))
# Initialize FB Client
fb_email = config.get('Credentials', 'email')
fb_pw = config.get('Credentials', 'password')
user_agent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36"
fb_client = Client(fb_email, fb_pw, user_agent=user_agent)
# Search for latest threads
thread_search_limit = int(config.get('Threads', 'search_limit'))
thread_search_before = convert_date_to_epoch(config.get('Threads', 'before_date'))
if thread_search_before is not None:
threads = fb_client.fetchThreadList(limit=thread_search_limit, before=thread_search_before)
else:
threads = fb_client.fetchThreadList(limit=thread_search_limit)
# Find correct thread for given user URL
my_thread = None
friend_url = config.get('Friend', 'url')
for thread in threads:
if hasattr(thread, 'url') and (thread.url == friend_url):
my_thread = thread
break
# Get Messages for my_thread
if my_thread is not None:
thread_message_count = my_thread.message_count
thread_message_name = my_thread.name
print('Found {count} messages in thread with {friend_name}'.format(count=thread_message_count,
friend_name=thread_message_name))
message_before_date = config.get('Messages', 'before_date')
message_search_limit = int(config.get('Messages', 'search_limit'))
message_search_before = convert_date_to_epoch(message_before_date)
if message_search_limit > thread_message_count:
message_search_limit = thread_message_count
print('\tWarning: Message search limit was greater than the total number of messages in thread.\n')
if message_search_before is not None:
messages = fb_client.fetchThreadMessages(my_thread.uid, limit=message_search_limit,
before=message_search_before)
print('Searching for images in the {message_limit} messages sent before {before_date}...'.format(
message_limit=message_search_limit, before_date=message_before_date))
else:
messages = fb_client.fetchThreadMessages(my_thread.uid, limit=message_search_limit)
print('Searching for images in the last {message_limit} messages...'.format(
message_limit=message_search_limit))
sender_id = None
if config.getboolean('Media', 'sender_only'):
sender_id = my_thread.uid
print('\tNote: Only images sent by {friend_name} will be downloaded (as specified by sender_only in your '
'config.ini)'.format(friend_name=thread_message_name))
# Extract Image attachments' full-sized image signed URLs (along with their original file extension)
total_count = 0
skip_count = 0
full_images = []
last_message_date = None
print('\n')
extension_blacklist = str.split(config.get('Media', 'ext_blacklist'), ',')
for message in messages:
message_datetime = convert_epoch_to_datetime(message.timestamp)
if len(message.attachments) > 0:
if (sender_id is None) or (sender_id == message.author):
for attachment in message.attachments:
if isinstance(attachment, ImageAttachment):
try:
attachment_ext = str.lower(attachment.original_extension)
if attachment_ext not in extension_blacklist:
full_images.append({
'extension': attachment_ext,
'timestamp': message_datetime,
'full_url': fb_client.fetchImageUrl(attachment.uid)
})
print('+', sep=' ', end='', flush=True)
else:
skip_count += 1
print('-', sep=' ', end='', flush=True)
total_count += 1
except FBchatException:
pass # ignore errors
last_message_date = message_datetime
# Download Full Images
if len(full_images) > 0:
images_count = len(full_images)
print('\n\nFound a total of {total_count} images. Skipped {skip_count} images that had a blacklisted '
'extension'.format(total_count=total_count, skip_count=skip_count))
print('Attempting to download {count} images...................\n'.format(count=images_count))
for full_image in full_images:
friend_name = str.lower(my_thread.name).replace(' ', '_')
file_uid = str(uuid.uuid4())
file_ext = full_image['extension']
file_timestamp = full_image['timestamp']
img_url = full_image['full_url']
image_path = ''.join([download_path, '\\', 'fb-image-', file_uid, '-', friend_name, '-',
file_timestamp, '.', file_ext])
download_file_from_url(img_url, image_path)
# Sleep half a second between file downloads to avoid getting flagged as a bot
time.sleep(politeness_index)
else:
print('No images to download in the last {count} messages'.format(count=message_search_limit))
# Reminder of last message found
print('\nLast message scanned for image attachments was dated: {last_message_date}'.format(
last_message_date=last_message_date))
else:
print('Thread not found for URL provided')
| 41.229268 | 139 | 0.614411 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,710 | 0.320634 |
82d3afd1c39a5492eb62a1c160ebc7e3bbf21e20 | 1,565 | py | Python | guru/users/models.py | Jeromeschmidt/Guru | 3128a539e55b46afceb33b59c0bafaec7e9f630a | [
"MIT"
]
| null | null | null | guru/users/models.py | Jeromeschmidt/Guru | 3128a539e55b46afceb33b59c0bafaec7e9f630a | [
"MIT"
]
| 1 | 2021-02-26T02:49:34.000Z | 2021-02-26T02:49:34.000Z | guru/users/models.py | Jeromeschmidt/Guru | 3128a539e55b46afceb33b59c0bafaec7e9f630a | [
"MIT"
]
| 1 | 2020-02-24T18:09:00.000Z | 2020-02-24T18:09:00.000Z | from django.contrib.auth.models import AbstractUser
from django.db.models import (BooleanField, CASCADE, CharField, FloatField,
IntegerField, ManyToManyField, Model,
OneToOneField, PositiveSmallIntegerField)
from django.contrib.postgres.fields import ArrayField
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
class User(AbstractUser):
# First Name and Last Name do not cover name patterns
# around the globe.
name = CharField(_("Name of User"), blank=True, max_length=255)
# is_customer = BooleanField(default=True) #
# user = OneToOneField(User, on_delete=CASCADE, primary_key=True)
skills = ArrayField(CharField(max_length=10, blank=True),
size=8, null=True,
)
# ArrayField(_("A list of skills that user can help with"), null=True,
# base_field=CharField(max_length=255))
classes_taken = ArrayField(null=True,
base_field=CharField(max_length=255),
size=20)
is_teachingassistant = BooleanField(default=False)
rating = IntegerField(null=True, blank=True)
avg_reponse = FloatField(null=True, blank=True)
is_online = BooleanField(default=False)
messages_received = IntegerField(null=True, blank=True)
bio = CharField(blank=True, max_length=500)
def get_absolute_url(self):
return reverse("users:detail", kwargs={"username": self.username})
| 44.714286 | 75 | 0.676677 | 1,108 | 0.707987 | 0 | 0 | 0 | 0 | 0 | 0 | 348 | 0.222364 |
82d3d58b46fde9d57d6d1387e15cc36141a10208 | 7,676 | py | Python | movie.py | jmclinn/mapdraw | bdbddb164a82a3cf9b2673006caae4274948a420 | [
"MIT"
]
| null | null | null | movie.py | jmclinn/mapdraw | bdbddb164a82a3cf9b2673006caae4274948a420 | [
"MIT"
]
| null | null | null | movie.py | jmclinn/mapdraw | bdbddb164a82a3cf9b2673006caae4274948a420 | [
"MIT"
]
| null | null | null | import os,time
## File Variable (USER INPUT)
## ==========================
## if multiple files are being accessed to create movie...
## ...specify the beginning and ending of the file names...
## ...and the date list text file in the variables below
## Please use True or False to set whether multiple files will be accessed for movie
file_is_variable = False
## If file_is_variable = True
## --------------------------
## make sure to leave trailing slash '/' on 'path_to_files'
path_to_files = '/path/to/files/'
## For series of files with similar prefixes (file_part1) and filetypes (file_part2)
file_part1 = 'pre.fixes.'
file_part2 = '.nc'
## location of file listing (with each entry on a new line) the variable part of the filename
dates_list_text_file = '/path/to/file/variable_list.txt'
## If file_is_variable = False
## ---------------------------
#file = '/path/to/single/file.nc'
file = '/Users/Jon/Documents/other_projects/Aluie/visuals/1-12/mapdraw/sgs.nc'
## Variables (USER INPUT)
## ======================
## all variable lists must be the same length
## set unused variables equal to '_empty_'
## if variable requires double-quotes on command line include them --> '" ... "'
## -----------------------------------------------------------------------------
data = 'sgsflux' #cannot be '_empty_'
lat = 'u_lat' #cannot be '_empty_'
lon = 'u_lon' #cannot be '_empty_'
depth = 'w_dep,9' #cannot be '_empty_'
mask = '-1e33,#000000'
maxr = '100' #use for 'max'
minr = '-100' #use for 'min'
norm = '_empty_'
colors = '"0:#0000AA,45:#0000FF,50:#FFFFFF,55:#FF0000,100:#AA0000"'
clr_min_max = '_empty_'
title = '_empty_'
crop = '_empty_'
lines = '_empty_'
## Sphere (for mapping onto Earth's spherical representation)
## ----------------------------------------------------------
## For use of 'sphere' set to True. If not leave False.
sphere_mapping = False
## Number of images (must match other variable list lengths from above)
sphere_frames = 3
## Start and stop points of sphere rotation (leave start/stop the same for no rotation in lat/lon)
sphere_lon_start = -10
sphere_lon_stop = 10
sphere_lat_start = -10
sphere_lat_stop = 10
## 'zoom' argument described in README file (leave False if zoom = 1)
zoom = 1.5
## Primary Variable (USER INPUT)
## =============================
## choose from the variables above
## specify without quotes
## if not a list will only output single result
## --------------------------------------------
primary_variable = file
## Save Location (USER INPUT)
## ==========================
## provide folder location (without filename(s))
## ---------------------------------------------
save = '/Users/Jon/Desktop/'
## Image Filename Prefix (USER INPUT)
## ==================================
## prefix for output filenames before auto-incremented counter
## -----------------------------------------------------------
file_prefix = 'img_'
## Image Counter Start (USER INPUT)
## ================================
## start of auto-incremented counter
## ---------------------------------
count_start = 0
## Image File Type (USER INPUT)
## ============================
## ex: '.png' or '.jpg'
## --------------------
img_type = '.png'
## Display Toggle (USER INPUT)
## ==========================
## toggle if each image displays in the loop
## use 'yes' or 'no' to control display preference
## -----------------------------------------------
display = 'no'
# # # # # # # # # # # # # # # # # # # # # # # # #
# ---- NO USER INPUTS AFTER THIS POINT ---- #
# # # # # # # # # # # # # # # # # # # # # # # # #
## If 'file' is variable this establishes list of files to loop through (Do Not Alter)
## ===================================================================================
if file_is_variable:
file1 = []
file0 = open(dates_list_text_file,'r').read().splitlines()
for line in file0:
file1.append(str(path_to_files) + str(file_part1) + str(line) + str(file_part2))
file = file1
primary_variable = file
## Parsing of 'sphere' rotation inputs (Do Not Alter)
## ==================================================
if sphere_mapping:
lon_step = ( sphere_lon_stop - sphere_lon_start ) / ( sphere_frames - 1 )
lat_step = ( sphere_lat_stop - sphere_lat_start ) / ( sphere_frames - 1 )
sphere = []
for i in range(sphere_frames):
sphere.append(str(sphere_lon_start + lon_step * i)+','+str(sphere_lat_start + lat_step * i))
primary_variable = sphere
## Defining & Executing Command Expression (Do Not Alter)
## ======================================================
displayx = 'display ' + display
base_command = displayx
if title != '_empty_':
    titlex = ' title ' + str(title)
    base_command = base_command + titlex
if lines != '_empty_':
    linesx = ' lines ' + str(lines)
    base_command = base_command + linesx
if type(primary_variable) is list:
loop_len = len(primary_variable)
else:
loop_len = 1
for i in range(loop_len):
    # Rebuild the command from the base each frame so that per-frame arguments
    # do not accumulate across iterations of the loop.
    command = base_command
    savex = ' save ' + str(save) + str(file_prefix) + str(i + int(count_start)) + str(img_type)
    command = command + savex
if type(file) is list:
filei = file[i]
else:
filei = file
    if filei != '_empty_':
filex = ' file ' + str(filei)
command = command + filex
if type(data) is list:
datai = data[i]
else:
datai = data
if datai != '_empty_':
datax = ' data ' + str(datai)
command = command + datax
if type(lat) is list:
lati = lat[i]
else:
lati = lat
if lati != '_empty_':
latx = ' lat ' + str(lati)
command = command + latx
if type(lon) is list:
loni = lon[i]
else:
loni = lon
if loni != '_empty_':
lonx = ' lon ' + str(loni)
command = command + lonx
if type(depth) is list:
depthi = depth[i]
else:
depthi = depth
if depthi != '_empty_':
depthx = ' depth ' + str(depthi)
command = command + depthx
if type(mask) is list:
maski = mask[i]
else:
maski = mask
if maski != '_empty_':
maskx = ' mask ' + str(maski)
command = command + maskx
if type(maxr) is list:
maxri = maxr[i]
else:
maxri = maxr
if maxri != '_empty_':
maxrx = ' max ' + str(maxri)
command = command + maxrx
if type(minr) is list:
minri = minr[i]
else:
minri = minr
if minri != '_empty_':
minrx = ' min ' + str(minri)
command = command + minrx
if type(norm) is list:
normi = norm[i]
else:
normi = norm
if normi != '_empty_':
normx = ' norm ' + str(normi)
command = command + normx
if type(crop) is list:
cropi = crop[i]
else:
cropi = crop
if cropi != '_empty_':
cropx = ' crop ' + str(cropi)
command = command + cropx
if type(colors) is list:
colorsi = colors[i]
else:
colorsi = colors
if colorsi != '_empty_':
colorsx = ' colors ' + str(colorsi)
command = command + colorsx
if type(clr_min_max) is list:
clr_min_maxi = clr_min_max[i]
else:
clr_min_maxi = clr_min_max
if clr_min_maxi != '_empty_':
clr_min_maxx = ' clr_min_max ' + str(clr_min_maxi)
command = command + clr_min_maxx
if sphere_mapping:
spherei = sphere[i]
spherex = ' sphere ' + str(spherei)
command = command + spherex
if type(zoom) is list:
zoomi = zoom[i]
elif zoom:
zoomi = zoom
if zoom:
zoomx = ' zoom ' + str(zoomi)
command = command + zoomx
time0 = time.time()
os.system('python map.py ' + command)
if display == 'no':
        print(str(i) + ' - ' + str(round((time.time() - time0),2)) + ' sec')
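## Example of a single generated call (paths and values are illustrative,
## assuming the settings above):
##   python map.py display no save /Users/Jon/Desktop/img_0.png
##     file /Users/Jon/Documents/.../sgs.nc data sgsflux lat u_lat lon u_lon
##     depth w_dep,9 mask -1e33,#000000 max 100 min -100
##     colors "0:#0000AA,45:#0000FF,50:#FFFFFF,55:#FF0000,100:#AA0000" zoom 1.5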
82d45629fe3b78bf615a134ee2b08fe22d31ec28 | 4,544 | py | Python | gaetk2/tools/auth0tools.py | mdornseif/appengine-toolkit2 | 47ee6bf99b8e461ee64eae75bf24fb462d99b0ab | [
"MIT"
]
| 1 | 2018-08-16T16:15:30.000Z | 2018-08-16T16:15:30.000Z | gaetk2/tools/auth0tools.py | mdornseif/appengine-toolkit2 | 47ee6bf99b8e461ee64eae75bf24fb462d99b0ab | [
"MIT"
]
| 3 | 2018-08-14T09:52:11.000Z | 2021-12-13T19:54:07.000Z | gaetk2/tools/auth0tools.py | mdornseif/appengine-toolkit2 | 47ee6bf99b8e461ee64eae75bf24fb462d99b0ab | [
"MIT"
]
| 1 | 2018-09-28T05:55:27.000Z | 2018-09-28T05:55:27.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""gaetk2.tools.auth0.py Tools for working with auth0
Created by Maximillian Dornseif on 2017-12-05.
Copyright 2017 HUDROA. MIT Licensed.
"""
from __future__ import unicode_literals
import logging
from google.appengine.api import memcache
from auth0.v3.authentication import GetToken
from auth0.v3.exceptions import Auth0Error
from auth0.v3.management import Auth0
from gaetk2.config import gaetkconfig
logger = logging.getLogger(__name__)
def get_auth0_access_token():
"""Get a Token for the Management-API."""
ret = memcache.get('get_auth0_access_token()')
if not ret:
assert gaetkconfig.AUTH0_DOMAIN != '*unset*'
assert gaetkconfig.AUTH0_CLIENT_ID != '*unset*'
get_token = GetToken(gaetkconfig.AUTH0_DOMAIN)
token = get_token.client_credentials(
gaetkconfig.AUTH0_CLIENT_ID,
gaetkconfig.AUTH0_CLIENT_SECRET,
'https://{}/api/v2/'.format(gaetkconfig.AUTH0_DOMAIN))
ret = token['access_token']
memcache.set('get_auth0_access_token()', ret, token['expires_in'] / 2)
return ret
def create_from_credential(credential):
"""Create an entry in the Auth0.DefaultDatabase for a credential."""
if credential.external_uid:
return
if not credential.secret:
return
if not credential.email:
return
if not getattr(credential, 'name', None):
credential.name = credential.text
if not getattr(credential, 'name', None):
credential.name = credential.org_designator
auth0api = Auth0(gaetkconfig.AUTH0_DOMAIN, get_auth0_access_token())
payload = {
'connection': 'DefaultDatabase',
'email': credential.email,
'password': credential.secret,
'user_id': credential.uid,
'user_metadata': {
'name': credential.name,
'nickname': 'User fuer {}'.format(credential.org_designator)
},
'email_verified': True,
'verify_email': False,
'app_metadata': {
'org_designator': credential.org_designator,
'permissions': credential.permissions,
}
}
newuser = None
try:
newuser = auth0api.users.create(payload)
except Auth0Error as ex:
if ex.status_code in [400, 409] and ex.message == 'The user already exists.':
logger.info('The user already exists: %s %r %s', credential.uid, ex, payload)
try:
newuser = auth0api.users.get('auth0|{}'.format(credential.uid))
            except Exception:
logger.warn('email collision? %s', credential.uid)
                # probably we have an e-mail address collision, i.e. several
                # credentials sharing the same e-mail address.
reply = auth0api.users.list(
connection='DefaultDatabase',
q='email:"{}"'.format(credential.email),
search_engine='v2')
if reply['length'] > 0:
logger.info('reply=%s', reply)
other_uid = reply['users'][0]['user_id']
newuser = auth0api.users.get(other_uid)
            # record the duplicate assignment at Auth0
if newuser.get('app_metadata'):
logger.debug('app_metadata=%r', newuser['app_metadata'])
altd = newuser['app_metadata'].get('org_designator_alt', [])
altd = list(set(altd + [credential.org_designator]))
altu = newuser['app_metadata'].get('uid_alt', [])
altu = list(set(altu + [credential.uid]))
logger.warn('updating duplicate Auth0 %s %s %s %s', altd, altu, other_uid, newuser)
auth0api.users.update(
other_uid,
{'app_metadata': {'org_designator_alt': altd,
'uid_alt': altu}})
else:
logger.error('%r newuser = %s %s', 'auth0|{}'.format(credential.uid), newuser, ex)
raise
    except Exception:
logger.warn('payload = %s', payload)
raise
if newuser is None or (newuser.get('error')):
logger.warn('reply=%s payload = %s', newuser, payload)
        raise RuntimeError('Auth0 error: %s' % newuser)
logger.info('new auth0 user %s', newuser)
credential.meta['auth0_user_id'] = credential.external_uid = newuser['user_id']
credential.put()
return
| 39.172414 | 107 | 0.590889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,220 | 0.268486 |
82d6583dc3d6537a4f4d2769235a1441edc42642 | 705 | py | Python | Q56MergeIntervals.py | ChenliangLi205/LeetCode | 6c547c338eb05042cb68f57f737dce483964e2fd | [
"MIT"
]
| null | null | null | Q56MergeIntervals.py | ChenliangLi205/LeetCode | 6c547c338eb05042cb68f57f737dce483964e2fd | [
"MIT"
]
| null | null | null | Q56MergeIntervals.py | ChenliangLi205/LeetCode | 6c547c338eb05042cb68f57f737dce483964e2fd | [
"MIT"
]
| null | null | null | # Definition for an interval.
# class Interval:
# def __init__(self, s=0, e=0):
# self.start = s
# self.end = e
class Solution:
def merge(self, intervals):
"""
:type intervals: List[Interval]
:rtype: List[Interval]
"""
if len(intervals) <= 1:
return intervals
intervals.sort(key=lambda x: x.start)
newIntervals = [intervals[0]]
for i in range(1, len(intervals)):
cur = intervals[i]
last = newIntervals[-1]
if cur.start > last.end:
newIntervals.append(cur)
else:
last.end = max(cur.end, last.end)
return newIntervals
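# Complexity note: the sort dominates at O(n log n); the merge scan itself is
# linear and the output list holds at most n intervals.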
| 28.2 | 49 | 0.520567 | 571 | 0.809929 | 0 | 0 | 0 | 0 | 0 | 0 | 213 | 0.302128 |
82d79ad0214596b7ecad4fe78d6e48cdeddf92f7 | 843 | py | Python | .github/scripts/check-status.py | antmicro/f4pga-arch-defs | dac6ffd8890227ea541ee892549e41c68588ad99 | [
"ISC"
]
| null | null | null | .github/scripts/check-status.py | antmicro/f4pga-arch-defs | dac6ffd8890227ea541ee892549e41c68588ad99 | [
"ISC"
]
| 78 | 2022-03-01T19:40:20.000Z | 2022-03-31T19:56:24.000Z | .github/scripts/check-status.py | antmicro/f4pga-arch-defs | dac6ffd8890227ea541ee892549e41c68588ad99 | [
"ISC"
]
| null | null | null | #!/usr/bin/env python3
from sys import argv
from pathlib import Path
from re import compile as re_compile
PACKAGE_RE = re_compile("symbiflow-arch-defs-([a-zA-Z0-9_-]+)-([a-z0-9])")
with (Path(__file__).parent.parent.parent / 'packages.list').open('r') as rptr:
for artifact in rptr.read().splitlines():
m = PACKAGE_RE.match(artifact)
assert m, f"Package name not recognized! {artifact}"
package_name = m.group(1)
if package_name == "install":
            package_name = "toolchain"
with (Path("install") /
f"symbiflow-{package_name}-latest").open("w") as wptr:
wptr.write(
'https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/'
f'foss-fpga-tools/symbiflow-arch-defs/continuous/install/{argv[1]}/{artifact}'
)
| 35.125 | 94 | 0.622776 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 343 | 0.40688 |
82d83bbbc397d5fb8c89450eac58244503912c31 | 500 | py | Python | DocOCR/urls.py | trangnm58/DocOCR | 7ec6087323cf2d06906878c55be236fb1950ce57 | [
"Apache-2.0"
]
| null | null | null | DocOCR/urls.py | trangnm58/DocOCR | 7ec6087323cf2d06906878c55be236fb1950ce57 | [
"Apache-2.0"
]
| null | null | null | DocOCR/urls.py | trangnm58/DocOCR | 7ec6087323cf2d06906878c55be236fb1950ce57 | [
"Apache-2.0"
]
| null | null | null | from django.conf.urls import url, include
urlpatterns = [
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api/viet_ocr/', include('viet_ocr.api.urls', namespace="viet_ocr-api")),
url(r'^api/post_process/', include('post_process.api.urls', namespace="post_process-api")),
url(r'^api/pre_process/', include('pre_process.api.urls', namespace="pre_process-api")),
url(r'^api/doc_ocr/', include('doc_ocr.api.urls', namespace="doc_ocr-api")),
]
| 45.454545 | 95 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 268 | 0.536 |
82d9c382128c028bc583ab744d986723b6f36dd9 | 839 | py | Python | utils/neuron/models/metrics/multi_task_metrics.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
]
| 12 | 2021-07-27T07:18:24.000Z | 2022-03-09T13:52:20.000Z | utils/neuron/models/metrics/multi_task_metrics.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
]
| 2 | 2021-08-03T09:21:33.000Z | 2021-12-29T14:25:30.000Z | utils/neuron/models/metrics/multi_task_metrics.py | tsingqguo/ABA | c32edbbe5705b0332a08951b5ee436b5f58c2e70 | [
"MIT"
]
| 3 | 2021-11-18T14:46:40.000Z | 2022-01-03T15:47:23.000Z | import torch
import torch.nn as nn
import neuron.ops as ops
from neuron.config import registry
@registry.register_module
class ReID_Metric(nn.Module):
def __init__(self, metric_cls, metric_rank):
super(ReID_Metric, self).__init__()
self.metric_cls = metric_cls
self.metric_rank = metric_rank
def forward(self, *args):
if len(args) == 2:
scores = None
feats, labels = args
elif len(args) == 3:
scores, feats, labels = args
else:
            raise ValueError('Expected to have 2 or 3 inputs, '
                             'but got {}'.format(len(args)))
metrics = self.metric_rank(feats, labels)
if scores is not None:
metrics.update(self.metric_cls(scores, labels))
return metrics
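# Usage sketch (illustrative; metric_cls / metric_rank are assumed callables):
#   metric = ReID_Metric(metric_cls=cls_metric, metric_rank=rank_metric)
#   metrics = metric(feats, labels)           # ranking metrics only
#   metrics = metric(scores, feats, labels)   # ranking + classification metrics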
| 27.064516 | 62 | 0.582837 | 714 | 0.851013 | 0 | 0 | 740 | 0.882002 | 0 | 0 | 45 | 0.053635 |
82da6f9da92810cdbbf538013279acc8cf63197f | 1,028 | py | Python | lldb/packages/Python/lldbsuite/test/functionalities/data-formatter/data-formatter-synthval/myIntSynthProvider.py | medismailben/llvm-project | e334a839032fe500c3bba22bf976ab7af13ce1c1 | [
"Apache-2.0"
]
| 2,338 | 2018-06-19T17:34:51.000Z | 2022-03-31T11:00:37.000Z | packages/Python/lldbsuite/test/functionalities/data-formatter/data-formatter-synthval/myIntSynthProvider.py | DalavanCloud/lldb | e913eaf2468290fb94c767d474d611b41a84dd69 | [
"Apache-2.0"
]
| 3,740 | 2019-01-23T15:36:48.000Z | 2022-03-31T22:01:13.000Z | packages/Python/lldbsuite/test/functionalities/data-formatter/data-formatter-synthval/myIntSynthProvider.py | DalavanCloud/lldb | e913eaf2468290fb94c767d474d611b41a84dd69 | [
"Apache-2.0"
]
| 500 | 2019-01-23T07:49:22.000Z | 2022-03-30T02:59:37.000Z | class myIntSynthProvider(object):
def __init__(self, valobj, dict):
self.valobj = valobj
self.val = self.valobj.GetChildMemberWithName("theValue")
def num_children(self):
return 0
def get_child_at_index(self, index):
return None
def get_child_index(self, name):
return None
def update(self):
return False
def has_children(self):
return False
def get_value(self):
return self.val
class myArraySynthProvider(object):
def __init__(self, valobj, dict):
self.valobj = valobj
self.array = self.valobj.GetChildMemberWithName("array")
def num_children(self, max_count):
if 16 < max_count:
return 16
return max_count
def get_child_at_index(self, index):
return None # Keep it simple when this is not tested here.
def get_child_index(self, name):
return None # Keep it simple when this is not tested here.
def has_children(self):
return True
| 22.844444 | 67 | 0.641051 | 1,024 | 0.996109 | 0 | 0 | 0 | 0 | 0 | 0 | 109 | 0.106031 |
82da9d5e6799fe68c63757266b57886cf2eb5dae | 3,198 | py | Python | incremental-update.py | tarasowski/apache-spark | e42d6abe5fa08ff1e231d16169efaed0e01fc4a9 | [
"MIT"
]
| 1 | 2019-08-13T09:17:19.000Z | 2019-08-13T09:17:19.000Z | incremental-update.py | tarasowski/apache-spark | e42d6abe5fa08ff1e231d16169efaed0e01fc4a9 | [
"MIT"
]
| null | null | null | incremental-update.py | tarasowski/apache-spark | e42d6abe5fa08ff1e231d16169efaed0e01fc4a9 | [
"MIT"
]
| null | null | null | from pyspark.sql import SparkSession
from pyspark.sql.types import DateType
from pyspark.sql.functions import col
from pyspark.sql import types as t
import sys
from pyspark.sql.window import Window
from pyspark.sql.functions import spark_partition_id
from pyspark.sql import Row
def show_partition_id(df):
return df.select(*df.columns, spark_partition_id().alias("partition_id")).show()
spark = SparkSession \
.builder \
.appName("Python Spark SQL basic example") \
.config("spark.some.config.option", "some-value") \
.getOrCreate()
# https://dwbi.org/pages/75/methods-of-incremental-loading-in-data-warehouse
customers = [
Row(1, "John", "Individual", "22-Mar-2012"),
Row(2, "Ryan", "Individual", "22-Mar-2012"),
Row(3, "Bakers", "Corporate", "23-Mar-2012"),
]
sales = [
Row(1, 1, "White sheet (A4)", 100, 4.00, "22-Mar-2012"),
Row(2, 1, "James Clip (Box)", 1, 2.50, "22-Mar-2012"),
Row(3, 2, "Whiteboard Maker", 1, 2.00, "22-Mar-2012"),
Row(4, 3, "Letter Envelop", 200, 75.00, "23-Mar-2012"),
Row(5, 1, "Paper Clip", 12, 4.00, "23-Mar-2012"),
]
batch = [
Row(1, "22-Mar-2012", "Success"),
]
customersDF = spark.createDataFrame(customers, schema=["customer_id", "customer_name", "type", "entry_date"])
salesDF = spark.createDataFrame(sales, schema=["id", "customer_id", "product_description", "qty", "revenue", "sales_date"])
batchDF = spark.createDataFrame(batch, schema=["batch_id", "loaded_untill", "status"])
customersDF.createOrReplaceTempView("customers")
salesDF.createOrReplaceTempView("sales")
batchDF.createOrReplaceTempView("batch")
_23_march_customers = spark.sql("""
select t.*
from customers t
where t.entry_date > (select nvl(
max(b.loaded_untill),
                                    to_date("01-01-1900", "MM-dd-yyyy")
)
from batch b
where b.status = "Success")
""")
_23_march_sales = spark.sql("""
select t.*
from sales t
where t.sales_date > (select nvl(
max(b.loaded_untill),
                                    to_date("01-01-1900", "MM-dd-yyyy")
)
from batch b
where b.status = "Success")
""")
print("customers table")
_23_march_customers.show()
print("sales table")
_23_march_sales.show()
# Incremental Data Load Patterns
# https://www.youtube.com/watch?v=INuucWEg3sY
# 1) Stage / left Outer Join (moving to another server, make a staging and left join, check null on right table, you know this data is new)
# 2) Control Table
#    columns: Load Id | Cust Table | Table Id | Date
# 3) Change Data Capture
# Source based incremental loading
# https://support.timextender.com/hc/en-us/articles/115001301963-How-incremental-loading-works
# The source table has a reliable natural or surrogate key and a reliable incremental field such as "ModifiedDateTime" or "TimeStamp"
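# Sketch of pattern 1 (stage / left outer join) using the toy tables above.
# "loaded" stands in for the already-loaded target table; in practice it would
# be the table on the destination server.
staged = salesDF.alias("s")
loaded = _23_march_sales.alias("t")
new_rows = (staged.join(loaded, col("s.id") == col("t.id"), "left_outer")
                  .where(col("t.id").isNull())  # null on the right => not loaded yet
                  .select("s.*"))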
| 35.932584 | 139 | 0.596936 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,973 | 0.616563 |
82dad9c48cf2ee5a8b767bdd94a5e6cdf8574098 | 116 | py | Python | asset/admin.py | shoaibsaikat/Django-Office-Management-BackEnd | bb8ec201e4d414c16f5bac1907a2641d80c5970a | [
"Apache-2.0"
]
| null | null | null | asset/admin.py | shoaibsaikat/Django-Office-Management-BackEnd | bb8ec201e4d414c16f5bac1907a2641d80c5970a | [
"Apache-2.0"
]
| null | null | null | asset/admin.py | shoaibsaikat/Django-Office-Management-BackEnd | bb8ec201e4d414c16f5bac1907a2641d80c5970a | [
"Apache-2.0"
]
| null | null | null | from django.contrib import admin
from .models import Asset
# Register your models here.
admin.site.register(Asset) | 19.333333 | 32 | 0.801724 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.241379 |
82dd697abb6c6bff11f04261d8e04916561eba16 | 360 | py | Python | instagram_api/response/send_confirm_email.py | Yuego/instagram_api | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | [
"MIT"
]
| 13 | 2019-08-07T21:24:34.000Z | 2020-12-12T12:23:50.000Z | instagram_api/response/send_confirm_email.py | Yuego/instagram_api | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | [
"MIT"
]
| null | null | null | instagram_api/response/send_confirm_email.py | Yuego/instagram_api | b53f72db36c505a2eb24ebac1ba8267a0cc295bb | [
"MIT"
]
| null | null | null | from .mapper import ApiResponse, ApiResponseInterface
from .mapper.types import Timestamp, AnyType
__all__ = ['SendConfirmEmailResponse']
class SendConfirmEmailResponseInterface(ApiResponseInterface):
title: AnyType
is_email_legit: AnyType
body: AnyType
class SendConfirmEmailResponse(ApiResponse, SendConfirmEmailResponseInterface):
pass
| 24 | 79 | 0.816667 | 215 | 0.597222 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.072222 |
82de56b86e1e73fa5d0bacfcbe9e4a18d9698647 | 1,256 | py | Python | webpages/views.py | 18praneeth/udayagiri-scl-maxo | 67ac939265d7837e39329162d7dd935a52130978 | [
"MIT"
]
| 8 | 2021-01-01T17:04:45.000Z | 2021-06-24T05:53:13.000Z | webpages/views.py | 18praneeth/udayagiri-scl-maxo | 67ac939265d7837e39329162d7dd935a52130978 | [
"MIT"
]
| 11 | 2021-01-01T15:04:04.000Z | 2021-01-10T07:47:12.000Z | webpages/views.py | 18praneeth/udayagiri-scl-maxo | 67ac939265d7837e39329162d7dd935a52130978 | [
"MIT"
]
| 7 | 2020-12-14T12:44:17.000Z | 2021-01-15T14:29:13.000Z | from django.shortcuts import render, redirect
from django.contrib import messages
from .models import Contact
from django.contrib.auth.decorators import login_required
def home(request):
if request.user.is_authenticated:
return render(request, 'webpages/home.html')
else:
return render(request, 'webpages/index.html')
def about(request):
return render(request, 'webpages/about.html')
@login_required
def team(request):
return render(request, 'webpages/team.html')
@login_required
def privacy(request):
return render(request, 'webpages/privacy.html')
@login_required
def license(request):
return render(request, 'webpages/license.html')
@login_required
def contact(request):
    if request.method == 'POST':
name = request.POST['name']
email = request.POST['email']
subject = request.POST['subject']
comment = request.POST['message']
message = Contact()
message.name = name
message.email = email
message.subject = subject
message.comments = comment
message.save()
messages.success(request, 'Your response is recorded')
return redirect('contact')
else:
return render(request, 'webpages/contact.html',{})
| 26.166667 | 62 | 0.680732 | 0 | 0 | 0 | 0 | 822 | 0.654459 | 0 | 0 | 218 | 0.173567 |
82df65585957bc89145bf1319aef1409ff095c3a | 3,281 | py | Python | src/pywbemReq/tupletree.py | sinbawang/smisarray | 698448c7661af1d1a4491e5aeb58825899aff710 | [
"MIT"
]
| 2 | 2019-03-13T14:02:45.000Z | 2020-02-21T02:20:47.000Z | src/pywbemReq/tupletree.py | Foglight/foglight-smis-storage-array-community-cartridge | 64c070e6c62c5c8c2052af2b402103f78d72a330 | [
"MIT"
]
| 1 | 2017-08-10T13:55:17.000Z | 2017-09-28T19:56:15.000Z | src/pywbemReq/tupletree.py | Foglight/foglight-smis-storage-array-community-cartridge | 64c070e6c62c5c8c2052af2b402103f78d72a330 | [
"MIT"
]
| null | null | null | #
# (C) Copyright 2003,2004 Hewlett-Packard Development Company, L.P.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# Author: Martin Pool <[email protected]>
#
"""
tupletree - Convert XML DOM objects to and from tuple trees.
DOM is the standard in-memory representation of XML documents, but it
is very cumbersome for some types of processing where XML encodes
object structures rather than text documents. Direct mapping to Python
classes may not be a good match either.
tupletrees may be created from an in-memory DOM using
dom_to_tupletree(), or from a string using xml_to_tupletree().
Since the Python XML libraries deal mostly with Unicode strings they
are also returned here. If plain Strings are passed in they will be
converted by xmldom.
Each node of the tuple tree is a Python 4-tuple, corresponding to an
XML Element (i.e. <tag>):
(NAME, ATTRS, CONTENTS, None)
The NAME is the name of the element.
The ATTRS are a name-value hash of element attributes.
The CONTENTS is a list of child elements.
The fourth element is reserved.
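For example (illustrative), the fragment
  <VALUE TYPE="string">abc</VALUE>
maps to the tuple tree:
  ('VALUE', {'TYPE': 'string'}, ['abc'], None)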
"""
import xml.dom.minidom
from pywbemReq.cim_types import is_text
__all__ = ['dom_to_tupletree', 'xml_to_tupletree']
def dom_to_tupletree(node):
"""Convert a DOM object to a pyRXP-style tuple tree.
Each element is a 4-tuple of (NAME, ATTRS, CONTENTS, None).
Very nice for processing complex nested trees.
"""
if node.nodeType == node.DOCUMENT_NODE:
# boring; pop down one level
return dom_to_tupletree(node.firstChild)
assert node.nodeType == node.ELEMENT_NODE
name = node.nodeName
attrs = {}
contents = []
for child in node.childNodes:
if child.nodeType == child.ELEMENT_NODE:
contents.append(dom_to_tupletree(child))
elif child.nodeType == child.TEXT_NODE:
assert is_text(child.nodeValue), \
"text node %s is not a string" % repr(child)
contents.append(child.nodeValue)
elif child.nodeType == child.CDATA_SECTION_NODE:
contents.append(child.nodeValue)
else:
raise RuntimeError("can't handle %s" % child)
for i in range(node.attributes.length):
attr_node = node.attributes.item(i)
attrs[attr_node.nodeName] = attr_node.nodeValue
    # XXX: Cannot yet handle XML comments, processing instructions and
    # other exotic node types.
# it's so easy in retrospect!
return name, attrs, contents, None
def xml_to_tupletree(xml_string):
"""Parse XML straight into tupletree."""
dom_xml = xml.dom.minidom.parseString(xml_string)
return dom_to_tupletree(dom_xml)
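# Usage sketch (illustrative):
#   xml_to_tupletree('<A B="1"><C/></A>')
#   -> ('A', {'B': '1'}, [('C', {}, [], None)], None)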
| 32.81 | 73 | 0.719902 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,139 | 0.651935 |
82e04d672370030e6dd5e6577a1aa78e567b3a27 | 1,723 | py | Python | src/Word.py | AlexandreLadriere/ColorfulWords | 48219337946639306a6854ec3b5d8814ce86d609 | [
"MIT"
]
| null | null | null | src/Word.py | AlexandreLadriere/ColorfulWords | 48219337946639306a6854ec3b5d8814ce86d609 | [
"MIT"
]
| null | null | null | src/Word.py | AlexandreLadriere/ColorfulWords | 48219337946639306a6854ec3b5d8814ce86d609 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python3
import unicodedata
class Word:
"""
Object representation for a word
Parameters
----------
text : str
word text
formatedText : str
word text without accent, punctuation, etc (UTF-8)
color : List of integers
pixel color values in rgb for the word - eg: [0, 255, 56]
"""
def __init__(self, text):
"""
Initialize a Word object with the given string
Parameters
----------
text : str
word text
"""
self.text = text
self.formatedText = self.__formatText()
@property
def color(self):
"""
Return a list of 3 values (RGB) corresponding to the color representation of the word
"""
alpha = "abcdefghijklmnopqrstuvwxyz" # alpha[1] = "b"
alphaPos = dict([ (x[1],x[0]) for x in enumerate(alpha) ]) # alphaPos["b"] = 1
colorValue = 0
for letter in self.formatedText:
if letter.isdigit():
colorValue += int(letter)
else:
colorValue += alphaPos[letter.lower()]
return [(colorValue * len(self.formatedText)) % 256, (colorValue * 2) % 256, (colorValue * 3 % 256)]
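    # Worked example (illustrative): for Word("Hello"), formatedText is "Hello";
    # the letter positions sum to 7+4+11+11+14 = 47, so color evaluates to
    # [(47*5) % 256, (47*2) % 256, (47*3) % 256] == [235, 94, 141].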
def __formatText(self):
"""
        Return the formatted word
"""
uniText = ''.join(e for e in self.text if e.isalnum()) # remove punctuation
uniText = ''.join(c for c in unicodedata.normalize('NFD', uniText)
if unicodedata.category(c) != 'Mn') # Remove accents and other special letter chars
uniText = uniText.replace("œ", "oe")
uniText = uniText.replace("ª", "a")
return uniText | 31.907407 | 108 | 0.546721 | 1,681 | 0.974493 | 0 | 0 | 618 | 0.358261 | 0 | 0 | 789 | 0.457391 |
82e0a5642e6f736fc7177658b00015f1cb62d455 | 2,605 | py | Python | LeetCode/Python3/DynamicProgramming/123. Best Time to Buy and Sell Stock III.py | WatsonWangZh/CodingPractice | dc057dd6ea2fc2034e14fd73e07e73e6364be2ae | [
"MIT"
]
| 11 | 2019-09-01T22:36:00.000Z | 2021-11-08T08:57:20.000Z | LeetCode/Python3/DynamicProgramming/123. Best Time to Buy and Sell Stock III.py | WatsonWangZh/LeetCodePractice | dc057dd6ea2fc2034e14fd73e07e73e6364be2ae | [
"MIT"
]
| null | null | null | LeetCode/Python3/DynamicProgramming/123. Best Time to Buy and Sell Stock III.py | WatsonWangZh/LeetCodePractice | dc057dd6ea2fc2034e14fd73e07e73e6364be2ae | [
"MIT"
]
| 2 | 2020-05-27T14:58:52.000Z | 2020-05-27T15:04:17.000Z | # Say you have an array for which the ith element is the price of a given stock on day i.
# Design an algorithm to find the maximum profit. You may complete at most two transactions.
# Note: You may not engage in multiple transactions at the same time
# (i.e., you must sell the stock before you buy again).
# Example 1:
# Input: [3,3,5,0,0,3,1,4]
# Output: 6
# Explanation: Buy on day 4 (price = 0) and sell on day 6 (price = 3), profit = 3-0 = 3.
# Then buy on day 7 (price = 1) and sell on day 8 (price = 4), profit = 4-1 = 3.
# Example 2:
# Input: [1,2,3,4,5]
# Output: 4
# Explanation: Buy on day 1 (price = 1) and sell on day 5 (price = 5), profit = 5-1 = 4.
# Note that you cannot buy on day 1, buy on day 2 and sell them later, as you are
# engaging multiple transactions at the same time. You must sell before buying again.
# Example 3:
# Input: [7,6,4,3,1]
# Output: 0
# Explanation: In this case, no transaction is done, i.e. max profit = 0.
class Solution(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
        # M1. Two greedy passes, one forward and one backward.
        # First, scan forward keeping the minimum price seen (buy) and record
        # the max profit up to and including day i; then scan backward keeping
        # the maximum price seen (sell) and record the max profit after day i
        # (exclusive). Note: only one transaction may be needed, so keep the
        # profit value from the first pass.
# if not prices:
# return 0
# # Record min-buy
# profits = [0]
# buy, profit = prices[0], 0
# for price in prices[1:]:
# buy = min(buy, price)
# profit = max(profit, price-buy)
# profits.append(profit)
# # Record max-sell - Note remember the value of profit
# sell = prices[-1]
# temp = 0
# for i in range(len(prices)-1, 0, -1):
# sell = max(sell, prices[i])
# temp = max(temp, sell - prices[i])
# profit = max(profit, temp + profits[i-1])
# return profit
# M2. DP
        # On day i there are 4 states: buy1/sell1 are the max profits while
        # holding / after selling the first purchase, and buy2/sell2 likewise
        # for the second. The state equations are:
# sell2[i] = max(sell2[i-1], buy2[i-1] + prices[i])
# buy2[i] = max(buy2[i-1], sell1[i-1] - prices[i])
# sell1[i] = max(sell1[i-1], buy1[i-1] + prices[i])
# buy1[i] = max(buy1[i-1], - prices[i])
buy1 = buy2 = float('-inf')
sell1 = sell2 = 0
for price in prices:
buy1 = max(buy1, -price)
sell1 = max(sell1, buy1 + price)
buy2 = max(buy2, sell1 - price)
sell2 = max(sell2, buy2 + price)
return sell2
| 36.180556 | 98 | 0.558925 | 1,945 | 0.66089 | 0 | 0 | 0 | 0 | 0 | 0 | 2,303 | 0.782535 |
82e0abe3e486e3352d2b626c47850728c42c4ae5 | 2,719 | py | Python | robot_con/baxter/baxter_client.py | takuya-ki/wrs | f6e1009b94332504042fbde9b39323410394ecde | [
"MIT"
]
| 23 | 2021-04-02T09:02:04.000Z | 2022-03-22T05:31:03.000Z | robot_con/baxter/baxter_client.py | takuya-ki/wrs | f6e1009b94332504042fbde9b39323410394ecde | [
"MIT"
]
| 35 | 2021-04-12T09:41:05.000Z | 2022-03-26T13:32:46.000Z | robot_con/baxter/baxter_client.py | takuya-ki/wrs | f6e1009b94332504042fbde9b39323410394ecde | [
"MIT"
]
| 16 | 2021-03-30T11:55:45.000Z | 2022-03-30T07:10:59.000Z | import robotconn.rpc.baxterrobot.baxter_server_pb2 as bxtsp
import robotconn.rpc.baxterrobot.baxter_server_pb2_grpc as bxtspgc
import grpc
import pickle
import numpy as np
class BaxterClient(object):
def __init__(self, host = "localhost:18300"):
channel = grpc.insecure_channel(host)
self.stub = bxtspgc.BaxterServerStub(channel)
def bxt_set_gripper(self, pos=100, armname = "rgt"):
self.stub.bxt_set_gripper(bxtsp.Gripper_pos_armname(pos=pos,armname=armname))
def bxt_get_gripper(self, armname="rgt"):
return self.stub.bxt_get_gripper(bxtsp.Armname(armname=armname))
def bxt_get_jnts(self, armname="rgt"):
jnts = pickle.loads(self.stub.bxt_get_jnts(bxtsp.Armname(armname=armname)).jnt_angles)
jnts = [jnts["right_s0"],jnts["right_s1"],jnts["right_e0"],jnts["right_e1"],jnts["right_w0"],jnts["right_w1"],jnts["right_w2"]] \
if armname == "rgt" else [jnts["left_s0"],jnts["left_s1"],jnts["left_e0"],jnts["left_e1"],jnts["left_w0"],jnts["left_w1"],jnts["left_w2"]]
jnts = [np.rad2deg(jnt) for jnt in jnts]
return jnts
def bxt_movejnts(self, jnt_angles= [], speed=.5, armname="rgt"):
self.stub.bxt_movejnts(bxtsp.Jnt_angles_armname(jnt_angles = np.array(jnt_angles,dtype="float").tobytes(),speed=speed,armname =armname))
def bxt_movejnts_cont(self, jnt_angles_list =[], speed=.2, armname="rgt"):
self.stub.bxt_movejnts_cont(bxtsp.Jnt_angles_armname(jnt_angles = np.array(jnt_angles_list,dtype="float").tobytes(),speed=speed,armname =armname))
def bxt_get_force(self,armname):
return np.frombuffer(self.stub.bxt_get_force(bxtsp.Armname(armname=armname)).list).tolist()
def bxt_get_image(self,camera_name):
image = self.stub.bxt_get_image(bxtsp.Camera_name(name=camera_name)).list
image = np.frombuffer(image)
image = np.reshape(image,(200,320,3)).astype("uint8")
# image = image[:,:,1]
return image
if __name__=="__main__":
import time
bc = BaxterClient(host = "10.1.0.24:18300")
# tic = time.time()
# imgx = hcc.getimgbytes()
# toc = time.time()
# td = toc-tic
# tic = time.time()
# imgxs = hcc.getimgstr()
# toc = time.time()
# td2 = toc-tic
# print(td, td2)
angle_rgt = bc.bxt_get_jnts("rgt")
# print angle_rgt
# print(angle_rgt[-1])
#
#
# angle_rgt[-1] = angle_rgt[-1] - 50.0
#
# bc.bxt_movejnts(angle_rgt)
print(bc.bxt_get_jnts(armname="rgt"))
print(bc.bxt_get_jnts(armname="lft"))
import cv2 as cv
cv.imshow("w",bc.bxt_get_image("head_camera"))
cv.waitKey(0)
# print bc.bxt_get_jnts("rgt")
# print(eval("a="+bc.bxt_get_jnts())) | 38.842857 | 154 | 0.668996 | 1,804 | 0.663479 | 0 | 0 | 0 | 0 | 0 | 0 | 629 | 0.231335 |
82e393c148ab09bc52468154e5d5428989e2e585 | 5,232 | py | Python | pw_build/py/pw_build/copy_from_cipd.py | Tiggerlaboratoriet/pigweed | 7d7e7ad6223433f45af680f43ab4d75e23ad3257 | [
"Apache-2.0"
]
| 1 | 2022-01-13T10:01:05.000Z | 2022-01-13T10:01:05.000Z | pw_build/py/pw_build/copy_from_cipd.py | Tiggerlaboratoriet/pigweed | 7d7e7ad6223433f45af680f43ab4d75e23ad3257 | [
"Apache-2.0"
]
| null | null | null | pw_build/py/pw_build/copy_from_cipd.py | Tiggerlaboratoriet/pigweed | 7d7e7ad6223433f45af680f43ab4d75e23ad3257 | [
"Apache-2.0"
]
| null | null | null | # Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Copies files from CIPD to a specified directory.
By default, Pigweed installs packages from a manifest file to a CIPD
subdirectory as part of environment setup. This script will copy files from this
directory into a specified output directory.
Here's an example of how to use this script:
Let's say you have a package with a static library:
CIPD path: `pigweed/third_party/libsomething`
Files:
./libsomething/include/something.h
./libsomething/libsomething.a
And this package was referenced in my_project_packages.json, which was provided
as a --cipd-package-file in your bootstrap script.
To copy the static library to $PW_PROJECT_ROOT/static_libraries, you'd have an
invocation something like this:
copy_from_cipd --package-name=pigweed/third_party/libsomething \
    --manifest=$PW_PROJECT_ROOT/tools/my_project_packages.json \
--file=libsomething/libsomething.a \
    --out-dir=$PW_PROJECT_ROOT/static_libraries
"""
import argparse
import json
import logging
import os
import shutil
import subprocess
import sys
from pathlib import Path
import pw_env_setup.cipd_setup.update
logger = logging.getLogger(__name__)
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--verbose',
'-v',
help='Verbose output',
action='store_true')
parser.add_argument('--manifest',
required=True,
type=Path,
help='Path to CIPD JSON manifest file')
parser.add_argument('--out-dir',
type=Path,
default='.',
help='Output folder to copy the specified file to')
parser.add_argument('--package-name',
required=True,
help='The CIPD package name')
# TODO(pwbug/334) Support multiple values for --file.
parser.add_argument('--file',
required=True,
type=Path,
help='Path of the file to copy from the CIPD package. '
'This is relative to the CIPD package root of the '
'provided manifest.')
parser.add_argument('--cipd-package-root',
type=Path,
help="Path to the root of the package's install "
'directory. This is usually at '
'PW_{manifest name}_CIPD_INSTALL_DIR')
return parser.parse_args()
def check_version(manifest, cipd_path, package_name):
base_package_name = os.path.basename(package_name)
instance_id_path = os.path.join(cipd_path, '.versions',
f'{base_package_name}.cipd_version')
with open(instance_id_path, 'r') as ins:
instance_id = json.load(ins)['instance_id']
with open(manifest, 'r') as ins:
data = json.load(ins)
# TODO(pwbug/599) Always assume this is a dict.
if isinstance(data, dict):
data = data['packages']
path = None
expected_version = None
for entry in data:
if package_name in entry['path']:
path = entry['path']
expected_version = entry['tags'][0]
if not path:
raise LookupError(f'failed to find {package_name} entry')
cmd = ['cipd', 'describe', path, '-version', instance_id]
output = subprocess.check_output(cmd).decode()
if expected_version not in output:
pw_env_setup.cipd_setup.update.update(
'cipd', (manifest, ), os.environ['PW_CIPD_INSTALL_DIR'],
os.environ['CIPD_CACHE_DIR'])
def main():
args = parse_args()
if args.verbose:
logger.setLevel(logging.DEBUG)
# Try to infer CIPD install root from the manifest name.
if args.cipd_package_root is None:
file_base_name = args.manifest.stem
args.cipd_var = 'PW_{}_CIPD_INSTALL_DIR'.format(file_base_name.upper())
try:
args.cipd_package_root = os.environ[args.cipd_var]
except KeyError:
logger.error(
"The %s environment variable isn't set. Did you forget to run "
'`. ./bootstrap.sh`? Is the %s manifest installed to a '
'different path?', args.cipd_var, file_base_name)
sys.exit(1)
check_version(args.manifest, args.cipd_package_root, args.package_name)
shutil.copyfile(os.path.join(args.cipd_package_root, args.file),
os.path.join(args.out_dir, args.file))
if __name__ == '__main__':
logging.basicConfig()
main()
| 36.082759 | 80 | 0.634939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,511 | 0.479931 |
82e3d3ee1d9875b1bc637e5da752761092db4c4c | 1,248 | py | Python | globomap_api/api/v2/parsers/queries.py | pedrokiefer/globomap-api | 68e1e3a623cdb4df78327226eb5c665841d4823f | [
"Apache-2.0"
]
| 15 | 2017-08-04T17:09:52.000Z | 2021-03-05T18:11:51.000Z | globomap_api/api/v2/parsers/queries.py | pedrokiefer/globomap-api | 68e1e3a623cdb4df78327226eb5c665841d4823f | [
"Apache-2.0"
]
| 2 | 2017-09-03T23:39:35.000Z | 2019-10-07T17:18:35.000Z | globomap_api/api/v2/parsers/queries.py | pedrokiefer/globomap-api | 68e1e3a623cdb4df78327226eb5c665841d4823f | [
"Apache-2.0"
]
| 6 | 2017-08-09T13:32:38.000Z | 2020-01-31T23:28:36.000Z | """
Copyright 2018 Globo.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from flask_restplus import reqparse
search_query_parser = reqparse.RequestParser()
search_query_parser.add_argument(
'page',
type=int,
required=False,
default=1,
help='Page number'
)
search_query_parser.add_argument(
'per_page',
type=int,
required=False,
default=10,
help='Items number per page'
)
search_query_parser.add_argument(
'query',
type=str,
required=False,
default='[[{"field":"name","operator":"LIKE","value":""}]]',
help='Query'
)
execute_query_parser = reqparse.RequestParser()
execute_query_parser.add_argument(
'variable',
type=str,
required=False,
help='Variable'
)
| 26 | 75 | 0.710737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 724 | 0.580128 |
82e465ccd93333f53c7be0010a34ffe382b2a569 | 5,354 | py | Python | auto_pull_request/parser.py | Ruth-Seven/Auto-git-request | bd058707c174138efed0ffd7109cf70b25796e64 | [
"Apache-2.0"
]
| 2 | 2021-10-05T11:12:46.000Z | 2021-10-05T11:12:56.000Z | auto_pull_request/parser.py | Ruth-Seven/Auto-git-request | bd058707c174138efed0ffd7109cf70b25796e64 | [
"Apache-2.0"
]
| null | null | null | auto_pull_request/parser.py | Ruth-Seven/Auto-git-request | bd058707c174138efed0ffd7109cf70b25796e64 | [
"Apache-2.0"
]
| null | null | null |
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import click
from loguru import logger
from auto_pull_request.pull_request import Auto
from auto_pull_request import __version__
# Creates a GitHub pull-request.
@click.command("pull-request")
@click.option("--debug/--no-debug", default=False, show_default=True,
help="If true, enable debugging.")
@click.option("--target-url", "-u",
help="The remote url of branch to send a pull-request to. Default is auto-detected from .git/config. "
"Target-url should using http or ssh as protocol."
"There options, target-url, target-remote and target-branch, only needed when you cloned from your repository, or you want "
"to create a pull-request to another repository.\n"
"For example, you can check you target url by \"git config --get \"remote.origin.url\"\""
)
@click.option("--target-remote", "-r",
help="The remote name of the target repo int local git, to which we send a pull-request. Default is auto-detected from .git/config. "
"There options, target-url, target-remote and target-branch, only needed when you cloned from your repository, or you want "
"to create a pull-request to another repository.\n"
"As a example, target-remote of a cloned repository from other other people ususally is \"origin\"."
)
@click.option("--target-branch", "-b",
help="The remote branch of target branch in local git, to which we send a pull-request. Default value is auto-detected from .git/config. "
"There options, target-url, target-remote and target-branch, usually needed when you cloned from your repository, or you want "
"to custom a pull-request.\n"
)
@click.option("--fork-branch",
help="The remote branch of fork repo from which we send a pull-request. Default value is auto-detected from .git/config. "
)
@click.option("--fork-url",
help="The remote url of fork repo from which we send a pull-request. Default value is upsteam of the current branch. "
)
@click.option("--fork-remote",
help="The remote name of fork repo from which we send a pull-request. Default value is upsteam name of the current branch."
)
@click.option("--title",
help="Title of the pull request.")
@click.option("--body",
help="Body of the pull request.")
@click.option(
"--comment",
help="Comment to publish when updating the pull-request")
@click.option(
"--keep-message/--update-message", default=False, show_default=True,
help="For a existing pull-request, Don't open an editor to change the pull request body.",
)
@click.option(
"--skip-editor/--open-editor", default=False, show_default=True,
help="If not empty, use parameter of --title and --message instead of "
"opening edition for pull-requester content.")
@click.option("--labels", "-l",
help="The labels to add to the pull request. Can be used multiple times.")
@click.option("--token", prompt=True, type=str,
help="The personal token of github to log in, which will store in \"git credential\"."
"If empty, we will promot in terminal to input corresponding infos.\n"
"How to get you personal token? Please check this https://docs.github.com/en/authentication"
"/keeping-your-account-and-data-secure/creating-a-personal-access-token")
@click.option("--sync", type=click.Choice(["merge", "rebase", "force"]), default="rebase", show_default=True,
help="Choose to the git-command to sync with remote repo. Option `--allow-unrelated-histories` with `git merge` is deafault.")
@click.option("--quick-commit", type=click.Choice(["false", "ours", "theirs"]), default="false", show_default=True,
help="With using `--sync=force`, using merges occurs conflicts during sync with remote branches, `--quick_commit` will use `git checkout` can quick using local content or remote content, which is dependent on your choice, as merge results. Of course, skip the process open vim to commit.")
def main(debug, target_url, target_remote, target_branch, fork_branch, fork_url, fork_remote, title, body, keep_message, labels, comment, skip_editor, token, sync, quick_commit):
log_info(debug)
version_lint()
Auto(
target_url=target_url,
target_remote=target_remote,
target_branch=target_branch,
fork_branch=fork_branch,
fork_url=fork_url,
fork_remote=fork_remote,
title=title,
body=body,
comment=comment,
keep_message=keep_message,
labels=labels,
skip_editor=skip_editor,
token=token,
sync=sync,
quick_commit=quick_commit,
).run()
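# Example invocation (illustrative; the command name depends on the installed
# entry point, flags are as defined above):
#   auto-pull-request --token=<personal-token> --sync=rebase \
#       --title="Fix typo" --body="Corrects a typo in the README"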
def log_info(debug):
logger.remove()
level = "DEBUG" if debug else "SUCCESS"
logger.add(sys.stderr, level=level)
def version_lint():
logger.success(f"Auto-Pull-Request ⭐️{__version__}") | 49.119266 | 293 | 0.710497 | 0 | 0 | 0 | 0 | 4,376 | 0.816723 | 0 | 0 | 3,544 | 0.661441 |
82e4981e82370f4b216afc9af7f4136625ccd93f | 3,644 | py | Python | fit1d/common/fit1d.py | michael-amat/fit1d | 0cd42874e3eba4353c564809c317510b626dee25 | [
"BSD-2-Clause"
]
| null | null | null | fit1d/common/fit1d.py | michael-amat/fit1d | 0cd42874e3eba4353c564809c317510b626dee25 | [
"BSD-2-Clause"
]
| null | null | null | fit1d/common/fit1d.py | michael-amat/fit1d | 0cd42874e3eba4353c564809c317510b626dee25 | [
"BSD-2-Clause"
]
| 9 | 2019-02-24T12:51:28.000Z | 2019-03-22T09:25:45.000Z | """
The fit1d package is designed to provide an organized toolbox for the different
types of 1D fits that can be performed.
It is easy to add new fits and other functionality.
"""
from abc import ABC, abstractmethod
import numpy as np
from typing import List,Tuple
from fit1d.common.model import Model, ModelMock
from fit1d.common.outlier import OutLier
from fit1d.common.fit_data import FitData
class Fit1D(ABC):
"""
    This is the main class of the fit1d package. It allows the user to execute
    the fit and eval methods, in addition to the calc_rms and calc_error services.
    The attributes of this class are the _outlier object, a _use_remove_outliers
    boolean and a _fit_data container.
"""
_outlier: OutLier
_use_remove_outliers: bool
_fit_data: FitData
# interface methods
def fit(self, x: np.ndarray, y: np.ndarray) -> FitData:
self._fit_data.x = x
self._fit_data.y = y
if self._use_remove_outliers:
self._remove_outlier()
else:
self._calc_fit_and_update_fit_data()
return self._fit_data
def eval(self, x: np.ndarray = None, model: Model = None) -> np.ndarray:
if x is not None:
self._fit_data.x = x
if model is not None:
self._fit_data.model = model
self._calc_eval()
return self._fit_data.y_fit
def calc_error(self):
"""
calc error vector , update _fit_data
:return:
"""
if self._fit_data.y is not None and self._fit_data.y_fit is not None:
self._fit_data.error_vector = self._fit_data.y - self._fit_data.y_fit
def calc_rms(self):
if self._fit_data.error_vector is not None:
self._fit_data.rms = (sum(self._fit_data.error_vector ** 2) / len(self._fit_data.error_vector)) ** 0.5
def get_fit_data(self) -> FitData:
return self._fit_data
# abstract methods
@abstractmethod
def _calc_fit(self):
"""
abstractmethod:
run fit calculation of the data update model in _fit_data.model
:return: Null
"""
pass
@abstractmethod
def _calc_eval(self):
"""
abstractmethod:
subclass calculate model eval for inner x and model
update _fit_data.y_fit
:return: Void
"""
pass
# internal methods
def _update_fit_data(self):
self._calc_eval()
self.calc_error()
self.calc_rms()
def _remove_outlier(self):
while True:
self._calc_fit_and_update_fit_data()
indexes_to_remove = self._outlier.find_outliers(self._fit_data.error_vector)
if len(indexes_to_remove) == 0:
break
else:
self._remove_indexes(indexes_to_remove)
def _remove_indexes(self, ind):
self._fit_data.x = np.delete(self._fit_data.x, ind)
self._fit_data.y = np.delete(self._fit_data.y, ind)
def _calc_fit_and_update_fit_data(self):
self._calc_fit()
self._update_fit_data()
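    # Usage sketch (illustrative; a concrete subclass must implement _calc_fit
    # and _calc_eval):
    #   fitter = SomeFit1DSubclass(...)
    #   fit_data = fitter.fit(x, y)   # fit the model, optionally removing outliers
    #   y_fit = fitter.eval(x)        # evaluate the fitted model at x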
class Fit1DMock(Fit1D):
""" Mock class. Used only for tests """
def __init__(self, outlier: OutLier, remove_outliers: bool):
self._fit_data = FitData()
self._outlier = outlier
self._use_remove_outliers = remove_outliers
def _calc_fit(self):
self._fit_data.model = ModelMock({"param1": 5.5})
def _calc_eval(self) -> np.ndarray:
if self._fit_data.y is None or len(self._fit_data.y) == 4:
self._fit_data.y_fit = np.array([11, 22, 33, 44])
else:
self._fit_data.y_fit = np.array([11, 33, 44])
| 30.366667 | 114 | 0.638035 | 3,247 | 0.891054 | 0 | 0 | 410 | 0.112514 | 0 | 0 | 926 | 0.254116 |
82e49f1ad3ed1e40ba856944b5ae80363654a869 | 133 | py | Python | ska_skeleton/__init__.py | Vinod-Sathe-Company-Limited/ska-skeleton | e93d131fc4d33d5b2f0cd715553fd5907955eccd | [
"BSD-3-Clause"
]
| null | null | null | ska_skeleton/__init__.py | Vinod-Sathe-Company-Limited/ska-skeleton | e93d131fc4d33d5b2f0cd715553fd5907955eccd | [
"BSD-3-Clause"
]
| null | null | null | ska_skeleton/__init__.py | Vinod-Sathe-Company-Limited/ska-skeleton | e93d131fc4d33d5b2f0cd715553fd5907955eccd | [
"BSD-3-Clause"
]
| null | null | null | # -*- coding: utf-8 -*-
"""Module init code."""
__version__ = '0.0.0'
__author__ = 'Your Name'
__email__ = '[email protected]'
| 13.3 | 33 | 0.609023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 85 | 0.639098 |
82e67a1fd499cdf5d94a3a3ff757c622620968ef | 2,669 | py | Python | src/users.py | dtekcth/tvmannen | 47d9441ee4000dc3600ae1a28580ba95a5b46a2a | [
"MIT"
]
| null | null | null | src/users.py | dtekcth/tvmannen | 47d9441ee4000dc3600ae1a28580ba95a5b46a2a | [
"MIT"
]
| null | null | null | src/users.py | dtekcth/tvmannen | 47d9441ee4000dc3600ae1a28580ba95a5b46a2a | [
"MIT"
]
| 1 | 2019-12-25T21:49:16.000Z | 2019-12-25T21:49:16.000Z | # Blueprint for user management in /admin/users and /admin/users/delete
from tv import login_manager, db
from flask_login import LoginManager, current_user, login_user, logout_user, login_required
from flask import Blueprint, flash, redirect, render_template, request
from data import User
from forms import RegistrationForm, ModifyUserForm
users_page = Blueprint("users", __name__)
# Page for listing, creating and deleting users
@users_page.route('/admin/users', methods=['GET', 'POST'])
@login_required
def create_user():
if current_user.role != "admin":
flash("You don't have permissions to manage users")
return redirect("/admin")
# Logged in as admin
users = User.query.all()
form = RegistrationForm()
if form.validate_on_submit():
user = User(username=form.username.data, role=form.role.data)
user.set_password(form.password.data)
db.session.add(user)
db.session.commit()
flash('User has been created')
return redirect("/admin/users")
return render_template('users.html', form=form, users=users, user=current_user)
# Deletes an user on request for admin accounts
# Takes user_id "id" as argument
@users_page.route("/admin/users/delete")
@login_required
def delete_user():
if current_user.role != "admin":
flash("You don't have permissions to manage users")
redirect("/admin")
id = request.args.get("id")
if id == None:
flash("Invalid arguments")
return redirect("/admin")
user = User.query.filter_by(id=id).first()
if user == None:
flash("Id does not exist")
return redirect("/admin/users")
db.session.delete(user)
db.session.commit()
return redirect("/admin/users")
# User modification page, takes user id "id" as an argument
@users_page.route("/admin/users/modify", methods=['GET', 'POST'])
@login_required
def modify():
if current_user.role != "admin":
flash("You don't have permissions to manage users")
redirect("/admin")
id = request.args.get("id")
if id == None:
flash("Invalid arguments")
return redirect("/admin/users")
user = User.query.filter_by(id=id).first()
if user == None:
flash("Invalid user id")
return redirect("/admin/users")
form = ModifyUserForm()
if form.validate_on_submit():
user.set_password(form.password.data)
user.role = form.role.data
print("Role set to:", form.role.data)
db.session.commit()
        flash('The user has been successfully modified')
return redirect("/admin/users")
else: form.role.data = user.role
return render_template('modify_user.html', form=form, user=user)
| 32.156627 | 91 | 0.683402 | 0 | 0 | 0 | 0 | 2,088 | 0.782315 | 0 | 0 | 819 | 0.306857 |
7d5335d6ee6e5dd4d8013184f474bc8d3185581f | 337 | py | Python | mxfield/models.py | krescruz/django-mxfield | 98855412d4414e239a74370380aed5d28b52eeb1 | [
"MIT"
]
| null | null | null | mxfield/models.py | krescruz/django-mxfield | 98855412d4414e239a74370380aed5d28b52eeb1 | [
"MIT"
]
| null | null | null | mxfield/models.py | krescruz/django-mxfield | 98855412d4414e239a74370380aed5d28b52eeb1 | [
"MIT"
]
| null | null | null | from django.db.models import CharField
from django.utils.translation import ugettext_lazy as _
import validators
class CURPField(CharField):
default_validators = [validators.CURPValidator()]
description = _("CURP")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 18
super(CURPField, self).__init__(*args, **kwargs)
| 25.923077 | 55 | 0.759644 | 221 | 0.655786 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 0.053412 |
7d53f22522d63caa5e1b6eeef4ed280bfe59205b | 5,646 | py | Python | tests/unit/test_crypt.py | oba11/salt | ddc0286d57c5ce864b60bf43e5bc3007bf7c2549 | [
"Apache-2.0"
]
| null | null | null | tests/unit/test_crypt.py | oba11/salt | ddc0286d57c5ce864b60bf43e5bc3007bf7c2549 | [
"Apache-2.0"
]
| null | null | null | tests/unit/test_crypt.py | oba11/salt | ddc0286d57c5ce864b60bf43e5bc3007bf7c2549 | [
"Apache-2.0"
]
| null | null | null | # coding: utf-8
# python libs
from __future__ import absolute_import
import os
# salt testing libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import patch, call, mock_open, NO_MOCK, NO_MOCK_REASON, MagicMock
# salt libs
import salt.utils
import salt.utils.files
from salt import crypt
# third-party libs
try:
from Cryptodome.PublicKey import RSA # pylint: disable=unused-import
HAS_PYCRYPTO_RSA = True
except ImportError:
HAS_PYCRYPTO_RSA = False
if not HAS_PYCRYPTO_RSA:
try:
from Crypto.PublicKey import RSA
HAS_PYCRYPTO_RSA = True
except ImportError:
HAS_PYCRYPTO_RSA = False
PRIVKEY_DATA = (
'-----BEGIN RSA PRIVATE KEY-----\n'
'MIIEpAIBAAKCAQEA75GR6ZTv5JOv90Vq8tKhKC7YQnhDIo2hM0HVziTEk5R4UQBW\n'
'a0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD4ZMsYqLzqjWMekLC8bjhxc+EuPo9\n'
'Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R4hOcMMZNZdi0xLtFoTfwU61UPfFX\n'
'14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttLP3sMXJvc3EvM0JiDVj4l1TWFUHHz\n'
'eFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k6ai4tVzwkTmV5PsriP1ju88Lo3MB\n'
'4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWHAQIDAQABAoIBAGOzBzBYZUWRGOgl\n'
'IY8QjTT12dY/ymC05GM6gMobjxuD7FZ5d32HDLu/QrknfS3kKlFPUQGDAbQhbbb0\n'
'zw6VL5NO9mfOPO2W/3FaG1sRgBQcerWonoSSSn8OJwVBHMFLG3a+U1Zh1UvPoiPK\n'
'S734swIM+zFpNYivGPvOm/muF/waFf8tF/47t1cwt/JGXYQnkG/P7z0vp47Irpsb\n'
'Yjw7vPe4BnbY6SppSxscW3KoV7GtJLFKIxAXbxsuJMF/rYe3O3w2VKJ1Sug1VDJl\n'
'/GytwAkSUer84WwP2b07Wn4c5pCnmLslMgXCLkENgi1NnJMhYVOnckxGDZk54hqP\n'
'9RbLnkkCgYEA/yKuWEvgdzYRYkqpzB0l9ka7Y00CV4Dha9Of6GjQi9i4VCJ/UFVr\n'
'UlhTo5y0ZzpcDAPcoZf5CFZsD90a/BpQ3YTtdln2MMCL/Kr3QFmetkmDrt+3wYnX\n'
'sKESfsa2nZdOATRpl1antpwyD4RzsAeOPwBiACj4fkq5iZJBSI0bxrMCgYEA8GFi\n'
'qAjgKh81/Uai6KWTOW2kX02LEMVRrnZLQ9VPPLGid4KZDDk1/dEfxjjkcyOxX1Ux\n'
'Klu4W8ZEdZyzPcJrfk7PdopfGOfrhWzkREK9C40H7ou/1jUecq/STPfSOmxh3Y+D\n'
'ifMNO6z4sQAHx8VaHaxVsJ7SGR/spr0pkZL+NXsCgYEA84rIgBKWB1W+TGRXJzdf\n'
'yHIGaCjXpm2pQMN3LmP3RrcuZWm0vBt94dHcrR5l+u/zc6iwEDTAjJvqdU4rdyEr\n'
'tfkwr7v6TNlQB3WvpWanIPyVzfVSNFX/ZWSsAgZvxYjr9ixw6vzWBXOeOb/Gqu7b\n'
'cvpLkjmJ0wxDhbXtyXKhZA8CgYBZyvcQb+hUs732M4mtQBSD0kohc5TsGdlOQ1AQ\n'
'McFcmbpnzDghkclyW8jzwdLMk9uxEeDAwuxWE/UEvhlSi6qdzxC+Zifp5NBc0fVe\n'
'7lMx2mfJGxj5CnSqQLVdHQHB4zSXkAGB6XHbBd0MOUeuvzDPfs2voVQ4IG3FR0oc\n'
'3/znuwKBgQChZGH3McQcxmLA28aUwOVbWssfXKdDCsiJO+PEXXlL0maO3SbnFn+Q\n'
'Tyf8oHI5cdP7AbwDSx9bUfRPjg9dKKmATBFr2bn216pjGxK0OjYOCntFTVr0psRB\n'
'CrKg52Qrq71/2l4V2NLQZU40Dr1bN9V+Ftd9L0pvpCAEAWpIbLXGDw==\n'
'-----END RSA PRIVATE KEY-----')
PUBKEY_DATA = (
'-----BEGIN PUBLIC KEY-----\n'
'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA75GR6ZTv5JOv90Vq8tKh\n'
'KC7YQnhDIo2hM0HVziTEk5R4UQBWa0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD\n'
'4ZMsYqLzqjWMekLC8bjhxc+EuPo9Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R\n'
'4hOcMMZNZdi0xLtFoTfwU61UPfFX14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttL\n'
'P3sMXJvc3EvM0JiDVj4l1TWFUHHzeFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k\n'
'6ai4tVzwkTmV5PsriP1ju88Lo3MB4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWH\n'
'AQIDAQAB\n'
'-----END PUBLIC KEY-----')
MSG = b'It\'s me, Mario'
SIG = (
b'\x07\xf3\xb1\xe7\xdb\x06\xf4_\xe2\xdc\xcb!F\xfb\xbex{W\x1d\xe4E'
b'\xd3\r\xc5\x90\xca(\x05\x1d\x99\x8b\x1aug\x9f\x95>\x94\x7f\xe3+'
b'\x12\xfa\x9c\xd4\xb8\x02]\x0e\xa5\xa3LL\xc3\xa2\x8f+\x83Z\x1b\x17'
b'\xbfT\xd3\xc7\xfd\x0b\xf4\xd7J\xfe^\x86q"I\xa3x\xbc\xd3$\xe9M<\xe1'
b'\x07\xad\xf2_\x9f\xfa\xf7g(~\xd8\xf5\xe7\xda-\xa3Ko\xfc.\x99\xcf'
b'\x9b\xb9\xc1U\x97\x82\'\xcb\xc6\x08\xaa\xa0\xe4\xd0\xc1+\xfc\x86'
b'\r\xe4y\xb1#\xd3\x1dS\x96D28\xc4\xd5\r\xd4\x98\x1a44"\xd7\xc2\xb4'
b']\xa7\x0f\xa7Db\x85G\x8c\xd6\x94!\x8af1O\xf6g\xd7\x03\xfd\xb3\xbc'
b'\xce\x9f\xe7\x015\xb8\x1d]AHK\xa0\x14m\xda=O\xa7\xde\xf2\xff\x9b'
b'\x8e\x83\xc8j\x11\x1a\x98\x85\xde\xc5\x91\x07\x84!\x12^4\xcb\xa8'
b'\x98\x8a\x8a&#\xb9(#?\x80\x15\x9eW\xb5\x12\xd1\x95S\xf2<G\xeb\xf1'
b'\x14H\xb2\xc4>\xc3A\xed\x86x~\xcfU\xd5Q\xfe~\x10\xd2\x9b')
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not HAS_PYCRYPTO_RSA, 'pycrypto >= 2.6 is not available')
class CryptTestCase(TestCase):
def test_gen_keys(self):
with patch.multiple(os, umask=MagicMock(), chmod=MagicMock(), chown=MagicMock,
access=MagicMock(return_value=True)):
with patch('salt.utils.files.fopen', mock_open()):
open_priv_wb = call('/keydir/keyname.pem', 'wb+')
open_pub_wb = call('/keydir/keyname.pub', 'wb+')
with patch('os.path.isfile', return_value=True):
self.assertEqual(crypt.gen_keys('/keydir', 'keyname', 2048), '/keydir/keyname.pem')
self.assertNotIn(open_priv_wb, salt.utils.files.fopen.mock_calls)
self.assertNotIn(open_pub_wb, salt.utils.files.fopen.mock_calls)
with patch('os.path.isfile', return_value=False):
with patch('salt.utils.files.fopen', mock_open()):
crypt.gen_keys('/keydir', 'keyname', 2048)
salt.utils.files.fopen.assert_has_calls([open_priv_wb, open_pub_wb], any_order=True)
def test_sign_message(self):
key = RSA.importKey(PRIVKEY_DATA)
with patch('salt.crypt._get_rsa_key', return_value=key):
self.assertEqual(SIG, salt.crypt.sign_message('/keydir/keyname.pem', MSG))
def test_verify_signature(self):
with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)):
self.assertTrue(crypt.verify_signature('/keydir/keyname.pub', MSG, SIG))
| 49.526316 | 108 | 0.732554 | 1,491 | 0.264081 | 0 | 0 | 1,590 | 0.281615 | 0 | 0 | 3,473 | 0.615126 |
7d54215d7a89cdc6dee240942d655951555aa1e4 | 628 | py | Python | gubbins/tests/utils_tests.py | doismellburning/django-gubbins | d94e91082adfe2ae7462209a5793b479429d40d9 | [
"BSD-2-Clause"
]
| null | null | null | gubbins/tests/utils_tests.py | doismellburning/django-gubbins | d94e91082adfe2ae7462209a5793b479429d40d9 | [
"BSD-2-Clause"
]
| 4 | 2018-12-20T13:02:40.000Z | 2018-12-21T16:09:20.000Z | gubbins/tests/utils_tests.py | doismellburning/django-gubbins | d94e91082adfe2ae7462209a5793b479429d40d9 | [
"BSD-2-Clause"
]
| 2 | 2015-01-05T10:13:42.000Z | 2020-05-29T08:17:58.000Z | import unittest
from gubbins.utils import append_params
class UtilsTest(unittest.TestCase):
def test_append_params(self):
url = 'http://www.fish.com/dir/page.html'
url = append_params(url, {'a': 1, 'b': 'a i'})
expected = 'http://www.fish.com/dir/page.html?a=1&b=a+i'
self.assertEqual(expected, url)
def test_append_params_with_existing(self):
url = 'http://www.fish.com/dir/page.html?a=b'
url = append_params(url, {'u': 1234})
expected = 'http://www.fish.com/dir/page.html?a=b&u=1234'
self.assertEqual(expected, url)
| 31.4 | 65 | 0.603503 | 547 | 0.871019 | 0 | 0 | 0 | 0 | 0 | 0 | 179 | 0.285032 |
7d553204536b771ce8440161d9597d5690c1a810 | 2,804 | py | Python | tests/components/test_power_output.py | Shivam60/j5 | 18069737644c8f1c95944386773c7643d5df5aeb | [
"MIT"
]
| null | null | null | tests/components/test_power_output.py | Shivam60/j5 | 18069737644c8f1c95944386773c7643d5df5aeb | [
"MIT"
]
| null | null | null | tests/components/test_power_output.py | Shivam60/j5 | 18069737644c8f1c95944386773c7643d5df5aeb | [
"MIT"
]
| null | null | null | """Tests for the power output classes."""
from typing import List, Optional, Type
from j5.backends import Backend
from j5.boards import Board
from j5.components.power_output import PowerOutput, PowerOutputInterface
class MockPowerOutputDriver(PowerOutputInterface):
"""A testing driver for power outputs."""
def __init__(self):
self._enabled = False
def get_power_output_enabled(self, board: Board, identifier: int) -> bool:
"""Get whether a power output is enabled."""
return self._enabled
def set_power_output_enabled(
self, board: Board, identifier: int, enabled: bool,
) -> None:
"""Set whether a power output is enabled."""
self._enabled = enabled
def get_power_output_current(self, board: Board, identifier: int) -> float:
"""Get the current being drawn on a power output, in amperes."""
return 8.1
class MockPowerOutputBoard(Board):
"""A testing board for the power output."""
@property
def name(self) -> str:
"""The name of this board."""
return "Testing Power Output Board"
@property
def serial(self) -> str:
"""The serial number of this board."""
return "SERIAL"
@property
def firmware_version(self) -> Optional[str]:
"""Get the firmware version of this board."""
return self._backend.get_firmware_version(self)
@property
def supported_components(self) -> List[Type["Component"]]:
"""List the types of component that this Board supports."""
return [PowerOutput]
def make_safe(self):
"""Make this board safe."""
pass
@staticmethod
def discover(backend: Backend):
"""Detect all of the boards on a given backend."""
return []
def test_power_output_interface_implementation():
"""Test that we can implement the PowerOutputInterface."""
MockPowerOutputDriver()
def test_power_output_instantiation():
"""Test that we can instantiate a PowerOutput."""
PowerOutput(0, MockPowerOutputBoard(), MockPowerOutputDriver())
def test_power_output_interface():
"""Test that the class returns the correct interface."""
assert PowerOutput.interface_class() is PowerOutputInterface
def test_power_output_enabled():
"""Test the is_enabled property of a PowerOutput."""
power_output = PowerOutput(0, MockPowerOutputBoard(), MockPowerOutputDriver())
assert power_output.is_enabled is False
power_output.is_enabled = True
assert power_output.is_enabled is True
def test_power_output_current():
"""Test the current property of a PowerOutput."""
power_output = PowerOutput(0, MockPowerOutputBoard(), MockPowerOutputDriver())
assert type(power_output.current) is float
assert power_output.current == 8.1
| 30.813187 | 82 | 0.690442 | 1,558 | 0.555635 | 0 | 0 | 690 | 0.246077 | 0 | 0 | 836 | 0.298146 |
7d55cd544a02e7f8eda686f396f1e614dce7adb0 | 11,660 | py | Python | msg/tools/genmsg/test/test_genmsg_msgs.py | sikuner/Firmware_Marine | 80411dc4eb5aa9dc8eb3ca8ff6d59d1cf081a010 | [
"BSD-3-Clause"
]
| 17 | 2020-03-13T00:10:28.000Z | 2021-09-06T17:13:17.000Z | msg/tools/genmsg/test/test_genmsg_msgs.py | sikuner/Firmware_Marine | 80411dc4eb5aa9dc8eb3ca8ff6d59d1cf081a010 | [
"BSD-3-Clause"
]
| 1 | 2020-08-24T03:28:49.000Z | 2020-08-24T03:28:49.000Z | msg/tools/genmsg/test/test_genmsg_msgs.py | sikuner/Firmware_Marine | 80411dc4eb5aa9dc8eb3ca8ff6d59d1cf081a010 | [
"BSD-3-Clause"
]
| 2 | 2020-03-13T09:05:32.000Z | 2021-08-13T08:28:14.000Z | # Software License Agreement (BSD License)
#
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import random
def test_bare_msg_type():
import genmsg.msgs
tests = [(None, None), ('String', 'String'), ('std_msgs/String', 'std_msgs/String'),
('String[10]', 'String'), ('string[10]', 'string'), ('std_msgs/String[10]', 'std_msgs/String'),
]
for val, res in tests:
assert res == genmsg.msgs.bare_msg_type(val)
PKG = 'genmsg'
def test_resolve_type():
from genmsg.msgs import resolve_type, bare_msg_type
for t in ['string', 'string[]', 'string[14]', 'int32', 'int32[]']:
bt = bare_msg_type(t)
        assert t == resolve_type(t, PKG)
assert 'foo/string' == resolve_type('foo/string', PKG)
assert 'std_msgs/Header' == resolve_type('Header', 'roslib')
assert 'std_msgs/Header' == resolve_type('std_msgs/Header', 'roslib')
assert 'std_msgs/Header' == resolve_type('Header', 'stereo_msgs')
assert 'std_msgs/String' == resolve_type('String', 'std_msgs')
assert 'std_msgs/String' == resolve_type('std_msgs/String', 'std_msgs')
assert 'std_msgs/String' == resolve_type('std_msgs/String', PKG)
assert 'std_msgs/String[]' == resolve_type('std_msgs/String[]', PKG)
def test_parse_type():
import genmsg.msgs
tests = [
('a', ('a', False, None)),
('int8', ('int8', False, None)),
('std_msgs/String', ('std_msgs/String', False, None)),
('a[]', ('a', True, None)),
('int8[]', ('int8', True, None)),
('std_msgs/String[]', ('std_msgs/String', True, None)),
('a[1]', ('a', True, 1)),
('int8[1]', ('int8', True, 1)),
('std_msgs/String[1]', ('std_msgs/String', True, 1)),
('a[11]', ('a', True, 11)),
('int8[11]', ('int8', True, 11)),
('std_msgs/String[11]', ('std_msgs/String', True, 11)),
]
for val, res in tests:
assert res == genmsg.msgs.parse_type(val)
fail = ['a[1][2]', 'a[][]', '', None, 'a[', 'a[[1]', 'a[1]]']
for f in fail:
try:
genmsg.msgs.parse_type(f)
assert False, "should have failed on %s"%f
        except ValueError:
            pass
def test_Constant():
import genmsg.msgs
vals = [random.randint(0, 1000) for i in range(0, 3)]
type_, name, val = [str(x) for x in vals]
x = genmsg.msgs.Constant(type_, name, val, str(val))
assert type_ == x.type
assert name == x.name
assert val == x.val
assert x == genmsg.msgs.Constant(type_, name, val, str(val))
assert x != 1
assert not x == 1
assert x != genmsg.msgs.Constant('baz', name, val, str(val))
assert x != genmsg.msgs.Constant(type_, 'foo', val, str(val))
assert x != genmsg.msgs.Constant(type_, name, 'foo', 'foo')
# tripwire
assert repr(x)
assert str(x)
try:
genmsg.msgs.Constant(None, name, val, str(val))
assert False, "should have raised"
except: pass
try:
genmsg.msgs.Constant(type_, None, val, str(val))
assert False, "should have raised"
except: pass
try:
genmsg.msgs.Constant(type_, name, None, 'None')
assert False, "should have raised"
except: pass
try:
genmsg.msgs.Constant(type_, name, val, None)
assert False, "should have raised"
except: pass
try:
x.foo = 'bar'
assert False, 'Constant should not allow arbitrary attr assignment'
except: pass
def test_MsgSpec():
def sub_test_MsgSpec(types, names, constants, text, full_name, has_header):
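        # build a MsgSpec and verify that each accessor round-trips its inputs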
m = MsgSpec(types, names, constants, text, full_name)
assert m.types == types
assert m.names == names
assert m.text == text
assert has_header == m.has_header()
assert m.constants == constants
assert list(zip(types, names)) == m.fields()
assert m == MsgSpec(types, names, constants, text, full_name)
return m
from genmsg import MsgSpec, InvalidMsgSpec
from genmsg.msgs import Field
# don't allow duplicate fields
try:
MsgSpec(['int32', 'int64'], ['x', 'x'], [], 'int32 x\nint64 x', 'x/DupFields')
assert False, "should have raised"
except InvalidMsgSpec:
pass
# don't allow invalid fields
try:
MsgSpec(['string['], ['x'], [], 'int32 x\nint64 x', 'x/InvalidFields')
assert False, "should have raised"
except InvalidMsgSpec:
pass
# allow empty msg
empty = sub_test_MsgSpec([], [], [], '', 'x/Nothing', False)
assert [] == empty.fields()
assert [] == empty.parsed_fields()
assert 'x/Nothing' == empty.full_name
assert 'x' == empty.package
assert 'Nothing' == empty.short_name
# one-field
one_field = sub_test_MsgSpec(['int32'], ['x'], [], 'int32 x', 'x/OneInt', False)
# make sure that equals tests every declared field
assert one_field == MsgSpec(['int32'], ['x'], [], 'int32 x', 'x/OneInt')
assert one_field != MsgSpec(['uint32'], ['x'], [], 'int32 x', 'x/OneInt')
assert one_field != MsgSpec(['int32'], ['y'], [], 'int32 x', 'x/OneInt')
assert one_field != MsgSpec(['int32'], ['x'], [], 'uint32 x', 'x/OneInt')
assert one_field != MsgSpec(['int32'], ['x'], [], 'int32 x', 'x/OneIntBad')
# test against __ne__ as well
assert one_field != MsgSpec(['int32'], ['x'], [], 'uint32 x', 'x/OneInt')
assert [Field('x', 'int32')] == one_field.parsed_fields(), "%s vs %s"%([Field('x', 'int32')], one_field.parsed_fields())
#test str
assert "int32 x" == str(one_field).strip()
# test variations of multiple fields and headers
two_fields = sub_test_MsgSpec(['int32', 'string'], ['x', 'str'], [], 'int32 x\nstring str', 'x/TwoFields', False)
assert [Field('x', 'int32'), Field('str', 'string')] == two_fields.parsed_fields()
one_header = sub_test_MsgSpec(['std_msgs/Header'], ['header'], [], 'Header header', 'x/OneHeader', True)
header_and_fields = sub_test_MsgSpec(['std_msgs/Header', 'int32', 'string'], ['header', 'x', 'str'], [], 'Header header\nint32 x\nstring str', 'x/HeaderAndFields', True)
embed_types = sub_test_MsgSpec(['std_msgs/Header', 'std_msgs/Int32', 'string'], ['header', 'x', 'str'], [], 'Header header\nstd_msgs/Int32 x\nstring str', 'x/EmbedTypes', True)
#test strify
assert "int32 x\nstring str" == str(two_fields).strip()
# types and names mismatch
try:
        MsgSpec(['int32', 'int32'], ['intval'], [], 'int32 intval\nint32 y', 'x/Mismatch')
assert False, "types and names must align"
except: pass
# test (not) equals against non msgspec
assert not (one_field == 1)
assert one_field != 1
# test constants
from genmsg.msgs import Constant
msgspec = MsgSpec(['int32'], ['x'], [Constant('int8', 'c', 1, '1')], 'int8 c=1\nuint32 x', 'x/Constants')
assert msgspec.constants == [Constant('int8', 'c', 1, '1')]
# tripwire
str(msgspec)
repr(msgspec)
# test that repr doesn't throw an error
[repr(x) for x in [empty, one_field, one_header, two_fields, embed_types]]
def test_Field():
from genmsg.msgs import Field
field = Field('foo', 'string')
assert field == Field('foo', 'string')
assert field != Field('bar', 'string')
assert field != Field('foo', 'int32')
assert field != 1
assert not field == 1
assert field.name == 'foo'
assert field.type == 'string'
assert field.base_type == 'string'
assert field.is_array == False
assert field.array_len == None
assert field.is_header == False
assert field.is_builtin == True
field = Field('foo', 'std_msgs/String')
assert field.type == 'std_msgs/String'
assert field.base_type == 'std_msgs/String'
assert field.is_array == False
assert field.array_len == None
assert field.is_header == False
assert field.is_builtin == False
field = Field('foo', 'std_msgs/String[5]')
assert field.type == 'std_msgs/String[5]'
assert field.base_type == 'std_msgs/String'
assert field.is_array == True
assert field.array_len == 5
assert field.is_header == False
assert field.is_builtin == False
field = Field('foo', 'std_msgs/String[]')
assert field.type == 'std_msgs/String[]'
assert field.base_type == 'std_msgs/String'
assert field.is_array == True
assert field.array_len == None
assert field.is_header == False
assert field.is_builtin == False
field = Field('foo', 'std_msgs/Header')
assert field.type == 'std_msgs/Header'
assert field.is_header == True
assert field.is_builtin == False
field = Field('foo', 'std_msgs/Header[]')
assert field.type == 'std_msgs/Header[]'
assert field.is_header == False
#tripwire
repr(field)
def test_is_valid_msg_type():
import genmsg.msgs
vals = [
#basic
'F', 'f', 'Foo', 'Foo1',
'std_msgs/String',
# arrays
'Foo[]', 'Foo[1]', 'Foo[10]',
]
for v in vals:
assert genmsg.msgs.is_valid_msg_type(v), "genmsg.msgs.is_valid_msg_type should have returned True for '%s'"%v
# bad cases
vals = [None, '', '#', '%', 'Foo%', 'Woo Woo',
'/', '/String',
'Foo[f]', 'Foo[1d]', 'Foo[-1]', 'Foo[1:10]', 'Foo[', 'Foo]', 'Foo[]Bar']
for v in vals:
assert not genmsg.msgs.is_valid_msg_type(v), "genmsg.msgs.is_valid_msg_type should have returned False for '%s'"%v
def test_is_valid_constant_type():
import genmsg.msgs
valid = ['int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', \
'uint64', 'float32', 'float64', 'char', 'byte', 'string']
invalid = [
'std_msgs/String', '/', 'String',
'time', 'duration','header',
]
for v in valid:
assert genmsg.msgs.is_valid_constant_type(v), "genmsg.msgs.is_valid_constant_type should have returned True for '%s'"%v
for v in invalid:
assert not genmsg.msgs.is_valid_constant_type(v), "genmsg.msgs.is_valid_constant_type should have returned False for '%s'"%v
| 38.996656 | 180 | 0.620583 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,766 | 0.408748 |
7d565d78426b6ee97241efc8582c656e0fcdebc5 | 4,118 | py | Python | custom_components/waste_collection_schedule/waste_collection_schedule/wizard/stadtreinigung_hamburg.py | UBS-P/hacs_waste_collection_schedule | 9ce0fd55010bbab3948f1ee0aa5edb4b65b7d866 | [
"MIT"
]
| 142 | 2020-04-13T18:56:12.000Z | 2022-03-30T19:44:08.000Z | custom_components/waste_collection_schedule/waste_collection_schedule/wizard/stadtreinigung_hamburg.py | UBS-P/hacs_waste_collection_schedule | 9ce0fd55010bbab3948f1ee0aa5edb4b65b7d866 | [
"MIT"
]
| 138 | 2020-04-30T18:11:30.000Z | 2022-03-30T20:56:33.000Z | custom_components/waste_collection_schedule/waste_collection_schedule/wizard/stadtreinigung_hamburg.py | UBS-P/hacs_waste_collection_schedule | 9ce0fd55010bbab3948f1ee0aa5edb4b65b7d866 | [
"MIT"
]
| 89 | 2020-06-16T05:13:08.000Z | 2022-03-28T09:28:25.000Z | #!/usr/bin/env python3
from html.parser import HTMLParser
import inquirer
import requests
# HTML parser that extracts the value attribute of a named <input> element
class InputParser(HTMLParser):
def __init__(self, input_name):
super().__init__()
self._input_name = input_name
self._value = None
@property
def value(self):
return self._value
def handle_starttag(self, tag, attrs):
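        # capture the value attribute of the <input> whose name matches input_name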
if tag == "input":
for attr in attrs:
if attr[0] == "name" and attr[1] == self._input_name:
for attr2 in attrs:
if attr2[0] == "value":
self._value = attr2[1]
break
break
# HTML parser that collects the (label, value) choices of a named <select> element
class OptionParser(HTMLParser):
def __init__(self, select_name):
super().__init__()
self._select_name = select_name
self._within_select = False
self._within_option = False
self._option_name = ""
self._option_value = "-1"
self._choices = []
@property
def choices(self):
return self._choices
def handle_starttag(self, tag, attrs):
if tag == "select":
for attr in attrs:
if attr[0] == "name" and attr[1] == self._select_name:
self._within_select = True
break
elif tag == "option" and self._within_select:
self._within_option = True
for attr in attrs:
if attr[0] == "value":
self._option_value = attr[1]
def handle_endtag(self, tag):
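        # </select> ends collection; </option> flushes the accumulated
        # (label, value) pair into choices and resets per-option state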
if tag == "select":
self._within_select = False
elif tag == "option":
if (
self._within_select
and self._within_option
and len(self._option_name) > 0
and self._option_value != ""
):
self._choices.append((self._option_name, self._option_value))
self._within_option = False
self._option_name = ""
self._option_value = "-1"
def handle_data(self, data):
if self._within_option:
self._option_name += data
def main():
# search for street
questions = [
inquirer.Text("strasse", message="Enter search string for street"),
# inquirer.Text("hausnummer", message="Enter search string for house number"),
]
answers = inquirer.prompt(questions)
answers["hausnummer"] = ""
answers["bestaetigung"] = "true"
answers["mode"] = "search"
r = requests.post(
"https://www.stadtreinigung.hamburg/privatkunden/abfuhrkalender/index.html",
data=answers,
)
    # if the response already resolved to a unique street, its asId is in a hidden input
input_parser = InputParser(input_name="asId")
input_parser.feed(r.text)
if input_parser.value is not None:
answers["asId"] = input_parser.value
else:
# query returned a list of streets
parser = OptionParser(select_name="asId")
parser.feed(r.text)
questions = [
inquirer.List("asId", choices=parser.choices, message="Select street")
]
answers.update(inquirer.prompt(questions))
# search for building number
r = requests.post(
"https://www.stadtreinigung.hamburg/privatkunden/abfuhrkalender/index.html",
data=answers,
)
    # parse the HTML option list of house numbers
parser = OptionParser(select_name="hnId")
parser.feed(r.text)
if len(parser.choices) == 0:
answers["hnId"] = ""
else:
questions = [
inquirer.List("hnId", choices=parser.choices, message="Select house number")
]
answers.update(inquirer.prompt(questions))
print("Copy the following statements into your configuration.yaml:\n")
print("# waste_collection_schedule source configuration")
print("waste_collection_schedule:")
print(" sources:")
print(" - name: stadtreinigung_hamburg")
print(" args:")
print(f" asId: {answers['asId']}")
print(f" hnId: {answers['hnId']}")
if __name__ == "__main__":
main()
| 29.205674 | 93 | 0.573579 | 2,045 | 0.4966 | 0 | 0 | 118 | 0.028655 | 0 | 0 | 962 | 0.233609 |
7d56702dbd9fe5b8f3529654e0855fa2b7b8f074 | 1,480 | py | Python | pythonstudy/convert.py | flyonskycn/pythonstudy | c2eabe40ed369046c80ba9882b2212feb34cdad6 | [
"Apache-2.0"
]
| null | null | null | pythonstudy/convert.py | flyonskycn/pythonstudy | c2eabe40ed369046c80ba9882b2212feb34cdad6 | [
"Apache-2.0"
]
| null | null | null | pythonstudy/convert.py | flyonskycn/pythonstudy | c2eabe40ed369046c80ba9882b2212feb34cdad6 | [
"Apache-2.0"
]
| null | null | null | import chardet
import sys
import codecs
import os
def findEncoding(s):
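    """Detect the character encoding of file s using chardet."""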
file = open(s, mode='rb')
buf = file.read()
result = chardet.detect(buf)
file.close()
return result['encoding']
def convertEncoding(s):
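    """Re-encode a writable file to UTF-8 in place; utf-8/ascii files are left untouched."""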
    if os.access(s, os.W_OK):
encoding = findEncoding(s)
if encoding != 'utf-8' and encoding != 'ascii':
print("convert %s%s to utf-8" % (s, encoding))
contents = ''
with codecs.open(s, "r", encoding) as sourceFile:
contents = sourceFile.read()
with codecs.open(s, "w", "utf-8") as targetFile:
targetFile.write(contents)
else:
print("%s encoding is %s ,there is no need to convert" % (s, encoding))
else:
print("%s read only" %s)
def getAllFile(path, suffix='.'):
"recursive is enable"
f = os.walk(path)
fpath = []
for root, dir, fname in f:
for name in fname:
if name.endswith(suffix):
fpath.append(os.path.join(root, name))
return fpath
def convertAll(path):
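    """Convert every .java file found under path to UTF-8."""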
flist = getAllFile(path, ".java")
for fname in flist:
convertEncoding(fname)
if __name__ == "__main__":
if len(sys.argv) == 1:
path = os.getcwd()
elif len(sys.argv) == 2:
path = sys.argv[1]
else:
print("error parameter")
exit()
convertAll(path) | 23.492063 | 84 | 0.531081 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 196 | 0.132432 |
7d56e588d7a6fdb0c64b6925b9b5823ebec11f36 | 4,547 | py | Python | tests/tests.py | arck1/aio-counter | ffff58bf14ca2f155be5a54c9385481fce5ee58c | [
"MIT"
]
| null | null | null | tests/tests.py | arck1/aio-counter | ffff58bf14ca2f155be5a54c9385481fce5ee58c | [
"MIT"
]
| null | null | null | tests/tests.py | arck1/aio-counter | ffff58bf14ca2f155be5a54c9385481fce5ee58c | [
"MIT"
]
| null | null | null | import unittest
from asyncio import sleep
from async_unittest import TestCase
from aio_counter import AioCounter
from aio_counter.exceptions import AioCounterException
class TestAioCounter(TestCase):
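    # test timings in seconds: short/long callback delays and a counter TTL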
    TIK = 0.3
    TAK = 0.6
    TTL = 1
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.counter = AioCounter(loop=cls.loop)
@classmethod
def tearDownClass(cls) -> None:
super().tearDownClass()
cls.counter.close()
def setUp(self) -> None:
self.counter._count = 0
self.counter._incs.clear()
self.counter._decs.clear()
# close all handlers
self.counter.close()
self.counter._handlers.clear()
def tearDown(self) -> None:
self.counter.close()
async def test_dec(self):
assert self.counter.empty()
self.counter._loop.call_later(self.TIK, self.counter.inc_nowait)
assert self.counter.count == 0
# wait until delayed inc_nowait increment counter
count = await self.counter.dec()
assert count == 0
async def test_inc(self):
assert self.counter.empty()
# fill counter
self.counter._count = self.counter.max_count
assert self.counter.count == self.counter.max_count
self.counter._loop.call_later(self.TIK, self.counter.dec_nowait)
assert self.counter.count == self.counter.max_count
# wait until delayed dec_nowait decrement counter
count = await self.counter.inc()
assert count == self.counter.max_count
def test_dec_nowait(self):
assert self.counter.empty()
try:
self.counter.dec_nowait()
except AioCounterException as e:
assert e
else:
assert False
count = self.counter.inc_nowait()
assert count == 1
assert self.counter.count == 1
count = self.counter.dec_nowait()
assert count == 0
assert self.counter.count == 0
def test_inc_nowait(self):
assert self.counter.empty()
count = self.counter.inc_nowait()
assert count == 1
assert self.counter.count == 1
# fill counter
self.counter._count = self.counter.max_count
try:
self.counter.inc_nowait()
except AioCounterException as e:
assert e
else:
assert False
async def test_ttl_inc(self):
assert self.counter.empty()
# inc with ttl = TTL
await self.counter.inc(self.TTL)
assert self.counter.count == 1
# sleep and inc() should run in one loop
await sleep(self.TTL, loop=self.loop)
# check if count was dec
assert self.counter.count == 0
async def test_bulk_inc(self):
"""
inc() with value > 1 should success only if counter changed to <value > 1> in one moment
:return:
"""
assert self.counter.empty()
# fill counter
self.counter._count = self.counter.max_count - 1
assert self.counter.count == self.counter.max_count - 1
def delayed_check(counter):
assert counter.count == counter.max_count - 1
self.counter._loop.call_later(self.TIK, delayed_check, self.counter)
self.counter._loop.call_later(self.TTL, self.counter.dec_nowait)
assert self.counter.count == self.counter.max_count - 1
await self.counter.inc(value=2)
assert self.counter.count == self.counter.max_count
async def test_bulk_dec(self):
"""
dec() with value > 1 should success only if counter changed to <value > 1> in one moment
:return:
"""
assert self.counter.empty()
await self.counter.inc()
assert self.counter.count == 1
def delayed_check(counter):
assert counter.count == 1
self.counter._loop.call_later(self.TIK, delayed_check, self.counter)
self.counter._loop.call_later(self.TTL, self.counter.inc_nowait)
assert self.counter.count == 1
await self.counter.dec(value=2)
assert self.counter.empty()
async def test_ttl_after_dec(self):
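        # decrement manually before the TTL expires, then sleep past it to
        # exercise the expired-TTL path on an already-empty counter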
assert self.counter.empty()
await self.counter.inc(self.TTL)
assert self.counter.count == 1
count = self.counter.dec_nowait()
assert count == 0
assert self.counter.count == 0
await sleep(self.TTL, loop=self.loop)
if __name__ == '__main__':
unittest.main()
| 25.544944 | 96 | 0.61667 | 4,325 | 0.951177 | 0 | 0 | 230 | 0.050583 | 2,818 | 0.619749 | 512 | 0.112602 |
7d57683f060246ecdbe9fa25924715de937635d2 | 67 | py | Python | dexp/processing/remove_beads/__init__.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
]
| 16 | 2021-04-21T14:09:19.000Z | 2022-03-22T02:30:59.000Z | dexp/processing/remove_beads/__init__.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
]
| 28 | 2021-04-15T17:43:08.000Z | 2022-03-29T16:08:35.000Z | dexp/processing/remove_beads/__init__.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
]
| 3 | 2022-02-08T17:41:30.000Z | 2022-03-18T15:32:27.000Z | from dexp.processing.remove_beads.beadsremover import BeadsRemover
| 33.5 | 66 | 0.895522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |