9759724
|
from PySide2.QtCore import QTimer
import os
from datetime import datetime
def minutes(minutes):
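    # QTimer.start() takes an interval in milliseconds, so convert minutes to ms.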
MINUTE = 60000
return MINUTE * minutes
class NotepadRecovery:
def __init__(self):
self.RECOVERY_DIR = "recovery"
self.TEMP_EXT = '.bak'
# Create a recovery directory if one does not exist.
try:
if not os.path.isdir(self.RECOVERY_DIR):
os.mkdir("recovery")
except Exception as error:
self.terminal("Failed to create the main recovery folder: {e}".format(e=error))
# Number of temps to generate before overwriting the last temps
self.TEMP_LIMIT = 3
        self.recovery_timer = QTimer(self)
        self.recovery_timer.timeout.connect(self.makeDocumentTemps)
        self.recovery_timer.start(minutes(3))
def tempCountInPath(self, path, file_name):
temp_count = 0
if os.path.isdir(path):
for file_obj in os.listdir(path):
if file_obj.lower().endswith(self.TEMP_EXT) and file_name in file_obj:
temp_count += 1
return temp_count
    def removeExcessTempFiles(self, path, unsaved_file_name, temp_count):
        # Delete matching temp files until the count is back under TEMP_LIMIT.
        files = iter(os.listdir(path))
        num_of_files_to_remove = 1
        if temp_count > self.TEMP_LIMIT:
            num_of_files_to_remove = (temp_count - self.TEMP_LIMIT) + 1
        # next(files, None) returns None when the listing is exhausted instead
        # of raising an unhandled StopIteration as the bare next(files) did.
        file_name = next(files, None)
        while num_of_files_to_remove and file_name is not None:
            if file_name.lower().endswith(self.TEMP_EXT) and unsaved_file_name in file_name:
                # os.path.join keeps the path portable across platforms.
                os.remove(os.path.join(path, file_name))
                num_of_files_to_remove -= 1
            file_name = next(files, None)
def makeDocumentTemps(self):
todays_date = datetime.now()
date = todays_date.strftime('%b-%d-%Y')
temp_file_date = todays_date.strftime('%b-%d-%Y_%H_%M_%S')
        temp_dir_path = os.path.join(self.RECOVERY_DIR, 'temp_' + date)
        temp_file_path = ''
        document_text = None
        temp_name = ""
try:
if not os.path.isdir(temp_dir_path):
os.mkdir(temp_dir_path)
except Exception as error:
self.terminal("Failed to create temp folder: {e}".format(e=error))
try:
saved, saves = self.areDocumentsSaved()
if saved is not None:
if not saved:
for unsaved_name in saves:
# Check if any temp files exist in this path, and if
# the number of files exceed the temp limit.
temp_count = self.tempCountInPath(temp_dir_path, unsaved_name)
if temp_count >= self.TEMP_LIMIT:
self.removeExcessTempFiles(temp_dir_path, unsaved_name, temp_count)
document_text = self._notepads[unsaved_name].toHtml()
temp_name = ''.join([temp_file_date, unsaved_name, self.TEMP_EXT])
                        temp_file_path = os.path.join(temp_dir_path, temp_name)
with open(temp_file_path, 'w') as t_f:
t_f.write(document_text)
t_f.flush()
os.fsync(t_f)
except Exception as error:
self.terminal(str(error))
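# --- Hypothetical usage sketch (added for illustration, not part of the
# original file). NotepadRecovery references self.terminal,
# self.areDocumentsSaved and self._notepads without defining them, and
# QTimer(self) needs a QObject parent, so the class only works as a mixin on a
# Qt window that supplies those members. All names below are illustrative.
from PySide2.QtWidgets import QMainWindow

class NotepadWindow(QMainWindow, NotepadRecovery):
    def __init__(self):
        QMainWindow.__init__(self)
        self._notepads = {}  # maps document name -> QTextEdit-like editor
        NotepadRecovery.__init__(self)

    def terminal(self, message):
        # Log sink the mixin expects; a real editor would surface this in the UI.
        print(message)

    def areDocumentsSaved(self):
        # Returns (all_saved, unsaved_names), the shape makeDocumentTemps expects.
        unsaved = [name for name, pad in self._notepads.items()
                   if pad.document().isModified()]
        return (len(unsaved) == 0, unsaved)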
|
StarcoderdataPython
|
6568532
|
<filename>tests/sync_pipfile_test.py
from .conftest import data
from .main_test import copy_file, compare_list_of_string_kw_arg
from pipenv_setup.main import cmd
import pytest
from vistir.compat import Path
@pytest.mark.parametrize(
("source_pipfile_dirname", "update_count"),
[("nasty_0", 23), ("no_original_kws_0", 23)],
)
def test_sync_pipfile_no_original(
capsys, tmp_path, shared_datadir, source_pipfile_dirname, update_count
):
"""
sync --pipfile should reference Pipfile (not Pipfile.lock) when printing results
"""
pipfile_dir = shared_datadir / source_pipfile_dirname
for filename in ("Pipfile", "Pipfile.lock", "setup.py"):
copy_file(pipfile_dir / filename, tmp_path)
with data(str(pipfile_dir), tmp_path) as path:
setup_file = path / "setup.py" # type: Path
cmd(["", "sync", "--pipfile"])
text = setup_file.read_text()
generated_setup = Path("setup.py")
assert generated_setup.exists()
generated_setup_text = generated_setup.read_text()
expected_setup_text = Path("setup.py").read_text()
for kw_arg_names in ("install_requires", "dependency_links"):
assert compare_list_of_string_kw_arg(
generated_setup_text,
expected_setup_text,
kw_arg_names,
ordering_matters=False,
)
captured = capsys.readouterr()
assert "Pipfile.lock" not in captured.out, captured.out
assert "Pipfile" in captured.out, captured.out
def test_sync_dev_pipfile_no_original(tmp_path):
"""
sync --dev --pipfile should add extras_require: {"dev": [blah]} in the absence of an
extras_require keyword
"""
# todo: this test is too simple
with data("self_0", tmp_path) as path:
setup_file = path / "setup.py" # type: Path
cmd(["", "sync", "--dev", "--pipfile"])
text = setup_file.read_text()
assert "pytest~=5.1" in text, text
assert "requirementslib~=1.5" in text, text
|
StarcoderdataPython
|
1735689
|
<gh_stars>0
#!/usr/bin/env python
# encoding: utf-8
"""
blmfire.py
Created by <NAME> on 2011-06-08. <EMAIL>
"""
from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import zip
from io import open
import sys
import os, shutil
import urllib.request
import logging, logging.handlers
from datetime import datetime
import time
import json
# Data sources
FIREFILE = 'blmfiredata.txt'
FIREURL = "https://afsmaps.blm.gov/wmsconnector/com.esri.wms.Esrimap?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetFeatureInfo&SRS=EPSG:4326&BBOX=-180,50,-130,70&WIDTH=1&HEIGHT=1&LAYERS=wmsactivefires&QUERY_LAYERS=wmsactivefires&STYLES=&EXCEPTIONS=application/vnd.ogc.se_xml&&INFO_FORMAT=text/plain&FEATURE_COUNT=100&X=0&Y=0"
FIELDMAP = {
'designation': 'firesadmin.fire.name',
'structuresburned': 'firesadmin.fire.structuresburned',
'structuresthreatened': 'firesadmin.fire.structuresthreatened',
'numberpeople': 'firesadmin.dailyfirerecord.numberpeople',
'acreage': 'firesadmin.fire.estimatedtotalacres',
'cause': 'firesadmin.fire.generalcause',
'primaryfueltype': 'firesadmin.fire.primaryfueltype',
'maintext': 'firesadmin.dailyfirerecord.summary',
'latitude': 'firesadmin.fire.latitude',
'longitude': 'firesadmin.fire.longitude'
}
# Any HTML and JavaScript paths, files and templates are indicated here
FIRETEMPLATE = 'templates/firetemplate.js'
HTMLTEMPLATE = 'templates/current_fires_template.js'
PROJECTPATH = '/datadir/UAFSMOKE/src/firemap/'
HTMLOUT = 'current_fires.js'
SITELOCATION = '/projects/UAFSMOKE/public_html/'
DEPLOYLOCATION = 'js/'
# Deploy resulting file(s) to web root?
DEPLOY = True
# Logging configuration
VERBOSE = True
LOGDIR = 'log'
LOGFILE = 'firemap.log'
log = logging.getLogger('blmfire')
log.setLevel(logging.DEBUG)
def get_raw_fire_data(firefile=FIREFILE, tryremote=True):
"""
Returns list of lines from BLM, either from file or URL
"""
log.info("Attempting to retrieve data from file...")
try:
with open(firefile, encoding='utf-8') as filehandle:
data = filehandle.read()
            if len(data) == 0:
                raise IOError("Empty file.")
log.info("...success!")
except IOError as detail:
if tryremote:
log.warning("Could not open %s: %s" % (FIREFILE, detail))
log.info("Attempting to retrieve URL for fire data...")
filehandle = urllib.request.urlopen(FIREURL)
data = filehandle.read().decode('utf-8')
log.info("...success!")
        else:
            log.error("Could not open %s: %s" % (FIREFILE, detail))
            # Without the remote fallback there is no data to return, so
            # re-raise instead of falling through to an undefined name.
            raise
    return data.splitlines()
def sanitize_dataline(rawstring, blanks=2, quotechars='"_', lower=False):
"""
    Returns a list of stripped (and optionally lower-cased) field values.
    Assumptions: fields are separated by '"' + n spaces + '"', with a trailing
    separator and a double quote at the end of the line.
"""
stripchars = quotechars + ' \n\r\t'
splitstring = '"' + ' ' * blanks + '"'
choplength = (-1) * (blanks + 1)
return [sanitize_fieldvalue(fieldname, stripchars, lower) for fieldname in rawstring[:choplength].split(splitstring)]
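# Worked example (illustrative input, not real BLM feed data): with blanks=2
# the separator is '"  "' and choplength chops the trailing '  "' (3 chars):
#   sanitize_dataline('"NAME"  "ACRES"  "', blanks=2, lower=True)
#   -> ['name', 'acres']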
def sanitize_fieldvalue(rawfield, stripchars, lower=False):
"""
    Returns a sanitised individual field value: strips quote characters from the
    ends, escapes quotes in the middle, and optionally lower-cases the result.
"""
result = rawfield.strip(stripchars)
result = result.replace('"', "'")
result = result.replace("'", r"\'")
if lower:
result = result.lower()
return result
def get_firedata(fieldnames, rawdata):
"""
Returns list of dictionaries from raw CSV data as per BLM web service
"""
data = []
for item in rawdata:
        if len(fieldnames) != len(item):
log.error("An item has %d elements, but there are %d fields.\nitem: %s" % (len(item), len(fieldnames), ', '.join(item)))
else:
fire = {}
for header, value in zip(fieldnames, item):
fire[header] = value
# fix temporary 2015 issue
if fire['firesadmin.fire.name'] != "Rex Complex":
data.append(fire)
return data
def select_relevant_values(dictlist):
"""
Rewrite fire data to fit what is needed for Google map
"""
firevalues = []
for item in dictlist:
firedata = {}
for field in FIELDMAP:
try:
firedata[field] = item[FIELDMAP[field]]
except KeyError:
firedata[field] = None
if not firedata['acreage']:
firedata['acreage'] = 0
lastupdateddt = datetime.fromtimestamp(
int(item['firesadmin.fire.lastupdatetime'])//1000)
firedata['lastupdated'] = datetime.strftime(
lastupdateddt, '%d %b %Y, %H:%M')
firedata['current'] = 'true'
if ((datetime.now() - lastupdateddt).days > 8):
firedata['current'] = 'false'
discoverdt = datetime.fromtimestamp(
int(item['firesadmin.fire.discoverydatetime'])//1000)
firedata['discovered'] = datetime.strftime(
discoverdt, '%d %b %Y, %H:%M')
firevalues.append(firedata)
log.info("Generated Google Maps JavaScript for %d current fires." % len(firevalues))
return firevalues
def fill_template(firelist, template=FIRETEMPLATE):
"""
Iterates over list of dictionaries, reads template and fills it for each fire dictionary. Returns JavaScript code.
"""
js_code = ''
js_template = open(os.path.join(PROJECTPATH, template), encoding='utf-8').read()
for fire in firelist:
js_code += js_template % fire
return js_code
def fill_html(fires, template=HTMLTEMPLATE, outfile=HTMLOUT):
"""
Generates web page
"""
repdict = {
'firelist': fires,
'date': datetime.strftime(datetime.now(), '%d %b %Y, %H:%M')
}
html_template = open(os.path.join(PROJECTPATH, template), encoding='utf-8').read()
outfile = os.path.join(PROJECTPATH, outfile)
html_out = open(outfile, 'w')
html_markup = html_template % repdict
html_out.write(html_markup)
html_out.close()
os.chmod(outfile, 0o664)
log.info("Wrote current firedata into %s." % outfile)
return outfile
def deploy_map(filename=HTMLOUT, location=SITELOCATION):
"""
Copies generated file to live file system location of web site
"""
log.info("Attempting to deploy file %s to location %s." %
(filename, location))
filename = os.path.join(PROJECTPATH, filename)
try:
shutil.copy(filename, location)
log.info("...success!")
except Exception as detail:
log.error("Copy operation failed: %s" % detail)
        sys.exit(1)
def main():
# Simple logging to file
logto = os.path.join(PROJECTPATH, LOGDIR, LOGFILE)
handler = logging.handlers.TimedRotatingFileHandler(
filename=logto, when='D', interval=1, backupCount=20)
if VERBOSE:
handler.setLevel(logging.DEBUG)
else:
handler.setLevel(logging.WARNING)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
# change into working directory and get data
os.chdir(PROJECTPATH)
data = get_raw_fire_data()
fieldnames = sanitize_dataline(data[0], blanks=3, lower=True)
rawdata = [sanitize_dataline(
item.strip(), blanks=2, lower=False) for item in data[1::2]]
# the transformed data will be a list of dictionaries
data = get_firedata(fieldnames, rawdata)
finalfirelist = select_relevant_values(data)
fires = fill_template(finalfirelist)
outfile = fill_html(fires)
if DEPLOY:
deployto = os.path.join(SITELOCATION, DEPLOYLOCATION)
deploy_map(location=deployto)
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
6531710
|
<reponame>TheTechRobo/install-palc-plus
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as f:
return f.read()
setup(
name='py-getch and cprint',
version='1.0.1',
description='Portable getch() for Python. And -- printing in colour in Python',
long_description=__doc__,
author='<NAME> and <NAME>',
author_email='<EMAIL>',
url='https://github.com/thetechrobo/install-palc-plus',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'': ['LICENSE']},
zip_safe=False,
entry_points={},
)
|
StarcoderdataPython
|
5080239
|
#!/usr/bin/env python3
# author: greyshell
# description: how to use queue ADT
from collections import deque as queue
def main():
q = queue()
    # enqueue: add items at the rear
q.append(9)
q.append(5)
q.append(3)
q.append(1)
# display the queue elements
print(f"display the queue elements: {list(q)}")
    # dequeue: delete items from the front
    data = q.popleft()
    print(f"item deleted from front: {data}")
    print(f"display the queue elements: {list(q)}")
    # peek
    print(f"peek the front: {q[0]}")
    print(f"peek the rear: {q[-1]}")
    # pop from the rear (a deque supports removal at both ends)
    data = q.pop()
    print(f"item deleted from rear: {data}")
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
1771264
|
"""pbs_map test suite."""
import unittest
import random
import sys
import pbs_map as ppm
print """
Note: If jobs are accepted to by the batch system but not executed, this test suite will hang.
"""
# TODO: Use showstart to identify hanging submissions
class IdentityWorker(ppm.Worker):
def do_work(self, x):
return x
class ResumedWork(object):
def __init__(self, x):
self.num_times_resumed = 0
self.x = x
def is_finished(self):
return self.num_times_resumed == 3
def next_step(self):
self.num_times_resumed += 1
return self
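# ResumingIdentityWorker below exercises the resume protocol this suite assumes:
# do_work raises ppm.IncompleteTask carrying the in-progress ResumedWork, and
# pbs_map is expected to re-dispatch it to resume_work until is_finished() is
# true (after three next_step() calls here).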
class ResumingIdentityWorker(IdentityWorker):
def resume_work(self, x):
if not x.is_finished():
raise ppm.IncompleteTask(x.next_step())
else:
return super(ResumingIdentityWorker, self).do_work(x.x)
def do_work(self, x):
return self.resume_work(ResumedWork(x))
class SingleShotIdentityWorker(ppm.Worker):
"""Raise an exception if used more than once.
The pbs map client should use this worker for one unit, and then
exit.
"""
def startup(self):
self.first_shot = True
def do_work(self, x):
if self.first_shot:
self.first_shot = False
return x
else:
raise SingleShotError()
class ResumingSingleShotIdentityWorker(SingleShotIdentityWorker, ResumingIdentityWorker):
pass
class RepeatListWorker(ppm.Worker):
"""Given an x, return the singleton list containing x."""
def startup(self, num_times):
self.num_times=num_times
def do_work(self, x):
return [x] * self.num_times
class SingletonListWorker(RepeatListWorker):
"""Given an x, return the singleton list containing x."""
def startup(self):
self.num_times=1
class SingleShotError(Exception):
pass
def run_pbs_map(*args, **kwargs):
return list(ppm.pbs_map(*args, **kwargs))
def run_pbs_mapcat(*args, **kwargs):
return list(ppm.pbs_mapcat(*args, **kwargs))
class PBSMapCase(object):
def run_pbs_map(self, *args, **kwargs):
return run_pbs_map(*args, **kwargs)
def single_iterators(iterator):
for x in iterator:
def single_iterator():
yield x
yield single_iterator()
class ReusedPBSMapCase(PBSMapCase):
def run_pbs_map(self, cls, work, **kwargs):
if 'startup_args' in kwargs and kwargs['startup_args']:
startup_args = kwargs['startup_args']
else:
startup_args = tuple()
results = []
with ppm.PBSMap(cls, startup_args, **kwargs) as mapper:
for single_work in work:
for result in mapper.map([single_work]):
results.append(result)
return results
class PBSMapCatCase(PBSMapCase):
def run_pbs_map(self, *args, **kwargs):
return run_pbs_mapcat(*args, **kwargs)
run_pbs_mapcat=run_pbs_map
class SingleShotPBSMapCase(object):
def run_pbs_map(self, *args, **kwargs):
kwargs['single_shot'] = True
return run_pbs_map(*args, **kwargs)
class RangeCase(object):
max_range=100
WorkerClass=IdentityWorker
def test_pbs_map(self):
xs = range(self.max_range)
results = self.run_pbs_map(self.WorkerClass, xs, queue_timeout=5, num_clients=10)
self.assertEqual(sorted(results), sorted(xs))
class ResumingRangeCase(RangeCase):
WorkerClass=ResumingIdentityWorker
class MapRangeCase(PBSMapCase, RangeCase):
pass
class MapCatRangeCase(PBSMapCatCase, RangeCase):
WorkerClass=SingletonListWorker
class RepeatRangeCase(object):
"""Run pbs map over a range list, expecting multiple copies of
each element to be returned."""
max_range=100
WorkerClass=RepeatListWorker
def test_pbs_mapcat(self):
xs = range(self.max_range)
num_times = random.randint(1,5)
results = self.run_pbs_mapcat(self.WorkerClass, xs, queue_timeout=5, num_clients=10, startup_args=(num_times,))
self.assertEqual(sorted(results), sorted(xs * num_times))
class MapCatRepeatRangeTestCase(RepeatRangeCase, PBSMapCatCase, unittest.TestCase):
pass
class SingleShotFailureCase(PBSMapCase):
"""By submitting enough jobs to guarantee each client will see at
least two, and using PBSMapCase instead of SingleShotPBSMapCase,
we ensure that a single shot worker should raise an exception.
This tests both our ability to catch worker exceptions, and the
expected behavior of the singleshot worker.
"""
WorkerClass=SingleShotIdentityWorker
max_range=100
def test_single_shot_failure(self):
num_clients=10
assert self.max_range > num_clients
xs = range(self.max_range)
try:
results = self.run_pbs_map(self.WorkerClass, xs, queue_timeout=5, num_clients=num_clients)
assert False, "Single shot error not raised. results = %s" % results
        except Exception as e:
# There are some weird semantics on the type of the exception depending on how this test is called.
self.assertTrue(str(e).find('SingleShotError') > 0, 'did not receive a SingleShotError: %s' % str(e) )
class SingleShotSuccessCase(SingleShotPBSMapCase):
WorkerClass=SingleShotIdentityWorker
max_range=100
def test_single_shot_success(self):
num_clients=10
assert self.max_range > num_clients
xs = range(self.max_range)
try:
self.run_pbs_map(self.WorkerClass, xs, queue_timeout=5, num_clients=num_clients)
except SingleShotError:
assert False, "Single shot error was improperly raised."
class SingleShotFailureTestCase(SingleShotFailureCase, unittest.TestCase):
pass
class SingleShotSuccessTestCase(SingleShotSuccessCase, unittest.TestCase):
pass
class SingleShotMapIdentityTestCase(SingleShotPBSMapCase, MapRangeCase, unittest.TestCase):
pass
class FailingWorkerMixin(object):
"""Simulate processor faillure by wrapping a worker and randomly
sys.exiting instead of doing the work."""
failure_probability = 0.05
def do_work(self, x):
if random.random() <= self.failure_probability:
sys.exit(1)
else:
return super(FailingWorkerMixin, self).do_work(x)
class FailingIdentityWorker(FailingWorkerMixin, IdentityWorker):
pass
class ResumingFailingIdentityWorker(ResumingIdentityWorker, FailingWorkerMixin, IdentityWorker):
pass
class MapFailingIdentityTestCase(MapRangeCase, unittest.TestCase):
WorkerClass=FailingIdentityWorker
class SingleShotMapFailingIdentityTestCase(SingleShotPBSMapCase, MapRangeCase, unittest.TestCase):
WorkerClass=FailingIdentityWorker
class CompleteFailureWorkerMixin(FailingWorkerMixin):
# failure_probability = 0.
failure_probability = 1.
    # def test_complete_failure_broken(self):
    #     # Complete failure tests should have a failure_probability = 1.
    #     # Presently however, a complete failure causes the system to go
    #     # into an infinite loop, preventing the rest of the test from being run.
    #     self.assertTrue(failure_probability < 1., "Unable to detect complete failures.")
class CompleteFailureIdentityWorker(CompleteFailureWorkerMixin, IdentityWorker):
pass
class ResumingCompleteFailureIdentityWorker(ResumingIdentityWorker, CompleteFailureIdentityWorker):
pass
class MapFailureCase(PBSMapCase):
WorkerClass=CompleteFailureIdentityWorker
max_range=10
def test_pbs_map(self):
xs = range(self.max_range)
try:
# This isn't clear. Am I trying to catch a PBSMapError, or a TestException?
# Depending on how I call python, both are possible
self.assertRaises(ppm.PBSMapError, self.run_pbs_map, self.WorkerClass, xs, queue_timeout=5, num_clients=10)
        except Exception as e:
self.assertTrue(isinstance(e, TestException))
class MapCompleteFailureIdentityTestCase(MapFailureCase, unittest.TestCase):
pass
class TestException(Exception):
pass
class ExceptionRaisingWorkerMixin(object):
"""Randomly raise an exception instead of doing work."""
failure_probability = 0.05
def do_work(self, x):
if random.random() <= self.failure_probability:
raise TestException()
else:
return super(ExceptionRaisingWorkerMixin, self).do_work(x)
class ExceptionRaisingIdentityWorker(ExceptionRaisingWorkerMixin, IdentityWorker):
pass
class MapExceptionCase(PBSMapCase):
WorkerClass=ExceptionRaisingIdentityWorker
max_range=100
def test_raise_exception(self):
xs = range(self.max_range)
try:
self.run_pbs_map(self.WorkerClass, xs, queue_timeout=5, num_clients=10)
raise Exception("TestException was not raised.")
except Exception as e:
self.assertEqual(ppm.parse_worker_info(e.__class__), ppm.parse_worker_info(TestException().__class__))
class MapExceptionTestCase(MapExceptionCase, unittest.TestCase):
pass
class SingleShotMapExceptionTestCase(SingleShotPBSMapCase, MapExceptionCase, unittest.TestCase):
pass
# Resume cases
class ResumedSingleShotFailureTestCase(SingleShotFailureTestCase, unittest.TestCase):
WorkerClass=ResumingSingleShotIdentityWorker
max_range=20
class ResumedSingleShotSuccessTestCase(SingleShotSuccessTestCase):
WorkerClass=ResumingSingleShotIdentityWorker
max_range=20
class ResumedSingleShotMapIdentityTestCase(SingleShotMapIdentityTestCase):
WorkerClass=ResumingSingleShotIdentityWorker
max_range=20
class ResumedMapFailingIdentityTestCase(MapFailingIdentityTestCase):
WorkerClass=ResumingFailingIdentityWorker
max_range=20
class ResumedSingleShotMapFailingIdentityTestCase(SingleShotMapFailingIdentityTestCase):
WorkerClass=ResumingFailingIdentityWorker
max_range=20
class ResumedMapCompleteFailureIdentityTestCase(MapCompleteFailureIdentityTestCase):
WorkerClass=ResumingCompleteFailureIdentityWorker
max_range=20
class PBSMapTestCase(RangeCase, PBSMapCase, unittest.TestCase):
pass
class ReusedPBSMapTestCase(RangeCase, ReusedPBSMapCase, unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
|
StarcoderdataPython
|
5011486
|
<gh_stars>0
# Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define the Impala target context for Numba and 'builtin' impls"""
from __future__ import absolute_import
import llvm.core as lc
import llvm.passes as lp
import llvm.ee as le
from numba import types as ntypes
from numba import cgutils, lowering
from numba.targets.base import BaseContext
from numba.targets.imputils import Registry, implement, impl_attribute
from impala.udf import stringimpl
from impala.udf.abi import ABIHandling, raise_return_type
from impala.udf.types import (
FunctionContext, AnyVal, BooleanVal, BooleanValType, TinyIntVal,
TinyIntValType, SmallIntVal, SmallIntValType, IntVal, IntValType,
BigIntVal, BigIntValType, FloatVal, FloatValType, DoubleVal, DoubleValType,
StringVal, StringValType)
from impala.udf.impl_utils import (
AnyValStruct, BooleanValStruct, TinyIntValStruct, SmallIntValStruct,
IntValStruct, BigIntValStruct, FloatValStruct, DoubleValStruct,
StringValStruct)
from impala.udf.impl_utils import (
precompiled, _get_is_null, _set_is_null, _conv_numba_struct_to_clang)
registry = Registry()
register_function = registry.register
register_attribute = registry.register_attr
# ctor impls
def _ctor_factory(Struct, Type, *input_args):
@implement(Type, *input_args)
def Val_ctor(context, builder, sig, args):
[x] = args
v = Struct(context, builder)
_set_is_null(builder, v, cgutils.false_bit)
v.val = x
return v._getvalue()
return register_function(Val_ctor)
BooleanVal_ctor = _ctor_factory(BooleanValStruct, BooleanValType, ntypes.int8)
TinyIntVal_ctor = _ctor_factory(TinyIntValStruct, TinyIntValType, ntypes.int8)
SmallIntVal_ctor = _ctor_factory(SmallIntValStruct, SmallIntValType, ntypes.int16)
IntVal_ctor = _ctor_factory(IntValStruct, IntValType, ntypes.int32)
BigIntVal_ctor = _ctor_factory(BigIntValStruct, BigIntValType, ntypes.int64)
FloatVal_ctor = _ctor_factory(FloatValStruct, FloatValType, ntypes.float32)
DoubleVal_ctor = _ctor_factory(DoubleValStruct, DoubleValType, ntypes.float64)
@register_function
@implement(StringValType, ntypes.string)
def StringVal_ctor(context, builder, sig, args):
"""StringVal(ntypes.string)"""
[x] = args
iv = StringValStruct(context, builder)
_set_is_null(builder, iv, cgutils.false_bit)
fndesc = lowering.ExternalFunctionDescriptor(
'strlen', ntypes.uintp, [ntypes.CPointer(ntypes.char)])
func = context.declare_external_function(
cgutils.get_module(builder), fndesc)
strlen_x = context.call_external_function(
builder, func, fndesc.argtypes, [x])
len_x = builder.trunc(strlen_x, lc.Type.int(32))
iv.len = len_x
iv.ptr = x
return iv._getvalue()
# *Val attributes
def _is_null_attr_factory(Struct, Val):
@impl_attribute(Val, "is_null", ntypes.boolean)
def Val_is_null(context, builder, typ, value):
v = Struct(context, builder, value=value)
is_null = _get_is_null(builder, v)
return is_null
return register_attribute(Val_is_null)
def _val_attr_factory(Struct, Val, retty):
@impl_attribute(Val, "val", retty)
def Val_val(context, builder, typ, value):
v = Struct(context, builder, value=value)
return v.val
return register_attribute(Val_val)
# *Val.is_null
BooleanVal_is_null = _is_null_attr_factory(BooleanValStruct, BooleanVal)
TinyIntVal_is_null = _is_null_attr_factory(TinyIntValStruct, TinyIntVal)
SmallIntVal_is_null = _is_null_attr_factory(SmallIntValStruct, SmallIntVal)
IntVal_is_null = _is_null_attr_factory(IntValStruct, IntVal)
BigIntVal_is_null = _is_null_attr_factory(BigIntValStruct, BigIntVal)
FloatVal_is_null = _is_null_attr_factory(FloatValStruct, FloatVal)
DoubleVal_is_null = _is_null_attr_factory(DoubleValStruct, DoubleVal)
StringVal_is_null = _is_null_attr_factory(StringValStruct, StringVal)
# *Val.val
BooleanVal_val = _val_attr_factory(BooleanValStruct, BooleanVal, ntypes.int8)
TinyIntVal_val = _val_attr_factory(TinyIntValStruct, TinyIntVal, ntypes.int8)
SmallIntVal_val = _val_attr_factory(SmallIntValStruct, SmallIntVal, ntypes.int16)
IntVal_val = _val_attr_factory(IntValStruct, IntVal, ntypes.int32)
BigIntVal_val = _val_attr_factory(BigIntValStruct, BigIntVal, ntypes.int64)
FloatVal_val = _val_attr_factory(FloatValStruct, FloatVal, ntypes.float32)
DoubleVal_val = _val_attr_factory(DoubleValStruct, DoubleVal, ntypes.float64)
@register_attribute
@impl_attribute(StringVal, "len", ntypes.int32)
def StringVal_len(context, builder, typ, value):
"""StringVal::len"""
iv = StringValStruct(context, builder, value=value)
return iv.len
@register_attribute
@impl_attribute(StringVal, "ptr", ntypes.CPointer(ntypes.uint8))
def StringVal_ptr(context, builder, typ, value):
"""StringVal::ptr"""
iv = StringValStruct(context, builder, value=value)
return iv.ptr
# impl "builtins"
@register_function
@implement('is', AnyVal, ntypes.none)
def anyval_is_none_impl(context, builder, sig, args):
[x, y] = args
val = AnyValStruct(context, builder, value=x)
return builder.trunc(val.is_null, lc.Type.int(1))
def starval_is_none_impl(context, builder, sig, args):
[x, y] = args
x = builder.extract_value(x, 0)
val = AnyValStruct(context, builder, value=x)
return builder.trunc(val.is_null, lc.Type.int(1))
register_function(implement('is', BooleanVal, ntypes.none)(starval_is_none_impl))
register_function(implement('is', TinyIntVal, ntypes.none)(starval_is_none_impl))
register_function(implement('is', SmallIntVal, ntypes.none)(starval_is_none_impl))
register_function(implement('is', IntVal, ntypes.none)(starval_is_none_impl))
register_function(implement('is', BigIntVal, ntypes.none)(starval_is_none_impl))
register_function(implement('is', FloatVal, ntypes.none)(starval_is_none_impl))
register_function(implement('is', DoubleVal, ntypes.none)(starval_is_none_impl))
@register_function
@implement(ntypes.len_type, StringVal)
def len_stringval_impl(context, builder, sig, args):
[s] = args
val = StringValStruct(context, builder, value=s)
return val.len
@register_function
@implement("==", ntypes.CPointer(ntypes.uint8), ntypes.CPointer(ntypes.uint8))
def eq_ptr_impl(context, builder, sig, args):
[p1, p2] = args
return builder.icmp(lc.ICMP_EQ, p1, p2)
@register_function
@implement("==", StringVal, StringVal)
def eq_stringval(context, builder, sig, args):
module = cgutils.get_module(builder)
precomp_func = context._get_precompiled_function("EqStringValImpl")
func = module.get_or_insert_function(
precomp_func.type.pointee, precomp_func.name)
[s1, s2] = args
cs1 = _conv_numba_struct_to_clang(builder, s1, func.args[0].type)
cs2 = _conv_numba_struct_to_clang(builder, s2, func.args[1].type)
result = builder.call(func, [cs1, cs2])
return result # ret bool so no need to raise type
@register_function
@implement("!=", StringVal, StringVal)
def neq_stringval(context, builder, sig, args):
eq = eq_stringval(context, builder, sig, args)
neq = builder.xor(lc.Constant.int(lc.Type.int(1), 1), eq)
return neq
@register_function
@implement("getitem", StringVal, ntypes.intc)
def getitem_stringval(context, builder, sig, args):
module = cgutils.get_module(builder)
precomp_func = context._get_precompiled_function("GetItemStringValImpl")
func = module.get_or_insert_function(
precomp_func.type.pointee, precomp_func.name)
[s, i] = args
cs = _conv_numba_struct_to_clang(builder, s, func.args[0].type)
result = builder.call(func, [cs, i])
return raise_return_type(context, builder, StringVal, result)
@register_function
@implement("+", StringVal, StringVal)
def add_stringval(context, builder, sig, args):
module = cgutils.get_module(builder)
precomp_func = context._get_precompiled_function("AddStringValImpl")
func = module.get_or_insert_function(
precomp_func.type.pointee, precomp_func.name)
fnctx_arg = context.get_arguments(cgutils.get_function(builder))[0]
cfnctx_arg = builder.bitcast(fnctx_arg, func.args[0].type)
[s1, s2] = args
cs1 = _conv_numba_struct_to_clang(builder, s1, func.args[1].type)
cs2 = _conv_numba_struct_to_clang(builder, s2, func.args[2].type)
result = builder.call(func, [cfnctx_arg, cs1, cs2])
return raise_return_type(context, builder, StringVal, result)
LLVM_TYPE = {
AnyVal: precompiled.get_type_named("struct.impala_udf::AnyVal"),
BooleanVal: precompiled.get_type_named("struct.impala_udf::BooleanVal"),
TinyIntVal: precompiled.get_type_named("struct.impala_udf::TinyIntVal"),
SmallIntVal: precompiled.get_type_named("struct.impala_udf::SmallIntVal"),
IntVal: precompiled.get_type_named("struct.impala_udf::IntVal"),
BigIntVal: precompiled.get_type_named("struct.impala_udf::BigIntVal"),
FloatVal: precompiled.get_type_named("struct.impala_udf::FloatVal"),
DoubleVal: precompiled.get_type_named("struct.impala_udf::DoubleVal"),
StringVal: precompiled.get_type_named("struct.impala_udf::StringVal"),
}
TYPE_LAYOUT = {
AnyVal: AnyValStruct,
BooleanVal: BooleanValStruct,
TinyIntVal: TinyIntValStruct,
SmallIntVal: SmallIntValStruct,
IntVal: IntValStruct,
BigIntVal: BigIntValStruct,
FloatVal: FloatValStruct,
DoubleVal: DoubleValStruct,
StringVal: StringValStruct,
}
class ImpalaTargetContext(BaseContext):
_impala_types = (AnyVal, BooleanVal, TinyIntVal, SmallIntVal, IntVal,
BigIntVal, FloatVal, DoubleVal, StringVal)
def init(self):
self.tm = le.TargetMachine.new()
# insert registered impls
self.insert_func_defn(registry.functions)
self.insert_attr_defn(registry.attributes)
self.insert_func_defn(stringimpl.registry.functions)
self.insert_attr_defn(stringimpl.registry.attributes)
self.optimizer = self.build_pass_manager()
# once per context
self._fnctxtype = precompiled.get_type_named(
"class.impala_udf::FunctionContext")
def _get_precompiled_function(self, name):
fns = [fn for fn in precompiled.functions if name in fn.name]
assert len(fns) == 1
return fns[0]
def cast(self, builder, val, fromty, toty):
if fromty not in self._impala_types and toty not in self._impala_types:
return super(ImpalaTargetContext, self).cast(
builder, val, fromty, toty)
if fromty == toty:
return val
# handle NULLs and Nones
if fromty == ntypes.none and toty in self._impala_types:
iv = TYPE_LAYOUT[toty](self, builder)
_set_is_null(builder, iv, cgutils.true_bit)
return iv._getvalue()
if fromty in self._impala_types and toty == AnyVal:
iv1 = TYPE_LAYOUT[fromty](self, builder, value=val)
is_null = _get_is_null(builder, iv1)
iv2 = AnyValStruct(self, builder)
            # this is equivalent to _set_is_null, but changes the GEP because
            # of AnyVal's structure
byte = builder.zext(is_null, lc.Type.int(8))
builder.store(byte, builder.gep(
iv2._getpointer(),
[lc.Constant.int(lc.Type.int(32), 0)] * 2, inbounds=True))
return iv2._getvalue()
if fromty == BooleanVal:
v = BooleanValStruct(self, builder, val)
return self.cast(builder, v.val, ntypes.boolean, toty)
if fromty == TinyIntVal:
v = TinyIntValStruct(self, builder, val)
return self.cast(builder, v.val, ntypes.int8, toty)
if fromty == SmallIntVal:
v = SmallIntValStruct(self, builder, val)
return self.cast(builder, v.val, ntypes.int16, toty)
if fromty == IntVal:
v = IntValStruct(self, builder, val)
return self.cast(builder, v.val, ntypes.int32, toty)
if fromty == BigIntVal:
v = BigIntValStruct(self, builder, val)
return self.cast(builder, v.val, ntypes.int64, toty)
if fromty == FloatVal:
v = FloatValStruct(self, builder, val)
return self.cast(builder, v.val, ntypes.float32, toty)
if fromty == DoubleVal:
v = DoubleValStruct(self, builder, val)
return self.cast(builder, v.val, ntypes.float64, toty)
# no way fromty is a *Val starting here
if toty == BooleanVal:
val = super(ImpalaTargetContext, self).cast(
builder, val, fromty, ntypes.int8)
return BooleanVal_ctor(self, builder, None, [val])
if toty == TinyIntVal:
val = super(ImpalaTargetContext, self).cast(
builder, val, fromty, ntypes.int8)
return TinyIntVal_ctor(self, builder, None, [val])
if toty == SmallIntVal:
val = super(ImpalaTargetContext, self).cast(
builder, val, fromty, ntypes.int16)
return SmallIntVal_ctor(self, builder, None, [val])
if toty == IntVal:
val = super(ImpalaTargetContext, self).cast(
builder, val, fromty, ntypes.int32)
return IntVal_ctor(self, builder, None, [val])
if toty == BigIntVal:
val = super(ImpalaTargetContext, self).cast(
builder, val, fromty, ntypes.int64)
return BigIntVal_ctor(self, builder, None, [val])
if toty == FloatVal:
val = super(ImpalaTargetContext, self).cast(
builder, val, fromty, ntypes.float32)
return FloatVal_ctor(self, builder, None, [val])
if toty == DoubleVal:
val = super(ImpalaTargetContext, self).cast(
builder, val, fromty, ntypes.float64)
return DoubleVal_ctor(self, builder, None, [val])
if toty == StringVal:
return StringVal_ctor(self, builder, None, [val])
return super(ImpalaTargetContext, self).cast(
builder, val, fromty, toty)
def get_constant_string(self, builder, ty, val):
assert ty == ntypes.string
literal = lc.Constant.stringz(val)
gv = cgutils.get_module(builder).add_global_variable(
literal.type, 'str_literal')
gv.linkage = lc.LINKAGE_PRIVATE
gv.initializer = literal
gv.global_constant = True
# gep gets pointer to first element of the constant byte array
return gv.gep([lc.Constant.int(lc.Type.int(32), 0)] * 2)
def get_constant_struct(self, builder, ty, val):
# override for converting literals to *Vals, incl. None
if ty in self._impala_types and val is None:
iv = TYPE_LAYOUT[ty](self, builder)
_set_is_null(builder, iv, cgutils.true_bit)
return iv._getvalue()
elif ty == BooleanVal:
const = lc.Constant.int(lc.Type.int(8), val)
return BooleanVal_ctor(self, builder, None, [const])
elif ty == TinyIntVal:
const = lc.Constant.int(lc.Type.int(8), val)
return TinyIntVal_ctor(self, builder, None, [const])
elif ty == SmallIntVal:
const = lc.Constant.int(lc.Type.int(16), val)
return SmallIntVal_ctor(self, builder, None, [const])
elif ty == IntVal:
const = lc.Constant.int(lc.Type.int(32), val)
return IntVal_ctor(self, builder, None, [const])
elif ty == BigIntVal:
const = lc.Constant.int(lc.Type.int(64), val)
return BigIntVal_ctor(self, builder, None, [const])
elif ty == FloatVal:
const = lc.Constant.real(lc.Type.float(), val)
return FloatVal_ctor(self, builder, None, [const])
elif ty == DoubleVal:
const = lc.Constant.real(lc.Type.double(), val)
return DoubleVal_ctor(self, builder, None, [const])
elif ty == StringVal:
iv = StringValStruct(self, builder)
_set_is_null(builder, iv, cgutils.false_bit)
iv.len = lc.Constant.int(lc.Type.int(32), len(val))
iv.ptr = self.get_constant_string(builder, ntypes.string, val)
return iv._getvalue()
else:
return super(ImpalaTargetContext, self).get_constant_struct(
builder, ty, val)
def get_struct_type(self, struct):
if hasattr(struct, '_name'):
# our custom named structs
return precompiled.get_type_named(struct._name)
else:
return super(ImpalaTargetContext, self).get_struct_type(struct)
def get_data_type(self, ty):
if ty in LLVM_TYPE:
return LLVM_TYPE[ty]
elif ty == FunctionContext:
return lc.Type.pointer(self._fnctxtype)
else:
return super(ImpalaTargetContext, self).get_data_type(ty)
def get_array(self, builder, itemvals, itemtys):
# only handle uniform type
assert all(x == itemtys[0] for x in itemtys)
ty = itemtys[0]
if ty not in self._impala_types:
raise NotImplementedError(
"Arrays of non-Impala types not supported")
def build_pass_manager(self):
opt = 0 # let Impala optimize
# opt = 3 # optimize ourselves
pms = lp.build_pass_managers(
tm=self.tm, opt=opt, loop_vectorize=True, fpm=False)
return pms.pm
def finalize(self, func, restype, argtypes):
func.verify()
func.linkage = lc.LINKAGE_INTERNAL
module = func.module
# Generate wrapper to adapt into Impala ABI
abi = ABIHandling(self, func, restype, argtypes)
wrapper = abi.build_wrapper("numba_udf." + func.name)
module.verify()
self.optimizer.run(module)
return wrapper
|
StarcoderdataPython
|
9665901
|
<gh_stars>1-10
# flowapp/views/admin.py
from datetime import datetime, timedelta
from flask import Blueprint, render_template, redirect, flash, request, url_for
from sqlalchemy.exc import IntegrityError
from ..forms import UserForm, ActionForm, OrganizationForm, CommunityForm
from ..models import User, Action, Organization, Role, insert_user, get_existing_action, Community, \
get_existing_community, Log
from ..auth import auth_required, admin_required
from flowapp import db
admin = Blueprint('admin', __name__, template_folder='templates')
@admin.route('/log', methods=['GET'], defaults={"page": 1})
@admin.route('/log/<int:page>', methods=['GET'])
@auth_required
@admin_required
def log(page):
"""
Displays logs for last two days
"""
per_page = 20
now = datetime.now()
week_ago = now - timedelta(weeks=1)
logs = Log.query.order_by(Log.time.desc()).filter(Log.time > week_ago).paginate(page,per_page,error_out=False)
return render_template('pages/logs.j2', logs=logs)
@admin.route('/user', methods=['GET', 'POST'])
@auth_required
@admin_required
def user():
form = UserForm(request.form)
form.role_ids.choices = [(g.id, g.name)
for g in db.session.query(Role).order_by('name')]
form.org_ids.choices = [(g.id, g.name)
for g in db.session.query(Organization).order_by('name')]
if request.method == 'POST' and form.validate():
# test if user is unique
exist = db.session.query(User).filter_by(uuid=form.uuid.data).first()
if not exist:
insert_user(
uuid=form.uuid.data,
name=form.name.data,
phone=form.phone.data,
email=form.email.data,
comment=form.comment.data,
role_ids=form.role_ids.data,
org_ids=form.org_ids.data)
flash('User saved')
return redirect(url_for('admin.users'))
else:
flash(u'User {} already exists'.format(
form.email.data), 'alert-danger')
action_url = url_for('admin.user')
return render_template('forms/simple_form.j2', title="Add new user to Flowspec", form=form, action_url=action_url)
@admin.route('/user/edit/<int:user_id>', methods=['GET', 'POST'])
@auth_required
@admin_required
def edit_user(user_id):
user = db.session.query(User).get(user_id)
form = UserForm(request.form, obj=user)
form.role_ids.choices = [(g.id, g.name)
for g in db.session.query(Role).order_by('name')]
form.org_ids.choices = [(g.id, g.name)
for g in db.session.query(Organization).order_by('name')]
if request.method == 'POST' and form.validate():
user.update(form)
return redirect(url_for('admin.users'))
form.role_ids.data = [role.id for role in user.role]
form.org_ids.data = [org.id for org in user.organization]
action_url = url_for('admin.edit_user', user_id=user_id)
return render_template('forms/simple_form.j2', title=u"Editing {}".format(user.email), form=form,
action_url=action_url)
@admin.route('/user/delete/<int:user_id>', methods=['GET'])
@auth_required
@admin_required
def delete_user(user_id):
user = db.session.query(User).get(user_id)
username = user.email
db.session.delete(user)
message = u'User {} deleted'.format(username)
alert_type = 'alert-success'
try:
db.session.commit()
except IntegrityError as e:
message = u'User {} owns some rules, can not be deleted!'.format(username)
alert_type = 'alert-danger'
print(e)
flash(message, alert_type)
return redirect(url_for('admin.users'))
@admin.route('/users')
@auth_required
@admin_required
def users():
users = User.query.all()
return render_template('pages/users.j2', users=users)
@admin.route('/organizations')
@auth_required
@admin_required
def organizations():
orgs = db.session.query(Organization).all()
return render_template('pages/orgs.j2', orgs=orgs)
@admin.route('/organization', methods=['GET', 'POST'])
@auth_required
@admin_required
def organization():
form = OrganizationForm(request.form)
if request.method == 'POST' and form.validate():
        # test if organization is unique
exist = db.session.query(Organization).filter_by(name=form.name.data).first()
if not exist:
org = Organization(name=form.name.data, arange=form.arange.data)
db.session.add(org)
db.session.commit()
flash('Organization saved')
return redirect(url_for('admin.organizations'))
else:
flash(u'Organization {} already exists'.format(
form.name.data), 'alert-danger')
action_url = url_for('admin.organization')
return render_template('forms/simple_form.j2', title="Add new organization to Flowspec", form=form,
action_url=action_url)
@admin.route('/organization/edit/<int:org_id>', methods=['GET', 'POST'])
@auth_required
@admin_required
def edit_organization(org_id):
org = db.session.query(Organization).get(org_id)
form = OrganizationForm(request.form, obj=org)
if request.method == 'POST' and form.validate():
form.populate_obj(org)
db.session.commit()
flash('Organization updated')
return redirect(url_for('admin.organizations'))
action_url = url_for('admin.edit_organization', org_id=org.id)
return render_template('forms/simple_form.j2', title=u"Editing {}".format(org.name), form=form,
action_url=action_url)
@admin.route('/organization/delete/<int:org_id>', methods=['GET'])
@auth_required
@admin_required
def delete_organization(org_id):
org = db.session.query(Organization).get(org_id)
aname = org.name
db.session.delete(org)
message = u'Organization {} deleted'.format(aname)
alert_type = 'alert-success'
try:
db.session.commit()
except IntegrityError:
message = u'Organization {} has some users, can not be deleted!'.format(aname)
alert_type = 'alert-danger'
    flash(message, alert_type)
    return redirect(url_for('admin.organizations'))
@admin.route('/actions')
@auth_required
@admin_required
def actions():
actions = db.session.query(Action).all()
return render_template('pages/actions.j2', actions=actions)
@admin.route('/action', methods=['GET', 'POST'])
@auth_required
@admin_required
def action():
form = ActionForm(request.form)
if request.method == 'POST' and form.validate():
        # test if Action is unique
exist = get_existing_action(form.name.data, form.command.data)
if not exist:
action = Action(name=form.name.data,
command=form.command.data,
description=form.description.data,
role_id=form.role_id.data)
db.session.add(action)
db.session.commit()
flash('Action saved', 'alert-success')
return redirect(url_for('admin.actions'))
else:
flash(u'Action with name {} or command {} already exists'.format(
form.name.data, form.command.data), 'alert-danger')
action_url = url_for('admin.action')
return render_template('forms/simple_form.j2', title="Add new action to Flowspec", form=form, action_url=action_url)
@admin.route('/action/edit/<int:action_id>', methods=['GET', 'POST'])
@auth_required
@admin_required
def edit_action(action_id):
action = db.session.query(Action).get(action_id)
print(action.role_id)
form = ActionForm(request.form, obj=action)
if request.method == 'POST' and form.validate():
form.populate_obj(action)
db.session.commit()
flash('Action updated')
return redirect(url_for('admin.actions'))
action_url = url_for('admin.edit_action', action_id=action.id)
return render_template('forms/simple_form.j2', title=u"Editing {}".format(action.name), form=form,
action_url=action_url)
@admin.route('/action/delete/<int:action_id>', methods=['GET'])
@auth_required
@admin_required
def delete_action(action_id):
action = db.session.query(Action).get(action_id)
aname = action.name
db.session.delete(action)
message = u'Action {} deleted'.format(aname)
alert_type = 'alert-success'
try:
db.session.commit()
except IntegrityError:
message = u'Action {} is in use in some rules, can not be deleted!'.format(aname)
alert_type = 'alert-danger'
flash(message, alert_type)
return redirect(url_for('admin.actions'))
@admin.route('/communities')
@auth_required
@admin_required
def communities():
communities = db.session.query(Community).all()
return render_template('pages/communities.j2', communities=communities)
@admin.route('/community', methods=['GET', 'POST'])
@auth_required
@admin_required
def community():
form = CommunityForm(request.form)
if request.method == 'POST' and form.validate():
        # test if Community name is unique
exist = get_existing_community(form.name.data)
if not exist:
community = Community(name=form.name.data,
comm=form.comm.data,
larcomm=form.larcomm.data,
extcomm=form.extcomm.data,
description=form.description.data,
role_id=form.role_id.data)
db.session.add(community)
db.session.commit()
flash('Community saved', 'alert-success')
return redirect(url_for('admin.communities'))
else:
            flash(u'Community with name {} already exists'.format(
                form.name.data), 'alert-danger')
community_url = url_for('admin.community')
return render_template('forms/simple_form.j2', title="Add new community to Flowspec", form=form,
community_url=community_url)
@admin.route('/community/edit/<int:community_id>', methods=['GET', 'POST'])
@auth_required
@admin_required
def edit_community(community_id):
community = db.session.query(Community).get(community_id)
print(community.role_id)
form = CommunityForm(request.form, obj=community)
if request.method == 'POST' and form.validate():
form.populate_obj(community)
db.session.commit()
flash('Community updated')
return redirect(url_for('admin.communities'))
community_url = url_for('admin.edit_community', community_id=community.id)
return render_template('forms/simple_form.j2', title=u"Editing {}".format(community.name), form=form,
community_url=community_url)
@admin.route('/community/delete/<int:community_id>', methods=['GET'])
@auth_required
@admin_required
def delete_community(community_id):
community = db.session.query(Community).get(community_id)
aname = community.name
db.session.delete(community)
message = u'Community {} deleted'.format(aname)
alert_type = 'alert-success'
try:
db.session.commit()
except IntegrityError:
message = u'Community {} is in use in some rules, can not be deleted!'.format(aname)
alert_type = 'alert-danger'
flash(message, alert_type)
return redirect(url_for('admin.communities'))
|
StarcoderdataPython
|
3475057
|
<gh_stars>0
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import numpy as np
import torch
class BoxCoder(object):
"""
This class encodes and decodes a set of bounding boxes into
the representation used for training the regressors.
"""
def __init__(self, weights=(10., 10., 10., 5., 15.)):
self.weights = weights
    def encode(self, ex_rois, gt_rois):  # inputs are ROIs as five values: x1, y1, x2, y2, angle
        ex_widths = ex_rois[:, 2] - ex_rois[:, 0]  # width
        ex_heights = ex_rois[:, 3] - ex_rois[:, 1]  # height
        ex_widths = torch.clamp(ex_widths, min=1)  # clamp width to at least 1
        ex_heights = torch.clamp(ex_heights, min=1)  # clamp height to at least 1
        ex_ctr_x = ex_rois[:, 0] + 0.5 * ex_widths  # center coordinates x, y
        ex_ctr_y = ex_rois[:, 1] + 0.5 * ex_heights
        ex_thetas = ex_rois[:, 4]  # angle
        gt_widths = gt_rois[:, 2] - gt_rois[:, 0]
        gt_heights = gt_rois[:, 3] - gt_rois[:, 1]
        gt_widths = torch.clamp(gt_widths, min=1)
        gt_heights = torch.clamp(gt_heights, min=1)  # ground-truth width and height
        gt_ctr_x = gt_rois[:, 0] + 0.5 * gt_widths  # ground-truth center coordinates
        gt_ctr_y = gt_rois[:, 1] + 0.5 * gt_heights
        gt_thetas = gt_rois[:, 4]  # ground-truth angle
        wx, wy, ww, wh, wt = self.weights
        targets_dx = wx * (gt_ctr_x - ex_ctr_x) / ex_widths
        targets_dy = wy * (gt_ctr_y - ex_ctr_y) / ex_heights
        targets_dw = ww * torch.log(gt_widths / ex_widths)
        targets_dh = wh * torch.log(gt_heights / ex_heights)
        targets_dt = wt * (torch.tan(gt_thetas / 180.0 * np.pi) - torch.tan(ex_thetas / 180.0 * np.pi))
        targets = torch.stack(
            (targets_dx, targets_dy, targets_dw, targets_dh, targets_dt), dim=1
        )
        return targets  # regression offsets
def decode(self, boxes, deltas, mode='xywht'):
widths = boxes[:, :, 2] - boxes[:, :, 0]
heights = boxes[:, :, 3] - boxes[:, :, 1]
widths = torch.clamp(widths, min=1)
heights = torch.clamp(heights, min=1)
ctr_x = boxes[:, :, 0] + 0.5 * widths
ctr_y = boxes[:, :, 1] + 0.5 * heights
thetas = boxes[:, :, 4]
wx, wy, ww, wh, wt = self.weights
dx = deltas[:, :, 0] / wx
dy = deltas[:, :, 1] / wy
dw = deltas[:, :, 2] / ww
dh = deltas[:, :, 3] / wh
dt = deltas[:, :, 4] / wt
pred_ctr_x = ctr_x if 'x' not in mode else ctr_x + dx * widths
pred_ctr_y = ctr_y if 'y' not in mode else ctr_y + dy * heights
pred_w = widths if 'w' not in mode else torch.exp(dw) * widths
pred_h = heights if 'h' not in mode else torch.exp(dh) * heights
pred_t = thetas if 't' not in mode else torch.atan(torch.tan(thetas / 180.0 * np.pi) + dt) / np.pi * 180.0
pred_boxes_x1 = pred_ctr_x - 0.5 * pred_w
pred_boxes_y1 = pred_ctr_y - 0.5 * pred_h
pred_boxes_x2 = pred_ctr_x + 0.5 * pred_w
pred_boxes_y2 = pred_ctr_y + 0.5 * pred_h
pred_boxes = torch.stack([
pred_boxes_x1,
pred_boxes_y1,
pred_boxes_x2,
pred_boxes_y2,
pred_t], dim=2
)
        return pred_boxes  # decoded boxes: two corner points plus the angle
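# Hypothetical round-trip check (an illustration added here, not part of the
# original file; assumes boxes laid out as x1, y1, x2, y2, angle-in-degrees,
# matching encode()/decode() above):
if __name__ == "__main__":
    coder = BoxCoder()
    anchors = torch.tensor([[10., 10., 50., 30., 0.],
                            [20., 15., 60., 55., 30.]])
    gts = torch.tensor([[12., 11., 52., 33., 5.],
                        [18., 14., 58., 50., 25.]])
    deltas = coder.encode(anchors, gts)           # (N, 5) regression targets
    decoded = coder.decode(anchors.unsqueeze(0),  # decode() expects (B, N, 5)
                           deltas.unsqueeze(0))
    # Decoding the encoded offsets should recover the ground-truth boxes.
    print(torch.allclose(decoded.squeeze(0), gts, atol=1e-3))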
|
StarcoderdataPython
|
6518276
|
#=======================================================================
__version__ = '''0.0.07'''
__sub_version__ = '''20051210041342'''
__copyright__ = '''(c) <NAME> 2003'''
#-----------------------------------------------------------------------
__doc__ = '''\
this module will define a set of mixins providing the ability to store and
archive object state history, as well as basic operations with this history.
all of the classes below use the _history_state attribute to store the
history, thus, this attribute must be provided by the context using the mixin(s).
NOTE: care must be taken with this set of objects as they will prevent the deletion
of referenced objects even if those objects or references to them are explicitly
deleted. this is because references to them are stored in history.
this problem can be dealt with by regular archiving and deletion or pickling.
this was in part inspired by: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/302742
'''
#-----------------------------------------------------------------------
import time
import copy
import pli.logictypes as logictypes
#-----------------------------------------------------------------------
#---------------------------------------------------StateHistoryMixin---
# NOTE: might be good to exclude the '_history_state' attr from
# comparisons...
# XXX add better, more pedantic error handling...
class BasicStateHistoryMixin(object):
'''
this mixin provides basic object history functionality.
NOTE: the attribute read/write speed will not be affected.
NOTE: this depends on an externaly defined _history_state attribute that
is compatible with the dict union object.
NOTE: by default this will not add any new state to the object.
NOTE: care must be taken with this object as it will prevent the deletion
of referenced objects.
'''
__copy_snapshot_valuse__ = False
__deepcopy_snapshots__ = False
    # TODO add timestamp...
def hist_makesnapshot(self):
'''
this will save the current state to object history.
'''
self._history_state.unite(self.hist_diff())
def hist_diff(self):
'''
generate the difference dict between the current state and the last snapshot.
'''
res = {}
        if not hasattr(self, '_history_state'):
            # a string exception is illegal here; raise a real exception type.
            raise AttributeError('no snapshot: _history_state is not defined.')
snapshot = self._history_state
for k, v in self.__dict__.iteritems():
## if k == '_history_state':
## continue
if k not in snapshot or snapshot[k] != v:
if hasattr(self, '__copy_snapshot_valuse__') and not self.__copy_snapshot_valuse__:
res[k] = v
elif hasattr(self, '__deepcopy_snapshots__') and self.__deepcopy_snapshots__:
res[copy.deepcopy(k)] = copy.deepcopy(v)
else:
res[k] = copy.copy(v)
return res
# XXX make this faster...
def ismodified(self):
'''
        return True if the object has been modified since the last snapshot was taken, else False.
'''
        if not hasattr(self, '_history_state'):
            raise AttributeError('no snapshot: _history_state is not defined.')
snapshot = self._history_state
return False in [ ( k in snapshot and v == snapshot[k] ) \
for k, v in self.__dict__.iteritems() \
if k != '_history_state']
# XXX check for depth...
# XXX should this be renamed to hist_stepback???
def hist_revert(self, level=1):
'''
        will revert the state of the object to a given layer in its history.
        NOTE: if level is 0 then the object will be reverted only to the last snapshot.
        NOTE: this will not revert beyond the first snapshot made of the object.
'''
snapshot = self._history_state
if level > 0 and self.hist_diff() != {}:
level -= 1
if len(snapshot.members()) > 1:
for i in xrange(level):
snapshot.popmember()
dct = self.__dict__
dct.clear()
dct.update(snapshot.todict())
#---------------------------------------------------StateHistoryMixin---
##!!! REVISE !!!##
# XXX add better, more pedantic error handling...
class StateHistoryMixin(BasicStateHistoryMixin):
'''
    this mixin extends the BasicStateHistoryMixin (see its docs for more info).
'''
# XXX add level support...
def hist_compact(self, level=0):
'''
this will flatten the history...
'''
        if not hasattr(self, '_history_state'):
            raise AttributeError('no snapshot: _history_state is not defined.')
snapshot = self._history_state
dct = snapshot.todict()
# XXX this might not be safe...
snapshot.clearmembers()
snapshot.unite(dct)
# XXX it might be good to move this to BasicStateHistoryMixin and
# rewrite hist_revert to use it... (???)
def hist_getstate(self, level=1):
'''
this will return a dict representing the state of the object at a given level.
'''
snapshot = self._history_state
if level > 0 and self.hist_diff() != {}:
level -= 1
members = snapshot.members()
snapshot = logictypes.DictUnion(*members)
if len(members) > 1:
for i in xrange(level):
snapshot.popmember()
return snapshot.todict()
#----------------------------------------StateHistoryWithArchiveMixin---
# XXX add better, more pedantic error handling...
class StateHistoryWithArchiveMixin(BasicStateHistoryMixin):
'''
this mixin provides support for archiving of history (full or partial).
NOTE: archive restore is checked for consistency, thus, incompatible
archive restore is avoided.
'''
def hist_archive(self, level=0):
'''
this will compact the object history to the given level (default: 0) and
        return the truncated list of dicts.
NOTE: the returned list is usable in hist_restorearchive.
'''
snapshot = self._history_state
levels = snapshot.members()
# split the history into a tail and a head :)
head = levels[:level]
tail = levels[level:]
# collapse the tail...
tail_elem = logictypes.DictUnion(*tail[::-1]).todict()
# form a new history...
# XXX is there a better way to do this??
snapshot.clearmembers()
snapshot.tailunite(*head + (tail_elem,))
# return the archive history (list of dicts usable in
# tailunite...)
return tail
def hist_restorearchive(self, archive):
'''
this will restore the objects' history using the archive (returned by
the hist_archive method).
        NOTE: this will fail if the archive's state differs from the first state
        stored in the current history.
NOTE: this will remove the first state in history and replace it with
an expanded version from the archive.
'''
snapshot = self._history_state
levels = snapshot.members()
# sanity check...
if logictypes.DictUnion(*archive[::-1]).todict() != levels[-1]:
            raise TypeError('inconsistent archive.')
snapshot.clearmembers()
snapshot.tailunite(*levels[:-1] + archive)
#-----------------------------------------------------------------------
# XXX might be good to move this elsware... (not exactly a mixin!)
#--------------------------------------------------StateHistoryObject---
class StateHistoryObject(StateHistoryMixin):
'''
'''
def __init__(self, *p, **n):
'''
'''
        super(StateHistoryObject, self).__init__(*p, **n)
self._history_state = logictypes.DictUnion()
self.hist_makesnapshot()
#=======================================================================
if __name__ == '__main__':
from pli.pattern.proxy.history import StateHistoryProxy
class O(object):
pass
o = O()
o.x = 123
o.y = 321
a = StateHistoryProxy(o)
## a = StateHistoryObject()
print a.ismodified()
print 'raw:', a.__dict__.keys()
a.a = 1
a.b = 2
print a.ismodified()
a.hist_makesnapshot()
print a.ismodified()
print 'on snapshot:', a.__dict__.keys()
a.c = 3
print a.ismodified()
print 'new state:', a.__dict__.keys()
a.hist_revert()
print a.ismodified()
print 'hist_reverted:', a.__dict__.keys()
del a.x
print o.__dict__.keys()
a.hist_revert()
a.hist_revert()
a.hist_revert()
a.hist_revert()
print o.__dict__.keys()
print '---'
# test the archive...
class HistArchProxy(StateHistoryProxy, StateHistoryWithArchiveMixin):
'''
'''
pass
a = HistArchProxy(o)
a.x = 0
a.hist_makesnapshot()
a.x = 1
a.hist_makesnapshot()
a.x = 2
a.hist_makesnapshot()
a.x = 3
a.hist_makesnapshot()
a.x = 4
a.hist_makesnapshot()
print a._history_state.members()
arch = a.hist_archive(2)
print arch
print a._history_state.members()
a.hist_restorearchive(arch)
print a._history_state.members()
#=======================================================================
# vim:set ts=4 sw=4 nowrap :
|
StarcoderdataPython
|
1678160
|
<reponame>basbeu/PyLaia<filename>laia/nn/adaptive_pool_2d_base.py
from __future__ import absolute_import
import torch
from laia.data import PaddedTensor
class AdaptivePool2dBase(torch.nn.Module):
def __init__(self, output_sizes, func):
super(AdaptivePool2dBase, self).__init__()
self._output_sizes = output_sizes
self._func = func
self._fixed_size = isinstance(output_sizes, int) or (
output_sizes[0] is not None and output_sizes[1] is not None
)
@property
def output_sizes(self):
return self._output_sizes
def forward(self, x):
x, xs = (x.data, x.sizes) if isinstance(x, PaddedTensor) else (x, None)
y = self._func(batch_input=x, output_sizes=self.output_sizes, batch_sizes=xs)
if xs is None or self._fixed_size:
return y
else:
ys = xs.clone()
dim = int(self.output_sizes[0] is None)
ys[:, dim] = self.output_sizes[dim]
return PaddedTensor(y, ys)
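# A minimal usage sketch. The `adaptive_avg` callable below is an assumption
# made for illustration; laia's real subclasses supply their own pooling
# functions with this keyword signature.
if __name__ == "__main__":
    def adaptive_avg(batch_input, output_sizes, batch_sizes=None):
        # this toy function ignores the per-sample sizes
        return torch.nn.functional.adaptive_avg_pool2d(batch_input, output_sizes)

    pool = AdaptivePool2dBase(output_sizes=(1, 1), func=adaptive_avg)
    y = pool(torch.randn(2, 3, 8, 10))
    print(y.shape)  # torch.Size([2, 3, 1, 1])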
|
StarcoderdataPython
|
5115211
|
from dataclasses import dataclass
from typing import List, Optional
@dataclass
class Route:
id: Optional[str] = None
name: Optional[str] = None
type: Optional[str] = None
authority: Optional[str] = None
directions: Optional[List[str]] = None
alerts: Optional[List[str]] = None
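# Hypothetical construction example (all field values are illustrative):
# route = Route(id="3", name="Craigieburn", type="train",
#               authority="PTV", directions=["City (Flinders Street)"])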
|
StarcoderdataPython
|
5006569
|
<filename>4.1.1.py
def palindrome(word):
word=word.lower()
left=0
right=len(word)-1
i=True
while left<right:
if word[left]!=word[right]:
i=False
break
else:
left+=1
right-=1
return i
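# Equivalent one-line check (a sketch with the same behavior; the tests below
# keep using the loop version above):
# def palindrome(word):
#     return word.lower() == word.lower()[::-1]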
def test_palindrome(word, res, testId):
    if palindrome(word) == res:
        print ('Test '+str(testId)+' complete')
    else:
        print ('Test '+str(testId)+' failed')
word1 = 'ротор'
test_palindrome(word1, True, 1)
word2 = 'LOL'
test_palindrome(word2, True, 2)
word3 = 'Палиндром'
test_palindrome(word3, False, 3)
word4 = 'тестирование'
test_palindrome(word4, False, 4)
word5 = 'Оно'
test_palindrome(word5, True, 5)
|
StarcoderdataPython
|
6687565
|
<gh_stars>0
"""
Creating and editing files and folders for Google Drive
Copyright 2019 <NAME>, <EMAIL>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import os
from apiclient.http import MediaFileUpload
from open_mlstat.google_drive.drive_object_base import GoogleDriveObject
from open_mlstat.tools.load_drive_files import load_object_by_config
from open_mlstat.security.google_object_access import ObjectAccess
MIMETYPE_FILE = "application/vnd.google-apps.file"
MIMETYPE_PHOTO = "application/vnd.google-apps.photo"
MIMETYPE_DOC = "application/vnd.google-apps.document"
MIME_TYPE_FOLDER = 'application/vnd.google-apps.folder'
MIMETYPE_UNKNOWN = "application/vnd.google-apps.unknown"
class GoogleDrive(object):
class File(GoogleDriveObject):
def __init__(self, folder, google_acc, object_access, file_name="data/test.txt", mimetype="*/*", prefix=None, contain_folder="default"):
self.mimetype = mimetype
            self.file_name = file_name
name = os.path.basename(file_name)
if prefix is not None:
name = prefix + "_" + name
GoogleDriveObject.__init__(self, google_acc.drive_service, name, "files", object_acess=object_access,
contain_folder=contain_folder)
self.file_metadata = {
'name': self.name,
'mimeType': self.mimetype,
'parents': [folder.id]
}
self.data = load_object_by_config(self.config_path, self.create_file)
self.id = self.data.get('id')
self.access(self.id)
def create_file(self):
media = MediaFileUpload(self.file_name,
mimetype=self.mimetype,
resumable=True)
print("Create: ", self.file_metadata)
return self.service.files().create(body=self.file_metadata,
media_body=media, fields='id').execute()
class Folder(GoogleDriveObject):
def __init__(self, name, google_acc,object_access, parent=None, contain_folder="default"):
GoogleDriveObject.__init__(self, google_acc.drive_service, name, "folders", contain_folder=contain_folder,
object_acess=object_access)
self.file_metadata = {
'name': name,
'mimeType': MIME_TYPE_FOLDER
}
if parent is not None:
self.file_metadata["parents"] = [parent.id]
self.data = load_object_by_config(self.config_path, self.create_folder)
self.id = self.data.get('id')
self.access(self.id)
def create_folder(self):
return self.service.files().create(body=self.file_metadata, fields='id').execute()
def __init__(self):
pass
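# Hypothetical usage sketch: `google_acc` and the ObjectAccess argument are
# assumptions standing in for a configured account and access policy.
# access = ObjectAccess("<EMAIL>")
# folder = GoogleDrive.Folder("experiments", google_acc, access)
# upload = GoogleDrive.File(folder, google_acc, access,
#                           file_name="data/results.csv", mimetype="text/csv")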
|
StarcoderdataPython
|
6631920
|
try:
import math
import sys
from PyQt4 import QtGui, QtCore
from random import randint
from time import sleep
except ImportError as ie:
print (str(ie))
    # Caught if any packages are missing
missing = str(ie).split("named")[1]
print("Software needs %s installed\nPlease run pip install %s and restart\r\n" % (missing, missing))
input("Press any key to exit...")
exit()
except ValueError as e:
print (str(e))
input("Press any key to exit...")
exit()
def guiAppInit():
try:
app = QtGui.QApplication(sys.argv)
except Exception as e:
print("Unable to start QApplication.")
print(str(e))
exit()
return app
def samplesToGui(device, qt_app = None):
try:
color_list = [0] * device.number_of_sensors
for i in range(0, len(device.sensorMappedSamples[0])):
for j in range(0, device.number_of_sensors):
# 60 is added to the results in order to offset the HSV color. The HSV color RED is centered
# around 0/360, this poses a problem where the minimum and maximum values are equal in color.
# The fix is to offset the HSV value by 60.
hue = (device.sensorMappedSamples[j][i]) + 60
temp_color = QtGui.QColor()
temp_color.setHsv(hue, 255, 255, 255) # saturation[j][i], 255, 255)
temp_brush = QtGui.QBrush(temp_color)
color_list[j] = temp_brush
# TODO Call HexGridWidget.updateColor(colorList)
qt_app.display.updateColors(color_list)
qt_app.display.repaint()
sleep(0.02)
except Exception as e:
print("Unable to update colors.")
print(str(e))
exit()
class Window(QtGui.QMainWindow):
def __init__(self, number_of_hexagons, number_of_samples, parent = None):
super(Window, self).__init__(parent)
self.number_of_sensors = number_of_hexagons
self.number_of_samples = number_of_samples
self.setUpdatesEnabled(True) # Needed in order to trigger the paintEvent of a QWidget
self.setGeometry(100, 35, 750, 940) # (pos x, pos y, width, height)
self.setWindowTitle('MainWindow')
self.mainWidget = QtGui.QWidget(self)
self.verticalLayout = QtGui.QVBoxLayout(self)
self.mainWidget.setLayout(self.verticalLayout) # Vertical division of mainWidget
self.display = HexGridWidget(vertices = 6, radius = 40, angularOffset = 0, number_of_hexagons = number_of_hexagons)
self.verticalLayout.addWidget(self.display) # Adds the hex grid to the mainWidget
self.control = QtGui.QWidget(self)
self.controlLayout = QtGui.QHBoxLayout(self) # Horizontal division of the control QWidget
self.playButton = QtGui.QPushButton("Play", self)
self.stopButton = QtGui.QPushButton('Stop', self)
self.resetButton = QtGui.QPushButton('Reset', self)
self.playButton.clicked.connect(self.play)
self.stopButton.clicked.connect(self.stop)
self.resetButton.clicked.connect(self.reset)
self.controlLayout.addWidget(self.playButton)
self.controlLayout.addWidget(self.stopButton)
self.controlLayout.addWidget(self.resetButton)
self.verticalLayout.addLayout(self.controlLayout) # Adds the control buttons to the mainWidget
self.setCentralWidget(self.mainWidget)
self.mainWidget.resize(self.mainWidget.sizeHint())
self.show() # Triggers the Window's paintEvent
print ("Window initialized!")
self.colors = []
#TODO Remove block
for i in range(0, number_of_hexagons):
self.colors.append(QtGui.QBrush(QtGui.QColor(randint(0, 255), randint(0, 255), randint(0, 255), 255)))
self.display.updateColors(self.colors)
# End of block
def play(self):
print ("Clicked Play!")
#TODO Decode FSR hex value to a RGB int
def stop(self):
print ("Clicked Stop!")
#TODO Stop playback of FSR
def reset(self):
print ("Clicked Reset!")
#TODO Reset playback
for i in range(0, self.number_of_samples):
self.display.repaint()
sleep(0.02)
class HexGridWidget(QtGui.QWidget):
def __init__(self, vertices, radius, number_of_hexagons, angularOffset = 0, parent = None):
super(HexGridWidget, self).__init__(parent)
self.number_of_hexagons = number_of_hexagons
self.setGeometry(100, 35, 600, 840)
self.setUpdatesEnabled(True)
self.pen = QtGui.QPen(QtGui.QColor(0, 0, 0))
        self.pen.setWidth(3)
self.brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 255))
self.brushList = []
self.polygon = []
for i in range(0, 6):
for j in range(0, 6):
self.polygon.append(self.createHexagon(vertices, radius, angularOffset))
# Move the polygon points to the next position in the grid
offsetRow = self.polygon[i * 6 + j].at(1) - self.polygon[i * 6 + j].at(3)
offsetCol = self.polygon[i * 6 + j].at(5) - self.polygon[i * 6 + j].at(3)
self.polygon[i * 6 + j].translate(j * offsetCol.x() + i * offsetRow.x(),
j * offsetCol.y() + i * offsetRow.y())
for i in range(0, self.number_of_hexagons):
self.brushList.append(QtGui.QBrush(QtGui.QColor(255, 255, 255, 255)))
def createHexagon(self, n, r, s):
hexagon = QtGui.QPolygon()
w = 360 / n
for i in range(n):
t = w * i + s
x = r * math.cos(math.radians(t))
y = r * math.sin(math.radians(t))
hexagon.append(QtCore.QPoint(x + r, (self.height() / 2) + y))
return hexagon
def updateColors(self, colorList):
for i in range(0, self.number_of_hexagons):
self.brushList[i] = colorList[i]
#return self.repaint()
def paintEvent(self, event):
painter = QtGui.QPainter(self)
painter.setPen(self.pen)
painter.setBrush(self.brush)
for i in range(0, self.number_of_hexagons):
painter.setBrush(self.brushList[i])
painter.drawPolygon(self.polygon[i])
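# Hypothetical launch sketch (36 hexagons matching the 6x6 grid built above;
# the sample count is illustrative):
# app = guiAppInit()
# window = Window(number_of_hexagons=36, number_of_samples=100)
# sys.exit(app.exec_())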
|
StarcoderdataPython
|
180280
|
<filename>batch_flattened_seq_shuffler.py
import torch
import hashlib
import dataclasses
import numpy as np
from typing import List, Tuple, final
from .misc import collate_tensors_with_variable_shapes, CollateData, inverse_permutation
from .tensors_data_class_base import TensorsDataClass
from .mixins import TensorDataClassWithSequencesMixin
from .misc import seq_lengths_to_mask
__all__ = ['BatchFlattenedSeqShuffler']
def get_random_seed_per_example(
batch_dependent_seed: bool, example_dependent_seed: bool,
initial_seed_salt: str, collate_data: CollateData) -> List[int]:
if batch_dependent_seed and example_dependent_seed:
return [
int(hashlib.sha256(f'{initial_seed_salt}|{"-".join(collate_data.example_hashes)}|{example_idx}'
.encode('ascii')).hexdigest(), 16) % (2 ** 32)
for example_idx, _ in enumerate(collate_data.example_hashes)]
elif not batch_dependent_seed and example_dependent_seed:
return [
int(hashlib.sha256(f'{initial_seed_salt}|{example_hash}'
.encode('ascii')).hexdigest(), 16) % (2 ** 32)
for example_hash in collate_data.example_hashes]
elif batch_dependent_seed and not example_dependent_seed:
return [
int(hashlib.sha256(f'{initial_seed_salt}|{"-".join(collate_data.example_hashes)}'
.encode('ascii')).hexdigest(), 16) % (2 ** 32)
for _ in collate_data.example_hashes]
else:
return [
int(hashlib.sha256(f'{initial_seed_salt}'
.encode('ascii')).hexdigest(), 16) % (2 ** 32)
for _ in collate_data.example_hashes]
@final
@dataclasses.dataclass
class BatchFlattenedSeqShuffler(TensorDataClassWithSequencesMixin, TensorsDataClass):
permutations: torch.LongTensor = dataclasses.field(default=None, init=False)
inverse_permutations: torch.LongTensor = dataclasses.field(default=None, init=False)
lengths: Tuple[int, ...] = dataclasses.field(default=())
batch_dependent_seed: bool = dataclasses.field(default=True)
example_dependent_seed: bool = dataclasses.field(default=True)
initial_seed_salt: str = dataclasses.field(default='0')
@classmethod
def get_management_fields(cls) -> Tuple[str, ...]:
return super(BatchFlattenedSeqShuffler, cls).get_management_fields() + \
('lengths', 'batch_dependent_seed', 'example_dependent_seed', 'initial_seed_salt')
@classmethod
def _collate_first_pass(
cls, inputs: List['BatchFlattenedSeqShuffler'],
collate_data: CollateData) \
-> 'BatchFlattenedSeqShuffler':
collated = super(BatchFlattenedSeqShuffler, cls)._collate_first_pass(
inputs, collate_data=collate_data)
batch_dependent_seed = inputs[0].batch_dependent_seed
example_dependent_seed = inputs[0].example_dependent_seed
initial_seed_salt = inputs[0].initial_seed_salt
random_seed_per_example = get_random_seed_per_example(
batch_dependent_seed=batch_dependent_seed,
example_dependent_seed=example_dependent_seed,
initial_seed_salt=initial_seed_salt, collate_data=collate_data)
random_state_per_example = [np.random.RandomState(rs) for rs in random_seed_per_example]
permutations = [
torch.LongTensor(random_state_per_example[example_idx].permutation(int(nr_items)))
for example_idx, inp in enumerate(inputs)
for nr_items in inp.lengths]
inverse_permutations = [inverse_permutation(perm) for perm in permutations]
collated.lengths = tuple(length for inp in inputs for length in inp.lengths)
collated.sequences_lengths = torch.LongTensor(collated.lengths)
collated.max_sequence_length = max(collated.lengths)
collated.sequences_mask = seq_lengths_to_mask(
seq_lengths=collated.sequences_lengths, max_seq_len=collated.max_sequence_length)
collated.permutations = collate_tensors_with_variable_shapes(
tensors=tuple(permutations), create_collate_mask=False,
create_collate_lengths=False, last_variable_dim=0)
collated.inverse_permutations = collate_tensors_with_variable_shapes(
tensors=tuple(inverse_permutations), create_collate_mask=False,
create_collate_lengths=False, last_variable_dim=0)
return collated
def shuffle(self, sequence_input: torch.Tensor) -> torch.Tensor:
assert sequence_input.shape[:-1] == self.permutations.shape
extended_perm = self.permutations.unsqueeze(-1).expand(sequence_input.shape)
shuffled_seqs = torch.gather(input=sequence_input, dim=1, index=extended_perm)
shuffled_seqs = shuffled_seqs.masked_fill(
~self.sequences_mask.unsqueeze(-1), 0)
return shuffled_seqs
def unshuffle(self, shuffled_sequence_input: torch.Tensor) -> torch.Tensor:
assert shuffled_sequence_input.shape[:-1] == self.inverse_permutations.shape
extended_inv_perm = self.inverse_permutations.unsqueeze(-1).expand(shuffled_sequence_input.shape)
unshuffled_seqs = torch.gather(input=shuffled_sequence_input, dim=1, index=extended_inv_perm)
unshuffled_seqs = unshuffled_seqs.masked_fill(
~self.sequences_mask.unsqueeze(-1), 0)
return unshuffled_seqs
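# Round-trip property (a sketch): for a `shuffler` produced by the
# TensorsDataClass collate pipeline (which fills `permutations`) and an input
# `x` of shape (batch, max_seq_len, hidden):
# shuffled = shuffler.shuffle(x)
# restored = shuffler.unshuffle(shuffled)  # equals x at non-padding positions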
|
StarcoderdataPython
|
5187304
|
<filename>pysem/plot.py<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Graphviz wrapper to visualize SEM models."""
from .model import Model
import logging
try:
import graphviz
__GRAPHVIZ = True
except ModuleNotFoundError:
logging.info("No graphviz package found, visualization method is "
"unavailable")
__GRAPHVIZ = False
def semplot(mod, filename: str, inspection=None, plot_covs=False,
plot_exos=True, images=None, engine='dot', latshape='circle',
plot_ests=True, std_ests=False, show=False):
"""
Draw a SEM diagram.
Parameters
----------
mod : Model | str
Model instance.
filename : str
Name of file where to plot is saved.
inspection : pd.DataFrame, optional
Parameter estimates as returned by Model.inspect(). The default is
None.
plot_covs : bool, optional
If True, covariances are also drawn. The default is False.
plot_exos: bool, optional
If False, exogenous variables are not plotted. It might be useful,
        for example, in a GWAS setting, where the number of exogenous variables,
        i.e. genetic markers, can be very large. Has effect only with ModelMeans or
ModelEffects. The default is True.
images : dict, optional
Node labels can be replaced with images. It will be the case if a map
variable_name->path_to_image is provided. The default is None.
engine : str, optional
Graphviz engine name to use. The default is 'dot'.
latshape : str, optional
        Graphviz-compatible shape for latent variables. The default is
'circle'.
plot_ests : bool, optional
If True, then estimates are also plotted on the graph. The default is
True.
std_ests : bool, optional
If True and plot_ests is True, then standardized values are plotted
instead. The default is False.
show : bool, optional
        If True, the rendered file is opened for viewing. The default is False.
Returns
-------
Graphviz graph.
"""
if not __GRAPHVIZ:
raise ModuleNotFoundError("No graphviz module is installed.")
if type(mod) is str:
mod = Model(mod)
if not hasattr(mod, 'last_result'):
plot_ests = False
if inspection is None:
inspection = mod.inspect(std_est=std_ests)
if images is None:
images = dict()
if std_ests:
inspection['Estimate'] = inspection['Est. Std']
t = filename.split('.')
filename, ext = '.'.join(t[:-1]), t[-1]
g = graphviz.Digraph('G', format=ext, engine=engine)
g.attr(overlap='scale', splines='true')
g.attr('edge', fontsize='12')
g.attr('node', shape=latshape, fillcolor='#cae6df', style='filled')
for lat in mod.vars['latent']:
if lat in images:
g.node(lat, label='', image=images[lat])
else:
g.node(lat, label=lat)
g.attr('node', shape='box', style='')
for obs in mod.vars['observed']:
if obs in images:
g.node(obs, label='', image=images[obs])
else:
g.node(obs, label=obs)
regr = inspection[inspection['op'] == '~']
all_vars = mod.vars['all']
try:
exo_vars = mod.vars['observed_exogenous']
except KeyError:
exo_vars = set()
for _, row in regr.iterrows():
lval, rval, est = row['lval'], row['rval'], row['Estimate']
        if (rval not in all_vars) or (not plot_exos and rval in exo_vars) or \
(rval == '1'):
continue
if plot_ests:
pval = row['p-value']
label = '{:.3f}'.format(float(est))
if pval != '-':
label += r'\np-val: {:.2f}'.format(float(pval))
else:
label = str()
g.edge(rval, lval, label=label)
if plot_covs:
covs = inspection[inspection['op'] == '~~']
for _, row in covs.iterrows():
lval, rval, est = row['lval'], row['rval'], row['Estimate']
if lval == rval:
continue
if plot_ests:
pval = row['p-value']
label = '{:.3f}'.format(float(est))
if pval != '-':
label += r'\np-val: {:.2f}'.format(float(pval))
else:
label = str()
g.edge(rval, lval, label=label, dir='both', style='dashed')
g.render(filename, view=show)
return g
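# Hypothetical usage sketch (the model description and filename are
# illustrative; semplot fits nothing itself, it only draws):
# desc = """eta =~ y1 + y2 + y3
# eta ~ x1 + x2"""
# semplot(desc, "model.png", plot_covs=True)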
|
StarcoderdataPython
|
5140052
|
<reponame>jnthn/intellij-community
"{}".format(1, 2)
|
StarcoderdataPython
|
3506663
|
from .encoders import *
from .decoders import *
|
StarcoderdataPython
|
11333719
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
PyCOMPSs Testbench Tasks
========================
"""
# Imports
import unittest
from modules.testMultiReturnFunctions import testMultiReturnFunctions
from modules.testMultiReturnInstanceMethods import testMultiReturnInstanceMethods
from modules.testMultiReturnIntFunctions import testMultiReturnIntFunctions
from modules.testMultiReturnIntInstanceMethods import testMultiReturnIntInstanceMethods
def main():
suite = unittest.TestLoader().loadTestsFromTestCase(testMultiReturnFunctions)
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(testMultiReturnInstanceMethods))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(testMultiReturnIntFunctions))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(testMultiReturnIntInstanceMethods))
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
1818658
|
<reponame>leighmck/wouter
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018, <NAME>
# All rights reserved.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import enum
from wouter.router import message
@enum.unique
class State(enum.Enum):
CLOSED = 'closed'
ESTABLISHING = 'establishing'
FAILED = 'failed'
ESTABLISHED = 'established'
CHALLENGING = 'challenging'
CLOSING = 'closing'
SHUTTING_DOWN = 'shutting-down'
class Session:
"""
establishment
1. client hello
2. client role and feature announcement
3. router welcome
4. router role and feature announcement
"""
def __init__(self, websocket):
self.state = State.CLOSED
self.realm = ''
self.publications = []
self.subscriptions = []
self.registrations = []
self.requests = []
self.roles = []
def hello(self):
pass
def welcome(self):
pass
def abort(self):
pass
def goodbye(self):
pass
def error(self):
pass
|
StarcoderdataPython
|
11365677
|
import pickle
from typing import Any, Dict, List, Optional, Tuple, Union
from tqdm import tqdm
from .create import Create
class _Messages:
"""custom class for messages
"""
add_msg = "adding videos"
rm_msg = "removing videos"
class Playlists:
"""Playlists class interacts with youtube playlists
"""
def __init__(self, playlist_id: str, youtube: Any, snippet: Optional[bool] = False, progress_bars: Optional[bool] = False) -> None:
"""Playlists class interacts with youtube playlists
cost = 1 for playlist information + 1 per page for 50 max results
min cost = 2, max cost = 101
Args:
playlist_id (str): playlist id
youtube (Any): a resource object with methods for interacting with the service.
snippet (Optional[bool], optional): request playlist items with snippet part for more info. Defaults to False.
progress_bars (Optional[bool], optional): display task status with progress bars. Defaults to False.
Note:
most of the methods require OAuth client access
Examples:
>>> pl = Playlists("PLQeIlACGt47P3nQEVGWmaU3669iw6q7mQ", youtube)
>>> pl.responses
>>> pl.video_ids
"""
self._youtube, self._snippet, self._progress_bars = youtube, snippet, progress_bars
self.playlist_id = playlist_id
self.link = f"https://www.youtube.com/playlist?list={playlist_id}"
self.cost = 0
self.title, self.desc, self.status = self._playlist_info()
self.responses = self._pl_responses(snippet=self._snippet)
self._playlist_items = self._playlist_item_ids()
self.video_ids = list(self._playlist_items.keys())
@classmethod
def load(cls, filepath: str, youtube: Any, playlist_id: Optional[Union[str, None]] = None,
remove: Optional[bool] = False, **kwargs):
"""Construct Playlist class from saved pickled file
This constructor creates a new playlist if playlist_id is not provided.
If you use playlist_id and want a complete sync with pickled file, then set remove=True.
Args:
filepath (str): pickled file path
youtube (Any): a resource object with methods for interacting with the service.
playlist_id (Optional[Union[str, None]], optional): playlist id. Defaults to None.
            remove (Optional[bool], optional): remove unnecessary videos from playlist. Defaults to False.
Returns:
Playlist: instance of Playlists class
"""
        progress_bars = bool(kwargs.get("progress_bars"))
# loading pickled instance of ResurrectPlaylist class
loaded_pl = ResurrectPlaylist.load(filepath)
if playlist_id is None:
# create a new playlist
create_item = Create(youtube)
new_pl_id = create_item.playlist(loaded_pl.title, loaded_pl.desc, loaded_pl.status)
# load newly created playlist
new_pl = cls(new_pl_id, youtube, **kwargs)
new_pl.cost += create_item.cost
else:
new_pl = cls(playlist_id, youtube, **kwargs) # load the given playlist
new_pl.update(loaded_pl.title, loaded_pl.desc, loaded_pl.status)
# adding videos
video_ids = loaded_pl.video_ids
if progress_bars:
video_ids = tqdm(video_ids, desc=_Messages.add_msg)
for video_id in video_ids:
new_pl.add_video(video_id)
# removing videos
if playlist_id is not None and remove:
video_ids = new_pl.video_ids
if progress_bars:
                video_ids = tqdm(video_ids, desc=_Messages.rm_msg)
for video_id in video_ids:
if video_id not in loaded_pl.video_ids:
new_pl.remove_video(video_id)
new_pl.refresh()
return new_pl
def __len__(self) -> int:
return self.responses[0]["pageInfo"]["totalResults"]
def _playlist_info(self) -> Tuple[str, str, str]:
request = self._youtube.playlists().list(part="id,snippet,status", id=self.playlist_id)
response = request.execute()
self.cost += 1
title = response["items"][0]["snippet"]["title"]
desc = response["items"][0]["snippet"]["description"]
status = response["items"][0]["status"]["privacyStatus"]
return title, desc, status
def _pl_responses(self, playlist_id: Optional[Union[str, None]] = None, snippet: Optional[bool] = False):
if playlist_id is None:
playlist_id = self.playlist_id
part = "id,snippet,contentDetails" if snippet else "id,contentDetails"
responses = []
playlist_api_queries = {"part": part, "playlistId": playlist_id, "maxResults": 50}
request = self._youtube.playlistItems().list(**playlist_api_queries)
response = request.execute()
self.cost += 1
responses.append(response)
next_page_token = response.get("nextPageToken")
while next_page_token:
request = self._youtube.playlistItems().list(**playlist_api_queries, pageToken=next_page_token)
response = request.execute()
self.cost += 1
responses.append(response)
next_page_token = response.get("nextPageToken")
return responses
def _playlist_item_ids(self) -> Dict[str, List[str]]:
video_ids_dict = {}
for response in self.responses:
for item in response["items"]:
video_id = item["contentDetails"]["videoId"]
if video_id in video_ids_dict:
                    video_ids_dict[video_id].append(item["id"])
else:
video_ids_dict[video_id] = [item["id"]]
return video_ids_dict
def refresh(self) -> None:
"""resfresh playlist responses
cost = 1 per page for 50 max results
"""
self.responses = self._pl_responses(snippet=self._snippet)
self._playlist_items = self._playlist_item_ids()
self.video_ids = list(self._playlist_items.keys())
def update(self, title: str, desc: Optional[Union[str, None]] = None, status: Optional[Union[str, None]] = None) -> dict:
"""update playlist title, description and privacy status
cost = 50
Args:
title (str): title for playlist
desc (Optional[str], optional): description for playlist. Defaults to "".
status (Optional[str], optional): privacy status for playlist. Defaults to "private".
Returns:
dict: response
"""
request_body = {
"id": self.playlist_id,
"kind": "youtube#playlist",
"snippet": {
"title": title,
}
}
if desc is not None:
request_body["snippet"]["description"] = desc
if status is not None:
request_body["status"] = {
"privacyStatus": status
}
request = self._youtube.playlists().update(part="id,snippet,status", body=request_body)
response = request.execute()
self.cost += 50
title = response["snippet"]["title"]
desc = response["snippet"]["description"]
status = response["status"]["privacyStatus"]
self.title, self.desc, self.status = title, desc, status
return response
def delete(self) -> None:
"""delete the intialized playlist from youtube forever
cost = 50
"""
request = self._youtube.playlists().delete(id=self.playlist_id)
request.execute()
self.cost += 50
def add_video(self, video_id: str) -> Union[dict, None]:
"""add videos to intialized playlist by using video id only if not present
cost = 50
Args:
video_id (str): video id
Returns:
Union[dict, None]: returns response if video id is added to playlist else None
"""
if video_id in self.video_ids:
return None
request_body = {
"snippet": {
"playlistId": self.playlist_id,
"resourceId": {
"kind": "youtube#video",
"videoId": video_id
}
}
}
request = self._youtube.playlistItems().insert(part="snippet", body=request_body)
response = request.execute()
self.cost += 50
self.video_ids.append(video_id)
return response
def copy_from(self, playlist_id: str) -> None:
"""copy videos from a given playlist to intialized playlist
Args:
playlist_id (str): playlist id
"""
copy_videos_ids = []
        for response in self._pl_responses(playlist_id):
            for item in response["items"]:
                video_id = item["contentDetails"]["videoId"]
                if video_id not in copy_videos_ids and video_id not in self.video_ids:
                    copy_videos_ids.append(video_id)
if self._progress_bars:
copy_videos_ids = tqdm(copy_videos_ids, desc=_Messages.add_msg)
for video_id in copy_videos_ids:
self.add_video(video_id)
def remove_video(self, video_id: str, recursive: Optional[bool] = True) -> Union[dict, None]:
"""remove video from intialized playlist by using video id only if it's present
cost = 50 per removal of video
Args:
video_id (str): video id to remove
recursive (Optional[bool], optional): remove all videos with same video id. Defaults to True.
Returns:
Union[dict, None]: returns last response if removed else None
"""
if video_id not in self.video_ids:
return None
for playlist_item_id in self._playlist_items[video_id]:
request = self._youtube.playlistItems().delete(id=playlist_item_id)
response = request.execute()
self.cost += 50
self.video_ids.remove(video_id)
if not recursive:
break
return response
def clear(self, skip_ids: Optional[List[str]] = []) -> None:
"""clear/remove all videos from intialized playlist
Args:
skip_ids (Optional[List[str]], optional): list video ids to skip. Defaults to [].
"""
remove_video_ids = [video_id for video_id in self.video_ids if video_id not in skip_ids]
if self._progress_bars:
remove_video_ids = tqdm(remove_video_ids, desc=_Messages.rm_msg)
for video_id in remove_video_ids:
self.remove_video(video_id)
def remove_duplicate(self) -> None:
"""remove duplicate videos from intialized playlist
"""
remove_video_ids = [
video_id
for video_id, playlist_item_id in self._playlist_items.items()
if len(playlist_item_id) > 1
]
if self._progress_bars:
remove_video_ids = tqdm(remove_video_ids, desc=_Messages.rm_msg)
for video_id in remove_video_ids:
self.remove_video(video_id)
def save(self, filepath: str):
"""save the intialized playlist to a pickle file
Args:
filepath (str): pickle file path
Examples:
>>> pl.save("my_music_playlist.pkl")
>>> from youtube_v3_api import ResurrectPlaylist
>>> pl_data = ResurrectPlaylist.load("my_music_playlist.pkl")
>>> pl_data.video_ids
['h329290', 'hj2832']
"""
pl = ResurrectPlaylist(self.title, self.desc, self.status, self.video_ids)
pl.save(filepath)
class ResurrectPlaylist:
"""ResurrectPlaylist class saves and loads its instance in and from a pickled file
"""
def __init__(self, title: str, desc: str, status: str, video_ids: List[str]) -> None:
"""ResurrectPlaylist class saves and loads its instance in a pickled file
"""
self.title, self.desc, self.status = title, desc, status
self.video_ids = video_ids
@classmethod
def load(cls, filepath: str):
"""Construct ResurrectPlaylist class from a pickled file
Args:
filepath (str): pickled file path
Returns:
ResurrectPlaylist: instance of ResurrectPlaylist
"""
with open(filepath, "rb") as f:
pl: ResurrectPlaylist = pickle.load(f)
return cls(pl.title, pl.desc, pl.status, pl.video_ids)
def save(self, filepath: str):
"""save instance of class in a pickle file
Args:
filepath (str): pickle file path
"""
with open(filepath, "wb") as f:
pickle.dump(self, f)
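# Hypothetical end-to-end sketch (`youtube` is assumed to be an authorized
# YouTube Data API v3 resource; the playlist id is illustrative):
# pl = Playlists("PLQeIlACGt47P3nQEVGWmaU3669iw6q7mQ", youtube, progress_bars=True)
# pl.remove_duplicate()
# pl.save("music.pkl")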
|
StarcoderdataPython
|
6607090
|
import numpy as np
from .parameters import get_arguments
from .utils.read_datasets import read_dataset
from .models.encoder import encoder_base_class
import matplotlib.pyplot as plt
def main():
args = get_arguments()
train_loader, test_loader = read_dataset(args)
encoder = encoder_base_class(args)
encoder.train()
for batch_idx, (data, targets) in enumerate(train_loader):
inner_products = encoder(data)
plt.figure(figsize=(10, 5))
plt.hist(
inner_products[
np.random.choice(args.neural_net.train_batch_size),
:,
np.random.choice(
int(
(args.dataset.img_shape[0] - args.defense.patch_size)
/ args.defense.stride
+ 1
)
),
np.random.choice(
int(
(args.dataset.img_shape[0] - args.defense.patch_size)
/ args.defense.stride
+ 1
)
),
],
50,
)
plt.savefig(f"hist_{args.dictionary.type}_{batch_idx}.pdf")
plt.close()
if batch_idx == 9:
break
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
32170
|
import os
from pathlib import Path
from flask import Flask, current_app, jsonify, request
from flask_cors import CORS
from mongoengine import connect, MongoEngineConnectionError
import namesgenerator
from model import Doc
from app_logic import random_df, call_r
def create_app(config=None):
app = Flask(__name__)
CORS(app)
app.config.from_object(config)
mongo_host = os.environ.get('MONGO_HOST', default='mongodb://127.0.0.1:27017')
try:
connect(db='pyr',
host=mongo_host)
except MongoEngineConnectionError as exc:
raise exc
@app.route('/api/python')
def test():
"""Random pandas df"""
df = random_df()
return jsonify({'py': df.to_json()}), 200
@app.route('/api/r')
def from_r():
"""Dataframe from an R tibble using rpy2"""
df = call_r(Path(current_app.config['R_LOCATION'], 'rapp.r'))
return jsonify({'r': df.to_json()}), 200
"""MONGO IO API SIMULATION"""
@app.route('/api/add', methods=['POST'])
def add_doc():
try:
d = Doc(title=namesgenerator.get_random_name())
d.save()
return d.to_json(), 201
except Exception as ex:
raise ex
@app.route('/api/remove', methods=['DELETE'])
def remove_doc():
id = request.args.get('id')
try:
d = Doc.objects.get(id=id)
if d:
d.delete()
return jsonify({'ok': 1}), 200
except Exception as ex:
raise ex
return app
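# Hypothetical local-run sketch (requires a reachable MongoDB at MONGO_HOST):
# app = create_app()
# app.run(host="127.0.0.1", port=5000)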
|
StarcoderdataPython
|
1757192
|
"""Tools for displaying tool-tips.
This includes:
* an abstract base-class for different kinds of tooltips
* a simple text-only Tooltip class
"""
from tkinter import *
class TooltipBase(object):
"""abstract base class for tooltips"""
def __init__(self, anchor_widget):
"""Create a tooltip.
anchor_widget: the widget next to which the tooltip will be shown
Note that a widget will only be shown when showtip() is called.
"""
self.anchor_widget = anchor_widget
self.tipwindow = None
def __del__(self):
self.hidetip()
def showtip(self):
"""display the tooltip"""
if self.tipwindow:
return
self.tipwindow = tw = Toplevel(self.anchor_widget)
# show no border on the top level window
tw.wm_overrideredirect(1)
try:
# This command is only needed and available on Tk >= 8.4.0 for OSX.
# Without it, call tips intrude on the typing process by grabbing
# the focus.
tw.tk.call("::tk::unsupported::MacWindowStyle", "style", tw._w,
"help", "noActivates")
except TclError:
pass
self.position_window()
self.showcontents()
self.tipwindow.update_idletasks() # Needed on MacOS -- see #34275.
self.tipwindow.lift() # work around bug in Tk 8.5.18+ (issue #24570)
def position_window(self):
"""(re)-set the tooltip's screen position"""
x, y = self.get_position()
root_x = self.anchor_widget.winfo_rootx() + x
root_y = self.anchor_widget.winfo_rooty() + y
self.tipwindow.wm_geometry("+%d+%d" % (root_x, root_y))
def get_position(self):
"""choose a screen position for the tooltip"""
# The tip window must be completely outside the anchor widget;
# otherwise when the mouse enters the tip window we get
# a leave event and it disappears, and then we get an enter
# event and it reappears, and so on forever :-(
#
# Note: This is a simplistic implementation; sub-classes will likely
# want to override this.
return 20, self.anchor_widget.winfo_height() + 1
def showcontents(self):
"""content display hook for sub-classes"""
# See ToolTip for an example
raise NotImplementedError
def hidetip(self):
"""hide the tooltip"""
# Note: This is called by __del__, so careful when overriding/extending
tw = self.tipwindow
self.tipwindow = None
if tw:
try:
tw.destroy()
except TclError: # pragma: no cover
pass
class OnHoverTooltipBase(TooltipBase):
"""abstract base class for tooltips, with delayed on-hover display"""
def __init__(self, anchor_widget, hover_delay=1000):
"""Create a tooltip with a mouse hover delay.
anchor_widget: the widget next to which the tooltip will be shown
hover_delay: time to delay before showing the tooltip, in milliseconds
Note that a widget will only be shown when showtip() is called,
e.g. after hovering over the anchor widget with the mouse for enough
time.
"""
super(OnHoverTooltipBase, self).__init__(anchor_widget)
self.hover_delay = hover_delay
self._after_id = None
self._id1 = self.anchor_widget.bind("<Enter>", self._show_event)
self._id2 = self.anchor_widget.bind("<Leave>", self._hide_event)
self._id3 = self.anchor_widget.bind("<Button>", self._hide_event)
def __del__(self):
try:
self.anchor_widget.unbind("<Enter>", self._id1)
self.anchor_widget.unbind("<Leave>", self._id2) # pragma: no cover
self.anchor_widget.unbind("<Button>", self._id3) # pragma: no cover
except TclError:
pass
super(OnHoverTooltipBase, self).__del__()
def _show_event(self, event=None):
"""event handler to display the tooltip"""
if self.hover_delay:
self.schedule()
else:
self.showtip()
def _hide_event(self, event=None):
"""event handler to hide the tooltip"""
self.hidetip()
def schedule(self):
"""schedule the future display of the tooltip"""
self.unschedule()
self._after_id = self.anchor_widget.after(self.hover_delay,
self.showtip)
def unschedule(self):
"""cancel the future display of the tooltip"""
after_id = self._after_id
self._after_id = None
if after_id:
self.anchor_widget.after_cancel(after_id)
def hidetip(self):
"""hide the tooltip"""
try:
self.unschedule()
except TclError: # pragma: no cover
pass
super(OnHoverTooltipBase, self).hidetip()
class Hovertip(OnHoverTooltipBase):
"A tooltip that pops up when a mouse hovers over an anchor widget."
def __init__(self, anchor_widget, text, hover_delay=1000):
"""Create a text tooltip with a mouse hover delay.
anchor_widget: the widget next to which the tooltip will be shown
hover_delay: time to delay before showing the tooltip, in milliseconds
Note that a widget will only be shown when showtip() is called,
e.g. after hovering over the anchor widget with the mouse for enough
time.
"""
super(Hovertip, self).__init__(anchor_widget, hover_delay=hover_delay)
self.text = text
def showcontents(self):
label = Label(self.tipwindow, text=self.text, justify=LEFT,
background="#ffffe0", relief=SOLID, borderwidth=1)
label.pack()
def _tooltip(parent): # htest #
top = Toplevel(parent)
top.title("Test tooltip")
x, y = map(int, parent.geometry().split('+')[1:])
top.geometry("+%d+%d" % (x, y + 150))
label = Label(top, text="Place your mouse over buttons")
label.pack()
button1 = Button(top, text="Button 1 -- 1/2 second hover delay")
button1.pack()
Hovertip(button1, "This is tooltip text for button1.", hover_delay=500)
button2 = Button(top, text="Button 2 -- no hover delay")
button2.pack()
Hovertip(button2, "This is tooltip\ntext for button2.", hover_delay=None)
if __name__ == '__main__':
from unittest import main
main('idlelib.idle_test.test_tooltip', verbosity=2, exit=False)
from idlelib.idle_test.htest import run
run(_tooltip)
|
StarcoderdataPython
|
11310853
|
<reponame>ZedThree/FreeQDSK
"""
SPDX-FileCopyrightText: © 2020 <NAME>, University of York. Email: <EMAIL>
SPDX-License-Identifier: MIT
"""
import numpy
from io import StringIO
from freeqdsk import aeqdsk
def test_writeread():
"""
Test that data can be written then read back
"""
data = {
"shot": 66832,
"time": 2384.0,
"jflag": 1,
"lflag": 0,
"limloc": "SNB",
"mco2v": 3,
"mco2r": 1,
"qmflag": "CLC",
"tsaisq": 11.7513361,
"rcencm": 169.550003,
"bcentr": -2.06767821,
"pasmat": 1213135.38,
"cpasma": 1207042.13,
"rout": 168.491165,
"zout": -6.82398081,
"aout": 63.0725098,
"eout": 1.73637426,
"doutu": 0.160389453,
"doutl": 0.329588085,
"vout": 19912044.0,
"rcurrt": 170.800049,
"zcurrt": 7.52815676,
"qsta": 6.26168156,
"betat": 0.60095495,
"betap": 0.326897353,
"ali": 1.47733176,
"oleft": 3.73984718,
"oright": 4.84749842,
"otop": 32.4465942,
"obott": 20.2485809,
"qpsib": 4.39304399,
"vertn": -0.675418258,
"rco2v": [216.307495, 155.99646, 121.109322],
"dco2v": [27324300900000.0, 29309569900000.0, 29793563200000.0],
"rco2r": [125.545105],
"dco2r": [32812950400000.0],
"shearb": 4.02759838,
"bpolav": 0.282110274,
"s1": 2.155056,
"s2": 1.09512568,
"s3": 0.640428185,
"qout": 7.93196821,
"olefs": -50.0,
"orighs": -50.0,
"otops": -50.0,
"sibdry": -0.016445132,
"areao": 19292.2441,
"wplasm": 309183.625,
"terror": 0.000789557525,
"elongm": 1.18666041,
"qqmagx": 0.620565712,
"cdflux": -0.0285836495,
"alpha": 1.32450712,
"rttt": 155.478485,
"psiref": 0.0,
"xndnt": 0.0,
"rseps1": 147.703217,
"zseps1": -116.341461,
"rseps2": -999.0,
"zseps2": -999.0,
"sepexp": 5.94302845,
"obots": -50.0,
"btaxp": -2.18051338,
"btaxv": -2.03286076,
"aaq1": 30.0145092,
"aaq2": 46.8485107,
"aaq3": 54.2332726,
"seplim": 3.73984718,
"rmagx": 172.453949,
"zmagx": 9.105937,
"simagx": 0.380751818,
"taumhd": 64.3431244,
"betapd": 0.473303556,
"betatd": 0.870102286,
"wplasmd": 447656.406,
"diamag": -2.33697938e-05,
"vloopt": 0.110378414,
"taudia": 93.1602173,
"qmerci": 0.0,
"tavem": 10.0,
}
output = StringIO()
# Write to string
aeqdsk.write(data, output)
# Move to the beginning of the buffer
output.seek(0)
# Read from string
data2 = aeqdsk.read(output)
# Check that data and data2 are the same
for key in data:
if isinstance(data[key], str):
assert data2[key] == data[key]
else:
# Number, or list of numbers
numpy.testing.assert_allclose(data2[key], data[key])
|
StarcoderdataPython
|
393548
|
<gh_stars>0
import os
from cloudmesh.common.util import path_expand
class EncryptFile(object):
def __init__(self, file_in, file_out, debug=False):
self.data = {
'file': file_in,
'secret': file_out,
'pem': path_expand('~/.ssh/id_rsa.pub.pem'),
            'key': path_expand('~/.ssh/id_rsa')
}
self.debug = debug
def _execute(self, command):
if self.debug:
print(command)
os.system(command)
def pem_create(self):
command = path_expand("openssl rsa -in {key} -pubout > {pem}".format(**self.data))
self._execute(command)
#
# TODO: BUG
#
def pem_cat(self):
command = path_expand("cat {pem}".format(**self.data))
self._execute(command)
def encrypt(self):
# encrypt the file into secret.txt
print(self.data)
command = path_expand(
"openssl rsautl -encrypt -pubin -inkey {pem} -in {file} -out {secret}".format(**self.data))
self._execute(command)
def decrypt(self, filename=None):
if filename is not None:
self.data['secret'] = filename
command = path_expand("openssl rsautl -decrypt -inkey {key} -in {secret}".format(**self.data))
self._execute(command)
if __name__ == "__main__":
for filename in ['file.txt', 'secret.txt']:
try:
os.remove(filename)
except Exception as e:
pass
# Creating a file with data
with open("file.txt", "w") as f:
f.write("Big Data is here.")
e = EncryptFile('file.txt', 'secret.txt')
e.encrypt()
e.decrypt()
|
StarcoderdataPython
|
5017621
|
# -*- coding:utf-8 -*-
__author__ = 'SheldonChen'
class Settings(object):
    # Security check code: a 32-character string made up of digits and letters
ALIPAY_KEY = '<KEY>'
ALIPAY_INPUT_CHARSET = 'utf-8'
    # Partner ID: a 16-digit number starting with 2088
ALIPAY_PARTNER = '2088311784493747'
    # Contracted Alipay account or seller's Alipay account
ALIPAY_SELLER_EMAIL = '<EMAIL>'
ALIPAY_SIGN_TYPE = 'MD5'
    # Page to redirect to after payment (synchronous notification); must be a full http:// URL, custom parameters like ?id=123 are not allowed
#ALIPAY_RETURN_URL = 'http://www.cniao5.com/pay/return'
ALIPAY_RETURN_URL = 'http://www.cniao5.com/pay/return/alipay'
    # Page for asynchronous server notifications during the transaction; must be a full http:// URL, custom parameters like ?id=123 are not allowed
#ALIPAY_NOTIFY_URL = 'http://www.cniao5.com/pay/notify'
ALIPAY_NOTIFY_URL = 'http://www.cniao5.com/pay/notify/alipay'
    # Page to redirect to after payment (synchronous notification); must be a full http:// URL, custom parameters like ?id=123 are not allowed
#ALIPAY_CLAZZ_RETURN_URL = 'http://www.cniao5.com/pay/clazz/return'
#ALIPAY_CLAZZ_RETURN_URL = 'http://www.cniao5.com/pay/clazz/return'
    # Page for asynchronous server notifications during the transaction; must be a full http:// URL, custom parameters like ?id=123 are not allowed
#ALIPAY_CLAZZ_NOTIFY_URL = 'http://www.cniao5.com/pay/clazz/notify'
#ALIPAY_CLAZZ_NOTIFY_URL = 'http://www.cniao5.com/pay/clazz/notify'
ALIPAY_SHOW_URL = ''
    # Access mode: use https if your server supports SSL, otherwise use http
ALIPAY_TRANSPORT = 'http'
|
StarcoderdataPython
|
6670775
|
"""Unit test for s3 bucket."""
import unittest
from unittest.mock import MagicMock
import boto3
from e2fyi.utils.aws.s3 import S3Bucket
class S3BucketTest(unittest.TestCase):
"""TestCase for S3Bucket"""
def setUp(self):
s3client = boto3.client("s3")
s3client.upload_fileobj = MagicMock() # type: ignore
self.s3client = s3client
def test_basic(self):
bucket = S3Bucket(
"bucketname", get_prefix=lambda filename: "prefix/%s" % filename
)
self.assertEqual(bucket.name, "bucketname")
self.assertEqual(bucket.prefix, "prefix/")
def test_create_resource_key(self):
bucket = S3Bucket(name="bucket", get_prefix=lambda x: "folder/%s" % x)
key = bucket.create_resource_key("filename.ext")
self.assertEqual(key, "folder/filename.ext")
def test_create_resource_uri(self):
bucket = S3Bucket(name="bucket", get_prefix=lambda x: "folder/%s" % x)
key = bucket.create_resource_uri("filename.ext")
self.assertEqual(key, "s3a://bucket/folder/filename.ext")
|
StarcoderdataPython
|
6499076
|
<filename>cp_light_control/cp_task_2.py
import time
import board
from analogio import AnalogOut
led = AnalogOut(board.A0)
while True:
    # count up from 0 to 65535 in steps of 64: 1024 steps spanning the DAC's 10-bit range
for i in range(0, 65535, 64):
led.value = i
time.sleep(0.1)
|
StarcoderdataPython
|
3597427
|
<reponame>trevortomlin/Elliptic-Curve-Python
import math
import copy
import matplotlib.pyplot as plt
#https://stackoverflow.com/questions/31074172/elliptic-curve-point-addition-over-a-finite-field-in-python
def inv_mod_p(x, p):
"""
Compute an inverse for x modulo p, assuming that x
is not divisible by p.
"""
if x % p == 0:
raise ZeroDivisionError("Impossible inverse")
return pow(x, p-2, p)
class Point:
"""
Description:
Describes a point on a finite curve.
Functions:
from_xy()
from_hex()
point_to_hex() -> str
point_to_hex_compressed() -> str
hex_to_coords() -> (x, y)
hex_to_coords_compressed() -> (x, y)
double() -> Point
Variables:
curve
x
y
"""
def __init__(self, curve):
self.curve = curve
def from_xy(self, x, y):
"""
Creates a point from x and y values.
"""
self.x, self.y = x, y
def from_hex(self, hex_pt):
"""
Creates point from uncompressed or compressed hex value.
"""
print(len(hex_pt))
print((self.curve.keysize // 4) + 4)
# Uncompressed Point
if len(hex_pt) == (self.curve.keysize // 2) + 4:
self.x, self.y = self.hex_to_coords(hex_pt)
# Compressed Point
elif len(hex_pt) == (self.curve.keysize // 4) + 4:
self.x, self.y = self.hex_to_coords_compressed(hex_pt)
else:
raise Exception("Not valid hex point.")
def point_to_hex(self):
"""
Returns the representation of self as an uncompressed point.
"""
return "0x04" + hex(self.x)[2:].zfill(self.curve.keysize // 4) + hex(self.y)[2:].zfill(self.curve.keysize // 4)
def point_to_hex_compressed(self):
"""
Returns the representation of self as a compressed point.
"""
lsb = self.y & 1
if lsb == 1:
return "0x02" + hex(self.x)[2:].zfill(self.curve.keysize // 4)
elif lsb == 0:
return "0x03" + hex(self.x)[2:].zfill(self.curve.keysize // 4)
def hex_to_coords(self, hex_pt):
"""
Converts uncompressed hex value to xy points.
"""
        half = 4 + self.curve.keysize // 4
        x = int(hex_pt[4:half], 16)
        y = int(hex_pt[half:], 16)
        return x, y
def hex_to_coords_compressed(self, hex_pt):
"""
Converts compressed hex value to xy points.
"""
byte = hex_pt[:4]
lsb = 0
if byte == "0x02":
lsb = 1
elif byte == "0x03":
lsb = 0
x = int(hex_pt[4:], 16)
y = self.curve.evaluate(int(hex_pt[4:], 16), lsb)
return x,y
def __eq__(self, n):
if isinstance(n, Point):
return self.x == n.x and self.y == n.y
def __truediv__(self, n):
raise Exception("Points cannot be divided.")
def __floordiv__(self, n):
raise Exception("Points cannot be floor divided.")
def __mul__(self, n: int):
if type(n) is Point:
raise Exception("Points cannot be multiplied.")
bits = bin(n)[2:][::-1]
Q = 0
N = copy.copy(self)
for bit in bits:
if bit == "1":
Q = N + Q
N = N.double()
return Q
def __rmul__(self, n):
if type(n) is Point:
raise Exception("Points cannot be multiplied.")
return self * n
def double(self):
"""
Returns point that is double self.
"""
l = self.curve.tangent(self)
x = (l**2 - 2 * self.x) % self.curve.p
y = (l * (self.x - x) - self.y) % self.curve.p
r = Point(self.curve)
r.from_xy(x, y)
return r
def __neg__(self):
n = Point(self.curve)
n.from_xy(self.x, -self.y % self.curve.p)
return n
def __add__(self, b):
#P+O=P
if b == 0:
return self
#P+-P=O
if self.x == b.x:
if self.y == -b.y:
return 0
#P+P=2P
elif self.y == b.y:
return self.double()
s = self.curve.secant(self, b)
x = (s**2 - self.x - b.x) % self.curve.p
y = (s * (self.x - x) - self.y) % self.curve.p
r = Point(self.curve)
r.from_xy(x, y)
return r
def __sub__(self, b):
return self + -b
def __str__(self):
return "(" + str(self.x) + ", " + str(self.y) + ")"
class Curve:
"""
Description:
        Describes a curve over the finite field F_p with coefficients stored as
        [constant, x, x^2, x^3], i.e. [b, a, 0, 1] for y^2 = x^3 + a*x + b
Functions:
valid() -> bool
calcValidPoint() -> list of points
graphPoints() -> float
evaluate(x, sign) -> float
tangent(a) -> float
secant(a, b) -> float
Variables:
coefficients
p
discriminant
keysize
numPoints
"""
def __init__(self, coefficients, p, keysize=256):
self.coefficients = coefficients
self.p = p
self.keysize = keysize
# Can be used later to check for validity of curve.
self.discriminant = 4 * self.coefficients[1]**3 + 27 * self.coefficients[0]**2
def valid(self, a: Point):
"""
Determines whether a given point is valid on this curve.
"""
try:
return a.y**2 % self.p == (pow(a.x, 3, self.p) + self.coefficients[1] * a.x + self.coefficients[0]) % self.p
except TypeError:
return False
def calcValidPoints(self):
"""
Calculates the number of points on the curve and stores each point in a list.
"""
# Use baby step method here to improve speed
validPoints = []
for x in range(self.p):
for y in range(self.p):
if (y ** 2) % self.p == ((x ** 3) + self.coefficients[1] * x + self.coefficients[0]) % self.p:
validPoints.append((x,y))
self.numPoints = len(validPoints)
return validPoints
def graphPoints(self):
"""
Graphs the points on the curve using Matplotlib.
"""
points = self.calcValidPoints()
print(points)
xs = [i[0] for i in points]
ys = [i[1] for i in points]
fig, ax = plt.subplots()
ax.scatter(xs, ys)
plt.axline((0, self.p/2), (self.p, self.p/2))
plt.show()
def evaluate(self, x, sign: int) -> float:
"""
Gets the y value from a given x value from the curve.
"""
y2 = 0
for index in range(len(self.coefficients)):
y2 += self.coefficients[index] * x ** index
#https://crypto.stackexchange.com/questions/20667/modulo-square-roots
y = pow(y2, ((self.p+1)//4), self.p)
if sign == 0:
y = self.p - y
return y
def tangent(self, a: Point) -> float:
"""
Returns the tangent of a point.
"""
t = (3 * a.x**2 + self.coefficients[1]) * inv_mod_p((2 * a.y), self.p)
return t
def secant(self, a: Point, b: Point):
"""
Returns the secant of a point.
"""
try:
s = (b.y - a.y) * inv_mod_p((b.x - a.x), self.p)
return s
except ZeroDivisionError as e:
print("Points cannot be the same.", e)
return 0
def __str__(self):
return "Elliptic Curve defined by y^2 = " + str(self.coefficients[3]) + "x^3 + " + str(self.coefficients[1]) + "x + " + str(self.coefficients[0]) + " in 𝔽" + str(self.p)
|
StarcoderdataPython
|
135841
|
<gh_stars>1-10
"""shufflenetv2 in pytorch
[1] <NAME>, <NAME>, <NAME>, <NAME>
ShuffleNet V2: Practical Guidelines for Efficient CNN Architecture Design
https://arxiv.org/abs/1807.11164
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
def channel_split(x, split):
"""split a tensor into two pieces along channel dimension
Args:
x: input tensor
split:(int) channel size for each pieces
"""
assert x.size(1) == split * 2
return torch.split(x, split, dim=1)
def channel_shuffle(x, groups):
"""channel shuffle operation
Args:
x: input tensor
groups: input branch number
"""
batch_size, channels, height, width = x.size()
channels_per_group = int(channels / groups)
x = x.view(batch_size, groups, channels_per_group, height, width)
x = x.transpose(1, 2).contiguous()
x = x.view(batch_size, -1, height, width)
return x
class ShuffleUnit(nn.Module):
def __init__(self, in_channels, out_channels, stride):
super().__init__()
self.stride = stride
self.in_channels = in_channels
self.out_channels = out_channels
if stride != 1 or in_channels != out_channels:
self.residual = nn.Sequential(
nn.Conv2d(in_channels, in_channels, 1),
nn.BatchNorm2d(in_channels),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels, in_channels, 3, stride=stride, padding=1, groups=in_channels),
nn.BatchNorm2d(in_channels),
nn.Conv2d(in_channels, int(out_channels / 2), 1),
nn.BatchNorm2d(int(out_channels / 2)),
nn.ReLU(inplace=True)
)
self.shortcut = nn.Sequential(
nn.Conv2d(in_channels, in_channels, 3, stride=stride, padding=1, groups=in_channels),
nn.BatchNorm2d(in_channels),
nn.Conv2d(in_channels, int(out_channels / 2), 1),
nn.BatchNorm2d(int(out_channels / 2)),
nn.ReLU(inplace=True)
)
else:
self.shortcut = nn.Sequential()
in_channels = int(in_channels / 2)
self.residual = nn.Sequential(
nn.Conv2d(in_channels, in_channels, 1),
nn.BatchNorm2d(in_channels),
nn.ReLU(inplace=True),
nn.Conv2d(in_channels, in_channels, 3, stride=stride, padding=1, groups=in_channels),
nn.BatchNorm2d(in_channels),
nn.Conv2d(in_channels, in_channels, 1),
nn.BatchNorm2d(in_channels),
nn.ReLU(inplace=True)
)
def forward(self, x):
if self.stride == 1 and self.out_channels == self.in_channels:
shortcut, residual = channel_split(x, int(self.in_channels / 2))
else:
shortcut = x
residual = x
shortcut = self.shortcut(shortcut)
residual = self.residual(residual)
x = torch.cat([shortcut, residual], dim=1)
x = channel_shuffle(x, 2)
return x
class ShuffleNetV2(nn.Module):
def __init__(self, ratio=1, class_num=100):
super().__init__()
if ratio == 0.5:
out_channels = [48, 96, 192, 1024]
elif ratio == 1:
out_channels = [116, 232, 464, 1024]
elif ratio == 1.5:
out_channels = [176, 352, 704, 1024]
elif ratio == 2:
out_channels = [244, 488, 976, 2048]
else:
            raise ValueError('unsupported ratio number')
self.pre = nn.Sequential(
nn.Conv2d(3, 24, 3, padding=1),
nn.BatchNorm2d(24)
)
self.stage2 = self._make_stage(24, out_channels[0], 3)
self.stage3 = self._make_stage(out_channels[0], out_channels[1], 7)
self.stage4 = self._make_stage(out_channels[1], out_channels[2], 3)
self.conv5 = nn.Sequential(
nn.Conv2d(out_channels[2], out_channels[3], 1),
nn.BatchNorm2d(out_channels[3]),
nn.ReLU(inplace=True)
)
self.fc = nn.Linear(out_channels[3], class_num)
def forward(self, x):
x = self.pre(x)
x = self.stage2(x)
x = self.stage3(x)
x = self.stage4(x)
x = self.conv5(x)
x = F.adaptive_avg_pool2d(x, 1)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
def _make_stage(self, in_channels, out_channels, repeat):
layers = []
layers.append(ShuffleUnit(in_channels, out_channels, 2))
while repeat:
layers.append(ShuffleUnit(out_channels, out_channels, 1))
repeat -= 1
return nn.Sequential(*layers)
def shufflenetv2():
return ShuffleNetV2()
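# Minimal smoke test (CIFAR-sized input; 100 classes by default):
if __name__ == '__main__':
    net = shufflenetv2()
    out = net(torch.randn(1, 3, 32, 32))
    print(out.shape)  # torch.Size([1, 100])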
|
StarcoderdataPython
|
5080067
|
a = 100
b = 100
c = 100
surfaceArea = 2 * (a*b + b*c + a*c)
print(surfaceArea)
volume = a * b * c
print(volume)
|
StarcoderdataPython
|
3292443
|
"""
pygame-menu
https://github.com/ppizarror/pygame-menu
EXAMPLE - GAME SELECTOR
Game with 3 difficulty options.
License:
-------------------------------------------------------------------------------
The MIT License (MIT)
Copyright 2017-2021 <NAME>. @ppizarror
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-------------------------------------------------------------------------------
"""
__all__ = ['main']
import pygame
import pygame_menu
from pygame_menu.examples import create_example_window
from random import randrange
from typing import Tuple, Any, Optional, List
# -----------------------------------------------------------------------------
# Constants and global variables
# -----------------------------------------------------------------------------
ABOUT = ['pygame-menu {0}'.format(pygame_menu.__version__),
'Author: @{0}'.format(pygame_menu.__author__),
'Email: {0}'.format(pygame_menu.__email__)]
DIFFICULTY = ['EASY']
FPS = 60
WINDOW_SIZE = (640, 480)
clock: Optional['pygame.time.Clock'] = None
main_menu: Optional['pygame_menu.Menu'] = None
surface: Optional['pygame.Surface'] = None
# -----------------------------------------------------------------------------
# Methods
# -----------------------------------------------------------------------------
def change_difficulty(value: Tuple[Any, int], difficulty: str) -> None:
"""
Change difficulty of the game.
:param value: Tuple containing the data of the selected object
:param difficulty: Optional parameter passed as argument to add_selector
"""
selected, index = value
print('Selected difficulty: "{0}" ({1}) at index {2}'
''.format(selected, difficulty, index))
DIFFICULTY[0] = difficulty
def random_color() -> Tuple[int, int, int]:
"""
Return a random color.
:return: Color tuple
"""
return randrange(0, 255), randrange(0, 255), randrange(0, 255)
def play_function(difficulty: List, font: 'pygame.font.Font', test: bool = False) -> None:
"""
Main game function.
:param difficulty: Difficulty of the game
:param font: Pygame font
:param test: Test method, if ``True`` only one loop is allowed
:return: None
"""
assert isinstance(difficulty, list)
difficulty = difficulty[0]
assert isinstance(difficulty, str)
# Define globals
global main_menu
global clock
if difficulty == 'EASY':
f = font.render('Playing as a baby (easy)', True, (255, 255, 255))
elif difficulty == 'MEDIUM':
f = font.render('Playing as a kid (medium)', True, (255, 255, 255))
elif difficulty == 'HARD':
f = font.render('Playing as a champion (hard)', True, (255, 255, 255))
else:
raise Exception('unknown difficulty {0}'.format(difficulty))
# Draw random color and text
bg_color = random_color()
f_width = f.get_size()[0]
# Reset main menu and disable
    # You can also set another menu, like a 'pause menu', or just use the
    # same main_menu as the menu that checks all your input.
main_menu.disable()
main_menu.full_reset()
while True:
# noinspection PyUnresolvedReferences
        clock.tick(FPS)
# Application events
events = pygame.event.get()
for e in events:
if e.type == pygame.QUIT:
exit()
elif e.type == pygame.KEYDOWN:
if e.key == pygame.K_ESCAPE:
main_menu.enable()
                    # Quit this function and return to the main-menu loop in main()
return
# Pass events to main_menu
if main_menu.is_enabled():
main_menu.update(events)
# Continue playing
surface.fill(bg_color)
surface.blit(f, ((WINDOW_SIZE[0] - f_width) / 2, WINDOW_SIZE[1] / 2))
pygame.display.flip()
        # In test mode, run a single loop and return
if test:
break
def main_background() -> None:
"""
Function used by menus, draw on background while menu is active.
:return: None
"""
global surface
surface.fill((128, 0, 128))
def main(test: bool = False) -> None:
"""
Main program.
:param test: Indicate function is being tested
:return: None
"""
# -------------------------------------------------------------------------
# Globals
# -------------------------------------------------------------------------
global clock
global main_menu
global surface
# -------------------------------------------------------------------------
# Create window
# -------------------------------------------------------------------------
surface = create_example_window('Example - Game Selector', WINDOW_SIZE)
clock = pygame.time.Clock()
# -------------------------------------------------------------------------
# Create menus: Play Menu
# -------------------------------------------------------------------------
play_menu = pygame_menu.Menu(
height=WINDOW_SIZE[1] * 0.7,
title='Play Menu',
width=WINDOW_SIZE[0] * 0.75
)
submenu_theme = pygame_menu.themes.THEME_DEFAULT.copy()
submenu_theme.widget_font_size = 15
play_submenu = pygame_menu.Menu(
height=WINDOW_SIZE[1] * 0.5,
theme=submenu_theme,
title='Submenu',
width=WINDOW_SIZE[0] * 0.7
)
for i in range(30):
play_submenu.add.button('Back {0}'.format(i), pygame_menu.events.BACK)
play_submenu.add.button('Return to main menu', pygame_menu.events.RESET)
play_menu.add.button('Start', # When pressing return -> play(DIFFICULTY[0], font)
play_function,
DIFFICULTY,
pygame.font.Font(pygame_menu.font.FONT_FRANCHISE, 30))
play_menu.add.selector('Select difficulty ',
[('1 - Easy', 'EASY'),
('2 - Medium', 'MEDIUM'),
('3 - Hard', 'HARD')],
onchange=change_difficulty,
selector_id='select_difficulty')
play_menu.add.button('Another menu', play_submenu)
play_menu.add.button('Return to main menu', pygame_menu.events.BACK)
# -------------------------------------------------------------------------
    # Create menus: About
# -------------------------------------------------------------------------
about_theme = pygame_menu.themes.THEME_DEFAULT.copy()
about_theme.widget_margin = (0, 0)
about_menu = pygame_menu.Menu(
height=WINDOW_SIZE[1] * 0.6,
theme=about_theme,
title='About',
width=WINDOW_SIZE[0] * 0.6
)
for m in ABOUT:
about_menu.add.label(m, align=pygame_menu.locals.ALIGN_LEFT, font_size=20)
about_menu.add.vertical_margin(30)
about_menu.add.button('Return to menu', pygame_menu.events.BACK)
# -------------------------------------------------------------------------
# Create menus: Main
# -------------------------------------------------------------------------
main_theme = pygame_menu.themes.THEME_DEFAULT.copy()
main_menu = pygame_menu.Menu(
height=WINDOW_SIZE[1] * 0.6,
theme=main_theme,
title='Main Menu',
width=WINDOW_SIZE[0] * 0.6
)
main_menu.add.button('Play', play_menu)
main_menu.add.button('About', about_menu)
main_menu.add.button('Quit', pygame_menu.events.EXIT)
# -------------------------------------------------------------------------
# Main loop
# -------------------------------------------------------------------------
while True:
# Tick
clock.tick(FPS)
# Paint background
main_background()
# Application events
events = pygame.event.get()
for event in events:
if event.type == pygame.QUIT:
exit()
# Main menu
if main_menu.is_enabled():
main_menu.mainloop(surface, main_background, disable_loop=test, fps_limit=FPS)
# Flip surface
pygame.display.flip()
        # In test mode, exit after the first loop
if test:
break
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
6555426
|
<reponame>jrstats/pymirror
import logging
from typing import Dict, Any, List
class Logger(logging.Logger):
def __init__(self, loggerName: str, config: Dict[str, Any]) -> None:
## initialise class
super().__init__(loggerName)
self.config: Dict[str, Any] = config
## formatter
self.f: logging.Formatter = logging.Formatter(self.config["format"])
## handlers
self.sh: logging.Handler = logging.StreamHandler()
self.fh: logging.Handler = logging.FileHandler(self.config["filename"])
for h in [self.sh, self.fh]:
h.setLevel(self.config["level"])
h.setFormatter(self.f)
self.addHandler(h)
self.setLevel(self.config["level"])
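# Hypothetical usage sketch: the config keys below mirror the ones the class
# reads ("format", "filename", "level"); the concrete values are assumptions.
if __name__ == "__main__":
    cfg = {
        "format": "%(asctime)s %(name)s %(levelname)s: %(message)s",
        "filename": "pymirror.log",
        "level": logging.INFO,
    }
    log = Logger("demo", cfg)
    log.info("written to both the console and pymirror.log")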
|
StarcoderdataPython
|
8140648
|
<reponame>maxiimilian/distlink
from .distlink import COrbitData, SMOIDResult, SLCResult, detect_suitable_options, test_peri_apo, LC, MOID_fast, MOID_direct_search
|
StarcoderdataPython
|
3397152
|
# Generated by Django 2.2.5 on 2019-12-20 11:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dockerapi', '0015_auto_20191220_1903'),
]
operations = [
migrations.AlterField(
model_name='networkinfo',
name='create_user',
field=models.IntegerField(verbose_name='创建用户ID'),
),
migrations.AlterField(
model_name='timemoudel',
name='time_id',
field=models.CharField(default='fb8a09b5-c9d7-4001-9574-baecc99d2a43', max_length=255, primary_key=True, serialize=False, verbose_name='ID'),
),
]
|
StarcoderdataPython
|
4883407
|
"""Main package."""
import pytest
import pkg_resources
import micromagnetictests.calculatortests
from .get_tests import get_tests
__version__ = pkg_resources.get_distribution(__name__).version
def test():
"""Run all package tests.
Examples
--------
1. Run all tests.
>>> import micromagnetictests as mt
...
>>> # mt.test()
"""
return pytest.main(['-v', '--pyargs',
'micromagnetictests', '-l']) # pragma: no cover
|
StarcoderdataPython
|
11234707
|
<reponame>mylenefarias/360RAT
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'upload_video_window_dark.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_UploadVideo(object):
def setupUi(self, UploadVideo):
UploadVideo.setObjectName("UploadVideo")
UploadVideo.resize(329, 131)
UploadVideo.setStyleSheet("background-color: rgb(28, 29, 73);\n"
"color: rgb(255, 255, 255);")
self.textBrowser = QtWidgets.QTextBrowser(UploadVideo)
self.textBrowser.setGeometry(QtCore.QRect(40, 40, 261, 51))
self.textBrowser.setObjectName("textBrowser")
self.retranslateUi(UploadVideo)
QtCore.QMetaObject.connectSlotsByName(UploadVideo)
def retranslateUi(self, UploadVideo):
_translate = QtCore.QCoreApplication.translate
UploadVideo.setWindowTitle(_translate("UploadVideo", "Dialog"))
self.textBrowser.setHtml(_translate("UploadVideo", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:7.8pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:14pt; font-weight:600;\">Upload video ...</span></p></body></html>"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
UploadVideo = QtWidgets.QDialog()
ui = Ui_UploadVideo()
ui.setupUi(UploadVideo)
UploadVideo.show()
sys.exit(app.exec_())
|
StarcoderdataPython
|
3498817
|
<reponame>movermeyer/pyramid_es
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from hashlib import sha1
from sqlalchemy import Column, ForeignKey, types, orm
from sqlalchemy.ext.declarative import declarative_base
from ..mixin import ElasticMixin, ESMapping, ESString, ESField
Base = declarative_base()
class Genre(Base, ElasticMixin):
__tablename__ = 'genres'
id = Column(types.String(40), primary_key=True)
title = Column(types.Unicode(40))
def __init__(self, *args, **kwargs):
Base.__init__(self, *args, **kwargs)
self.id = sha1(self.title.encode('utf-8')).hexdigest()
@classmethod
def elastic_mapping(cls):
return ESMapping(
analyzer='content',
properties=ESMapping(
ESString('title', boost=5.0)))
class Movie(Base, ElasticMixin):
__tablename__ = 'movies'
id = Column(types.String(40), primary_key=True)
title = Column(types.Unicode(40))
director = Column(types.Unicode(40))
year = Column(types.Integer)
rating = Column(types.Numeric)
genre_id = Column(None, ForeignKey('genres.id'))
genre = orm.relationship('Genre')
__elastic_parent__ = ('Genre', 'genre_id')
def __init__(self, *args, **kwargs):
Base.__init__(self, *args, **kwargs)
self.id = sha1(self.title.encode('utf-8')).hexdigest()
@property
def genre_title(self):
return self.genre and self.genre.title or ''
@classmethod
def elastic_mapping(cls):
return ESMapping(
analyzer='content',
properties=ESMapping(
ESString('title', boost=5.0),
ESString('director'),
ESField('year'),
ESField('rating'),
ESString('genre_title', analyzer='lowercase')))
class Unindexed(Base):
# Does not inherit from ElasticMixin.
__tablename__ = 'unindexed'
id = Column(types.Integer, primary_key=True)
def get_data():
mystery = Genre(title=u'Mystery')
comedy = Genre(title=u'Comedy')
action = Genre(title=u'Action')
drama = Genre(title=u'Drama')
genres = [mystery, comedy, action, drama]
movies = [
Movie(
title=u'To Catch a Thief',
director=u'<NAME>',
year=1955,
rating=7.5,
genre=mystery,
genre_id=mystery.id,
),
Movie(
title=u'Vertigo',
director=u'<NAME>',
year=1958,
rating=8.5,
genre=mystery,
genre_id=mystery.id,
),
Movie(
title=u'North by Northwest',
director=u'<NAME>',
year=1959,
rating=8.5,
genre=mystery,
genre_id=mystery.id,
),
Movie(
title=u'Destination Tokyo',
director=u'<NAME>',
year=1943,
rating=7.1,
genre=action,
genre_id=action.id,
),
Movie(
title=u'<NAME>',
director=u'<NAME>',
year=1977,
rating=8.2,
genre=comedy,
genre_id=comedy.id,
),
Movie(
title=u'Sleeper',
director=u'<NAME>',
year=1973,
rating=7.3,
genre=comedy,
genre_id=comedy.id,
),
Movie(
title=u'Captain Blood',
director=u'<NAME>',
year=1935,
rating=7.8,
genre=action,
genre_id=action.id,
),
Movie(
title=u'Metropolis',
director=u'<NAME>',
year=1927,
rating=8.4,
genre=drama,
genre_id=drama.id,
)]
return genres, movies
|
StarcoderdataPython
|
8118122
|
def test_fn(a,b,c):
print('%d' %(a+b-c))
import inspect
print(inspect.getsource(test_fn))
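# Related sketch (illustrative addition): inspect can also report just the
# call signature, which is often enough when the full source is not needed.
print(inspect.signature(test_fn))  # -> (a, b, c)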
|
StarcoderdataPython
|
8154540
|
import os
import sys
import click
import requests
TOKEN_URL = 'https://openapi.baidu.com/oauth/2.0/token?grant_type={0}&client_id={1}&client_secret={2}'
TEXT2AUDIO_URL = 'http://tsn.baidu.com/text2audio?tex={0}&lan=zh&cuid={1}&ctp=1&tok={2}&spd={3}&pit={4}&vol={5}&per={6}'
GRANT_TYPE = 'client_credentials'
CUID = 'pyandi_ffff'
TTS_AK = '<KEY>'
TTS_SK = 'f8ae7eea10b85c8e8b13eff87e4a7f15'
def get_token():
ak, sk = TTS_AK, TTS_SK
if 'TTS_AK' in os.environ:
ak = os.environ['TTS_AK']
if 'TTS_SK' in os.environ:
sk = os.environ['TTS_SK']
url = TOKEN_URL.format(GRANT_TYPE, ak, sk)
r = requests.post(url)
if r.status_code == 200:
return r.json()['access_token']
else:
print(r.json())
raise Exception('Get Token Error!')
def text2audio(text, spd=5, pit=5, vol=5, per=2):
lst = []
tok = get_token()
while text:
_text, text = text[:1024], text[1024:]
url = TEXT2AUDIO_URL.format(_text, CUID, tok, spd, pit, vol, per)
r = requests.post(url)
if r.headers['Content-type'] == 'audio/mp3':
lst.append(r.content)
else:
print(r.json())
raise Exception('Text to audio error!')
return b''.join(lst)
# @click.command()
# @click.option('--text', '-t', help='The text from stdin.')
# @click.option('--from_file', '-f', help='The text from file.')
# @click.option('--result', '-r', default='default.mp3', help='The result file.')
# @click.option('--speech/--no-speech', default=False, help='Speak the result aloud or not.')
# @click.option('--speech_app', default='mpv', help='Playback app, e.g. "mpv".')
# @click.option('--spd', default=5, help='The speed. [0-9]')
# @click.option('--pit', default=5, help='The pitch. [0-9]')
# @click.option('--vol', default=5, help='The volume. [0-9]')
# @click.option('--per', default=2, help='The person. [0,1,3,4]')
def run(text=None, from_file='forecast.txt', result=None, speech=False, speech_app='mpv', spd=5, pit=5, vol=5, per=2):
if text is None and from_file is None:
raise Exception("Please give a option text or from_file!")
if text is None:
if not os.path.exists(from_file):
raise Exception('The from file {0} not exists!'.format(from_file))
        with open(from_file, 'r') as f:
            text = f.read()
audio = text2audio(text, spd, pit, vol, per)
with open(result, 'wb') as f:
f.write(audio)
    if speech:
        os.system('{0} {1}'.format(speech_app, result))
if __name__ == '__main__':
    try:
        run(result='default.mp3')
    except Exception as ex:
        print(ex)
        sys.exit(1)
|
StarcoderdataPython
|
80974
|
print("Heeeey")
|
StarcoderdataPython
|
3448328
|
__title__ = 'git_demo'
__description__ = 'Demo of GIT workflow'
__url__ = 'https://github.com/gonzalocasas/git_demo'
__version__ = '0.1.0'
__author__ = '<NAME> Research'
__author_email__ = '<EMAIL>'
__license__ = 'MIT license'
__copyright__ = 'Copyright 2018 Gramazio Kohler Research'
__all__ = ['__author__', '__author_email__', '__copyright__', '__description__', '__license__', '__title__', '__url__', '__version__']
|
StarcoderdataPython
|
5139306
|
<reponame>halhenke/promnesia
#!/usr/bin/env python3
import sys
from pathlib import Path
from subprocess import check_call
def convert(path: Path):
suf = '.mp4'
if path.suffix == suf:
# makes it easier for shell globbing...
path = path.with_suffix('')
inp = path.with_suffix(suf)
assert inp.exists(), inp
subs = path.with_suffix('.ssa')
webm = path.with_suffix('.webm')
# jeez... https://video.stackexchange.com/a/28276/29090
# otherwise quiality sucks, e.g. letters are grainy
#
# ok, nice guide.. https://gist.github.com/Vestride/278e13915894821e1d6f#convert-to-webm
#
passfile = path.with_suffix(".pass0")
for stage in [
f'-b:v 0 -crf 30 -pass 1 -passlogfile {passfile} -an -f webm /dev/null',
f'-b:v 0 -crf 30 -pass 2 -passlogfile {passfile} {webm}' if all(
x not in str(inp) for x in (
# fucking hell, it segfaults...
'child-visits-2',
'highlights',
)) else str(webm),
]:
check_call([
'ffmpeg',
# TODO display banner if running interactively??
# '-hide_banner', '-loglevel', 'panic', # less spam
'-y', # allow overwrite
'-i', inp,
'-vf', f"ass={subs}",
*stage.split(),
]) # TODO cwd??
if __name__ == '__main__':
paths = list(map(Path, sys.argv[1:]))
from concurrent.futures import ThreadPoolExecutor
with ThreadPoolExecutor() as pool:
for _ in pool.map(convert, paths):
# need to force the iterator
pass
|
StarcoderdataPython
|
51449
|
# Import modules
import groupdocs_annotation_cloud
from Common import Common
class MoveFolder:
@classmethod
def Run(cls):
# Create instance of the API
api = groupdocs_annotation_cloud.FolderApi.from_config(Common.GetConfig())
try:
request = groupdocs_annotation_cloud.MoveFolderRequest("annotationdocs1", "annotationdocs1\\annotationdocs", Common.myStorage, Common.myStorage)
api.move_folder(request)
print("Expected response type is Void: 'annotationdocs1' folder moved to 'annotationdocs/annotationdocs1'.")
except groupdocs_annotation_cloud.ApiException as e:
print("Exception while calling API: {0}".format(e.message))
|
StarcoderdataPython
|
3506817
|
<filename>basic/dataSource.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 4 20:07:36 2021
@author: alikemalcelenk
"""
from bs4 import BeautifulSoup
import requests
import cloudscraper
import json
import pandas
# GET BINANCE PAIRS
scraper = cloudscraper.create_scraper()
url = 'https://coinmarketcap.com/tr/exchanges/binance/'
soup = BeautifulSoup(scraper.get(url).text, 'html.parser')
pairsMainClass = soup.find_all("tr", "cmc-table-row")
pairs = []
currencies = []
for index, pair in enumerate(pairsMainClass):
found_pair = pair.find("td", "cmc-table__cell cmc-table__cell--sortable cmc-table__cell--left cmc-table__cell--sort-by__market-pair")
    # class of the table cell that holds each trading pair
    # couldn't select by "cmc-link" alone, since that class is reused elsewhere on the page
pairName = found_pair.find("a", "cmc-link").text
pairLeft = pairName.rsplit('/',1)[0]
pairRight = pairName.rsplit('/',1)[1]
pairs.append((pairLeft, pairRight))
currencies.append(pairLeft)
currencies.append(pairRight)
#PAIRS - BINANCE
pairs = list(set(pairs))  # drop duplicate pairs from the list
# Some pairs appear on the site under two categories (spot and derivatives);
# duplicates are removed so a pair is not counted twice, which was inflating the weight.
pairData = pandas.DataFrame(pairs)
pairData.columns = ["Source","Target"]
pairData.index.name = "Id"
pairData.to_csv('edges.csv')
#COINS AND TOKENS - BINANCE
currencies = list(set(currencies))  # drop duplicate entries from the list
currencyData = pandas.DataFrame(currencies)
currencyData.columns = ["Label"]
currencyData.index.name = "Id"
currencyData.to_csv('nodes.csv')
|
StarcoderdataPython
|
11349667
|
<gh_stars>1-10
def is_list_or_tuple(obj):
    return type(obj) in (list, tuple)
def list_to_csv_string(obj):
return ",".join([str(ii) for ii in obj])
def list_to_ssv_string(obj):
return " ".join([str(ii) for ii in obj])
|
StarcoderdataPython
|
6645887
|
<gh_stars>0
#!/usr/bin/env python3
"""
Copyright (c) 2018-2020 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import requests
import json
# Helper functions for Fire Jumper Mission Validation
def get_user_details(webex_token):
url = "https://api.ciscospark.com/v1/people/me"
headers = {"Authorization": f"Bearer {webex_token}", 'Content-Type':'application/json', 'Accept':'application/json'}
response = requests.get(url, headers=headers)
response.raise_for_status()
user = response.json()
return user
def post_submission(url, threatgrid_sha, threatgrid_sample_id,
threatgrid_sample_domains, umbrella_block_list,
umbrella_blocklist_enforcement, ctr_observables,
ctr_response_url, webex_id):
data = {
"threatgrid_sha": threatgrid_sha,
"threatgrid_sample_id": threatgrid_sample_id,
"threatgrid_sample_domains": threatgrid_sample_domains,
"umbrella_block_list": umbrella_block_list,
"umbrella_blocklist_enforcement": umbrella_blocklist_enforcement,
"ctr_observables": ctr_observables,
"ctr_response_url": ctr_response_url,
"webex_id": webex_id
}
headers = {'Content-Type':'application/json',
'User-Agent': 'python-firejumper-mission-api'}
response = requests.post(url, headers=headers, data=json.dumps(data), verify=False)
response.raise_for_status()
return response.json()
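# Hypothetical driver for the helper above; the token is a placeholder the
# caller must supply, so this is illustrative only.
if __name__ == "__main__":
    webex_token = "<WEBEX_TOKEN>"  # assumption: provided by the user
    me = get_user_details(webex_token)
    print(me.get("displayName"))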
|
StarcoderdataPython
|
6565052
|
"""
The purpose of this file is to provide benchmarks based on publicly accessible data that can be run on candidate models
without restrictions. As opposed to the private benchmarks hosted on www.Brain-Score.org, models can be evaluated
without having to submit them to the online platform.
This allows for quick local prototyping, layer commitment, etc.
For the final model evaluation, candidate models should still be sent to www.Brain-Score.org to evaluate them on
held-out private data.
"""
import functools
import logging
import boto3
from botocore import UNSIGNED
from botocore.config import Config
from botocore.exceptions import ClientError
import brainio_collection
from brainio_collection.fetch import BotoFetcher
from brainscore.benchmarks._neural_common import NeuralBenchmark
from brainscore.metrics.ceiling import InternalConsistency
from brainscore.metrics.regression import CrossRegressedCorrelation, pls_regression, pearsonr_correlation
from brainscore.utils import LazyLoad
from .freemanziemba2013 import load_assembly as load_freemanziemba2013, VISUAL_DEGREES as freemanziemba2013_degrees, \
NUMBER_OF_TRIALS as freemanziemba2013_trials
from .majajhong2015 import load_assembly as load_majajhong2015, VISUAL_DEGREES as majajhong2015_degrees, \
NUMBER_OF_TRIALS as majajhong2015_trials
from .rajalingham2018 import load_assembly as load_rajalingham2018, DicarloRajalingham2018I2n
_logger = logging.getLogger(__name__)
def _standard_benchmark(identifier, load_assembly, visual_degrees, number_of_trials, stratification_coord):
assembly_repetition = LazyLoad(lambda: load_assembly(average_repetitions=False))
assembly = LazyLoad(lambda: load_assembly(average_repetitions=True))
similarity_metric = CrossRegressedCorrelation(
regression=pls_regression(), correlation=pearsonr_correlation(),
crossvalidation_kwargs=dict(stratification_coord=stratification_coord))
ceiler = InternalConsistency()
return NeuralBenchmark(identifier=f"{identifier}-pls", version=1,
assembly=assembly, similarity_metric=similarity_metric,
visual_degrees=visual_degrees, number_of_trials=number_of_trials,
ceiling_func=lambda: ceiler(assembly_repetition),
parent=None, paper_link='http://www.jneurosci.org/content/35/39/13402.short')
def FreemanZiembaV1PublicBenchmark():
return _standard_benchmark('movshon.FreemanZiemba2013.V1.public',
load_assembly=functools.partial(load_freemanziemba2013, region='V1', access='public'),
visual_degrees=freemanziemba2013_degrees, number_of_trials=freemanziemba2013_trials,
stratification_coord='texture_type')
def FreemanZiembaV2PublicBenchmark():
return _standard_benchmark('movshon.FreemanZiemba2013.V2.public',
load_assembly=functools.partial(load_freemanziemba2013, region='V2', access='public'),
visual_degrees=freemanziemba2013_degrees, number_of_trials=freemanziemba2013_trials,
stratification_coord='texture_type')
def MajajHongV4PublicBenchmark():
return _standard_benchmark('dicarlo.MajajHong2015.V4.public',
load_assembly=functools.partial(load_majajhong2015, region='V4', access='public'),
visual_degrees=majajhong2015_degrees, number_of_trials=majajhong2015_trials,
stratification_coord='object_name')
def MajajHongITPublicBenchmark():
return _standard_benchmark('dicarlo.MajajHong2015.IT.public',
load_assembly=functools.partial(load_majajhong2015, region='IT', access='public'),
visual_degrees=majajhong2015_degrees, number_of_trials=majajhong2015_trials,
stratification_coord='object_name')
class RajalinghamMatchtosamplePublicBenchmark(DicarloRajalingham2018I2n):
def __init__(self):
super(RajalinghamMatchtosamplePublicBenchmark, self).__init__()
self._assembly = LazyLoad(lambda: load_rajalingham2018(access='public'))
self._ceiling_func = lambda: self._metric.ceiling(self._assembly, skipna=True)
def list_public_assemblies():
all_assemblies = brainio_collection.list_assemblies()
public_assemblies = []
for assembly in all_assemblies:
# https://github.com/brain-score/brainio_collection/blob/7892b9ec66c9e744766c794de4b73ebdf61d585c/brainio_collection/fetch.py#L181
assy_model = brainio_collection.lookup.lookup_assembly(assembly)
if assy_model['location_type'] != 'S3':
_logger.warning(f"Unknown location_type in assembly {assy_model}")
continue
probe_fetcher = _ProbeBotoFetcher(location=assy_model['location'], local_filename='probe') # filename is unused
if probe_fetcher.has_access():
public_assemblies.append(assembly)
return public_assemblies
class _ProbeBotoFetcher(BotoFetcher):
def has_access(self):
s3 = boto3.resource('s3', config=Config(signature_version=UNSIGNED))
obj = s3.Object(self.bucketname, self.relative_path)
try:
# noinspection PyStatementEffect
obj.content_length # probe
return True
except ClientError:
return False
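# Hedged usage sketch: Brain-Score benchmarks are callable on a candidate
# model; `my_model` below is a placeholder for a user-provided BrainModel,
# so the lines stay commented out.
# benchmark = MajajHongITPublicBenchmark()
# score = benchmark(my_model)
# print(score)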
|
StarcoderdataPython
|
6607087
|
from datetime import date, datetime, time
from django.conf import settings
from django.db import models
from django.http import Http404
from django.utils.translation import ugettext_noop
from random import randint, shuffle
from wouso.core.user.models import Player
from wouso.core.game.models import Game
from wouso.core import scoring, signals
from wouso.core.qpool.models import Schedule, Answer, Question
# Qotd uses questions from qpool
class QotdUser(Player):
""" Extension of the User object, customized for qotd """
last_answered = models.DateTimeField(null=True, blank=True, default=None)
last_answer = models.IntegerField(default=0, blank=True)
    last_answer_correct = models.BooleanField(default=False, blank=True)
my_question = models.ForeignKey(Question, related_name="Mine", null=True)
@property
def has_question(self):
if self.my_question is None:
return False
qdate = self.my_question.schedule
        if qdate.day != date.today():
return False
return True
def set_question(self, question):
if question and not self.has_question:
self.my_question = question
self.save()
def reset_question(self):
self.my_question = None
self.save()
def set_answered(self, choice, correct):
if not self.has_answered:
self.last_answer = choice # answer id
self.last_answer_correct = correct
self.last_answered = datetime.now()
self.save()
# send signal
if correct:
action_msg = "qotd-correct"
signal_msg = ugettext_noop('has given a correct answer to QotD.')
else:
action_msg = "qotd-wrong"
signal_msg = ugettext_noop('has given a wrong answer to QotD.')
signals.addActivity.send(sender=None, user_from=self,
user_to=self,
message=signal_msg,
action=action_msg,
game=QotdGame.get_instance())
def reset_answered(self):
self.last_answered = None
self.save()
@property
def has_answered(self):
""" Check if last_answered was today """
#TODO: test this
if self.last_answered is None:
return False
else:
now = datetime.now()
today_start = datetime.combine(now, time(0, 0, 0))
today_end = datetime.combine(now, time(23, 59, 59))
return today_start <= self.last_answered <= today_end
class QotdGame(Game):
""" Each game must extend Game """
class Meta:
# A Game extending core.game.models.Game should be set as proxy
proxy = True
def __init__(self, *args, **kwargs):
# Set parent's fields
self._meta.get_field('verbose_name').default = "Question of the Day"
self._meta.get_field('short_name').default = ""
# the url field takes as value only a named url from module's urls.py
self._meta.get_field('url').default = "qotd_index_view"
super(QotdGame, self).__init__(*args, **kwargs)
@staticmethod
def get_for_today():
""" Return a Question object selected for Today """
sched = list(Schedule.objects.filter(day=date.today(),
question__active=True).all())
if not sched:
return None
shuffle(sched)
return sched[0].question
@staticmethod
def answered(user, question, choice):
correct = False
        for a in question.answers:
if a.id == choice:
if a.correct:
correct = True
break
user.set_answered(choice, correct) # answer id
if correct:
now = datetime.now()
pr = randint(0, 99)
            scoring.score(user, QotdGame, 'qotd-ok', hour=now.hour)
if pr < settings.QOTD_BONUS_PROB:
scoring.score(user, QotdGame, 'qotd-ok-bonus', hour=now.hour)
@classmethod
def get_formulas(kls):
""" Returns a list of formulas used by qotd """
fs = []
qotd_game = kls.get_instance()
fs.append(dict(name='qotd-ok',
expression='points=4 + (1 if {hour} < 12 else -1)',
owner=qotd_game.game,
description='Points earned on a correct answer in the morning')
)
fs.append(dict(name="qotd-ok-bonus",
expression='points=2',
owner=qotd_game.game,
description='Points earned in case of bonus')
)
return fs
@classmethod
def get_api(kls):
from api import QotdHandler
return {r'^qotd/today/$': QotdHandler}
@classmethod
def get_modifiers(cls):
return ['qotd-blind', #player cannot launch QuestionOfTheDay
]
@classmethod
def get_history(cls):
today = datetime.now()
qs = Schedule.objects.filter(day__lte=today).order_by('-day')[:7]
return qs
|
StarcoderdataPython
|
5082912
|
#!/usr/bin/env python3
# color molecules from a SMILES file according to per-atom delta score
# values from another file
import matplotlib.pyplot as plot
import rdkit, sys
from rdkit import Chem
from rdkit.Chem import Draw
from rdkit.Chem.Draw import rdDepictor, SimilarityMaps
def RobustSmilesMolSupplier(filename):
with open(filename) as f:
for line in f:
words = line.split()
smile = words[0]
name = " ".join(words[1:]) # everything after the SMILES string
yield (name, Chem.MolFromSmiles(smile))
# draw all atoms in black
drawOptions = Draw.DrawingOptions()
drawOptions.elemDict = {}
drawOptions.bgColor = None
if __name__ == '__main__':
if len(sys.argv) != 3:
print("usage: %s molecules.smi molecules.delta" % sys.argv[0])
exit(1)
smiles_fn = sys.argv[1]
deltas_fn = sys.argv[2]
delta_max = 0.1 # arbitrary, to normalize deltas and color-scale them
delta_file = open(deltas_fn, 'r')
count = 0
for long_name, mol in RobustSmilesMolSupplier(smiles_fn):
# split by '_' in case name was postfixed with underscores
# and additional data
name = long_name.split('_')[0]
line = delta_file.readline()
words = line.split()
curr_name = words[0]
if curr_name != name:
print("names differ: %s != %s" % (name, curr_name))
exit(1)
delta_strings = words[1:]
nb_deltas = len(delta_strings)
nb_atoms = mol.GetNumAtoms()
assert(nb_deltas == nb_atoms)
    deltas = [float(x) for x in delta_strings]
rdDepictor.Compute2DCoords(mol) # 2D conformer for figure
# compute similarity map weights
weights = []
for delta in deltas:
# run-time check that delta is not too high or delta_max too small
assert(delta <= delta_max)
weight = delta / delta_max
weights.append(weight)
sim_map = Draw.SimilarityMaps.\
GetSimilarityMapFromWeights(mol, weights, size = (200,200),
options=drawOptions,
scale=50.0)
# the bbox param forces centering the molecule in the figure
sim_map.savefig(name + '.svg', bbox_inches = 'tight')
plot.close(sim_map)
count += 1
print('processed: %d\r' % count, end='')
print('processed: %d' % count)
delta_file.close()
|
StarcoderdataPython
|
9784427
|
<filename>notebooks/mortgage_e2e_gquant/mortgage_common.py
'''
Collection of functions to run the mortgage example.
'''
import os
from glob import glob
class MortgageTaskNames(object):
'''Task names commonly used by scripts for naming tasks when creating
a gQuant mortgage workflow.
'''
load_acqdata_task_name = 'acqdata'
load_perfdata_task_name = 'perfdata'
ever_feat_task_name = 'ever_features'
delinq_feat_task_name = 'delinq_features'
join_perf_ever_delinq_feat_task_name = 'join_perf_ever_delinq_features'
create_12mon_feat_task_name = 'create_12mon_features'
final_perf_delinq_task_name = 'final_perf_delinq_features'
final_perf_acq_task_name = 'final_perf_acq_df'
mortgage_workflow_runner_task_name = 'mortgage_workflow_runner'
xgb_trainer_task_name = 'xgb_trainer'
dask_mortgage_workflow_runner_task_name = 'dask_mortgage_workflow_runner'
dask_xgb_trainer_task_name = 'dask_xgb_trainer'
def mortgage_etl_workflow_def(
csvfile_names=None, csvfile_acqdata=None,
csvfile_perfdata=None):
'''Define the ETL (extract-transform-load) portion of the mortgage
workflow.
:returns: gQuant task-spec list. Currently a simple list of dictionaries.
Each dict is a task-spec per TaskSpecSchema.
:rtype: list
'''
from gquant.dataframe_flow import TaskSpecSchema
_basedir = os.path.dirname(__file__)
mortgage_lib_module = os.path.join(_basedir, 'mortgage_gquant_plugins.py')
# print('CSVFILE_ACQDATA: ', csvfile_acqdata)
# print('CSVFILE_PERFDATA: ', csvfile_perfdata)
# load acquisition
load_acqdata_task = {
TaskSpecSchema.task_id: MortgageTaskNames.load_acqdata_task_name,
TaskSpecSchema.node_type: 'CsvMortgageAcquisitionDataLoader',
TaskSpecSchema.conf: {
'csvfile_names': csvfile_names,
'csvfile_acqdata': csvfile_acqdata
},
TaskSpecSchema.inputs: [],
TaskSpecSchema.filepath: mortgage_lib_module
}
# load performance data
load_perfdata_task = {
TaskSpecSchema.task_id: MortgageTaskNames.load_perfdata_task_name,
TaskSpecSchema.node_type: 'CsvMortgagePerformanceDataLoader',
TaskSpecSchema.conf: {
'csvfile_perfdata': csvfile_perfdata
},
TaskSpecSchema.inputs: [],
TaskSpecSchema.filepath: mortgage_lib_module
}
# calculate loan delinquency stats
ever_feat_task = {
TaskSpecSchema.task_id: MortgageTaskNames.ever_feat_task_name,
TaskSpecSchema.node_type: 'CreateEverFeatures',
TaskSpecSchema.conf: dict(),
TaskSpecSchema.inputs: [MortgageTaskNames.load_perfdata_task_name],
TaskSpecSchema.filepath: mortgage_lib_module
}
delinq_feat_task = {
TaskSpecSchema.task_id: MortgageTaskNames.delinq_feat_task_name,
TaskSpecSchema.node_type: 'CreateDelinqFeatures',
TaskSpecSchema.conf: dict(),
TaskSpecSchema.inputs: [MortgageTaskNames.load_perfdata_task_name],
TaskSpecSchema.filepath: mortgage_lib_module
}
join_perf_ever_delinq_feat_task = {
TaskSpecSchema.task_id:
MortgageTaskNames.join_perf_ever_delinq_feat_task_name,
TaskSpecSchema.node_type: 'JoinPerfEverDelinqFeatures',
TaskSpecSchema.conf: dict(),
TaskSpecSchema.inputs: [
MortgageTaskNames.load_perfdata_task_name,
MortgageTaskNames.ever_feat_task_name,
MortgageTaskNames.delinq_feat_task_name
],
TaskSpecSchema.filepath: mortgage_lib_module
}
create_12mon_feat_task = {
TaskSpecSchema.task_id: MortgageTaskNames.create_12mon_feat_task_name,
TaskSpecSchema.node_type: 'Create12MonFeatures',
TaskSpecSchema.conf: dict(),
TaskSpecSchema.inputs: [
MortgageTaskNames.join_perf_ever_delinq_feat_task_name
],
TaskSpecSchema.filepath: mortgage_lib_module
}
final_perf_delinq_task = {
TaskSpecSchema.task_id: MortgageTaskNames.final_perf_delinq_task_name,
TaskSpecSchema.node_type: 'FinalPerfDelinq',
TaskSpecSchema.conf: dict(),
TaskSpecSchema.inputs: [
MortgageTaskNames.load_perfdata_task_name,
MortgageTaskNames.join_perf_ever_delinq_feat_task_name,
MortgageTaskNames.create_12mon_feat_task_name
],
TaskSpecSchema.filepath: mortgage_lib_module
}
final_perf_acq_task = {
TaskSpecSchema.task_id: MortgageTaskNames.final_perf_acq_task_name,
TaskSpecSchema.node_type: 'JoinFinalPerfAcqClean',
TaskSpecSchema.conf: dict(),
TaskSpecSchema.inputs: [
MortgageTaskNames.final_perf_delinq_task_name,
MortgageTaskNames.load_acqdata_task_name
],
TaskSpecSchema.filepath: mortgage_lib_module
}
task_spec_list = [
load_acqdata_task, load_perfdata_task,
ever_feat_task, delinq_feat_task, join_perf_ever_delinq_feat_task,
create_12mon_feat_task, final_perf_delinq_task, final_perf_acq_task
]
return task_spec_list
def generate_mortgage_gquant_run_params_list(
mortgage_data_path, start_year, end_year, part_count,
gquant_task_spec_list):
'''For the specified years and limit (part_count) to the number of files
(performance files), generates a list of run_params_dict.
run_params_dict = {
'replace_spec': replace_spec,
'task_spec_list': gquant_task_spec_list,
'out_list': out_list
}
replace_spec - to be passed to Dataframe flow run command's replace option.
replace_spec = {
MortgageTaskNames.load_acqdata_task_name: {
TaskSpecSchema.conf: {
'csvfile_names': csvfile_names,
'csvfile_acqdata': csvfile_acqdata
}
},
MortgageTaskNames.load_perfdata_task_name: {
TaskSpecSchema.conf: {
'csvfile_perfdata': csvfile_perfdata
}
}
}
out_list - Expected to specify one output which should be the final
dataframe produced by the mortgage ETL workflow.
Example:
from gquant.dataframe_flow import TaskGraph
task_spec_list = run_params_dict['task_spec_list']
out_list = run_params_dict['out_list']
replace_spec = run_params_dict['replace_spec']
task_graph = TaskGraph(task_spec_list)
(final_perf_acq_df,) = task_graph.run(out_list, replace_spec)
:param str mortgage_data_path: Path to mortgage data. Should have a file
"names.csv" and two subdirectories "acq" and "perf".
:param int start_year: Start year is used to traverse the appropriate range
of directories with corresponding year(s) in mortgage data.
:param int end_year: End year is used to traverse the appropriate range
of directories with corresponding year(s) in mortgage data.
:param int part_count: Limit to how many performance files to load. There
is a single corresponding acquisition file for year and quarter.
Performance files are very large csv files (1GB files) and are broken
down i.e. for a given year and quarter you could have several file
chunks: *.txt_0, *.txt_1, etc.
:param gquant_task_spec_list: Mortgage ETL workflow list of tasks. Refer to
function mortgage_etl_workflow_def.
:returns: list of run_params_dict
:rtype: list
'''
from gquant.dataframe_flow import TaskSpecSchema
csvfile_names = os.path.join(mortgage_data_path, 'names.csv')
acq_data_path = os.path.join(mortgage_data_path, 'acq')
perf_data_path = os.path.join(mortgage_data_path, 'perf')
quarter = 1
year = start_year
count = 0
out_list = [MortgageTaskNames.final_perf_acq_task_name]
mortgage_run_params_dict_list = []
while year <= end_year:
if count >= part_count:
break
perf_data_files = glob(os.path.join(
perf_data_path + "/Performance_{}Q{}*".format(
str(year), str(quarter))))
csvfile_acqdata = acq_data_path + "/Acquisition_" + \
str(year) + "Q" + str(quarter) + ".txt"
for csvfile_perfdata in perf_data_files:
if count >= part_count:
break
replace_spec = {
MortgageTaskNames.load_acqdata_task_name: {
TaskSpecSchema.conf: {
'csvfile_names': csvfile_names,
'csvfile_acqdata': csvfile_acqdata
}
},
MortgageTaskNames.load_perfdata_task_name: {
TaskSpecSchema.conf: {
'csvfile_perfdata': csvfile_perfdata
}
}
}
# Uncomment 'csvfile_perfdata' for debugging chunks in
# DaskMortgageWorkflowRunner.
run_params_dict = {
# 'csvfile_perfdata': csvfile_perfdata,
'replace_spec': replace_spec,
'task_spec_list': gquant_task_spec_list,
'out_list': out_list
}
mortgage_run_params_dict_list.append(run_params_dict)
count += 1
quarter += 1
if quarter == 5:
year += 1
quarter = 1
return mortgage_run_params_dict_list
|
StarcoderdataPython
|
4951287
|
#!/usr/bin/env python3
#
# Forked from LiteX-Boards, with additions to support USB debugging.
#
# Copyright (c) 2020 <NAME> <<EMAIL>>
# Copyright (c) 2018-2019 <NAME> <<EMAIL>>
# Copyright (c) 2018 <NAME> <<EMAIL>>
# SPDX-License-Identifier: BSD-2-Clause
import os
import argparse
import sys
from migen import *
from migen.genlib.resetsync import AsyncResetSynchronizer
from litex.build.io import DDROutput
from litex_boards.platforms import ulx3s
from litex.build.lattice.trellis import trellis_args, trellis_argdict
from litex.soc.cores.clock import *
from litex.soc.integration.soc_core import *
from litex.soc.integration.builder import *
from litex.soc.interconnect import wishbone
from litex.soc.interconnect.csr import *
from litex.soc.cores.video import VideoECP5HDMIPHY
from litex.soc.cores.led import LedChaser
from litex.soc.cores.spi import SPIMaster
from litex.soc.cores.gpio import GPIOOut
from litex.soc.cores.uart import UARTWishboneBridge
from litedram import modules as litedram_modules
from litedram.phy import GENSDRPHY, HalfRateGENSDRPHY
from litescope import LiteScopeAnalyzer
class Matrix8x8(Module, AutoCSR):
def __init__(self, cd, rst, pads):
self.pads = pads # for o_matrix_clk/o_matrix_latch/o_matrix_mosi
self.bus = bus = wishbone.Interface(data_width = 32)
self.speed = CSRStorage(2) # for i_refresh_speed
self.specials += Instance("matrix",
i_clk = ClockSignal("sys"), #ClockSignal(cd), # figure out how to get a ClockSignal from the domain itself?
i_reset = rst, #| self.rst
i_i_refresh_speed = self.speed.storage,
o_o_matrix_clk = pads.clk,
o_o_matrix_latch = pads.latch,
o_o_matrix_mosi = pads.mosi,
i_i_wb_cyc = bus.cyc,
i_i_wb_stb = bus.stb,
i_i_wb_we = bus.we,
i_i_wb_addr = bus.adr,
i_i_wb_sel = bus.sel,
i_i_wb_wdata = bus.dat_w,
o_o_wb_ack = bus.ack,
# o_wb_stall = # no stall in regular wishbone?
o_o_wb_rdata = bus.dat_r)
# CRG ----------------------------------------------------------------------------------------------
class _CRG(Module):
def __init__(self, platform, sys_clk_freq, with_usb_pll=False, with_video_pll=False, sdram_rate="1:1"):
self.rst = Signal()
self.clock_domains.cd_sys = ClockDomain()
if sdram_rate == "1:2":
self.clock_domains.cd_sys2x = ClockDomain()
self.clock_domains.cd_sys2x_ps = ClockDomain(reset_less=True)
else:
self.clock_domains.cd_sys_ps = ClockDomain(reset_less=True)
# # #
# Clk / Rst
clk25 = platform.request("clk25")
rst = platform.request("rst")
# PLL
self.submodules.pll = pll = ECP5PLL()
self.comb += pll.reset.eq(rst | self.rst)
pll.register_clkin(clk25, 25e6)
pll.create_clkout(self.cd_sys, sys_clk_freq)
if sdram_rate == "1:2":
pll.create_clkout(self.cd_sys2x, 2*sys_clk_freq)
pll.create_clkout(self.cd_sys2x_ps, 2*sys_clk_freq, phase=180) # Idealy 90° but needs to be increased.
else:
pll.create_clkout(self.cd_sys_ps, sys_clk_freq, phase=90)
# USB PLL
if with_usb_pll:
self.submodules.usb_pll = usb_pll = ECP5PLL()
self.comb += usb_pll.reset.eq(rst | self.rst)
usb_pll.register_clkin(clk25, 25e6)
self.clock_domains.cd_usb_12 = ClockDomain()
self.clock_domains.cd_usb_48 = ClockDomain()
usb_pll.create_clkout(self.cd_usb_12, 12e6, margin=0)
usb_pll.create_clkout(self.cd_usb_48, 48e6, margin=0)
# Video PLL
if with_video_pll:
self.submodules.video_pll = video_pll = ECP5PLL()
self.comb += video_pll.reset.eq(rst | self.rst)
video_pll.register_clkin(clk25, 25e6)
self.clock_domains.cd_hdmi = ClockDomain()
self.clock_domains.cd_hdmi5x = ClockDomain()
video_pll.create_clkout(self.cd_hdmi, 40e6, margin=0)
video_pll.create_clkout(self.cd_hdmi5x, 200e6, margin=0)
# SDRAM clock
sdram_clk = ClockSignal("sys2x_ps" if sdram_rate == "1:2" else "sys_ps")
self.specials += DDROutput(1, 0, platform.request("sdram_clock"), sdram_clk)
# Prevent ESP32 from resetting FPGA
self.comb += platform.request("wifi_gpio0").eq(1)
# BaseSoC ------------------------------------------------------------------------------------------
class BaseSoC(SoCCore):
def __init__(self, device="LFE5U-45F", revision="2.0", toolchain="trellis",
sys_clk_freq=int(50e6), sdram_module_cls="MT48LC16M16", sdram_rate="1:1",
with_video_terminal=False, with_video_framebuffer=False, spiflash=False,
usb_debug=False, uart_debug=True,
**kwargs):
platform = ulx3s.Platform(device=device, revision=revision, toolchain=toolchain)
if spiflash:
self.mem_map = {**SoCCore.mem_map, **{"spiflash": 0x80000000}}
if usb_debug:
# TODO import this properly somehow?
# os.system("git clone https://github.com/gregdavill/valentyusb -b hw_cdc_eptri")
os.system("git clone <EMAIL>:im-tomu/valentyusb.git")
# os.system("git clone https://github.com/litex-hub/valentyusb -b hw_cdc_eptri")
sys.path.append("valentyusb")
# SoCCore ----------------------------------------------------------------------------------
SoCCore.__init__(self, platform, sys_clk_freq,
ident = "LiteX SoC on ULX3S",
ident_version = True,
**kwargs)
# CRG --------------------------------------------------------------------------------------
with_usb_pll = kwargs.get("uart_name", None) == "usb_acm" or usb_debug
with_video_pll = with_video_terminal or with_video_framebuffer
self.submodules.crg = _CRG(platform, sys_clk_freq, with_usb_pll, with_video_pll, sdram_rate=sdram_rate)
# USB Debug on US2 -------------------------------------------------------------------------
# This enables the use of wishbone-tool to poke memory via US2 USB interface, and hopefully
# litescope
if usb_debug:
from valentyusb.usbcore.cpu import epfifo, dummyusb
from valentyusb.usbcore import io as usbio
usb_pads = self.platform.request("usb")
usb_iobuf = usbio.IoBuf(usb_pads.d_p, usb_pads.d_n, usb_pads.pullup)
# self.submodules.usb = epfifo.PerEndpointFifoInterface(usb_iobuf, debug=True)
# just enumerate and hook up dummy debug wishbone
# random valenty
# self.submodules.usb = dummyusb.DummyUsb(usb_iobuf,
# debug=True)
# default valenty
self.submodules.usb = dummyusb.DummyUsb(usb_iobuf,
debug=True,
cdc=True,
relax_timing=True)
self.add_wb_master(self.usb.debug_bridge.wishbone)
if not hasattr(self.cpu, 'debug_bus'):
raise RuntimeError('US2 Debug requires a CPU variant with +debug')
if uart_debug:
self.submodules.uart_bridge = UARTWishboneBridge(
platform.request("uart0"),
sys_clk_freq,
# baudrate=115200)
baudrate=3000000)
self.add_wb_master(self.uart_bridge.wishbone)
# SDR SDRAM --------------------------------------------------------------------------------
if not self.integrated_main_ram_size:
sdrphy_cls = HalfRateGENSDRPHY if sdram_rate == "1:2" else GENSDRPHY
self.submodules.sdrphy = sdrphy_cls(platform.request("sdram"))
self.add_sdram("sdram",
phy = self.sdrphy,
module = getattr(litedram_modules, sdram_module_cls)(sys_clk_freq, sdram_rate),
origin = self.mem_map["main_ram"],
size = kwargs.get("max_sdram_size", 0x40000000),
l2_cache_size = kwargs.get("l2_size", 8192),
l2_cache_min_data_width = kwargs.get("min_l2_data_width", 128),
l2_cache_reverse = True
)
# Video ------------------------------------------------------------------------------------
if with_video_terminal or with_video_framebuffer:
self.submodules.videophy = VideoECP5HDMIPHY(platform.request("gpdi"), clock_domain="hdmi")
if with_video_terminal:
self.add_video_terminal(phy=self.videophy, timings="800x600@60Hz", clock_domain="hdmi")
if with_video_framebuffer:
self.add_video_framebuffer(phy=self.videophy, timings="800x600@60Hz", clock_domain="hdmi")
# Leds -------------------------------------------------------------------------------------
self.submodules.leds = LedChaser(
pads = platform.request_all("user_led"),
sys_clk_freq = sys_clk_freq)
def add_oled(self):
pads = self.platform.request("oled_spi")
pads.miso = Signal()
self.submodules.oled_spi = SPIMaster(pads, 8, self.sys_clk_freq, 8e6)
self.oled_spi.add_clk_divider()
self.submodules.oled_ctl = GPIOOut(self.platform.request("oled_ctl"))
def add_matrix(self):
pads = self.platform.request("matrix")
# https://github.com/sthornington/matrix8x8
matrix8x8_path = "/home/sthornington/git/matrix8x8/src"
self.platform.add_verilog_include_path(matrix8x8_path)
self.platform.add_sources(matrix8x8_path, "matrix.sv", "mod3.sv")
region_size = 4 * 8 # four bytes per row (8 nibbles), 8 rows
mem_map = { "matrix": 0xc0000000 }
self.mem_map.update(mem_map)
cd = self.crg.cd_sys
self.add_memory_region("matrix", self.mem_map["matrix"], region_size, type="io")
matrix = Matrix8x8(cd, self.crg.rst, pads)
self.submodules.matrix = matrix
self.add_wb_slave(self.mem_map["matrix"], matrix.bus)
# LiteScope Analyzer -----------------------------------------------------------------------
count = Signal(8)
self.sync += count.eq(count + 1)
analyzer_signals = [
matrix.bus,
count,
]
self.submodules.analyzer = LiteScopeAnalyzer(analyzer_signals,
depth = 1024,
clock_domain = "sys", # why can I not use the ClockDomain here?
csr_csv = "analyzer.csv")
# Build --------------------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(description="LiteX SoC on ULX3S")
parser.add_argument("--build", action="store_true", help="Build bitstream")
parser.add_argument("--load", action="store_true", help="Load bitstream")
parser.add_argument("--toolchain", default="trellis", help="FPGA toolchain: trellis (default) or diamond")
parser.add_argument("--device", default="LFE5U-45F", help="FPGA device: LFE5U-12F, LFE5U-25F, LFE5U-45F (default) or LFE5U-85F")
parser.add_argument("--revision", default="2.0", help="Board revision: 2.0 (default) or 1.7")
parser.add_argument("--sys-clk-freq", default=50e6, help="System clock frequency (default: 50MHz)")
parser.add_argument("--sdram-module", default="MT48LC16M16", help="SDRAM module: MT48LC16M16 (default), AS4C32M16 or AS4C16M16")
parser.add_argument("--with-spiflash", action="store_true", help="Make the SPI Flash accessible from the SoC")
parser.add_argument("--flash-boot-adr", type=lambda x: int(x,0), default=None, help="Flash boot address")
sdopts = parser.add_mutually_exclusive_group()
sdopts.add_argument("--with-spi-sdcard", action="store_true", help="Enable SPI-mode SDCard support")
sdopts.add_argument("--with-sdcard", action="store_true", help="Enable SDCard support")
parser.add_argument("--with-oled", action="store_true", help="Enable SDD1331 OLED support")
parser.add_argument("--with-matrix", action="store_true", help="Enable matrix (shift registered) support")
parser.add_argument("--sdram-rate", default="1:1", help="SDRAM Rate: 1:1 Full Rate (default), 1:2 Half Rate")
viopts = parser.add_mutually_exclusive_group()
viopts.add_argument("--with-video-terminal", action="store_true", help="Enable Video Terminal (HDMI)")
viopts.add_argument("--with-video-framebuffer", action="store_true", help="Enable Video Framebuffer (HDMI)")
parser.add_argument("--with-us2-debug", action="store_true", help="Enable Wishbone debug bridge on US2")
parser.add_argument("--with-uart-debug", action="store_true", help="Enable Wishbone debug bridge on uart0")
builder_args(parser)
soc_core_args(parser)
trellis_args(parser)
args = parser.parse_args()
soc = BaseSoC(
device = args.device,
revision = args.revision,
toolchain = args.toolchain,
sys_clk_freq = int(float(args.sys_clk_freq)),
sdram_module_cls = args.sdram_module,
sdram_rate = args.sdram_rate,
with_video_terminal = args.with_video_terminal,
with_video_framebuffer = args.with_video_framebuffer,
spiflash = args.with_spiflash,
usb_debug = args.with_us2_debug,
uart_debug = args.with_uart_debug,
**soc_core_argdict(args))
assert not (args.with_spi_sdcard and args.with_sdcard)
if args.with_spi_sdcard:
soc.add_spi_sdcard()
if args.with_sdcard:
soc.add_sdcard()
if args.with_oled:
soc.add_oled()
if args.with_matrix:
soc.add_matrix()
if args.with_spiflash:
soc.add_spi_flash(mode="1x", dummy_cycles=8)
if args.flash_boot_adr:
soc.add_constant("FLASH_BOOT_ADDRESS", args.flash_boot_adr)
builder = Builder(soc, **builder_argdict(args))
builder_kargs = trellis_argdict(args) if args.toolchain == "trellis" else {}
builder.build(**builder_kargs, run=args.build)
if args.load:
prog = soc.platform.create_programmer()
prog.load_bitstream(os.path.join(builder.gateware_dir, soc.build_name + ".svf"))
if __name__ == "__main__":
main()
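# Example invocations (sketches using only the options defined above; the
# attached board and local toolchain are assumptions of the user's setup):
#   python3 this_script.py --build --device LFE5U-85F
#   python3 this_script.py --build --with-us2-debug --with-matrix
#   python3 this_script.py --load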
|
StarcoderdataPython
|
9797301
|
<reponame>javokhirbek1999/pet-finder-rest-api
# Generated by Django 3.2.6 on 2021-09-01 07:34
from django.db import migrations, models
import profiles.models
class Migration(migrations.Migration):
dependencies = [
('profiles', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='avatar',
field=models.ImageField(default='profile/default.jpg', upload_to=profiles.models.upload_to, verbose_name='Profile picture'),
),
]
|
StarcoderdataPython
|
3523259
|
<gh_stars>0
import os
from flask import Flask, url_for, render_template, request
app = Flask(__name__)
@app.route('/')
def renderMain():
return render_template('home.html')
@app.route('/page1')
def renderPage1():
return render_template('page1.html')
@app.route('/page2')
def renderPage2():
return render_template('page2.html')
if __name__=="__main__":
app.run(debug=False,port=54321)
|
StarcoderdataPython
|
4890638
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from test_results import TestResult
class Test(unittest.TestCase):
def test_defaults(self):
result = TestResult("foo")
self.assertEqual(result.filename, 'foo')
self.assertEqual(result.failures, [])
self.assertEqual(result.test_run_time, 0)
def test_loads(self):
result = TestResult(filename='foo',
failures=[],
test_run_time=1.1)
s = result.dumps()
new_result = TestResult.loads(s)
self.assertTrue(isinstance(new_result, TestResult))
self.assertEqual(new_result, result)
# Also check that != is implemented.
self.assertFalse(new_result != result)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
5032537
|
#########################################################################
#
# Date: Nov 2001 Authors: <NAME>, <NAME>
#
# <EMAIL>
# <EMAIL>
#
# Copyright: <NAME>, <NAME> and TSRI
#
#########################################################################
import tkinter, numpy, _py2k_string as string, math
from mglutil.math.rotax import rotax
from mglutil.gui.Misc.Tk.KeybdModMonitor import KeyboardModifierMonitor
from mglutil.util.callback import CallbackManager
from .thumbwheel import ThumbWheel
from .optionsPanel import VectorOptionsPanel
#class vectorGUI(Tkinter.Frame, KeyboardModifierMonitor):
class vectorGUI(KeyboardModifierMonitor):
""" This class implements a vector widget.
The widget has a vector which can be moved within a sphere to generate
a 3D vector. Values are normalized and stored in self.vector
In addition, the vector can be rotated with 3 thumbwheels.
Values can be entered directly by typing them into the 3 entry forms.
Then, the 'normalize and set' button has to be pressed in order to
normalize and set the new vector.
The widget has a configure() method: vector, mode, precision and
continuous can be set this way.
vector is a list of 3 floating values, e.g. [0., 0., 1.]
mode describes the axis movement (rotation around an axis): is type
string and can be either 'X', 'Y' or 'Z'. Free movement (standard
value) is 'XY'.
continuous can be either 0 (or None) or 1. Default is 0
precision is type int and ranges from 1 - 10
master, name and size can be passed only to the constructor.
a lock() method is used to disable the various gui components of the
options panel. Usage: <instance>.lock(<component>=<value>)
component is continuous, precision or mode. value is 0 or 1. 1 disables,
0 enables.
"""
    def pack(self, *args, **kw):
        self.master.pack(*args, **kw)
    def grid(self, *args, **kw):
        self.master.grid(*args, **kw)
    def bind(self, *args, **kw):
        self.master.bind(*args, **kw)
    def unbind(self, *args, **kw):
        self.master.unbind(*args, **kw)
def grid_forget(self):
self.master.grid_forget()
def __init__(self, master=None, name='vector', size=200, continuous = 1,
vector=[0.0, 0.0, 1.0], mode='XY', precision=5,
lockContinuous=0, lockPrecision=0, lockMode=0,
callback=None, labelSide='top'):
KeyboardModifierMonitor.__init__(self)
self.callback = callback # user specified callback
self.name=name # title inside canvas
self.labelSide=labelSide # where title gets packed
self.mode=mode # axe mode: can be 'XY', 'X', 'Y' or 'Z'
self.precision=precision # floating number digits
self.continuous=continuous # can be 1 or 0
self.vector=vector # initial vector value
self.size=size # size of vector widget
self.lockContinuous = lockContinuous # set to 1 to lock menus in
# option panel
self.lockPrecision = lockPrecision
self.lockMode = lockMode
self.r = self.size/2
self.r2 = self.r*self.r
self.drawShadowX = 0
self.drawShadowY = 1
self.drawShadowZ = 0
self.fillShadowPlanes = 1
self.master = master = tkinter.Frame(master)
#Tkinter.Frame.__init__(self, master)
#Tkinter.Pack.config(self)
self.callbacks = CallbackManager() # object to manage callback
# functions. They get called with the
# current value as an argument
self.zeros = numpy.array( (0,0,0), numpy.int16)
self.viewingMatInv = numpy.array(
[[ 0.96770716, -0.03229283, -0.25 , 0. ],
[ 0.03229283, -0.96770716, 0.25 , 0. ],
[ 0.25 , 0.25 , 0.93541437, 0. ],
[ 0. , 0. , 0. , 1. ]],'f')
self.viewingMat = numpy.transpose(self.viewingMatInv)
self.createCanvas(master, size)
self.createEntries(self.frame)
tkinter.Widget.bind(self.canvas, "<ButtonPress-1>", self.mouseDown)
tkinter.Widget.bind(self.canvas, "<ButtonRelease-1>", self.mouseUp)
tkinter.Widget.bind(self.canvas, "<B1-Motion>", self.mouseMove)
self.setEntries()
self.opPanel = VectorOptionsPanel(master = self,
title="Vector GUI Options")
tkinter.Widget.bind(self.canvas, "<Button-3>", self.toggleOptPanel)
if self.callback:
self.callbacks.AddCallback(self.callback)
def toggleOptPanel(self, event=None):
# opens and closes options panel by right clicking on widget
if self.opPanel.flag:
self.opPanel.Dismiss_cb()
else:
if not hasattr(self.opPanel, 'optionsForm'):
self.opPanel.displayPanel(create=1)
else:
self.opPanel.displayPanel(create=0)
def mouseUp(self, event):
if not self.continuous:
self.callbacks.CallCallbacks(self.vector)
def mouseDown(self, event):
# remember where the mouse went down
xc = event.x - self.xm
yc = self.ym - event.y
        # compute the intersection point between the click and the sphere
        z2 = self.r2-(xc*xc)-(yc*yc)
        if z2>=0: # we picked inside the sphere. going for a XY rotation
            self.lastPt3D = (xc, yc, math.sqrt(z2))
        else:
            # clicked outside the circle: clamp to the sphere's edge so
            # mouseMove always has a valid starting point (Z rotation)
            lInvMag = 1./math.sqrt(xc*xc + yc*yc)
            self.lastPt3D = (xc*lInvMag*self.r, yc*lInvMag*self.r, 0.)
def mouseMove(self, event):
        # simple trackball, only works inside the circle
        # creates an XY rotation defined by pts intersecting the sphere
        xc = event.x - self.xm
        yc = self.ym - event.y
        # compute the intersection point between the move and the sphere
xc2 = xc*xc
yc2 = yc*yc
z2 = self.r2-xc2-yc2
if z2 < 0:
lInvMag = 1./math.sqrt(xc2 + yc2)
xc *= lInvMag * (self.r)
yc *= lInvMag * (self.r)
z2 = 0
# compute rotation angle
a = self.lastPt3D
b = (xc, yc, math.sqrt(z2))
ang = math.acos((a[0]*b[0]+a[1]*b[1]+a[2]*b[2])/self.r2)
if self.mode=='XY':
#compute rotation axis
rotaxis = numpy.array( (a[1]*b[2] - a[2]*b[1],
a[2]*b[0] - a[0]*b[2],
a[0]*b[1] - a[1]*b[0] ), 'f' )
elif self.mode=='X': rotaxis = numpy.array( (1.,0.,0.), 'f')
elif self.mode=='Y': rotaxis = numpy.array( (0.,1.,0.), 'f')
elif self.mode=='Z': rotaxis = numpy.array( (0.,0.,1.), 'f')
mat = rotax( self.zeros, rotaxis, ang )
self.lastPt3D = b
self.updateVector(mat)
def updateVector(self, mat):
mat = numpy.reshape(mat, (4,4))
newPts = self.vector + [1]
newPts = numpy.dot( [newPts], mat )[0]
self.vector = [newPts[0], newPts[1], newPts[2]]
self.setEntries()
self.drawVector()
if self.continuous:
self.callbacks.CallCallbacks(self.vector)
def drawVector(self):
coords3D = self.vector + [1]
# apply viewing transformation to vector
newPtsWithView = numpy.dot( [coords3D],
self.viewingMat)[0]
        # compute 2D projection of the vector (broken into 2 segments for
        # depth cueing)
x1 = self.xm+int(newPtsWithView[0]*(self.xm))
y1 = self.ym+int(newPtsWithView[1]*(self.ym))
# change vector's segments coordinates
self.canvas.coords(self.lId1, self.xm, self.ym, x1, y1)
# update vector shadows
# Y=0 plane
if self.drawShadowY:
pt = [coords3D[0], 0, coords3D[2], 1.]
newPtsWithView = numpy.dot( [pt], self.viewingMat)[0]
xm = self.xm+int(newPtsWithView[0]*(self.xm))
ym = self.ym+int(newPtsWithView[1]*(self.ym))
if self.fillShadowPlanes:
self.canvas.coords(self.shadowPY,self.xm,self.ym,xm,ym,x1,y1)
self.canvas.coords(self.shadowY,self.xm,self.ym,xm,ym,x1,y1)
# X=0 plane
if self.drawShadowX:
pt = [0, coords3D[1], coords3D[2], 1.]
newPtsWithView = numpy.dot( [pt], self.viewingMat)[0]
xm = self.xm+int(newPtsWithView[0]*(self.xm))
ym = self.ym+int(newPtsWithView[1]*(self.ym))
if self.fillShadowPlanes:
self.canvas.coords(self.shadowPX,self.xm,self.ym,xm,ym,x1,y1)
self.canvas.coords(self.shadowX, self.xm, self.ym, xm, ym, x1,y1)
# Z=0 plane
if self.drawShadowZ:
pt = [coords3D[0], coords3D[1], 0, 1.]
newPtsWithView = numpy.dot( [pt], self.viewingMat)[0]
xm = self.xm+int(newPtsWithView[0]*(self.xm))
ym = self.ym+int(newPtsWithView[1]*(self.ym))
if self.fillShadowPlanes:
self.canvas.coords(self.shadowPZ,self.xm,self.ym,xm,ym,x1,y1)
self.canvas.coords(self.shadowZ, self.xm, self.ym, xm, ym, x1,y1)
if self.vector[0]<0.0:
self.canvas.tag_raise('verticalCircle', 'moving')
else:
self.canvas.tag_lower('verticalCircle', 'moving')
if self.vector[1]<0.0:
self.canvas.tag_raise('horizontalCircle', 'moving')
else:
self.canvas.tag_lower('horizontalCircle', 'moving')
if self.vector[2]<0.0 or self.vector[1]<0.0:
self.canvas.tag_raise('axis', 'moving')
else:
self.canvas.tag_lower('axis', 'moving')
def thumbx_cb(self, events=None):
val=self.thumbx.value
## valX=self.thumbx.value
## valY=self.thumby.value
## valZ=self.thumbz.value
## n = math.sqrt(valX*valX+valY*valY+valZ*valZ)
## if n == 0.0: v = [0.0, 0.0, 1.0]
## else: v = [valX/n, valY/n, valZ/n]
## val = v[0]
rot = numpy.zeros( (4,4), 'f' )
rot[0][0] = 1.0
rot[1][1] = math.cos(val)
rot[1][2] = -math.sin(val)
rot[2][1] = math.sin(val)
rot[2][2] = math.cos(val)
self.updateVector(rot)
def thumby_cb(self, events=None):
val=self.thumby.value
rot = numpy.zeros( (4,4), 'f' )
rot[0][0] = math.cos(val)
rot[0][2] = -math.sin(val)
rot[1][1] = 1.0
rot[2][0] = math.sin(val)
rot[2][2] = math.cos(val)
self.updateVector(rot)
def thumbz_cb(self, events=None):
val=self.thumbz.value
rot = numpy.zeros( (4,4), 'f' )
rot[0][0] = math.cos(val)
rot[0][1] = -math.sin(val)
rot[1][0] = math.sin(val)
rot[1][1] = math.cos(val)
rot[2][2] = 1.0
self.updateVector(rot)
def entryX_cb(self, event=None):
val = self.entryXTk.get()
if len(val) == 0: val = self.vector[0]
try:
val = float(val)
self.entryXTk.set(self.thumbx.labelFormat%val)
except ValueError:
# put back original value if someone types garbage
self.entryXTk.set(self.thumbx.labelFormat%self.vector[0])
def entryY_cb(self, event=None):
val = self.entryYTk.get()
if len(val) == 0: val = self.vector[1]
try:
val = float(val)
self.entryYTk.set(self.thumby.labelFormat%val)
except ValueError:
# put back original value if someone types garbage
self.entryYTk.set(self.thumby.labelFormat%self.vector[1])
def entryZ_cb(self, event=None):
val = self.entryZTk.get()
if len(val) == 0: val = self.vector[2]
try:
val = float(val)
self.entryZTk.set(self.thumbz.labelFormat%val)
except ValueError:
# put back original value if someone types garbage
self.entryZTk.set(self.thumbz.labelFormat%self.vector[2])
def entryV_cb(self, event=None):
v = self.entryVTk.get()
try: val = string.split(v)
except:
self.setEntries()
return
if val is None or len(val)!= 3:
self.setEntries()
return
try:
valX = float(val[0])
valY = float(val[1])
valZ = float(val[2])
except:
self.setEntries()
return
# compute normalized vector
n = math.sqrt(valX*valX+valY*valY+valZ*valZ)
if n == 0.0: v = [0.0, 0.0, 1.0]
else: v = [valX/n, valY/n, valZ/n]
self.vector = v
self.setEntries()
self.drawVector()
if self.continuous:
self.callbacks.CallCallbacks(self.vector)
def setButton_cb(self, event=None):
valX = float(self.entryXTk.get())
valY = float(self.entryYTk.get())
valZ = float(self.entryZTk.get())
# compute normalized vector
n = math.sqrt(valX*valX+valY*valY+valZ*valZ)
if n == 0.0: v = [0.0, 0.0, 1.0]
else: v = [valX/n, valY/n, valZ/n]
self.vector = v
self.setEntries()
self.drawVector()
if self.continuous:
self.callbacks.CallCallbacks(self.vector)
def createEntries(self, master):
self.f = tkinter.Frame(master)
#self.f.grid(column=3, rowspan=3)
def fX(): self.vector = [1.,0.,0.]; self.setEntries(); self.callbacks.CallCallbacks(self.vector)
def fY(): self.vector = [0.,1.,0.]; self.setEntries(); self.callbacks.CallCallbacks(self.vector)
def fZ(): self.vector = [0.,0.,1.]; self.setEntries(); self.callbacks.CallCallbacks(self.vector)
f1 = tkinter.Frame(master)
f2 = tkinter.Frame(master)
f3 = tkinter.Frame(master)
f1.pack(side='top')
f2.pack(side='top')
f3.pack(side='top')
lX = tkinter.Button(master=f1, text='x', command=fX)
lX.pack(side='left')
lY = tkinter.Button(master=f2, text='y', command=fY)
lY.pack(side='left')
lZ = tkinter.Button(master=f3, text='z', command=fZ)
lZ.pack(side='left')
#lX = Tkinter.Button(master=self.f, text='x', command=fX)
#lY = Tkinter.Button(master=self.f, text='y', command=fY)
#lZ = Tkinter.Button(master=self.f, text='z', command=fZ)
#lX.grid(row=0, column=0)
#lY.grid(row=1, column=0)
#lZ.grid(row=2, column=0)
#self.thumbx = ThumbWheel(master=self.f, width=50,
self.thumbx = ThumbWheel(master=f1, width=50,
height=20, labcfg={'text':'X:','side':'left'},
wheelPad=2, oneTurn=.1, min=-1, max=1,
showLabel=0, precision=5, type=float)
self.thumbx.callbacks.AddCallback(self.thumbx_cb)
self.thumbx.unbind("<Button-3>")
self.thumbx.canvas.unbind("<Button-3>")
#self.thumbx.grid(row=0, column=1)
self.thumbx.pack(side='left')
#self.thumby = ThumbWheel(master=self.f, width=50,
self.thumby = ThumbWheel(master=f2, width=50,
height=20, labcfg={'text':'Y:','side':'left'},
wheelPad=2, oneTurn=.1, min=-1, max=1,
showLabel=0, precision=5, type=float)
self.thumby.callbacks.AddCallback(self.thumby_cb)
self.thumby.unbind("<Button-3>")
self.thumby.canvas.unbind("<Button-3>")
#self.thumby.grid(row=1, column=1)
self.thumby.pack(side='left')
#self.thumbz = ThumbWheel(master=self.f, width=50,
self.thumbz = ThumbWheel(master=f3, width=50,
height=20, labcfg={'text':'Z:','side':'left'},
wheelPad=2, oneTurn=.1, min=-1, max=1,
showLabel=0, precision=5, type=float)
self.thumbz.callbacks.AddCallback(self.thumbz_cb)
self.thumbz.unbind("<Button-3>")
self.thumbz.canvas.unbind("<Button-3>")
#self.thumbz.grid(row=2, column=1)
self.thumbz.pack(side='left')
self.entryXTk = tkinter.StringVar()
#self.entryX = Tkinter.Entry(master=self.f, textvariable=self.entryXTk,
self.entryX = tkinter.Entry(master=f1, textvariable=self.entryXTk,
width=8)
self.entryX.bind('<Return>', self.entryX_cb)
#self.entryX.grid(row=0, column=2)
self.entryX.pack(side='left')
self.entryYTk = tkinter.StringVar()
#self.entryY = Tkinter.Entry(master=self.f, textvariable=self.entryYTk,
self.entryY = tkinter.Entry(master=f2, textvariable=self.entryYTk,
width=8)
self.entryY.bind('<Return>', self.entryY_cb)
#self.entryY.grid(row=1, column=2)
self.entryY.pack(side='left')
self.entryZTk = tkinter.StringVar()
#self.entryZ = Tkinter.Entry(master=self.f, textvariable=self.entryZTk,
self.entryZ = tkinter.Entry(master=f3, textvariable=self.entryZTk,
width=8)
self.entryZ.bind('<Return>', self.entryZ_cb)
#self.entryZ.grid(row=2, column=2)
self.entryZ.pack(side='left')
self.entryVTk = tkinter.StringVar()
self.entryV = tkinter.Entry(master, textvariable=self.entryVTk,
width=18)
self.entryV.bind('<Return>', self.entryV_cb)
self.f.pack(side='top', expand=1)
self.entryV.pack()
self.setButton=tkinter.Button(master, text='normalize and set',
command = self.setButton_cb)
self.setButton.pack(side='bottom')
def setEntries(self):
self.entryXTk.set(self.thumbx.labelFormat%self.vector[0])
self.entryYTk.set(self.thumby.labelFormat%self.vector[1])
self.entryZTk.set(self.thumbz.labelFormat%self.vector[2])
lf = '%.3f'
self.entryVTk.set(lf%self.vector[0]+' '+lf%self.vector[1]+' '\
+lf%self.vector[2])
self.drawVector()
def createCanvas(self, master, size=200):
self.frame = tkinter.Frame(self.master, relief = 'sunken', borderwidth=5)
if self.name is not None:
self.title = tkinter.Label(self.frame, text=self.name)
self.title.pack(side=self.labelSide)
self.canvas = tkinter.Canvas(self.frame, width=size, height=size)
# set the focus so that we get keyboard events, and add callbacks
self.canvas.bind('<KeyPress>', self.modifierDown)
self.canvas.bind("<KeyRelease>", self.modifierUp)
xm = self.xm = ym = self.ym = self.r
self.canvas.create_oval(0, 0, size, size)
self.canvas.create_oval(xm-(xm/4), 0, xm+(xm/4), size,
tags='verticalCircle')
self.canvas.create_oval(0, ym-(ym/4), size, ym+(ym/4),
tags='horizontalCircle')
# apply viewing transformation to vector
XaxisWithView = numpy.dot([(1.,0.,0.,1.)],self.viewingMat)[0]
x1 = self.xm+int(XaxisWithView[0]*(self.xm))
y1 = self.ym+int(XaxisWithView[1]*(self.ym))
self.canvas.create_line(xm, ym, x1, y1, fill='red', tags='axis')
XaxisWithView = numpy.dot([(0.,1.,0.,1.)],self.viewingMat)[0]
x2 = self.xm+int(XaxisWithView[0]*(self.xm))
y2 = self.ym+int(XaxisWithView[1]*(self.ym))
self.canvas.create_line(xm, ym, x2, y2, fill='green', tags='axis')
XaxisWithView = numpy.dot([(0.,0.,1.,1.)],self.viewingMat)[0]
x3 = self.xm+int(XaxisWithView[0]*(self.xm))
y3 = self.ym+int(XaxisWithView[1]*(self.ym))
self.canvas.create_line(xm, ym, x3, y3, fill='blue', tags='axis')
self.textId = self.canvas.create_text(0, size, anchor='sw', text="XY")
# shadow line in X=0 plane
self.shadowPX = self.canvas.create_polygon(0,0,0,0,0,0, fill='red',
tag='moving')
self.shadowPY = self.canvas.create_polygon(0,0,0,0,0,0, fill='green',
tag='moving')
self.shadowPZ = self.canvas.create_polygon(0,0,0,0,0,0, fill='blue',
tag='moving')
self.shadowX = self.canvas.create_line(0, 0, 0, 0, fill='black',
tag='moving')
self.shadowY = self.canvas.create_line(0, 0, 0, 0, fill='black',
tag='moving')
self.shadowZ = self.canvas.create_line(0, 0, 0, 0, fill='black',
tag='moving')
self.lId1 = self.canvas.create_line(0, 0, 0, 0, fill='black', width=3,
arrow='last')
self.canvas.pack(side='top')
self.frame.pack(expand=1, fill='x')
self.xm = self.ym = self.r
self.drawVector()
def setVector(self, value):
#setVector does not call a callback!
v = value
# compute normalized vector
n = math.sqrt(v[0]*v[0]+v[1]*v[1]+v[2]*v[2])
if n == 0.0: v = [0.0, 0.0, 1.0]
else: v = [v[0]/n, v[1]/n, v[2]/n]
self.vector = v
self.setEntries()
self.drawVector()
#####################################################################
# the 'configure' methods:
#####################################################################
def configure(self, **kw):
for key,value in list(kw.items()):
# the 'set parameter' callbacks
if key=='continuous': self.setContinuous(value)
elif key=='mode': self.setMode(value)
elif key=='precision': self.setPrecision(value)
# the 'lock entries' callbacks
elif key=='lockContinuous': self.lockContinuousCB(value)
elif key=='lockMode': self.lockModeCB(value)
elif key=='lockPrecision': self.lockPrecisionCB(value)
def setContinuous(self, cont):
""" cont can be None, 0 or 1 """
if cont != 1:
cont = None
self.continuous = cont
if hasattr(self.opPanel, 'optionsForm'):
w=self.opPanel.idf.entryByName['togCont']['widget']
if cont:
w.setvalue('on')
else:
w.setvalue('off')
def setMode(self, mode):
if mode!='XY' and mode!='X' and mode!='Y' and mode!='Z': mode = 'XY'
self.canvas.itemconfigure( self.textId, text=mode)
self.mode = mode
if hasattr(self.opPanel, 'optionsForm'):
w=self.opPanel.idf.entryByName['togAxes']['widget']
w.setvalue(mode)
def setPrecision(self, val):
val = int(val)
if val > 10: val = 10
if val < 1: val = 1
self.thumbx.configure(precision=val)
self.thumby.configure(precision=val)
self.thumbz.configure(precision=val)
self.entryXTk.set(self.thumbx.labelFormat%self.vector[0])
self.entryYTk.set(self.thumby.labelFormat%self.vector[1])
self.entryZTk.set(self.thumbz.labelFormat%self.vector[2])
if hasattr(self.opPanel, 'optionsForm'):
w = self.opPanel.idf.entryByName['selPrec']['widget']
w.setvalue(val)
if self.opPanel:
self.opPanel.updateDisplay()
#####################################################################
# the 'lock' methods:
#####################################################################
def lockContinuousCB(self, mode):
if mode != 0: mode = 1
self.lockContinuous = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockPrecisionCB(self, mode):
if mode != 0: mode = 1
self.lockPrecision = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockModeCB(self, mode):
if mode != 0: mode = 1
self.lockMode = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
if __name__ == '__main__':
test = vectorGUI(size = 200)
def foo(val):
print(val)
test.callbacks.AddCallback(foo)
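    # Usage sketch for the configure() API documented in the class docstring
    # (the values below are arbitrary examples):
    test.configure(mode='Z', precision=3, continuous=1)
    test.configure(lockMode=1)    # disable the mode selector in the options panel
    test.setVector([1., 1., 0.])  # programmatic update; does not fire callbacks
    test.master.mainloop()        # start the Tk event loop when run standalone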
|
StarcoderdataPython
|
170736
|
<reponame>mishrakeshav/Competitive-Programming
# Definition for a binary tree node (uncommented here so the type
# annotation below resolves outside the LeetCode harness).
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
class Solution:
def diameterOfBinaryTree(self, root: TreeNode) -> int:
def helper(root):
if root is None:
return 0,0
if root.left is None and root.right is None:
return 1,1
l1,m1 = helper(root.left)
l2,m2 = helper(root.right)
l = max(l1,l2) + 1
m = max(l1 + l2 + 1, m1 ,m2)
return l,m
l,m = helper(root)
if m:
return m -1
return m
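# Quick check sketch (hypothetical tree; the longest path 4-2-1-3 has 3 edges):
if __name__ == "__main__":
    root = TreeNode(1, TreeNode(2, TreeNode(4), TreeNode(5)), TreeNode(3))
    print(Solution().diameterOfBinaryTree(root))  # -> 3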
|
StarcoderdataPython
|
265440
|
#!/usr/bin/env python
import unittest
from pycoin.key import Key
from pycoin.serialize import h2b
class KeyTest(unittest.TestCase):
def test_translation(self):
def do_test(exp_hex, wif, c_wif, public_pair_sec, c_public_pair_sec, address_b58, c_address_b58):
secret_exponent = int(exp_hex, 16)
sec = h2b(public_pair_sec)
c_sec = h2b(c_public_pair_sec)
keys_wif = [
Key(secret_exponent=secret_exponent),
Key.from_text(wif),
Key.from_text(c_wif),
]
key_sec = Key.from_sec(sec)
key_sec_c = Key.from_sec(c_sec)
keys_sec = [key_sec, key_sec_c]
for key in keys_wif:
self.assertEqual(key.secret_exponent(), secret_exponent)
self.assertEqual(key.public_copy().secret_exponent(), None)
v = repr(key)
if key._prefer_uncompressed:
self.assertEqual(key.wif(), wif)
else:
self.assertEqual(key.wif(), c_wif)
self.assertEqual(key.wif(use_uncompressed=True), wif)
self.assertEqual(key.wif(use_uncompressed=False), c_wif)
for key in keys_wif + keys_sec:
if key._prefer_uncompressed:
self.assertEqual(key.sec(), sec)
else:
self.assertEqual(key.sec(), c_sec)
self.assertEqual(key.sec(use_uncompressed=True), sec)
self.assertEqual(key.sec(use_uncompressed=False), c_sec)
if key._prefer_uncompressed:
self.assertEqual(key.address(), address_b58)
else:
self.assertEqual(key.address(), c_address_b58)
self.assertEqual(key.address(use_uncompressed=False), c_address_b58)
self.assertEqual(key.address(use_uncompressed=True), address_b58)
key_pub = Key.from_text(address_b58, is_compressed=False)
key_pub_c = Key.from_text(c_address_b58, is_compressed=True)
self.assertEqual(key_pub.address(), address_b58)
self.assertEqual(key_pub.address(use_uncompressed=True), address_b58)
self.assertEqual(key_pub.address(use_uncompressed=False), None)
self.assertEqual(key_pub_c.address(), c_address_b58)
self.assertEqual(key_pub_c.address(use_uncompressed=True), None)
self.assertEqual(key_pub_c.address(use_uncompressed=False), c_address_b58)
do_test("1111111111111111111111111111111111111111111111111111111111111111",
"<KEY>",
"<KEY>",
"<KEY>"\
"385b6b1b8ead809ca67454d9683fcf2ba03456d6fe2c4abe2b07f0fbdbb2f1c1",
"034f355bdcb7cc0af728ef3cceb9615d90684bb5b2ca5f859ab0f0b704075871aa",
"1MsHWS1BnwMc3tLE8G35UXsS58fKipzB7a",
"1Q1pE5vPGEEMqRcVRMbtBK842Y6Pzo6nK9")
do_test("dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd",
"5KVzsHJiUxgvBBgtVS7qBTbbYZpwWM4WQNCCyNSiuFCJzYMxg8H",
"L4ezQvyC6QoBhxB4GVs9fAPhUKtbaXYUn8YTqoeXwbevQq4U92vN",
"04ed83704c95d829046f1ac27806211132102c34e9ac7ffa1b71110658e5b9d1bd"\
"<KEY>",
"02ed83704c95d829046f1ac27806211132102c34e9ac7ffa1b71110658e5b9d1bd",
"<KEY>",
"<KEY>")
do_test("47f7616ea6f9b923076625b4488115de1ef1187f760e65f89eb6f4f7ff04b012",
"5JMys7YfK72cRVTrbwkq5paxU7vgkMypB55KyXEtN5uSnjV7K8Y",
"KydbzBtk6uc7M6dXwEgTEH2sphZxSPbmDSz6kUUHi4eUpSQuhEbq",
"042596957532fc37e40486b910802ff45eeaa924548c0e1c080ef804e523ec3ed3"\
"ed0a9004acf927666eee18b7f5e8ad72ff100a3bb710a577256fd7ec81eb1cb3",
"032596957532fc37e40486b910802ff45eeaa924548c0e1c080ef804e523ec3ed3",
"<KEY>",
"<KEY>")
# in this case, the public_pair y value is less than 256**31, and so has a leading 00 byte.
# This triggers a bug in the Python 2.7 version of to_bytes_32.
do_test("ae2aaef5080b6e1704aab382a40a7c9957a40b4790f7df7faa04b14f4db56361",
"<KEY>",
"<KEY>",
"04f650fb572d1475950b63f5175c77e8b5ed9035a209d8fb5af5a04d6bc39b7323"\
"00186733fcfe3def4ace6feae8b82dd03cc31b7855307d33b0a039170f374962",
"02f650fb572d1475950b63f5175c77e8b5ed9035a209d8fb5af5a04d6bc39b7323",
"<KEY>",
"<KEY>")
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
1623317
|
from authlib.jose import jwt
from authlib.jose.errors import BadSignatureError, DecodeError
from flask import current_app, jsonify, request
from errors import AuthorizationError, InvalidArgumentError
def get_auth_token():
"""
Parse and validate incoming request Authorization header.
NOTE. This function is just an example of how one can read and check
anything before passing to an API endpoint, and thus it may be modified in
any way, replaced by another function, or even removed from the module.
"""
expected_errors = {
KeyError: 'Authorization header is missing',
AssertionError: 'Wrong authorization type'
}
try:
scheme, token = request.headers['Authorization'].split()
assert scheme.lower() == 'bearer'
return token
except tuple(expected_errors) as error:
raise AuthorizationError(expected_errors[error.__class__])
def get_jwt():
"""
Parse the incoming request's Authorization Bearer JWT for some credentials.
Validate its signature against the application's secret key.
NOTE. This function is just an example of how one can read and check
anything before passing to an API endpoint, and thus it may be modified in
any way, replaced by another function, or even removed from the module.
"""
expected_errors = {
KeyError: 'Wrong JWT payload structure',
TypeError: '<SECRET_KEY> is missing',
BadSignatureError: 'Failed to decode JWT with provided key',
DecodeError: 'Wrong JWT structure'
}
token = get_auth_token()
try:
        # Presumed decode step (the original body was stubbed to return "");
        # the expected errors above match authlib's jwt.decode failure modes.
        return jwt.decode(token, current_app.config.get('SECRET_KEY'))
except tuple(expected_errors) as error:
raise AuthorizationError(expected_errors[error.__class__])
def get_json(schema):
"""
Parse the incoming request's data as JSON.
Validate it against the specified schema.
NOTE. This function is just an example of how one can read and check
anything before passing to an API endpoint, and thus it may be modified in
any way, replaced by another function, or even removed from the module.
"""
data = request.get_json(force=True, silent=True, cache=False)
'''
message = schema.validate(data)
if message:
raise InvalidArgumentError(message)
'''
return data
def jsonify_data(data):
return jsonify({'data': data})
def jsonify_errors(data):
return jsonify({'errors': [data]})
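# Usage sketch inside a Flask view (the route and schema are hypothetical;
# the helpers above only need an active request context):
#
# @app.route('/observe', methods=['POST'])
# def observe():
#     get_jwt()                        # raises AuthorizationError on bad auth
#     payload = get_json(SomeSchema())
#     return jsonify_data(payload)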
|
StarcoderdataPython
|
4646
|
<filename>plugins/panorama/panorama/__init__.py
# -*- coding: utf-8 -*-
"""
Panorama is a Pelican plugin to generate statistics from blog posts
(number of posts per month, categories and so on) display them as beautiful charts.
Project location: https://github.com/romainx/panorama
"""
__version__ = "0.2.0"
__author__ = "romainx"
from .panorama import *
|
StarcoderdataPython
|
8010509
|
import os
import time
from buildworker_scripts.utils.logutils import Log
import boto3
import botocore
import botocore.exceptions
class S3Session(object):
def __init__(self, logger=None, bucket=None):
self.s3client = None
if logger is None:
self.log = Log(name=__name__)
else:
self.log = logger
self.bucket = bucket
self.makeclient()
def makeclient(self):
self.s3client = boto3.Session().client('s3')
def upload(self, Filename, Key):
if self.s3client is None:
self.makeclient()
try:
self.s3client.upload_file(Bucket=self.bucket, Key=Key, Filename=Filename)
except botocore.exceptions.ClientError as e:
err = e.response['Error']
self.log.warn("{}/{}: {} {}".format(self.bucket, Key, err['Code'], err['Message']))
return False
return True
def download(self, Key, Filename, quiet=True):
if self.s3client is None:
self.makeclient()
try:
info = self.s3client.head_object(Bucket=self.bucket, Key=Key)
self.s3client.download_file(Bucket=self.bucket, Key=Key, Filename=Filename)
if 'LastModified' in info:
mtime = int(time.mktime(info['LastModified'].timetuple()))
os.utime(Filename, (mtime, mtime))
except botocore.exceptions.ClientError as e:
err = e.response['Error']
if quiet and err['Code'] == "404":
self.log.debug(2, "not found: {}/{}".format(self.bucket, Key))
else:
self.log.warn("{}/{}: {} {}".format(self.bucket, Key, err['Code'], err['Message']))
return False
except OSError as e:
            if not quiet:
                self.log.warn("os.utime({}): {} (errno {})".format(Filename, e.strerror, e.errno))
return False
return True
def get_object_info(self, Key, quiet=True):
if self.s3client is None:
self.makeclient()
try:
info = self.s3client.head_object(Bucket=self.bucket, Key=Key)
except botocore.exceptions.ClientError as e:
err = e.response['Error']
if quiet and err['Code'] == "404":
self.log.debug(2, "not found: {}/{}".format(self.bucket, Key))
else:
self.log.warn("{}/{}: {} {}".format(self.bucket, Key, err['Code'], err['Message']))
return None
return info
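# Minimal usage sketch (bucket and key names are hypothetical; credentials
# come from the standard boto3 environment/config chain):
#
# s3 = S3Session(bucket="my-build-artifacts")
# if not s3.download("cache/deps.tar.gz", "/tmp/deps.tar.gz"):
#     pass  # cache miss: rebuild, then repopulate with s3.upload(...)
# s3.upload(Filename="/tmp/deps.tar.gz", Key="cache/deps.tar.gz")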
|
StarcoderdataPython
|
3336217
|
from setuptools import setup
version = {}
with open("src/datadog_cloudformation_common/version.py") as fp:
exec(fp.read(), version)
setup(
version=version["__version__"]
)
|
StarcoderdataPython
|
266589
|
<reponame>ajrichards/bayesian-examples
#!/usr/bin/env python
"""
multi-level example with GLM
Gelman et al.'s (2007) radon dataset is a classic for hierarchical modeling.
Radon gas is known to be the leading cause of lung cancer in non-smokers.
Here we'll investigate these differences and try to predict radon levels
in different counties based on the county itself and the presence of a
basement.
radon_ic = b0 + b1*floor_ic + epsilon
The radon level (i) in county (c) is a linear function of the floor the
measurement was taken on.
There are 85 counties in MN
pooling - just run a regression on all data and assess
"""
import os
import pickle
import matplotlib.pyplot as plt
import numpy as np
import pymc3 as pm
import pandas as pd
data = pd.read_csv('radon.csv')
county_names = data.county.unique()
county_idx = data.county_code.values
n_counties = len(data.county.unique())
print("total counties: %s"%(n_counties))
print(data[['county', 'log_radon', 'floor']].head())
## unpooled
############################################################################
run_trace = False
with pm.Model() as unpooled_model:
# Independent parameters for each county
a = pm.Normal('a', 0, sd=100, shape=n_counties)
b = pm.Normal('b', 0, sd=100, shape=n_counties)
# Model error
eps = pm.HalfCauchy('eps', 5)
# Model prediction of radon level
# a[county_idx] translates to a[0, 0, 0, 1, 1, ...],
# we thus link multiple household measures of a county
# to its coefficients.
radon_est = a[county_idx] + b[county_idx]*data.floor.values
# Data likelihood
y = pm.Normal('y', radon_est, sd=eps, observed=data.log_radon)
with unpooled_model:
    trace_pickle = "traces/unpooled_radon.pkl"
    if run_trace or not os.path.exists(trace_pickle):
        # draw count is arbitrary; the original referenced undefined niter/step/start
        unpooled_trace = pm.sample(2000, random_seed=123, progressbar=True)
        with open(trace_pickle, 'wb') as tmp:
            pickle.dump(unpooled_trace, tmp)
    else:
        print("...loading saved trace")
        with open(trace_pickle, 'rb') as tmp:
            unpooled_trace = pickle.load(tmp)
############################################################################
run_trace = False
with pm.Model() as hierarchical_model:
# Hyperpriors for group nodes
mu_a = pm.Normal('mu_a', mu=0., sd=100**2)
sigma_a = pm.HalfCauchy('sigma_a', 5)
mu_b = pm.Normal('mu_b', mu=0., sd=100**2)
sigma_b = pm.HalfCauchy('sigma_b', 5)
# Intercept for each county, distributed around group mean mu_a
# Above we just set mu and sd to a fixed value while here we
# plug in a common group distribution for all a and b (which are
# vectors of length n_counties).
a = pm.Normal('a', mu=mu_a, sd=sigma_a, shape=n_counties)
# Intercept for each county, distributed around group mean mu_a
b = pm.Normal('b', mu=mu_b, sd=sigma_b, shape=n_counties)
# Model error
eps = pm.HalfCauchy('eps', 5)
radon_est = a[county_idx] + b[county_idx] * data.floor.values
# Data likelihood
radon_like = pm.Normal('radon_like', mu=radon_est, sd=eps, observed=data.log_radon)
# Inference button (TM)!
with hierarchical_model:
    trace_pickle = "traces/hierarchical_radon.pkl"
    if run_trace or not os.path.exists(trace_pickle):
        # draw count is arbitrary; the original referenced undefined niter/step/start
        hierarchical_trace = pm.sample(2000, random_seed=123, progressbar=True)
        with open(trace_pickle, 'wb') as tmp:
            pickle.dump(hierarchical_trace, tmp)
    else:
        print("...loading saved trace")
        with open(trace_pickle, 'rb') as tmp:
            hierarchical_trace = pickle.load(tmp)
# Plotting the hierarchical model trace -its found values- from 1000 iterations onwards (right side plot)
# and its accumulated marginal values (left side plot)
pm.traceplot(hierarchical_trace[1000:]);
plt.show()
|
StarcoderdataPython
|
1877259
|
<filename>items/migrations/0002_item_whereabouts.py
# Generated by Django 2.2.8 on 2020-01-10 16:57
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('items', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='item',
name='whereabouts',
field=models.CharField(default=0, max_length=100),
preserve_default=False,
),
]
|
StarcoderdataPython
|
4865228
|
<reponame>sebastien-Boussier/pygamon<filename>game.py
import pygame
import pyscroll
import pytmx.util_pygame
from player import Joueur
class Game:
    def __init__(self):
        # create the game window
        self.screen = pygame.display.set_mode((800,600))
        pygame.display.set_caption("Pygamon - Adventure")
        # load the map (tmx)
        tmx_data = pytmx.util_pygame.load_pygame('assets/carte.tmx')
        map_data = pyscroll.data.TiledMapData(tmx_data)
        map_layer = pyscroll.orthographic.BufferedRenderer(map_data, self.screen.get_size())
        # spawn a player
        player_position = tmx_data.get_object_by_name("player")
        self.player = Joueur(player_position.x, player_position.y)
        # define a list for collisions
        self.walls = []
        for obj in tmx_data.objects:
            if obj.type == "collision":
                self.walls.append(pygame.Rect(obj.x, obj.y, obj.width, obj.height))
        # draw the layer group
        self.groupe = pyscroll.PyscrollGroup(map_layer=map_layer, default_layer=5)
        self.groupe.add(self.player)
    def handle_input(self):
        pressed = pygame.key.get_pressed()
        if pressed[pygame.K_UP]:
            print("up")
            self.player.move_above()
        elif pressed[pygame.K_DOWN]:
            print("down")
            self.player.move_below()
        elif pressed[pygame.K_LEFT]:
            print("left")
            self.player.move_left()
        elif pressed[pygame.K_RIGHT]:
            print("right")
            self.player.move_right()
def update(self):
self.groupe.update()
        # check for collisions
for sprite in self.groupe.sprites():
if sprite.feet.collidelist(self.walls) > -1:
sprite.move_back()
def run(self):
clock = pygame.time.Clock()
        # create the game loop
running = True
while running:
self.player.save_location()
self.handle_input()
self.update()
self.groupe.center(self.player.rect)
self.groupe.draw(self.screen)
pygame.display.flip()
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
clock.tick(60)
pygame.quit()
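# Minimal launcher sketch (assumes the asset paths hard-coded above exist):
if __name__ == "__main__":
    pygame.init()
    Game().run()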
|
StarcoderdataPython
|
6596383
|
#!/usr/bin/env python
import os
import sys
import unittest
pkg_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) # noqa
sys.path.insert(0, pkg_root) # noqa
from tests import config
from terra_notebook_utils import costs
class TestTerraNotebookUtilsCosts(unittest.TestCase):
def test_costs_estimate_n1(self):
tests = [(0.00043, True), (0.00203, False)]
for cost, preemptible in tests:
with self.subTest(preemptible=preemptible):
self.assertEqual(cost, round(costs.GCPCustomN1Cost.estimate(3, 5, 7, preemptible), 5))
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
9706626
|
<reponame>matthewwardrop/python-mplkit<filename>examples/contour_image.py
import sys
sys.path.insert(0,'..')
from mplkit.plot import *
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize
from matplotlib.colorbar import ColorbarBase
from mplkit.style import *
cmap = matplotlib.cm.Blues
with SampleStyle() as style:
plt.figure()
x,y = np.linspace(-10,10,201), np.linspace(-10,10,201)
X,Y = np.meshgrid(x,y)
data = np.log(1./(1+X**2+Y**2))
contour_image(x,y,data, outline=True, label=True, cguides=[-3.2], cguide_stride=1, contour_smoothing=1)
style.savefig('contour_image.pdf')
|
StarcoderdataPython
|
8078484
|
<reponame>PandoraLS/python_toys
import time
import urllib.request
import urllib.parse
from urllib.error import HTTPError, URLError
import socket
import requests
from random import choice
from bs4 import BeautifulSoup
socket.setdefaulttimeout(30)
user_agents = [
'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)',
# 'Googlebot/2.1 (+http://www.google.com/bot.html)',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko)'
' Ubuntu Chromium/49.0.2623.108 Chrome/49.0.2623.108 Safari/537.36',
# 'Gigabot/3.0 (http://www.gigablast.com/spider.html)',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; pt-BR) AppleWebKit/533.3 '
'(KHTML, like Gecko) QtWeb Internet Browser/3.7 http://www.QtWeb.net',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/41.0.2228.0 Safari/537.36',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.2 (KHTML, '
'like Gecko) ChromePlus/4.0.222.3 Chrome/4.0.222.3 Safari/532.2',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.4pre) '
'Gecko/20070404 K-Ninja/2.1.3',
'Mozilla/5.0 (Future Star Technologies Corp.; Star-Blade OS; x86_64; U; '
'en-US) iNet Browser 4.7',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; rv:2.2) Gecko/20110201',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.13) '
'Gecko/20080414 Firefox/2.0.0.13 Pogo/2.0.0.13.6866',
# 'WorldWideweb (NEXT)'
]
def get_html(url, code='utf-8'):
    """Fetch the page returned for the given url, decoded as utf-8 by default."""
    for i in range(3):
        try:
            page = urllib.request.urlopen(url)
            break
        except HTTPError as e:
            print('!!!%s, server could not respond, Error Code: %s' % (url, str(e.code)))
        except URLError as e:
            print('!!!%s, failed to connect to server, Reason: %s' % (url, str(e.reason)))
        except socket.timeout:
            print('!!!%s request timed out' % url)
            time.sleep(60)
        except Exception as e:
            print('!!!%s access error: %s' % (url, str(e)))
            time.sleep(20)
    else:
        print('!!!%s page request failed, skipping' % url)
        return ""
    try:
        html = page.read().decode(code, errors='ignore')
        return html
    except Exception:
        print('!!!%s failed to read page, skipping' % url)
        return ""
def get_html_with_proxies(url, code='utf-8', proxies=None, timewait=10):
"""
    Fetch the page returned for the given url, decoded as utf-8 by default.
    Use the given proxies if provided, otherwise fall back to the default proxies.
"""
if proxies is None:
proxies = {
'http': 'http://127.0.0.1:1080',
'https': 'https://127.0.0.1:1080'
}
for i in range(3):
headers = {'User-Agent': choice(user_agents)}
response = requests.get(url, proxies=proxies, headers=headers)
if response.status_code == requests.codes.ok:
response.encoding = code
return response.text
print('!!!', url, response.status_code)
time.sleep(timewait)
else:
return ""
def get_html_with_header(url, code='utf-8', header=None):
    """
    Fetch the page returned for the given url, decoded as utf-8 by default.
    A few improvements over get_html:
    1. Skip 404 responses immediately.
    2. On 403 (server Forbidden), add headers to masquerade as a browser.
    """
    headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0'}
    if header:
        headers = header
    for i in range(3):
        try:
            req = urllib.request.Request(url=url, headers=headers)
            page = urllib.request.urlopen(req)
            break
        except HTTPError as e:
            print('!!!%s, server could not respond, Error Code: %s' % (url, str(e.code)))
            if str(e.code) == '404':
                return ''
        except URLError as e:
            print('!!!%s, failed to connect to server, Reason: %s' % (url, str(e.reason)))
        except socket.timeout:
            print('!!!%s request timed out' % url)
            time.sleep(60)
        except Exception as e:
            print('!!!%s access error: %s' % (url, str(e)))
            time.sleep(20)
    else:
        print('!!!%s page request failed, skipping' % url)
        return ""
    try:
        html = page.read().decode(code, errors='ignore')
        return html
    except Exception:
        print('!!!%s failed to read page, skipping' % url)
        return ""
def get_redirect_url_and_html(url, code='utf-8'):
    """Fetch the post-redirect url and that page's content, decoded as utf-8 by default.
    return redirect_url, html
    """
    for i in range(3):
        try:
            page = urllib.request.urlopen(url)
            break
        except HTTPError as e:
            print('!!!%s, server could not respond, Error Code: %s' % (url, str(e.code)))
        except URLError as e:
            print('!!!%s, failed to connect to server, Reason: %s' % (url, str(e.reason)))
        except socket.timeout:
            print('!!!%s request timed out' % url)
            time.sleep(60)
        except Exception as e:
            print('!!!%s access error: %s' % (url, str(e)))
            time.sleep(20)
    else:
        print('!!!%s page request failed, skipping' % url)
        return None, ""
    try:
        redirect_url = urllib.parse.unquote(page.geturl())
        html = page.read().decode(code, errors='ignore')
        return redirect_url, html
    except Exception:
        print('!!!%s failed to read page, skipping' % url)
        return None, ""
def extract(html):
res = []
soup = BeautifulSoup(html, 'lxml')
infobox = soup.find('div', {'class': 'basic-info cmn-clearfix'})
if infobox:
left = infobox.find('dl', {'class': 'basicInfo-block basicInfo-left'})
right = infobox.find('dl', {'class': 'basicInfo-block basicInfo-right'})
if left:
key = [_ for _ in left.find_all('dt')]
value = [_ for _ in left.find_all('dd')]
if len(key) == len(value):
for k, v in zip(key, value):
res.append((k.get_text(), v.get_text()))
        if right:
            key = [_ for _ in right.find_all('dt')]
            value = [_ for _ in right.find_all('dd')]
if len(key) == len(value):
for k, v in zip(key, value):
res.append((k.get_text(), v.get_text()))
return res
if __name__ == '__main__':
# html = get_html_with_proxies('http://www.google.com')
# html = get_html_with_proxies('http://www.google.com')
# html = get_html('https://baike.baidu.com/item/2020%E5%B9%B4%E6%96%B0%E5%9E%8B%E5%86%A0%E7%8A%B6%E7%97%85%E6%AF%92%E7%96%AB%E6%83%85')
# html = get_html('https://baike.baidu.com')
html = get_html('https://baike.baidu.com/item/' + urllib.parse.quote('强制休息令'))
from pprint import pprint
res = extract(html)
pprint(res)
|
StarcoderdataPython
|
1747688
|
# -*- coding: utf-8 -*-
"""
This script is used to plot cohen'd using circle format.
"""
import numpy as np
import pytest
import scipy.io as sio
import matplotlib.pyplot as plt
from mne.viz import plot_connectivity_circle, circular_layout
def test_plot_connectivity_circle():
"""
Test plotting connectivity circle.
"""
# node_order = ['Amyg', 'BG', 'Tha', 'Hipp', 'Limbic', 'Visual', 'SomMot', 'Control', 'Default', 'DorsAttn', 'Sal/VentAttn'];
# label_names = ['Amyg', 'BG', 'Tha', 'Hipp', 'Limbic', 'Visual', 'SomMot', 'Control', 'Default', 'DorsAttn', 'Sal/VentAttn'];
node_order = [str(i) for i in range(246)];
label_names = [str(i) for i in range(246)];
    # group_boundaries = [0, 2, 4, 6, 8, 10]
    group_boundaries = None  # no group gaps for the 246-node layout
    node_angles = circular_layout(label_names, node_order, start_pos=90,
                                  group_boundaries=group_boundaries)
con_medicated = sio.loadmat(r'D:\WorkStation_2018\SZ_classification\Data\Stat_results\cohen_medicated1.mat')
con_unmedicated = sio.loadmat(r'D:\WorkStation_2018\SZ_classification\Data\Stat_results\cohen_feu1.mat')
con_medicated = con_medicated['cohen_medicated']
con_unmedicated = con_unmedicated['cohen_feu']
con_medicated[np.abs(con_medicated) <= 0.5] = 0
con_unmedicated[np.abs(con_unmedicated) <= 0.8] = 0
figs, ax = plt.subplots(1,2, facecolor ='k')
n_lines = np.sum(con_medicated[:] != 0)
plot_connectivity_circle(con_medicated, label_names, n_lines=n_lines,
node_angles=node_angles, title='test',
colormap='RdBu_r', vmin=-1, vmax=1, linewidth=2,
fontsize_names=12, textcolor='k', facecolor='w',
subplot=121, fig=figs, colorbar=True,)
n_lines = np.sum(con_unmedicated[:] != 0)
plot_connectivity_circle(con_unmedicated, label_names, n_lines=n_lines,
node_angles=node_angles, title='test',
colormap='RdBu_r', vmin=-1, vmax=1, linewidth=1.5,
fontsize_names=12, textcolor='k', facecolor='w',
subplot=122, fig=figs, colorbar=True)
# plt.tight_layout()
plt.subplots_adjust(wspace = 0.2, hspace = 0)
pytest.raises(ValueError, circular_layout, label_names, node_order,
group_boundaries=[-1])
pytest.raises(ValueError, circular_layout, label_names, node_order,
group_boundaries=[20, 0])
# plt.close('all')
if __name__ == "__main__":
test_plot_connectivity_circle()
|
StarcoderdataPython
|
3587474
|
<filename>genbank_fasta/urls.py
from django.conf.urls import url
from . import views
app_name = 'genbank_fasta'
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^results/$', views.generate_results, name='generate-genbank-fasta-results'),
url(r'^results/(?P<dataset_id>[0-9]+)/$', views.results, name='create-genbank-results'),
url(r'^download/(?P<dataset_id>[0-9]+)/$', views.serve_file, name='download-genbank-fasta-results'),
]
|
StarcoderdataPython
|
12855559
|
#!/usr/bin/env python
#coding:utf-8
#
# Generate 10^7 integers in random order
import random
RANGE = 10000000
f = open('../test/input/bitSort.input','w')
for i in random.sample(range(RANGE),RANGE):
    f.write(str(i) + '\n')
f.close()
print('input file generated successfully!')
|
StarcoderdataPython
|
4863623
|
from django.shortcuts import render
from metaci.api.serializers.cumulusci import OrgSerializer
from metaci.api.serializers.cumulusci import ScratchOrgInstanceSerializer
from metaci.api.serializers.cumulusci import ServiceSerializer
from metaci.cumulusci.filters import OrgFilter
from metaci.cumulusci.filters import ScratchOrgInstanceFilter
from metaci.cumulusci.models import Org
from metaci.cumulusci.models import ScratchOrgInstance
from metaci.cumulusci.models import Service
from rest_framework import viewsets
class OrgViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing Orgs
"""
serializer_class = OrgSerializer
queryset = Org.objects.all()
filter_class = OrgFilter
class ScratchOrgInstanceViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing ScratchOrgInstances
"""
serializer_class = ScratchOrgInstanceSerializer
queryset = ScratchOrgInstance.objects.all()
filter_class = ScratchOrgInstanceFilter
class ServiceViewSet(viewsets.ModelViewSet):
"""
A viewset for viewing and editing Services
"""
serializer_class = ServiceSerializer
queryset = Service.objects.all()
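# Router registration sketch (URL prefixes are hypothetical; this would
# normally live in the project's urls.py):
#
# from rest_framework import routers
# router = routers.DefaultRouter()
# router.register(r'orgs', OrgViewSet)
# router.register(r'scratch-orgs', ScratchOrgInstanceViewSet)
# router.register(r'services', ServiceViewSet)
# urlpatterns = router.urls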
|
StarcoderdataPython
|
11349762
|
import discord
from discord.ext import commands
from .core.image import Reddit, Subreddits
subreddits = Subreddits()
class ImageCog(commands.Cog):
"""Image listener cogs"""
def __init__(self, client):
self.client = client
self.reddit = Reddit()
self.name = "Image Command"
async def get_embedded_submission(self, subreddit):
"""Get reddit submission from given subreddit title."""
submission = await self.client.loop.run_in_executor(None, lambda: self.reddit.get_submission(subreddit))
embed = discord.Embed(color=discord.Colour(value=11735575).orange())
embed.set_image(url=submission.url)
return embed
async def get_embedded_search_post(self, keywords):
"""Search & get submission from subreddit"""
submission = await self.client.loop.run_in_executor(None, lambda: self.reddit.search_get_post(keywords))
embed = discord.Embed(color=discord.Colour(value=11735575).orange())
embed.set_image(url=submission.url)
return embed
@commands.command()
async def meme(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.MEMES)
)
@commands.command()
async def rwtf(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.WTF)
)
@commands.command()
async def dank(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.DANKMEMES)
)
@commands.command()
async def aniwallp(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.ANIMEWALLPAPER)
)
@commands.command()
async def animeme(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.ANIMEMES)
)
@commands.command()
async def waifu(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.WAIFU)
)
@commands.command()
async def fgo(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.GRANDORDER)
)
@commands.command()
async def fgoart(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.FGOFANART)
)
@commands.command()
async def tsun(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.TSUNDERES)
)
@commands.command()
async def anime(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.ANIME)
)
@commands.command()
async def scathach(self, ctx):
if ctx.channel.is_nsfw():
await ctx.send(
embed=await self.get_embedded_submission(subreddits.SCATHACH)
)
else:
await ctx.send(":x: | This command is potentially nsfw and can only be used in nsfw channel.")
@commands.command()
async def moescape(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.MOESCAPE)
)
@commands.command()
async def saber(self, ctx):
await ctx.send(
embed=await self.get_embedded_submission(subreddits.SABER)
)
@commands.command()
async def raikou(self, ctx):
if ctx.channel.is_nsfw():
await ctx.send(embed=await self.get_embedded_submission(subreddits.MAMARAIKOU))
else:
await ctx.send(":x: | This command is potentially nsfw and can only be used in nsfw channel.")
@commands.command()
async def abby(self, ctx):
await ctx.send(
embed=await self.get_embedded_search_post("fate abby")
)
@commands.command(name="reddit_search", aliases=['r', 'reddit'])
async def search_get_reddit(self, ctx, *keywords):
await ctx.send(
embed=await self.get_embedded_search_post(" ".join(keywords))
)
def setup(client):
client.add_cog(ImageCog(client))
print('ImageListener is Loaded')
|
StarcoderdataPython
|
1835869
|
import re
from collections import namedtuple
from django.conf import settings
from django.utils.module_loading import import_string
from rest_framework.exceptions import ValidationError
from caluma.core.utils import translate_value
class BaseFormatValidator:
r"""Basic format validator class to be extended by any format validator implementation.
A custom format validator class could look like this:
```
>>> from caluma.form.format_validators import BaseFormatValidator
...
...
... class CustomFormatValidator(BaseFormatValidator):
... slug = "email"
... name = {"en": "E-mail", "de": "E-Mail"}
... regex = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
... error_msg = {"en": "Not an e-mail address", "de": "Keine E-Mail adresse"}
```
"""
def __init__(self):
if not all(
[self.slug, self.regex, self.name, self.error_msg]
): # pragma: no cover
raise NotImplementedError("Missing properties!")
def validate(self, value, document):
if not re.match(self.regex, value):
raise ValidationError(translate_value(self.error_msg))
class EMailFormatValidator(BaseFormatValidator):
slug = "email"
name = {"en": "E-mail", "de": "E-Mail", "fr": "Courriel"}
regex = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
error_msg = {
"en": "Please enter a valid Email address.",
"de": "Bitte geben Sie eine gültige E-Mail-Adresse ein.",
"fr": "Veuillez entrer une addresse e-mail valide.",
}
class PhoneNumberFormatValidator(BaseFormatValidator):
slug = "phone-number"
name = {"en": "Phone number", "de": "Telefonnummer", "fr": "numéro de téléphone"}
regex = r"^[\s\/\.\(\)-]*(?:\+|0|00)(?:[\s\/\.\(\)-]*\d[\s\/\.\(\)-]*){6,20}$"
error_msg = {
"en": "Please enter a valid phone number.",
"de": "Bitte geben Sie eine gültige Telefonnummer ein.",
"fr": "Veuillez entrer un numéro de téléphone valide.",
}
base_format_validators = [EMailFormatValidator, PhoneNumberFormatValidator]
FormatValidator = namedtuple("FormatValidator", ["slug", "name", "regex", "error_msg"])
def get_format_validators(include=None, dic=False):
"""Get all FormatValidators.
:param include: List of FormatValidators to include
:param dic: Should return a dict
:return: List of FormatValidator-objects if dic False otherwise dict
"""
format_validator_classes = [
import_string(cls) for cls in settings.FORMAT_VALIDATOR_CLASSES
] + base_format_validators
if include is not None:
format_validator_classes = [
fvc for fvc in format_validator_classes if fvc.slug in include
]
if dic:
return {ds.slug: ds for ds in format_validator_classes}
return [
FormatValidator(
slug=ds.slug,
name=translate_value(ds.name),
regex=ds.regex,
error_msg=translate_value(ds.error_msg),
)
for ds in format_validator_classes
]
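# Usage sketch (slugs refer to the built-in validators above; the document
# argument is unused by the base validate() implementation):
#
# validators = get_format_validators(include=["email"], dic=True)
# validators["email"]().validate("user@example.com", document=None)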
|
StarcoderdataPython
|
8077766
|
#!/usr/bin/env python
#-----------------------------------------------------------------------------
# de2120_ex3_send_command.py
#------------------------------------------------------------------------
#
# Written by <NAME> @ SparkFun Electronics, April 2021
#
# This example demonstrates how to use the "send_command()" method to send
# arbitrary serial commands to the barcode reader. It also demonstrates the "CIDENA"
# or "Code ID Enable" function, which includes the barcode type when transmitting the
# decoded string.
#
# send_command() takes two strings as arguments, concatenates them, adds the command
# prefix "^_^" and the command suffix "." and then transmits the command to the module.
# For example, to enable matrix 2 of 5 scanning, which is done using the command
# "^_^M25ENA1." you would make the call "my_scanner.send_command("M25ENA", 1)"
#
# While it is valid to call "my_scanner.send_command("M25ENA1")", the former method
# is preferred in many cases.
#
# NOTE: you must put the module into COM mode by scanning the PORVIC barcode
# in the datasheet. This will put the module in the correct mode to receive
# and transmit serial.
#
# This package has been developed on a Raspberry Pi 4. Connect the DE2120 Barcode
# Scanner Breakout directly to your Pi using a USB-C cable
#
# Do you like this library? Help support SparkFun. Buy a board!
#
#==================================================================================
# Copyright (c) 2021 SparkFun Electronics
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#==================================================================================
# Example 3
from __future__ import print_function
import de2120_barcode_scanner
import time
import sys
def run_example():
print("\nSparkFun DE2120 Barcode Scanner Breakout Example 3")
my_scanner = de2120_barcode_scanner.DE2120BarcodeScanner()
if my_scanner.begin() == False:
print("\nThe Barcode Scanner module isn't connected correctly to the system. Please check wiring", \
file=sys.stderr)
return
print("\nScanner ready!")
print("\n")
print("\nTransmit Code ID with Barcode? (y/n)")
print("\n---------------------------------------------")
val = input("\nType 'y' or 'n' or scan a barcode: ")
if val == 'y':
print("\nCode ID will be displayed on scan")
my_scanner.send_command("CIDENA", "1")
elif val == 'n':
print("\nCode ID will NOT be displayed on scan")
my_scanner.send_command("CIDENA", "0")
else:
print("\nCommand not recognized")
scan_buffer = ""
while True:
scan_buffer = my_scanner.read_barcode()
if scan_buffer:
print("\nCode found: ")
print("\n" + str(scan_buffer))
scan_buffer = ""
time.sleep(0.02)
if __name__ == '__main__':
try:
run_example()
except (KeyboardInterrupt, SystemExit) as exErr:
print("\nEnding Example 3")
sys.exit(0)
|
StarcoderdataPython
|
281221
|
"""Exceptions for ReSpecTh Parser.
.. moduleauthor:: <NAME> <<EMAIL>>
"""
class ParseError(Exception):
"""Base class for errors."""
pass
class KeywordError(ParseError):
"""Raised for errors in keyword parsing."""
def __init__(self, *keywords):
self.keywords = keywords
def __str__(self):
return repr('Error: {}.'.format(self.keywords))
class UndefinedElementError(KeywordError):
"""Raised for undefined elements."""
def __str__(self):
return repr('Error: Element not defined.\n{}'.format(self.keywords))
class MissingElementError(KeywordError):
"""Raised for missing required elements."""
def __str__(self):
return repr('Error: Required element {} is missing.'.format(
self.keywords))
class MissingAttributeError(KeywordError):
"""Raised for missing required attribute."""
def __str__(self):
        return repr('Error: Required attribute {} of {} is missing.'.format(
            *self.keywords))
class UndefinedKeywordError(KeywordError):
"""Raised for undefined keywords."""
def __str__(self):
return repr('Error: Keyword not defined: {}'.format(self.keywords))
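# Usage sketch (the element name is hypothetical):
#
# try:
#     raise MissingElementError('experimentType')
# except ParseError as error:  # all parser errors share the ParseError base
#     print(error)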
|
StarcoderdataPython
|
161272
|
import argparse
import os
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint
from tools.fuse_conv_bn import fuse_module
from mmdet.apis import multi_gpu_test, single_gpu_test
from mmdet.core import wrap_fp16_model
from mmdet.datasets import build_dataloader, build_dataset
from mmdet.models import build_detector
'''
export PYTHONPATH=${PWD}:$PYTHONPATH
python3 tools/cal_ap.py tools/cal_ap_config.py --user_json submits/atss_r50_fpn_ms12_results.bbox.json --eval bbox
'''
def parse_args():
parser = argparse.ArgumentParser(
description='MMDet test (and eval) a model')
parser.add_argument('config', help='test config file path')
parser.add_argument('--user_json', help='user json')
parser.add_argument(
'--eval',
type=str,
nargs='+',
help='evaluation metrics, which depends on the dataset, e.g., "bbox",'
' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC')
args = parser.parse_args()
return args
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
    # put the dataset into test mode
cfg.data.test.test_mode = True
# build the dataloader
# TODO: support multiple images per gpu (only minor changes are needed)
dataset = build_dataset(cfg.data.test)
print(args.user_json)
#with open(args.user_json, 'r') as f:
#outputs = mmcv.load(args.user_json)
outputs = args.user_json
kwargs = {}
res = dataset.evaluate_json(outputs, args.eval, **kwargs)
print(res)
print(res['bbox_mAP'])
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
164192
|
<gh_stars>1-10
# Copyright (c) 2020 North Star Imaging, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import shutil
if __name__ == "__main__":
# Copy dll into current folder (required by nsiefx)
currentdir = os.path.dirname(os.path.realpath(__file__))
rootdir = os.path.dirname(os.path.dirname(currentdir))
dll = os.path.join(rootdir, "bin", "win64", "efX-SDK.dll")
shutil.copy(dll, os.path.join(currentdir, "efX-SDK.dll"))
# Copy example .nsihdr into current folder
parentdir = os.path.dirname(currentdir)
nsihdr = os.path.join(parentdir, "example.nsihdr")
shutil.copy(nsihdr, os.path.join(currentdir, "example.nsihdr"))
# Add includes directory to import path
includesdir = os.path.join(rootdir, "includes")
sys.path.append(includesdir)
import nsiefx
vol_fname = "example.nsihdr"
slice_idx = 39
slice_fname = "slice.tif"
with nsiefx.open(vol_fname) as volume:
print(volume.slice_width())
print(volume.slice_height())
print(volume.num_slices())
print(volume.vmin())
print(volume.vmax())
print(volume.voxel_size())
print(volume.data_max())
print(volume.data_min())
        slice_data = volume.read_slice(slice_idx)  # renamed to avoid shadowing the builtin 'slice'
        nsiefx.save_tif32(slice_fname, slice_data, volume.slice_height(), volume.slice_width())
print("Done")
|
StarcoderdataPython
|
8093439
|
<reponame>Sehun0819/pytea
#!/usr/bin/env python
import os
import subprocess
import argparse
import tempfile
from pathlib import Path
DEFAULT_FRONT_PATH = Path(__file__).absolute().parent / "packages"/ "pytea" / "index.js"
def parse_arg():
"""
pytorch_pgm_path : <path_to_pytorch>/<pytorch_pgm_name>.py
"""
parser = argparse.ArgumentParser("Torch2H: PyTorch Graph to H Program")
parser.add_argument("path", help="PyTorch entry file path")
parser.add_argument(
"--config", default=None, help="set path to pyteaconfig.json",
)
parser.add_argument(
"--front_path",
default=str(DEFAULT_FRONT_PATH),
help="path to constraint generator (index.js)",
)
parser.add_argument(
"--silent", action="store_true", help="do not print result (for server)"
)
parser.add_argument(
"-l",
"--log",
default=-1,
type=int,
help="severity of analysis result (0 to 3)",
)
return parser.parse_args()
def parse_log_level(args):
if 0 <= args.log <= 3:
if args.log == 0:
log_level = "--logLevel=none"
elif args.log == 1:
log_level = "--logLevel=result-only"
elif args.log == 2:
log_level = "--logLevel=reduced"
else:
log_level = "--logLevel=full"
else:
log_level = ""
return log_level
def main():
args = parse_arg()
entry_path = Path(args.path)
if not entry_path.exists():
raise Exception(f"entry path {entry_path} does not exist")
log_level = parse_log_level(args)
config = args.config
config = f"--configPath={config} " if config else ""
frontend_command = f"node {args.front_path} {entry_path} {config}{log_level}"
print(frontend_command)
subprocess.call(frontend_command, shell=True)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
4911303
|
<gh_stars>0
# -*-coding:Utf-8 -*
#--------------------------------------------------------------------------------
# jtlib: conftest.py
#
# Common test code for jtlib.
#--------------------------------------------------------------------------------
# BSD 2-Clause License
#
# Copyright (c) 2018, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#--------------------------------------------------------------------------------
from click.testing import CliRunner
import click
import pytest
@pytest.fixture(scope = 'module')
def runner():
"""Return an instance of Click's command-line runner method."""
return CliRunner()
@pytest.fixture(scope = 'module')
def context():
"""Return an instance of the JIRA tool context."""
return dict()
url_list = [
'https://jira.atlassian.com',
'https://www.example.com',
]
@pytest.fixture(scope = "module", params = url_list)
def server_url(request):
"""Different server URLs."""
return request.param
command_list = [
    'issue',
    'project',
]
@pytest.fixture(scope = 'module', params = command_list)
def command(request):
"""Different JIRA tool commands."""
return request.param
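# A sketch (not in the original file) of how a test module could consume these
# fixtures; `cli` is a hypothetical Click entry point, not defined here.
#
# def test_server_url(runner, server_url):
#     result = runner.invoke(cli, ['--server', server_url, 'project', 'list'])
#     assert result.exit_code == 0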
|
StarcoderdataPython
|
1941533
|
<gh_stars>0
#!/usr/bin/env python
#
# See top-level LICENSE file for Copyright information
#
# -*- coding: utf-8 -*-
"""
This script runs PypeIt on a set of MultiSlit images
"""
def parse_args(options=None, return_parser=False):
import argparse
parser = argparse.ArgumentParser(description='Script to run PypeIt in QuickLook on a set of '
'MOS files',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('spectrograph', type=str, help='Name of spectograph, e.g. shane_kast_blue')
parser.add_argument('full_rawpath', type=str, help='Full path to the raw files')
parser.add_argument('arc', type=str, help='Arc frame filename')
parser.add_argument('flat', type=str, help='Flat frame filename')
parser.add_argument('science', type=str, help='Science frame filename')
parser.add_argument('-b', '--box_radius', type=float,
help='Set the radius for the boxcar extraction (arcsec)')
parser.add_argument('-d', '--det', type=int, default=1,
help='Detector number. Cannot use with --slit_spat')
parser.add_argument('--ignore_headers', default=False, action='store_true',
help='Ignore bad headers?')
parser.add_argument('--user_pixflat', type=str,
help='Use a user-supplied pixel flat (e.g. keck_lris_blue)')
parser.add_argument('--slit_spat', type=str,
help='Reduce only this slit on this detector DET:SPAT_ID, e.g. 1:175')
if return_parser:
return parser
return parser.parse_args() if options is None else parser.parse_args(options)
def main(args):
import os
import numpy as np
from IPython import embed
from pypeit import pypeit
from pypeit import pypeitsetup
from pypeit.core import framematch
from pypeit import msgs
spec = args.spectrograph
# Config the run
cfg_lines = ['[rdx]']
cfg_lines += [' spectrograph = {0}'.format(spec)]
cfg_lines += [' redux_path = {0}_A'.format(os.path.join(os.getcwd(),spec))]
if args.slit_spat is not None:
msgs.info("--slit_spat provided. Ignoring --det")
else:
cfg_lines += [' detnum = {0}'.format(args.det)]
# Restrict on slit
if args.slit_spat is not None:
cfg_lines += [' slitspatnum = {0}'.format(args.slit_spat)]
# Allow for bad headers
if args.ignore_headers:
cfg_lines += [' ignore_bad_headers = True']
cfg_lines += ['[scienceframe]']
cfg_lines += [' [[process]]']
cfg_lines += [' mask_cr = False']
# Calibrations
cfg_lines += ['[baseprocess]']
cfg_lines += [' use_biasimage = False']
cfg_lines += ['[calibrations]']
# Input pixel flat?
if args.user_pixflat is not None:
cfg_lines += [' [[flatfield]]']
cfg_lines += [' pixelflat_file = {0}'.format(args.user_pixflat)]
# Reduction restrictions
cfg_lines += ['[reduce]']
cfg_lines += [' [[extraction]]']
cfg_lines += [' skip_optimal = True']
# Set boxcar radius
if args.box_radius is not None:
cfg_lines += [' boxcar_radius = {0}'.format(args.box_radius)]
cfg_lines += [' [[findobj]]']
cfg_lines += [' skip_second_find = True']
# Data files
data_files = [os.path.join(args.full_rawpath, args.arc),
os.path.join(args.full_rawpath, args.flat),
os.path.join(args.full_rawpath,args.science)]
# Setup
ps = pypeitsetup.PypeItSetup(data_files, path='./', spectrograph_name=spec,
cfg_lines=cfg_lines)
ps.build_fitstbl()
# TODO -- Get the type_bits from 'science'
bm = framematch.FrameTypeBitMask()
file_bits = np.zeros(3, dtype=bm.minimum_dtype())
file_bits[0] = bm.turn_on(file_bits[0], ['arc', 'tilt'])
file_bits[1] = bm.turn_on(file_bits[1], ['pixelflat', 'trace', 'illumflat']
if args.user_pixflat is None
else ['trace', 'illumflat'])
file_bits[2] = bm.turn_on(file_bits[2], 'science')
# PypeItSetup sorts according to MJD
# Deal with this
asrt = []
for ifile in data_files:
bfile = os.path.basename(ifile)
idx = ps.fitstbl['filename'].data.tolist().index(bfile)
asrt.append(idx)
asrt = np.array(asrt)
# Set bits
ps.fitstbl.set_frame_types(file_bits[asrt])
ps.fitstbl.set_combination_groups()
# Extras
ps.fitstbl['setup'] = 'A'
# Write
ofiles = ps.fitstbl.write_pypeit(configs='A', write_bkg_pairs=True, cfg_lines=cfg_lines)
if len(ofiles) > 1:
msgs.error("Bad things happened..")
# Instantiate the main pipeline reduction object
pypeIt = pypeit.PypeIt(ofiles[0], verbosity=2,
reuse_masters=True, overwrite=True,
logname='mos.log', show=False)
# Run
pypeIt.reduce_all()
msgs.info('Data reduction complete')
# QA HTML
msgs.info('Generating QA HTML')
pypeIt.build_qa()
return 0
|
StarcoderdataPython
|
116329
|
<reponame>vladsaveliev/sequana
"""
Some useful data sets to be used in the analysis
The command :func:`sequana.sequana_data` may be used to retrieve data from
this package. For example, a small but standard reference (phix) is used in
some NGS experiments. The file is small enough that it is provided within
sequana and its filename (full path) can be retrieved as follows::
from sequana import sequana_data
fullpath = sequana_data("phiX174.fa", "data")
Other files stored in this directory will be documented here.
"""
#: List of adapters used in various sequencing platforms
adapters = {
"adapters_netflex_pcr_free_1_fwd": "adapters_netflex_pcr_free_1_fwd.fa",
"adapters_netflex_pcr_free_1_rev": "adapters_netflex_pcr_free_1_rev.fa"
}
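# A minimal usage sketch (assumes a sequana installation; mirrors the
# sequana_data call documented above):
#
#     from sequana import sequana_data
#     fwd_adapters = sequana_data(adapters["adapters_netflex_pcr_free_1_fwd"], "data")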
|
StarcoderdataPython
|
11326097
|
"""
Testing for memory overlap.
"""
# Copyright (c) 2017, <NAME> <<EMAIL>>
# All rights reserved.
# License: BSD 3 clause see https://choosealicense.com/licenses/bsd-3-clause/
from memory_overlap import share_memory
import numpy as np
import unittest
class TestMemoryOverlap(unittest.TestCase):
def test_01(self):
x = np.arange(6)
a = x[::2]
b = x[1::2]
self.assertEqual(share_memory(a, b), False)
def test_02(self):
x = np.arange(7 * 8, dtype=np.int8).reshape(7, 8)
a = x[::2, ::3]
b = x[1::5, ::2]
self.assertEqual(share_memory(a, b), True)
def test_03(self):
x = np.arange(4 * 20).reshape(4, 20).astype(np.int8)
a = x[:, ::7]
b = x[:, 3::3]
self.assertEqual(share_memory(a, b), False)
def test_04(self):
x = np.arange(7 * 8 * 4, dtype=np.int32)
np.random.shuffle(x)
x = x.reshape(7, 8, 4)
a = x[::, fc00:db20:35b:7399::5, ::]
b = x[1::, 3::4, ::]
self.assertEqual(share_memory(a, b), True)
if __name__ == '__main__':
print('=== Test ===')
unittest.main()
|
StarcoderdataPython
|
78049
|
import tweepy
# import the consumer data
from consumer_data import consumer_key, consumer_secret
import helpers
print("=========================")
print("========= twjnt =========")
print("=========================")
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
# try first to load already gotten tokens
access_token = helpers.load_access_tokens()
if access_token:
# if yes, set access token on auth and use the api
auth.set_access_token(access_token[0], access_token[1])
api = tweepy.API(auth)
else:
print("\n\nAuthentication")
print("--------------\n")
# if not, ask the user for PIN, save the token for later use and use the api
print("Copy the following link to get the permission for your app: ")
print(auth.get_authorization_url())
pin = input("After that, copy your PIN in here: ")
try:
(access_token, access_token_secret) = auth.get_access_token(pin)
helpers.save_access_tokens(access_token, access_token_secret)
api = tweepy.API(auth)
    except tweepy.error.TweepError as err:
        print("Could not verify you with a valid PIN: %s" % err)
exit()
print("\nLogin successful. Welcome %s! :D\n\n" % api.me().screen_name)
print("What would you like to do?")
|
StarcoderdataPython
|
4819093
|
"""Vocabulary for theme-based transformer
Author: <NAME>
Email: <EMAIL>
Date: 2021/11/03
"""
import pickle
import numpy as np
import miditoolkit
import os
import math
from miditoolkit.midi import parser as mid_parser
from miditoolkit.midi import containers as ct
class Vocab(object):
def __init__(self):
"""initialize some vocabulary settings
"""
# split each beat into 4 subbeats
self.q_beat = 4
# dictionary for matching token ID to name and the other way around.
self.token2id = {}
self.id2token = {}
# midi pitch number : 1 ~ 127 (highest pitch)
self._pitch_bins = np.arange(start=1,stop=128)
# duration tokens 1~64 of self.q_beat
self._duration_bins = np.arange(start=1,stop=self.q_beat*16+1)
        # velocity tokens 1~126 (corresponding to midi velocities)
self._velocity_bins = np.arange(start=1,stop=127)
        # tempo tokens 17~194 in steps of 3 (range determined from our dataset)
self._tempo_bins = np.arange(start=17,stop=197,step=3)
# position(subbeat) tokens 0~15, indicate the relative position with in a bar
self._position_bins = np.arange(start=0,stop=16)
self.n_tokens = 0
self.token_type_base = {}
self.tracks = ["MELODY","BRIDGE","PIANO"]
self.build()
    # vocab layout (see build() below):
    # padding : 0
    # Note-On : midi pitches 1 ~ 127, one set per track
    # Note-Duration : 1 ~ 64 subbeats (q_beat = 4, i.e. 1/16-note resolution, up to 16 beats)
def build(self):
"""build our vocab
"""
self.token2id = {}
self.id2token = {}
self.n_tokens = 0
self.token2id['padding'] = 0
self.n_tokens += 1
# Note related tokens==================================================================
# Create Note-On tokens for each track
for track in self.tracks:
# Note-On
            self.token_type_base['Note-On-{}'.format(track)] = self.n_tokens
for i in self._pitch_bins:
self.token2id[ 'Note-On-{}_{}'.format(track,i) ] = self.n_tokens
self.n_tokens += 1
# Create Note-Duration tokens for each track
for track in self.tracks:
# Note-Duration
self.token_type_base['Note-Duration-{}'.format(track)] = self.n_tokens
for note_dur in self._duration_bins:
self.token2id[ 'Note-Duration-{}_{}'.format(track,note_dur) ] = self.n_tokens
self.n_tokens += 1
# Create Note-Velocity tokens for each track
for track in self.tracks:
# Note-Velocity
self.token_type_base['Note-Velocity-{}'.format(track)] = self.n_tokens
for vel in self._velocity_bins:
self.token2id[ 'Note-Velocity-{}_{}'.format(track,vel) ] = self.n_tokens
self.n_tokens += 1
# Metric related tokens==================================================================
# Tempo
self.token_type_base['Tempo'] = self.n_tokens
for tmp in self._tempo_bins:
self.token2id[ 'Tempo_{}'.format(tmp) ] = self.n_tokens
self.n_tokens += 1
# Positions
self.token_type_base['Position'] = self.n_tokens
for pos in self._position_bins:
self.token2id[ 'Position_{}'.format(pos) ] = self.n_tokens
self.n_tokens += 1
# Bar
self.token_type_base['Bar'] = self.n_tokens
self.token2id[ 'Bar' ] = self.n_tokens
self.n_tokens += 1
# Theme related tokens==================================================================
# Phrase annotation (not used in our final paper)
self.token_type_base['Phrase'] = self.n_tokens
self.token2id[ 'Phrase_Start' ] = self.n_tokens
self.n_tokens += 1
self.token2id[ 'Phrase_End' ] = self.n_tokens
self.n_tokens += 1
# Theme annotation
self.token_type_base['Theme'] = self.n_tokens
self.token2id[ 'Theme_Start' ] = self.n_tokens
self.n_tokens += 1
self.token2id[ 'Theme_End' ] = self.n_tokens
self.n_tokens += 1
for w , v in self.token2id.items():
self.id2token[v] = w
self.n_tokens = len(self.token2id)
def getPitch(self,input_event):
"""Return corresponding note pitch
if input_event is not a note, it returns -1
Args:
input_event (str or int): REMI Event Name or vocab ID
"""
if isinstance(input_event,int):
input_event = self.id2token[input_event]
elif isinstance(input_event,str):
pass
else:
try:
input_event = int(input_event)
input_event = self.id2token[input_event]
except:
raise TypeError("input_event should be int or str, input_event={}, type={}".format(input_event,type(input_event)))
if not input_event.startswith("Note-On"):
return -1
assert int(input_event.split("_")[1]) >=1 and int(input_event.split("_")[1]) <=127
return int(input_event.split("_")[1])
def midi2REMI(self,midi_path,quantize=True,trim_intro = True,trim_outro=True,include_bridge=False,extend_theme=False,bar_first=False,theme_annotations=True,verbose = False):
"""convert midi file to token representation
Args:
            midi_path (str): the path of input midi file
            quantize (bool, optional): snap note boundaries to the 1/16-note grid. Defaults to True.
trim_intro (bool, optional): ignore the intro part of the song. Defaults to True.
trim_outro (bool, optional): ignore the outro part of the song. Defaults to True.
include_bridge (bool, optional): ignore the intro part of the song. Defaults to False.
extend_theme (bool, optional): extend the theme region to at least MIN_MEL_NOTES=8 notes. Defaults to False.
bar_first (bool, optional): place Bar token in front of Theme-Start token. Defaults to False.
theme_annotations (bool, optional): including theme-realted tokens. Defaults to True.
verbose (bool, optional): print some message. Defaults to False.
Returns:
list: sequence of tokens
"""
MIN_MEL_NOTES = 8
midi_obj = mid_parser.MidiFile(midi_path)
# calculate the min step (in ticks) for REMI representation
min_step = midi_obj.ticks_per_beat * 4 / 16
# quantize
if quantize:
for i in range(len(midi_obj.instruments)):
for n in range(len(midi_obj.instruments[i].notes)):
midi_obj.instruments[i].notes[n].start = int(int(midi_obj.instruments[i].notes[n].start / min_step) * min_step)
midi_obj.instruments[i].notes[n].end = int(int(midi_obj.instruments[i].notes[n].end / min_step) * min_step)
if theme_annotations:
# select theme info track
theme_boundary_track = list(filter(lambda x: x.name=="theme info track",midi_obj.instruments))
assert len(theme_boundary_track) == 1
# parsing notes in each tracks (ignore BRIDGE)
notesAndtempos = []
midi_obj.instruments[0].notes = sorted(midi_obj.instruments[0].notes,key=lambda x: x.start)
# add notes
melody_start = sorted(midi_obj.instruments[0].notes,key=lambda x: x.start)[0].start
melody_end = sorted(midi_obj.instruments[0].notes,key=lambda x: x.start)[-1].end
for i in range(3):
if not include_bridge and midi_obj.instruments[i].name == "BRIDGE":
continue
if midi_obj.instruments[i].name == "Theme info track":
continue
notes = midi_obj.instruments[i].notes
for n in notes:
# assert (trim_intro and n.start>=melody_start or not trim_intro)
if trim_intro and n.start>=melody_start or not trim_intro:
if trim_outro and n.start<=melody_end or not trim_outro:
notesAndtempos.append({
"priority" : i+1,
"priority_1" : n.pitch,
"start_tick" : n.start,
"obj_type" : "Note-{}".format(midi_obj.instruments[i].name),
"obj" : n
})
# add tempos
for t in midi_obj.tempo_changes:
# assert (trim_intro and t.time>=melody_start or not trim_intro)
            if trim_intro and t.time>=melody_start or not trim_intro:
if trim_outro and t.time<=melody_end or not trim_outro:
notesAndtempos.append({
"priority" : 0,
"priority_1" : 0,
"start_tick" : t.time,
"obj_type" : "Tempo",
"obj" : t
})
if trim_intro and len([x for x in midi_obj.tempo_changes if x.time==melody_start]) == 0:
t = [x for x in sorted(midi_obj.tempo_changes,key= lambda z: z.time) if x.time < melody_start]
if not len(t) == 0:
t = t[-1]
notesAndtempos.append({
"priority" : 0,
"priority_1" : 0,
"start_tick" : melody_start,
"obj_type" : "Tempo",
"obj" : t
})
notesAndtempos = sorted(notesAndtempos,key=lambda x: (x["start_tick"],x["priority"],-x["priority_1"]))
if theme_annotations:
theme_boundary_track = theme_boundary_track[0]
theme_boundary_pitch = min([x.pitch for x in theme_boundary_track.notes])
theme_boundaries = [ [x.start,x.end] for x in theme_boundary_track.notes if x.pitch == theme_boundary_pitch]
assert not len(theme_boundaries) == 0
if verbose:
print(theme_boundaries)
if extend_theme:
# extend theme region 8~9
for b_i,b in enumerate(theme_boundaries[:-1]):
melody_notes = [x for x in midi_obj.instruments[0].notes if x.start>= b[0] and x.start< theme_boundaries[b_i+1][0] ]
cur_bound = 0
for x in melody_notes:
if x.start < b[1]:
cur_bound += 1
else:
break
if cur_bound + 1 >= MIN_MEL_NOTES :
continue
# try to extend
extend_idx = min(MIN_MEL_NOTES,len(melody_notes)) - 1
theme_boundaries[b_i][1] = melody_notes[extend_idx].end
b_i = 0
in_theme = False
# group
bar_segments = []
bar_ticks = midi_obj.ticks_per_beat * 4
if verbose:
print("Bar tick length: {}".format(bar_ticks))
for bar_start_tick in range(0,notesAndtempos[-1]["start_tick"],bar_ticks):
if verbose:
print("Bar {} at tick: {}".format(bar_start_tick // bar_ticks,bar_start_tick))
bar_end_tick = bar_start_tick + bar_ticks
current_bar = []
bar_objs = list(filter(lambda x: x["start_tick"] >=bar_start_tick and x["start_tick"]< bar_end_tick,notesAndtempos))
bar_objs.insert(0,{"start_tick":-1})
if not bar_first:
if theme_annotations and not in_theme and theme_boundaries[b_i][0] == bar_start_tick:
current_bar.append("Theme_Start")
in_theme = True
if verbose:
print("Theme start")
if not in_theme and trim_intro and bar_start_tick+bar_ticks < melody_start:
if verbose:
print("into trimmed")
continue
current_bar.append("Bar")
else:
if not in_theme and trim_intro and bar_start_tick+bar_ticks < melody_start:
if verbose:
print("into trimmed")
continue
current_bar.append("Bar")
if theme_annotations and not in_theme and theme_boundaries[b_i][0] == bar_start_tick:
current_bar.append("Theme_Start")
in_theme = True
if verbose:
print("Theme start")
for i,obj in enumerate(bar_objs):
if obj["start_tick"]==-1 : continue
if not obj["start_tick"] == bar_objs[i-1]["start_tick"]:
# insert Position Event
pos = (obj["start_tick"] - bar_start_tick) / midi_obj.ticks_per_beat * self.q_beat
pos_index = np.argmin(abs(pos - self._position_bins)) # use the closest position
pos = self._position_bins[pos_index]
current_bar.append("Position_{}".format(pos))
if obj["obj_type"].startswith("Note"):
track_name = obj["obj_type"].split('-')[1].upper()
# add pitch
current_bar.append("Note-On-{}_{}".format(track_name,obj["obj"].pitch))
# add duration
dur = (obj["obj"].end - obj["obj"].start) / midi_obj.ticks_per_beat * self.q_beat
dur_index = np.argmin(abs(dur - self._duration_bins)) # use the closest position
dur = self._duration_bins[dur_index]
current_bar.append("Note-Duration-{}_{}".format(track_name,dur))
# add velocity
vel_index = np.argmin(abs(obj["obj"].velocity - self._velocity_bins)) # use the closest position
vel = self._velocity_bins[vel_index]
current_bar.append("Note-Velocity-{}_{}".format(track_name,vel))
elif obj["obj_type"].startswith("Tempo"):
# tempo
tmp_index = np.argmin(abs(obj["obj"].tempo - self._tempo_bins)) # use the closest position
tmp = self._tempo_bins[tmp_index]
current_bar.append(obj["obj_type"] + "_{}".format(tmp))
else:
# theme start end
current_bar.append(obj["obj_type"])
if theme_annotations and in_theme and theme_boundaries[b_i][1] == bar_start_tick + bar_ticks:
current_bar.append("Theme_End")
in_theme = False
if verbose:
print("Theme End")
if not b_i == len(theme_boundaries) - 1:
b_i += 1
bar_segments.extend(current_bar)
output_ids = [self.token2id[x] for x in bar_segments]
return output_ids
def preprocessREMI(self,remi_sequence,always_include=False,max_seq_len=512,strict=True,verbose=False):
"""Preprocess token sequence
slicing the sequence for training our models
Args:
remi_sequence (List): the music token seqeunce
always_include (Bool): selected the data including either Theme-Start or Theme-End
max_seq_len (Int): maximum sequence length for each data
strict (Bool): the returning sequence should always include Theme-Start
Return:
{
"src" : <corressponding theme condition>,
"src_theme_binary_msk" : <corressponding theme condition's theme msk>,
"tgt_segments" : <list of target sequences>,
"tgt_segments_theme_binary_msk" : <list of target sequences theme msk>
}
"""
theme_binary_msk = []
in_theme = False
src = []
src_theme_binary_msk = []
for r in remi_sequence:
if self.id2token[r] == "Theme_Start":
in_theme = True
elif self.id2token[r] == "Theme_End":
in_theme = False
theme_binary_msk.append(int(in_theme))
for i in range(1,len(theme_binary_msk)):
theme_binary_msk[i] = theme_binary_msk[i-1]*theme_binary_msk[i] + theme_binary_msk[i]
start_first_theme = remi_sequence.index(self.token2id["Theme_Start"])
end_first_theme = remi_sequence.index(self.token2id["Theme_End"])
src = remi_sequence[start_first_theme:end_first_theme+1]
src_theme_binary_msk = theme_binary_msk[start_first_theme:end_first_theme+1]
tgt_segments = []
tgt_segments_theme_msk = []
s = 0
if strict:
theme_start_pos = [i for i in range(len(remi_sequence)) if remi_sequence[i] == self.token2id["Theme_Start"]]
for t in theme_start_pos:
tgt_segments.append(remi_sequence[t:t+max_seq_len+1])
tgt_segments_theme_msk.append(theme_binary_msk[t:t+max_seq_len+1])
else:
total_s = math.ceil(len(remi_sequence) / max_seq_len)
for x in range(0,len(remi_sequence),max_seq_len):
if always_include:
# if self.token2id["Theme_Start"] in remi_sequence[x:x+max_seq_len+1]:
if self.token2id["Theme_Start"] in remi_sequence[x:x+max_seq_len+1] or self.token2id["Theme_End"] in remi_sequence[x:x+max_seq_len+1]:
s += 1
tgt_segments.append(remi_sequence[x:x+max_seq_len+1])
tgt_segments_theme_msk.append(theme_binary_msk[x:x+max_seq_len+1])
else:
tgt_segments.append(remi_sequence[x:x+max_seq_len+1])
tgt_segments_theme_msk.append(theme_binary_msk[x:x+max_seq_len+1])
if verbose and always_include:
print("Include Theme Start {}/{}".format(s,total_s))
return {
"src" : src,
"src_theme_binary_msk" : src_theme_binary_msk,
"tgt_segments" : tgt_segments,
"tgt_segments_theme_binary_msk" : tgt_segments_theme_msk
}
def REMIID2midi(self,event_ids,midi_path,verbose = False):
"""convert tokens to midi file
The output midi file will contains 3 tracks:
            MELODY : melody notes
PIANO : accompaniment notes
Theme info track : notes indicating theme region (using note start tick and end tick as the boundary of theme region)
Args:
event_ids (list): sequence of tokens
midi_path (str): the output midi file path
verbose (bool, optional): print some message. Defaults to False.
"""
# create midi file
new_mido_obj = mid_parser.MidiFile()
new_mido_obj.ticks_per_beat = 120
# create tracks
music_tracks = {}
music_tracks["MELODY"] = ct.Instrument(program=0, is_drum=False, name='MELODY')
music_tracks["PIANO"] = ct.Instrument(program=0, is_drum=False, name='PIANO')
music_tracks["Theme info track"] = ct.Instrument(program=0, is_drum=False, name='Theme info track')
# all our generated music are 4/4
new_mido_obj.time_signature_changes.append(miditoolkit.TimeSignature(4,4,0))
ticks_per_step = new_mido_obj.ticks_per_beat / self.q_beat
# convert tokens from id to string
events = []
for x in event_ids:
events.append(self.id2token[x])
# parsing tokens
last_tick = 0
current_bar_anchor = 0
current_theme_boundary = []
motif_label_segs = []
idx = 0
first_bar = True
while(idx < len(events)):
if events[idx] == "Bar":
if first_bar:
current_bar_anchor = 0
first_bar = False
else:
current_bar_anchor += new_mido_obj.ticks_per_beat * 4
idx+=1
elif events[idx].startswith("Position"):
pos = int(events[idx].split('_')[1])
last_tick = pos * ticks_per_step + current_bar_anchor
idx += 1
elif events[idx].startswith("Tempo"):
                tmp = int(events[idx].split('_')[1])
new_mido_obj.tempo_changes.append(ct.TempoChange(
tempo=int(tmp),
time=int(last_tick)
))
idx += 1
elif events[idx].startswith("Note"):
track_name = events[idx].split("_")[0].split("-")[2]
assert track_name in music_tracks
assert events[idx].startswith("Note-On")
assert events[idx+1].startswith("Note-Duration")
assert events[idx+2].startswith("Note-Velocity")
new_note = miditoolkit.Note(
velocity=int(events[idx+2].split("_")[1]),
pitch=int(events[idx].split("_")[1]),
start=int(last_tick),
end=int(int(events[idx+1].split('_')[1]) * ticks_per_step) + int(last_tick)
)
music_tracks[track_name].notes.append(new_note)
idx += 3
elif events[idx] == "Theme_Start":
assert len(current_theme_boundary) == 0
current_theme_boundary.append(last_tick)
idx += 1
elif events[idx] == "Theme_End":
assert len(current_theme_boundary) == 1
current_theme_boundary.append(last_tick)
motif_label_segs.append(current_theme_boundary)
music_tracks["Theme info track"].notes.append(
miditoolkit.Note(
velocity=1,
pitch=1,
start=int(current_theme_boundary[0]),
end=int(current_theme_boundary[1])
)
)
current_theme_boundary = []
idx += 1
# add tracks to midi file
new_mido_obj.instruments.extend([music_tracks[ins] for ins in music_tracks])
if verbose:
print("Saving midi to ({})".format(midi_path))
# save to disk
new_mido_obj.dump(midi_path)
def __str__(self):
"""return all tokens
Returns:
str: string of all tokens
"""
ret = ""
for w,i in self.token2id.items():
ret = ret + "{} : {}\n".format(w,i)
for i,w in self.id2token.items():
ret = ret + "{} : {}\n".format(i,w)
ret += "\nTotal events #{}".format(len(self.id2token))
return ret
def __repr__(self):
"""return string all token
Returns:
            str: string of all tokens
"""
return self.__str__()
if __name__ == '__main__':
# print all tokens
myvocab = Vocab()
print(myvocab)
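    # A further usage sketch (assumes an 'example.mid' that contains a
    # "theme info track"; both the file and the track are assumptions):
    # ids = myvocab.midi2REMI('example.mid')
    # myvocab.REMIID2midi(ids, 'roundtrip.mid')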
|
StarcoderdataPython
|
3321628
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010-2016 PPMessage.
# <NAME>, <EMAIL>
# All rights reserved
#
# backend/ppemail.py
#
from ppmessage.core.constant import REDIS_HOST
from ppmessage.core.constant import REDIS_PORT
from ppmessage.core.constant import REDIS_EMAIL_KEY
from ppmessage.bootstrap.config import BOOTSTRAP_CONFIG
import tornado.ioloop
import tornado.options
import requests
import logging
import redis
import json
import sys
class MailGunWorker():
def __init__(self, app):
self.app = app
return
def config(self, email_config):
self.domain_name = email_config.get("domain_name")
self.api_url = "https://api.mailgun.net/v3/%s/messages" % self.domain_name
self.from_email = email_config.get("from_email")
self.from_name = email_config.get("from_name")
self.private_api_key = email_config.get("private_api_key")
if self.private_api_key == None or self.domain_name == None or \
self.from_email == None or self.from_name == None:
return None
return email_config
def work(self, email_request):
logging.info("email_request: %s" % str(email_request))
_to = email_request.get("to")
if not isinstance(_to, list):
logging.error("email to should be a list: %s" % str(type(_to)))
return
_subject = email_request.get("subject")
_text = email_request.get("text")
_html = email_request.get("html")
_data = {
"from": "%s <%s>" % (self.from_name, self.from_email),
"to": _to,
"subject": _subject,
"text": _text,
}
if _html != None:
_data["html"] = _html
logging.info("sending email via: %s to: %s" % (self.api_url, " ".join(_to)))
_r = requests.post(self.api_url, auth=("api", self.private_api_key), data=_data)
logging.info(_r.json())
return
class EmailWorker():
def __init__(self, app):
self.email_app = app
self.service_mapping = {
"mailgun": MailGunWorker
}
return
def work(self, email_request):
_email = BOOTSTRAP_CONFIG.get("email")
_type = _email.get("service_type")
_worker_class = self.service_mapping.get(_type)
if _worker_class == None:
logging.error("No worker for the mail service: %s" % _service_name)
return
_worker_object = _worker_class(self)
_worker_object.config(_email)
_worker_object.work(email_request)
return
class EmailApp():
def __init__(self):
self.redis = redis.Redis(REDIS_HOST, REDIS_PORT, db=1)
self.email_key = REDIS_EMAIL_KEY
self.email_worker = EmailWorker(self)
return
def send(self):
while True:
_request = self.redis.lpop(self.email_key)
if _request == None or len(_request) == 0:
return
_request = json.loads(_request)
self.email_worker.work(_request)
return
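# A minimal producer sketch (not in the original file): enqueue an email
# request in the shape expected by EmailWorker.work(); the address is illustrative.
# redis.Redis(REDIS_HOST, REDIS_PORT, db=1).rpush(REDIS_EMAIL_KEY, json.dumps({
#     "to": ["<EMAIL>"], "subject": "hello", "text": "hello world"}))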
if __name__ == "__main__":
tornado.options.parse_command_line()
_app = EmailApp()
# set the periodic check email request to send every 1000 ms
tornado.ioloop.PeriodicCallback(_app.send, 1000).start()
logging.info("Email service starting...")
tornado.ioloop.IOLoop.instance().start()
|
StarcoderdataPython
|
11287977
|
import pandas as pd

# specify the actual model start and end years (model period)
mod_start_year=2020
mod_end_year=2040
years = pd.Series(range(mod_start_year,mod_end_year+1),dtype="int")
# please specify the start and end years for the visualisations
vis_start=2020
vis_end=2040
#Fundamental dictionaries that govern naming and colour coding
#url1='./agg_col.csv'
url2='./agg_pow_col.csv'
url3='./countrycode.csv'
url4='./power_tech.csv'
url5='./tech_codes.csv'
colorcode=pd.read_csv(url5,sep=',',encoding = "ISO-8859-1")
colorcode1=colorcode.drop('colour',axis=1)
colorcode2=colorcode.drop('tech_code',axis=1)
det_col=dict([(a,b) for a,b in zip(colorcode1.tech_code,colorcode1.tech_name)])
color_dict=dict([(a,b) for a,b in zip(colorcode2.tech_name,colorcode2.colour)])
#agg1=pd.read_csv(url1,sep=',',encoding = "ISO-8859-1")
agg2=pd.read_csv(url2,sep=',',encoding = "ISO-8859-1")
#agg_col=agg1.to_dict('list')
agg_pow_col=agg2.to_dict('list')
#power_tech=pd.read_csv(url4,sep=',',encoding = "ISO-8859-1")
#t_include = list(power_tech['power_tech'])
#Country code list
country_code=pd.read_csv(url3,sep=',',encoding = "ISO-8859-1")
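# Example lookups (a sketch; "SOWND" is a hypothetical tech_code that would
# have to exist in tech_codes.csv):
# tech_name = det_col.get("SOWND")
# colour = color_dict.get(tech_name)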
|
StarcoderdataPython
|
9742153
|
<filename>ekf.py
# ************************************** #
# Extended Kalman Filter #
# ************************************** #
# by <NAME> #
# <NAME> #
# <NAME> #
# ************************************** #
# Department of Mechanical Engineering #
# University of Cincinnati #
# ************************************* #
import math
import numpy as np
from numpy import genfromtxt
import matplotlib.pyplot as plt
# Importing csv files and reading the data
true_odo = genfromtxt('true_odometry.csv', delimiter=',')
sen_odo = genfromtxt('sensor_odom.csv',delimiter=',')
sen_pos_x, sen_pos_y = sen_odo[1:,1], sen_odo[1:,2]
sen_pos_theta = sen_odo[1:,3]
true_x, true_y, true_theta = true_odo[1:,1], true_odo[1:,2], true_odo[1:,3]
v, w = true_odo[1:,4], true_odo[1:,5]
time = sen_odo[1:,0]
# Observation that we are making - x and y position
z = np.c_[sen_pos_x, sen_pos_y]
# Defining Prediction Function
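# x_t = F x_{t-1} + B u_t              (state propagation)
# P_t = G P_{t-1} G^T + Q              (covariance propagation via Jacobian G)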
def Prediction(x_t, P_t, F_t, B_t, U_t, G_t, Q_t):
x_t = F_t.dot(x_t) + B_t.dot(U_t)
P_t = (G_t.dot(P_t).dot(G_t.T)) + Q_t
return x_t, P_t
# Defining Update Function
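# K   = P H^T (H P H^T + R)^-1         (Kalman gain)
# x_t = x_t + K (z_t - H x_t)          (state correction)
# P_t = P_t - K H P_t                  (covariance correction)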
def Update(x_t, P_t, Z_t, R_t, H_t):
S = np.linalg.inv( (H_t.dot(P_t).dot(H_t.T)) + R_t )
K = P_t.dot(H_t.T).dot(S)
x_t = x_t + K.dot( Z_t - H_t.dot(x_t) )
P_t = P_t - K.dot(H_t).dot(P_t)
return x_t, P_t
# Transition Matrix
F_t = np.array([ [1, 0, 0],
[0, 1, 0],
[0, 0, 1] ])
# Initial Covariance State
P_t = 0.5 * np.identity(3)
# Process Covariance
Q_t = 1 * np.identity(3)
# Measurement Covariance
R_t = 1 * np.identity(2)
# Measurement Matrix
H_t = np.array([ [1, 0, 0],
[0, 1, 0] ])
# Initial State
x_t = np.array([[sen_pos_x[0]], [sen_pos_y[0]], [sen_pos_theta[0]] ])
kal_x, kal_y, kal_theta = [], [], []
dyn_x, dyn_y, dyn_z = [], [], []
check_dhruv = 0
threshold_x = ( 0.1 / 1 )
threshold_y = ( 0.1 / 1 )
threshold_z = ( 0.02 / 1 )
change_r = 1000
for i in range(len(time)):
if i > 0:
dt = time[i] - time[i-1]
else:
dt = 0
# Jacobian Matrix - G
G_t = np.array([ [1, 0, -v[i]*(math.sin(sen_pos_theta[i]))*dt],
[0, 1, v[i]*(math.cos(sen_pos_theta[i]))*dt],
[0, 0, 1] ])
# Input Transition Matrix - B
B_t = np.array([ [dt * (math.cos(sen_pos_theta[i])), 0],
[dt * (math.sin(sen_pos_theta[i])), 0],
[0, dt] ])
# Input to the system - v and w ( velocity and turning rate )
U_t = np.array([ [v[i]],
[w[i]] ])
# Condition : Variable Q_t and R_t
# Prediction Step
x_t, P_t = Prediction(x_t, P_t, F_t, B_t, U_t, G_t, Q_t)
dyn_x.append(x_t[0])
dyn_y.append(x_t[1])
dyn_z.append(x_t[2])
# *********************************
# Dynamic Q and R : Threshold
# *********************************
    if i > 1 and i < len(time):
if (abs(dyn_x[i] - kal_x[i-1]) > threshold_x) or \
(abs(dyn_y[i] - kal_y[i-1]) > threshold_y) or \
(abs(dyn_z[i] - kal_theta[i-1])) > threshold_z:
# Believe in the sensor more :: Q > R
R_t = (1/change_r) * np.identity(2)
print("*********")
check_dhruv = check_dhruv + 1
else:
# Believe in the process more :: R > Q
# Q_t = 10 * np.identity(3)
# # Measurement Covariance
R_t = (change_r) * np.identity(2)
# Reshaping the measurement data
Z_t = z[i].transpose()
Z_t = Z_t.reshape(Z_t.shape[0], -1)
# Update Step
x_t, P_t = Update(x_t, P_t, Z_t, R_t, H_t)
kal_x.append(x_t[0])
kal_y.append(x_t[1])
kal_theta.append(x_t[2])
print('\n')
print('*'*80)
print('\n'," Final Filter State Matrix : \n", x_t,'\n')
# For Plotting Purposes
kal_x = np.concatenate(kal_x).ravel()
kal_y = np.concatenate(kal_y).ravel()
kal_theta = np.concatenate(kal_theta).ravel()
# x = np.linspace(0, 71, 2113)
dyn_x = np.concatenate(dyn_x).ravel()
dyn_y = np.concatenate(dyn_y).ravel()
dyn_z = np.concatenate(dyn_z).ravel()
plt.figure(1)
plt.title('Estimated (Kalman) Pos X vs True Pos X', fontweight='bold')
plt.plot(time,kal_x[:],'g--')
plt.plot(time,true_x, linewidth=3)
plt.figure(2)
plt.title('Estimated (Kalman) Pos Y vs True Pos Y', fontweight='bold')
plt.plot(time,kal_y[:],'g--')
plt.plot(time,true_y, linewidth=3)
plt.figure(3)
plt.title('Estimated (Kalman) Theta vs True Theta', fontweight='bold')
plt.plot(time,kal_theta[:],'g--')
plt.plot(time,true_theta, linewidth=2)
plt.figure(4)
plt.title('Robot Position : Kalman vs True', fontweight='bold')
plt.plot(kal_x,kal_y,'g--')
plt.plot(true_x,true_y, linewidth=3)
plt.figure(5)
# plt.plot(time[1500:2213], dyn_x[1500:2213], linewidth=2, label="After Process")
plt.plot(time[0:400], kal_x[0:400], linewidth=2, label="Kalman")
plt.plot(time[0:400], true_x[0:400], linewidth=2, label="Ground Truth")
# plt.plot(time[1500:2213],sen_pos_x[1500:2213], linewidth=2, label="Sensor Input")
plt.legend(loc='best')
# **********************************
# Curve - Fitting
# **********************************
# import statsmodels.api as sm
from sklearn import linear_model
from scipy.optimize import curve_fit
start = 0
limit = 600
X = time[start:limit].reshape(-1,1)
y = sen_pos_x[start:limit].reshape(-1,1)
True_XX = true_x[start:limit]
# ***************************
# model = sm.OLS(y, X).fit()
# predictions = model.predict(X)
# model.summary()
##########################
lm = linear_model.LinearRegression()
model = lm.fit(X,y)
predictions = lm.predict(X)
score = lm.score(X, y)
print("\n\n score : ", score)
##########################
plt.figure(6)
plt.plot(X, predictions, 'r-', label='Linear Fit')
plt.plot(X, y, label="Sensor Input")
plt.plot(X, True_XX, label="Ground Truth")
plt.legend(loc="best")
# [0:1000]
plt.show()
# Comparing True Data and Kalman (Estimated) Data for position to see how close they are Statistically
std_k_x = np.std(kal_x)
std_true_x = np.std(true_x)
print(' Standard Deviation Kalman : ', std_k_x)
print(' Standard Deviation True : ', std_true_x)
mean_k_x = np.mean(kal_x)
mean_true_x = np.mean(true_x)
print(' Mean Kalman : ', mean_k_x)
print(' Mean True : ', mean_true_x, '\n')
std_k_y = np.std(kal_y)
std_true_y = np.std(true_y)
print(' Standard Deviation Kalman : ', std_k_y)
print(' Standard Deviation True : ', std_true_y)
mean_k_y = np.mean(kal_y)
mean_true_y = np.mean(true_y)
print(' Mean Kalman : ', mean_k_y)
print(' Mean True : ', mean_true_y, '\n')
std_k_theta = np.std(kal_theta)
std_true_theta = np.std(true_theta)
print(' Standard Deviation Kalman : ', std_k_theta)
print(' Standard Deviation True : ', std_true_theta)
mean_k_theta = np.mean(kal_theta)
mean_true_theta = np.mean(true_theta)
print(' Mean Kalman : ', mean_k_theta)
print(' Mean True : ', mean_true_theta, '\n')
print(check_dhruv)
|
StarcoderdataPython
|
1966507
|
<reponame>vladertel/tg_dj<gh_stars>1-10
import os
import json
import time
import tornado.ioloop
import tornado.web
import tornado.websocket
import asyncio
import logging
from prometheus_client import Gauge
from core.AbstractComponent import AbstractComponent
from core.models import Song
# noinspection PyAbstractClass,PyAttributeOutsideInit
class WebSocketHandler(tornado.websocket.WebSocketHandler):
def initialize(self, **kwargs):
self.server = kwargs.get("server")
loop = asyncio.get_event_loop()
loop.create_task(self.keep_alive())
self.active = True
def check_origin(self, _origin):
return True
def open(self):
self.server.logger.info('Websocket connected: %s', str(self.request.connection.context.address))
self.server.ws_clients.append(self)
track_dict, progress = self.server.get_current_state()
self.send("update", track_dict)
self.send("progress", progress)
def on_close(self):
self.server.logger.info('Websocket disconnected: %s', str(self.request.connection.context.address))
self.active = False
try:
self.server.ws_clients.remove(self)
except ValueError:
pass
async def keep_alive(self):
while self.active:
await asyncio.sleep(self.server.KEEP_ALIVE_INTERVAL)
self.send("keep_alive", {})
def send(self, msg, data):
line = json.dumps({msg: data})
try:
self.write_message(line)
except tornado.websocket.WebSocketClosedError:
self.server.logger.info('Lost websocket %s', str(self.request.connection.context.address))
self.on_close()
# noinspection PyAbstractClass,PyAttributeOutsideInit
class MainHandler(tornado.web.RequestHandler):
def initialize(self, **kwargs):
self.server: StatusWebServer = kwargs.get("server")
def get(self):
song, progress = self.server.get_current_state()
self.render(os.path.join(os.path.dirname(__file__), "index.html"), song_info=song, song_progress=progress,
stream_url=self.server.stream_url + '?ts=' + str(time.time()), ws_url=self.server.ws_url,
telegram_bot_name=self.server.telegram_bot_name)
class StatusWebServer(AbstractComponent):
def __init__(self, config):
self.config = config
self.logger = logging.getLogger("tg_dj.web")
self.logger.setLevel(getattr(logging, self.config.get("web_server", "verbosity", fallback="warning").upper()))
settings = {
"static_path": os.path.join(os.path.dirname(__file__), "static"),
"debug": False,
}
app = tornado.web.Application([
(r"/", MainHandler, dict(server=self)),
(r'/ws', WebSocketHandler, dict(server=self)),
], **settings)
app.listen(
port=self.config.getint("web_server", "listen_port", fallback=8080),
address=self.config.get("web_server", "listen_addr", fallback="127.0.0.1"),
)
self.core = None
self.ws_clients = []
self.KEEP_ALIVE_INTERVAL = 60
# noinspection PyArgumentList
self.mon_web_ws_clients = Gauge('dj_web_ws_clients', 'Number of websocket connections')
self.mon_web_ws_clients.set_function(lambda: len(self.ws_clients))
self.stream_url = self.config.get("web_server", "stream_url", fallback="/stream")
self.ws_url = self.config.get("web_server", "ws_url", fallback="auto")
self.telegram_bot_name = self.config.get("web_server", "telegram_bot_name", fallback="inbicst_dj_bot")
def get_name(self) -> str:
return "StatusWebServer"
def bind_core(self, core):
self.core = core
self.core.add_state_update_callback(self.update_state)
def get_current_state(self):
track = self.core.get_current_song()
track_dict = Song.to_dict(track)
progress = self.core.get_song_progress()
return track_dict, progress
def update_state(self, track: Song):
if track is not None:
self.broadcast_update(track.to_dict())
else:
self.broadcast_stop()
def broadcast_update(self, data):
for c in self.ws_clients:
c.send("update", data)
def broadcast_stop(self):
for c in self.ws_clients:
c.send("stop_playback", {})
|
StarcoderdataPython
|
9689313
|
import pytest
from slippinj.emr.job_flow.configuration import JobFlowConfigurationParser
class TestJobFlowConfigurationParser(object):
def setup_method(self, method):
self.__job_flow_configuration = JobFlowConfigurationParser()
def teardown_method(self, method):
self.__job_flow_configuration = None
def test_valid_configuration(self):
configuration = {
'name': 'test',
'release_label': 'test',
'availability_zone': 'test',
'instances': {
'ec2_key_name': 'test',
'master': {},
'core': {}
},
'tags': []
}
assert True == self.__job_flow_configuration.validate(configuration)
def test_invalid_configuration_in_parent_keys(self):
configuration = {
'name': 'test',
'availability_zone': 'test',
'instances': {
'ec2_key_name': 'test',
'master': {},
'core': {}
},
'tags': []
}
with pytest.raises(AttributeError) as e:
self.__job_flow_configuration.validate(configuration)
assert 'release_label not found in configuration file' in str(e.value)
def test_invalid_configuration_in_children_keys(self):
configuration = {
'name': 'test',
'release_label': 'test',
'availability_zone': 'test',
'instances': {
'ec2_key_name': 'test',
'core': {}
},
'tags': []
}
with pytest.raises(AttributeError) as e:
self.__job_flow_configuration.validate(configuration)
assert 'instances.master not found in configuration file' in str(e.value)
|
StarcoderdataPython
|
8159278
|
<gh_stars>1-10
import scs
from concurrent.futures import ThreadPoolExecutor
import time
"""
Set up a list of several SCS problems and map `scs.solve` over that list.
Compare the compute time of Python's serial `map` with a multithreaded map
from concurrent.futures. Also test times with and without verbose printing.
"""
workers = 2 # size of the threadpool
num_problems = 4
m = 1000 # size of L1 problem
repeat = 2 # number of times to repeat timing
ex = ThreadPoolExecutor(max_workers=workers)
data = [scs.examples.l1(m, seed=i) for i in range(num_problems)]
def time_scs(mapper, **kwargs):
""" Map `scs.solve` over the global `data` and return timing results
Maps with `mapper`, which may be a parallel map, such as
`concurrent.futures.ThreadPoolExecutor.map`
Pass `kwargs` onto `scs.solve` to, for example, toggle verbose output
"""
ts = []
for _ in range(repeat):
start = time.time()
# `mapper` will usually return a generator instantly
# need to consume the entire generator to find the actual compute time
# calling `list` consumes the generator. an empty `for` loop would also work
a = list(mapper(lambda x: scs.solve(*x, **kwargs), data))
end = time.time()
ts.append(end-start)
return min(ts)
serial = time_scs(map)
serialnotverbose = time_scs(map, verbose=False)
parallel = time_scs(ex.map)
parnotverbose = time_scs(ex.map, verbose=False)
print('Serial solve time: %f'%serial)
print('Serial (not verbose) solve time: %f'%serialnotverbose)
print('Parallel solve time: %f'%parallel)
print('Parallel (not verbose) solve time: %f'%parnotverbose)
|
StarcoderdataPython
|
4926993
|
<reponame>EVAyo/BaiduPCS-Py
import os
CPU_NUM = os.cpu_count() or 1
# Defines that should never be changed
OneK = 1024
OneM = OneK * OneK
OneG = OneM * OneK
OneT = OneG * OneK
OneP = OneT * OneK
OneE = OneP * OneK
|
StarcoderdataPython
|
9618190
|
<reponame>gauthamzz/vimana
import sys
import os
stderr = sys.stderr
sys.stderr = open(os.devnull, 'w')
import logging
import numpy as np
import keras
sys.stderr = stderr
from keras.models import load_model
from keras import backend as K
import tensorflow as tf
from PIL import Image
logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
logger = logging.getLogger(__name__)
# Define the location where the model file is
DEFAULT_MODEL_LOCATION = "model.h5"
class KerasModel(object):
def __init__(self):
""" Keras Model should be a wrapper for all machine learning models
using keras library for deep learning.
This connects with vimana, keras model is loaded from the
DEFAULT_MODEL_LOCATION
"""
# clear keras session to prevent memory error
K.clear_session()
# load the model
self.use_model(DEFAULT_MODEL_LOCATION)
def use_model(self, model):
"""
uses the model in the model location
:param model: location or name of the model, both are same.
"""
script_dir = os.path.dirname(__file__)
rel_path = "models/" + model
abs_file_path = os.path.join(script_dir, rel_path)
try:
self.current_model = model
self.model = load_model(abs_file_path)
# https://github.com/keras-team/keras/issues/2397
self.model._make_predict_function()
self.graph = tf.get_default_graph()
logger.info('Model loaded succesfully (%s)', self.model.summary())
except Exception as e:
logger.warning('Invalid model (%s): %s', type(e).__name__, e)
def get_model_output(self, input_value, model):
"""Input value is a numpy array in the form of a list and it is
to be converted to numpy array in the dimensions of the image
returns output as an integer
Only for classification, hence assuming outputs to be only integers
"""
input_value = np.asarray(input_value)
logger.debug("Input recived by model of shape")
logger.debug(input_value.shape)
if model != self.current_model:
# update the model if its not the current one in use
self.use_model(model)
try:
with self.graph.as_default():
output = self.model.predict(input_value)
except Exception as e:
logger.warning('Invalid Output (%s): %s', type(e).__name__, e)
logger.debug("Predicted output : %i", output[0].argmax(axis=0))
# output[0].argmax(axis=0) returns the value of output
# as an integer
return output[0].argmax(axis=0)
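# A minimal usage sketch (assumptions: models/model.h5 exists and the network
# expects MNIST-style 28x28x1 input; neither is guaranteed by this file):
# model = KerasModel()
# digit = model.get_model_output(np.zeros((1, 28, 28, 1)).tolist(), "model.h5")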
|
StarcoderdataPython
|
9744980
|
<filename>soco_spotify_plugin/soco_spotify.py
#!/usr/bin/env python3
# coding: utf-8
"""Spotify plugin for Sonos."""
import re
from xml.sax.saxutils import escape
from soco.plugins import SoCoPlugin
from soco.music_services import MusicService
from soco.music_services.accounts import Account
from soco.data_structures import (DidlResource, DidlAudioItem, DidlAlbum,
to_didl_string, DidlPlaylistContainer)
from soco.data_structures_entry import from_didl_string
from soco.exceptions import MusicServiceException
from spotipy import Spotify
from spotipy.oauth2 import SpotifyOAuth
spotify_services = {
"global": 2311,
"us": 3079,
}
spotify_sonos = {
"album": {
"prefix": "x-rincon-cpcontainer:1004206c",
"key": "1004206c",
"class": "object.container.album.musicAlbum",
},
"track": {
"prefix": "",
"key": "00032020",
"class": "object.item.audioItem.musicTrack",
},
"playlist": {
"prefix": "x-rincon-cpcontainer:1006206c",
"key": "1006206c",
"class": "object.container.playlistContainer",
},
}
class SpotifySocoPlugin(SoCoPlugin):
def __init__(self, soco, spotify_service, spotify_scope,
spotify_client_id, spotify_client_secret, spotify_redirect_uri):
super(SpotifySocoPlugin, self).__init__(soco)
if spotify_service not in spotify_services:
raise MusicServiceException("Unknown spotify service: '%s'" % spotify_service)
self.spotify_service = spotify_services[spotify_service]
self.sp = Spotify(auth_manager=SpotifyOAuth(client_id=spotify_client_id,
client_secret=spotify_client_secret,
redirect_uri=spotify_redirect_uri,
scope=spotify_scope))
@property
def name(self):
return 'Spotify'
def get_uri(self, spotify_title, spotify_id):
"""Get URI for spotify."""
match = re.search(r"spotify.*[:/](album|track|playlist)[:/](\w+)", spotify_id)
if not match:
return False
spotify_type = match.group(1)
encoded_uri = "spotify%3a" + match.group(1) + "%3a" + match.group(2)
enqueue_uri = spotify_sonos[spotify_type]["prefix"] + encoded_uri
metadata_template = ('<DIDL-Lite xmlns:dc="http://purl.org/dc/elements'
'/1.1/" xmlns:upnp="urn:schemas-upnp-org:metadata'
'-1-0/upnp/" xmlns:r="urn:schemas-rinconnetworks-'
'com:metadata-1-0/" xmlns="urn:schemas-upnp-org:m'
'etadata-1-0/DIDL-Lite/"><item id="{item_id}" par'
'entID="R:0/0" restricted="true"><dc:title>{item_'
'title}</dc:title><upnp:class>{item_class}</upnp:'
'class><desc id="cdudn" nameSpace="urn:schemas-ri'
'nconnetworks-com:metadata-1-0/">SA_RINCON{sn}_X_'
'#Svc{sn}-0-Token</desc></item></DIDL-Lite>')
metadata = metadata_template.format(
item_title=escape(spotify_title),
item_id=spotify_sonos[spotify_type]["key"] + encoded_uri,
item_class=spotify_sonos[spotify_type]["class"],
sn=self.spotify_service,
)
return [
("InstanceID", 0),
("EnqueuedURI", enqueue_uri),
("EnqueuedURIMetaData", metadata),
("DesiredFirstTrackNumberEnqueued", 0),
("EnqueueAsNext", 0),
]
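# A minimal usage sketch (the IP address, credentials and playlist URI are
# illustrative assumptions):
# from soco import SoCo
# plugin = SpotifySocoPlugin(SoCo('192.168.1.10'), 'global', 'user-library-read',
#                            'client_id', 'client_secret',
#                            'http://localhost:8888/callback')
# args = plugin.get_uri('Global Top 50', 'spotify:playlist:37i9dQZEVXbMDoHDwVN2tF')
# if args:
#     plugin.soco.avTransport.AddURIToQueue(args)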
|
StarcoderdataPython
|
9679578
|
<gh_stars>0
import os
import sys
from os.path import dirname
from pathlib import Path
from typing import Optional, List
def find_qemu(
engine: str,
script_paths: Optional[List[str]] = None,
search_paths: Optional[List[str]] = None
) -> Optional[Path]:
def find_executable(base_path: Path) -> Optional[Path]:
exts = os.environ.get('PATHEXT', '').split(os.path.pathsep)
for e in exts:
path = base_path / f'{engine}{e}'
if os.path.exists(path):
return path
return None
if script_paths is None:
script_paths = [__file__]
if search_paths is None:
search_paths = []
paths_to_check = []
if 'QEMU_DEV' in os.environ:
return Path(os.environ['QEMU_DEV'])
if 'QEMU_DIR' in os.environ:
paths_to_check.append(os.environ['QEMU_DIR'].rstrip('/').rstrip('\\'))
paths_to_check += search_paths
for script_path in script_paths:
look_at = dirname(script_path)
while True:
paths_to_check.append(look_at)
paths_to_check.append(look_at + '/qemu')
look_at_next = dirname(look_at)
if look_at_next == look_at:
break
look_at = look_at_next
paths_to_check.extend(os.environ.get('PATH', '').split(os.pathsep))
for p in paths_to_check:
found = find_executable(Path(p))
if found is not None:
return found
return Path(engine)
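if __name__ == '__main__':
    # Minimal demo (not part of the original module): resolve a qemu binary,
    # honouring QEMU_DEV/QEMU_DIR, script-relative paths and PATH.
    print(find_qemu('qemu-system-x86_64'))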
|
StarcoderdataPython
|
3546002
|
from enum import Enum
# u'\U00000000'
class Emoji(Enum):
digit_zero = u'\U00000030\U000020E3'
digit_one = u'\U00000031\U000020E3'
digit_two = u'\U00000032\U000020E3'
digit_three = u'\U00000033\U000020E3'
digit_four = u'\U00000034\U000020E3'
digit_five = u'\U00000035\U000020E3'
digit_six = u'\U00000036\U000020E3'
digit_seven = u'\U00000037\U000020E3'
digit_eight = u'\U00000038\U000020E3'
digit_nine = u'\U00000039\U000020E3'
emoji_digits = [Emoji.digit_zero.value,
Emoji.digit_one.value,
Emoji.digit_two.value,
Emoji.digit_three.value,
Emoji.digit_four.value,
Emoji.digit_five.value,
Emoji.digit_six.value,
Emoji.digit_seven.value,
Emoji.digit_eight.value,
Emoji.digit_nine.value]
def parse_number_to_emoji(number):
number_str = str(number)
emoji_str = ''
for digit in number_str:
emoji_str += emoji_digits[int(digit)]
return emoji_str
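if __name__ == '__main__':
    # Small demo (not part of the original module): 2021 -> keycap-digit emojis.
    print(parse_number_to_emoji(2021))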
|
StarcoderdataPython
|
3485068
|
# Copyright 2019 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ellipticcurve.ecdsa import Ecdsa
from ellipticcurve.privateKey import PrivateKey
import signature
import base64
class signAlgorithm(object):
def loadKey(self, key_str):
self.privateKey = PrivateKey.fromPem(key_str)
def getPublicKey(self):
return self.privateKey.publicKey()
def getPublicKeySerialized(self):
return self.privateKey.publicKey().toPem()
def sign_message(self, hash_t):
# Bytearray to base64
hash_b_arr = bytearray(list(hash_t))
hash_b64 = base64.b64encode(hash_b_arr)
hash_b64_str = str(hash_b64, 'utf-8')
signed = Ecdsa.sign(hash_b64_str, self.privateKey)
return signed
def verify_signature(self, hash_b64_str, decoded_signature,
verify_key):
return Ecdsa.verify(hash_b64_str, decoded_signature, verify_key)
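# A minimal round-trip sketch (assumes `pem_str` holds a PEM-encoded private
# key; the digest below is a stand-in for a real hash):
# signer = signAlgorithm()
# signer.loadKey(pem_str)
# digest = bytes(range(32))
# sig = signer.sign_message(digest)
# hash_b64_str = str(base64.b64encode(bytearray(digest)), 'utf-8')
# assert signer.verify_signature(hash_b64_str, sig, signer.getPublicKey())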
|
StarcoderdataPython
|
3484883
|
import random
FLOAT_TO_INT_MULTIPLIER = 2000000000
def generate_python_random_seed():
"""Generate a random integer suitable for seeding the Python random generator
"""
return int(random.uniform(0, 1.0) * FLOAT_TO_INT_MULTIPLIER)
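if __name__ == '__main__':
    # Small demo (not part of the original module): print one generated seed.
    print(generate_python_random_seed())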
|
StarcoderdataPython
|