"""
This module is for alternative 3D rotation formalisms besides the Quaternion, Matrix, and Euler representations provided
by the `mathutils` library. They must implement the ``to_quaternion`` method, which returns a `mathutils.Quaternion`
instance, in order to be compatible with the rest of this library. A ``from_other`` classmethod may also be useful,
in order to convert from a mathutils representation to the alternative representation.
"""
import numpy as np
from mathutils import Quaternion, Vector
from .utils import to_quat
class Spherical:
"""An alternative 3-value representation of a rotation based on spherical coordinates.
Imagine a unit sphere centered about an object. Two spherical coordinates (an azimuthal angle, henceforth theta, and
a polar angle, henceforth phi) define a point on the surface of the sphere, and a corresponding unit vector from the
center of the sphere (the object) to the point on the surface of the sphere.
First, the +Z axis of the object is rotated to this unit vector, while the XY plane of the object is aligned such
that the +X axis points in the +phi direction and the +Y axis points in the +theta direction. It may be helpful to
visualize this rotation as such: imagine that the +Z axis of the object is a metal rod attached rigidly to the
object, extending out through the surface of the sphere. Now grab the rod and use it to rotate the object such that
the rod is passing through the point on the sphere defined by theta and phi. Finally, twist the rod such that the
original "right" direction of the object (its +X axis) is pointing towards the south pole of the sphere, along the
longitude line defined by theta. Correspondingly, this should mean that the original "up" direction of the object
(its +Y axis) is pointing eastward along the latitude line defined by phi.
Next, perform a right-hand rotation of the object about the same unit vector by a third angle (henceforth called the
roll angle). In the previous analogy, this is equivalent to then twisting the metal rod counter-clockwise by the
roll angle. This configuration is the final result of the rotation.
Note: the particular alignment of the XY plane (+X is +phi and +Y is +theta) was chosen so that "zero rotation"
(aka the identity quaternion, or (0, 0, 0) Euler angles) corresponds to (theta, phi, roll) = (0, 0, 0).
Also note that this representation only uses 3 values, and thus it has singularities at the poles where theta and
the roll angle are redundant (only their sum matters).
Attributes:
* theta: The azimuthal angle, in radians
* phi: The polar angle, in radians (0 at the north pole, pi at the south pole)
* roll: The roll angle, in radians
"""
def __init__(self, theta, phi, roll):
"""
Initializes a spherical rotation object.
:param theta: The azimuthal angle, in radians
:param phi: The polar angle, in radians (0 at the north pole, pi at the south pole)
:param roll: The roll angle, in radians
"""
self.theta = theta % (2 * np.pi)
self.phi = phi % (2 * np.pi)
self.roll = roll % (2 * np.pi)
@classmethod
def from_other(cls, obj):
"""Constructs a Spherical object from a Quaternion, Euler, or Matrix rotation object from the mathutils library.
"""
if type(obj) is cls:
return obj
obj = to_quat(obj)
        # first, rotate the +Z unit vector by the input rotation to replicate the effect of rot_quat without roll_quat
z_axis = Vector((0, 0, 1))
z_axis.rotate(obj)
# calculate the inverse of rot_quat, which is the rotation that moves the new position of the unit vector to
# its original position on the +Z axis
inv_rot_quat = Quaternion(z_axis.cross(Vector((0, 0, 1))), z_axis.angle(Vector((0, 0, 1))))
# extract roll_quat by left-multiplying by the inverse of rot_quat
roll_quat = inv_rot_quat @ obj
# calculate theta and phi from the new position of the unit vector, as well as roll directly from roll_quat
theta = np.arctan2(z_axis.y, z_axis.x)
phi = np.arccos(np.clip(z_axis.z, -1, 1))
roll = roll_quat.to_euler().z - theta
return cls(theta, phi, roll)
def to_quaternion(self):
"""Returns a `mathutils.Quaternion` representation of the rotation."""
# first, rotate about the +Z axis by the roll angle plus theta to align the +X axis with +phi and the +Y axis
# with +theta
roll_quat = Quaternion((0, 0, 1), self.roll + self.theta)
# then, rotate the +Z axis to the unit vector represented by theta and phi by rotating by phi about a vector
# tangent to the sphere pointing in the +theta direction
theta_tangent = (-np.sin(self.theta), np.cos(self.theta), 0)
rot_quat = Quaternion(theta_tangent, self.phi)
# compose the two rotations and return
return rot_quat @ roll_quat
def __eq__(self, other):
return self.theta == other.theta and self.phi == other.phi and self.roll == other.roll
def __repr__(self):
return f"<Spherical (theta={self.theta}, phi={self.phi}, roll={self.roll})>"
|
import pdb
a = "aaa"
pdb.set_trace()  # execution pauses here; inspect a in the debugger, then 'c' to continue
b = "bbb"
c = "ccc"
final = a + b + c
print(final)
|
import datetime
import sqlalchemy as sa
import sqlalchemy.orm as orm
from pypi.data.modelbase import SqlAlchemyBase
from pypi.data.releases import Release
class Package(SqlAlchemyBase):
__tablename__ = 'packages'
id = sa.Column(sa.String, primary_key=True)
created_date = sa.Column(sa.DateTime, default=datetime.datetime.now)
summary = sa.Column(sa.String, nullable=True)
description = sa.Column(sa.String, nullable=True)
home_page = sa.Column(sa.String, nullable=True)
docs_url = sa.Column(sa.String, nullable=True)
package_url = sa.Column(sa.String, nullable=True)
author_name = sa.Column(sa.String, nullable=True)
author_email = sa.Column(sa.String, nullable=True, index=True)
license = sa.Column(sa.String, index=True)
    releases = orm.relationship("Release", order_by=[
Release.major_ver.desc(),
Release.minor_ver.desc(),
Release.build_ver.desc(),
], back_populates='package')
def __repr__(self):
return '<Package {}>'.format(self.id)
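# Usage sketch (hypothetical session; the real session factory lives elsewhere
# in pypi.data): the relationship above already orders releases newest-first.
def latest_release(session, package_id):
    package = session.query(Package).filter(Package.id == package_id).first()
    return package.releases[0] if package and package.releases else None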
|
# -*- coding: utf-8 -*-
import glob
import json
import os
import re
from jsonschema import validate, ValidationError
# Prohibited file names
prohibited_file_names = ["data.json", "schema.json", "misc.json", "static.json", "private.json", "include.json", "sitepolicy.json"]
# File name pattern
file_name_pattern = r"^[0-9a-zA-Z\-_]+\.json$"
file_name_p = re.compile(file_name_pattern)
files = glob.glob("./*.json")
success = True
for file in files:
print("============================")
file_name = os.path.basename(file)
print(file_name)
file_success = True
    # Is the file name one of the prohibited names?
for prohibited_file_name in prohibited_file_names:
if prohibited_file_name == file_name:
print(u"ファイル名が禁止ファイル名になっています。禁止ファイル名:" + ', '.join(prohibited_file_names))
success = False
file_success = False
    if not file_success:
continue
    # Does the file name match the pattern?
match = file_name_p.match(file_name)
if match is None:
print(u"ファイル名パターンに一致しません。パターン:" + file_name_pattern)
success = False
file_success = False
    if not file_success:
continue
    # Read the file and validate it
with open(file) as f:
df = json.load(f)
schema_file_path = df["$schema"]
with open(schema_file_path) as sf:
            # Schema check
schema = json.load(sf)
try:
validate(df, schema)
except ValidationError as e:
print(e)
success = False
file_success = False
    if not file_success:
continue
    # Duplicate check
menu_name_list = []
for menu in df["menus"]:
menu_name_list.append(menu["name"])
dup = [x for x in set(menu_name_list) if menu_name_list.count(x) > 1]
if len(dup) >= 1:
        # Duplicates found
        print(u"The menu contains duplicate names: " + ', '.join(dup))
success = False
file_success = False
    if not file_success:
continue
# Exit handling
print("============================")
if not success:
exit(1)
else:
    exit(0)
|
import time
import datetime
import schedule
import requests
from Utils.health_report_api import health_report
# Custom parameters (please fill these in)
USERNAME = ''  # unified identity authentication account
PASSWORD = ''  # unified identity authentication password
N = 1  # number of days to check in: 1 = today only, 2 = yesterday and today, and so on
webhook = ''
dingding_pattern = "【Daily check-in report:】"
def job():
a = health_report(USERNAME, PASSWORD, N)
    # Forward the message to DingTalk
dingding_report(a, webhook)
def get_current_time():
timestamp = time.time()
time_local = time.localtime(timestamp)
    # Convert to a formatted time string (e.g. 2016-05-05 20:28:54)
time_local = time.strftime("%Y-%m-%d %H:%M:%S", time_local)
return time_local
def dingding_report(message, webhook):
time_local = get_current_time()
    data = {
        "msgtype": "text",
        "text": {
            "content": dingding_pattern + ' ' + message + '\n' + time_local
        }
    }
requests.post(webhook, json=data)
return
# schedule.every().day.at("08:00").do(job)
if __name__ == '__main__':
schedule.every().day.at("07:52").do(job)
while True:
schedule.run_pending()
time.sleep(60)
|
# -*- coding: utf-8 -*-
from __future__ import annotations
from Core import Console
from Core import converter as Converter
from colorama import Fore
from typing import cast
class UserConfig:
def __init__(self, version: str, token: str, prefix: str, debug: bool, timestamp: bool, cache: bool, cache_time_delta: int):
self.version = version
self.token = token
self.prefix = prefix
self.debug = debug
self.timestamp = timestamp
self.cache = cache
self.cache_time_delta = cache_time_delta
def save(self):
'Saves this user config'
Converter.obj_to_json_file(self, "usr.cfg")
def update(self, invar: dict):
'Updates the user config with args'
        if invar is None:
            raise ValueError("invar is None")
invar["version"] = None
self.__dict__.update((k, v) for k, v in invar.items() if v is not None)
@staticmethod
def load() -> UserConfig:
'Loads user config and returns it as UserConfig class'
try:
return cast(UserConfig, Converter.json_file_to_obj("usr.cfg"))
except Exception as e:
Console.error("UserConfig (load)", f"Load Error: {e}")
return UserConfig.creation_ui("corrupted")
@staticmethod
def creation_ui(txt="missing") -> UserConfig:
'UI for creating a usr.cfg file. Returns the newly created UserConfig class.'
Console.warning("UserConfig", f"'usr.cfg' file is {txt}!")
        if not Console.yes_or_no("Do you wish to create it (Y/n)? ", Fore.LIGHTYELLOW_EX):
raise Exception("'usr.cfg' file is missing! The application cannot be started!")
while True:
# Console.clear()
Console.printc("Fields marked with '*' are essential!\n" +
"Other fields can be left empty.", Fore.LIGHTRED_EX)
token = ""
while True:
token = input("Token (*): ")
if token == "":
Console.printc("Token is not optional! Please enter your token.", Fore.LIGHTRED_EX)
else:
break
i_prefix = input("Prefix (default: '--'): ")
i_debug = input("Debug (True/False, default: False): ")
i_timestamp = input("Timestamp for console logs (True/False, default: False): ")
i_cache = input("Caching (True/False, default: True): ")
i_cache_td = input("Caching time delta (default: 1800 sec): ")
# var validation
prefix = i_prefix if i_prefix != "" else "--"
debug = Converter.str2bool(i_debug)
timestamp = Converter.str2bool(i_timestamp)
cache = Converter.str2bool(i_cache) if i_cache != "" else True
cache_td = int(i_cache_td) if i_cache_td != "" else 1800
print("-------------------------------------------------------------------\n" +
"Check if all values are correct!\n" +
f"token={token}\n" +
f"prefix={prefix}\n" +
f"debug={debug}\n" +
f"timestamp={timestamp}\n" +
f"caching={cache}\n" +
f"cache_time_delta={cache_td}\n" +
"-------------------------------------------------------------------\n")
if Console.yes_or_no("Save and continue (Y/n)? "):
usr_conf = UserConfig("v2", token, prefix, debug, timestamp, cache, cache_td)
usr_conf.save()
print("DONE!")
return usr_conf
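# Usage sketch (illustrative): load the config at startup; a missing or
# corrupted usr.cfg drops into the interactive creation_ui flow above.
if __name__ == "__main__":
    config = UserConfig.load()
    print(config.prefix, config.cache_time_delta)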
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import pytest
from packaging.version import Version
from pants.backend.plugin_development import pants_requirements
from pants.backend.plugin_development.pants_requirements import (
GenerateFromPantsRequirementsRequest,
PantsRequirementsTargetGenerator,
determine_version,
)
from pants.backend.python.pip_requirement import PipRequirement
from pants.backend.python.target_types import PythonRequirementModulesField, PythonRequirementsField
from pants.engine.addresses import Address
from pants.engine.target import GeneratedTargets
from pants.testutil.rule_runner import QueryRule, RuleRunner
@pytest.mark.parametrize(
"pants_version,expected",
(
("2.4.0.dev1", "==2.4.0.dev1"),
("2.4.0rc1", ">=2.4.0rc0,<2.5"),
("2.4.0", ">=2.4.0rc0,<2.5"),
),
)
def test_determine_version(monkeypatch, pants_version: str, expected: str) -> None:
monkeypatch.setattr(pants_requirements, "PANTS_SEMVER", Version(pants_version))
assert determine_version() == expected
def test_target_generator() -> None:
rule_runner = RuleRunner(
rules=(
*pants_requirements.rules(),
QueryRule(GeneratedTargets, [GenerateFromPantsRequirementsRequest]),
),
target_types=[PantsRequirementsTargetGenerator],
)
rule_runner.write_files(
{
"BUILD": (
"pants_requirements(name='default')\n"
"pants_requirements(name='no_testutil', testutil=False)\n"
)
}
)
generator = rule_runner.get_target(Address("", target_name="default"))
result = rule_runner.request(
GeneratedTargets, [GenerateFromPantsRequirementsRequest(generator)]
)
assert len(result) == 2
pants_req = next(t for t in result.values() if t.address.generated_name == "pantsbuild.pants")
testutil_req = next(
t for t in result.values() if t.address.generated_name == "pantsbuild.pants.testutil"
)
assert pants_req[PythonRequirementModulesField].value == ("pants",)
assert testutil_req[PythonRequirementModulesField].value == ("pants.testutil",)
assert pants_req[PythonRequirementsField].value == (
PipRequirement.parse(f"pantsbuild.pants{determine_version()}"),
)
assert testutil_req[PythonRequirementsField].value == (
PipRequirement.parse(f"pantsbuild.pants.testutil{determine_version()}"),
)
generator = rule_runner.get_target(Address("", target_name="no_testutil"))
result = rule_runner.request(
GeneratedTargets, [GenerateFromPantsRequirementsRequest(generator)]
)
assert len(result) == 1
assert next(iter(result.keys())).generated_name == "pantsbuild.pants"
|
# GPLv3 License
#
# Copyright (C) 2020 Ubisoft
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
This module defines how we handle Object identification from the server to clients.
This identification is done with a path, a string of the form A/B/C where C is a child of B and B is a child of A.
Each of A, B and C is the name_full property of an Object, so the path encodes both parenting and ids.
The issues with this strategy are:
- Parenting information is redundant across Objects: if we have A/B/C and A/B/D, the fact that B is a child of A
is expressed twice. This leads to messages that are bigger than necessary.
- '/' cannot be put in the name of an Object, but Blender allows it and we have no check for that.
We plan to change the strategy to store the name_full of the parent in the command instead.
"""
import logging
from mixer.share_data import share_data
import bpy
logger = logging.getLogger(__name__)
def get_or_create_path(path, data=None) -> bpy.types.Object:
index = path.rfind("/")
if index != -1:
share_data.pending_parenting.add(path) # Parenting is resolved after consumption of all messages
# Create or get object
elem = path[index + 1 :]
ob = share_data.blender_objects.get(elem)
if not ob:
logger.info(f"get_or_create_path: creating bpy.data.objects[{elem}] for path {path}")
ob = bpy.data.objects.new(elem, data)
share_data._blender_objects[ob.name_full] = ob
return ob
def get_or_create_object_data(path, data):
return get_or_create_path(path, data)
def get_object_path(obj):
path = obj.name_full
while obj.parent:
obj = obj.parent
if obj:
path = obj.name_full + "/" + path
return path
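# Worked example (illustrative names): for an object C parented C -> B -> A,
#   get_object_path(C_object)          returns "A/B/C"
#   get_or_create_path("A/B/C", mesh)  returns bpy.data.objects["C"], creating
#                                      it if needed and deferring the parenting.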
|
"""
Distance and Similarity Measures
Different measures of distance or similarity for different types of analysis.
"""
|
"""Test module for stack.py"""
import unittest
from random import randrange
from stack import Stack
class TestStack(unittest.TestCase):
def setUp(self):
self.stack = Stack()
def test_push(self):
self.stack.push(randrange(10))
self.assertEqual(len(self.stack), 1)
self.stack.push(randrange(10))
self.assertEqual(len(self.stack), 2)
def test_pop(self):
self.assertRaises(IndexError, self.stack.pop)
self.stack.push('a')
self.stack.push('b')
self.stack.push('c')
popped_item = self.stack.pop()
self.assertEqual(popped_item, 'c')
self.assertEqual(len(self.stack), 2)
def test_peek(self):
self.assertRaises(IndexError, self.stack.peek)
self.stack.push('a')
self.stack.push('b')
self.stack.push('c')
peeked_item = self.stack.peek()
self.assertEqual(peeked_item, 'c')
self.assertEqual(len(self.stack), 3)
def test_is_empty(self):
self.assertTrue(self.stack.is_empty())
self.stack.push(randrange(10))
self.assertFalse(self.stack.is_empty())
def test_search(self):
self.stack.push('a')
self.stack.push('b')
self.stack.push('c')
index = self.stack.find('b')
self.assertEqual(index, 1)
index = self.stack.find('c')
self.assertEqual(index, 2)
def test_str(self):
hello = ['h', 'e', 'l', 'l', 'o']
for char in hello:
self.stack.push(char)
self.assertEqual(str(hello), str(self.stack))
hello.append('world')
self.stack.push('world')
self.assertEqual(str(hello), str(self.stack))
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import csv
import sys
fund_symbols = {
'73935A104': 'QQQ'
}
def import_transactions(paths):
rows = []
for p in paths:
with open(p) as f:
reader = csv.reader(f)
x = [[col.strip() for col in row] for row in reader
if len(row) > 10 and len(row[0].split('/')) == 3]
x.reverse()
rows += x
for row in rows:
date = parse_date(row[0])
action = row[1]
symbol = fund_symbols.get(row[2], row[2])
count = parse_amount(row[5])
price = parse_amount(row[6])
fee1 = parse_amount(row[7])
fee2 = parse_amount(row[8])
interest = parse_amount(row[9])
amount = parse_amount(row[10])
if action.startswith('DIVIDEND RECEIVED'):
print(date + '|i|' + symbol + '|' + format_amount(amount))
elif action.startswith('NAME CHANGED'):
# This only happened for QQQ and wasn't very useful
pass
elif action.startswith('TRANSFERRED'):
print(date + '|d|Transfer|' + format_amount(amount))
elif (action.startswith('YOU BOUGHT') or
action.startswith('REINVESTMENT')):
if symbol != 'FDRXX':
# Buying and selling of FDRXX isn't listed
print(date + '|b|' + symbol + '|' + format_amount(count) +
'|' + format_amount(price) + '|' +
format_amount(fee1 + fee2))
elif (action.startswith('YOU SOLD') or
action.startswith('IN LIEU OF FRX SHARE')):
print(date + '|s|' + symbol + '|' + format_amount(abs(count)) +
'|' + format_amount(price) + '|' +
format_amount(fee1 + fee2))
else:
print('#', row)
def parse_amount(amount):
return float(amount) if amount else 0
def parse_date(date):
values = date.split('/')
return '-'.join([values[2], values[0], values[1]])
def format_amount(amount):
return ('%.8f' % amount).rstrip('0').rstrip('.')
if __name__ == '__main__':
import_transactions(sys.argv[1:])
|
import logging
import inspect
import os
log = logging.getLogger( __name__ )
import galaxy.jobs.rules
from .rule_helper import RuleHelper
DYNAMIC_RUNNER_NAME = "dynamic"
DYNAMIC_DESTINATION_ID = "dynamic_legacy_from_url"
class JobMappingException( Exception ):
def __init__( self, failure_message ):
self.failure_message = failure_message
class JobNotReadyException( Exception ):
def __init__( self, job_state=None, message=None ):
self.job_state = job_state
self.message = message
class JobRunnerMapper( object ):
"""
    This class is responsible for managing the mapping of jobs
    (in the form of job_wrappers) to job runner URL strings.
"""
def __init__( self, job_wrapper, url_to_destination, job_config ):
self.job_wrapper = job_wrapper
self.url_to_destination = url_to_destination
self.job_config = job_config
def __get_rule_modules( self ):
unsorted_module_names = self.__get_rule_module_names( )
## Load modules in reverse order to allow hierarchical overrides
## i.e. 000_galaxy_rules.py, 100_site_rules.py, 200_instance_rules.py
module_names = sorted( unsorted_module_names, reverse=True )
modules = []
for rule_module_name in module_names:
try:
module = __import__( rule_module_name )
for comp in rule_module_name.split( "." )[1:]:
module = getattr( module, comp )
modules.append( module )
except BaseException, exception:
exception_str = str( exception )
message = "%s rule module could not be loaded: %s" % ( rule_module_name, exception_str )
log.debug( message )
continue
return modules
def __get_rule_module_names( self ):
rules_dir = galaxy.jobs.rules.__path__[0]
names = []
for fname in os.listdir( rules_dir ):
if not( fname.startswith( "_" ) ) and fname.endswith( ".py" ):
rule_module_name = "galaxy.jobs.rules.%s" % fname[:-len(".py")]
names.append( rule_module_name )
return names
def __invoke_expand_function( self, expand_function ):
function_arg_names = inspect.getargspec( expand_function ).args
app = self.job_wrapper.app
possible_args = { "job_id" : self.job_wrapper.job_id,
"tool" : self.job_wrapper.tool,
"tool_id" : self.job_wrapper.tool.id,
"job_wrapper" : self.job_wrapper,
"rule_helper": RuleHelper( app ),
"app" : app }
actual_args = {}
# Populate needed args
for possible_arg_name in possible_args:
if possible_arg_name in function_arg_names:
actual_args[ possible_arg_name ] = possible_args[ possible_arg_name ]
# Don't hit the DB to load the job object if not needed
if "job" in function_arg_names or "user" in function_arg_names or "user_email" in function_arg_names or "resource_params" in function_arg_names:
job = self.job_wrapper.get_job()
history = job.history
user = job.user
user_email = user and str(user.email)
if "job" in function_arg_names:
actual_args[ "job" ] = job
if "user" in function_arg_names:
actual_args[ "user" ] = user
if "user_email" in function_arg_names:
actual_args[ "user_email" ] = user_email
if "resource_params" in function_arg_names:
                # Find the dynamically inserted resource parameters and give
                # them to the rule.
app = self.job_wrapper.app
param_values = job.get_param_values( app, ignore_errors=True )
resource_params = {}
try:
resource_params_raw = param_values[ "__job_resource" ]
if resource_params_raw[ "__job_resource__select" ].lower() in [ "1", "yes", "true" ]:
for key, value in resource_params_raw.iteritems():
#if key.startswith( "__job_resource_param__" ):
# resource_key = key[ len( "__job_resource_param__" ): ]
# resource_params[ resource_key ] = value
resource_params[ key ] = value
except KeyError:
pass
actual_args[ "resource_params" ] = resource_params
return expand_function( **actual_args )
def __convert_url_to_destination( self, url ):
"""
Job runner URLs are deprecated, but dynamic mapper functions may still
be returning them. Runners are expected to be able to convert these to
destinations.
This method calls
JobHandlerQueue.DefaultJobDispatcher.url_to_destination, which in turn
calls the url_to_destination method for the appropriate runner.
"""
dest = self.url_to_destination( url )
dest['id'] = DYNAMIC_DESTINATION_ID
return dest
def __determine_expand_function_name( self, destination ):
# default look for function with name matching an id of tool, unless one specified
expand_function_name = destination.params.get('function', None)
if not expand_function_name:
for tool_id in self.job_wrapper.tool.all_ids:
if self.__last_rule_module_with_function( tool_id ):
expand_function_name = tool_id
break
return expand_function_name
def __get_expand_function( self, expand_function_name ):
matching_rule_module = self.__last_rule_module_with_function( expand_function_name )
if matching_rule_module:
expand_function = getattr( matching_rule_module, expand_function_name )
return expand_function
else:
raise Exception( "Dynamic job runner cannot find function to expand job runner type - %s" % expand_function_name )
def __last_rule_module_with_function( self, function_name ):
        # self.rule_modules is sorted in reverse order, so find the first
        # module with the function
for rule_module in self.__get_rule_modules( ):
if hasattr( rule_module, function_name ):
return rule_module
return None
def __handle_dynamic_job_destination( self, destination ):
expand_type = destination.params.get('type', "python")
if expand_type == "python":
expand_function_name = self.__determine_expand_function_name( destination )
expand_function = self.__get_expand_function( expand_function_name )
job_destination = self.__invoke_expand_function( expand_function )
if not isinstance(job_destination, galaxy.jobs.JobDestination):
job_destination_rep = str(job_destination) # Should be either id or url
if '://' in job_destination_rep:
job_destination = self.__convert_url_to_destination(job_destination_rep)
else:
job_destination = self.job_config.get_destination(job_destination_rep)
return job_destination
else:
raise Exception( "Unhandled dynamic job runner type specified - %s" % expand_type )
def __cache_job_destination( self, params, raw_job_destination=None ):
if raw_job_destination is None:
raw_job_destination = self.job_wrapper.tool.get_job_destination( params )
#raw_job_destination_id_or_tag = self.job_wrapper.tool.get_job_destination_id_or_tag( params )
if raw_job_destination.runner == DYNAMIC_RUNNER_NAME:
job_destination = self.__handle_dynamic_job_destination( raw_job_destination )
else:
job_destination = raw_job_destination
#job_destination_id_or_tag = raw_job_destination_id_or_tag
self.cached_job_destination = job_destination
#self.cached_job_destination_id_or_tag = job_destination_id_or_tag
def get_job_destination( self, params ):
"""
Cache the job_destination to avoid recalculation.
"""
if not hasattr( self, 'cached_job_destination' ):
self.__cache_job_destination( params )
return self.cached_job_destination
def cache_job_destination( self, raw_job_destination ):
self.__cache_job_destination( None, raw_job_destination=raw_job_destination )
return self.cached_job_destination
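# Illustrative only: the kind of dynamic rule function this mapper discovers in
# modules under galaxy/jobs/rules/. __invoke_expand_function passes exactly the
# arguments a rule names; the tool id and destination ids here are hypothetical.
def _example_dynamic_rule( tool_id, user_email ):
    if tool_id == "bwa" and user_email and user_email.endswith( "@example.org" ):
        return "high_memory"
    return "local"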
|
#!/usr/bin/python
import os
import sys
run = os.system
def extract(line):
p1 = line.find(">")
p2 = line.rfind("<")
return line[p1+1:p2].strip()
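# e.g. extract('<seg id="1"> hello </seg>') -> 'hello'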
def sgm2plain(src_plain, trg_sgm):
    "Convert SGM format to plain format"
    fin_src_plain = open(src_plain, "r")
    fout = open(trg_sgm, "w")
    # keep only the contents of <seg> lines
    for line in fin_src_plain:
        if line.startswith('<seg'):
            print(extract(line), file=fout)
    fout.close()
    fin_src_plain.close()
if __name__ == "__main__" :
if (len(sys.argv) != 3) :
print("exe in_plain out_sgm", file=sys.stderr)
sys.exit(0)
src_plain = sys.argv[1]
trg_sgm = sys.argv[2]
sgm2plain(src_plain, trg_sgm)
|
from datetime import date
from dateutil.relativedelta import relativedelta
START_DATE = date(2018, 11, 1)
MIN_DAYS_TO_COUNT_AS_MONTH = 10
MONTHS_PER_YEAR = 12
def calc_months_passed(year, month, day):
"""Construct a date object from the passed in arguments.
If this fails due to bad inputs reraise the exception.
Also if the new date is < START_DATE raise a ValueError.
Then calculate how many months have passed since the
START_DATE constant. We suggest using dateutil.relativedelta!
One rule: if a new month is >= 10 (MIN_DAYS_TO_COUNT_AS_MONTH)
days in, it counts as an extra month.
For example:
date(2018, 11, 10) = 9 days in => 0 months
date(2018, 11, 11) = 10 days in => 1 month
date(2018, 12, 11) = 1 month + 10 days in => 2 months
date(2019, 12, 11) = 1 year + 1 month + 10 days in => 14 months
etc.
See the tests for more examples.
Return the number of months passed int.
"""
target_date = date(year, month, day)
delta = relativedelta(target_date, START_DATE)
    if target_date < START_DATE:
        raise ValueError("date is before START_DATE")
number_of_months = 0
number_of_months += delta.years * MONTHS_PER_YEAR
number_of_months += delta.months
    if delta.days >= MIN_DAYS_TO_COUNT_AS_MONTH:
number_of_months += 1
return number_of_months
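# Quick sanity checks mirroring the docstring examples above:
if __name__ == "__main__":
    assert calc_months_passed(2018, 11, 10) == 0
    assert calc_months_passed(2018, 11, 11) == 1
    assert calc_months_passed(2018, 12, 11) == 2
    assert calc_months_passed(2019, 12, 11) == 14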
|
# This is a fork of waf_unit_test.py supporting BDE-style unit tests.
from __future__ import print_function
import fnmatch
import os
import sys
import time
from waflib import Utils
from waflib import Task
from waflib import Logs
from waflib import Options
from waflib import TaskGen
from bdebuild.common import sysutil
from bdebuild.waf import lcov
testlock = Utils.threading.Lock()
test_runner_path = os.path.join(sysutil.repo_root_path(), 'bin',
'bde_runtest.py')
@TaskGen.feature('cxx', 'c')
@TaskGen.after_method('process_use')
def add_coverage(self):
if self.bld.env['with_coverage']:
if getattr(self, 'uselib', None):
if 'GCOV' not in self.uselib:
self.uselib += ['GCOV']
else:
self.uselib = ['GCOV']
@TaskGen.feature('test')
@TaskGen.after_method('apply_link')
def make_test(self):
"""Create the unit test task.
    There can be only one unit test task per task generator.
"""
if getattr(self, 'link_task', None):
self.create_task('utest', self.link_task.outputs)
class utest(Task.Task):
"""Execute a unit test
"""
color = 'PINK'
after = ['vnum', 'inst']
vars = []
def runnable_status(self):
"""Return whether the test can be run.
Execute the test if the option ``--test run`` has been used.
"""
skip_test = Options.options.test not in ('run', 'changed')
if skip_test:
return Task.SKIP_ME
ret = super(utest, self).runnable_status()
if ret == Task.SKIP_ME:
if Options.options.test == 'run':
return Task.RUN_ME
return ret
def get_testcmd(self):
testcmd = [
sys.executable, test_runner_path,
'--verbosity=%s' % Options.options.test_v,
'--timeout=%s' % Options.options.test_timeout,
'-j%s' % Options.options.test_j,
self.testdriver_node.abspath()
]
if Options.options.test_junit:
testcmd += ['--junit=%s-junit.xml' %
self.testdriver_node.abspath()]
if Options.options.valgrind:
testcmd += [
'--valgrind',
'--valgrind-tool=%s' % Options.options.valgrind_tool
]
return testcmd
def run(self):
"""Execute the test.
The execution is always successful, but the results are stored on
``self.generator.bld.utest_results`` for postprocessing.
"""
self.testdriver_node = self.inputs[0]
try:
fu = getattr(self.generator.bld, 'all_test_paths')
except AttributeError:
# this operation may be performed by at most #maxjobs
fu = os.environ.copy()
lst = []
for g in self.generator.bld.groups:
for tg in g:
if getattr(tg, 'link_task', None):
s = tg.link_task.outputs[0].parent.abspath()
if s not in lst:
lst.append(s)
def add_path(dct, path, var):
dct[var] = os.pathsep.join(Utils.to_list(path) +
[os.environ.get(var, '')])
if Utils.is_win32:
add_path(fu, lst, 'PATH')
elif Utils.unversioned_sys_platform() == 'darwin':
add_path(fu, lst, 'DYLD_LIBRARY_PATH')
add_path(fu, lst, 'LD_LIBRARY_PATH')
else:
add_path(fu, lst, 'LD_LIBRARY_PATH')
self.generator.bld.all_test_paths = fu
cwd = self.testdriver_node.parent.abspath()
testcmd = self.get_testcmd()
start_time = time.time()
proc = Utils.subprocess.Popen(testcmd, cwd=cwd, env=fu,
stderr=Utils.subprocess.STDOUT,
stdout=Utils.subprocess.PIPE)
stdout = proc.communicate()[0]
end_time = time.time()
if not isinstance(stdout, str):
stdout = stdout.decode(sys.stdout.encoding or 'iso8859-1')
tup = (self.testdriver_node, proc.returncode, stdout,
end_time - start_time, self.generator.source[0])
self.generator.utest_result = tup
testlock.acquire()
try:
bld = self.generator.bld
Logs.debug("ut: %r", tup)
try:
bld.utest_results.append(tup)
except AttributeError:
bld.utest_results = [tup]
finally:
testlock.release()
def print_test_summary(ctx):
"""Display an execution summary.
Args:
ctx (BuildContext): The build context.
Returns:
Number of test failures and total number of tests.
"""
def get_time(seconds):
m, s = divmod(seconds, 60)
if m == 0:
return '%dms' % (seconds * 1000)
else:
return '%02d:%02d' % (m, s)
lst = getattr(ctx, 'utest_results', [])
Logs.pprint('CYAN', 'Test Summary')
total = len(lst)
tfail = len([x for x in lst if x[1]])
Logs.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
for (f, code, out, t, _) in lst:
if not code:
if ctx.options.show_test_out:
Logs.pprint('YELLOW', '[%s (TEST)] <<<<<<<<<<' % f.abspath())
Logs.info(out, extra={'c1': '', 'c2': ''})
Logs.pprint('YELLOW', '>>>>>>>>>>')
else:
msg = '%s%s%s %s(%s)%s' % (
Logs.colors.GREEN, f.abspath(), Logs.colors.NORMAL,
Logs.colors.YELLOW, get_time(t), Logs.colors.NORMAL)
Logs.info(msg, extra={'c1': '', 'c2': ''})
Logs.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
for (f, code, out, _, _) in lst:
if code:
Logs.pprint('RED', '[%s (TEST)] <<<<<<<<<<' % f.abspath())
Logs.info(out, extra={'c1': '', 'c2': ''})
Logs.pprint('RED', '>>>>>>>>>>')
return tfail, total
def generate_coverage_report(ctx):
"""Generate a test coverage report.
Args:
ctx (BuildContext): The build context.
Returns:
True if successful.
"""
lst = getattr(ctx, 'utest_results', [])
obj_dirs = []
src_dirs = []
for (tst, _, _, _, src) in lst:
if tst.parent.abspath() not in obj_dirs:
obj_dirs.append(tst.parent.abspath())
src_dirs.append(src.parent.abspath())
tmp_dir_node = ctx.bldnode.make_node('_test_coverage')
tmp_dir_node.mkdir()
if ctx.options.coverage_out:
test_coverage_out_path = ctx.options.coverage_out
else:
test_coverage_out_path = tmp_dir_node.make_node('report').abspath()
return lcov.generate_coverage_report(
obj_dirs, src_dirs, ctx.path.abspath(), ctx.bldnode.abspath(),
tmp_dir_node.abspath(), test_coverage_out_path, ctx.env['LCOV'],
ctx.env['GENHTML'])
def remove_gcda_files(ctx):
"""Remove gcda coverage files generated from previous test run.
Info about the types of gcov data files:
https://gcc.gnu.org/onlinedocs/gcc/Gcov-Data-Files.html
"""
Logs.info('Removing leftover gcda files...')
matches = []
for root, dirnames, filenames in os.walk(ctx.bldnode.abspath()):
for filename in fnmatch.filter(filenames, '*.gcda'):
matches.append(os.path.join(root, filename))
for f in matches:
os.remove(f)
def post_build_fun(ctx):
is_success = True
num_test_failures, num_tests = print_test_summary(ctx)
error_msg = ''
if num_test_failures > 0:
error_msg += '%d/%d tests have failed.' % (num_test_failures,
num_tests)
is_success = False
else:
Logs.info('All tests passed.')
if ctx.env['with_coverage']:
is_coverage_success = generate_coverage_report(ctx)
if not is_coverage_success:
is_success = False
error_msg += '\nFailed to generate coverage report.'
if not is_success:
ctx.fatal('%s (%s)' % (error_msg, str(ctx.log_timer)))
def build(ctx):
if ctx.options.test == 'run':
if ctx.env['with_coverage']:
remove_gcda_files(ctx)
ctx.add_post_fun(post_build_fun)
def configure(ctx):
if ctx.options.with_coverage:
if ctx.env.COMPILER_CC == 'gcc':
ctx.check(cxxflags=['-fprofile-arcs', '-ftest-coverage'],
stlib=['gcov'],
uselib_store='GCOV', mandatory=True)
gcov_search_paths = os.environ['PATH'].split(os.pathsep)
for gcc_path in ctx.env.CC:
gcov_search_paths.insert(0, os.path.dirname(gcc_path))
ctx.find_program('gcov', path_list=gcov_search_paths)
ctx.find_program('lcov')
ctx.find_program('genhtml')
ctx.env.LCOV = ctx.env.LCOV + ['--gcov-tool',
ctx.env.GCOV[0]]
else:
ctx.fatal('Coverage test is not supported on this compiler.')
ctx.env['with_coverage'] = ctx.options.with_coverage
def options(ctx):
"""Provide the command-line options.
"""
grp = ctx.get_option_group('configure options')
grp.add_option('--with-coverage', action='store_true', default=False,
help='generate a test coverage report using lcov',
dest='with_coverage')
grp = ctx.get_option_group('build and install options')
grp.add_option('--test', type='choice',
choices=('none', 'build', 'run', 'changed'),
default='none',
help="whether to build and run test drivers "
"(none/build/run/changed) [default: %default]. "
"none: don't build or run tests, "
"build: build tests but don't run them, "
"run: build and run tests, "
"changed: run rebuilt tests only",
dest='test')
grp.add_option('--test-v', type='int', default=0,
help='verbosity level of test output [default: %default]',
dest='test_v')
grp.add_option('--test-j', type='int', default=4,
help='amount of parallel jobs used by the test runner '
'[default: %default]. '
'This value is independent of the number of jobs '
'used by waf itself.',
dest='test_j')
grp.add_option('--show-test-out', action='store_true', default=False,
help='show output of tests even if they pass',
dest='show_test_out')
grp.add_option('--test-timeout', type='int', default=200,
help='test driver timeout [default: %default]',
dest='test_timeout')
grp.add_option('--test-junit', action='store_true', default=False,
help='create jUnit-style test results files for '
'test drivers that are executed',
dest='test_junit')
grp.add_option('--coverage-out', type='str', default=None,
help='output directory of the test coverage report',
dest='coverage_out')
grp.add_option('--valgrind', action='store_true', default=False,
help='use valgrind to run test drivers',
dest='valgrind')
grp.add_option('--valgrind-tool', type='choice', default='memcheck',
choices=('memcheck', 'helgrind', 'drd'),
help='use valgrind tool (memchk/helgrind/drd) '
'[default: %default]',
dest='valgrind_tool')
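# Typical invocations (illustrative), given the options defined above:
#   waf configure --with-coverage
#   waf build --test=run --show-test-out --test-j=8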
# -----------------------------------------------------------------------------
# Copyright 2015 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------- END-OF-FILE -----------------------------------
|
from numba import njit
from seam.add.generic import addOneColumnWithEnergyProvided
from seam.energy.energy import computeDeepEnergy
from seam.seamfinding.gencol import generateColumn
from seam.utils import transposeImg, transposeGray
@njit(parallel=True,nogil=True)
def deepAddOneColumn(npimg,npgray, npnew, pos, gdratio, heat):
energy, lastDir = generateColumn(computeDeepEnergy(npimg,npgray, gdratio, heat))
return addOneColumnWithEnergyProvided(npimg,energy,lastDir,npnew, pos)
@njit(parallel=True,nogil=True)
def deepAddOneRow(npimg,npgray,npnew, pos, gdratio,heat):
npimgt = transposeImg(npimg)
npnewt = transposeImg(npnew)
npgrayt = transposeGray(npgray)
heatt = transposeGray(heat)
post = transposeGray(pos)
ret, retnew, retpos = deepAddOneColumn(npimgt, npgrayt, npnewt, post, gdratio, heatt)
    return transposeImg(ret), transposeImg(retnew), transposeGray(retpos)
|
import unittest
from check_solution import check_solution
class Test_Case_Check_Solution(unittest.TestCase):
def test_check_solution(self):
        self.assertTupleEqual(check_solution('RGBY', 'GGRR'), (1, 1))
|
from django.urls import path
from django.contrib.auth import views as auth_views
from .views import ProdutoList, ProdutoDetail, CategoriaList, CategoriaDetail, \
UserList, UserDetail
urlpatterns = [
# Produtos
path('produtos/', ProdutoList.as_view(), name='produto-list'),
path('produtos/<int:pk>/', ProdutoDetail.as_view(), name='produto-detail'),
# Categorias
path('categorias/', CategoriaList.as_view(), name='categoria-list'),
path('categorias/<int:pk>/', CategoriaDetail.as_view(), name='categoria-detail'),
# User
path('users/', UserList.as_view(), name='usuario-list'),
path('users/<int:pk>/', UserDetail.as_view(), name='usuario-detail'),
]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import codecs
try:
from setuptools import setup, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, Command # noqa
from distutils.command.install import INSTALL_SCHEMES
os.environ["CYME_NO_EVAL"] = "yes"
import cyme as distmeta
os.environ.pop("CYME_NO_EVAL", None)
sys.modules.pop("cyme", None)
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
src_dir = "cyme"
def fullsplit(path, result=None):
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
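# e.g. fullsplit(os.path.join("cyme", "bin", "tools")) -> ["cyme", "bin", "tools"]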
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
SKIP_EXTENSIONS = [".pyc", ".pyo", ".swp", ".swo"]
def is_unwanted_file(filename):
for skip_ext in SKIP_EXTENSIONS:
if filename.endswith(skip_ext):
return True
return False
for dirpath, dirnames, filenames in os.walk(src_dir):
    # Prune dirnames that start with '.' in place so os.walk skips them
    # (deleting entries while enumerating would skip the following one).
    dirnames[:] = [d for d in dirnames if not d.startswith(".")]
for filename in filenames:
if filename.endswith(".py"):
packages.append('.'.join(fullsplit(dirpath)))
elif is_unwanted_file(filename):
pass
else:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in
filenames]])
class RunTests(Command):
description = "Run the django test suite from the tests dir."
user_options = []
extra_env = {}
extra_args = []
def run(self):
for env_name, env_value in self.extra_env.items():
os.environ[env_name] = str(env_value)
this_dir = os.getcwd()
testproj_dir = os.path.join(this_dir, "tests")
os.chdir(testproj_dir)
sys.path.append(testproj_dir)
from django.core.management import execute_manager
os.environ["DJANGO_SETTINGS_MODULE"] = os.environ.get(
"DJANGO_SETTINGS_MODULE", "settings")
settings_file = os.environ["DJANGO_SETTINGS_MODULE"]
settings_mod = __import__(settings_file, {}, {}, [''])
prev_argv = list(sys.argv)
try:
sys.argv = [__file__, "test"] + self.extra_args
execute_manager(settings_mod, argv=sys.argv)
finally:
sys.argv = prev_argv
def initialize_options(self):
pass
def finalize_options(self):
pass
class CIRunTests(RunTests):
extra_args = ["--with-coverage3", "--with-xunit",
"--cover3-xml", "--xunit-file=nosetests.xml",
"--cover3-xml-file=coverage.xml"]
if os.path.exists("README.rst"):
long_description = codecs.open("README.rst", "r", "utf-8").read()
else:
long_description = "See http://github.com/celery/cyme"
setup(
name='cyme',
version=distmeta.__version__,
description=distmeta.__doc__,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
platforms=["any"],
license="BSD",
packages=packages,
data_files=data_files,
zip_safe=False,
install_requires=[
"cell",
"eventlet",
"dnspython",
"Django",
"django-celery>=2.3.1",
"requests",
"dictshield",
"progressbar",
"unipath",
],
cmdclass={"test": RunTests,
"citest": CIRunTests},
classifiers=[
"Development Status :: 4 - Beta",
"Framework :: Django",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: POSIX",
"Topic :: Communications",
"Topic :: System :: Distributed Computing",
"Topic :: Software Development :: Libraries :: Python Modules",
],
entry_points={
"console_scripts": [
"cyme-branch = cyme.bin.cyme_branch:cyme_branch",
"cyme = cyme.bin.cyme:cyme",
"cyme-list-branches = cyme.bin.cyme_list_branches:main"],
},
long_description=long_description,
)
|
import math
import os.path
import pygame
import random
main_dir = os.path.split(os.path.abspath(__file__))[0]
resource_path = main_dir + os.path.sep + "resources" + os.path.sep
def norm(x, y):
"""Calculates norm of vector (x, y)."""
return math.sqrt(x ** 2 + y ** 2)
def vector_to(speed, from_x, from_y, target_x, target_y):
"""Creates a vector of length `speed` in wanted direction."""
x = target_x - from_x
y = target_y - from_y
s = speed / norm(x, y)
return x * s, y * s
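# e.g. vector_to(10, 0, 0, 3, 4) -> (6.0, 8.0)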
class Board:
"""The game world.
The camera corner is at the offset: `self.screen_x`"""
def __init__(self, win_width, win_height):
self.win_width = win_width
self.win_height = win_height
self.width = 1200
self.height = 1200
self.screen_x = (self.width - win_width) / 2
self.screen_y = (self.height - win_height) / 2
self.limit_x = win_width / 2
self.limit_y = win_height / 2
def graph_position_of(self, board_x, board_y):
"""Pixel position of this board coordinate"""
x = board_x - self.screen_x
y = board_y - self.screen_y
return x, y
def board_position_of(self, graph_x, graph_y):
"""Board coordinate of this pixel position"""
return self.screen_x + graph_x, self.screen_y + graph_y
def set_screen_position(self, board_x, board_y):
"""Adjusts camera to center on `(board_x, board_y)`"""
self.screen_x = board_x - self.win_width / 2
self.screen_y = board_y - self.win_height / 2
if self.screen_x < 0:
self.screen_x = 0
elif self.screen_x + self.win_width > self.width:
self.screen_x = self.width - self.win_width
if self.screen_y < 0:
self.screen_y = 0
elif self.screen_y + self.win_height > self.height:
self.screen_y = self.height - self.win_height
class GameOverScreen(pygame.sprite.Sprite):
"""Game over text sprite."""
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load(resource_path + "gameover.png").convert()
self.rect = self.image.get_rect()
class GraphObject(pygame.sprite.Sprite):
"""Abstract class for any sprite object"""
    def __init__(self, image, board):
        pygame.sprite.Sprite.__init__(self)
        self.image = image  # keep the surface passed in; subclasses pass pre-converted images
        self.rect = self.image.get_rect()
self.vx = 0
self.vy = 0
self.board_x = 0
self.board_y = 0
self.board = board
# self.check()
(self.rect.x, self.rect.y) = self.board.graph_position_of(self.board_x, self.board_y)
def set_board_position(self, board_p):
"""Moves the sprite to this position"""
(self.board_x, self.board_y) = board_p
self.update(0)
def check(self):
"""Makes sure the object cannot wander off outside the board."""
if self.board_x < 0:
self.board_x = 0
elif self.board_x + self.rect.width > self.board.width:
self.board_x = self.board.width - self.rect.width
if self.board_y < 0:
self.board_y = 0
elif self.board_y + self.rect.height > self.board.height:
self.board_y = self.board.height - self.rect.height
def update(self, time):
"""Move and update board and graphical position."""
self.board_x = (time * self.vx + self.board_x)
self.board_y = (time * self.vy + self.board_y)
self.check()
(self.rect.x, self.rect.y) = self.board.graph_position_of(self.board_x, self.board_y)
class Background(GraphObject):
"""Background flower. Could be extended to also have grass, etc."""
image = pygame.image.load(resource_path + "background1.png")
    def __init__(self, board):
        GraphObject.__init__(self, Background.image.convert(), board)
class AbstractMonster(GraphObject):
"""Abstract class for monsters. Contains common elements such as hitpoints and attack."""
def __init__(self, image, board, max_hp, speed):
GraphObject.__init__(self, image.convert(), board)
self.hp = max_hp
self.attack_wait = 3
def take_damage(self, hp):
"""Take damage. Return True if died."""
self.hp = self.hp - hp
return self.hp <= 0
def update(self, time):
"""Update attack cooldown"""
GraphObject.update(self, time)
self.attack_wait = self.attack_wait - time
def attack(self):
"""Check attack cooldown, returns damage dealt."""
if self.attack_wait <= 0:
self.attack_wait = 2
print("Ouch!")
return 1
else:
return 0
class CreepyMonster(AbstractMonster):
"""A really creepy monster that follows the player around. Looks creepy too."""
image = pygame.image.load(resource_path + "monster2.png")
speed = 220
def __init__(self, board, target):
AbstractMonster.__init__(self, CreepyMonster.image, board, 3, 250)
self.target = target
def update(self, time):
"""Adjust heading to follow the target object."""
AbstractMonster.update(self, time)
(self.vx, self.vy) = vector_to(CreepyMonster.speed, self.board_x, self.board_y, self.target.board_x,
self.target.board_y)
class SillyMonster(AbstractMonster):
"""Silly monster that ignores any other objects. Switches direction at random."""
image = pygame.image.load(resource_path + "monster1.png")
speed = 150
def __init__(self, board):
AbstractMonster.__init__(self, SillyMonster.image, board, 5, SillyMonster.speed)
self.countdown = random.uniform(5, 7)
self.random_decision()
def update(self, time):
"""Update switch direction timer."""
AbstractMonster.update(self, time)
self.countdown = self.countdown - time
if self.countdown < 0:
self.random_decision()
self.countdown = random.uniform(5, 7)
def random_decision(self):
"""Change walking direction"""
(self.vx, self.vy) = vector_to(SillyMonster.speed, 0, 0, random.uniform(-1, 1), random.uniform(-1, 1))
class Character(GraphObject):
"""Class of the player"""
image = pygame.image.load(resource_path + "character.png")
speed = 200
def __init__(self, board):
GraphObject.__init__(self, Character.image.convert(), board)
self.horizontal_dir = 0
self.vertical_dir = 0
self.hp = 3
def stop(self, xdir, ydir):
"""Stop moving in specified directions. Is called on arrow key release.
`xdir` can take values (-1, 0, 1)."""
if self.horizontal_dir == xdir:
self.horizontal_dir = 0
if self.vertical_dir == ydir:
self.vertical_dir = 0
self.fix_speed()
def move(self, xdir, ydir):
"""Move in specified directions. Is called on arrow key press."""
if xdir != 0:
self.horizontal_dir = xdir
if ydir != 0:
self.vertical_dir = ydir
self.fix_speed()
def fix_speed(self):
"""Adjust speed according to input from arrow keys."""
if self.horizontal_dir == 0 and self.vertical_dir == 0:
self.vx = 0
self.vy = 0
else:
(self.vx, self.vy) = vector_to(Character.speed, 0, 0, self.horizontal_dir, self.vertical_dir)
def take_damage(self, damage):
"""Was hit by a monster. Take some damage. Returns True if died."""
self.hp = self.hp - damage
return self.hp <= 0
class Projectile(GraphObject):
"""Projectile class."""
image = pygame.image.load(resource_path + "projectile.png")
speed = 500
max_time = 2
    def __init__(self, board):
        GraphObject.__init__(self, Projectile.image.convert(), board)
self.time_travelled = 0
self.terminated = False
def set_target(self, target_x, target_y):
"""Sets target. Will not change direction afterwards."""
(self.vx, self.vy) = vector_to(Projectile.speed, self.rect.x, self.rect.y, target_x, target_y)
def update(self, time):
"""Should self-terminate after some time has passed."""
GraphObject.update(self, time)
self.time_travelled = self.time_travelled + time
if self.time_travelled > Projectile.max_time:
self.terminated = True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2021/9/9 11:02 PM
# @Author : LeiXueWei
# @CSDN/Juejin/Wechat: 雷学委
# @XueWeiTag: CodingDemo
# @File : argsparser.py
# @Project : pypi_seed
import getopt
import os
import sys
from pypi_seed.setting import VERSION
def print_error(msg):
print("\033[0;31;40m%s\033[0m" % msg)
def show_all_samples():
print("option1: ")
print("pypiseed --project demo_project --author testuser --dir=/tmp ")
print("option2: ")
print("ppc --project demo_project --author testuser --dir=/tmp ")
print("option3: ")
print("ppc -p demo_project -a testuser -d /tmp ")
print("option4: ")
print("ppc -p demo_project -a testuser -d /tmp --cli ")
def show_sample_run():
print("option1: ")
print("pypiseed --project demo_project --author testuser --dir=/tmp ")
print("option2: ")
print("ppc -p demo_project -a testuser -d /tmp ")
def show_version():
print('pypi-seed version %s' % VERSION)
    print('CLIs:')
print('\tpypiseed')
print('\tppc')
def show_help():
show_version()
print('usage:')
print('\t-h, --help: print help message.')
    print('\t-v, --version: print version information.')
print('\t-p, --project: your desired project name')
print('\t-d, --dir: where to save the sample project after code-generation')
print('\t-a, --author: the author information')
print('\t--cli: generate sample cli-project')
print("===========================================")
show_all_samples()
show_about()
def show_about():
print("===========================================")
print("pypi_seed #种子项目")
print("欢迎关注公众号【雷学委】【孤寒者】【布小禅】,加入Python开发者阵营!")
print("Welcome to subscribe wechat-media【雷学委】【孤寒者】【布小禅】and join python group!")
print("Upgrade cmd: pip install pypi-seed --upgrade")
print("Further queries please contact qq:【Python全栈技术学习交流】Click this link=> https://jq.qq.com/?_wv=1027&k=ISjeG32x ")
print("===========================================")
def args2dict():
argv = sys.argv[1:]
verbose = False
try:
opts, args = getopt.getopt(argv, "hvn:p:d:a:",
["help",
"version",
"cli",
"verbose",
"project=",
"name=",
"dir=",
"author=", ])
except Exception as e:
print("parameters: %s " % argv)
raise ValueError("Looks like missing value, please check usage by '-h'. Current error : %s " % str(e))
project = None
path = None
author = None
cli = False
print("opts is %s" % opts)
for opt, arg in opts:
if opt in ['-h', '--help']:
show_help()
return None
if opt in ['-v', '--version']:
show_version()
return None
if opt in ['--verbose']:
verbose = True
if opt in ['--cli']:
cli = True
if opt in ['-p', '-n', '--name', '--project']:
print("project: %s" % arg)
project = arg
elif opt in ['-a', '--author']:
print("author: %s" % arg)
author = arg
elif opt in ['-d', '--dir']:
print("directory: %s" % arg)
path = arg
if project is None:
print_error("Missing project, please input project with '-p' or '--project', e.g. -p my_project")
show_sample_run()
return None
if author is None:
print_error("Missing author, please input author with '-a' or '--author', e.g. -a testuser")
show_sample_run()
return None
if path is None:
path = os.getcwd()
print("path is not given, so will use default as current directory : %s" % path)
return dict(name=project, author=author, dir=path, verbose=verbose, with_cli=cli)
if __name__ == "__main__":
print(args2dict())
|
import subprocess
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
class BrowserHelper(object):
def __init__(self):
option = webdriver.ChromeOptions()
option.add_argument("--start-maximized")
# start of headless mode config
option.add_argument("--headless")
option.add_argument("--disable-gpu")
option.add_argument("--disable-dev-shm-usage")
option.add_argument("--no-sandbox")
option.add_argument("--window-size=1920,1080")
option.add_argument("--no-first-run")
option.add_argument("--no-default-browser-check")
option.add_argument("--ignore-certificate-errors")
#end of headless mode config
self.ChromeDriver = webdriver.Chrome(r'chromedriver.exe', chrome_options=option)
self.implicit_wait = 50
def access_to_url(self, url):
self.ChromeDriver.get(url)
def input_to_google_textbox(self, keyword):
try:
            search_box = self.ChromeDriver.find_element(By.CSS_SELECTOR, ".gLFyf.gsfi")  # compound class name, so a CSS selector rather than By.CLASS_NAME
search_box.send_keys(keyword)
search_box.send_keys(Keys.ENTER)
WebDriverWait(self.ChromeDriver, self.implicit_wait)
        except Exception:
print("Search on Google SE can not be performed")
def verify_google_serp_display(self):
serp = self.ChromeDriver.find_element(By.ID, "search")
if (serp is not None):
return True
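# Usage sketch (assumes chromedriver.exe beside the script and a matching
# Chrome build; the method names are the ones defined above):
if __name__ == "__main__":
    helper = BrowserHelper()
    helper.access_to_url("https://www.google.com")
    helper.input_to_google_textbox("selenium headless")
    print(helper.verify_google_serp_display())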
|
## Program: VMTK
## Language: Python
## Date: January 10, 2018
## Version: 1.4
## Copyright (c) Richard Izzo, Luca Antiga, All rights reserved.
## See LICENSE file for details.
## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.
## Note: this code was contributed by
## Richard Izzo (Github @rlizzo)
## University at Buffalo
import pytest
import vmtk.vmtkimagebinarize as binarize
def test_binarize_default_parameters(aorta_image, compare_images):
name = __name__ + '_test_binarize_default_parameters.mha'
binarizer = binarize.vmtkImageBinarize()
binarizer.Image = aorta_image
binarizer.Execute()
assert compare_images(binarizer.Image, name) == True
def test_binarize_simple_threshold(aorta_image, compare_images):
name = __name__ + '_test_binarize_simple_threshold.mha'
binarizer = binarize.vmtkImageBinarize()
binarizer.Image = aorta_image
binarizer.Threshold = 500
binarizer.Execute()
assert compare_images(binarizer.Image, name) == True
def test_binarize_modify_lower_label(aorta_image, compare_images):
name = __name__ + '_test_binarize_modify_lower_label.mha'
binarizer = binarize.vmtkImageBinarize()
binarizer.Image = aorta_image
binarizer.Threshold = 500
binarizer.LowerLabel = -1
binarizer.Execute()
assert compare_images(binarizer.Image, name) == True
def test_binarize_modify_upper_label(aorta_image, compare_images):
name = __name__ + '_test_binarize_modify_upper_label.mha'
binarizer = binarize.vmtkImageBinarize()
binarizer.Image = aorta_image
binarizer.Threshold = 500
binarizer.UpperLabel = 7
binarizer.Execute()
assert compare_images(binarizer.Image, name) == True
def test_binarize_modify_upper_and_lower_label(aorta_image, compare_images):
name = __name__ + '_test_binarize_modify_upper_and_lower_label.mha'
binarizer = binarize.vmtkImageBinarize()
binarizer.Image = aorta_image
binarizer.Threshold = 500
binarizer.UpperLabel = 7
binarizer.LowerLabel = 3
binarizer.Execute()
assert compare_images(binarizer.Image, name) == True
|
# -*- coding: utf-8 -*-
"""Example for GAM with Poisson Model and PolynomialSmoother
This example was written as a test case.
The data generating process is chosen so the parameters are well identified
and estimated.
Created on Fri Nov 04 13:45:43 2011
Author: Josef Perktold
"""
from __future__ import print_function
from statsmodels.compat.python import lrange, zip
import time
import numpy as np
#import matplotlib.pyplot as plt
np.seterr(all='raise')
from scipy import stats
from statsmodels.sandbox.gam import AdditiveModel
from statsmodels.sandbox.gam import Model as GAM #?
from statsmodels.genmod.families import family
from statsmodels.genmod.generalized_linear_model import GLM
np.random.seed(8765993)
#seed is chosen for nice result, not randomly
#other seeds are pretty off in the prediction or end in overflow
#DGP: simple polynomial
order = 3
sigma_noise = 0.1
nobs = 1000
#lb, ub = -0.75, 3#1.5#0.75 #2.5
lb, ub = -3.5, 3
x1 = np.linspace(lb, ub, nobs)
x2 = np.sin(2*x1)
x = np.column_stack((x1/x1.max()*1, 1.*x2))
exog = (x[:,:,None]**np.arange(order+1)[None, None, :]).reshape(nobs, -1)
idx = lrange((order+1)*2)
del idx[order+1]
exog_reduced = exog[:,idx] #remove duplicate constant
y_true = exog.sum(1) #/ 4.
z = y_true #alias check
d = x
y = y_true + sigma_noise * np.random.randn(nobs)
example = 3  # 2 runs the binomial branch below, 3 runs the Poisson branch
if example == 2:
print("binomial")
f = family.Binomial()
mu_true = f.link.inverse(z)
#b = np.asarray([scipy.stats.bernoulli.rvs(p) for p in f.link.inverse(y)])
b = np.asarray([stats.bernoulli.rvs(p) for p in f.link.inverse(z)])
b.shape = y.shape
m = GAM(b, d, family=f)
    tic = time.time()
    m.fit(b)
    toc = time.time()
    print(toc - tic)
#for plotting
yp = f.link.inverse(y)
p = b
if example == 3:
print("Poisson")
f = family.Poisson()
#y = y/y.max() * 3
yp = f.link.inverse(z)
#p = np.asarray([scipy.stats.poisson.rvs(p) for p in f.link.inverse(y)], float)
p = np.asarray([stats.poisson.rvs(p) for p in f.link.inverse(z)], float)
p.shape = y.shape
m = GAM(p, d, family=f)
    tic = time.time()
    m.fit(p)
    toc = time.time()
    print(toc - tic)
for ss in m.smoothers:
print(ss.params)
if example > 1:
import matplotlib.pyplot as plt
plt.figure()
for i in np.array(m.history[2:15:3]): plt.plot(i.T)
plt.figure()
plt.plot(exog)
#plt.plot(p, '.', lw=2)
plt.plot(y_true, lw=2)
y_pred = m.results.mu # + m.results.alpha #m.results.predict(d)
plt.figure()
plt.subplot(2,2,1)
plt.plot(p, '.')
plt.plot(yp, 'b-', label='true')
plt.plot(y_pred, 'r-', label='GAM')
plt.legend(loc='upper left')
plt.title('gam.GAM Poisson')
counter = 2
for ii, xx in zip(['z', 'x1', 'x2'], [z, x[:,0], x[:,1]]):
sortidx = np.argsort(xx)
#plt.figure()
plt.subplot(2, 2, counter)
plt.plot(xx[sortidx], p[sortidx], 'k.', alpha=0.5)
plt.plot(xx[sortidx], yp[sortidx], 'b.', label='true')
plt.plot(xx[sortidx], y_pred[sortidx], 'r.', label='GAM')
plt.legend(loc='upper left')
plt.title('gam.GAM Poisson ' + ii)
counter += 1
res = GLM(p, exog_reduced, family=f).fit()
#plot component, compared to true component
x1 = x[:,0]
x2 = x[:,1]
f1 = exog[:,:order+1].sum(1) - 1 #take out constant
f2 = exog[:,order+1:].sum(1) - 1
plt.figure()
#Note: need to correct for the constant, which is indeterminately distributed
#plt.plot(x1, m.smoothers[0](x1)-m.smoothers[0].params[0]+1, 'r')
#better would be subtract f(0) m.smoothers[0](np.array([0]))
plt.plot(x1, f1, linewidth=2)
plt.plot(x1, m.smoothers[0](x1)-m.smoothers[0].params[0], 'r')
plt.figure()
plt.plot(x2, f2, linewidth=2)
plt.plot(x2, m.smoothers[1](x2)-m.smoothers[1].params[0], 'r')
plt.show()
|
import torch
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
from sklearn.decomposition import PCA
def PCA_sklearn(X):
"""Обчислення PCA за допомогою sklearn"""
pca = PCA(n_components=2)
pca.fit(X)
print('sklearn')
print(pca.components_)
print(pca.explained_variance_)
print(pca.mean_)
print()
return {'components': pca.components_,
'explained_variance': pca.explained_variance_,
'mean': pca.mean_}
def PCA_pytorch(X, k=2, center=True):
"""Обчислення PCA за допомогою PyTorch"""
n = X.size()[0]
ones = torch.ones(n).view([n, 1])
h = ((1 / n) * torch.mm(ones, ones.t())) if center else torch.zeros(n * n).view([n, n])
H = torch.eye(n) - h
X_center = torch.mm(H.double(), X.double())
u, s, v = torch.svd(X_center)
    # flip the sign of the remaining axes to match sklearn's sign convention
    # (principal axes are only determined up to sign)
    v[1:] = v[1:] * -1
components = v[:k].numpy()
explained_variance = torch.mul(s[:k], s[:k]) / (n - 1)
explained_variance = explained_variance.numpy()
mean = [X[:, 0].mean().numpy().item(), X[:, 1].mean().numpy().item()]
print('PyTorch')
print(components)
print(explained_variance)
print(mean)
print()
return {'components': components,
'explained_variance': explained_variance,
'mean': mean}
def draw_vector(v0, v1, ax=None):
ax = ax or plt.gca()
arrowprops = dict(arrowstyle='->', linewidth=2, shrinkA=0, shrinkB=0)
ax.annotate('', v1, v0, arrowprops=arrowprops)
def plot_data(X, explained_variance, components, mean):
"""Візуалізація PCA"""
plt.scatter(X['total_bill'], X['size'], alpha=0.2)
for length, vector in zip(explained_variance, components):
v = vector * 3 * np.sqrt(length)
draw_vector(mean, mean + v)
plt.xlabel('total_bill')
plt.ylabel('size')
plt.axis('equal')
plt.show()
# initialize the dataset
tips = sns.load_dataset("tips") # 'total_bill', 'smoker', 'time', 'size'
X = tips[['total_bill', 'size']]
# compute PCA with sklearn
res_sklearn = PCA_sklearn(X)
plot_data(X, res_sklearn['explained_variance'], res_sklearn['components'], res_sklearn['mean'])
# compute PCA with PyTorch
res_pytorch = PCA_pytorch(torch.tensor(X.values))
plot_data(X, res_pytorch['explained_variance'], res_pytorch['components'], res_pytorch['mean'])
# denoising: project the noisy data onto the first principal component and back
noisy = np.random.normal(X, 0.25)
plt.scatter(noisy[:, 0], noisy[:, 1], marker='>', c='g', alpha=0.5)
pca = PCA(1)
pca = pca.fit(noisy)
components = pca.transform(noisy)
filtered = pca.inverse_transform(components)
plt.scatter(filtered[:, 0], filtered[:, 1], c='r', alpha=0.5)
plt.show()
|
from __future__ import absolute_import
from __future__ import unicode_literals
import os
from datetime import datetime
from django.test import SimpleTestCase
from corehq.apps.app_manager.tests.util import TestXmlMixin
from corehq.form_processor.interfaces.processor import FormProcessorInterface
from corehq.form_processor.utils import convert_xform_to_json
from phonelog.utils import SumoLogicLog
class TestSumologic(SimpleTestCase, TestXmlMixin):
root = os.path.dirname(__file__)
file_path = ('data',)
def setUp(self):
self.domain = 'test_domain'
self.received_on = datetime.utcnow()
def _get_xform(self, filename):
xform = FormProcessorInterface(self.domain).new_xform(convert_xform_to_json(self.get_xml(filename)))
xform.received_on = self.received_on
return xform
def test_log_error(self):
xform = self._get_xform('log_subreport')
compiled_log = SumoLogicLog(self.domain, xform).log_subreport()
expected_log = (
"[log_date=2018-02-13T15:19:30.622-05] [log_submission_date={received}] [log_type=maintenance] "
"[domain={domain}] [username=t1] [device_id=014915000230428] [app_version=260] "
"[cc_version=2.43] [msg=Succesfully submitted 1 device reports to server.]"
).format(domain=self.domain, received=self.received_on)
self.assertEqual(expected_log, compiled_log)
def test_usererror(self):
xform = self._get_xform('usererror_subreport')
compiled_log = SumoLogicLog(self.domain, xform).user_error_subreport()
expected_log = (
"[log_date=2018-02-22T17:21:21.201-05] [log_submission_date={received}] [log_type=error-config] "
"[domain={domain}] [username=t1] [device_id=014915000230428] [app_version=260] "
"[cc_version=2.43] [msg=This is a test user error] [app_id=73d5f08b9d55fe48602906a89672c214] "
"[user_id=37cc2dcdb1abf5c16bab0763f435e6b7] [session=session] [expr=an expression]"
).format(domain=self.domain, received=self.received_on)
self.assertEqual(expected_log, compiled_log)
def test_forceclose(self):
xform = self._get_xform('forceclose_subreport')
compiled_log = SumoLogicLog(self.domain, xform).force_close_subreport()
expected_log = (
"[log_date=2018-02-22T17:21:21.232-05] [log_submission_date={received}] [log_type=forceclose] "
"[domain={domain}] [username=t1] [device_id=014915000230428] [app_version=260] "
"[cc_version=2.43] "
"""[msg=java.lang.RuntimeException: Unable to start activity ComponentInfo{{org.commcare.dalvik.debug/org.commcare.activities.MenuActivity}}: java.lang.RuntimeException
at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2416)
at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2476)
at android.app.ActivityThread.-wrap11(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1344)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:148)
at android.app.ActivityThread.main(ActivityThread.java:5417)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:726)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:616)
Caused by: java.lang.RuntimeException
at org.commcare.activities.MenuActivity.onCreateSessionSafe(MenuActivity.java:35)
at org.commcare.activities.SessionAwareHelper.onCreateHelper(SessionAwareHelper.java:21)
at org.commcare.activities.SessionAwareCommCareActivity.onCreate(SessionAwareCommCareActivity.java:20)
at android.app.Activity.performCreate(Activity.java:6251)
at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1107)
at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2369)
... 9 more] [app_id=73d5f08b9d55fe48602906a89672c214] """
"[user_id=37cc2dcdb1abf5c16bab0763f435e6b7] [session=readable_session] [device_model=Nexus 7]"
).format(domain=self.domain, received=self.received_on)
self.assertEqual(expected_log, compiled_log)
|
import unittest
from unittest.mock import Mock
# ATS
from ats.topology import Device
from genie.metaparser.util.exceptions import SchemaEmptyParserError, \
SchemaMissingKeyError
from genie.libs.parser.iosxe.show_static_routing import ShowIpStaticRoute, \
ShowIpv6StaticDetail
# ============================================
# unit test for 'show ip static route'
# =============================================
class test_show_ip_static_route(unittest.TestCase):
'''
unit test for show ip static route
'''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_output_1 = {'execute.return_value': '''
Codes: M - Manual static, A - AAA download, N - IP NAT, D - DHCP,
G - GPRS, V - Crypto VPN, C - CASA, P - Channel interface processor,
B - BootP, S - Service selection gateway
DN - Default Network, T - Tracking object
L - TL1, E - OER, I - iEdge
D1 - Dot1x Vlan Network, K - MWAM Route
PP - PPP default route, MR - MRIPv6, SS - SSLVPN
H - IPe Host, ID - IPe Domain Broadcast
U - User GPRS, TE - MPLS Traffic-eng, LI - LIIN
IR - ICMP Redirect
Codes in []: A - active, N - non-active, B - BFD-tracked, D - Not Tracked, P - permanent
Static local RIB for default
M 10.1.1.0/24 [1/0] via GigabitEthernet2.2 4.0.0.2 [A]
M [3/0] via GigabitEthernet1 192.168.1.1 [N]
M 20.1.1.0/24 [3/0] via GigabitEthernet1 192.168.1.1 [A]
'''
}
golden_parsed_output_1 = {
'vrf':{
'default':{
'address_family': {
'ipv4': {
'routes': {
'10.1.1.0/24': {
'route': '10.1.1.0/24',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': True,
'next_hop': '4.0.0.2',
'outgoing_interface': 'GigabitEthernet2.2',
'preference': 1,
},
2: {
'index': 2,
'active': False,
'next_hop': '192.168.1.1',
'outgoing_interface': 'GigabitEthernet1',
'preference': 3,
},
},
},
},
'20.1.1.0/24': {
'route': '20.1.1.0/24',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': True,
'next_hop': '192.168.1.1',
'outgoing_interface': 'GigabitEthernet1',
'preference': 3,
},
},
},
},
},
},
},
},
},
}
golden_output_2 = {'execute.return_value': '''
Codes: M - Manual static, A - AAA download, N - IP NAT, D - DHCP,
G - GPRS, V - Crypto VPN, C - CASA, P - Channel interface processor,
B - BootP, S - Service selection gateway
DN - Default Network, T - Tracking object
L - TL1, E - OER, I - iEdge
D1 - Dot1x Vlan Network, K - MWAM Route
PP - PPP default route, MR - MRIPv6, SS - SSLVPN
H - IPe Host, ID - IPe Domain Broadcast
U - User GPRS, TE - MPLS Traffic-eng, LI - LIIN
IR - ICMP Redirect
Codes in []: A - active, N - non-active, B - BFD-tracked, D - Not Tracked, P - permanent
Static local RIB for VRF1
M 2.2.2.2/32 [1/0] via GigabitEthernet0/0 10.1.2.2 [A]
M [2/0] via GigabitEthernet0/1 20.1.2.2 [N]
M [3/0] via 20.1.2.2 [N]
M 3.3.3.3/32 [1/0] via GigabitEthernet0/2 [A]
M [1/0] via GigabitEthernet0/3 [A]
'''
}
golden_parsed_output_2 = {
'vrf': {
'VRF1': {
'address_family': {
'ipv4': {
'routes': {
'2.2.2.2/32': {
'route': '2.2.2.2/32',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': True,
'next_hop': '10.1.2.2',
'outgoing_interface': 'GigabitEthernet0/0',
'preference': 1,
},
2: {
'index': 2,
'active': False,
'next_hop': '20.1.2.2',
'outgoing_interface': 'GigabitEthernet0/1',
'preference': 2,
},
3: {
'index': 3,
'active': False,
'next_hop': '20.1.2.2',
'preference': 3,
},
},
},
},
'3.3.3.3/32': {
'route': '3.3.3.3/32',
'next_hop': {
'outgoing_interface': {
'GigabitEthernet0/2': {
'active': True,
'outgoing_interface': 'GigabitEthernet0/2',
'preference': 1,
},
'GigabitEthernet0/3': {
'active': True,
'outgoing_interface': 'GigabitEthernet0/3',
'preference': 1,
},
},
},
},
},
},
},
},
},
}
def test_empty_1(self):
self.device = Mock(**self.empty_output)
obj = ShowIpStaticRoute(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_show_ip_static_route_1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_1)
obj = ShowIpStaticRoute(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output,self.golden_parsed_output_1)
def test_show_ip_static_route_2(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_2)
obj = ShowIpStaticRoute(device=self.device)
parsed_output = obj.parse(vrf='VRF1')
self.assertEqual(parsed_output, self.golden_parsed_output_2)
# ============================================
# unit test for 'show ipv6 static detail'
# =============================================
class test_show_ipv6_static_detail(unittest.TestCase):
'''
unit test for show ipv6 static detail
'''
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_output_detail_1 = {'execute.return_value': '''
R1_iosv#show ipv6 static detail
IPv6 Static routes Table - default
Codes: * - installed in RIB, u/m - Unicast/Multicast only
U - Per-user Static route
N - ND Static route
M - MIP Static route
P - DHCP-PD Static route
R - RHI Static route
2001:2:2:2::2/128 via 2001:10:1:2::2, distance 3
Resolves to 1 paths (max depth 1)
via GigabitEthernet0/0
* 2001:2:2:2::2/128 via 2001:20:1:2::2, GigabitEthernet0/1, distance 1
2001:2:2:2::2/128 via 2001:10:1:2::2, GigabitEthernet0/0, distance 11, tag 100
Rejected by routing table
Tracked object 1 is Up
* 2001:3:3:3::3/128 via GigabitEthernet0/3, distance 1
* 2001:3:3:3::3/128 via GigabitEthernet0/2, distance 1
'''
}
golden_parsed_output_detail_1 = {
'vrf':{
'default':{
'address_family': {
'ipv6': {
'routes': {
'2001:2:2:2::2/128': {
'route': '2001:2:2:2::2/128',
'next_hop': {
'next_hop_list': {
1: {
'index': 1,
'active': False,
'next_hop': '2001:10:1:2::2',
'resolved_outgoing_interface': 'GigabitEthernet0/0',
'resolved_paths_number': 1,
'max_depth': 1,
'preference': 3,
},
2: {
'index': 2,
'next_hop': '2001:20:1:2::2',
'active': True,
'outgoing_interface': 'GigabitEthernet0/1',
'preference': 1,
},
3: {
'index': 3,
'active': False,
'next_hop': '2001:10:1:2::2',
'outgoing_interface': 'GigabitEthernet0/0',
'rejected_by':'routing table',
'preference': 11,
'tag': 100,
'track': 1,
'track_state': 'up',
},
},
},
},
'2001:3:3:3::3/128': {
'route': '2001:3:3:3::3/128',
'next_hop': {
'outgoing_interface': {
'GigabitEthernet0/3': {
'outgoing_interface': 'GigabitEthernet0/3',
'active': True,
'preference': 1,
},
'GigabitEthernet0/2': {
'outgoing_interface': 'GigabitEthernet0/2',
'active': True,
'preference': 1,
},
},
},
},
},
},
},
},
},
}
golden_output_detail_2 = {'execute.return_value':'''
R1_iosv#show ipv6 static vrf VRF1 detail
IPv6 Static routes Table - VRF1
Codes: * - installed in RIB, u/m - Unicast/Multicast only
U - Per-user Static route
N - ND Static route
M - MIP Static route
P - DHCP-PD Static route
R - RHI Static route
* 2001:2:2:2::2/128 via Null0, distance 2
* 2001:3:3:3::3/128 via Null0, distance 3
'''
}
golden_parsed_output_detail_2 = {
'vrf': {
'VRF1': {
'address_family': {
'ipv6': {
'routes': {
'2001:2:2:2::2/128': {
'route': '2001:2:2:2::2/128',
'next_hop': {
'outgoing_interface': {
'Null0': {
'outgoing_interface': 'Null0',
'active': True,
'preference': 2,
},
},
},
},
'2001:3:3:3::3/128': {
'route': '2001:3:3:3::3/128',
'next_hop': {
'outgoing_interface': {
'Null0': {
'outgoing_interface': 'Null0',
'active': True,
'preference': 3,
},
},
},
},
},
},
},
},
},
}
def test_empty_detail_1(self):
self.device = Mock(**self.empty_output)
obj = ShowIpv6StaticDetail(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
    def test_show_ipv6_static_detail_1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_detail_1)
obj = ShowIpv6StaticDetail(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output,self.golden_parsed_output_detail_1)
    def test_show_ipv6_static_detail_2(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_detail_2)
obj = ShowIpv6StaticDetail(device=self.device)
parsed_output = obj.parse(vrf='VRF1')
self.assertEqual(parsed_output, self.golden_parsed_output_detail_2)
if __name__ == '__main__':
unittest.main() |
#!/usr/bin/env python3
# Write a program that simulates random read coverage over a chromosome
# Report min, max, and average coverage
# Make variables for genome size, read number, read length
# Input values from the command line
# Note that you will not sample the ends of a chromosome very well
# So don't count the first and last parts of a chromosome
import sys
import random
genome_size = int(sys.argv[1])
read_num = int(sys.argv[2])
read_length = int(sys.argv[3])
coverage = []
for i in range(genome_size):
coverage.append(0)
for i in range(read_num):
r = random.randint(0, genome_size - read_length)
for j in range(read_length):
coverage[r+j] += 1
#print(coverage[read_length: -read_length])
total = 0
for count in coverage[read_length: -read_length]:
total += count
avg = total/(genome_size - (2*read_length))
coverage.sort()
#print(coverage[read_length: -read_length])
print(coverage[read_length], coverage[-read_length], avg)
'''
a = "abcdefghijk"
print(a)
print(a[1:])
print(a[:-1])
for letter in a:
print(letter)
'''
"""
python3 32xcoverage.py 1000 100 100
5 20 10.82375
"""
|
"""
The Help class, containing the custom help command.
"""
import logging
from discord.ext import commands
from botforces.utils.discord_common import (
create_general_help_embed,
create_stalk_help_embed,
create_user_help_embed,
create_problem_help_embed,
create_upcoming_help_embed,
create_duel_help_embed,
create_plotrating_help_embed,
create_plotindex_help_embed,
create_plottags_help_embed,
)
class Help(commands.Cog):
def __init__(self, client):
self.client = client
@commands.command()
async def help(self, ctx, cmd=None):
# Checking if the author was a bot
if ctx.message.author == self.client.user or ctx.message.author.bot:
return
        # Map each command name (None = no parameter) to its embed factory
        embed_factories = {
            None: create_general_help_embed,
            "user": create_user_help_embed,
            "stalk": create_stalk_help_embed,
            "problem": create_problem_help_embed,
            "upcoming": create_upcoming_help_embed,
            "duel": create_duel_help_embed,
            "endduel": create_duel_help_embed,
            "plotrating": create_plotrating_help_embed,
            "plotindex": create_plotindex_help_embed,
            "plottags": create_plottags_help_embed,
        }
        factory = embed_factories.get(cmd)
        if factory is not None:
            embed = await factory(ctx.author)
            await ctx.send(embed=embed)
        # If an invalid command was given
        else:
            await ctx.send(f':x: Command "{cmd}" does not exist!')
@commands.Cog.listener()
async def on_ready(self):
logging.info("-Help ready!")
def setup(client):
client.add_cog(Help(client))
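# A minimal loading sketch (the module path below is hypothetical): a discord.py
# 1.x bot would register this cog with
#     client.load_extension("botforces.cogs.help")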
|
from typing import List
import pytest
from yarl import URL
from tests.conftest import HttpRequest, wait_for_website_stub_requests
from website_monitor.website_checker import WebsiteChecker
@pytest.mark.asyncio
async def test_website_checker_sends_request_to_target(
website_checker: WebsiteChecker,
website_stub_requests: List[HttpRequest],
website_stub_url: URL,
) -> None:
await wait_for_website_stub_requests(website_stub_requests)
assert len(website_stub_requests) == 1
assert website_stub_requests[0].method == "GET"
assert str(website_stub_requests[0].path) == website_stub_url.path
assert website_stub_requests[0].data == ""
|
# importing module
import logging
import logging.handlers
import sys
def getLogger(logger_name, test=None):
    """ The method generates a logger instance to be reused.
    :param logger_name: incoming logger name
    :param test: when truthy, log to the test file instead of the service file
    :return: logger instance
    """
    logger = logging.getLogger(str(logger_name))
    log_level = logging.DEBUG
    logger.setLevel(log_level)
    # Guard against attaching duplicate handlers when the same logger is requested twice.
    if logger.handlers:
        return logger
    formatter = logging.Formatter('%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')
    filepath = 'log/service.log'
    if test:
        filepath = 'log/test.log'
    fh = logging.handlers.RotatingFileHandler(filepath, maxBytes=104857600, backupCount=5)
    fh.setLevel(log_level)
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    sh = logging.StreamHandler(sys.stdout)
    sh.setLevel(log_level)
    sh.setFormatter(formatter)
    logger.addHandler(sh)
    return logger
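if __name__ == "__main__":
    # Minimal usage sketch (assumption: the 'log/' directory already exists,
    # since RotatingFileHandler does not create parent directories).
    demo = getLogger("demo", test=True)
    demo.debug("file and stdout handlers configured")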
|
"""
Logger.py: CogRIoT console message log application
"""
__author__ = "Daniel Mazzer"
__copyright__ = "Copyright 2016, CogRIoT Project"
__credits__ = "Inatel - Wireless and Optical Convergent Access Laboratory"
__license__ = "MIT"
__maintainer__ = "Daniel Mazzer"
__email__ = "[email protected]"
import logging
class Logger:
def __init__(self):
self.logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG,
format='[%(asctime)s %(threadName)s] %(message)s',
datefmt='%d/%m/%Y %H:%M:%S')
def log(self, msg):
self.logger.info(msg) |
DAY4_DRAWS = (
"1,76,38,96,62,41,27,33,4,2,94,15,89,25,66,14,30,0,71,21,48,44,87,73,60,50,"
"77,45,29,18,5,99,65,16,93,95,37,3,52,32,46,80,98,63,92,24,35,55,12,81,51,"
"17,70,78,61,91,54,8,72,40,74,68,75,67,39,64,10,53,9,31,6,7,47,42,90,20,19,"
"36,22,43,58,28,79,86,57,49,83,84,97,11,85,26,69,23,59,82,88,34,56,13"
)
DAY4_BOARDS = [
"85 23 65 78 93\n"
"27 53 10 12 26\n"
"5 34 83 25 6\n"
"56 40 73 29 54\n"
"33 68 41 32 82\n",
"8 31 14 70 91\n"
"53 49 86 13 21\n"
"66 28 76 78 93\n"
"39 63 80 43 23\n"
"56 25 60 67 72\n",
"67 78 36 64 14\n"
"46 16 80 23 94\n"
"22 47 51 65 57\n"
"33 76 21 92 97\n"
"31 95 54 27 20\n",
"1 77 86 43 30\n"
"28 88 7 5 60\n"
"66 24 3 57 33\n"
"38 23 59 84 44\n"
"74 47 17 29 85\n",
"21 50 86 2 70\n"
"85 19 22 93 25\n"
"99 38 74 30 65\n"
"81 0 47 78 63\n"
"34 11 51 88 64\n",
"45 15 29 81 30\n"
"75 21 88 91 49\n"
"39 20 4 17 78\n"
"10 12 38 11 7\n"
"98 6 65 69 86\n",
"36 20 31 44 69\n"
"30 65 55 88 64\n"
"74 85 82 61 5\n"
"57 17 90 43 54\n"
"58 83 52 23 7\n",
"42 16 82 86 76\n"
"60 26 27 59 55\n"
"7 53 22 78 5\n"
"18 61 10 15 17\n"
"28 46 14 87 77\n",
"21 43 15 47 61\n"
"24 76 28 3 27\n"
"19 62 69 82 93\n"
"49 29 97 74 41\n"
"92 36 37 99 40\n",
"31 4 3 62 51\n"
"24 57 78 67 53\n"
"13 5 76 38 55\n"
"79 9 75 98 71\n"
"65 1 39 18 47\n",
"59 4 38 95 99\n"
"85 68 69 93 43\n"
"83 57 48 42 15\n"
"47 50 80 79 90\n"
"56 87 78 64 25\n",
"21 37 14 67 95\n"
"88 39 26 38 49\n"
"89 83 54 77 96\n"
"48 86 94 19 20\n"
"43 41 8 74 58\n",
"1 36 12 90 91\n"
"63 21 98 82 66\n"
"39 86 7 52 77\n"
"80 81 44 33 58\n"
"78 30 11 51 28\n",
"81 74 7 33 96\n"
"75 60 87 47 91\n"
"39 73 30 50 13\n"
"4 41 9 43 77\n"
"34 82 72 48 12\n",
"93 63 74 25 57\n"
"29 76 9 45 70\n"
"98 77 71 16 41\n"
"47 54 18 14 55\n"
"31 89 67 87 83\n",
"8 72 45 93 68\n"
"74 26 69 94 65\n"
"28 9 20 47 41\n"
"46 54 21 56 22\n"
"84 62 18 15 48\n",
"20 51 81 40 69\n"
"71 10 13 93 75\n"
"44 86 0 95 37\n"
"99 39 76 80 66\n"
"14 64 49 62 27\n",
"75 7 51 86 79\n"
"43 30 61 39 16\n"
"85 63 90 28 96\n"
"88 78 72 31 73\n"
"98 87 23 19 58\n",
"20 95 47 97 12\n"
"92 25 68 87 91\n"
"37 10 78 23 63\n"
"74 93 58 39 5\n"
"76 51 48 72 16\n",
"37 18 32 34 85\n"
"22 31 98 42 19\n"
"29 72 48 76 25\n"
"47 1 21 7 53\n"
"79 82 86 52 78\n",
"20 16 47 78 92\n"
"88 15 71 67 2\n"
"5 52 90 70 9\n"
"22 49 28 82 27\n"
"6 19 61 73 48\n",
"71 26 7 11 79\n"
"52 30 47 1 31\n"
"17 75 94 91 28\n"
"81 98 23 55 21\n"
"77 15 39 24 16\n",
"5 75 44 88 65\n"
"89 45 23 69 19\n"
"41 61 67 52 54\n"
"47 38 57 12 98\n"
"62 70 26 87 53\n",
"50 4 65 77 25\n"
"6 21 5 27 92\n"
"39 63 97 75 79\n"
"60 34 87 26 74\n"
"99 24 44 85 2\n",
"13 64 38 78 21\n"
"74 17 83 57 94\n"
"25 39 69 53 4\n"
"54 33 81 50 76\n"
"42 75 19 77 26\n",
"63 31 70 19 39\n"
"38 87 15 90 75\n"
"61 98 6 29 86\n"
"78 62 32 11 60\n"
"55 97 13 73 82\n",
"51 63 68 84 36\n"
"12 33 37 31 8\n"
"18 41 34 74 23\n"
"72 39 85 48 60\n"
"24 19 29 88 0\n",
"46 51 17 23 13\n"
"20 93 97 99 81\n"
"57 47 33 84 44\n"
"28 96 2 43 56\n"
"68 36 62 15 5\n",
"81 99 5 30 10\n"
"38 62 57 8 37\n"
"7 86 98 3 54\n"
"46 82 96 15 72\n"
"83 1 75 25 50\n",
"47 57 11 61 27\n"
"53 10 31 91 98\n"
"76 85 55 38 23\n"
"6 81 67 71 70\n"
"35 29 17 50 56\n",
"24 65 15 1 89\n"
"45 60 97 23 14\n"
"84 56 58 5 54\n"
"3 72 51 46 79\n"
"67 70 78 34 77\n",
"38 11 54 23 2\n"
"33 14 10 96 63\n"
"43 5 36 20 30\n"
"70 53 66 71 9\n"
"91 90 21 7 88\n",
"94 44 4 86 26\n"
"39 70 54 50 30\n"
"55 40 12 72 71\n"
"68 7 66 47 91\n"
"31 24 13 1 96\n",
"79 14 40 87 68\n"
"16 32 53 46 98\n"
"38 95 21 89 69\n"
"62 60 19 81 33\n"
"70 52 28 83 0\n",
"62 42 38 48 64\n"
"61 79 78 97 98\n"
"89 7 3 29 68\n"
"92 76 14 67 1\n"
"41 99 72 47 60\n",
"5 75 18 42 33\n"
"72 61 36 31 29\n"
"19 58 1 34 94\n"
"54 84 92 99 38\n"
"76 68 79 53 37\n",
"14 91 37 5 98\n"
"68 29 34 76 43\n"
"75 0 67 33 69\n"
"81 47 58 30 93\n"
"88 92 42 77 54\n",
"64 24 28 54 53\n"
"72 68 3 73 4\n"
"83 6 59 66 94\n"
"87 80 55 20 16\n"
"13 82 74 31 70\n",
"63 92 71 0 83\n"
"98 40 50 55 2\n"
"88 5 85 30 23\n"
"10 75 81 58 68\n"
"51 31 14 89 1\n",
"67 93 94 54 53\n"
"38 71 34 40 24\n"
"31 63 30 99 75\n"
"4 57 86 19 70\n"
"60 49 87 68 74\n",
"56 94 79 53 7\n"
"24 12 19 6 99\n"
"82 51 41 46 43\n"
"17 49 52 78 55\n"
"75 48 61 70 87\n",
"14 55 32 21 31\n"
"88 83 23 44 4\n"
"1 77 45 90 85\n"
"46 81 51 27 62\n"
"60 24 29 18 0\n",
"95 92 91 27 26\n"
"22 43 45 64 62\n"
"83 23 25 85 94\n"
"84 53 72 28 20\n"
"75 60 52 18 73\n",
"95 41 7 21 32\n"
"58 65 16 56 97\n"
"68 25 91 83 24\n"
"66 89 15 55 6\n"
"2 30 84 10 90\n",
"58 86 44 19 74\n"
"57 89 17 6 83\n"
"77 35 60 32 13\n"
"97 63 62 28 76\n"
"55 31 11 0 52\n",
"33 39 59 42 45\n"
"61 50 92 9 79\n"
"15 0 28 5 72\n"
"91 24 21 29 87\n"
"86 76 43 31 93\n",
"63 11 86 45 85\n"
"96 74 66 93 32\n"
"95 30 99 23 18\n"
"69 97 48 15 1\n"
"42 87 47 83 80\n",
"93 5 40 64 2\n"
"44 51 15 54 83\n"
"69 77 90 58 11\n"
"0 48 43 30 55\n"
"25 72 38 73 52\n",
"89 58 71 68 15\n"
"23 65 9 36 74\n"
"21 29 42 79 98\n"
"55 47 33 39 28\n"
"16 75 91 69 57\n",
"13 79 12 71 2\n"
"60 94 99 43 82\n"
"84 89 29 91 87\n"
"74 80 25 32 21\n"
"70 14 68 92 11\n",
"78 1 16 51 87\n"
"58 94 59 15 43\n"
"79 41 50 47 39\n"
"53 37 9 28 72\n"
"34 63 89 35 18\n",
"31 67 70 42 43\n"
"60 2 89 49 22\n"
"56 17 81 24 74\n"
"20 65 1 96 51\n"
"68 7 0 38 25\n",
"59 14 29 53 19\n"
"9 2 11 33 44\n"
"81 6 10 47 58\n"
"20 34 62 55 40\n"
"71 38 69 45 78\n",
"59 36 70 42 21\n"
"3 16 49 79 98\n"
"74 25 8 84 19\n"
"61 80 47 65 64\n"
"91 62 52 9 40\n",
"1 85 63 7 2\n"
"0 20 61 26 77\n"
"99 37 74 42 76\n"
"25 94 19 78 60\n"
"79 72 95 22 11\n",
"51 21 79 76 32\n"
"55 23 69 19 61\n"
"71 54 94 47 92\n"
"5 64 6 68 16\n"
"91 81 9 99 30\n",
"61 69 82 86 68\n"
"66 81 28 38 36\n"
"26 29 31 11 8\n"
"72 51 12 95 63\n"
"18 30 88 17 32\n",
"34 8 14 42 67\n"
"66 79 65 20 52\n"
"37 87 74 24 3\n"
"59 54 21 32 89\n"
"31 4 62 76 30\n",
"11 93 8 92 55\n"
"38 72 99 3 83\n"
"12 75 0 41 46\n"
"17 25 5 39 48\n"
"14 18 86 29 84\n",
"6 20 41 51 48\n"
"5 67 30 24 47\n"
"3 8 92 22 39\n"
"4 56 36 31 75\n"
"2 45 85 81 96\n",
"47 43 72 22 3\n"
"19 87 53 12 60\n"
"29 40 56 68 18\n"
"66 97 70 33 39\n"
"85 37 0 90 98\n",
"61 35 81 84 94\n"
"11 1 58 45 77\n"
"6 99 67 36 43\n"
"5 7 0 87 80\n"
"44 78 39 70 20\n",
"58 34 49 29 75\n"
"17 15 28 23 84\n"
"59 25 92 48 0\n"
"20 81 47 3 71\n"
"68 60 5 22 87\n",
"90 32 41 39 6\n"
"36 78 67 24 50\n"
"55 72 52 75 44\n"
"87 15 92 31 58\n"
"83 89 68 19 43\n",
"99 44 53 68 25\n"
"71 67 16 19 36\n"
"35 58 14 86 48\n"
"88 18 61 24 23\n"
"87 9 91 37 15\n",
"37 5 63 68 28\n"
"41 50 76 99 64\n"
"34 92 78 94 71\n"
"11 96 97 42 58\n"
"33 45 0 93 48\n",
"33 68 9 12 81\n"
"60 98 28 8 99\n"
"14 17 6 82 15\n"
"57 69 43 38 29\n"
"47 84 76 22 18\n",
"79 70 92 38 47\n"
"12 82 98 46 0\n"
"76 15 53 59 97\n"
"18 52 49 29 96\n"
"44 64 68 89 24\n",
"95 14 17 27 42\n"
"55 43 57 29 25\n"
"34 73 86 50 16\n"
"69 37 75 63 39\n"
"78 79 3 4 30\n",
"27 31 15 92 46\n"
"36 23 72 40 50\n"
"51 99 55 89 21\n"
"12 70 84 63 85\n"
"78 88 77 75 0\n",
"15 67 40 39 28\n"
"9 79 22 52 75\n"
"96 65 86 98 14\n"
"97 87 44 84 68\n"
"36 26 89 43 27\n",
"79 59 48 27 36\n"
"85 92 93 76 24\n"
"2 25 7 42 90\n"
"23 29 74 35 86\n"
"58 60 31 75 57\n",
"10 43 83 75 8\n"
"88 12 38 30 9\n"
"60 67 59 76 6\n"
"55 45 74 34 25\n"
"97 49 65 96 69\n",
"59 86 15 3 19\n"
"89 4 74 61 23\n"
"52 98 8 79 39\n"
"95 17 22 14 51\n"
"50 18 94 30 84\n",
"19 63 58 72 67\n"
"35 93 29 91 0\n"
"39 26 43 84 21\n"
"70 42 2 53 12\n"
"59 99 8 1 86\n",
"23 86 34 22 65\n"
"71 10 16 50 91\n"
"66 89 49 81 43\n"
"40 7 26 75 61\n"
"62 59 2 46 95\n",
"24 21 0 49 25\n"
"92 42 48 12 7\n"
"81 93 59 68 3\n"
"14 23 63 39 29\n"
"35 43 6 44 89\n",
"67 74 95 34 10\n"
"39 90 59 44 51\n"
"17 16 97 24 62\n"
"20 54 76 63 88\n"
"87 66 14 78 82\n",
"96 86 67 59 79\n"
"66 3 30 77 71\n"
"2 91 99 82 31\n"
"48 65 75 98 53\n"
"63 54 64 76 1\n",
"85 96 40 98 24\n"
"16 20 10 23 17\n"
"79 59 53 42 65\n"
"67 2 5 80 75\n"
"62 38 19 74 73\n",
"43 10 79 92 8\n"
"52 36 4 5 67\n"
"56 29 33 24 97\n"
"85 17 53 75 65\n"
"62 64 1 21 83\n",
"93 92 79 17 12\n"
"40 88 6 82 34\n"
"90 96 53 25 43\n"
"14 62 54 10 39\n"
"49 68 41 16 44\n",
"67 99 24 58 76\n"
"43 53 59 54 51\n"
"47 6 61 8 2\n"
"80 68 90 14 4\n"
"29 46 94 89 50\n",
"14 45 19 33 43\n"
"6 55 4 31 80\n"
"51 2 69 68 61\n"
"71 70 79 91 93\n"
"66 18 54 13 87\n",
"8 45 61 54 30\n"
"85 16 19 82 37\n"
"56 39 11 47 4\n"
"74 70 10 60 91\n"
"21 63 95 53 72\n",
"71 21 63 86 27\n"
"53 52 40 23 81\n"
"2 47 92 68 15\n"
"46 45 31 8 1\n"
"34 80 37 11 69\n",
"96 0 15 90 66\n"
"65 43 92 83 18\n"
"3 47 19 8 32\n"
"71 26 42 34 28\n"
"62 99 55 5 12\n",
"37 99 30 21 3\n"
"63 18 68 47 27\n"
"57 0 65 85 20\n"
"7 58 40 92 43\n"
"15 19 5 4 53\n",
"46 16 45 95 68\n"
"6 44 31 47 73\n"
"84 82 71 75 94\n"
"26 25 17 32 49\n"
"18 96 13 58 9\n",
"71 36 13 68 10\n"
"84 7 60 79 41\n"
"1 83 43 81 97\n"
"90 53 80 19 38\n"
"48 25 32 42 29\n",
"37 68 86 44 78\n"
"87 67 77 70 60\n"
"45 34 27 15 47\n"
"12 21 13 55 26\n"
"81 41 63 40 74\n",
"24 50 93 94 57\n"
"99 4 56 5 28\n"
"42 31 22 6 76\n"
"90 89 16 49 59\n"
"9 7 43 71 54\n",
"69 75 94 38 46\n"
"52 64 50 72 42\n"
"76 63 13 60 10\n"
"99 80 43 33 17\n"
"25 31 4 89 22\n",
"88 57 22 66 34\n"
"85 16 87 95 59\n"
"73 2 46 5 29\n"
"25 69 53 6 14\n"
"96 77 19 91 43\n",
"46 99 52 47 76\n"
"89 53 24 13 59\n"
"45 5 1 30 19\n"
"68 25 22 10 73\n"
"42 27 31 0 94\n",
"42 44 98 89 87\n"
"65 10 80 56 41\n"
"3 35 95 48 43\n"
"85 97 83 12 94\n"
"50 38 93 47 17\n",
"16 73 18 81 89\n"
"6 48 54 93 19\n"
"35 52 88 49 31\n"
"43 79 83 14 28\n"
"50 62 98 26 22\n",
"38 47 7 20 35\n"
"45 76 63 96 24\n"
"98 53 2 87 80\n"
"83 86 92 48 1\n"
"73 60 26 94 6\n",
"80 50 29 53 92\n"
"66 90 79 98 46\n"
"40 21 58 38 60\n"
"35 13 72 28 6\n"
"48 76 51 96 12\n",
"79 80 24 37 51\n"
"86 70 1 22 71\n"
"52 69 10 83 13\n"
"12 40 3 0 30\n"
"46 50 48 76 5\n",
]
|
class Node() :
def __init__(self, value=None) :
self.data = value
self.next = None
class LinkedList() :
def __init__(self) :
self.head = None
self.tail = None
def insertElements(self, arr) :
"""
        Receives an array of integers and inserts them sequentially into the linked list.
"""
# Loop through each element and insert it into the end of the list
for num in arr :
temp_node = Node(num)
# If the linked list is empty, the current number becomes the head of the list
if self.head is None :
self.head = temp_node
self.tail = temp_node
else :
            # Otherwise, we append the number to the end of the list
self.tail.next = temp_node
self.tail = temp_node
# Returns the head pointer to the list
return self.head
def printList(head) :
"""
When invoked, it prints the linked list in a single line.
"""
# Iterate through the list, printing all values
ptr = head
while ptr :
print(ptr.data, end=" ")
ptr = ptr.next
print()
def reverseList(head, k) :
    """
    Provided with the head of a linked list, performs the k element reverse, and returns the new head of the modified list
    """
    # Compute the list length locally instead of relying on the module-level n
    n = 0
    node = head
    while node :
        n += 1
        node = node.next
    ptr = head
    new_head = None
    new_tail = None
    num_reverse = (n % k) if (n % k) != 0 else k
while ptr :
left_portion_start = ptr
i = 1
while ptr.next and i < num_reverse :
ptr = ptr.next
i += 1
right_portion_start = ptr.next
ptr.next = None
if new_head is None :
new_head = left_portion_start
new_tail = ptr
else :
ptr.next = new_head
new_head = left_portion_start
ptr = right_portion_start
num_reverse = k
return new_head
# Receive the values of n, k, and the array of numbers
input_array = list(map(int, input().strip().split()))
n, k, arr = input_array[0], input_array[1], input_array[2:]
linkedlist = LinkedList()
head = linkedlist.insertElements(arr)
new_head = reverseList(head, k)
printList(new_head)
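# Worked example (a sketch, not part of the original submission): for the input
# "5 3 1 2 3 4 5" (n=5, k=3), the first group holds n % k = 2 nodes, so the list
# is split into [1 2] [3 4 5] and the groups are emitted in reverse order,
# printing: 3 4 5 1 2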
|
#! /usr/bin/env python3
import os.path
from setuptools import setup, find_packages
import magictag
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='magictag',
version=magictag.__version__,
description=magictag.__doc__.strip().split('\n\n', 1)[0],
long_description=long_description,
url='https://github.com/MrDOS/magictag',
author=magictag.__author__,
author_email=magictag.__contact__,
license=magictag.__license__,
packages=['magictag'],
    entry_points={'console_scripts': ['magictag=magictag:main']},
install_requires=['chardet', 'mutagen', 'titlecase'],
extras_require={'album art': ['python-itunes']}
)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'CrearClub.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(713, 557)
self.listaEquiposDisp = QtGui.QListView(Dialog)
self.listaEquiposDisp.setGeometry(QtCore.QRect(30, 130, 271, 361))
self.listaEquiposDisp.setObjectName(_fromUtf8("listaEquiposDisp"))
self.listaEquiposSel = QtGui.QListView(Dialog)
self.listaEquiposSel.setGeometry(QtCore.QRect(410, 130, 271, 361))
self.listaEquiposSel.setObjectName(_fromUtf8("listaEquiposSel"))
self.textNomClub = QtGui.QLineEdit(Dialog)
self.textNomClub.setGeometry(QtCore.QRect(100, 50, 511, 29))
self.textNomClub.setObjectName(_fromUtf8("textNomClub"))
self.label = QtGui.QLabel(Dialog)
self.label.setGeometry(QtCore.QRect(40, 60, 61, 17))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setGeometry(QtCore.QRect(250, 10, 241, 17))
font = QtGui.QFont()
font.setPointSize(14)
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.label_3 = QtGui.QLabel(Dialog)
self.label_3.setGeometry(QtCore.QRect(90, 110, 141, 17))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_4 = QtGui.QLabel(Dialog)
self.label_4.setGeometry(QtCore.QRect(470, 110, 151, 17))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.botonAgregarEquipo = QtGui.QPushButton(Dialog)
self.botonAgregarEquipo.setGeometry(QtCore.QRect(310, 260, 91, 29))
self.botonAgregarEquipo.setObjectName(_fromUtf8("botonAgregarEquipo"))
self.botonEliminarEquipo = QtGui.QPushButton(Dialog)
self.botonEliminarEquipo.setGeometry(QtCore.QRect(310, 310, 91, 29))
self.botonEliminarEquipo.setObjectName(_fromUtf8("botonEliminarEquipo"))
self.botonAceptar = QtGui.QPushButton(Dialog)
self.botonAceptar.setGeometry(QtCore.QRect(570, 510, 97, 29))
self.botonAceptar.setObjectName(_fromUtf8("botonAceptar"))
self.botonCancelar = QtGui.QPushButton(Dialog)
self.botonCancelar.setGeometry(QtCore.QRect(440, 510, 97, 29))
self.botonCancelar.setObjectName(_fromUtf8("botonCancelar"))
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Dialog", None))
self.label.setText(_translate("Dialog", "Nombre:", None))
self.label_2.setText(_translate("Dialog", "Nuevo Club Deportivo", None))
self.label_3.setText(_translate("Dialog", "Equipos Disponibles", None))
self.label_4.setText(_translate("Dialog", "Equipos Seleccionados", None))
        self.botonAgregarEquipo.setText(_translate("Dialog", "Agregar ->", None))
self.botonEliminarEquipo.setText(_translate("Dialog", "<-Eliminar", None))
self.botonAceptar.setText(_translate("Dialog", "Aceptar", None))
self.botonCancelar.setText(_translate("Dialog", "Cancelar", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
|
import pytest
from django.contrib.auth import get_user_model
from django.core.files.uploadedfile import SimpleUploadedFile
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from openslides.assignments.models import Assignment, AssignmentPoll
from openslides.core.models import Tag
from openslides.mediafiles.models import Mediafile
from openslides.utils.autoupdate import inform_changed_data
from tests.count_queries import count_queries
from tests.test_case import TestCase
@pytest.mark.django_db(transaction=False)
def test_assignment_db_queries():
"""
Tests that only the following db queries are done:
    * 1 request to get the list of all assignments,
    * 1 request to get all related users,
    * 1 request to get the agenda item,
    * 1 request to get the list of speakers,
    * 1 request to get the tags,
    * 1 request to get the attachments,
    * 1 request to get the polls of the assignment and
    * 1 request to get the options of these polls
"""
for index in range(10):
assignment = Assignment.objects.create(title=f"assignment{index}", open_posts=1)
for i in range(2):
AssignmentPoll.objects.create(
assignment=assignment,
title="test_title_nah5Ahh6IkeeM8rah3ai",
pollmethod=AssignmentPoll.POLLMETHOD_YN,
type=AssignmentPoll.TYPE_NAMED,
)
assert count_queries(Assignment.get_elements)() == 8
class CreateAssignment(TestCase):
"""
Tests basic creation of assignments.
"""
def test_simple(self):
response = self.client.post(
reverse("assignment-list"),
{"title": "test_title_ef3jpF)M329f30m)f82", "open_posts": 1},
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
assignment = Assignment.objects.get()
self.assertEqual(assignment.title, "test_title_ef3jpF)M329f30m)f82")
self.assertEqual(assignment.number_poll_candidates, False)
def test_with_tags_and_mediafiles(self):
Tag.objects.create(name="test_tag")
Mediafile.objects.create(
title="test_file", mediafile=SimpleUploadedFile("title.txt", b"content")
)
response = self.client.post(
reverse("assignment-list"),
{
"title": "test_title_ef3jpF)M329f30m)f82",
"open_posts": 1,
"tags_id": [1],
"attachments_id": [1],
},
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
assignment = Assignment.objects.get()
self.assertEqual(assignment.title, "test_title_ef3jpF)M329f30m)f82")
self.assertTrue(assignment.tags.exists())
self.assertTrue(assignment.attachments.exists())
def test_number_poll_candidates(self):
response = self.client.post(
reverse("assignment-list"),
{
"title": "test_title_EFBhGQkQciwZtjSc7BVy",
"open_posts": 1,
"number_poll_candidates": True,
},
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
assignment = Assignment.objects.get()
self.assertEqual(assignment.number_poll_candidates, True)
class CandidatureSelf(TestCase):
"""
    Tests the self candidature view.
"""
def setUp(self):
self.client.login(username="admin", password="admin")
self.assignment = Assignment.objects.create(
title="test_assignment_oikaengeijieh3ughiX7", open_posts=1
)
def test_nominate_self(self):
response = self.client.post(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
Assignment.objects.get(pk=self.assignment.pk)
.candidates.filter(username="admin")
.exists()
)
def test_nominate_self_twice(self):
self.assignment.add_candidate(get_user_model().objects.get(username="admin"))
response = self.client.post(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
Assignment.objects.get(pk=self.assignment.pk)
.candidates.filter(username="admin")
.exists()
)
def test_nominate_self_when_finished(self):
self.assignment.set_phase(Assignment.PHASE_FINISHED)
self.assignment.save()
response = self.client.post(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 400)
def test_nominate_self_during_voting(self):
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
response = self.client.post(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
Assignment.objects.get(pk=self.assignment.pk).candidates.exists()
)
def test_nominate_self_during_voting_non_admin(self):
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
admin = get_user_model().objects.get(username="admin")
group_admin = admin.groups.get(name="Admin")
group_delegates = type(group_admin).objects.get(name="Delegates")
admin.groups.add(group_delegates)
admin.groups.remove(group_admin)
inform_changed_data(admin)
response = self.client.post(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 403)
def test_withdraw_self(self):
self.assignment.add_candidate(get_user_model().objects.get(username="admin"))
response = self.client.delete(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 200)
self.assertFalse(
Assignment.objects.get(pk=self.assignment.pk)
.candidates.filter(username="admin")
.exists()
)
def test_withdraw_self_twice(self):
response = self.client.delete(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 400)
def test_withdraw_self_when_finished(self):
self.assignment.add_candidate(get_user_model().objects.get(username="admin"))
self.assignment.set_phase(Assignment.PHASE_FINISHED)
self.assignment.save()
response = self.client.delete(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 400)
def test_withdraw_self_during_voting(self):
self.assignment.add_candidate(get_user_model().objects.get(username="admin"))
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
response = self.client.delete(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 200)
self.assertFalse(
Assignment.objects.get(pk=self.assignment.pk).candidates.exists()
)
def test_withdraw_self_during_voting_non_admin(self):
self.assignment.add_candidate(get_user_model().objects.get(username="admin"))
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
admin = get_user_model().objects.get(username="admin")
group_admin = admin.groups.get(name="Admin")
group_delegates = type(group_admin).objects.get(name="Delegates")
admin.groups.add(group_delegates)
admin.groups.remove(group_admin)
inform_changed_data(admin)
response = self.client.delete(
reverse("assignment-candidature-self", args=[self.assignment.pk])
)
self.assertEqual(response.status_code, 403)
class CandidatureOther(TestCase):
def setUp(self):
self.client = APIClient()
self.client.login(username="admin", password="admin")
self.assignment = Assignment.objects.create(
title="test_assignment_leiD6tiojigh1vei1ait", open_posts=1
)
self.user = get_user_model().objects.create_user(
username="test_user_eeheekai4Phue6cahtho",
password="test_password_ThahXazeiV8veipeePh6",
)
def test_invalid_data_empty_dict(self):
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]), {}
)
self.assertEqual(response.status_code, 400)
def test_invalid_data_string_instead_of_integer(self):
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": "string_instead_of_integer"},
)
self.assertEqual(response.status_code, 400)
def test_invalid_data_user_does_not_exist(self):
# ID of a user that does not exist.
# Be careful: Here we do not test that the user does not exist.
inexistent_user_pk = self.user.pk + 1000
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": inexistent_user_pk},
)
self.assertEqual(response.status_code, 400)
def test_nominate_other(self):
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
Assignment.objects.get(pk=self.assignment.pk)
.candidates.filter(username="test_user_eeheekai4Phue6cahtho")
.exists()
)
def test_nominate_other_twice(self):
self.assignment.add_candidate(
get_user_model().objects.get(username="test_user_eeheekai4Phue6cahtho")
)
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 400)
def test_nominate_other_when_finished(self):
self.assignment.set_phase(Assignment.PHASE_FINISHED)
self.assignment.save()
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 400)
def test_nominate_other_during_voting(self):
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 200)
self.assertTrue(
Assignment.objects.get(pk=self.assignment.pk)
.candidates.filter(username="test_user_eeheekai4Phue6cahtho")
.exists()
)
def test_nominate_other_during_voting_non_admin(self):
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
admin = get_user_model().objects.get(username="admin")
group_admin = admin.groups.get(name="Admin")
group_delegates = type(group_admin).objects.get(name="Delegates")
admin.groups.add(group_delegates)
admin.groups.remove(group_admin)
inform_changed_data(admin)
response = self.client.post(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 403)
def test_delete_other(self):
self.assignment.add_candidate(self.user)
response = self.client.delete(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 200)
self.assertFalse(
Assignment.objects.get(pk=self.assignment.pk)
.candidates.filter(username="test_user_eeheekai4Phue6cahtho")
.exists()
)
def test_delete_other_twice(self):
response = self.client.delete(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 400)
def test_delete_other_when_finished(self):
self.assignment.add_candidate(self.user)
self.assignment.set_phase(Assignment.PHASE_FINISHED)
self.assignment.save()
response = self.client.delete(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 400)
def test_delete_other_during_voting(self):
self.assignment.add_candidate(self.user)
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
response = self.client.delete(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 200)
self.assertFalse(
Assignment.objects.get(pk=self.assignment.pk)
.candidates.filter(username="test_user_eeheekai4Phue6cahtho")
.exists()
)
def test_delete_other_during_voting_non_admin(self):
self.assignment.add_candidate(self.user)
self.assignment.set_phase(Assignment.PHASE_VOTING)
self.assignment.save()
admin = get_user_model().objects.get(username="admin")
group_admin = admin.groups.get(name="Admin")
group_delegates = type(group_admin).objects.get(name="Delegates")
admin.groups.add(group_delegates)
admin.groups.remove(group_admin)
inform_changed_data(admin)
response = self.client.delete(
reverse("assignment-candidature-other", args=[self.assignment.pk]),
{"user": self.user.pk},
)
self.assertEqual(response.status_code, 403)
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Subscribe(models.Model):
    firstName = models.CharField(
        _("First Name"), max_length=225, blank=True, null=True,
    )
    lastName = models.CharField(
        _("Last Name"), max_length=225, blank=True, null=True,
    )
email = models.EmailField(
_("Email Subscribe"), max_length=255, blank=True, null=True, unique=True
)
accepted = models.BooleanField(
_("Accept Policy"), default=False)
class Meta:
ordering = ()
def __str__(self):
return self.email
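# A minimal usage sketch (hypothetical, e.g. from a Django shell):
#     Subscribe.objects.create(email="subscriber@example.com", accepted=True)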
|
from models.instructions.shared import Instruction
class CreateCol(Instruction):
def __init__(self, column_name, type_column, properties):
self._column_name = column_name
self._type_column = type_column
self._properties = properties
self._tac = ''
def __repr__(self):
return str(vars(self))
    def process(self, instruction):
pass
class Unique(Instruction):
def __init__(self, column_list):
self._column_list = column_list
self._tac = ''
def __repr__(self):
return str(vars(self))
    def process(self, instruction):
pass
class Check(Instruction):
def __init__(self, column_condition):
self._column_condition = column_condition
self._tac = ''
def __repr__(self):
return str(vars(self))
    def process(self, instruction):
pass
class PrimaryKey(Instruction):
def __init__(self, column_list):
self._column_list = column_list
self._tac = ''
def __repr__(self):
return str(vars(self))
    def process(self, instruction):
pass
class ForeignKey(Instruction):
def __init__(self, column_list, table_name, table_column_list):
self._column_list = column_list
self._table_name = table_name
self._table_column_list = table_column_list
self._tac = ''
def __repr__(self):
return str(vars(self))
    def process(self, instruction):
pass
class Constraint(Instruction):
def __init__(self, column_name, column_condition):
self._column_name = column_name
self._column_condition = column_condition
self._tac = ''
def __repr__(self):
return str(vars(self))
    def process(self, instruction):
pass
class ColumnTipo(Instruction):
def __init__(self, tipoColumna, paramOne, paramTwo):
self._tipoColumna = tipoColumna
self._paramOne = paramOne
self._paramTwo = paramTwo
self._tac = ''
def __repr__(self):
return str(vars(self))
    def process(self, instruction):
pass |
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 26 16:02:47 2018
@author: Ian-A
torch project
"""
from __future__ import print_function
from pyqtgraph.Qt import QtCore, QtGui
import pyqtgraph.opengl as gl
import pyqtgraph as pg
import torch
import torch.nn as nn #import torch neural network library
import torch.nn.functional as F #import functional neural network module
import numpy as np
import time
import json
import torch.optim as optim
try:
mw.close()
mw = QtGui.QMainWindow()
except NameError:
app = QtGui.QApplication([])
mw = QtGui.QMainWindow()
class DeepNeuralNetwork(nn.Module):
    def __init__(self, u):
        super(DeepNeuralNetwork, self).__init__() #load super class for training data
        self.fc1 = nn.Linear(1, u) #fully connected layer: 1 input feature -> u units
        self.fc2 = nn.Linear(u, u) #fully connected layer: u -> u units
        self.fc3 = nn.Linear(u, u) #fully connected layer: u -> u units
        self.fc4 = nn.Linear(u, 1) #fully connected layer: u units -> 1 output
        self.ReLu = nn.ReLU() #rectified linear unit activation
        self.Sigmoid = nn.Softsign() #note: despite the name, this is a Softsign activation
        self.Tanhshrink = nn.Tanh() #note: despite the name, this is a Tanh activation
        self.Softplus = nn.ELU() #note: despite the name, this is an ELU activation (unused)
    def forward(self, x): #feed forward
        layer1 = x.view(-1, 1) #flatten the input to shape (batch, 1)
        layer2 = self.ReLu(self.fc1(layer1)) #layer2 = layer1 -> fc1 -> ReLU
        layer3 = self.Sigmoid(self.fc2(layer2)) #layer3 = layer2 -> fc2 -> Softsign
        layer4 = self.Tanhshrink(self.fc3(layer3)) #layer4 = layer3 -> fc3 -> Tanh
        layer5 = self.Tanhshrink(self.fc4(layer4)) #layer5 = layer4 -> fc4 -> Tanh
        return layer5
    def __repr__(self):
        # nn.Module attributes (parameters, submodules) are not JSON-serializable,
        # so fall back to the default Module repr instead of json.dumps
        return super(DeepNeuralNetwork, self).__repr__()
# create your optimizer
dtype = torch.float
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# device = torch.device("cpu")
# N is the number of samples; D_in, H and D_out are leftovers from the tutorial
# this script was adapted from and are not used below.
N, D_in, H, D_out = 100000, 100, 100, 10
# Create random Tensors to hold inputs and outputs
def f(x):
"""Nonlinear function to be learnt with a multi-layer Perceptron."""
return np.sin(5*x)/(5*x)
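# Added note (a sketch): sin(5x)/(5x) has a removable singularity at x = 0. The
# sampling below happens to avoid x = 0 exactly, but a safer variant would be
#     def f(x):
#         return np.sinc(5 * x / np.pi)
# since numpy's sinc(t) computes sin(pi*t)/(pi*t) and handles t = 0 gracefully.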
# N above is the amount of training/test data
x_train = 5*np.random.random_sample((N, 1)) - 5   # training inputs/features
print(x_train)
x_test = 2*np.random.random_sample((N, 1)) - 1    # testing inputs/features
v_train = 0.01*np.random.randn(N, 1)              # noise on training set
v_test = 0*np.random.randn(N, 1)                  # no noise on testing set
y_train = f(x_train) + v_train                    # training outputs
y_test = f(x_test) + v_test                       # testing outputs
x = torch.tensor(x_train, dtype=torch.float, requires_grad=False, device= device)
y = torch.tensor(y_train, dtype=torch.float, requires_grad=False, device= device)
# Instantiate the custom Module defined above. Each Linear layer computes its
# output from the input with a linear function, and holds internal Tensors for
# its weight and bias.
model = DeepNeuralNetwork(100).to(device)
optimizer = optim.SGD(model.parameters(), lr=0.1)
# The nn package also contains definitions of popular loss functions; in this
# case we will use Mean Squared Error (MSE) as our loss function.
# loss_fn = torch.nn.KLDivLoss(size_average=False)
criterion = nn.MSELoss()
n = 1000
time1 = time.time()
for t in range(n):
optimizer.zero_grad() # zero the gradient buffers
output = model(x)
loss = criterion(output, y)
#print(model.fc1.bias.grad)
if (t%100==0):
print(t/n)
loss.backward()
#print(f"after:{model.fc1.bias.grad}")
optimizer.step()
time1 = time.time()-time1
print(time1)
counter = 0
for param in model.parameters():
#print(counter)
counter += 1
# param -= learning_rate * param.grad
#print(param)
x = torch.tensor(x_test, dtype=torch.float, device= device)
y_pred = model(x)
x_np = x.cpu().detach().numpy()
y_pred = y_pred.cpu().detach().numpy()
#print(y_pred.T)
#print(x_test.T,y_test.T)
n=len(x_test.T[0])
mw.resize(800,800)
view = pg.GraphicsLayoutWidget() ## GraphicsView with GraphicsLayout inserted by default
mw.setCentralWidget(view)
mw.show()
mw.setWindowTitle('pyqtgraph example: ScatterPlot')
w1 = view.addPlot()
s1 = pg.ScatterPlotItem(size=5, pen=pg.mkPen(None), brush=pg.mkBrush(255, 255, 255, 120))
pos = []
for k in range(0,n):
pos.append(np.array([x_test.T[0][k],y_test.T[0][k]]))
pos1 = np.array(pos)
pos2 = []
for k in range(0,n):
pos2.append(np.array([x_np.T[0][k],y_pred.T[0][k]]))
pos2 = np.array(pos2)
spots2 = [{'pos': pos2.T[:,i], 'data': 2} for i in range(n)]
spots = [{'pos': pos1.T[:,i], 'data': 1} for i in range(n)]
s1.addPoints(spots)
w1.addItem(s1)
s2 = pg.ScatterPlotItem(size=5, pen=pg.mkPen(None), brush=pg.mkBrush(255, 0, 0, 120))
s2.addPoints(spots2)
w1.addItem(s2)
mw.show()
## Start Qt event loop unless running in interactive mode.
"""
if __name__ == '__main__':
import sys
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
QtGui.QApplication.instance().exec_()
"""
|
import pexpect
from climatic.CoreCli import CoreCli
from climatic.connections.Ssh import Ssh
from climatic.connections.Ssh import PTY_WINSIZE_COLS as SSH_PTY_WINSIZE_COLS
from climatic.connections.Ser2Net import Ser2Net
from climatic.connections.Ser2Net import PTY_WINSIZE_COLS as SER2NET_PTY_WINSIZE_COLS
from typing import Optional
####################################################################################################
## OcNOS
class OcNOS(CoreCli):
""" Extend CoreCli with IP Infusion's OcNOS customizations.
"""
def __init__(self, connection, **opts):
""" Initialize OcNOS CLI.
@param connection The connection object to be used for accessing the CLI.
@param opts Same options as CoreCli initializer.
"""
        if 'marker' not in opts:
            self.marker = '\n[^ ]+#'
        if 'error_marker' not in opts:
            self.error_marker = '%'
CoreCli.__init__(self,
connection,
**opts)
####################################################################################################
## SshOcNOS
class SshOcNOS(OcNOS):
""" Connects to a OcNOS CLI using SSH.
"""
def __init__(self,
ip: str,
username: Optional[str]="ocnos",
password: Optional[str]="ocnos",
port: Optional[int]=22,
**opts):
""" Initialize SSH OcNOS CLI.
@param ip IP address of target. Ex: '234.168.10.12'
@param username username for opening SSH connection
@param password password for authentication in SSH connection
@param port Port used for SSH connection. Defaults to 22
@param opts Same options as CoreCli initializer.
"""
self.name = "OcNOS.SSH"
ssh = Ssh(ip, username, port=port)
OcNOS.__init__(self,
ssh,
username=username,
password=password,
pty_winsize_cols=SSH_PTY_WINSIZE_COLS,
**opts)
def login(self):
""" Login to CLI interface.
"""
while True:
index = self.connection.terminal.expect(
['Are you sure you want to continue connecting',
'password',
r'\n[^\s]+>',
r'\n[^\s]+#'],
timeout=10)
            if index == 0:
                self.connection.terminal.sendline('yes')
            elif index == 1:
                self.connection.terminal.waitnoecho()
                self.connection.terminal.sendline(self.password)
            elif index == 2:
                self.connection.terminal.sendline('enable')
            else:
                break
def logout(self):
""" Logout from CLI interface.
"""
# Send exit until login is reached.
self.connection.terminal.sendline()
self.connection.terminal.expect(r'\n[^ ]+#', timeout=5)
self.connection.terminal.sendline('exit')
self.connection.terminal.expect(r'\n[^ ]+>', timeout=5)
self.connection.terminal.sendline('exit')
####################################################################################################
## Ser2NetOcNOS
class Ser2NetOcNOS(OcNOS):
""" Connects to a OcNOS CLI using Ser2Net.
"""
def __init__(self,
ip: str,
port: int,
username: Optional[str]="ocnos",
password: Optional[str]="ocnos",
**opts):
""" Initialize OcNOS CLI.
@param ip IP address of target. Ex: '234.168.10.12'
@param port The port corresponding to the desired serial device.
@param username username for authentication to OcNOS.
@param password password for authentication to OcNOS.
@param opts Same options as CoreCli initializer.
"""
self.name = "OcNOS.Ser2Net"
ser2net = Ser2Net(ip, port)
OcNOS.__init__(self,
ser2net,
username=username,
password=password,
pty_winsize_cols=SER2NET_PTY_WINSIZE_COLS,
**opts)
def login(self):
""" Login to CLI interface.
"""
iteration = 0
while True:
index = self.connection.terminal.expect(
['login', 'Password', r'\n[^\s]*>', r'\n[^\s]*#', pexpect.TIMEOUT],
timeout=self.timeout)
if index == 0:
break
else:
iteration = iteration + 1
                if iteration >= 10:
                    raise TimeoutError("Could not reach login prompt after 10 iterations. Aborting!")
self.connection.terminal.sendcontrol('d')
# Enter credentials
self.connection.terminal.sendline(self.username)
self.connection.terminal.expect('Password:')
self.connection.terminal.waitnoecho()
self.connection.terminal.sendline(self.password)
self.connection.terminal.expect(r'\n[^ ]+>', timeout=self.timeout)
self.connection.terminal.sendline('enable')
self.connection.terminal.expect(r'\n[^ ]+#', timeout=self.timeout)
def logout(self):
""" Logout from CLI interface.
"""
        # To avoid a deadlock in the while loop, only send a new line when there is
        # an active match, i.e. the previous expect was not terminated by an exception.
        if self.connection.terminal.match is not None:
self.connection.terminal.sendline()
while True:
index = self.connection.terminal.expect(['login:', 'closed', r'\n[^\s]*>', r'\n[^\s]*#'],
timeout=self.timeout)
if index <= 1:
break
else:
self.connection.terminal.sendcontrol('d')
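# Usage sketch (hypothetical address and credentials; only the connection
# lifecycle defined in this file is shown -- command execution comes from the
# inherited climatic CoreCli API):
# cli = SshOcNOS('192.168.0.10', username='ocnos', password='ocnos')
# cli.login()
# ... run commands through the CoreCli interface ...
# cli.logout()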
|
'''/*---------------------------------------------------------------------------------------------
* Copyright (c) VituTech. All rights reserved.
* Licensed under the Apache License 2.0. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'''
"""A setuptools based setup module for h5json.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup
from os import path
import os
here = path.abspath(path.dirname(__file__))
root_dir = 'vitu'
py_modules = set()
py_packages = set()
for root, dirs, files in os.walk(root_dir):
for f in files:
if f.endswith('.py'):
if root == root_dir:
py_modules.add(root + '.' + f.split('.')[0])
if root != root_dir:
py_packages.add(root.replace(os.path.sep, '.'))
py_packages.add('vitudata')
py_packages.add('vitudata.apis')
print(py_modules)
print(py_packages)
setup(
name='vitu',
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across setup.py and the project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='1.2.0',
description='VITU/Halo - backtest framework',
long_description='VITU/Halo - backtest framework',
# The project's main homepage.
url='http://vitu.ai',
# Author details
author='VituTech',
author_email='[email protected]',
# Choose your license
# license='BSD',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
# 'License :: OSI Approved :: BSD License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
python_requires='>=3.0, <4',
# What does your project relate to?
keywords='vitu halo backtest',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
#packages=find_packages(exclude=['docs', 'test*']),
packages=list(py_packages),
# Alternatively, if you want to distribute just a my_module.py, uncomment
# this:
py_modules=list(py_modules),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
setup_requires=['pkgconfig', 'six'],
zip_safe=False,
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': ['check-manifest'],
'test': ['coverage'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={
'vitu': ['wqy-microhei.ttc']
},
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
data_files=[('vitu', ['vitu/wqy-microhei.ttc'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
)
|
from os import path
from pathlib import Path
from time import time
from docutils import io
from docutils.core import Publisher, publish_parts
from docutils.frontend import OptionParser
from docutils.io import NullOutput, StringOutput
from sphinx.application import ENV_PICKLE_FILENAME, Sphinx
from sphinx.builders import Builder
from sphinx.io import (SphinxDummySourceClass, SphinxDummyWriter,
SphinxStandaloneReader, read_doc)
from sphinx.util import relative_uri, rst
from sphinx.util.docutils import sphinx_domains
from sphinx.util.parallel import SerialTasks
absolute_path = str(Path('sphinx_input').absolute())
app = Sphinx(absolute_path, absolute_path, absolute_path, absolute_path, 'html', freshenv=True)
# app.build(True, ['sphinx_input/pathlib.rst'])
# app.builder.build_specific(['sphinx_input/pathlib.rst'])
# app.builder.build([absolute_path + '/pathlib.rst'])
# publish_parts(None)
rst_input_string = '''Basic use
---------
Importing the main class::
>>> from pathlib import Path
'''
# updated_docnames = set(app.builder.env.update(app.builder.config, app.builder.srcdir, app.builder.doctreedir))
config = app.builder.config
config_changed = False
app.builder.env.srcdir = app.builder.srcdir
app.builder.env.doctreedir = app.builder.doctreedir
app.builder.env.find_files(config, app.builder.env.app.builder)
app.builder.env.config = config
app.builder.env._nitpick_ignore = set(app.builder.env.config.nitpick_ignore)
added, changed, removed = app.builder.env.get_outdated_files(config_changed)
print('added:', added)
print('changed:', changed)
print('removed:', removed)
# docnames = sorted(added | changed)
docnames = ['pathlib']
app.builder.env.app.emit('env-before-read-docs', app.builder.env, docnames)
start_time = time()
# app.builder.env._read_serial(docnames, app.builder.env.app)
for docname in docnames:
print('docname', docname)
app.emit('env-purge-doc', app.builder.env, docname)
app.builder.env.clear_doc(docname)
# app.builder.env.read_doc(docname, app)
app.builder.env.prepare_settings(docname)
docutilsconf = path.join(app.builder.env.srcdir, 'docutils.conf')
# read docutils.conf from source dir, not from current dir
OptionParser.standard_config_files[1] = docutilsconf
if path.isfile(docutilsconf):
app.builder.env.note_dependency(docutilsconf)
with sphinx_domains(app.builder.env), rst.default_role(docname, app.builder.env.config.default_role):
doctree = read_doc(app.builder.env.app, app.builder.env, app.builder.env.doc2path(docname))
# post-processing
for domain in app.builder.env.domains.values():
domain.process_doc(app.builder.env, docname, doctree)
# allow extension-specific post-processing
if app:
app.emit('doctree-read', doctree)
print('docnames', docnames)
updated_docnames = set(docnames)
print('updated_docnames:', updated_docnames)
# app.builder.write(set(), list(updated_docnames), 'update')
docnames = set(updated_docnames)
app.builder.prepare_writing(docnames)
# app.builder._write_serial(sorted(docnames))
for docname in docnames:
print('docname:', docname)
doctree = app.builder.env.get_and_resolve_doctree(docname, app.builder, doctree)
app.builder.write_doc_serialized(docname, doctree)
# app.builder.write_doc(docname, doctree)
destination = StringOutput(encoding='utf-8')
doctree.settings = app.builder.docsettings
app.builder.secnumbers = app.builder.env.toc_secnumbers.get(docname, {})
app.builder.fignumbers = app.builder.env.toc_fignumbers.get(docname, {})
app.builder.imgpath = relative_uri(app.builder.get_target_uri(docname), '_images')
app.builder.dlpath = relative_uri(app.builder.get_target_uri(docname), '_downloads')
app.builder.current_docname = docname
app.builder.docwriter.write(doctree, destination)
app.builder.docwriter.assemble_parts()
body = app.builder.docwriter.parts['fragment']
print(body)
print('time elapsed:', time() - start_time)
|
from typing import List
def reverse_string(s:List[str]) -> None:
"""
To practice use of recursion
"""
def helper(start, end):
if start < end:
s[start], s[end] = s[end], s[start]
helper(start + 1, end - 1)
print("before", s)
n = len(s)
if n > 1: helper(0, n - 1)
print("after", s)
if __name__ == "__main__":
reverse_string(["h", "e", "l", "l", "o"])
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
# EXIT CODE
NON_RESTART_EXIT_CODE = 64 # If a container exited with the code 64, do not restart it.
KILL_ALL_EXIT_CODE = 65 # If a container exited with the code 65, kill all containers with the same job_id.
|
#!/usr/bin/python
##############################################
###Python template
###Author: Elizabeth Lee
###Date: 4/27/14
###Function: plot zOR vs. CFR & proxies (supp figure)
### lab-confirmed hospitalization rates per 100,000 in US population (CDC data)
### proportion of P&I deaths of all-cause mortality vs. ILI cases of all visits (CDC data)
### proportion of P&I deaths of all-cause mortality vs. lab-confirmed hospitalization rates per 100,000 in US population (CDC data)
### Acute ILI vs non-acute ILI visits (SDI data)
### Acute ILI (inpatient) attack rate
###Import data:
#### CDC_Source/Import_Data/all_cdc_source_data.csv: "uqid", "yr", "wk", "num_samples", "perc_pos", "a_H1", "a_unsub" , "a_H3", "a_2009H1N1", "a_nosub", "b", "a_H3N2", "season", "allcoz_all", "allcoz_65.", "allcoz_45.64", "allcoz_25.44", "allcoz_1.24", "allcoz_.1", "pi_only", "ped_deaths", "hosp_0.4", "hosp_18.49", "hosp_50.64", "hosp_5.17", "hosp_65.", "hosp_tot", "ilitot", "patients", "providers", "perc_wt_ili", "perc_unwt_ili", "ili_0.4", "ili_5.24", "ili_25.64", "ili_25.49", "ili_50.64", "ili_65."
#### SQL_export/F1.csv (outpatient data only): 'season', 'wk', 'yr', 'wknum', 'outpatient & office ILI', 'outpatient & office anydiag', 'pop'
#### SQL_export/Supp_acuteILI_wk.csv (inpatient/ER data only): 'season', 'wk', 'yr', 'wknum', 'inpatient & ER ILI', 'inpatient & ER anydiag', 'pop'
###Command Line: python Supp_zOR_CFR_CHR.py
##############################################
### notes ###
# calculated fatality and hospitalization rates are proxy rates because the denominator is ILI and US population, respectively, rather than lab-confirmed flu cases
### packages/modules ###
import csv
import matplotlib.pyplot as plt
import numpy as np
## local modules ##
import functions as fxn
### data structures ###
### called/local plotting parameters ###
ps = fxn.gp_plotting_seasons
sl = fxn.gp_seasonlabels
fs = 24
fssml = 16
### data files ###
# calculate zOR by season (outpatient incidence by age group from SDI data)
incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/OR_allweeks_outpatient.csv','r')
incid = csv.reader(incidin, delimiter=',')
popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/totalpop_age.csv', 'r')
pop = csv.reader(popin, delimiter=',')
thanksin=open('/home/elee/Dropbox/My_Bansal_Lab/Clean_Data_for_Import/ThanksgivingWeekData_cl.csv', 'r')
thanksin.readline() # remove header
thanks=csv.reader(thanksin, delimiter=',')
# calculate total CFR and CHR from CDC data using deaths, ILI, lab-confirmed hospitalization rate per 100,000
cdcin=open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/CDC_Source/Import_Data/all_cdc_source_data.csv', 'r')
cdcin.readline() # remove header
cdc=csv.reader(cdcin, delimiter=',')
# calculate acute to non-acute ILI cases from total SDI data
outpatientSDIin=open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/F1.csv','r')
outpatientSDI=csv.reader(outpatientSDIin, delimiter=',')
inpatientSDIin=open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/Supp_acuteILI_wk.csv','r')
inpatientSDI=csv.reader(inpatientSDIin, delimiter=',')
# calculate acute ILI (inpatient) attack rate
inpatientin=open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/Supp_acuteILI_wk.csv','r')
inpatient=csv.reader(inpatientin, delimiter=',')
### program ###
## import CDC data for chr, cfr, and deaths:ili
# d_CHR[seasonnum] = cumulative lab-confirmed case-hospitalization rate per 100,000 in population over the period from week 40 to week 17 during flu season
# d_CFR[seasonnum] = P&I deaths of all flu season deaths in 122 cities/outpatient ILI cases of all flu season patient visits to outpatient offices in ILINet
# d_deaths[seasonnum] = (P&I deaths from wks 40 to 20, all cause deaths from wks to 40 to 20)
# d_ILI[seasonnum] = (ILI cases from wks 40 to 20, all patients from wks 40 to 20)
d_CHR, d_CFR, d_deaths, d_ILI = fxn.cdc_import_CFR_CHR(cdc)
## import ILI proportion of outpatient and inpatient cases
# d_ILI_anydiag_outp/inp[seasonnum] = ILI outp or inp cases/ outp or inp any diagnosis cases
d_ILI_anydiag_outp = fxn.proportion_ILI_anydiag(outpatientSDI)
d_ILI_anydiag_inp = fxn.proportion_ILI_anydiag(inpatientSDI)
## import season level attack rate for inpatient ILI cases
# d_inpatientAR[seasonnum] = ILI AR in inpatient facilities per 100,000 population
d_inpatientAR = fxn.ILI_AR(inpatient)
## import SDI data for zOR ##
# dict_wk[week] = seasonnum, dict_incid[week] = ILI cases per 10,000 in US population in second calendar year of flu season, dict_OR[week] = OR
d_wk, d_incid, d_OR = fxn.week_OR_processing(incid, pop)
d_zOR = fxn.week_zOR_processing(d_wk, d_OR)
# d_incid53ls[seasonnum] = [ILI wk 40 per 100000, ILI wk 41 per 100000,...], d_OR53ls[seasonnum] = [OR wk 40, OR wk 41, ...], d_zOR53ls[seasonnum] = [zOR wk 40, zOR wk 41, ...]
d_incid53ls, d_OR53ls, d_zOR53ls = fxn.week_plotting_dicts(d_wk, d_incid, d_OR, d_zOR)
# d_classifzOR[seasonnum] = (mean retrospective zOR, mean early warning zOR)
d_classifzOR = fxn.classif_zOR_processing(d_wk, d_incid53ls, d_zOR53ls, thanks)
# plot values
retrozOR = [d_classifzOR[s][0] for s in ps]
CHR = [d_CHR[s] for s in ps] # missing data for s2 & 3
CFR = [d_CFR[s] for s in ps] # missing data for s2
dI_ratio = [d_deaths[s][0]/d_ILI[s][0] for s in ps] # missing data for s2
inp_outp = [d_ILI_anydiag_inp[s]/d_ILI_anydiag_outp[s] for s in ps]
inpAR = [d_inpatientAR[s] for s in ps]
print CHR
print CFR
print dI_ratio
print retrozOR
print 'retrozOR_hosprate', np.corrcoef(retrozOR, CHR)
print 'retrozOR_mortrisk', np.corrcoef(retrozOR, CFR)
print 'retrozOR_dIratio', np.corrcoef(retrozOR, dI_ratio)
print 'retrozOR_inpoutp', np.corrcoef(retrozOR, inp_outp)
print 'retrozOR_inpatientAR', np.corrcoef(retrozOR, inpAR)
# draw plots
# mean retrospective zOR vs. cumulative lab-confirmed hospitalization rate per 100,000 in population
fig1 = plt.figure()
ax1 = fig1.add_subplot(1,1,1)
ax1.plot(CHR, retrozOR, marker = 'o', color = 'black', linestyle = 'None')
for s, x, y in zip(sl, CHR, retrozOR):
ax1.annotate(s, xy=(x,y), xytext=(-10,5), textcoords='offset points', fontsize=fssml)
ax1.set_ylabel(fxn.gp_sigma_r, fontsize=fs)
ax1.set_xlabel('Hospitalization Rate per 100,000', fontsize=fs)
ax1.tick_params(axis='both', labelsize=fssml)
ax1.set_xlim([0, 35])
ax1.invert_yaxis()
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs/Supp/zOR_CFR_CHR/zOR_HospPerPop.png', transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
# mean retrospective zOR vs. proportion of P&I deaths of all-cause mortality divided by proportion of ILI cases from all visits
fig2 = plt.figure()
ax2 = fig2.add_subplot(1,1,1)
ax2.plot(CFR, retrozOR, marker = 'o', color = 'black', linestyle = 'None')
for s, x, y in zip(sl, CFR, retrozOR):
ax2.annotate(s, xy=(x,y), xytext=(-10,5), textcoords='offset points', fontsize=fssml)
ax2.set_ylabel(fxn.gp_sigma_r, fontsize=fs)
ax2.set_xlabel('P&I Mortality Risk:ILI Case Proportion', fontsize=fs)
ax2.tick_params(axis='both', labelsize=fssml)
ax2.invert_yaxis()
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs/Supp/zOR_CFR_CHR/zOR_ILIMortalityRisk.png', transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
# mean retrospective zOR vs. ratio of P&I deaths to ILI cases (two different data sources)
fig3 = plt.figure()
ax3 = fig3.add_subplot(1,1,1)
ax3.plot(dI_ratio, retrozOR, marker = 'o', color = 'black', linestyle = 'None')
for s, x, y in zip(sl, dI_ratio, retrozOR):
ax3.annotate(s, xy=(x,y), xytext=(-10,5), textcoords='offset points', fontsize=fssml)
ax3.set_ylabel(fxn.gp_sigma_r, fontsize=fs)
ax3.set_xlabel('P&I Deaths to ILI', fontsize=fs)
ax3.tick_params(axis='both', labelsize=fssml)
ax3.invert_yaxis()
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs/Supp/zOR_CFR_CHR/zOR_DeathILIRatio.png', transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
# mean retrospective zOR vs. ratio of proportion of ILI cases in inpatient and outpatient facilities
fig4 = plt.figure()
ax4 = fig4.add_subplot(1,1,1)
ax4.plot(inp_outp, retrozOR, marker = 'o', color = 'black', linestyle = 'None')
for s, x, y in zip(sl, inp_outp, retrozOR):
ax4.annotate(s, xy=(x,y), xytext=(-10,5), textcoords='offset points', fontsize=fssml)
ax4.set_ylabel(fxn.gp_sigma_r, fontsize=fs)
ax4.set_xlabel('Inpatient to Outpatient ILI Proportion of All Cases', fontsize=fs)
ax4.tick_params(axis='both', labelsize=fssml)
ax4.invert_yaxis()
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs/Supp/zOR_CFR_CHR/zOR_InpatientOutpatient.png', transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
# mean retrospective zOR vs. inpatient ILI attack rate per 100,000 population
fig5 = plt.figure()
ax5 = fig5.add_subplot(1,1,1)
ax5.plot(inpAR, retrozOR, marker = 'o', color = 'black', linestyle = 'None')
for s, x, y in zip(sl, inpAR, retrozOR):
ax5.annotate(s, xy=(x,y), xytext=(-10,5), textcoords='offset points', fontsize=fssml)
ax5.set_ylabel(fxn.gp_sigma_r, fontsize=fs)
ax5.set_xlabel('Inpatient ILI Attack Rate per 100,000', fontsize=fs)
ax5.tick_params(axis='both', labelsize=fssml)
ax5.set_xlim([0,140])
ax5.invert_yaxis()
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs/Supp/zOR_CFR_CHR/zOR_InpatientAR.png', transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
|
'''
Made possible thanks to http://www.danielhall.me/2014/09/creating-rr-records-in-route53-with-ansible/
In Ansible lots of things take lists (or comma separated
strings), however lots of things return dicts. One
example of this is the hostvars and groups variables.
'groups' returns a list of machines in a group, and
'hostvars' is a dict containing all the hosts. So if you
need a list of IP addresses of those hosts for the
route53 module, you can't get it directly.
'''
def fetchlistfromdict(d, l):
result = []
for i in l:
result.append(d[i])
return result
class FilterModule(object):
def filters(self):
return {
'fetchlistfromdict': fetchlistfromdict,
}
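# Minimal sketch of the filter outside Ansible (the data shapes below are
# illustrative stand-ins for hostvars and groups); in a playbook this is
# typically invoked as {{ hostvars | fetchlistfromdict(groups['webservers']) }}.
if __name__ == '__main__':
    hostvars = {'web1': {'ip': '10.0.0.1'}, 'web2': {'ip': '10.0.0.2'}}
    print(fetchlistfromdict(hostvars, ['web1', 'web2']))
    # -> [{'ip': '10.0.0.1'}, {'ip': '10.0.0.2'}]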
|
from .client import Client
from .parser import Parser
# devices
from .devices.binary_device import BinaryDevice
from .devices.normalized_device import NormalizedDevice
from .devices.sequence_device import SequenceDevice
from .devices.value_device import ValueDevice |
import csv
import datetime
from .constants import MAX_NUMBER, MIN_NUMBER, \
MAX_YEAR, MIN_YEAR, \
PERIODS_ABBR, \
START_DATE
class Params:
def __init__(self, year, month):
if year >= MIN_YEAR and year <= MAX_YEAR:
self.year = year
else:
raise ValueError('year must be between {} and {} inclusive: year={!r}'.format(MIN_YEAR, MAX_YEAR, year))
if month >= 1 and month <= 12:
self.month = month
else:
raise ValueError('month must be between 1 and 12 inclusive: month={!r}'.format(month))
self.yy = to_yy(year)
self.mmm = to_mmm(month)
def __repr__(self):
return '{}(year={!r}, yy={!r}, month={!r}, mmm={!r})'.format(self.__class__.__name__, self.year, self.yy, self.month, self.mmm)
def to_yy(year):
"""Returns the last 2 digits of the year."""
return str(year % 100).zfill(2)
MONTHS_ABBR = ['',
'Jan', 'Feb', 'Mar',
'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep',
'Oct', 'Nov', 'Dec'
]
def to_mmm(month):
"""Returns the first 3 letters of the month.
The first letter is capitalized.
"""
return MONTHS_ABBR[month]
class Settings:
DEFAULT_TIMEOUT = 5
DEFAULT_URL = 'http://nlcb.co.tt/app/index.php/pwresults/playwhemonthsum'
def __init__(self, timeout=DEFAULT_TIMEOUT, url=DEFAULT_URL):
self.timeout = timeout
self.url = url
def __repr__(self):
return '{}(timeout={!r}, url={!r})'.format(self.__class__.__name__, self.timeout, self.url)
class Result:
@classmethod
def from_csvline(cls, csvline, delimiter=','):
if isinstance(csvline, str):
csvline = csvline.split(delimiter)
        else:
            try:
                csvline = list(map(str, csvline))
            except Exception:
                csvline = []
line = csvline + ['', '', '', '']
draw = line[0]
year, month, day = (line[1].split('-') + ['', '', ''])[:3]
period = line[2]
number = line[3]
return cls(draw, year, month, day, period, number)
def __init__(self, draw, year, month, day, period, number):
original_args = {
'draw': draw,
'year': year,
'month': month,
'day': day,
'period': period,
'number': number
}
self.errors = errors = []
self.draw = None
self.date = None
self.period = None
self.number = None
# Clean and validate draw
draw = _parse_int(draw)
if draw < 1:
errors.append('draw must be a positive integer: draw={!r}'.format(original_args['draw']))
else:
self.draw = draw
# Clean and validate year, month, day
year = _parse_int(year)
month = _parse_int(month)
day = _parse_int(day)
try:
self.date = datetime.date(year, month, day)
except ValueError:
errors.append('year, month and day must represent a valid date: year={!r}, month={!r}, day={!r}'.format(
original_args['year'],
original_args['month'],
original_args['day'])
)
# Clean and validate period
period = _parse_str(period).upper()
if period not in PERIODS_ABBR:
errors.append('period must be one of {}: period={!r}'.format(', '.join(PERIODS_ABBR), original_args['period']))
else:
self.period = period
# Clean and validate number
number = _parse_int(number)
if number < MIN_NUMBER or number > MAX_NUMBER:
errors.append('number must be an integer between {} and {} inclusive: number={!r}'.format(MIN_NUMBER, MAX_NUMBER, original_args['number']))
else:
self.number = number
def __eq__(self, other):
return type(other) is type(self) and \
self.is_valid() and other.is_valid() and \
self.draw == other.draw and \
self.date == other.date and \
self.period == other.period and \
self.number == other.number
def is_valid(self):
return not self.errors
def full_error_message(self):
if hasattr(self, 'lineno'):
header = 'Line {}: {!r}'.format(self.lineno, self.line)
else:
header = '{!r}'.format(self)
reasons = '\n'.join(map(lambda e: ' ' + e, self.errors))
return header + '\n' + reasons
def __repr__(self):
return '{}(draw={!r}, date={!r}, period={!r}, number={!r})'.format(self.__class__.__name__, self.draw, self.date, self.period, self.number)
def _parse_int(x):
    try:
        return int(x)
    except Exception:
        return 0
def _parse_str(x):
    try:
        return str(x)
    except Exception:
        return ''
class Results(list):
@classmethod
def from_csvfile(cls, csvfile):
delimiter = csv.get_dialect('excel').delimiter
results = []
for lineno, line in enumerate(csv.reader(csvfile), start=1):
contents = delimiter.join(line)
if contents.strip():
result = Result.from_csvline(line, delimiter=delimiter)
# Track these values for error reporting purposes
result.lineno = lineno
result.line = contents
results.append(result)
return cls(results)
def __init__(self, results):
super().__init__()
self.invalid = []
for result in results:
if result.is_valid():
self.append(result)
else:
self.invalid.append(result)
def all_valid(self):
return not bool(self.invalid)
def full_error_messages(self):
messages = '\n'.join(map(lambda r: r.full_error_message(), self.invalid))
footer = 'Total errors = {}'.format(len(self.invalid))
return messages + '\n\n' + footer
def date_range(start_date=None, period=PERIODS_ABBR[0], today=datetime.date.today):
if start_date is None:
start_date = START_DATE
period = PERIODS_ABBR[0]
elif period == PERIODS_ABBR[-1]:
start_date += datetime.timedelta(days=1)
end_date = today()
start_year = start_date.year
end_year = end_date.year
for year in range(start_year, end_year + 1):
start_month = start_date.month if year == start_year else 1
end_month = end_date.month if year == end_year else 12
for month in range(start_month, end_month + 1):
yield year, month
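# Usage sketch for date_range (illustrative dates; a fixed "today" is injected
# so the output is deterministic). This module uses relative imports, so run
# it inside its package rather than as a standalone script.
if __name__ == '__main__':
    fixed_today = lambda: datetime.date(2020, 2, 1)
    print(list(date_range(datetime.date(2019, 11, 5), today=fixed_today)))
    # -> [(2019, 11), (2019, 12), (2020, 1), (2020, 2)]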
|
import time
import os
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common import utils
chrome_options = Options()
chrome_options.add_argument("nwapp=" + os.path.dirname(os.path.abspath(__file__)))
testdir = os.path.dirname(os.path.abspath(__file__))
os.chdir(testdir)
port = str(utils.free_port())
tpl = open('index_html.tpl', 'r')
content = tpl.read().replace('{port}', port)
tpl.close()
html = open('index.html', 'w')
html.write(content)
html.close()
tpl = open('index_js.tpl', 'r')
content = tpl.read().replace('{port}', port)
tpl.close()
html = open('index.js', 'w')
html.write(content)
html.close()
driver = webdriver.Chrome(executable_path=os.environ['CHROMEDRIVER'], chrome_options=chrome_options)
time.sleep(1)
try:
print driver.current_url
driver.implicitly_wait(10)
driver.find_element_by_tag_name('button').click()
result = 'node-main test'
counter = 0
    while 'success' not in result and counter < 10:
time.sleep(1)
result = driver.find_element_by_id('result').get_attribute('innerHTML')
print result
counter = counter + 1
assert('success' in result)
finally:
driver.quit()
|
from trading.exchanges.websockets_exchanges.implementations.binance_websocket import BinanceWebSocketClient
from .abstract_websocket import AbstractWebSocket
|
import io
from os import path
from setuptools import setup
import isdayoff
here = path.abspath(path.dirname(__file__))
def long_description():
with io.open(file=path.join(here, "README.md"), encoding="utf-8") as file:
return file.read()
def requirements():
with io.open(file=path.join(here, "requirements.txt")) as file:
return file.readlines()
setup(
name='isdayoff',
version=isdayoff.ProdCalendar.__version__,
    description='Checks whether a date falls on a non-working day, according to official decrees and orders.',
long_description=long_description(),
long_description_content_type="text/markdown",
url='https://github.com/kobylinsky-m/isdayoff',
author='Maxim Kobylinsky',
author_email='[email protected]',
license="MIT",
packages=['isdayoff'],
install_requires=requirements(),
zip_safe=False
)
|
# ----------------- BEGIN LICENSE BLOCK ---------------------------------
#
# Copyright (C) 2018-2019 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
# ----------------- END LICENSE BLOCK -----------------------------------
"..."
import Globs
from qgis.gui import QgsMapToolEmitPoint
from qgis.core import QgsField
from PyQt4.QtCore import QVariant
from .QGISLayer import WGS84PointLayer
class MapSnappingTest(QgsMapToolEmitPoint):
"..."
TITLE = "Map-Snapped"
SYMBOL = "diamond"
COLOR = "226, 226, 0"
SIZE = "5"
def __init__(self, action, snapper):
"..."
QgsMapToolEmitPoint.__init__(self, Globs.iface.mapCanvas())
self.action = action
self.snapper = snapper
self.action.setChecked(False)
self.layer_group = None
self.layer = None
def destroy(self):
"..."
self.layer = None
def activate(self):
"..."
super(MapSnappingTest, self).activate()
self.__create_layer__()
self.action.setChecked(True)
Globs.log.info("Map Snapping Test Activated")
def deactivate(self):
"..."
super(MapSnappingTest, self).deactivate()
self.action.setChecked(False)
self.layer.remove_all_features()
self.layer.refresh()
Globs.log.info("Map Snapping Test Deactivated")
def canvasReleaseEvent(self, event): # pylint: disable=invalid-name
"..."
self.layer.remove_all_features()
raw_pt = self.toLayerCoordinates(self.layer.layer, event.pos())
mmpts = self.snapper.snap(raw_pt)
if mmpts is not None:
for mmpt in mmpts:
self.layer.add_lla(mmpt[5], [mmpt[0], mmpt[1], mmpt[2], mmpt[3], mmpt[4]])
self.layer.refresh()
def __create_layer__(self):
"..."
if self.layer is None:
attrs = [QgsField("Lane Id", QVariant.LongLong),
QgsField("Pos Type", QVariant.String),
QgsField("Long-T-Left", QVariant.Double),
QgsField("Long-T-Right", QVariant.Double),
QgsField("Lateral-T", QVariant.Double)]
self.layer = WGS84PointLayer(Globs.iface,
self.TITLE,
self.SYMBOL,
self.COLOR,
self.SIZE,
attrs,
self.layer_group)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-11-08 19:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auditGroupResults', '0007_auto_20181030_2223'),
]
operations = [
migrations.AddField(
model_name='auditgroupguidelineresult',
name='implementation_score_v',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgroupguidelineresult',
name='implementation_score_w',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgroupresult',
name='implementation_score_v',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgroupresult',
name='implementation_score_w',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgrouprulecategoryresult',
name='implementation_score_v',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgrouprulecategoryresult',
name='implementation_score_w',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgroupruleresult',
name='implementation_score_v',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgroupruleresult',
name='implementation_score_w',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgrouprulescoperesult',
name='implementation_score_v',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
migrations.AddField(
model_name='auditgrouprulescoperesult',
name='implementation_score_w',
field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
),
]
|
from .framework import (
selenium_test,
SeleniumTestCase,
UsesHistoryItemAssertions,
)
class UploadsTestCase(SeleniumTestCase, UsesHistoryItemAssertions):
@selenium_test
def test_upload_simplest(self):
self.perform_upload(self.get_filename("1.sam"))
self.history_panel_wait_for_hid_ok(1)
history_contents = self.current_history_contents()
history_count = len(history_contents)
assert history_count == 1, "Incorrect number of items in history - expected 1, found %d" % history_count
hda = history_contents[0]
assert hda["name"] == '1.sam', hda
assert hda["extension"] == "sam", hda
self.history_panel_click_item_title(hid=1, wait=True)
self.assert_item_dbkey_displayed_as(1, "?")
@selenium_test
def test_upload_specify_ext(self):
self.perform_upload(self.get_filename("1.sam"), ext="txt")
self.history_panel_wait_for_hid_ok(1)
history_contents = self.current_history_contents()
hda = history_contents[0]
assert hda["name"] == '1.sam'
assert hda["extension"] == "txt", hda
@selenium_test
def test_upload_specify_genome(self):
self.perform_upload(self.get_filename("1.sam"), genome="hg18")
self.history_panel_wait_for_hid_ok(1)
self.history_panel_click_item_title(hid=1, wait=True)
self.assert_item_dbkey_displayed_as(1, "hg18")
@selenium_test
def test_upload_specify_ext_all(self):
self.perform_upload(self.get_filename("1.sam"), ext_all="txt")
self.history_panel_wait_for_hid_ok(1)
history_contents = self.current_history_contents()
hda = history_contents[0]
assert hda["name"] == '1.sam'
assert hda["extension"] == "txt", hda
@selenium_test
def test_upload_specify_genome_all(self):
self.perform_upload(self.get_filename("1.sam"), genome_all="hg18")
self.history_panel_wait_for_hid_ok(1)
self.history_panel_click_item_title(hid=1, wait=True)
self.assert_item_dbkey_displayed_as(1, "hg18")
@selenium_test
def test_upload_list(self):
self.upload_list([self.get_filename("1.tabular")], name="Test List")
self.history_panel_wait_for_hid_ok(2)
        # Make sure modals disappeared - both collection creator (TODO: upload).
self.wait_for_selector_absent_or_hidden(".collection-creator")
self.assert_item_name(2, "Test List")
# Make sure source item is hidden when the collection is created.
self.history_panel_wait_for_hid_hidden(1)
@selenium_test
def test_upload_pair(self):
self.upload_list([self.get_filename("1.tabular"), self.get_filename("2.tabular")], name="Test Pair")
self.history_panel_wait_for_hid_ok(3)
# Make sure modals disappeared - both collection creator (TODO: upload).
self.wait_for_selector_absent_or_hidden(".collection-creator")
self.assert_item_name(3, "Test Pair")
# Make sure source items are hidden when the collection is created.
self.history_panel_wait_for_hid_hidden(1)
self.history_panel_wait_for_hid_hidden(2)
@selenium_test
def test_upload_pair_specify_extension(self):
self.upload_list([self.get_filename("1.tabular"), self.get_filename("2.tabular")], name="Test Pair", ext="txt", hide_source_items=False)
self.history_panel_wait_for_hid_ok(3)
self.history_panel_wait_for_hid_ok(1)
history_contents = self.current_history_contents()
hda = history_contents[0]
assert hda["name"] == '1.tabular'
assert hda["extension"] == "txt", hda
@selenium_test
def test_upload_paired_list(self):
self.upload_paired_list([self.get_filename("1.tabular"), self.get_filename("2.tabular")], name="Test Paired List")
self.history_panel_wait_for_hid_ok(3)
# Make sure modals disappeared - both collection creator (TODO: upload).
self.wait_for_selector_absent_or_hidden(".collection-creator")
self.assert_item_name(3, "Test Paired List")
# Make sure source items are hidden when the collection is created.
self.history_panel_wait_for_hid_hidden(1)
self.history_panel_wait_for_hid_hidden(2)
|
from math import sin as s
|
from django.apps import AppConfig
class SelfServiceConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'apps.self_service'
|
import requests
from bs4 import BeautifulSoup
stories = []
def getTheGoodStuff(newsstories):
global stories
for data in newsstories:
htmlatag = data.find("h2", class_="title").find("a")
headline = htmlatag.getText()
url = htmlatag.get("href")
d = {"headline": headline,
"url": url}
stories.append(d)
def scrapeWebsites():
global stories
# Getting stories from Fox News.
    foxnews = "http://www.foxnews.com/"
    r = requests.get(foxnews)
    data = r.text
soup = BeautifulSoup(data, "lxml")
for i in range(0, 15):
foundstories = soup.find_all(
"article", class_="article story-" + str(i))
getTheGoodStuff(foundstories)
def displayStories():
global stories
for i in range(0, len(stories)):
print(stories[i]["headline"])
print(stories[i]['url'])
print("")
scrapeWebsites()
displayStories()
|
import time
from ui import GridWorldWindow
from mdp import GridMDP, value_iteration, policy_extraction, policy_evaluation, policy_iteration, values_converged, policy_converged
class ViewController(object):
def __init__(self, metadata):
self.gridworld = GridWorldWindow(metadata=metadata)
self.mdp = GridMDP(metadata=metadata)
# bind buttons
self.gridworld.btn_value_iteration_1_step.configure(command=self._value_iteration_1_step)
self.gridworld.btn_value_iteration_100_steps.configure(command=self._value_iteration_100_steps)
self.gridworld.btn_value_iteration_slow.configure(command=self._value_iteration_slow)
self.gridworld.btn_policy_iteration_1_step.configure(command=self._policy_iteration_1_step)
self.gridworld.btn_policy_iteration_100_steps.configure(command=self._policy_iteration_100_steps)
self.gridworld.btn_policy_iteration_slow.configure(command=self._policy_iteration_slow)
self.gridworld.btn_reset.configure(command=self._reset_grid)
def _value_iteration_1_step(self):
values = value_iteration(self.mdp.values, self.mdp, num_iter=1)
policy = policy_extraction(values, self.mdp)
self.gridworld.update_grid(values, policy)
self.mdp.update_values(values)
self.mdp.update_policy(policy)
def _value_iteration_100_steps(self):
values = value_iteration(self.mdp.values, self.mdp, num_iter=100)
policy = policy_extraction(values, self.mdp)
self.gridworld.update_grid(values, policy)
self.mdp.update_values(values)
self.mdp.update_policy(policy)
def _value_iteration_slow(self):
# run one iteration of value iteration at a time
old_values = dict(self.mdp.values)
for i in range(100):
values = value_iteration(self.mdp.values, self.mdp, num_iter=1)
policy = policy_extraction(values, self.mdp)
self.gridworld.update_grid(values, policy)
self.mdp.update_values(values)
self.mdp.update_policy(policy)
self.gridworld.window.update()
time.sleep(0.25)
self.gridworld.window.update()
new_values = dict(values)
if values_converged(new_values, old_values):
break
old_values = new_values
self.gridworld.show_dialog('Value Iteration has converged in {} steps!'.format(i+1))
def _policy_iteration_1_step(self):
policy, values = policy_iteration(self.mdp.policy, self.mdp, num_iter=1)
self.gridworld.update_grid(values, policy)
self.mdp.update_values(values)
self.mdp.update_policy(policy)
    def _policy_iteration_100_steps(self):
        policy, values = policy_iteration(self.mdp.policy, self.mdp, num_iter=100)
        self.gridworld.update_grid(values, policy)
        self.mdp.update_values(values)
        self.mdp.update_policy(policy)
def _policy_iteration_slow(self):
# run one iteration of policy iteration at a time
old_policy = dict(self.mdp.policy)
for i in range(100):
            policy, values = policy_iteration(self.mdp.policy, self.mdp, num_iter=1)
            self.gridworld.update_grid(values, policy)
            self.mdp.update_values(values)
            self.mdp.update_policy(policy)
self.gridworld.window.update()
time.sleep(0.25)
self.gridworld.window.update()
new_policy = dict(self.mdp.policy)
if policy_converged(new_policy, old_policy):
break
old_policy = new_policy
self.gridworld.show_dialog('Policy Iteration has converged in {} steps!'.format(i+1))
def _reset_grid(self):
self.mdp.clear()
self.gridworld.clear()
def run(self):
# main UI loop
self.gridworld.run()
|
#%%
import itertools
import os
import pandas as pd
#import csv
import olefile
#%%
def combine_paths(directory, files):
return (os.path.join(directory, filename) for filename in files)
def get_excel_for_district(district_path):
files = os.walk(district_path)
files_per_directory = [combine_paths(walk[0],walk[2]) for walk in files]
all_files = list(itertools.chain(*files_per_directory))
return (f for f in all_files if f.endswith('xls') or f.endswith('xlsx'))
def get_districts(root_path):
"""
Start from the directory containing all the districts. A district is assumed to be any
directory in root_path.
"""
return (os.path.join(root_path,directory) for directory in os.listdir(root_path) if os.path.isdir(os.path.join(root_path,directory)))
def get_districts_with_files(root_path):
return ((district, get_excel_for_district(district)) for district in get_districts(root_path))
def get_OLE_metadata(filename):
print(filename)
    try:
ole = olefile.OleFileIO(filename)
meta = ole.get_metadata()
metadata = {"filename": [(filename.replace("\\", "/"))],
"author": [meta.author],
"last_saved_by":[meta.last_saved_by],
"create_time": [meta.create_time],
"last_saved_time": [meta.last_saved_time]}
    except Exception:
metadata = {"filename":[filename.replace("\\", "/")],
"problem": ["Not working"]}
return pd.DataFrame.from_dict(metadata)
def full_function(root_path):
    for district, files in get_districts_with_files(root_path):
        for filename in files:
yield get_OLE_metadata(filename)
#%%
data_path = 'data/raw/rbv_credes/'
out = pd.DataFrame()
for results in full_function(data_path):
    out = pd.concat([out, results], ignore_index=True)  # DataFrame.append was removed in pandas 2.0
out.to_csv(data_path + "windows_metadata.csv") |
import torch
import torch.nn as nn
class ConvBlock(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(ConvBlock, self).__init__()
self.relu = nn.ReLU()
self.conv = nn.Conv1d(in_channels, out_channels, **kwargs)
self.batchnorm = nn.BatchNorm1d(out_channels)
def forward(self, x):
return self.relu(self.batchnorm(self.conv(x)))
class InceptionBlock(nn.Module):
def __init__(self, in_channels, out_1x1, red_3x3, out_3x3, red_5x5, out_5x5, out_1x1_pool):
super(InceptionBlock, self).__init__()
self.branch1 = ConvBlock(in_channels, out_1x1, kernel_size = 1)
self.branch2 = nn.Sequential(
ConvBlock(in_channels, red_3x3, kernel_size = 1),
ConvBlock(red_3x3, out_3x3, kernel_size = 3, padding = 1)
)
self.branch3 = nn.Sequential(
ConvBlock(in_channels, red_5x5, kernel_size = 1),
ConvBlock(red_5x5, out_5x5, kernel_size = 5, padding = 2)
)
self.branch4 = nn.Sequential(
nn.MaxPool1d(kernel_size=3, stride = 1, padding = 1),
ConvBlock(in_channels, out_1x1_pool, kernel_size = 1)
)
def forward(self, x):
return torch.cat(
[self.branch1(x), self.branch2(x), self.branch3(x), self.branch4(x)], 1
)
class SeismicNet(nn.Module):
def __init__(self):
super(SeismicNet, self).__init__()
self.conv1 = ConvBlock(3, 192, kernel_size = 2, stride = 2)
self.inception_1a = InceptionBlock(192, 64, 96, 128, 16, 32, 32)
self.inception_1b = InceptionBlock(256, 128, 128, 192, 32, 96, 64)
self.maxpool1 = nn.MaxPool1d(kernel_size=3, stride=2, padding = 1)
self.averagepool1 = nn.AvgPool1d(kernel_size= 7, stride= 1)
self.dropout = nn.Dropout2d(p = 0.15)
self.fc1 = nn.Linear(3360, 1024)
self.fc2 = nn.Linear(1024, 256)
self.fc3 = nn.Linear(256, 1)
def forward(self, x):
x = self.conv1(x)
x = self.inception_1a(x)
x = self.inception_1b(x)
x = self.maxpool1(x)
x = self.averagepool1(x)
x = self.dropout(x)
x = torch.flatten(x, 1)
x = self.fc1(x)
x = self.fc2(x)
x = self.fc3(x)
        return x
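# A minimal shape check (a sketch, not part of the original file): with a
# 3-channel input of length 52, conv1 halves the length to 26, maxpool1
# brings it to 13, and the 7-wide average pool leaves 7 steps of 480
# channels, i.e. 480 * 7 = 3360 features, matching fc1's input size.
if __name__ == "__main__":
    model = SeismicNet()
    model.eval()  # disable dropout for a deterministic pass
    dummy = torch.randn(4, 3, 52)  # (batch, channels, samples) -- assumed shape
    print(model(dummy).shape)  # torch.Size([4, 1])
|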
#!/usr/bin/python3
import platform
import subprocess as sp
import threading
import time
import vrep
class SimHelper(object):
def __init__(self):
if platform.system() != 'Darwin':
self.start_vrep()
self.setup_vrep_remote()
if platform.system() != 'Darwin':
self.check_vrep()
'''
Turn on V-REP application
'''
def start_vrep(self):
try:
check_vrep_running = sp.check_output(["pidof", "vrep"])
self.pid = int(check_vrep_running.split()[0])
print("V-REP already running...")
launch_vrep = False
except sp.CalledProcessError:
launch_vrep = True
pass
if launch_vrep:
print("Starting V-REP...")
sp.call(['/bin/bash', '-i', '-c', "vrep"])
time.sleep(5.0)
check_vrep_running = sp.check_output(["pidof", "vrep"])
self.pid = int(check_vrep_running.split()[0])
pass
'''
Setup V-REP remote connection
'''
def setup_vrep_remote(self):
try:
vrep.simxFinish(-1)
self.clientID = vrep.simxStart('127.0.0.1', 19999, True, True, 5000, 5)
if self.clientID == -1:
print("Failed to connect to remote API Server")
                self.exit_sim()
except KeyboardInterrupt:
self.exit_sim()
'''
Check V-REP running
'''
def check_vrep(self):
t = threading.Timer(60.0, self.check_vrep)
t.daemon = True
print("Checking V-REP")
t.start()
self.start_vrep()
'''
Start V-REP simulation
'''
def start_sim(self):
# Set Simulator
vrep.simxSynchronous(self.clientID, True)
dt = .05
vrep.simxSetFloatingParameter(self.clientID,
vrep.sim_floatparam_simulation_time_step,
dt, # specify a simulation time step
vrep.simx_opmode_oneshot)
vrep.simxStartSimulation(self.clientID, vrep.simx_opmode_blocking)
return
'''
Stop V-REP simulation
'''
def stop_sim(self):
vrep.simxStopSimulation(self.clientID, vrep.simx_opmode_oneshot_wait)
return
'''
Pause V-REP simulation
'''
def pause_sim(self):
vrep.simxPauseSimulation(self.clientID, vrep.simx_opmode_oneshot_wait)
return
'''
Exit sequence
'''
def exit_sim(self):
self.stop_sim()
vrep.simxFinish(self.clientID)
return
'''
Step V-REP simulation
'''
def step_sim(self):
vrep.simxSynchronousTrigger(self.clientID)
return
'''
Reset V-REP simulation
'''
def reset(self, display_disabled):
self.stop_sim()
time.sleep(0.1)
self.start_sim()
if display_disabled:
self.display_disabled()
return
'''
Fetch handle for object
'''
def get_handle(self, obj_name):
err, handle = vrep.simxGetObjectHandle(self.clientID, obj_name, vrep.simx_opmode_blocking)
return handle
'''
Load V-REP scene
'''
def load_scene(self, scene_name):
vrep.simxLoadScene(self.clientID, scene_name + ".ttt", 0xFF, vrep.simx_opmode_blocking)
return
'''
Turn off V-REP display
'''
def display_disabled(self):
vrep.simxSetBooleanParameter(self.clientID, vrep.sim_boolparam_display_enabled, False,
vrep.simx_opmode_oneshot_wait)
vrep.simxSetBooleanParameter(self.clientID, vrep.sim_boolparam_browser_visible, False,
vrep.simx_opmode_oneshot_wait)
vrep.simxSetBooleanParameter(self.clientID, vrep.sim_boolparam_hierarchy_visible, False,
vrep.simx_opmode_oneshot_wait)
return
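# Usage sketch (requires a local V-REP installation plus its remote-API Python
# bindings; the scene name below is hypothetical):
# sim = SimHelper()
# sim.load_scene('my_scene')
# sim.start_sim()
# sim.step_sim()
# sim.exit_sim()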
|
import time
import re
def process_data():
    txt = load_txt()          # load the source text file
    sen_list = phrasing(txt)  # split it into sentences
    gen_used_input(sen_list)  # generate the model input file
def load_txt():
txt = ''
with open('data/lianyin.txt', 'r', encoding='utf8') as ly:
txt = ly.read()
return txt
def phrasing(par):
sentences = re.split('[;;,,!!.。\s]', par)
while '' in sentences:
sentences.remove('')
print("-------切分好的句子如下:--------")
for line in sentences:
print(line)
return sentences
def gen_used_input(sen_list):
"""
用来生成可用的输入信息
:param sen_list: 一系列分割好的句子
:return: 给句子初定一个类型
"""
with open('data/cnews/show_data.txt', 'w', encoding='utf8') as file_object:
for sen in sen_list:
            file_object.write('孕产次')  # provisional category label (means "gravidity/parity")
file_object.write('\t')
file_object.write(sen)
file_object.write('\n')
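# Quick usage sketch of the sentence splitter (illustrative input; the real
# pipeline reads data/lianyin.txt via process_data()):
if __name__ == '__main__':
    print(phrasing("第一句,第二句。第三句!"))  # -> ['第一句', '第二句', '第三句']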
|
# coding=utf-8
from typing import Tuple
from .passenger import (
Passenger,
AdultPsg,
TeenPsg,
ChildPsg,
BabyPsg,
SeniorPsg,
DisabilityAPsg,
DisabilityBPsg,
)
class Discount:
title = ""
disc_code = ""
max_cnt = 0
min_cnt = 0
allow_psg = dict()
def __init__(self, *args, **kwargs):
raise NotImplementedError("%s is abstarct class" % type(self).__name__)
def __repr__(self):
return self.title
def _vaild(self, psgrs) -> Tuple[bool, str]:
def name(objs):
def _name(o):
return o.__name__ if isinstance(o, type) else type(o).__name__
if not isinstance(objs, (list, tuple, set)):
return _name(objs)
return ", ".join(tuple(_name(o) for o in objs))
insts = set(type(ins) for ins in psgrs)
essential_psgr = set(
filter(lambda x: self.allow_psg[x]["min"] > 0, self.allow_psg)
)
if not (insts & essential_psgr):
return False, f"{name(self)}에 {name(essential_psgr)} 승객이 포함되야 합니다."
total = 0
for p in psgrs:
total += p.count
allow = self.allow_psg.get(type(p))
if not allow:
return False, f"{name(p)}은(는) {name(self)}에 적용할 수 없습니다."
if not (allow["min"] <= p.count <= allow["max"]):
return False, f"{name(p)}은(는) {allow['max']}명을 초과할 수 없습니다."
if not (self.min_cnt <= total <= self.max_cnt):
return False, f"{self.title} 최대 적용 인원은 {self.max_cnt}명 입니다."
return True, ""
class TeenDisc(Discount):
"""청소년 드림"""
def __init__(self):
self.title = "청소년 드림"
self.disc_code = "B121410002GY"
self.max_cnt = 9
self.min_cnt = 1
self.allow_psg = {
AdultPsg: {"min": 0, "max": 9},
TeenPsg: {"min": 1, "max": 1},
ChildPsg: {"min": 0, "max": 9},
SeniorPsg: {"min": 0, "max": 9},
DisabilityAPsg: {"min": 0, "max": 9},
DisabilityBPsg: {"min": 0, "max": 9},
}
class YouthDisc(Discount):
"""힘내라 청춘"""
def __init__(self):
self.title = "힘내라 청춘"
self.disc_code = "Y20150924001"
self.max_cnt = 1
self.min_cnt = 1
self.allow_psg = {
AdultPsg: {"min": 1, "max": 1},
}
class MomDisc(Discount):
"""맘편한 KTX"""
def __init__(self):
self.title = "맘편한 KTX"
self.disc_code = "Y20150924002"
self.max_cnt = 2
self.min_cnt = 1
self.allow_psg = {
AdultPsg: {"min": 1, "max": 2},
ChildPsg: {"min": 0, "max": 1},
BabyPsg: {"min": 0, "max": 1},
SeniorPsg: {"min": 0, "max": 1},
DisabilityAPsg: {"min": 0, "max": 1},
DisabilityBPsg: {"min": 0, "max": 1},
}
class FamilyDisc(Discount):
"""다자녀행복"""
def __init__(self):
self.title = "다자녀행복"
self.disc_code = "Y20151104001"
self.max_cnt = 9
self.min_cnt = 1
self.allow_psg = {
AdultPsg: {"min": 1, "max": 9},
ChildPsg: {"min": 0, "max": 8},
BabyPsg: {"min": 0, "max": 8},
}
class StoGDisc(Discount):
"""서울광명 특가"""
def __init__(self):
self.title = "서울광명 특가"
self.disc_code = "Y20190313001"
self.max_cnt = 9
self.min_cnt = 1
self.allow_psg = {
AdultPsg: {"min": 1, "max": 9},
}
class BasicLive(Discount):
"""기차누리(기초생활수급자)"""
def __init__(self):
self.title = "기차누리(기초)"
self.disc_code = "Y20180208001"
self.max_cnt = 1
self.min_cnt = 1
self.allow_psg = {
AdultPsg: {"min": 1, "max": 1},
} |
from scipy.spatial import KDTree
from heapq import heappush, heappop
from collections import namedtuple
from .utils import INF, elapsed_time, get_pairs, random_selector, default_selector
from .smoothing import smooth_path
import time
import numpy as np
Metric = namedtuple('Metric', ['p_norm', 'weights'])
Node = namedtuple('Node', ['g', 'parent'])
unit_cost_fn = lambda v1, v2: 1.
zero_heuristic_fn = lambda v: 0
def retrace_path(visited, vertex):
if vertex is None:
return []
return retrace_path(visited, visited[vertex].parent) + [vertex]
def dijkstra(start_v, neighbors_fn, cost_fn=unit_cost_fn):
# Update the heuristic over time
# TODO: overlap with discrete
start_g = 0
visited = {start_v: Node(start_g, None)}
queue = [(start_g, start_v)]
while queue:
current_g, current_v = heappop(queue)
if visited[current_v].g < current_g:
continue
for next_v in neighbors_fn(current_v):
next_g = current_g + cost_fn(current_v, next_v)
if (next_v not in visited) or (next_g < visited[next_v].g):
visited[next_v] = Node(next_g, current_v)
heappush(queue, (next_g, next_v))
return visited
def wastar_search(start_v, end_v, neighbors_fn, cost_fn=unit_cost_fn,
heuristic_fn=zero_heuristic_fn, w=1., max_cost=INF, max_time=INF):
# TODO: lazy wastar to get different paths
#heuristic_fn = lambda v: cost_fn(v, end_v)
priority_fn = lambda g, h: g + w*h
goal_test = lambda v: v == end_v
start_time = time.time()
start_g = 0
start_h = heuristic_fn(start_v)
visited = {start_v: Node(start_g, None)}
queue = [(priority_fn(start_g, start_h), start_g, start_v)]
while queue and (elapsed_time(start_time) < max_time):
_, current_g, current_v = heappop(queue)
if visited[current_v].g < current_g:
continue
if goal_test(current_v):
return retrace_path(visited, current_v)
for next_v in neighbors_fn(current_v):
next_g = current_g + cost_fn(current_v, next_v)
if (next_v not in visited) or (next_g < visited[next_v].g):
visited[next_v] = Node(next_g, current_v)
next_h = heuristic_fn(next_v)
if priority_fn(next_g, next_h) < max_cost:
heappush(queue, (priority_fn(next_g, next_h), next_g, next_v))
return None
##################################################
def get_embed_fn(weights):
return lambda q: weights * q
def get_distance_fn(weights, p_norm=2):
embed_fn = get_embed_fn(weights)
return lambda q1, q2: np.linalg.norm(embed_fn(q2) - embed_fn(q1), ord=p_norm)
##################################################
def check_vertex(v, samples, colliding_vertices, collision_fn):
if v not in colliding_vertices:
colliding_vertices[v] = collision_fn(samples[v])
return not colliding_vertices[v]
def check_edge(v1, v2, samples, colliding_edges, collision_fn, extend_fn):
if (v1, v2) not in colliding_edges:
segment = default_selector(extend_fn(samples[v1], samples[v2]))
colliding_edges[v1, v2] = any(map(collision_fn, segment))
colliding_edges[v2, v1] = colliding_edges[v1, v2]
return not colliding_edges[v1, v2]
def check_path(path, colliding_vertices, colliding_edges, samples, extend_fn, collision_fn):
for v in random_selector(path):
if not check_vertex(v, samples, colliding_vertices, collision_fn):
return False
for v1, v2 in default_selector(get_pairs(path)):
if not check_edge(v1, v2, samples, colliding_edges, collision_fn, extend_fn):
return False
return True
##################################################
def compute_graph(samples, weights=None, p_norm=2, max_degree=10, max_distance=INF, approximate_eps=0.):
vertices = list(range(len(samples)))
edges = set()
if not vertices:
return vertices, edges
if weights is None:
weights = np.ones(len(samples[0]))
embed_fn = get_embed_fn(weights)
embedded = list(map(embed_fn, samples))
kd_tree = KDTree(embedded)
for v1 in vertices:
# TODO: could dynamically compute distances
distances, neighbors = kd_tree.query(embedded[v1], k=max_degree + 1, eps=approximate_eps,
p=p_norm, distance_upper_bound=max_distance)
for d, v2 in zip(distances, neighbors):
if (d < max_distance) and (v1 != v2):
edges.update([(v1, v2), (v2, v1)])
# print(time.time() - start_time, len(edges), float(len(edges))/len(samples))
return vertices, edges
##################################################
def lazy_prm(start, goal, sample_fn, extend_fn, collision_fn, num_samples=100,
weights=None, p_norm=2, lazy=False, max_cost=INF, max_time=INF, **kwargs): #, max_paths=INF):
"""
:param start: Start configuration - conf
:param goal: End configuration - conf
:param sample_fn: Sample function - sample_fn()->conf
:param extend_fn: Extension function - extend_fn(q1, q2)->[q', ..., q"]
:param collision_fn: Collision function - collision_fn(q)->bool
:param max_time: Maximum runtime - float
:param kwargs: Keyword arguments
    :return: Tuple (path, samples, edges, colliding_vertices, colliding_edges); path is [q', ..., q"] or None if no solution was found
"""
# TODO: compute parameters using start, goal, and sample_fn statistics
# TODO: multi-query motion planning
start_time = time.time()
# TODO: can embed pose and/or points on the robot for other distances
if weights is None:
weights = np.ones(len(start))
distance_fn = get_distance_fn(weights, p_norm=p_norm)
# TODO: can compute cost between waypoints from extend_fn
samples = []
while len(samples) < num_samples:
conf = sample_fn()
if (distance_fn(start, conf) + distance_fn(conf, goal)) < max_cost:
samples.append(conf)
start_index, end_index = 0, 1
samples[start_index] = start
samples[end_index] = goal
cost_fn = lambda v1, v2: distance_fn(samples[v1], samples[v2])
    vertices, edges = compute_graph(samples, weights=weights, p_norm=p_norm, **kwargs)
neighbors_from_index = {v: set() for v in vertices}
for v1, v2 in edges:
neighbors_from_index[v1].add(v2)
colliding_vertices, colliding_edges = {}, {}
def neighbors_fn(v1):
for v2 in neighbors_from_index[v1]:
if not colliding_vertices.get(v2, False) and not colliding_edges.get((v1, v2), False):
yield v2
if not lazy:
for vertex in vertices:
check_vertex(vertex, samples, colliding_vertices, collision_fn)
for vertex1, vertex2 in edges:
check_edge(vertex1, vertex2, samples, colliding_edges, collision_fn, extend_fn)
visited = dijkstra(end_index, neighbors_fn, cost_fn)
heuristic_fn = lambda v: visited[v].g if v in visited else INF
path = None
while (elapsed_time(start_time) < max_time) and (path is None): # TODO: max_attempts
# TODO: extra cost to prioritize reusing checked edges
lazy_path = wastar_search(start_index, end_index, neighbors_fn=neighbors_fn,
cost_fn=cost_fn, heuristic_fn=heuristic_fn,
max_cost=max_cost, max_time=max_time-elapsed_time(start_time))
if lazy_path is None:
break
cost = sum(cost_fn(v1, v2) for v1, v2 in get_pairs(lazy_path))
print('Length: {} | Cost: {:.3f} | Vertices: {} | Edges: {} | Time: {:.3f}'.format(
len(lazy_path), cost, len(colliding_vertices), len(colliding_edges), elapsed_time(start_time)))
if check_path(lazy_path, colliding_vertices, colliding_edges, samples, extend_fn, collision_fn):
path = lazy_path
    if path is None:
        # Keep the return arity consistent with the successful case below.
        return path, samples, edges, colliding_vertices, colliding_edges
solution = [start]
for q1, q2 in get_pairs(path):
solution.extend(extend_fn(samples[q1], samples[q2]))
return solution, samples, edges, colliding_vertices, colliding_edges
##################################################
def replan_loop(start_conf, end_conf, sample_fn, extend_fn, collision_fn, params_list, smooth=0, **kwargs):
if collision_fn(start_conf) or collision_fn(end_conf):
return None
from .meta import direct_path
path = direct_path(start_conf, end_conf, extend_fn, collision_fn)
if path is not None:
return path
for num_samples in params_list:
        # lazy_prm returns a tuple; the solution path (or None) is its first element
        path = lazy_prm(start_conf, end_conf, sample_fn, extend_fn, collision_fn,
                        num_samples=num_samples, **kwargs)[0]
if path is not None:
return smooth_path(path, extend_fn, collision_fn, max_iterations=smooth)
return None
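# Minimal usage sketch (illustrative, assuming an obstacle-free unit square;
# sample_fn, extend_fn, and collision_fn below are stand-ins, not part of this
# module):
#   sample_fn = lambda: tuple(np.random.uniform(0., 1., 2))
#   def extend_fn(q1, q2, n=10):
#       q1, q2 = np.array(q1), np.array(q2)
#       return [tuple(q1 + t * (q2 - q1)) for t in np.linspace(0., 1., n)[1:]]
#   collision_fn = lambda q: False
#   solution, samples, edges, cv, ce = lazy_prm(
#       (0., 0.), (1., 1.), sample_fn, extend_fn, collision_fn, num_samples=50)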
|
import argparse
import sys
from typing import Callable
# import the code from this package
import shell_command_logger
from shell_command_logger import print_color
from shell_command_logger.backports import TimeParseException
from shell_command_logger.config import InvalidConfigException
from shell_command_logger.cli import alias, check, config, log, replay, search, symlink
from shell_command_logger.main_file import set_python_main_file
from shell_command_logger.debug import init_debugging
# local files
from ..backports import Dict
class SubcommandHandlerException(Exception):
pass
class SubcommandHandler:
def __init__(self, argument_parser, subcommand_variable_name: str = "subcommand", subcommand_required: bool = False) -> None:
self.ap = argument_parser
self.subcommand_variable_name = subcommand_variable_name
self.ap_subparsers = self.ap.add_subparsers(metavar="SUBCOMMAND", required=subcommand_required, dest=subcommand_variable_name)
        # Maps from subcommand names to the corresponding main functions
self.main_function_map: Dict[str, Callable] = {}
def register_module(self, module) -> None:
for name in module.SUBCOMMAND_NAMES:
ap_module = self.ap_subparsers.add_parser(name, **module.ARG_PARSER_OPTIONS)
module.populate_agrument_parser(ap_module)
if name in self.main_function_map:
raise SubcommandHandlerException(f"The subcommand '{name}' is specified twice")
self.main_function_map[name] = module.subcommand_main
def subcommand_main(self, args) -> int:
subcommand_name = getattr(args, self.subcommand_variable_name)
if not subcommand_name:
# If no subcommand is specified, we show the help
self.ap.print_help()
return 1
fn_main = self.main_function_map.get(subcommand_name)
if fn_main:
return fn_main(args)
else:
raise SubcommandHandlerException(f"No subcommand with name '{subcommand_name}' registered")
def main(main_python_file: str) -> None:
# Register the calling binaries path
set_python_main_file(main_python_file)
if symlink_name := shell_command_logger.cli.log.get_name_when_called_by_symlink():
exit_code = shell_command_logger.cli.log.record_command_when_called_by_symlink(symlink_name, sys.argv[1:])
sys.exit(exit_code)
# Setting up argument parser
ap = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="The shell-command-logger (scl) allows you to record commands. Afterwards the recorded commands can be replay and searched.",
epilog=f"Installed version: {shell_command_logger.get_version_string()}\nDocumentation: https://shell-command-logger.six-two.dev/"
)
ap.add_argument("-V", "--version", action="version", version=shell_command_logger.get_version_string())
ap.add_argument("-d", "--debug", action="store_true", help="print debugging information")
handler = SubcommandHandler(ap)
for module in [alias, check, config, log, replay, search, symlink]:
handler.register_module(module)
# Run the selected submodule
args = ap.parse_args()
if args.debug:
init_debugging(True)
try:
exit_code = handler.subcommand_main(args)
except InvalidConfigException as ex:
print_color("Your configuration is not valid:", "red", bold=True)
print_color(str(ex), "red", bold=True)
print_color("Hint: You can use 'scl config --defaults' to reset your configuration to the defaults", "yellow")
exit_code = 1
except TimeParseException as ex:
print_color(str(ex), "red", bold=True)
exit_code = 1
sys.exit(exit_code)
|
#!/usr/bin/env python3
import argparse
import glob
import numpy as np
import os.path
np.seterr(invalid='ignore') # ignore invalid-value warnings (e.g. 0/0 -> nan) in this script
DATA_DIR = '../data'
STATS_FILES = glob.glob(os.path.join(DATA_DIR, 'olim*.txt'))
def get_problem_shape(header):
return tuple(int(s.split('=')[1]) for s in header.split(','))
def process_line(line):
ijk_str, line = line.split(':')
i, j, k = map(int, ijk_str.split(','))
visits_str, line_str, tri_str, tetra_str = line.split(',')
return np.array([
int(visits_str.split('=')[1]),
int(line_str.split('=')[1]),
*tuple(map(int, tri_str.split('=')[1].split('/'))),
*tuple(map(int, tetra_str.split('=')[1].split('/')))])
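# Example (the line format is inferred from the parsing above; the numbers are
# illustrative): a line such as
#   "1,2,3: visits=10, line=4, tri=2/5, tetra=1/3"
# parses to np.array([10, 4, 2, 5, 1, 3]).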
def init_stats_arrays(n):
return np.empty((n, n, n)), np.empty((n, n, n)), np.empty((n, n, n)), \
np.empty((n, n, n)), np.empty((n, n, n)), np.empty((n, n, n))
def load_stats_file(path):
with open(path) as f:
lines = f.readlines()
header, lines = lines[0], lines[1:]
d, w, h = get_problem_shape(header)
return np.array(list(map(process_line, lines))).reshape(d, w, h, 6)
def stats_file_path_to_size(s):
    return int(os.path.basename(s).split('.')[1])
def stats_file_path_to_olim_and_size(s):
    olim, size_str = os.path.basename(s).split('.')[:2]
    size = int(size_str)
    return olim, size
def build_table(max_n=np.inf, quiet=False):
table = dict()
for path in STATS_FILES:
olim, n = stats_file_path_to_olim_and_size(path)
if n > max_n or 'rhr' not in olim:
continue
        if not quiet:
            print('- %s, n = %d' % (olim, n))
if olim not in table:
table[olim] = dict()
if n not in table[olim]:
table[olim][n] = dict()
        stats = load_stats_file(path)
        def get_ratio_mean(i):
            return np.nanmean(stats[:, :, :, i]/stats[:, :, :, 0])
table[olim][n]['avg_visits'] = stats[:,:,:,0].mean()
table[olim][n]['avg_line'] = get_ratio_mean(1)
table[olim][n]['avg_tri_nd'] = get_ratio_mean(2)
table[olim][n]['avg_tri_tot'] = get_ratio_mean(3)
table[olim][n]['avg_tetra_nd'] = get_ratio_mean(4)
table[olim][n]['avg_tetra_tot'] = get_ratio_mean(5)
return table
def make_tabular_lines(table):
olims = list(table.keys())
ns = sorted(table[olims[0]].keys())
keys = list(table[olims[0]][ns[0]].keys())
lines = []
lines.append(r'\begin{tabular}{c|r|r|r|rr|rr}')
lines.append('&' + ' & '.join([
r'\multirow{2}{*}{\centering $n$}',
r'\multirow{2}{*}{Avg. Visits}',
r'\multirow{2}{*}{$d = 0$}',
r'\multicolumn{2}{c}{$d = 1$}',
r'\multicolumn{2}{c}{$d = 2$}']) + r' \\')
lines.append(
' & '.join(
['& & & '] +
([r'\multicolumn{1}{c}{Nondeg.}', r'\multicolumn{1}{c}{Total}'] * 2)) +
r' \\')
for olim in olims:
lines.append(r'\midrule')
for i, n in enumerate(ns):
line = (r'& %d & ' % n) + ' & '.join([
'%0.4f' % x for x in [table[olim][n][key] for key in keys]]) + \
r' \\'
if i == 0:
line = (r'\multirow{%d}{*}{\texttt{%s}} ' % (
len(ns), olim.replace('_', r'\_'))) + line
lines.append(line)
lines.append(r'\midrule')
lines.append(r'\end{tabular}')
return lines
def output_table(table, output_path):
with open(output_path, 'w') as f:
for line in make_tabular_lines(table):
print(line, file=f)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('output_path', type=str)
args = parser.parse_args()
output_path = args.output_path
table = build_table()
output_table(table, output_path)
|
"""Python Cookbook
Chapter 13, recipe 4 settings
"""
class Configuration:
"""
Generic Configuration
"""
url = {
'scheme': 'http',
'netloc': 'forecast.weather.gov',
'path': '/shmrn.php'
}
class Bahamas(Configuration):
"""
Weather forecast for Offshore including the Bahamas
"""
query = {'mz': ['AMZ117', 'AMZ080']}
class Chesapeake(Configuration):
"""
    Weather for Chesapeake Bay
"""
query = {'mz': ['ANZ532']}
|
import hashlib
password2hash = b"REDACTED"
hashresult = hashlib.md5(password2hash).digest()
sha1 = hashlib.sha1(hashresult)
sha224 = hashlib.sha224(sha1.digest())
for i in range(0, 10):
sha1 = hashlib.sha1(sha224.digest())
sha224 = hashlib.sha224(sha1.digest())
output = sha224.hexdigest()
print("output: " + output) |
"""Virtual cameras compliant with the glTF 2.0 specification as described at
https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#reference-camera
Author: Matthew Matl
"""
import abc
import numpy as np
import six
import sys
from .constants import DEFAULT_Z_NEAR, DEFAULT_Z_FAR
@six.add_metaclass(abc.ABCMeta)
class Camera(object):
"""Abstract base class for all cameras.
Note
----
Camera poses are specified in the OpenGL format,
where the z axis points away from the view direction and the
x and y axes point to the right and up in the image plane, respectively.
Parameters
----------
znear : float
The floating-point distance to the near clipping plane.
zfar : float
The floating-point distance to the far clipping plane.
``zfar`` must be greater than ``znear``.
name : str, optional
The user-defined name of this object.
"""
def __init__(self,
znear=DEFAULT_Z_NEAR,
zfar=DEFAULT_Z_FAR,
name=None):
self.name = name
self.znear = znear
self.zfar = zfar
@property
def name(self):
"""str : The user-defined name of this object.
"""
return self._name
@name.setter
def name(self, value):
if value is not None:
value = str(value)
self._name = value
@property
def znear(self):
"""float : The distance to the near clipping plane.
"""
return self._znear
@znear.setter
def znear(self, value):
value = float(value)
if value < 0:
raise ValueError('z-near must be >= 0.0')
self._znear = value
@property
def zfar(self):
"""float : The distance to the far clipping plane.
"""
return self._zfar
@zfar.setter
def zfar(self, value):
value = float(value)
if value <= 0 or value <= self.znear:
raise ValueError('zfar must be >0 and >znear')
self._zfar = value
@abc.abstractmethod
def get_projection_matrix(self, width=None, height=None):
"""Return the OpenGL projection matrix for this camera.
Parameters
----------
width : int
Width of the current viewport, in pixels.
height : int
Height of the current viewport, in pixels.
"""
pass
class PerspectiveCamera(Camera):
"""A perspective camera for perspective projection.
Parameters
----------
yfov : float
The floating-point vertical field of view in radians.
znear : float
The floating-point distance to the near clipping plane.
If not specified, defaults to 0.05.
zfar : float, optional
The floating-point distance to the far clipping plane.
``zfar`` must be greater than ``znear``.
If None, the camera uses an infinite projection matrix.
aspectRatio : float, optional
The floating-point aspect ratio of the field of view.
If not specified, the camera uses the viewport's aspect ratio.
name : str, optional
The user-defined name of this object.
"""
def __init__(self,
yfov,
znear=DEFAULT_Z_NEAR,
zfar=None,
aspectRatio=None,
name=None):
super(PerspectiveCamera, self).__init__(
znear=znear,
zfar=zfar,
name=name,
)
self.yfov = yfov
self.aspectRatio = aspectRatio
@property
def yfov(self):
"""float : The vertical field of view in radians.
"""
return self._yfov
@yfov.setter
def yfov(self, value):
value = float(value)
if value <= 0.0:
raise ValueError('Field of view must be positive')
self._yfov = value
@property
def zfar(self):
"""float : The distance to the far clipping plane.
"""
return self._zfar
@zfar.setter
def zfar(self, value):
if value is not None:
value = float(value)
if value <= 0 or value <= self.znear:
raise ValueError('zfar must be >0 and >znear')
self._zfar = value
@property
def aspectRatio(self):
"""float : The ratio of the width to the height of the field of view.
"""
return self._aspectRatio
@aspectRatio.setter
def aspectRatio(self, value):
if value is not None:
value = float(value)
if value <= 0.0:
raise ValueError('Aspect ratio must be positive')
self._aspectRatio = value
def get_projection_matrix(self, width=None, height=None):
"""Return the OpenGL projection matrix for this camera.
Parameters
----------
width : int
Width of the current viewport, in pixels.
height : int
Height of the current viewport, in pixels.
"""
aspect_ratio = self.aspectRatio
if aspect_ratio is None:
if width is None or height is None:
raise ValueError('Aspect ratio of camera must be defined')
aspect_ratio = float(width) / float(height)
a = aspect_ratio
t = np.tan(self.yfov / 2.0)
n = self.znear
f = self.zfar
P = np.zeros((4,4))
P[0][0] = 1.0 / (a * t)
P[1][1] = 1.0 / t
P[3][2] = -1.0
if f is None:
P[2][2] = -1.0
P[2][3] = -2.0 * n
else:
P[2][2] = (f + n) / (n - f)
P[2][3] = (2 * f * n) / (n - f)
return P
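# Example (illustrative): a 60-degree vertical FOV with the default near plane
# and an infinite far plane (zfar=None):
#   cam = PerspectiveCamera(yfov=np.pi / 3.0)
#   P = cam.get_projection_matrix(width=640, height=480)
#   # P[1][1] == 1/tan(pi/6) ~= 1.732 and P[0][0] == P[1][1] * 480/640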
class OrthoCamera(Camera):
"""Simulate Opengl glOrtho.
Parameters
----------
xmag : float
The floating-point horizontal magnification of the view.
ymag : float
The floating-point vertical magnification of the view.
znear : float
The floating-point distance to the near clipping plane.
If not specified, defaults to 0.05.
zfar : float
The floating-point distance to the far clipping plane.
``zfar`` must be greater than ``znear``.
If not specified, defaults to 100.0.
name : str, optional
The user-defined name of this object.
"""
def __init__(self,
xmag,
ymag,
zmag,
name=None):
super(OrthoCamera, self).__init__(
name=name,
)
self.xmag = xmag
self.ymag = ymag
self.zmag = zmag
@property
def xmag(self):
"""float : The horizontal magnification of the view.
"""
return self._xmag
@xmag.setter
def xmag(self, value):
value = float(value)
self._xmag = value
@property
def ymag(self):
"""float : The vertical magnification of the view.
"""
return self._ymag
@ymag.setter
def ymag(self, value):
value = float(value)
self._ymag = value
@property
def zmag(self):
"""float : The vertical magnification of the view.
"""
return self._zmag
@zmag.setter
def zmag(self, value):
value = float(value)
self._zmag = value
def get_projection_matrix(self, width=None, height=None):
"""Return the OpenGL projection matrix for this camera.
Parameters
----------
width : int
Width of the current viewport, in pixels.
Unused in this function.
height : int
Height of the current viewport, in pixels.
Unused in this function.
"""
xmag = self.xmag
ymag = self.ymag
zmag = self.zmag
P = np.zeros((4,4))
P[0][0] = 1.0 / xmag
P[1][1] = 1.0 / ymag
P[2][2] = 1.0 / zmag
P[3][3] = 1.0
return P
class OrthographicCamera(Camera):
"""A perspective camera for perspective projection.
Parameters
----------
xmag : float
The floating-point horizontal magnification of the view.
ymag : float
The floating-point vertical magnification of the view.
znear : float
The floating-point distance to the near clipping plane.
If not specified, defaults to 0.05.
zfar : float
The floating-point distance to the far clipping plane.
``zfar`` must be greater than ``znear``.
If not specified, defaults to 100.0.
name : str, optional
The user-defined name of this object.
"""
def __init__(self,
xmag,
ymag,
znear=DEFAULT_Z_NEAR,
zfar=DEFAULT_Z_FAR,
name=None):
super(OrthographicCamera, self).__init__(
znear=znear,
zfar=zfar,
name=name,
)
self.xmag = xmag
self.ymag = ymag
@property
def xmag(self):
"""float : The horizontal magnification of the view.
"""
return self._xmag
@xmag.setter
def xmag(self, value):
value = float(value)
if value <= 0.0:
raise ValueError('X magnification must be positive')
self._xmag = value
@property
def ymag(self):
"""float : The vertical magnification of the view.
"""
return self._ymag
@ymag.setter
def ymag(self, value):
value = float(value)
if value <= 0.0:
raise ValueError('Y magnification must be positive')
self._ymag = value
@property
def znear(self):
"""float : The distance to the near clipping plane.
"""
return self._znear
@znear.setter
def znear(self, value):
value = float(value)
# if value <= 0:
# raise ValueError('z-near must be > 0.0')
self._znear = value
def get_projection_matrix(self, width=None, height=None):
"""Return the OpenGL projection matrix for this camera.
Parameters
----------
width : int
Width of the current viewport, in pixels.
Unused in this function.
height : int
Height of the current viewport, in pixels.
Unused in this function.
"""
xmag = self.xmag
ymag = self.ymag
# If screen width/height defined, rescale xmag
if width is not None and height is not None:
xmag = width / height * ymag
n = self.znear
f = self.zfar
P = np.zeros((4,4))
P[0][0] = 1.0 / xmag
P[1][1] = 1.0 / ymag
P[2][2] = 2.0 / (n - f)
P[2][3] = (f + n) / (n - f)
P[3][3] = 1.0
return P
class OrthographicCamera2D(Camera):
"""A perspective camera for orthographic projection.
Parameters
----------
xmag : float
The floating-point horizontal magnification of the view.
ymag : float
The floating-point vertical magnification of the view.
znear : float
The floating-point distance to the near clipping plane.
If not specified, defaults to 0.05.
zfar : float
The floating-point distance to the far clipping plane.
``zfar`` must be greater than ``znear``.
If not specified, defaults to 100.0.
name : str, optional
The user-defined name of this object.
"""
def __init__(self,
xmag,
ymag,
name=None):
super(OrthographicCamera2D, self).__init__(
name=name,
)
self.xmag = xmag
self.ymag = ymag
@property
def xmag(self):
"""float : The horizontal magnification of the view.
"""
return self._xmag
@xmag.setter
def xmag(self, value):
value = float(value)
if value <= 0.0:
raise ValueError('X magnification must be positive')
self._xmag = value
@property
def ymag(self):
"""float : The vertical magnification of the view.
"""
return self._ymag
@ymag.setter
def ymag(self, value):
value = float(value)
if value <= 0.0:
raise ValueError('Y magnification must be positive')
self._ymag = value
def get_projection_matrix(self, width=None, height=None):
"""Return the OpenGL projection matrix for this camera.
Parameters
----------
width : int
Width of the current viewport, in pixels.
Unused in this function.
height : int
Height of the current viewport, in pixels.
Unused in this function.
"""
xmag = self.xmag
ymag = self.ymag
# If screen width/height defined, rescale xmag
if width is not None and height is not None:
xmag = width / height * ymag
P = np.zeros((4,4))
P[0][0] = 1.0 / xmag
P[1][1] = 1.0 / ymag
P[2][2] = -1.0
P[2][3] = 0.0
P[3][3] = 1.0
return P
class IntrinsicsCamera(Camera):
"""A perspective camera with custom intrinsics.
Parameters
----------
fx : float
X-axis focal length in pixels.
fy : float
Y-axis focal length in pixels.
cx : float
X-axis optical center in pixels.
cy : float
Y-axis optical center in pixels.
znear : float
The floating-point distance to the near clipping plane.
If not specified, defaults to 0.05.
zfar : float
The floating-point distance to the far clipping plane.
``zfar`` must be greater than ``znear``.
If not specified, defaults to 100.0.
name : str, optional
The user-defined name of this object.
"""
def __init__(self,
fx,
fy,
cx,
cy,
znear=DEFAULT_Z_NEAR,
zfar=DEFAULT_Z_FAR,
name=None):
super(IntrinsicsCamera, self).__init__(
znear=znear,
zfar=zfar,
name=name,
)
self.fx = fx
self.fy = fy
self.cx = cx
self.cy = cy
@property
def fx(self):
"""float : X-axis focal length in meters.
"""
return self._fx
@fx.setter
def fx(self, value):
self._fx = float(value)
@property
def fy(self):
"""float : Y-axis focal length in meters.
"""
return self._fy
@fy.setter
def fy(self, value):
self._fy = float(value)
@property
def cx(self):
"""float : X-axis optical center in pixels.
"""
return self._cx
@cx.setter
def cx(self, value):
self._cx = float(value)
@property
def cy(self):
"""float : Y-axis optical center in pixels.
"""
return self._cy
@cy.setter
def cy(self, value):
self._cy = float(value)
def get_projection_matrix(self, width, height):
"""Return the OpenGL projection matrix for this camera.
Parameters
----------
width : int
Width of the current viewport, in pixels.
height : int
Height of the current viewport, in pixels.
"""
width = float(width)
height = float(height)
cx, cy = self.cx, self.cy
fx, fy = self.fx, self.fy
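        # On macOS the default framebuffer is typically 2x (retina) the window
        # size; the doubling below compensates for that (assumed intent).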
if sys.platform == 'darwin':
cx = self.cx * 2.0
cy = self.cy * 2.0
fx = self.fx * 2.0
fy = self.fy * 2.0
P = np.zeros((4,4))
P[0][0] = 2.0 * fx / width
P[1][1] = 2.0 * fy / height
P[0][2] = 1.0 - 2.0 * cx / (width - 1.0)
P[1][2] = 2.0 * cy / (height - 1.0) - 1.0
P[3][2] = -1.0
n = self.znear
f = self.zfar
if f is None:
P[2][2] = -1.0
P[2][3] = -2.0 * n
else:
P[2][2] = (f + n) / (n - f)
P[2][3] = (2 * f * n) / (n - f)
return P
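# Example (illustrative): pinhole intrinsics for a 640x480 image, focal length
# 500 px, optical center at the image center (values assume the non-darwin
# branch above):
#   cam = IntrinsicsCamera(fx=500., fy=500., cx=320., cy=240.)
#   P = cam.get_projection_matrix(width=640, height=480)
#   # P[0][0] == 2*500/640 == 1.5625 and P[1][1] == 2*500/480 ~= 2.083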
__all__ = ['Camera', 'PerspectiveCamera', 'OrthographicCamera',
'IntrinsicsCamera']
|
#!/usr/bin/env python
import sys
import os
import numpy as np
import matplotlib.pyplot as plt
import math
from math import factorial
import tf
data1 = np.loadtxt("vicon.txt", skiprows=2)
time1 = data1[:,0] - data1[0,0]
vx_vicon = data1[:,1]
vy_vicon = data1[:,2]
# yaw = data1[:,3]
# qx = data1[:,4]
# qy = data1[:,5]
# qz = data1[:,6]
# qw = data1[:,7]
# wz_vicon = np.zeros(len(qx)-1)
# outtxt = open("../results/vicon_RotCwz.txt",'w')
# for i in xrange (len(qx)-10):
# # q1 = tf.transformations.quaternion_from_euler(0, 0, yaw[i])
# # q2 = tf.transformations.quaternion_from_euler(0, 0, yaw[i+1])
# q1 = (qx[i], qy[i], qz[i], qw[i])
# q2 = (qx[i+1], qy[i+1], qz[i+1], qw[i+1])
# dq = tf.transformations.quaternion_multiply(tf.transformations.quaternion_inverse(q2), q1)
# euler = tf.transformations.euler_from_quaternion(dq)
# d_yaw = euler[2]
# dt = (time1[i+10]-time1[i])/10
# # if dt<0.01:
# # wz_vicon[i] = wz_vicon[i-1]
# # else:
# wz_vicon[i] = -d_yaw/dt
# outtxt.write(str.format("{0:.9f} ", data1[i,0]))
# outtxt.write(str.format("{0:.9f} ", wz_vicon[i]))
# outtxt.write('0 ')
# # outtxt.write(str.format("{0:.9f} ", yaw))
# outtxt.write('0 ')
# outtxt.write('0 ')
# outtxt.write('0 ')
# outtxt.write('0 ')
# outtxt.write('0\n')
# outtxt.close()
# print dt
data2 = np.loadtxt("px4.txt", skiprows=2)
time2 = data2[:,0] - data2[0,0]
vx_of = data2[:,1]
vy_of = data2[:,2]
data3 = np.loadtxt("cf.txt", skiprows=0)
time3 = data3[:,0] - data3[0,0]
vx_cf = 0.75*data3[:,1]
vy_cf = 0.75*data3[:,2]
wz_cf = data3[:,1]
# for i in xrange (4, len(vx_cf)):
# vx_cf[i] = 0.5*vx_cf[i]+0.2*vx_cf[i-1]+0.15*vx_cf[i-2]+0.10*vx_cf[i-3]+0.05*vx_cf[i-4]
# vy_cf[i] = 0.5*vy_cf[i]+0.2*vy_cf[i-1]+0.15*vy_cf[i-2]+0.10*vy_cf[i-3]+0.05*vy_cf[i-4]
#data curve smoothing filter
"""
def savitzky_golay(y, window_size, order, deriv=0, rate=1):
try:
window_size = np.abs(np.int(window_size))
order = np.abs(np.int(order))
except ValueError, msg:
raise ValueError("window_size and order have to be of type int")
if window_size % 2 != 1 or window_size < 1:
raise TypeError("window_size size must be a positive odd number")
if window_size < order + 2:
raise TypeError("window_size is too small for the polynomials order")
order_range = range(order+1)
half_window = (window_size -1) // 2
# precompute coefficients
b = np.mat([[k**i for i in order_range] for k in range(-half_window, half_window+1)])
m = np.linalg.pinv(b).A[deriv] * rate**deriv * factorial(deriv)
# pad the signal at the extremes with
# values taken from the signal itself
firstvals = y[0] - np.abs( y[1:half_window+1][::-1] - y[0] )
lastvals = y[-1] + np.abs(y[-half_window-1:-1][::-1] - y[-1])
y = np.concatenate((firstvals, y, lastvals))
return np.convolve( m[::-1], y, mode='valid')
"""
if __name__ == "__main__":
# vy_filt = savitzky_golay(vy_of, 51, 3)
plt.figure()
plt.subplot(211)
plt.plot(time2, vx_of, 'y', label="PX4Flow")
plt.plot(time3, vx_cf, 'b', label="correlation flow")
plt.plot(time1, vx_vicon, c='r', linewidth=2.0, label="ground truth")
plt.xlabel("time[s]")
plt.ylabel("speed[m/s]")
legend = plt.legend(loc='upper right')
plt.subplot(212)
plt.plot(time2, vy_of, c='y', label="PX4Flow")
plt.plot(time3, vy_cf, c='b', label="correlation flow")
plt.plot(time1, vy_vicon, c='r', linewidth=2.0, label="ground truth")
plt.xlabel("time[s]")
plt.ylabel("speed[m/s]")
legend = plt.legend(loc='upper right')
# plt.plot(time1[1:], wz_vicon, c='r', linewidth=2.0, label="ground truth")
# plt.plot(time3, wz_cf, 'b', label="correlation flow")
# plt.xlabel("time[s]")
# plt.ylabel("z-angular rate[rad/s]")
# legend = plt.legend(loc='lower right')
plt.show()
|
import re
import sys
from . import PIL_class
from .DNA_nupack_classes import group
from ..utils import error, match
def load_spec(filename):
"""Load a PIL style input specification."""
f = open(filename, "r")
# Create new specification object ...
spec = PIL_class.Spec()
# ... and populate it with the contents of the file.
for line in f:
# Strip comments and whitespace
line = re.sub(r"#.*\n", "", line)
line = line.strip()
# Skip empty lines
if not line:
continue
#print line,
# Read the command name off (if there is a command name)
command = line.split()[0]
if command == "sequence":
name, template = parse_seq(line)
spec.add_seq(name, template)
elif command in ("super-sequence", "sup-sequence"):
name, sub_seq_names = parse_sup_seq(line)
spec.add_sup_seq(name, sub_seq_names)
elif command == "strand":
name, seq_names, dummy = parse_strand(line)
spec.add_strand(name, seq_names, dummy)
elif command == "structure":
name, strand_names, struct, params = parse_struct(line)
spec.add_struct(name, strand_names, struct, params)
elif command == "equal":
seq_names = parse_equal(line)
spec.add_equal(seq_names)
elif command == "kinetic":
pass
else:
error("Parse Error in file '%s': Command '%s' not valid.\n%s" % (filename, command, line)) # TODO: more info
return spec
def parse_seq(line):
"""Parse sequence statements"""
m = match(r"sequence ([\w-]+) = ([^:\s]*)( : .*)?", line)
if not m:
error("Invalid sequence statement format:\n"
"Should be: sequence <name> = <constraint template>\n"
"Was: %s" % line)
name, template = m.group(1, 2)
if not set(template).issubset( set(group.keys()) ):
error("Sequence's constraint template must be in allowed alphabet (%r).\nLine: %s" % (list(group.keys()), line))
return name, template
def parse_sup_seq(line):
"""Parse super-sequence statements"""
m = match(r"sup(er)?-sequence ([\w-]+) = ([^:]*)( : .*)?", line)
if not m:
error("Invalid super-sequence statement format:\n"
"Should be: super-sequence <name> = <sub-sequence names>\n"
"Was: %s" % line)
name, seq_names = m.group(2, 3)
seq_names = seq_names.split()
return name, seq_names
def parse_strand(line):
"""Parse strand statements"""
m = match(r"strand (\[dummy\] )?([\w-]+) = ([^:]*)( : .*)?", line)
if not m:
error("Invalid strand statement format:\n"
"Should be: strand <name> = <sequence names>\n"
"or: strand [dummy] <name> = <sequence names>\n"
"Was: %s" % line)
dummy, name, seq_names = m.group(1, 2, 3)
dummy = bool(dummy)
seq_names = seq_names.split()
return name, seq_names, dummy
def parse_struct(line):
"""Parse structure statements"""
m = match(r"structure (\[(\w+)\])? ([\w-]+) = ([^:]*) : (.*)", line)
if not m:
error("Invalid structure statement format:\n"
"Should be: structure <name> = <strand names> : <secondary structure>\n"
"or: structure [<parameters>] <name> = <strand names> : <secondary structure>\n"
"Was: %s" % line)
params, name, strand_names, struct = m.group(2, 3, 4, 5)
strand_names = strand_names.split("+")
strand_names = [sname.strip() for sname in strand_names] # Clean off whitespace
struct = struct.replace(" ", "").replace("\t", "") # Clear out whitespace
if not set(struct).issubset( set(".()+") ):
        error('Secondary structure must only use allowed alphabet (".()+").\nLine: %s' % line)
return name, strand_names, struct, params
def parse_equal(line):
"""Parse super-sequence statements"""
seq_names = line.split()[1:]
return seq_names
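# Example input lines accepted by load_spec (illustrative; sequence templates
# must use the alphabet defined by DNA_nupack_classes.group):
#   sequence a = NNNNNN
#   super-sequence s = a a
#   strand A = a s
#   strand [dummy] B = s
#   structure S = A + B : ((((((....))))))
#   equal a s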
if __name__ == "__main__":
import sys
print(load_spec(sys.argv[1]))
|
# -*- coding: utf-8 -*-
"""
@author : Wang Meng
@github : https://github.com/tianpangji
@software : PyCharm
@file : urls.py
@create : 2020/9/9 20:07
"""
from django.urls import path, include
from drf_admin.utils import routers
from monitor.views import users, service, error, ip, crud
router = routers.AdminRouter()
router.register(r'ip', ip.IpBlackListViewSet, basename="ip") # IP blacklist management
urlpatterns = [
    path('users/', users.OnlineUsersListAPIView.as_view()), # online user monitoring
    path('service/', service.ServiceMonitorAPIView.as_view()), # service monitoring
    path('error/', error.ErrorLogAPIView.as_view()), # error log monitoring
    path('crud/', crud.CRUDListAPIView.as_view()), # CRUD change records
path('', include(router.urls)),
]
|
"""
Implementation of attack methods. Running this file as a program will
apply the attack to the model specified by the config file and store
the examples in an .npy file.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import json
import tensorflow as tf
import numpy as np
import cifar10_input
from pgd_attack import LinfPGDAttack
from art.attacks import FastGradientMethod
from art.attacks import DeepFool
from art.attacks import AdversarialPatch
from art.attacks import HopSkipJump
from art.attacks import CarliniL2Method
from art.attacks import CarliniLInfMethod
from art.attacks import ProjectedGradientDescent
from art.classifiers import TFClassifier
from art.utils import load_cifar10
parser = argparse.ArgumentParser(description='TF CIFAR PGD')
parser.add_argument('--model-ckpt', default='/data/hzzheng/Code.baseline-atta.cifar10_challenge.10.21/data-model/m.10.model/checkpoint-44000',
help='Log path.')
parser.add_argument('--gpuid', type=int, default=0,
help='The ID of GPU.')
parser.add_argument('--atta-loop', type=int, default=10,
help='ATTA attack measurement loop.')
parser.add_argument('--model-name', default='m.3.model',
help='model name')
parser.add_argument('--model-dir', default='./models/data-model/',
help='The dir of the saved model')
parser.add_argument('--ckpt-step', type=int, default=4000,
help='checkpoint step')
parser.add_argument('--ckpt', type=int, default=0,
help='checkpoint')
parser.add_argument('--ckpt-start', type=int, default=0,
help='checkpoint')
parser.add_argument('--ckpt-end', type=int, default=69000,
help='checkpoint')
parser.add_argument('--batch-size', type=int, default=128,
help='checkpoint')
parser.add_argument('--data-size', type=int, default=10000,
help='checkpoint')
args = parser.parse_args()
GPUID = args.gpuid
os.environ["CUDA_VISIBLE_DEVICES"] = str(GPUID)
# log_file = open(args.log_path, 'w')
if __name__ == '__main__':
import json
from model import Model
with open('config.json') as config_file:
config = json.load(config_file)
model = Model('eval')
logits = model.pre_softmax
input_ph = model.x_input
labels_ph = model.y_input
loss = model.mean_xent
saver = tf.train.Saver()
# Setup the parameters
epsilon = 0.031 # Maximum perturbation
batch_size = 128
model_ckpt = args.model_ckpt
# (x_train, y_train), (x_test, y_test), min_pixel_value, max_pixel_value = load_cifar10()
# x_test = x_test[0:20, :]
# y_test = y_test[0:20]
data_path = config['data_path']
cifar = cifar10_input.CIFAR10Data(data_path)
x_test = cifar.eval_data.xs[0:50, :]
y_test = cifar.eval_data.ys[0:50]
# print(x_test.shape)
# print(min_pixel_value)
# print(max_pixel_value)
with tf.Session() as sess:
saver.restore(sess, model_ckpt)
classifier = TFClassifier(input_ph=input_ph, logits=logits, sess=sess,
loss=loss, output_ph=labels_ph)
predictions = classifier.predict(x_test)
print(x_test[0])
# print(predictions)
print(np.argmax(predictions, axis=1))
accuracy = np.sum(np.argmax(predictions, axis=1) == y_test) / len(y_test)
print('Accuracy on benign test examples: {}%'.format(accuracy * 100))
# FGSM
attack = FastGradientMethod(classifier=classifier, eps=epsilon, eps_step=epsilon/10)
x_test_adv = attack.generate(x=x_test/255.0)
predictions = classifier.predict(x_test_adv*255.0)
accuracy = np.sum(np.argmax(predictions, axis=1) == y_test) / len(y_test)
print('Accuracy on adversarial test examples: {}%'.format(accuracy * 100))
adv_crafter_deepfool = DeepFool(classifier, batch_size=batch_size, epsilon=epsilon)
x_test_adv = adv_crafter_deepfool.generate(x=x_test/255.0)
predictions = classifier.predict(x_test_adv*255.0)
print(np.argmax(predictions, axis=1))
accuracy = np.sum(np.argmax(predictions, axis=1) == y_test) / len(y_test)
print('Accuracy on adversarial test examples: {}%'.format(accuracy * 100))
# pgd 20
adv_crafter_pgd_20 = ProjectedGradientDescent(classifier, eps=epsilon, eps_step=0.00775, max_iter=20, batch_size=batch_size)
x_test_adv = adv_crafter_pgd_20.generate(x=x_test/255.0)
# print(x_test_adv)
predictions = classifier.predict(x_test_adv*255.0)
accuracy = np.sum(np.argmax(predictions, axis=1) == y_test) / len(y_test)
print('Accuracy on adversarial test examples: {}%'.format(accuracy * 100))
# C&W 20
# adv_crafter_cwinf = CarliniLInfMethod(classifier, eps=epsilon, learning_rate=epsilon/10, max_iter=20, batch_size=batch_size)
# x_test_adv = adv_crafter_cwinf.generate(x=x_test/255.0)
# predictions = classifier.predict(x_test_adv*255.0)
# accuracy = np.sum(np.argmax(predictions, axis=1) == y_test) / len(y_test)
# print('Accuracy after C&W attack: {}%'.format(accuracy * 100))
|
# -*- coding: utf-8 -*-
__all__ = ["Base_cir", "Orig_cir", "Elev_cir", "Slope_cir", "Tpi_cir"]
from .base_cir import Base_cir
from .orig_cir import Orig_cir
from .elev_cir import Elev_cir
from .slope_cir import Slope_cir
from .tpi_cir import Tpi_cir
|
"""Add TranslationSyncLogs
Revision ID: 21e927fdf78c
Revises: 44d704928d8c
Create Date: 2015-04-20 23:34:51.724151
"""
# revision identifiers, used by Alembic.
revision = '21e927fdf78c'
down_revision = '44d704928d8c'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('TranslationSyncLogs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('start_datetime', sa.DateTime(), nullable=True),
sa.Column('end_datetime', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(u'ix_TranslationSyncLogs_end_datetime', 'TranslationSyncLogs', ['end_datetime'], unique=False)
op.create_index(u'ix_TranslationSyncLogs_start_datetime', 'TranslationSyncLogs', ['start_datetime'], unique=False)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_index(u'ix_TranslationSyncLogs_start_datetime', table_name='TranslationSyncLogs')
op.drop_index(u'ix_TranslationSyncLogs_end_datetime', table_name='TranslationSyncLogs')
op.drop_table('TranslationSyncLogs')
### end Alembic commands ###
|
#! /usr/local/bin/python2.7
# -*- coding: utf-8 -*-
import sys
import os.path
import random
reload(sys)
sys.setdefaultencoding('utf-8')
def load_voc_list(filename):
voc = []
with open(filename, 'r') as fd:
for line in fd:
voc.append(line.strip())
return voc
if __name__ == "__main__" :
f_name = sys.argv[1]
count = 0
with open(f_name, 'r') as fd:
for line in fd:
sents = line.strip().replace(',,,', ',').split("。")
for sent in sents:
q = sent.strip().replace(" ", "")
l = sent.strip().replace(" ", "||")
if len(q.decode('utf-8')) <2:
continue
if len(q.decode('utf-8'))>128:
count = count + 1
print q+"\t"+l
print count
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from unittest import TestCase
from uw_sws.util import fdao_sws_override
from uw_pws.util import fdao_pws_override
from uw_sws.registration import get_schedule_by_regid_and_term
from uw_sws.term import get_current_term
@fdao_sws_override
@fdao_pws_override
class SWSIndependentStudy(TestCase):
def test_instructor_list(self):
term = get_current_term()
schedule = get_schedule_by_regid_and_term(
"BB000000000000000000000000000004", term)
        self.assertEqual(len(schedule.sections), 1)
        instructors = schedule.sections[0].meetings[0].instructors
        self.assertEqual(len(instructors), 2)
        self.assertEqual(instructors[0].uwregid,
                         'A9D2DDFA6A7D11D5A4AE0004AC494FFE')
        self.assertEqual(instructors[1].uwregid,
                         'FBB38FE46A7C11D5A4AE0004AC494FFE')
|
"""
Basic statistics calculations on binary classification rank order arrays.
Following https://en.wikipedia.org/wiki/Evaluation_of_binary_classifiers
"""
import numpy as np
#combinatorics_helpers as ch
class Stat:
"abstract superclass for shared behavior"
def default_curve(self):
return AreaUnderCurve(TH, self)
def curve_points(self, array, close=True):
return self.default_curve().curve_points(array, close)
def curve_area(self, array, points=None):
return self.default_curve().curve_area(array, points)
class Length(Stat):
"data size"
abbreviation = "L"
def __call__(self, array, threshold=None):
return len(array)
L = Length()
class IndexStandardDeviation(Stat):
"Standard deviation of hit indices."
abbreviation = "ISD"
def __call__(self, array, threshold=None):
(nz,) = np.nonzero(array)
if len(nz) > 1:
return nz.std()
else:
return 0.0
ISD = IndexStandardDeviation()
class AverageSquaredRunLength(Stat):
"average length of string of 0's or 1's."
abbreviation = "SRL"
def __call__(self, array, threshold=None):
ln = len(array)
if ln < 1:
return 0
n_runs = 0
sum_squared_lengths = 0
current_run_start = 0
for i in range(1, ln):
if array[i-1] != array[i]:
n_runs += 1
current_run_length = i - current_run_start
sum_squared_lengths += current_run_length ** 2
current_run_start = i
else:
pass
# final run
n_runs += 1
current_run_length = ln - current_run_start
sum_squared_lengths += current_run_length ** 2
return sum_squared_lengths / float(n_runs)
SRL = AverageSquaredRunLength()
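# Worked example: the array [1, 1, 0, 1] has runs of lengths 2, 1, and 1, so
# SRL gives (2**2 + 1**2 + 1**2) / 3 == 2.0.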
class Selected(Stat):
"number of entries before the threshold"
abbreviation = "TH"
def __call__(self, array, threshold=None):
return threshold
TH = Selected()
class ConditionalPositive(Stat):
"The number of real positive cases in the data"
abbreviation = "P"
def __call__(self, array, threshold=None):
return np.count_nonzero(array)
P = ConditionalPositive()
class ConditionalNegative(Stat):
"The number of real negative cases in the data"
abbreviation = "N"
def __call__(self, array, threshold=None):
return len(array) - np.count_nonzero(array)
N = ConditionalNegative()
class TruePositive(Stat):
"The number of trues before the threshold"
abbreviation = "TP"
def __call__(self, array, threshold):
return P(array[:threshold])
TP = TruePositive()
class TrueNegative(Stat):
"The number of falses after the threshold"
abbreviation = "TN"
def __call__(self, array, threshold):
return N(array[threshold:])
TN = TrueNegative()
class FalsePositive(Stat):
"The number of trues after the threshold"
abbreviation = "FP"
def __call__(self, array, threshold):
return P(array[threshold:])
FP = FalsePositive()
class FalseNegative(Stat):
"The number of falses before the threshold"
abbreviation = "FN"
def __call__(self, array, threshold):
return N(array[:threshold])
FN = FalseNegative()
class Recall(Stat):
"proportion of trues before the threshold of all trues"
abbreviation = "recall"
def __call__(self, array, threshold):
Pa = P(array, threshold)
if Pa > 0:
return TP(array, threshold) / Pa
return 1.0 # default
TPR = Recall()
recall = TPR
class FalsePositiveRate(Stat):
    "proportion of actual negatives ranked before the threshold (FP / N)"
    abbreviation = "FPR"
    def __call__(self, array, threshold):
        # Note: in this module FN counts the falses *before* the threshold,
        # which is exactly the false-positive count that FPR needs.
        fp = FN(array, threshold)
        if fp > 0:
            n = N(array)
            return fp / n
        return 0.0 # default
FPR = FalsePositiveRate()
# skip specificity TNR for now
class Precision(Stat):
"proportion of trues before the threshold of all results before the threshold"
abbreviation = "precision"
def __call__(self, array, threshold):
if threshold <= 0:
return 1.0
return TP(array, threshold) / threshold
PPV = Precision()
precision = PPV
# skip NPV for now
# skip FNR for now
# skip FPR for now
class F1score(Stat):
    "harmonic mean of precision and recall"
    abbreviation = 'F1'
    def __call__(self, array, threshold, epsilon=1e-10):
        pr = precision(array, threshold)
        re = recall(array, threshold)
        denominator = pr + re
        if abs(denominator) < epsilon:
            return 0.0
        return 2.0 * (pr * re) / denominator
F1 = F1score()
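# Worked example: with precision 2/3 and recall 2/5 (the example array in
# test() at threshold 3), F1 == 2*(2/3)*(2/5) / (2/3 + 2/5) == 0.5.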
class PhiCoefficient(Stat):
"""
https://en.wikipedia.org/wiki/Matthews_correlation_coefficient
"""
abbreviation = "PHI"
def __call__(self, array, threshold, epsilon=1e-10):
tp = TP(array, threshold)
tn = TN(array, threshold)
fp = FP(array, threshold)
fn = FN(array, threshold)
den2 = (tp + fp) * (tp + fn) * (tn + fp) * (tn + fn)
if den2 < epsilon:
return 0.0 # ?????
numerator = tp * tn - fp * fn
den = np.sqrt(den2)
return numerator / den
PHI = PhiCoefficient()
# Specials
class AveragePreference(Stat):
"Average distance of true values before threshold to the threshold"
abbreviation = "ATP"
def __call__(self, array, threshold=None):
if threshold is None:
threshold = len(array)
sum = 0.0
count = 0
for i in range(threshold):
if array[i]:
sum += (threshold - i)
count += 1
if count == 0:
#print("count of 0", (count, threshold, sum))
return 0.0 # default
if count == threshold:
#print ("count == threshold", (count, threshold, sum))
return 1.0
# normalize to [0..1]
unnormalized = sum / count
minimum = (count - 1) * 0.5
maximum = threshold - minimum
numerator = unnormalized - minimum
denominator = maximum - minimum
stat = numerator / denominator
#print ("normalized", (count, threshold, sum, unnormalized, minimum, maximum, numerator, denominator, stat))
return stat
ATP = AveragePreference()
class AverageLogPreference(Stat):
abbreviation = "ALP"
# use caching for mins and maxes
mins = {}
maxes = {}
def get_max(self, for_length, count):
assert for_length >= count
maxes = self.maxes
key = (for_length, count)
if key in maxes:
return maxes[key]
test_array = self.max_array(for_length, count)
(result, count1) = self.summation(test_array)
#assert count1 == count
#print (" max", test_array, count, result)
maxes[key] = result
return result
def max_array(self, for_length, count):
test_array = np.zeros((for_length,))
test_array[:count] = 1
return test_array
def get_min(self, for_length, count):
assert for_length >= count
mins = self.mins
key = (for_length, count)
if key in mins:
return mins[key]
test_array = self.min_array(for_length, count)
(result, count1) = self.summation(test_array)
#assert count1 == count
mins[key] = result
#print (" min", test_array, count, result)
return result
def min_array(self, for_length, count):
test_array = np.zeros((for_length,))
test_array[-count:] = 1
return test_array
def summation(self, array):
sum = 0.0
count = 0
ln = len(array)
for i in range(len(array)):
if array[i]:
sum += np.log(ln - i)
count += 1
# ("summation", array, count, sum)
return (sum, count)
def __call__(self, array, threshold, epsilon=1e-6):
truncated = array[:threshold]
(unnormalized, count) = self.summation(truncated)
if count < 1:
return 0.0
if count == threshold:
return 1.0
#unnormalized = logs / count
minimum = self.get_min(threshold, count)
maximum = self.get_max(threshold, count)
assert minimum - epsilon <= unnormalized <= maximum + epsilon, repr((array, minimum, unnormalized, maximum))
numerator = unnormalized - minimum
denominator = maximum - minimum
stat = 0.0
if denominator > epsilon:
stat = numerator / denominator
#print ("normalized", (truncated, count, threshold, minimum, maximum, numerator, denominator, stat))
return stat
ALP = AverageLogPreference()
class NormalizedSquaredRunLength(AverageLogPreference):
abbreviation = "NSRL"
# use caching for mins and maxes
mins = {}
maxes = {}
def max_array(self, for_length, count):
        test_array = np.zeros((for_length,), dtype=int)
test_array[:count] = 1
return test_array
def min_array(self, for_length, count):
# invert if more than half full
assert count <= for_length
if count > for_length/2.0:
return 1 - self.min_array(for_length, for_length - count)
        array = np.zeros((for_length,), dtype=int)
if count < 1:
return array
shift = (for_length + 1) / float(count + 1)
#print ("for_length, count, shift", for_length, count, shift)
#assert shift >= 1.0, "I'm not considering shift < 1.0 now. unimplemented."
for i in range(count):
findex = (i + 1) * shift
index = int(findex)
array[index-1] = 1
#print(array, index, findex)
return array
def min_array0(self, for_length, count):
# doesn't work for for_length=2, count=1
        array = np.zeros((for_length,), dtype=int)
if count < 1:
return array
assert count <= for_length
(shift, extra) = divmod(for_length+1, count+1)
lastindex = 0
for i in range(count):
offset = shift
if extra > 0:
offset += 1
extra -= 1
index = lastindex + offset
array[index] = 1
lastindex = index
return array
def min_array_broken(self, for_length, count, test_array=None, min_index=None, max_index=None):
# recursively put a 1 at the center (wrong)
if test_array is None:
            test_array = np.zeros((for_length,), dtype=int)
min_index = 0
max_index = for_length
assert 0 <= count <= max_index - min_index
assert min_index <= max_index <= for_length
if count > 0:
count1 = count - 1
center = int((min_index + max_index)/2)
test_array[center] = 1
left_count = int(count1 / 2)
right_count = count1 - left_count
self.min_array(for_length, left_count, test_array, min_index, center)
self.min_array(for_length, right_count, test_array, center+1, max_index)
return test_array
def summation(self, array):
sum = SRL(array)
count = int(array.sum())
return (sum, count)
NSRL = NormalizedSquaredRunLength()
class NormalizedIndexSTD(AverageLogPreference):
abbreviation = "NSTD"
# use caching for mins and maxes
mins = {}
maxes = {}
def max_array(self, for_length, count):
h1 = int(count / 2)
h2 = count - h1
test_array = np.zeros((for_length,))
test_array[:h1] = 1
test_array[-h2:] = 1
return test_array
def summation(self, array):
sum = ISD(array)
count = int(array.sum())
return (sum, count)
NSTD = NormalizedIndexSTD()
class VariancePenalizedPreference(AverageLogPreference):
abbreviation = "VPP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
count = int(array.sum())
ln = len(array)
asp = ASP(array, ln)
nstd = NSTD(array, ln)
sum = asp * (1.0 - nstd)
return (sum, count)
VPP = VariancePenalizedPreference()
class RunLengthPenalizedPreference(AverageLogPreference):
abbreviation = "RLPP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
count = int(array.sum())
ln = len(array)
asp = ASP(array, ln)
nsrl = NSRL(array, ln)
#sum = asp * (1.0 - nsrl)
sum = 1 - (1 - asp) * (1 + nsrl) / 2.0
return (sum, count)
RLPP = RunLengthPenalizedPreference()
class RunLengthEnhancedPreference(AverageLogPreference):
abbreviation = "REPP"
# use caching for mins and maxes
mins = {}
maxes = {}
#def get_min(self, for_length, count):
# return 0.0 # fake it
def summation(self, array):
count = int(array.sum())
ln = len(array)
asp = ASP(array, ln)
nsrl = NSRL(array, ln)
enhancement_factor = 0.5
#asp1 = 1.0 - asp
sum = asp
if asp > enhancement_factor:
sum = asp + enhancement_factor * (asp - enhancement_factor) * nsrl
return (sum, count)
REPP = RunLengthEnhancedPreference()
class VarianceEnhancedPreference(AverageLogPreference):
abbreviation = "VEP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
count = int(array.sum())
ln = len(array)
asp = ASP(array, ln)
nstd = NSTD(array, ln)
# arbitrary parameter prevents stat from going to 1.0 when variance is 1.0
enhancement_factor = 0.5
asp1 = 1.0 - asp
sum = asp + enhancement_factor * asp1 * nstd
return (sum, count)
VEP = VarianceEnhancedPreference()
class AverageSquaredPreference(AverageLogPreference):
abbreviation = "ASP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
sum = 0.0
count = 0
ln = len(array)
for i in range(len(array)):
if array[i]:
sum += (ln - i) ** 2
count += 1
# ("summation", array, count, sum)
return (sum, count)
ASP = AverageSquaredPreference()
class AverageExponentialPreference(AverageLogPreference):
abbreviation = "AEP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
sum = 0.0
count = 0
ln = len(array)
for i in range(len(array)):
if array[i]:
sum += np.exp(ln - i)
count += 1
# ("summation", array, count, sum)
return (sum, count)
AEP = AverageExponentialPreference()
class ReversedLogPreference(AverageLogPreference):
abbreviation = "RLP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
sum = 0.0
count = 0
ln = len(array)
M = np.log(ln + 1)
for i in range(ln):
if array[i]:
sum += M - np.log(i+1)
count += 1
# ("summation", array, count, sum)
return (sum, count)
RLP = ReversedLogPreference()
class SquaredFalsePenalty(AverageLogPreference):
#xxxx currently broken
abbreviation = "SFP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
sum = 0.0
count = 0
ln = len(array)
for i in range(ln):
if not array[i]:
sum += i * i
count += 1
# ("summation", array, count, sum)
return (sum, ln - count)
SFP = SquaredFalsePenalty()
class LogFalsePenalty(AverageLogPreference):
#xxxx currently broken
abbreviation = "LFP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
sum = 0.0
count = 0
ln = len(array)
for i in range(ln):
if not array[i]:
sum += np.log(i+1)
count += 1
# ("summation", array, count, sum)
return (sum, ln - count)
LFP = LogFalsePenalty()
class SqrtFalsePenalty(AverageLogPreference):
#xxxx currently broken
abbreviation = "sqrtFP"
# use caching for mins and maxes
mins = {}
maxes = {}
def summation(self, array):
sum = 0.0
count = 0
ln = len(array)
for i in range(ln):
if not array[i]:
sum += np.sqrt(i+1)
count += 1
# ("summation", array, count, sum)
return (sum, ln - count)
sqrtFP = SqrtFalsePenalty()
class AreaUnderCurve(Stat):
"area under the curve for two statistics"
def __init__(self, x_stat, y_stat, abbreviation=None):
if abbreviation is None:
abbreviation = "AUC(%s, %s)" % (x_stat.abbreviation, y_stat.abbreviation)
self.abbreviation = abbreviation
self.x_stat = x_stat
self.y_stat = y_stat
def curve_points(self, array, close=True):
points = []
x_stat = self.x_stat
y_stat = self.y_stat
for threshold in range(len(array) + 1):
x = x_stat(array, threshold)
y = y_stat(array, threshold)
points.append([x, y])
if close:
# drop verticals to y=0
[x, y] = points[-1]
points.append([x, 0])
[x, y] = points[0]
points.append([x, 0])
return points
def curve_area(self, array, points=None):
if points is None:
points = self.curve_points(array, close=True)
#(points)
result = 0.0
[last_x, last_y] = points[-1]
for point in points:
[x, y] = point
base = x - last_x
height = 0.5 * (y + last_y)
result += base * height
[last_x, last_y] = point
return result
def __call__(self, array, threshold=None):
return self.curve_area(array)
AUPR = AreaUnderCurve(recall, precision, "AUPR")
AUROC = AreaUnderCurve(FPR, recall, "AUROC")
class MinimizedAUPR(AverageLogPreference):
#xxxx currently broken
abbreviation = "mAUPR"
# use caching for mins and maxes
mins = {}
maxes = {}
list_prefix = [0,0]
def summation(self, array):
L = list(array)
count = int(array.sum())
        muted = np.array(self.list_prefix + L, dtype=int)
sum1 = AUPR(muted)
return (sum1, count)
mAUPR = MinimizedAUPR()
class MaximizedAUPR(MinimizedAUPR):
#xxxx currently broken
abbreviation = "MAUPR"
# use caching for mins and maxes
mins = {}
maxes = {}
list_prefix = [1,1]
MAUPR = MaximizedAUPR()
ALL_METRICS = [
#L,
#TH,
#P,
#N,
#TP,
#TN,
#FP,
#FN,
#TPR,
#PPV,
RLPP,
REPP,
ISD,
SRL,
NSTD,
NSRL,
VPP,
VEP,
F1,
#PHI,
ATP,
ALP,
ASP,
RLP,
AEP,
SFP,
LFP,
AUPR,
AUROC,
MAUPR,
mAUPR,
sqrtFP,
]
ABBREVIATION_TO_STATISTIC = {}
for statistic in ALL_METRICS:
ABBREVIATION_TO_STATISTIC[statistic.abbreviation] = statistic
def RankOrder(*values):
"convenience"
    return np.array(values, dtype=int)
def test():
# threshold 1 2 3 4 5 6 7 8 9
example = RankOrder(1,0,1,1,0,1,0,0,0,1)
assert L(example) == 10
ca = L.curve_area(example)
assert ca == 100, repr(ca)
assert P(example) == 5
assert N(example) == 5
assert TH(example, 3) == 3
assert TP(example, 3) == 2
assert TN(example, 3) == 4
assert FP(example, 3) == 3
assert FN(example, 3) == 1
assert TPR(example, 3) == 2/5.0
assert PPV(example, 3) == 2/3.0
assert recall(RankOrder(1,0), 1) == 1.0
p = precision(RankOrder(1,0), 1)
assert p == 1.0, repr(p)
    f1 = F1(example, 3)
    assert f1 == 1.0/2.0, repr(f1)
phi = PHI(example, 3)
assert phi != 0, repr(phi) # smoke test
aupr = AUPR(RankOrder(1,0))
assert aupr == 1.0, repr(aupr)
aupr = AUPR(RankOrder(0,1,0,1,0,1,0))
assert int(aupr * 100) == 37, repr(aupr)
print()
auroc = AUROC(RankOrder(1,0))
assert auroc == 1.0, repr(auroc)
auroc = AUROC(RankOrder(0,1,0,1,0,1,0))
assert int(auroc * 100) == 50, repr(auroc)
auroc = AUROC(RankOrder(0,0,0,1,1))
assert int(auroc * 100) == 0, repr(auroc)
# more smoke tests
for metric in ALL_METRICS:
for threshold in range(len(example) + 1):
try:
assert metric(example, threshold) is not None, repr((None, example, threshold))
except:
print("exception at", threshold, "for", metric.abbreviation, metric.__doc__)
raise
"""
for i in (0,1):
for j in (0,1):
for k in (0,1):
for m in (0,1):
test = RankOrder(i, j, k, m)
print()
for threshold in range(5):
aa = PHI(test, threshold)
print(test, "at", threshold, "gives", aa)
#print("mins", ALP.mins)
#print("maxes", ALP.maxes)
return
"""
test = ATP(RankOrder(1, 1, 0, 0, 0), 4)
assert test == 1.0, repr(test)
test = ATP(RankOrder(1, 0, 1, 0, 0), 4)
assert test == 2.5 / 3.0, repr(test)
test = ATP(RankOrder(1, 0, 1, 0, 0), 3)
assert test == 0.75, repr(test)
test = ATP(RankOrder(1, 0, 1, 0, 0), 2)
assert test == 1.0, repr(test)
test = ATP(RankOrder(1, 0, 1, 0, 0), 1)
assert test == 1.0, repr(test)
test = ATP(RankOrder(1, 0, 1, 0, 0), 0)
assert test == 0, repr(test)
test = ALP(RankOrder(1, 1, 0, 0, 0), 4)
assert test == 1.0, repr(test)
test = ALP(RankOrder(1, 0, 1, 0, 0), 4)
assert 0 < test < 1
test = ALP(RankOrder(1, 0, 1, 0, 0), 3)
assert 0 < test < 1
test = ATP(RankOrder(1, 0, 1, 0, 0), 2)
assert test == 1.0, repr(test)
test = ALP(RankOrder(1, 0, 1, 0, 0), 1)
assert test == 1.0, repr(test)
test = ALP(RankOrder(1, 0, 1, 0, 0), 0)
assert test == 0, repr(test)
print ("all okay")
if __name__ == "__main__":
test() |
import os
import json
import numpy as np
import concurrent.futures
from MolRep.Utils.config_from_dict import Config
from MolRep.Evaluations.DataloaderWrapper import DataLoaderWrapper
from MolRep.Utils.utils import *
class KFoldAssessment:
"""
Class implementing a sufficiently general framework to do model ASSESSMENT
"""
def __init__(self, outer_folds, model_selector, exp_path, model_configs, dataset_config, outer_processes=2):
self.outer_folds = outer_folds
self.outer_processes = outer_processes
self.model_selector = model_selector
self.model_configs = model_configs # Dictionary with key:list of possible values
self.dataset_config = dataset_config
# Create the experiments folder straight away
if self.outer_folds is None:
self.outer_folds = 1
self.exp_path = exp_path
self.__NESTED_FOLDER = os.path.join(exp_path, str(self.outer_folds) + '_NESTED_CV')
self.__OUTER_FOLD_BASE = 'OUTER_FOLD_'
self._OUTER_RESULTS_FILENAME = 'outer_results.json'
self._ASSESSMENT_FILENAME = 'assessment_results.json'
def process_results(self):
outer_TR_scores = []
outer_TS_scores = []
assessment_results = {}
for i in range(1, self.outer_folds+1):
try:
config_filename = os.path.join(self.__NESTED_FOLDER, self.__OUTER_FOLD_BASE + str(i),
self._OUTER_RESULTS_FILENAME)
with open(config_filename, 'r') as fp:
outer_fold_scores = json.load(fp)
outer_TR_scores.append(outer_fold_scores['OUTER_TR'])
outer_TS_scores.append(outer_fold_scores['OUTER_TS'])
except Exception as e:
print(e)
outer_TR_scores = np.array(outer_TR_scores)
outer_TS_scores = np.array(outer_TS_scores)
assessment_results['avg_TR_score'] = outer_TR_scores.mean()
assessment_results['std_TR_score'] = outer_TR_scores.std()
assessment_results['avg_TS_score'] = outer_TS_scores.mean()
assessment_results['std_TS_score'] = outer_TS_scores.std()
with open(os.path.join(self.__NESTED_FOLDER, self._ASSESSMENT_FILENAME), 'w') as fp:
json.dump(assessment_results, fp)
def risk_assessment(self, dataset, experiment_class, debug=False, other=None):
"""
:param experiment_class: the kind of experiment used
:param debug:
:param other: anything you want to share across processes
:return: An average over the outer test folds. RETURNS AN ESTIMATE, NOT A MODEL!!!
"""
if not os.path.exists(self.__NESTED_FOLDER):
os.makedirs(self.__NESTED_FOLDER)
pool = concurrent.futures.ProcessPoolExecutor(max_workers=self.outer_processes)
for outer_k in range(self.outer_folds):
# Create a separate folder for each experiment
kfold_folder = os.path.join(self.__NESTED_FOLDER, self.__OUTER_FOLD_BASE + str(outer_k + 1))
if not os.path.exists(kfold_folder):
os.makedirs(kfold_folder)
json_outer_results = os.path.join(kfold_folder, self._OUTER_RESULTS_FILENAME)
if not os.path.exists(json_outer_results):
if not debug:
pool.submit(self._risk_assessment_helper, dataset, outer_k,
experiment_class, kfold_folder, debug, other)
else: # DEBUG
self._risk_assessment_helper(dataset, outer_k, experiment_class, kfold_folder, debug, other)
else:
# Do not recompute experiments for this outer fold.
print(f"File {json_outer_results} already present! Shutting down to prevent loss of previous experiments")
continue
# Create a separate folder for each experiment
# kfold_folder = os.path.join(self.__NESTED_FOLDER, self.__OUTER_FOLD_BASE + str(outer_k + 1))
# if not os.path.exists(kfold_folder):
# os.makedirs(kfold_folder)
# else:
# # Do not recompute experiments for this outer fold.
# print(f"Outer folder {outer_k} already present! Shutting down to prevent loss of previous experiments")
# continue
        pool.shutdown()  # wait for the batch of configs to terminate
self.process_results()
def _risk_assessment_helper(self, dataset, outer_k, experiment_class, exp_path, debug=False, other=None):
dataset_getter = DataLoaderWrapper(dataset, outer_k)
best_config = self.model_selector.model_selection(dataset_getter, experiment_class, exp_path,
self.model_configs, self.dataset_config, debug, other)
# Retrain with the best configuration and test
        experiment = experiment_class(best_config['config'], self.dataset_config, exp_path)
# Set up a log file for this experiment (run in a separate process)
logger = Logger.Logger(str(os.path.join(experiment.exp_path, 'experiment.log')), mode='a')
dataset_getter.set_inner_k(None) # needs to stay None
training_scores, test_scores = [], []
# Mitigate bad random initializations
for i in range(5):
model_path = str(os.path.join(experiment.exp_path, f'experiment_run_{i}.pt'))
training_score, test_score = experiment.run_test(dataset_getter, logger, other={'model_path': model_path})
print(f'Final training run {i + 1}: {training_score}, {test_score}')
training_scores.append(training_score)
test_scores.append(test_score)
training_score = sum(training_scores) / 5
test_score = sum(test_scores) / 5
logger.log('End of Outer fold. TR score: ' + str(training_score) + ' TS score: ' + str(test_score))
with open(os.path.join(exp_path, self._OUTER_RESULTS_FILENAME), 'w') as fp:
json.dump({'best_config': best_config, 'OUTER_TR': training_score, 'OUTER_TS': test_score}, fp)
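# Hedged usage sketch (illustrative only; `selector`, `configs`, `ds_config`,
# `dataset`, and `MyExperiment` are hypothetical placeholders, not MolRep API):
# assessor = KFoldAssessment(outer_folds=5, model_selector=selector,
#                            exp_path='experiments', model_configs=configs,
#                            dataset_config=ds_config)
# assessor.risk_assessment(dataset, MyExperiment, debug=True)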
|
import requests
from models.web_page_status import WebPageStatus
class WebPage:
KEY_LOCATION = 'Location'
UA_TYPE_PC = "PC"
UA_TYPE_SP = "SP"
USER_AGENT = {
UA_TYPE_PC: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
UA_TYPE_SP: "Mozilla/5.0 (iPhone; CPU iPhone OS 13_2_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.0.3 Mobile/15E148 Safari/604.1"
}
def __init__(self, url):
self.url = url
def get(self, ua_type=UA_TYPE_PC, user_agent=None):
        ua = user_agent if user_agent is not None else self.USER_AGENT[ua_type]
headers = {'user-agent': ua}
response = requests.get(self.url, headers=headers)
return response
    def get_status_code(self, user_agent=None):
        results = []
        ua = user_agent if user_agent else self.USER_AGENT
        for ua_type in ua:
            headers = {'user-agent': ua[ua_type]}
            response = requests.get(self.url, headers=headers, allow_redirects=False)
            location = response.headers.get(self.KEY_LOCATION, '')
results.append(WebPageStatus(self.url, ua_type, response.status_code, location))
return results
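# Hedged usage sketch (requires network access; example.com is illustrative
# and the printed WebPageStatus fields depend on models.web_page_status):
# page = WebPage("https://example.com/")
# print(page.get(ua_type=WebPage.UA_TYPE_SP).status_code)
# for status in page.get_status_code():
#     print(status)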
|
import argparse
import os
import shutil
import time
from models import GraphModel, edge_loss
from dataset import HungarianDataset
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.distributed as dist
import torch.optim
import logging
from torch_geometric.data import DataLoader
"""
Running Commands
CUDA_VISIBLE_DEVICES=1 python3 train.py
"""
"""
Installations:
pip3 install torch_geometric
pip install lapsolver
export CUDA=cu92
pip3 install torch-scatter==latest+${CUDA} torch-sparse==latest+${CUDA} -f https://pytorch-geometric.com/whl/torch-1.5.0.html
"""
lr = 1e-1
weight_decay = 1e-4
batch_size = 64
print_freq = 20
num_train_samples = 10000
num_test_samples = 1024
num_epochs = 50
def main():
model = GraphModel()
model = torch.nn.DataParallel(model).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=lr,
weight_decay=weight_decay)
dataset = HungarianDataset(num_train_samples, mode="train")
train_loader = DataLoader(
dataset, batch_size=batch_size,
num_workers=4, pin_memory=True, drop_last=True)
test_dataset = HungarianDataset(num_test_samples, mode="test")
test_loader = DataLoader(
test_dataset, batch_size=batch_size,
num_workers=4, pin_memory=True, drop_last=True)
for epoch in range(0,num_epochs):
adjust_learning_rate(optimizer, epoch)
validate(test_loader, model, epoch)
# train for one epoch
train(train_loader, model, optimizer, epoch)
def train(train_loader, model, optimizer, epoch):
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
model.train()
end = time.time()
for i, graph in enumerate(train_loader):
data_time.update(time.time() - end)
graph.to(torch.device("cuda"))
preds = model(graph)
loss = edge_loss(preds, graph.labels)
# if epoch > 10:
# import pdb; pdb.set_trace()
# print(torch.sigmoid(preds[-1])[graph.labels>0.5])
# print(torch.sigmoid(preds[-1])[graph.labels<0.5][:100])
optimizer.zero_grad()
loss.backward()
optimizer.step()
losses.update(loss.item(), batch_size)
batch_time.update(time.time() - end)
end = time.time()
if i % print_freq == 0:
print('Epoch: [{0}][{1}/{2}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(
epoch, i, len(train_loader), batch_time=batch_time,
data_time=data_time, loss=losses))
def validate(test_loader, model, epoch):
batch_time = AverageMeter()
data_time = AverageMeter()
top_pos = AverageMeter()
top_neg = AverageMeter()
losses = AverageMeter()
model.eval()
end = time.time()
for i, graph in enumerate(test_loader):
data_time.update(time.time() - end)
graph.to(torch.device("cuda"))
preds = model(graph)
loss = edge_loss(preds, graph.labels)
losses.update(loss.item(), batch_size)
batch_time.update(time.time() - end)
end = time.time()
acc_pos, num_pos, acc_neg, num_neg = accuracy(preds[-1], graph.labels)
top_pos.update(acc_pos, num_pos)
top_neg.update(acc_neg, num_neg)
if i % print_freq == 0:
print('Test: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Pos_Acc {top_pos.val:.3f} ({top_pos.avg:.3f})\t'
'Neg_Acc {top_neg.val:.3f} ({top_neg.avg:.3f})'.format(
i, len(test_loader), batch_time=batch_time, loss=losses,
top_pos=top_pos, top_neg=top_neg))
def accuracy(preds, labels):
preds = preds.view(-1)
labels = labels.view(-1)
pos_correct = ((preds >= 0.5) & (labels > 0.5)).sum()
pos_acc = pos_correct.float()/((labels > 0.5).sum())
neg_correct = ((preds < 0.5) & (labels < 0.5)).sum()
neg_acc = neg_correct.float()/((labels < 0.5).sum())
return pos_acc,(labels > 0.5).sum(), neg_acc, (labels < 0.5).sum()
def adjust_learning_rate(optimizer, epoch):
    """Sets the learning rate to the initial LR decayed by 10 every 50 epochs"""
    new_lr = lr * (0.1 ** (epoch // 50))
for param_group in optimizer.param_groups:
param_group['lr'] = new_lr
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
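# Minimal usage sketch of AverageMeter (illustrative only):
# meter = AverageMeter()
# meter.update(0.5, n=64)  # e.g. mean batch loss over 64 samples
# meter.update(0.3, n=64)
# meter.avg  # -> 0.4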
if __name__ == '__main__':
main()
|
from stacks.daily_temperatures import daily_temperatures, daily_temperatures_brute_force
def test_daily_temperatures_brute_force():
assert daily_temperatures_brute_force([73, 74, 75, 71, 69, 72, 76, 73]) == [1, 1, 4, 2, 1, 1, 0, 0]
def test_daily_temperatures():
assert daily_temperatures([73, 74, 75, 71, 69, 72, 76, 73]) == [1, 1, 4, 2, 1, 1, 0, 0]
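# Hedged sketch of the monotonic-stack technique these tests exercise; the
# real implementation lives in stacks.daily_temperatures and may differ.
def _daily_temperatures_sketch(temps):
    answer = [0] * len(temps)
    stack = []  # indices of days still waiting for a warmer day
    for i, t in enumerate(temps):
        while stack and temps[stack[-1]] < t:
            j = stack.pop()
            answer[j] = i - j  # days waited until the first warmer day
        stack.append(i)
    return answer
def test_daily_temperatures_sketch():
    assert _daily_temperatures_sketch([73, 74, 75, 71, 69, 72, 76, 73]) == [1, 1, 4, 2, 1, 1, 0, 0]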
|
from pathlib import Path
import logging
from .logger import Logger
from .log_formatter import LogFormatter
class FileLogger(Logger):
fmt = LogFormatter(use_colour=False, output_ts=False)
logger = None
    def __init__(self, folder, format=None):
        if format is None:
            format = "%(asctime)s|%(levelname)s|%(message)s"
        formatter = logging.Formatter(format)
log_file = Path(folder, "sayn.log")
if not log_file.parent.exists():
log_file.parent.mkdir(parents=True)
handler = logging.FileHandler(log_file)
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
self.logger = logger
def print(self, s=None):
if s is not None:
if s["level"] == "info":
func = self.logger.info
elif s["level"] == "error":
func = self.logger.error
elif s["level"] == "warning":
func = self.logger.warning
else:
func = self.logger.debug
s = s["message"]
if isinstance(s, str):
s = [s]
elif not isinstance(s, list):
raise ValueError("error in logging print")
func(f"{s[0]}")
for e in s[1:]:
                for line in e.split("\n"):
                    func(f"{line}")
|
class Solution:
def projectionArea(self, grid):
"""
:type grid: List[List[int]]
:rtype: int
"""
        M, N = len(grid), len(grid[0])
        # top view: count the non-zero cells
        xy = sum(1 for i in range(M) for j in range(N) if grid[i][j] != 0)
        # front view: tallest stack in each row
        xz = sum(map(max, grid))
        # side view: tallest stack in each column
        yz = sum(max(grid[i][j] for i in range(M)) for j in range(N))
        return xy + xz + yz
grid = [[1,2],[3,4]]
p = Solution()
print(p.projectionArea(grid)) |
import json
from typing import Iterator, Dict
def load_json(file_path: str) -> Iterator[Dict]:
    """Yield records from a JSON file containing an array of objects."""
    with open(file_path, 'r') as handle:
        yield from json.load(handle)
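# Hedged usage sketch ('records.json' is a hypothetical file holding a JSON
# array of objects):
# for record in load_json('records.json'):
#     print(record)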
|
# coding=utf-8
import argparse
from configparser import ConfigParser
import codecs
class NonExistedProperty(Exception):
    def __init__(self, msg):
        super().__init__(msg)
# Note that Config class is NOT thread-safe
class Config:
__instance = None
def __init__(self):
pass
    def __new__(cls, *args, **kwd):
        if Config.__instance is None:
            Config.__instance = object.__new__(cls)
Config.__instance.cp = ConfigParser()
with codecs.open('../settings.conf', 'r', encoding='utf-8') as f:
Config.__instance.cp.read_file(f)
return Config.__instance
def get_property(self, section, option):
if self.cp.has_option(section, option):
return self.cp.get(section, option)
else:
raise NonExistedProperty("[%s] %s" %(section, option))
    def set_property(self, section, option, value):
        self.cp.set(section, option, value)
        with open('../settings.conf', 'w', encoding="utf-8") as f:
            self.cp.write(f)
def display_all_properties(self):
for section in self.cp.sections():
print("[%s]" % section)
for option in self.cp.options(section):
print("%s = %s" % (option, self.cp.get(section, option)))
print()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Interfaces of Configuration Operations.")
group2 = parser.add_mutually_exclusive_group()
group2.add_argument("-a", "--all", action="store_true",
help="Display all properties.")
group2.add_argument("-g", "--get", action="store",
help="Get property based on SECTION and OPTION.",
metavar=("SECTION","OPTION"), type=str, nargs=2)
group2.add_argument("-s", "--set", action="store",
help="Set VALUE to property of SECTION and OPTION.",
metavar=("SECTION", "OPTION", "VALUE"), type=str, nargs=3)
args = parser.parse_args()
print(args)
conf = Config()
if args.all:
conf.display_all_properties()
elif args.get is not None:
print(conf.get_property(args.get[0], args.get[1]))
elif args.set is not None:
conf.set_property(args.set[0], args.set[1], args.set[2])
|
#!/usr/bin/env python
# coding=utf-8
from flask import Flask
app = Flask(__name__)
app.config['SECRET_KEY'] = 'FUCKTHEGRADEPOINT'
import index
|
from setuptools import setup, find_packages
setup(
name="discopyro-bpl",
version="0.2",
packages=find_packages(),
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
install_requires=["docutils>=0.3"],
# metadata to display on PyPI
author="Eli Sennesh",
author_email="[email protected]",
description="A generative model of compositionality in symmetric monoidal (Kleisli) categories",
keywords="Bayesian program learning deep generative model compositionality category",
project_urls={
"Source Code": "https://github.com/neu-pml/discopyro",
},
)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
### Require Anaconda3
### ============================
### 3D FSC Software Package
### Analysis Section
### Written by Yong Zi Tan and Dmitry Lyumkis
### Downloaded from https://github.com/nysbc/Anisotropy
###
### See Paper:
### Addressing preferred specimen orientation in single-particle cryo-EM through tilting
### 10.1038/nmeth.4347
###
### Credits:
### 1) UCSF Chimera, especially Tom Goddard
### 2) mrcfile 1.0.0 by Colin Palmer (https://github.com/ccpem/mrcfile)
###
### Version 3.0 (23 July 2017)
###
### Revisions
### 1.0 - Created analysis program
### 2.0 - Combined with plotting, thresholding and sphericity
### 3.0 - New thresholding algorithm that scales better with large box sizes
### ============================
version = "3.0"
#pythonlib
import matplotlib
matplotlib.use('Agg')
import os
import numpy as np
import mrcfile
import sys
import copy
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.lines as mlines
from math import sqrt, acos, atan2, cos, sin, pi
from skimage import measure
from scipy.ndimage.filters import gaussian_filter
## Progress bar, adapted from https://gist.github.com/aubricus/f91fb55dc6ba5557fbab06119420dd6a
## Remove progress bar until stty issues are sorted out
def cartesian_to_spherical(vector):
"""Convert the Cartesian vector [x, y, z] to spherical coordinates [r, theta, phi].
The parameter r is the radial distance, theta is the polar angle, and phi is the azimuth.
@param vector: The Cartesian vector [x, y, z].
@type vector: numpy rank-1, 3D array
@return: The spherical coordinate vector [r, theta, phi].
@rtype: numpy rank-1, 3D array
"""
# The radial distance.
r = np.linalg.norm(vector)
# Unit vector.
unit = vector / r
# The polar angle.
theta = acos(unit[2])
# The azimuth.
phi = atan2(unit[1], unit[0])
# Return the spherical coordinate vector.
return np.array([r, theta, phi], np.float64)
def spherical_to_cartesian(spherical_vect, cart_vect):
"""Convert the spherical coordinate vector [r, theta, phi] to the Cartesian vector [x, y, z].
The parameter r is the radial distance, theta is the polar angle, and phi is the azimuth.
@param spherical_vect: The spherical coordinate vector [r, theta, phi].
@type spherical_vect: 3D array or list
@param cart_vect: The Cartesian vector [x, y, z].
@type cart_vect: 3D array or list
"""
# Trig alias.
sin_theta = sin(spherical_vect[1])
# The vector.
cart_vect[0] = spherical_vect[0] * cos(spherical_vect[2]) * sin_theta
cart_vect[1] = spherical_vect[0] * sin(spherical_vect[2]) * sin_theta
cart_vect[2] = spherical_vect[0] * cos(spherical_vect[1])
return cart_vect
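# Hedged sanity check (not part of the original pipeline): converting a vector
# to spherical coordinates and back should reproduce the input.
# _v = np.array([1.0, 2.0, 2.0])
# assert np.allclose(spherical_to_cartesian(cartesian_to_spherical(_v), [0.0, 0.0, 0.0]), _v)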
def StandardDeviation(input):
input_num = [float(c) for c in input]
mean = sum(input_num) / len(input_num)
diff = [a - mean for a in input_num]
sq_diff = [b ** 2 for b in diff]
ssd = sum(sq_diff)
variance = ssd / (len(input_num) - 1)
sd = sqrt(variance)
return sd
def Mean(input):
input_num = [float(c) for c in input]
mean = sum(input_num) / len(input_num)
return mean
def convert_highpassfilter_to_Fourier_Shells(ThreeDFSC,apix,highpassfilter):
a = open("Results_" + ThreeDFSC + "/ResEM" + ThreeDFSC + "OutglobalFSC.csv","r")
b = a.readlines()
b.pop(0)
globalspatialfrequency = []
globalfsc = []
for i in b:
k = (i.strip()).split(",")
globalspatialfrequency.append(float(k[0])/apix)
globalfsc.append(float(k[2]))
    highpassfilter_fouriershell = 0
    for i in range(len(globalspatialfrequency)):
        if ((1.0/globalspatialfrequency[i])*apix) <= highpassfilter:
            highpassfilter_fouriershell = max(i-1, 0)
            break
    return highpassfilter_fouriershell
def calculate_distance(p1,p2):
return sqrt((p2[0] - p1[0]) ** 2 + (p2[1] - p1[1]) ** 2 + (p2[2] - p1[2]) ** 2)
def threshold_binarize(inmrc, thresholded, thresholdedbinarized, FSCCutoff, ThresholdForSphericity, highpassfilter, apix):
# Thresholds
cutoff_fsc = float(FSCCutoff)
cutoff_binarize = float(ThresholdForSphericity)
min_cutoff = min(cutoff_fsc,cutoff_binarize)
# Read MRC
inputmrc = (mrcfile.open(inmrc)).data
# Coordinates
center = (inputmrc.shape[0]/2,inputmrc.shape[1]/2,inputmrc.shape[2]/2)
radius = int(inputmrc.shape[0]/2 + 0.5)
# Fill up new np array
boxsize = inputmrc.shape[0]
outarraythresholded = np.zeros((boxsize,)*3)
outarraythresholdedbinarized = np.zeros((boxsize,)*3)
# Find distance of all points to center
points_array = []
for i in range(boxsize):
for j in range(boxsize):
for k in range(boxsize):
dist = calculate_distance((i,j,k),center)
points_array.append([dist,i,j,k])
# Sort array
points_array.sort()
# Threshold each point locally
counter = 0
total_iterations = len(points_array)
number_of_progress_bar_updates = 200
iterations_per_progress_bar_update = int(total_iterations/number_of_progress_bar_updates)
memory_inmrc_thresholded = np.copy(inputmrc)
memory_inmrc_thresholdedbinarized = np.copy(inputmrc)
for i in points_array:
x = i[1]
y = i[2]
z = i[3]
if i[0] < highpassfilter: # Implement high pass filter
outarraythresholded[x][y][z] = inputmrc[x][y][z]
outarraythresholdedbinarized[x][y][z] = 1
memory_inmrc_thresholded[x][y][z] = 1
memory_inmrc_thresholdedbinarized[x][y][z] = 1
elif memory_inmrc_thresholded[x][y][z] < min_cutoff: # If value is smaller than the lowest cutoff, skip the calculations below to speed things up
outarraythresholded[x][y][z] = 0
outarraythresholdedbinarized[x][y][z] = 0
memory_inmrc_thresholded[x][y][z] = 0
memory_inmrc_thresholdedbinarized[x][y][z] = 0
else:
            # Enumerate the 26 neighboring voxels and their distances to the center
            twentysix_neighboring_points = []
            for dx in (-1, 0, 1):
                for dy in (-1, 0, 1):
                    for dz in (-1, 0, 1):
                        if dx == 0 and dy == 0 and dz == 0:
                            continue
                        twentysix_neighboring_points.append(
                            [calculate_distance((x + dx, y + dy, z + dz), center), x + dx, y + dy, z + dz])
            twentysix_neighboring_points.sort()
#Closest point to center
closest_x = twentysix_neighboring_points[0][1]
closest_y = twentysix_neighboring_points[0][2]
closest_z = twentysix_neighboring_points[0][3]
if memory_inmrc_thresholded[x][y][z] < cutoff_fsc:
outarraythresholded[x][y][z] = 0
memory_inmrc_thresholded[x][y][z] = 0
elif memory_inmrc_thresholded[closest_x][closest_y][closest_z] < cutoff_fsc:
outarraythresholded[x][y][z] = 0
memory_inmrc_thresholded[x][y][z] = 0
else:
outarraythresholded[x][y][z] = inputmrc[x][y][z]
if memory_inmrc_thresholdedbinarized[x][y][z] < cutoff_binarize:
outarraythresholdedbinarized[x][y][z] = 0
memory_inmrc_thresholdedbinarized[x][y][z] = 0
elif memory_inmrc_thresholdedbinarized[closest_x][closest_y][closest_z] < cutoff_binarize:
outarraythresholdedbinarized[x][y][z] = 0
memory_inmrc_thresholdedbinarized[x][y][z] = 0
else:
outarraythresholdedbinarized[x][y][z] = 1
counter += 1
mrc_write = mrcfile.new(thresholded,overwrite=True)
mrc_write.set_data(outarraythresholded.astype('<f4'))
mrc_write.voxel_size = (float(apix),float(apix),float(apix))
mrc_write.update_header_from_data()
mrc_write.close()
mrc_write = mrcfile.new(thresholdedbinarized,overwrite=True)
mrc_write.set_data(outarraythresholdedbinarized.astype('<f4'))
mrc_write.voxel_size = (float(apix),float(apix),float(apix))
mrc_write.update_header_from_data()
mrc_write.close()
def calculate_sphericity(inmrc):
# read MRC
inputmrc = (mrcfile.open(inmrc)).data
inputmrc_copy = copy.deepcopy(inputmrc)
    extended_inputmrc = np.zeros((inputmrc.shape[0]+10,inputmrc.shape[1]+10,inputmrc.shape[2]+10),dtype=np.float64) ## Had to extend it before Gaussian filter, else you might get edge effects
extended_inputmrc[6:6+inputmrc.shape[0], 6:6+inputmrc.shape[1], 6:6+inputmrc.shape[2]] = inputmrc_copy
# Gaussian filtering
# Sigma=1 works well
blurred = gaussian_filter(extended_inputmrc,sigma=1)
# Find surfaces using marching cube algorithm
verts, faces, normals, values = measure.marching_cubes_lewiner(blurred,level=0.5) ## Fixed thresholded due to Gaussian blurring
# Find surface area
surface_area = measure.mesh_surface_area(verts,faces)
# Find volume
blurred[blurred >= 0.5] = 1
blurred[blurred < 0.5] = 0
volume = np.sum(blurred)
# Calculate sphericity
sphericity = (((pi)**(1/3))*((6*volume)**(2/3)))/(surface_area)
return sphericity
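# Hedged sanity check of the sphericity formula (not in the original code):
# for an ideal sphere of radius r, V = 4/3*pi*r**3 and A = 4*pi*r**2, so the
# expression (pi**(1/3) * (6*V)**(2/3)) / A evaluates to exactly 1.
# _r = 10.0
# _V, _A = 4.0 / 3.0 * pi * _r ** 3, 4.0 * pi * _r ** 2
# assert abs((pi ** (1 / 3)) * ((6 * _V) ** (2 / 3)) / _A - 1.0) < 1e-9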
def histogram_sample(inmrc, highpassfilter):
# read MRC
inputmrc = (mrcfile.open(inmrc)).data
# coordinates
center = [inputmrc.shape[0]/2,inputmrc.shape[1]/2,inputmrc.shape[2]/2]
radius = int(inputmrc.shape[0]/2 + 0.5)
# fill up new numpy array
boxsize = inputmrc.shape[0]
outarray = np.zeros((boxsize,)*3)
outarray2 = np.zeros((boxsize,)*3)
histogram_sampling = np.empty([radius,10*10]) # write out the histogram 1D FSCs
counter = 0
for theta in np.arange(1,360,36):
#print("theta: %d" % (theta))
for phi in np.arange(1,360,36):
for r in range(radius):
# convert polar to cartesian and read mrc
spherical_vect = [r, theta, phi]
cart_vect = spherical_to_cartesian(spherical_vect, [0,0,0])
cart_vect_new = np.add(cart_vect, center)
x = int(cart_vect_new[0])
y = int(cart_vect_new[1])
z = int(cart_vect_new[2])
                # Sample the map along this ray; shells inside the high-pass
                # radius are forced to 1.
if (r > int(highpassfilter)):
histogram_sampling[r][counter] = inputmrc[x][y][z]
else:
histogram_sampling[r][counter] = 1
counter += 1
return histogram_sampling
def HistogramCreation(histogram_sampling,histogram,ThreeDFSC,apix,cutoff,sphericity,global_resolution):
stddev = []
mean = []
for i in histogram_sampling:
stddev.append(StandardDeviation(i))
mean.append(Mean(i))
#print (stddev)
#print (mean)
stdplusone = [mean[a] + stddev[a] for a in range(len(mean))]
stdminusone = [mean[a] - stddev[a] for a in range(len(mean))]
## Open Global FSC
a = open("Results_" + ThreeDFSC + "/ResEM" + ThreeDFSC + "OutglobalFSC.csv","r")
b = a.readlines()
b.pop(0)
globalspatialfrequency = []
globalfsc = []
for i in b:
k = (i.strip()).split(",")
globalspatialfrequency.append(float(k[0])/apix)
globalfsc.append(float(k[2]))
#print (len(globalspatialfrequency))
maxrange = max(globalspatialfrequency)
minrange = min(globalspatialfrequency)
## Calculate Sum of Standard Deviation
## http://stats.stackexchange.com/questions/25848/how-to-sum-a-standard-deviation
sumofvar = 0
for a in stddev:
sumofvar += a ** 2
sumofstd = sqrt(sumofvar)
#print ("\n\n")
#print ("Sum of Standard Deviation is %s" % sumofstd)
#print ("\n\n")
## Histogram
histogramlist = []
for i in range(len(histogram_sampling[0])):
for j in range(len(histogram_sampling)):
if float(histogram_sampling[j][i]) < cutoff: ##Changed to 0.5
break
else:
output = globalspatialfrequency[j]
histogramlist.append(float(output))
#print (histogramlist)
## Plotting
plt.title("Histogram and Directional FSC Plot for %s \n Sphericity = %0.3f out of 1. Global resolution = %0.2f $\AA$.\n \n \n \n" % (str(ThreeDFSC),sphericity,global_resolution))
ax1 = plt.subplot(111)
ax1.set_xlim([minrange,maxrange])
n, bins, patches = plt.hist(histogramlist, bins=10, range=(minrange,maxrange))
ax1.set_ylabel("Percentage of Per Angle FSC (%)", color="#0343df")
for tl in ax1.get_yticklabels():
tl.set_color("#0343df")
ax2 = ax1.twinx()
ax2.set_ylim([0,1])
ax2.set_xlim([minrange,maxrange])
ax2.plot(globalspatialfrequency, globalfsc, linewidth=3, color="#e50000")
ax2.plot(globalspatialfrequency, stdplusone, linewidth=1, linestyle="--", color="#15b01a")
ax2.plot(globalspatialfrequency, stdminusone, linewidth=1, linestyle="--", color="#15b01a")
ax2.plot((minrange,maxrange), (cutoff, cutoff), linestyle="--", color="#929591")
ax2.set_ylabel("Directional Fourier Shell Correlation", color='#e50000')
for tl in ax2.get_yticklabels():
tl.set_color("r")
blue_patch = mpatches.Patch(color="#0343df", label="Histogram of Directional FSC")
red_solid_line = mlines.Line2D([], [], color="#e50000", linewidth=3, label="Global FSC")
    green_dotted_line = mlines.Line2D([], [], color="#15b01a", linestyle="--", label=r"$\pm$1 S.D. from Mean of Directional FSC")
#box = ax1.get_position()
#ax1.set_position([box.x0, box.y0 + box.height * 0.1, box.width, box.height * 0.9])
#ax2.set_position([box.x0, box.y0 + box.height * 0.1, box.width, box.height * 0.9])
ax1.legend(handles=[blue_patch, green_dotted_line, red_solid_line],loc='center', bbox_to_anchor=(0.5, 1.1), ncol=2)
    xlabel = ax1.set_xlabel(r"Spatial Frequency ($\AA^{-1}$)")
#plt.show()
plt.savefig("Results_" + ThreeDFSC + "/" + histogram + ".pdf", bbox_extra_artists=[xlabel], bbox_inches="tight")
plt.savefig("Results_" + ThreeDFSC + "/" + histogram + ".png", bbox_extra_artists=[xlabel], bbox_inches="tight")
#Flush out plots
plt.clf()
plt.cla()
plt.close()
## Return useful values for ChimeraOutputCreate
# Max Res, Min Res, global spatial frequency list, global FSC list
return(1/float(max(histogramlist)),1/float(min(histogramlist)),globalspatialfrequency,globalfsc)
def ChimeraOutputCreate(ThreeDFSC,apix,maxRes,minRes,fullmap,globalspatialfrequency,globalfsc,global_resolution):
## Generate Lineplot.py File
with open(os.path.realpath(__file__)[:-24] + "Chimera/lineplot_template.py") as f:
replaced1 = f.read().replace("#==apix==#", str(apix))
replaced2 = replaced1.replace("#==maxres==#", str(maxRes))
replaced3 = replaced2.replace("#==minres==#", str(minRes))
replaced4 = replaced3.replace("#==global_x==#", str(globalspatialfrequency))
replaced5 = replaced4.replace("#==global_y==#", str(globalfsc))
replaced6 = replaced5.replace("#==global_res==#",str(global_resolution))
with open("Results_" + str(ThreeDFSC) + "/Chimera/lineplot.py", "w") as f:
f.write(replaced6)
## Obtain origins for maps
# read MRCS
input3DFSC = (mrcfile.open("Results_" + str(ThreeDFSC) + "/Chimera/" + ThreeDFSC + ".mrc")).data
inputFullMap = (mrcfile.open(fullmap)).data #Full maps can be anywhere
# coordinates
center3DFSC = str(int(input3DFSC.shape[0]/2)) + "," + str(int(input3DFSC.shape[1]/2)) + "," + str(int(input3DFSC.shape[2]/2))
centerFullMap = str(int(inputFullMap.shape[0]/2)) + "," + str(int(inputFullMap.shape[1]/2)) + "," + str(int(inputFullMap.shape[2]/2))
## 3DFSCPlot_Chimera.cmd File
with open(os.path.realpath(__file__)[:-24] + "Chimera/3DFSCPlot_Chimera_Template.cmd") as f:
replaced1 = f.read().replace("#===3DFSC====#", str(os.path.basename(ThreeDFSC)) + ".mrc")
replaced2 = replaced1.replace("#==apix==#", str(apix))
replaced3 = replaced2.replace("#==Origin3DFSC==#", str(center3DFSC))
replaced4 = replaced3.replace("#==OriginFullMap==#", str(centerFullMap))
replaced5 = replaced4.replace("#===FullMap====#", str(os.path.basename(fullmap)))
with open("Results_" + str(ThreeDFSC) + "/Chimera/3DFSCPlot_Chimera.cmd", "w") as f:
f.write(replaced5)
def check_globalFSC(ThreeDFSC,apix):
a = open("Results_" + ThreeDFSC + "/ResEM" + ThreeDFSC + "OutglobalFSC.csv","r")
b = a.readlines()
b.pop(0)
globalspatialfrequency = []
globalfsc = []
for i in b:
k = (i.strip()).split(",")
globalspatialfrequency.append(float(k[0])/apix)
globalfsc.append(float(k[2]))
shells_below_pt143 = 0
total_shells_past_first_pt143 = 0
resolution_below_pt143 = []
for i in range(len(globalfsc)):
if (float(globalfsc[i]) < 0.143):
shells_below_pt143 += 1
resolution_below_pt143.append((1/(globalspatialfrequency[i]))*apix)
if (shells_below_pt143 > 0):
total_shells_past_first_pt143 += 1
if (shells_below_pt143 == 0):
print ("\n\033[1;31;40mWarning: Your global half-map FSC does not fall below 0.143. You may have reached the Nyquist sampling limit. Try unbinning your data. \033[0;37;40m")
resolution_below_pt143.append(apix)
if (shells_below_pt143 != total_shells_past_first_pt143):
print ("\n\033[1;31;40mWarning: Your global half-map FSC rises above 0.143 after the first crossing. Check your refinement and masking. \033[0;37;40m")
return resolution_below_pt143[0] ## Returns global resolution
def main(halfmap1,halfmap2,fullmap,apix,ThreeDFSC,dthetaInDegrees,histogram,FSCCutoff,ThresholdForSphericity,HighPassFilter):
# Part 00
# Warnings and checks. Invisible to user unless something is wrong
global_resolution = check_globalFSC(ThreeDFSC,apix)
# Part 01
print ("\n\033[1;34;40mAnalysis Step 01: Generating thresholded and thresholded + binarized maps. \033[0;37;40m")
print ("These maps can be used to make figures, and are required for calculating sphericity.")
FourierShellHighPassFilter = convert_highpassfilter_to_Fourier_Shells(ThreeDFSC,apix,HighPassFilter)
threshold_binarize("Results_" + ThreeDFSC + "/ResEM" + ThreeDFSC + "Out.mrc", "Results_" + ThreeDFSC + "/" + ThreeDFSC + "_Thresholded.mrc", "Results_" + ThreeDFSC + "/" + ThreeDFSC + "_ThresholdedBinarized.mrc", FSCCutoff, ThresholdForSphericity,FourierShellHighPassFilter,apix)
print ("Results_" + ThreeDFSC + "/" + ThreeDFSC + "_Thresholded.mrc at " + str(FSCCutoff) + " cutoff and Results_" + ThreeDFSC + "/" + ThreeDFSC + "_ThresholdedBinarized.mrc at " + str(ThresholdForSphericity) + " cutoff for sphericity generated.")
# Part 02
print ("\n\033[1;34;40mAnalysis Step 02: Calculating sphericity. \033[0;37;40m")
sphericity = calculate_sphericity("Results_" + ThreeDFSC + "/" + ThreeDFSC + "_ThresholdedBinarized.mrc")
print ("Sphericity is %0.2f out of 1. 1 represents a perfect sphere." % (sphericity))
# Part 03
print ("\n\033[1;34;40mAnalysis Step 03: Generating Histogram. \033[0;37;40m")
histogram_sampling = histogram_sample("Results_" + ThreeDFSC + "/ResEM" + ThreeDFSC + "Out.mrc",FourierShellHighPassFilter)
# Part 04
maxRes, minRes, globalspatialfrequency, globalfsc = HistogramCreation(histogram_sampling,histogram,ThreeDFSC,apix,FSCCutoff,sphericity,global_resolution)
print ("Results_" + ThreeDFSC + "/" + histogram + ".pdf generated.")
# Part 05
print ("\n\033[1;34;40mAnalysis Step 04: Generating Output Files for Chimera Viewing of 3DFSC \033[0;37;40m")
os.system("mkdir Results_" + str(ThreeDFSC) + "/Chimera")
os.system("cp Results_" + str(ThreeDFSC) + "/" + str(ThreeDFSC) + ".mrc " + " Results_" + str(ThreeDFSC) + "/Chimera/")
os.system("cp " + fullmap + " Results_" + str(ThreeDFSC) + "/Chimera/")
ChimeraOutputCreate(ThreeDFSC,apix,maxRes,minRes,fullmap,globalspatialfrequency,globalfsc,global_resolution)
print ("Results_" + str(ThreeDFSC) + "/Chimera/3DFSCPlot_Chimera.cmd and Results_" + str(ThreeDFSC) + "/Chimera/lineplot.py generated.")
print ("To view in Chimera, open 3DFSCPlot_Chimera.cmd in Chimera, with lineplot.py and the mrc files in the Chimera folder in the same directory.")
if __name__ == '__main__':
main(sys.argv[1],sys.argv[2],sys.argv[3],sys.argv[4],sys.argv[5],sys.argv[6],sys.argv[7],sys.argv[8],sys.argv[9],sys.argv[10])
|
# -*- coding: utf-8 -*-
from . import geo
class CountryMiddleware(object):
"""
This is a middleware that parses a request
and decides which country the request came from.
"""
def process_request(self, request):
request.COUNTRY_CODE = geo.get_country_from_request(request)
|
#!/usr/bin/env python
# encoding=utf-8
from __future__ import print_function
import threading
from .models import Wxgroups,Wxfriends
import sys
import time
import json
import re
import hashlib
reload(sys)
sys.setdefaultencoding("utf-8")
class GroupsThread(threading.Thread):
def __init__ (self, gitem):
threading.Thread.__init__(self)
self.gitem = gitem
def run(self):
group = self.gitem
        # Filter out emoji and other unusual characters
emoji_pattern = re.compile(
u"(\ud83d[\ude00-\ude4f])|" # emoticons
u"(\ud83c[\udf00-\uffff])|" # symbols & pictographs (1 of 2)
u"(\ud83d[\u0000-\uddff])|" # symbols & pictographs (2 of 2)
u"(\ud83d[\ude80-\udeff])|" # transport & map symbols
u"(\ud83c[\udde0-\uddff])|" # flags (iOS)
u"(\U00010000-\U0010ffff)|"
u"([\uD800-\uDBFF][\uDC00-\uDFFF])"
"+", flags=re.UNICODE
)
nick_name = emoji_pattern.sub(r'', group['NickName'])
try:
            # Only update the username
print(group['UserName'])
groups_obj = Wxgroups.objects.get(user_id=uid, nick_name=nick_name)
groups_obj.user_name = group['UserName']
groups_obj.save()
#Wxgroups.objects.filter(user_id=uid, nick_name=nick_name).update(user_name=group['UserName'])
except Wxgroups.DoesNotExist:
            # Save the image path as uid + attr_id
#wechat.savaImg(item['HeadImgUrl'], img_name)
img_name = str(uid) + "_" + group['EncryChatRoomId']
img_name = wechat_obj.savaImg(group['UserName'], img_name)
#img_name = self.saveGroupImg("https://wx2.qq.com"+group['HeadImgUrl'], img_name)
print(img_name)
Wxgroups.objects.create(
user_id=uid,
room_id = group['EncryChatRoomId'],
user_name = emoji_pattern.sub(r'', group['UserName']),
nick_name = nick_name,
remark_name = emoji_pattern.sub(r'', group['RemarkName']),
img = img_name,
)
# Friends
class FriendsThread(threading.Thread):
def __init__ (self, fitem):
threading.Thread.__init__(self)
self.fitem = fitem
def run(self):
item = self.fitem
        # attr alone is not unique either, so use the md5 of attr + nick here
m5 = hashlib.md5()
m5.update(str(item['AttrStatus'])+item['NickName'])
item['AttrStatus'] = m5.hexdigest()
try:
            # Filter out emoji and other unusual characters
emoji_pattern = re.compile(
u"(\ud83d[\ude00-\ude4f])|" # emoticons
u"(\ud83c[\udf00-\uffff])|" # symbols & pictographs (1 of 2)
u"(\ud83d[\u0000-\uddff])|" # symbols & pictographs (2 of 2)
u"(\ud83d[\ude80-\udeff])|" # transport & map symbols
u"(\ud83c[\udde0-\uddff])|" # flags (iOS)
u"(\U00010000-\U0010ffff)|"
u"([\uD800-\uDBFF][\uDC00-\uDFFF])"
"+", flags=re.UNICODE
)
#img_name = str(uid)+ "_"+ str(item['AttrStatus'])
try:
                # Only update the username
#begin = time.time()
friends_obj = Wxfriends.objects.get(user_id=uid, attr_id=item['AttrStatus'])
friends_obj.user_name =item['UserName']
friends_obj.save()
                # Tested: update() turned out to be slower
#Wxfriends.objects.filter(user_id=uid, attr_id=item['AttrStatus']).update(user_name=item['UserName'])
#stop = time.time()
#print(str(item['AttrStatus'])+"=====>"+str(stop-begin))
except Wxfriends.DoesNotExist:
try:
img_name = wechat_obj.savaImg(item['UserName'], item['AttrStatus'])
print(img_name)
Wxfriends.objects.create(
user_id=uid,
attr_id=item['AttrStatus'],
user_name =item['UserName'],
nick_name =emoji_pattern.sub(r'', item['NickName']),
remark_name =item['RemarkName'],
province =item['Province'],
city =item['City'],
sex =item['Sex'],
#img =item['HeadImgUrl'],
img = img_name,
sign = emoji_pattern.sub(r'', item['Signature']),
contact_flag =item['ContactFlag'],
sns_flag =item['SnsFlag']
)
                except Exception as e:
print(item['AttrStatus'], e)
        except Exception as e:
print(item['AttrStatus'], e)
def handel_friends(request):
from .wechat import Wechat
init = time.time()
global uid
uid = request.user.id
global wechat_obj
wechat_obj = Wechat(request)
print('get friends...')
friends = wechat_obj.friends()
    # Write to the database using multiple threads
print('threading start....'+str(len(friends)))
start = time.time()
thread_list = [FriendsThread(item) for item in friends]
for t in thread_list:
t.setDaemon(True)
t.start()
for t in thread_list:
t.join()
end = time.time()
print(start-init)
print(end-start)
print('threading stop....')
def handel_groups(wechat, groups):
global uid
uid = wechat.uid
global wechat_obj
wechat_obj = wechat
print('get groups...')
    # Write to the database using multiple threads
print('threading start....')
start = time.time()
thread_list = [GroupsThread(item) for item in groups]
for t in thread_list:
t.setDaemon(True)
t.start()
for t in thread_list:
t.join()
end = time.time()
print(end-start)
print('threading stop....')
if __name__ == '__main__':
print('this is test...')
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import absolute_import, unicode_literals, division
# fix import path
import sys
import os
os.chdir(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
sys.path.append(os.getcwd())
from puls.tasks import suppliers
import codecs
def runtest(cls, fixture):
results = []
queue = []
def test_component(_id, name, price, stock, url):
results.append(_id)
assert len(_id) < 32, "ID too long"
assert isinstance(price, float), "Price must be floating point"
assert price > 0, "Price must be positive"
assert isinstance(stock, bool), "Stock must be True or False"
assert len(url) < 256, "URL too long"
handler = cls()
handler.found = test_component
handler.enqueue = lambda u, d, f=False: queue.append(u)
    with codecs.open(fixture, "rb", "utf-8") as f:
        handler.handle(f.read(), True)
assert results, "No components found"
assert len(results) == 40, "Partial resultset found"
assert queue, "No follow up page found"
assert len(queue) == 1, "Multiple follow up pages"
runtest(suppliers.EMag, "tests/fixtures/emag.html")
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import division
from collections import Counter, deque
import numpy as np
import math
from .tensor import Tensor
from . import layer
from singa.proto import model_pb2
from . import singa_wrap as singa
CTensor = singa.Tensor
training = False
class Operation(object):
'''
An operation includes the forward and backward function of
tensor calculation.
Steps to add a specific operation Xxxx:
1. create a subclass of Operation, name it as Xxxx
2. if Xxxx is implemented using other Operations, then override
_do_forward() function;
if Xxxx is implemented using CTensor operations,
then override the forward() and backward(); The arguments of forward()
and backward() should only include CTensor;
if Xxxx is implemented by calling functions in layer.py, then override
__call__(), forward() and backward(). TODO(wangwei) avoid this complex
case.
'''
def __call__(self, *xs):
return self._do_forward(*xs)
def _do_forward(self, *xs):
'''
Do not call this function from user code. It is called by __call__().
Args:
xs, Tensor instance(s)
Returns:
Tensor instance(s)
'''
# TODO add the pre hook
assert all([isinstance(x, Tensor) for x in xs]), \
'xs should include only Tensor instances'
# need to do backward if any of its input arg needs gradient
self.requires_grad = any([x.requires_grad for x in xs])
self.src = []
for x in xs:
if x.stores_grad:
# store the tensor whose gradient needs be returned in
# backward(), e.g. if x is parameter
self.src.append((x.creator, id(x), x, x.stores_grad))
else:
# for intermediate tensors, they will be released soon;
# no need to store them --> use None
self.src.append((x.creator, id(x), None, x.stores_grad))
# get the CTensor (data) if the input arg is Tensor
xs = tuple(x.data for x in xs)
ys = self.forward(*xs)
if not isinstance(ys, tuple):
ys = (ys,)
# create Tensor based on CTensor(data);
# assume outputs are all Tensor instances
ys = tuple(Tensor(device=y.device,
data=y,
requires_grad=self.requires_grad,
creator=self) for y in ys)
# map from python id to output index
self.y_id2idx = {id(y): i for i, y in enumerate(ys)}
# TODO add the post hook
return ys
def _do_backward(self, *dys):
dxs = self.backward(*dys)
if not isinstance(dxs, tuple):
dxs = (dxs,)
return dxs
def forward(self, *xs):
'''Forward propagation.
Args:
xs: input args consisting of only CTensors.
Returns:
CTensor instance(s)
'''
raise NotImplementedError
def backward(self, *dys):
''' Backward propagation.
Args:
dys: input args consisting of only CTensors.
Returns:
CTensor instance(s)
'''
raise NotImplementedError
def get_params(self):
return []
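# A minimal sketch of step 2 from the Operation docstring above (illustrative
# only, not part of SINGA): an operation implemented by overriding forward()
# and backward() with CTensor arguments. Identity passes data through
# unchanged, so the gradient also passes through unchanged.
class Identity(Operation):
    def forward(self, x):
        # y = x
        return x
    def backward(self, dy):
        # dL/dx = dL/dy
        return dy
def identity(x):
    return Identity()(x)[0]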
class Dummy(Operation):
    '''Dummy operation which serves as a placeholder for autograd
Args:
name(string): set it for debug
'''
def __init__(self, tensor, name=None):
self.name = name
self.src = []
self.y_id2idx = {id(tensor): 0}
self.requires_grad = False
class ReLU(Operation):
def forward(self, x):
'''
Args:
x(CTensor): input tensor
Returns:
a new CTensor whose element y = x if x >= 0; otherwise 0;
'''
if training:
self.input = x
return singa.ReLU(x)
def backward(self, dy):
'''
Args:
dy(CTensor): dL / dy
Returns:
dx(CTensor): dL / dx = dy if x >= 0; otherwise 0;
'''
dx = singa.GTFloat(self.input, 0.0)
return singa.__mul__(dy, dx)
def relu(x):
return ReLU()(x)[0]
class Matmul(Operation):
'''For matrix multiplication'''
def forward(self, x, w):
'''Do forward propgation.
Store the x(or w) if w(or x) requires gradient.
Args:
x (CTensor): matrix
w (CTensor): matrix
Returns:
a CTensor for the result
'''
if training:
self.input = (x, w)
return singa.Mult(x, w)
def backward(self, dy):
'''
Args:
dy (CTensor): data for the dL / dy, L is the loss
Returns:
a tuple for (dx, dw)
'''
return singa.Mult(dy, self.input[1].T()), \
singa.Mult(self.input[0].T(), dy)
def matmul(x, w):
return Matmul()(x, w)[0]
class AddBias(Operation):
'''
Add Bias to each row / column of the Tensor, depending on the axis arg.
'''
def __init__(self, axis=0):
'''
To indicate the calculation axis, 0 for row, 1 for column.
Args:
axis: 0 or 1, default is 0.
'''
self.axis = axis
def forward(self, x, b):
'''
Args:
x: matrix.
b: bias to be added.
Return:
the result Tensor
'''
if self.axis == 0:
singa.AddRow(b, x)
elif self.axis == 1:
singa.AddColumn(b, x)
return x
def backward(self, dy):
'''
Args:
dy (CTensor): data for the dL / dy, L is the loss.
Return:
a tuple for (db, dx), db is data for dL / db, dx is data
for dL / dx.
'''
if self.axis == 0:
return dy, singa.Sum(dy, 0)
        elif self.axis == 1:
            # sum along axis 1 so db matches the column-bias shape
            return dy, singa.Sum(dy, 1)
def add_bias(x, b, axis=0):
return AddBias(axis)(x, b)[0]
class SoftMax(Operation):
'''
Apply SoftMax for each row of the Tensor or each column of the Tensor
according to the parameter axis.
'''
def __init__(self, axis=0):
self.axis = axis
def forward(self, x):
'''
Args:
x(data): the input 1d or 2d tensor
Returns:
the result Tensor
'''
if self.axis == 1:
x = x.T()
self.output = singa.SoftMax(x)
if self.axis == 0:
return self.output
elif self.axis == 1:
return self.output.T()
def backward(self, dy):
'''
Args:
dy (CTensor): data for the dL / dy, L is the loss
Returns:
dx (Ctensor): data for the dL / dx, L is the loss,
x is the input of current Opertion
'''
# calculations are made on numpy array
if self.axis == 1:
dy = dy.T()
grad = ctensor2numpy(dy)
output = ctensor2numpy(self.output)
out_1 = np.einsum('ki,ki->ki', grad, output)
medium_out = np.einsum('ki,kj->kij', output, output)
out_2 = np.einsum('kij,kj->ki', medium_out, grad)
out = out_1 - out_2
dx = CTensor(out_1.shape)
dx.CopyFloatDataFromHostPtr(out.flatten())
if self.axis == 0:
return dx
elif self.axis == 1:
return dx.T()
def soft_max(x, axis=0):
return SoftMax(axis)(x)[0]
class CrossEntropy(Operation):
'''
Calculte CrossEntropy loss for a batch of training data.
'''
def forward(self, x, t):
'''
Args:
x (CTensor): 1d or 2d tensor, the prediction data(output)
of current network.
t (CTensor): 1d or 2d tensor, the target data for training.
Returns:
loss (CTensor): scalar.
'''
loss = CTensor((1,))
loss_data = -singa.SumAsFloat(singa.__mul__(t, singa.Log(x)))
loss.SetFloatValue(loss_data / x.shape()[0])
self.x = x
self.t = t
self.input = (x, t)
return loss
def backward(self, dy=1.0):
'''
Args:
dy (float or CTensor): scalar, accumulate gradient from outside
of current network, usually equal to 1.0
Returns:
dx (CTensor): data for the dL /dx, L is the loss, x is the output
of current network. note that this is true for
dy = 1.0
'''
dx = singa.__div__(self.t, self.x)
dx *= float(-1 / self.x.shape()[0])
if isinstance(dy, float):
# dtype of dy: float
dx *= dy
return dx, None
elif isinstance(dy, CTensor):
pass # TODO, broadcast elementwise multiply seems not support
def cross_entropy(y, t):
return CrossEntropy()(y, t)[0]
def ctensor2numpy(x):
'''
To be used in SoftMax Operation.
Convert a singa_tensor to numpy_tensor.
'''
np_array = x.GetFloatValue(int(x.Size()))
return np_array.reshape(x.shape())
class Conv2d(Operation):
def __init__(self, in_channels, out_channels, kernel_size=3, stride=1,
padding=0, dilation=1, groups=1, bias=True, **kwargs):
inner_params = {'name': 'Conv2d',
'border_mode': 'same',
'cudnn_prefer': 'fastest',
'workspace_byte_limit': 1024,
'data_format': 'NCHW',
'W_specs': {'init': 'xavier'},
'b_specs': {'init': 'constant'},
'input_sample_shape': None}
# TODO valid value of inner_params check
for kwarg in kwargs:
if kwarg not in inner_params:
raise TypeError('Keyword argument not understood:', kwarg)
else:
inner_params[kwarg] = kwargs[kwarg]
self.in_channels = in_channels
self.out_channels = out_channels
self.W_specs = inner_params['W_specs']
self.b_specs = inner_params['b_specs']
if isinstance(kernel_size, int):
self.kernel_size = (kernel_size, kernel_size)
else:
self.kernel_size = kernel_size
if padding == 0:
pad = None
else:
pad = padding
if dilation != 1 or groups != 1:
raise ValueError('Not implemented yet')
self.PyLayer = layer.Conv2D(inner_params['name'],
nb_kernels=out_channels,
kernel=kernel_size,
stride=stride,
border_mode=inner_params['border_mode'],
cudnn_prefer=inner_params['cudnn_prefer'],
workspace_byte_limit=inner_params[
'workspace_byte_limit'],
data_format=inner_params['data_format'],
use_bias=bias,
W_specs=self.W_specs,
b_specs=self.b_specs,
pad=pad,
input_sample_shape=inner_params['input_sample_shape'])
def get_params(self):
        # w (and b, if bias is used) are created lazily on the first __call__
        assert hasattr(self, 'w'), 'must call the operation once before get_params()'
        if hasattr(self, 'b'):
            return (self.w, self.b)
        else:
            return self.w
def __call__(self, x):
if training:
self.flag = model_pb2.kTrain
else:
self.flag = model_pb2.kEval
if not self.PyLayer.has_setup:
self.PyLayer.setup(x.shape[1:])
param_data = self.PyLayer.layer.param_values()
if not hasattr(self, 'w'):
self.w = Tensor(device=param_data[0].device, data=param_data[
0], requires_grad=True, stores_grad=True)
std = math.sqrt(
2.0 / (self.in_channels * self.kernel_size[0] * self.kernel_size[1] + self.out_channels))
self.w.gaussian(0.0, std)
xs = [x, self.w]
if len(param_data) == 2:
if not hasattr(self, 'b'):
self.b = Tensor(device=param_data[1].device, data=param_data[
1], requires_grad=True, stores_grad=True)
self.b.set_value(0.0)
xs.append(self.b)
xs = tuple(xs)
return self._do_forward(*xs)[0]
def forward(self, *xs):
return self.PyLayer.layer.Forward(self.flag, xs[0])
def backward(self, dy):
ret = self.PyLayer.layer.Backward(self.flag, dy)
return (ret[0],) + ret[1]
class Linear(Operation):
def __init__(self, in_features, out_features, bias=True):
self.in_features = in_features
self.out_features = out_features
self.w_shape = (in_features, out_features)
self.b_shape = (1, out_features)
self.bias = bias
self.init_value = False
def get_params(self):
assert self.init_value is True, 'must initialize before get_params()'
if self.bias:
return (self.w, self.b)
else:
return self.w
def __call__(self, x):
if self.init_value is False:
self.w = Tensor(shape=self.w_shape,
requires_grad=True, stores_grad=True)
std = math.sqrt(2.0 / (self.in_features + self.out_features))
self.w.gaussian(0.0, std)
if self.bias:
self.b = Tensor(shape=self.b_shape,
requires_grad=True, stores_grad=True)
self.b.set_value(0.0)
self.init_value = True
y = matmul(x, self.w)
if self.bias:
y = add_bias(y, self.b, axis=0)
return y
class MaxPool2d(Operation):
def __init__(self, kernel_size=3, stride=1, padding=0, dilation=1,
return_indices=False, ceil_mode=False, **kwargs):
inner_params = {'name': 'MaxPool2d',
'border_mode': 'same',
'data_format': 'NCHW',
'input_sample_shape': None
}
for kwarg in kwargs:
if kwarg not in inner_params:
raise TypeError('Keyword argument not understood:', kwarg)
else:
inner_params[kwarg] = kwargs[kwarg]
if padding == 0:
pad = None
else:
pad = padding
if dilation != 1 or return_indices or ceil_mode:
raise ValueError('Not implemented yet')
self.PyLayer = layer.Pooling2D(inner_params['name'],
model_pb2.PoolingConf.MAX,
kernel_size, stride, inner_params[
'border_mode'],
pad, inner_params['data_format'],
inner_params['input_sample_shape'])
def __call__(self, x):
if training:
self.flag = model_pb2.kTrain
else:
self.flag = model_pb2.kEval
if not self.PyLayer.has_setup:
self.PyLayer.setup(x.shape[1:])
return self._do_forward(x)
def forward(self, *xs):
return self.PyLayer.layer.Forward(self.flag, xs[0])
def backward(self, dy):
return self.PyLayer.layer.Backward(0, dy)[0]
def max_pool_2d(x, kernel_size=3, stride=1, padding=0, dilation=1,
return_indices=False, ceil_mode=False, **kwargs):
return MaxPool2d(kernel_size, stride, padding, dilation, return_indices,
ceil_mode, **kwargs)(x)[0]
class Flatten(Operation):
def __init__(self):
self.PyLayer = layer.Flatten('flatten', 1)
def __call__(self, x):
if training:
self.flag = model_pb2.kTrain
else:
self.flag = model_pb2.kEval
if not self.PyLayer.has_setup:
self.PyLayer.setup(x.shape[1:])
return self._do_forward(x)
def forward(self, *xs):
return self.PyLayer.layer.Forward(self.flag, xs[0])
def backward(self, dy):
return self.PyLayer.layer.Backward(0, dy)[0]
def flatten(x):
return Flatten()(x)[0]
def infer_dependency(op):
'''
Infer the dependency of all operations with the
given op as the last operation.
    Operation A depends on B if A uses the output(s) of B.
Args:
op: an Operation instance, e.g. the loss operation.
Return:
a Counter instance with the operation as the key,
and the number of operations that are depending on it as the value
'''
# dependency = {}
dependency_count = Counter()
queue = deque([op])
while len(queue) > 0:
cur_op = queue.pop()
for src_op, _, _, _ in cur_op.src:
if src_op not in dependency_count and \
(not isinstance(src_op, Dummy)):
# dependency[src_op] = [Counter() for _ in src_op.y_id2idx]
dependency_count[src_op] = 0
queue.append(src_op)
# y_idx = src_op.y_id2idx[x_id]
# dependency[src_op][y_idx][cur_op] += 1
dependency_count[src_op] += 1
return dependency_count
def backward(y, dy=None):
'''
Run the backward propagation starting at y.
Args:
y: a Tensor instance, usually the loss
dy: a number or a Tensor instance, for the gradient of the
objective/loss w.r.t y, usually 1.0
Return:
a dictionary storing the gradient tensors of all tensors
whose stores_grad is true (e.g. parameter tensors)
'''
dependency = infer_dependency(y.creator)
    assert y.size() == 1, 'y must be a Tensor with a single value; '\
        'size of y is %d' % y.size()
# by default the dy is a tensor with 1.0 for each sample;
if dy is None:
dy = float(1.0)
elif isinstance(dy, Tensor):
dy = dy.data
else:
dy = float(dy)
# ready is a queue of (operation, dy list)
ready = deque([(y.creator, (dy,))])
not_ready = {} # mapping: op->[dy]
gradients = {} # mapping: x->dx if x.stores_grad
if y.stores_grad:
gradients[y] = dy
while len(ready) > 0:
op, dys = ready.pop()
if not op.requires_grad or isinstance(op, Dummy):
continue
# if not isinstance(op, tensor.Dummy):
dxs = op._do_backward(*dys)
# TODO src and dx must match
assert len(op.src) == len(dxs), \
'the number of src ops (=%d) and dx (=%d) not match' \
% (len(op.src), len(dxs))
for (src_op, x_id, y, y_stores_grad), dx in zip(op.src, dxs):
# prefix x is w.r.t op; prefix y is w.r.t src_op.
# x_id is the python id of one input arg of src_op, denoted as x.
# y_idx (below) is the index of x among the outputs of src_op.
# not_ready[src_op][y_idx] records the intermediate gradient
# of the y_idx'th output of src_op. 'intermediate gradient'
# indicates that if this output is used in multiple children
            # operations, then we have to add the gradient (dx) from all these
# children operations. When src_op is ready, it means that
# the gradient of all its outputs are available, i.e. all children
# operations have been backwarded.
# y is None if y.stores_grad is false; otherwise it is a Tensor
y_idx = src_op.y_id2idx[x_id]
if src_op not in not_ready:
                # src_op may have multiple outputs
not_ready[src_op] = [None for _ in src_op.y_id2idx]
not_ready[src_op][y_idx] = dx
else:
dxs = not_ready[src_op]
if dxs[y_idx] is None:
dxs[y_idx] = dx
else:
# add the gradient from another children operation that
# uses y_idx'th output of src_op as input arg
dxs[y_idx] += dx
if y_stores_grad:
# store the gradient for final return, e.g. if x is parameter
g = not_ready[src_op][y_idx]
gradients[y] = Tensor(device=g.device, data=g)
dependency[src_op] -= 1
if src_op.requires_grad is True:
if dependency[src_op] == 0:
if not isinstance(src_op, Dummy):
ready.append((src_op, not_ready[src_op]))
del not_ready[src_op]
return gradients
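# Hedged usage sketch (illustrative only; assumes a scalar `loss` Tensor
# produced by a forward pass with training=True):
# for param, grad in backward(loss).items():
#     ...  # apply the optimizer update of your choice to param using grad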
|
from pathlib import Path
def test_dynamic_configuration(notes: Path) -> None:
import pytz
from types import SimpleNamespace as NS
from my.core.cfg import tmp_config
with tmp_config() as C:
C.orgmode = NS(paths=[notes])
        # TODO ugh. this belongs to tz provider or global config or something
C.weight = NS(default_timezone=pytz.timezone('Europe/London'))
from my.body.weight import from_orgmode
weights = [0.0 if isinstance(x, Exception) else x.value for x in from_orgmode()]
assert weights == [
0.0,
62.0,
0.0,
61.0,
62.0,
0.0,
]
import pytest # type: ignore
def test_environment_variable(tmp_path: Path) -> None:
cfg_dir = tmp_path / 'my'
cfg_file = cfg_dir / 'config.py'
cfg_dir.mkdir()
cfg_file.write_text('''
class feedly:
pass
class just_for_test:
pass
''')
import os
oenv = dict(os.environ)
try:
os.environ['MY_CONFIG'] = str(tmp_path)
# should not raise at least
import my.rss.feedly
import my.config as c
assert hasattr(c, 'just_for_test')
finally:
os.environ.clear()
os.environ.update(oenv)
import sys
# TODO wtf??? doesn't work without unlink... is it caching something?
cfg_file.unlink()
del sys.modules['my.config'] # meh..
import my.config as c
assert not hasattr(c, 'just_for_test')
from dataclasses import dataclass
def test_user_config() -> None:
from my.core.common import classproperty
class user_config:
param1 = 'abacaba'
        # TODO: regular properties don't work here, hence classproperty
@classproperty
def param2(cls) -> int:
return 456
extra = 'extra!'
@dataclass
class test_config(user_config):
param1: str
param2: int # type: ignore[assignment] # TODO need to figure out how to trick mypy for @classproperty
param3: str = 'default'
assert test_config.param1 == 'abacaba'
assert test_config.param2 == 456
assert test_config.param3 == 'default'
assert test_config.extra == 'extra!'
from my.core.cfg import make_config
c = make_config(test_config)
assert c.param1 == 'abacaba'
assert c.param2 == 456
assert c.param3 == 'default'
assert c.extra == 'extra!'
@pytest.fixture
def notes(tmp_path: Path):
ndir = tmp_path / 'notes'
ndir.mkdir()
logs = ndir / 'logs.org'
logs.write_text('''
#+TITLE: Stuff I'm logging
* Weight (org-capture) :weight:
** [2020-05-01 Fri 09:00] 62
** 63
this should be ignored, got no timestamp
** [2020-05-03 Sun 08:00] 61
** [2020-05-04 Mon 10:00] 62
''')
misc = ndir / 'misc.org'
misc.write_text('''
Some misc stuff
* unrelated note :weight:whatever:
''')
try:
yield ndir
finally:
pass
@pytest.fixture(autouse=True)
def prepare():
from .common import reset_modules
reset_modules()
yield
|
"""
Hessian tool for neural networks, based on PyTorch 0.4.1
"""
name = 'Hessian Flow'
from .eigen import *
|
{
'includes': [
'common.gypi',
],
'target_defaults': {
'conditions': [
['skia_os != "win"', {
'sources/': [ ['exclude', '_win.(h|cpp)$'],
],
}],
['skia_os != "mac"', {
'sources/': [ ['exclude', '_mac.(h|cpp)$'],
],
}],
['skia_os != "linux"', {
'sources/': [ ['exclude', '_unix.(h|cpp)$'],
],
}],
['skia_os != "ios"', {
'sources/': [ ['exclude', '_iOS.(h|cpp)$'],
],
}],
['skia_os != "android"', {
'sources/': [ ['exclude', '_android.(h|cpp)$'],
],
}],
[ 'skia_os == "android"', {
'defines': [
'GR_ANDROID_BUILD=1',
],
}],
[ 'skia_os == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'skia_os == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'skia_os == "ios"', {
'defines': [
'GR_IOS_BUILD=1',
],
}],
[ 'skia_os == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
],
}],
# nullify the targets in this gyp file if skia_gpu is 0
[ 'skia_gpu == 0', {
'sources/': [
['exclude', '.*'],
],
'defines/': [
['exclude', '.*'],
],
'include_dirs/': [
['exclude', '.*'],
],
'link_settings': {
'libraries/': [
['exclude', '.*'],
],
},
'direct_dependent_settings': {
'defines/': [
['exclude', '.*'],
],
'include_dirs/': [
['exclude', '.*'],
],
},
}],
],
'direct_dependent_settings': {
'conditions': [
[ 'skia_os == "android"', {
'defines': [
'GR_ANDROID_BUILD=1',
],
}],
[ 'skia_os == "mac"', {
'defines': [
'GR_MAC_BUILD=1',
],
}],
[ 'skia_os == "linux"', {
'defines': [
'GR_LINUX_BUILD=1',
],
}],
[ 'skia_os == "ios"', {
'defines': [
'GR_IOS_BUILD=1',
],
}],
[ 'skia_os == "win"', {
'defines': [
'GR_WIN32_BUILD=1',
'GR_GL_FUNCTION_TYPE=__stdcall',
],
}],
],
'include_dirs': [
'../deps/skia/include/gpu',
],
},
},
'targets': [
{
'target_name': 'skgr',
'type': 'static_library',
'include_dirs': [
'../deps/skia/include/config',
'../deps/skia/include/core',
'../deps/skia/src/core',
'../deps/skia/include/gpu',
'../deps/skia/src/gpu',
],
'dependencies': [
'angle.gyp:*',
],
'export_dependent_settings': [
'angle.gyp:*',
],
'sources': [
'../deps/skia/include/gpu/SkGpuCanvas.h',
'../deps/skia/include/gpu/SkGpuDevice.h',
'../deps/skia/include/gpu/SkGr.h',
'../deps/skia/include/gpu/SkGrPixelRef.h',
'../deps/skia/include/gpu/SkGrTexturePixelRef.h',
'../deps/skia/include/gpu/gl/SkGLContext.h',
'../deps/skia/include/gpu/gl/SkMesaGLContext.h',
'../deps/skia/include/gpu/gl/SkANGLEGLContext.h',
'../deps/skia/include/gpu/gl/SkNativeGLContext.h',
'../deps/skia/include/gpu/gl/SkNullGLContext.h',
'../deps/skia/include/gpu/gl/SkDebugGLContext.h',
'../deps/skia/src/gpu/SkGpuCanvas.cpp',
'../deps/skia/src/gpu/SkGpuDevice.cpp',
'../deps/skia/src/gpu/SkGr.cpp',
'../deps/skia/src/gpu/SkGrFontScaler.cpp',
'../deps/skia/src/gpu/SkGrPixelRef.cpp',
'../deps/skia/src/gpu/SkGrTexturePixelRef.cpp',
'../deps/skia/src/gpu/gl/SkGLContext.cpp',
'../deps/skia/src/gpu/gl/SkNullGLContext.cpp',
'../deps/skia/src/gpu/gl/debug/SkDebugGLContext.cpp',
'../deps/skia/src/gpu/gl/mac/SkNativeGLContext_mac.cpp',
'../deps/skia/src/gpu/gl/win/SkNativeGLContext_win.cpp',
'../deps/skia/src/gpu/gl/unix/SkNativeGLContext_unix.cpp',
'../deps/skia/src/gpu/gl/mesa/SkMesaGLContext.cpp',
'../deps/skia/src/gpu/gl/angle/SkANGLEGLContext.cpp',
'../deps/skia/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp',
'../deps/skia/src/gpu/android/SkNativeGLContext_android.cpp',
],
'conditions': [
[ 'not skia_mesa', {
'sources!': [
'../deps/skia/src/gpu/gl/mesa/SkMesaGLContext.cpp',
],
}],
[ 'skia_mesa and skia_os == "mac"', {
'include_dirs': [
'$(SDKROOT)/usr/X11/include/',
],
}],
[ 'not skia_angle', {
'sources!': [
'../deps/skia/include/gpu/gl/SkANGLEGLContext.h',
'../deps/skia/src/gpu/gl/angle/SkANGLEGLContext.cpp',
'../deps/skia/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp',
],
}],
],
},
{
'target_name': 'gr',
'type': 'static_library',
'include_dirs': [
'../deps/skia/include/core',
'../deps/skia/include/config',
'../deps/skia/include/gpu',
'../deps/skia/src/core', # SkRasterClip.h
'../deps/skia/src/gpu'
],
'dependencies': [
'angle.gyp:*',
],
'export_dependent_settings': [
'angle.gyp:*',
],
'sources': [
'../deps/skia/include/gpu/GrAARectRenderer.h',
'../deps/skia/include/gpu/GrClipData.h',
'../deps/skia/include/gpu/GrColor.h',
'../deps/skia/include/gpu/GrConfig.h',
'../deps/skia/include/gpu/GrContext.h',
'../deps/skia/include/gpu/GrContextFactory.h',
'../deps/skia/include/gpu/GrCustomStage.h',
'../deps/skia/include/gpu/GrCustomStageUnitTest.h',
'../deps/skia/include/gpu/GrFontScaler.h',
'../deps/skia/include/gpu/GrGlyph.h',
'../deps/skia/include/gpu/GrInstanceCounter.h',
'../deps/skia/include/gpu/GrKey.h',
'../deps/skia/include/gpu/GrMatrix.h',
'../deps/skia/include/gpu/GrNoncopyable.h',
'../deps/skia/include/gpu/GrPaint.h',
'../deps/skia/include/gpu/GrPoint.h',
'../deps/skia/include/gpu/GrProgramStageFactory.h',
'../deps/skia/include/gpu/GrRect.h',
'../deps/skia/include/gpu/GrRefCnt.h',
'../deps/skia/include/gpu/GrRenderTarget.h',
'../deps/skia/include/gpu/GrResource.h',
'../deps/skia/include/gpu/GrSamplerState.h',
'../deps/skia/include/gpu/GrScalar.h',
'../deps/skia/include/gpu/GrSurface.h',
'../deps/skia/include/gpu/GrTextContext.h',
'../deps/skia/include/gpu/GrTexture.h',
'../deps/skia/include/gpu/GrTypes.h',
'../deps/skia/include/gpu/GrUserConfig.h',
'../deps/skia/include/gpu/gl/GrGLConfig.h',
'../deps/skia/include/gpu/gl/GrGLConfig_chrome.h',
'../deps/skia/include/gpu/gl/GrGLFunctions.h',
'../deps/skia/include/gpu/gl/GrGLInterface.h',
'../deps/skia/src/gpu/GrAAHairLinePathRenderer.cpp',
'../deps/skia/src/gpu/GrAAHairLinePathRenderer.h',
'../deps/skia/src/gpu/GrAAConvexPathRenderer.cpp',
'../deps/skia/src/gpu/GrAAConvexPathRenderer.h',
'../deps/skia/src/gpu/GrAARectRenderer.cpp',
'../deps/skia/src/gpu/GrAddPathRenderers_default.cpp',
'../deps/skia/src/gpu/GrAllocator.h',
'../deps/skia/src/gpu/GrAllocPool.h',
'../deps/skia/src/gpu/GrAllocPool.cpp',
'../deps/skia/src/gpu/GrAtlas.cpp',
'../deps/skia/src/gpu/GrAtlas.h',
'../deps/skia/src/gpu/GrBinHashKey.h',
'../deps/skia/src/gpu/GrBufferAllocPool.cpp',
'../deps/skia/src/gpu/GrBufferAllocPool.h',
'../deps/skia/src/gpu/GrClipData.cpp',
'../deps/skia/src/gpu/GrContext.cpp',
'../deps/skia/src/gpu/GrCustomStage.cpp',
'../deps/skia/src/gpu/GrDefaultPathRenderer.cpp',
'../deps/skia/src/gpu/GrDefaultPathRenderer.h',
'../deps/skia/src/gpu/GrDrawState.h',
'../deps/skia/src/gpu/GrDrawTarget.cpp',
'../deps/skia/src/gpu/GrDrawTarget.h',
'../deps/skia/src/gpu/GrGeometryBuffer.h',
'../deps/skia/src/gpu/GrClipMaskManager.h',
'../deps/skia/src/gpu/GrClipMaskManager.cpp',
'../deps/skia/src/gpu/GrGpu.cpp',
'../deps/skia/src/gpu/GrGpu.h',
'../deps/skia/src/gpu/GrGpuFactory.cpp',
'../deps/skia/src/gpu/GrGpuVertex.h',
'../deps/skia/src/gpu/GrIndexBuffer.h',
'../deps/skia/src/gpu/GrInOrderDrawBuffer.cpp',
'../deps/skia/src/gpu/GrInOrderDrawBuffer.h',
'../deps/skia/src/gpu/GrMatrix.cpp',
'../deps/skia/src/gpu/GrMemory.cpp',
'../deps/skia/src/gpu/GrMemoryPool.cpp',
'../deps/skia/src/gpu/GrMemoryPool.h',
'../deps/skia/src/gpu/GrPath.h',
'../deps/skia/src/gpu/GrPathRendererChain.cpp',
'../deps/skia/src/gpu/GrPathRendererChain.h',
'../deps/skia/src/gpu/GrPathRenderer.cpp',
'../deps/skia/src/gpu/GrPathRenderer.h',
'../deps/skia/src/gpu/GrPathUtils.cpp',
'../deps/skia/src/gpu/GrPathUtils.h',
'../deps/skia/src/gpu/GrPlotMgr.h',
'../deps/skia/src/gpu/GrRandom.h',
'../deps/skia/src/gpu/GrRectanizer.cpp',
'../deps/skia/src/gpu/GrRectanizer.h',
'../deps/skia/src/gpu/GrRedBlackTree.h',
'../deps/skia/src/gpu/GrRenderTarget.cpp',
'../deps/skia/src/gpu/GrResource.cpp',
'../deps/skia/src/gpu/GrResourceCache.cpp',
'../deps/skia/src/gpu/GrResourceCache.h',
'../deps/skia/src/gpu/GrStencil.cpp',
'../deps/skia/src/gpu/GrStencil.h',
'../deps/skia/src/gpu/GrStencilAndCoverPathRenderer.cpp',
'../deps/skia/src/gpu/GrStencilAndCoverPathRenderer.h',
'../deps/skia/src/gpu/GrStencilBuffer.cpp',
'../deps/skia/src/gpu/GrStencilBuffer.h',
'../deps/skia/src/gpu/GrTBSearch.h',
'../deps/skia/src/gpu/GrTDArray.h',
'../deps/skia/src/gpu/GrSWMaskHelper.cpp',
'../deps/skia/src/gpu/GrSWMaskHelper.h',
'../deps/skia/src/gpu/GrSoftwarePathRenderer.cpp',
'../deps/skia/src/gpu/GrSoftwarePathRenderer.h',
'../deps/skia/src/gpu/GrSurface.cpp',
'../deps/skia/src/gpu/GrTemplates.h',
'../deps/skia/src/gpu/GrTextContext.cpp',
'../deps/skia/src/gpu/GrTextStrike.cpp',
'../deps/skia/src/gpu/GrTextStrike.h',
'../deps/skia/src/gpu/GrTextStrike_impl.h',
'../deps/skia/src/gpu/GrTexture.cpp',
'../deps/skia/src/gpu/GrTHashCache.h',
'../deps/skia/src/gpu/GrTLList.h',
'../deps/skia/src/gpu/GrVertexBuffer.h',
'../deps/skia/src/gpu/gr_unittests.cpp',
'../deps/skia/src/gpu/effects/Gr1DKernelEffect.h',
'../deps/skia/src/gpu/effects/GrColorTableEffect.cpp',
'../deps/skia/src/gpu/effects/GrColorTableEffect.h',
'../deps/skia/src/gpu/effects/GrConvolutionEffect.cpp',
'../deps/skia/src/gpu/effects/GrConvolutionEffect.h',
'../deps/skia/src/gpu/effects/GrMorphologyEffect.cpp',
'../deps/skia/src/gpu/effects/GrMorphologyEffect.h',
'../deps/skia/src/gpu/effects/GrSingleTextureEffect.cpp',
'../deps/skia/src/gpu/effects/GrSingleTextureEffect.h',
'../deps/skia/src/gpu/effects/GrTextureDomainEffect.cpp',
'../deps/skia/src/gpu/effects/GrTextureDomainEffect.h',
'../deps/skia/src/gpu/gl/GrGLCaps.cpp',
'../deps/skia/src/gpu/gl/GrGLCaps.h',
'../deps/skia/src/gpu/gl/GrGLContextInfo.cpp',
'../deps/skia/src/gpu/gl/GrGLContextInfo.h',
'../deps/skia/src/gpu/gl/GrGLCreateNativeInterface_none.cpp',
'../deps/skia/src/gpu/gl/GrGLCreateNullInterface.cpp',
'../deps/skia/src/gpu/gl/GrGLDefaultInterface_none.cpp',
'../deps/skia/src/gpu/gl/GrGLDefaultInterface_native.cpp',
'../deps/skia/src/gpu/gl/GrGLDefines.h',
'../deps/skia/src/gpu/gl/GrGLIndexBuffer.cpp',
'../deps/skia/src/gpu/gl/GrGLIndexBuffer.h',
'../deps/skia/src/gpu/gl/GrGLInterface.cpp',
'../deps/skia/src/gpu/gl/GrGLIRect.h',
'../deps/skia/src/gpu/gl/GrGLPath.cpp',
'../deps/skia/src/gpu/gl/GrGLPath.h',
'../deps/skia/src/gpu/gl/GrGLProgram.cpp',
'../deps/skia/src/gpu/gl/GrGLProgram.h',
'../deps/skia/src/gpu/gl/GrGLProgramStage.cpp',
'../deps/skia/src/gpu/gl/GrGLProgramStage.h',
'../deps/skia/src/gpu/gl/GrGLRenderTarget.cpp',
'../deps/skia/src/gpu/gl/GrGLRenderTarget.h',
'../deps/skia/src/gpu/gl/GrGLShaderBuilder.cpp',
'../deps/skia/src/gpu/gl/GrGLShaderBuilder.h',
'../deps/skia/src/gpu/gl/GrGLShaderVar.h',
'../deps/skia/src/gpu/gl/GrGLSL.cpp',
'../deps/skia/src/gpu/gl/GrGLSL.h',
'../deps/skia/src/gpu/gl/GrGLStencilBuffer.cpp',
'../deps/skia/src/gpu/gl/GrGLStencilBuffer.h',
'../deps/skia/src/gpu/gl/GrGLTexture.cpp',
'../deps/skia/src/gpu/gl/GrGLTexture.h',
'../deps/skia/src/gpu/gl/GrGLUtil.cpp',
'../deps/skia/src/gpu/gl/GrGLUtil.h',
'../deps/skia/src/gpu/gl/GrGLUniformManager.cpp',
'../deps/skia/src/gpu/gl/GrGLUniformManager.h',
'../deps/skia/src/gpu/gl/GrGLUniformHandle.h',
'../deps/skia/src/gpu/gl/GrGLVertexBuffer.cpp',
'../deps/skia/src/gpu/gl/GrGLVertexBuffer.h',
'../deps/skia/src/gpu/gl/GrGpuGL.cpp',
'../deps/skia/src/gpu/gl/GrGpuGL.h',
'../deps/skia/src/gpu/gl/GrGpuGL_program.cpp',
'../deps/skia/src/gpu/gl/debug/GrGLCreateDebugInterface.cpp',
'../deps/skia/src/gpu/gl/debug/GrFakeRefObj.h',
'../deps/skia/src/gpu/gl/debug/GrBufferObj.h',
'../deps/skia/src/gpu/gl/debug/GrBufferObj.cpp',
'../deps/skia/src/gpu/gl/debug/GrFBBindableObj.h',
'../deps/skia/src/gpu/gl/debug/GrRenderBufferObj.h',
'../deps/skia/src/gpu/gl/debug/GrTextureObj.h',
'../deps/skia/src/gpu/gl/debug/GrTextureObj.cpp',
'../deps/skia/src/gpu/gl/debug/GrTextureUnitObj.h',
'../deps/skia/src/gpu/gl/debug/GrTextureUnitObj.cpp',
'../deps/skia/src/gpu/gl/debug/GrFrameBufferObj.h',
'../deps/skia/src/gpu/gl/debug/GrFrameBufferObj.cpp',
'../deps/skia/src/gpu/gl/debug/GrShaderObj.h',
'../deps/skia/src/gpu/gl/debug/GrShaderObj.cpp',
'../deps/skia/src/gpu/gl/debug/GrProgramObj.h',
'../deps/skia/src/gpu/gl/debug/GrProgramObj.cpp',
'../deps/skia/src/gpu/gl/debug/GrDebugGL.h',
'../deps/skia/src/gpu/gl/debug/GrDebugGL.cpp',
'../deps/skia/src/gpu/gl/mac/GrGLCreateNativeInterface_mac.cpp',
'../deps/skia/src/gpu/gl/win/GrGLCreateNativeInterface_win.cpp',
'../deps/skia/src/gpu/gl/unix/GrGLCreateNativeInterface_unix.cpp',
'../deps/skia/src/gpu/gl/mesa/GrGLCreateMesaInterface.cpp',
'../deps/skia/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp',
'../deps/skia/src/gpu/android/GrGLCreateNativeInterface_android.cpp',
],
'defines': [
'GR_IMPLEMENTATION=1',
],
'conditions': [
[ 'skia_nv_path_rendering', {
'defines': [
'GR_GL_USE_NV_PATH_RENDERING=1',
],
}],
[ 'skia_os == "linux"', {
'sources!': [
'../deps/skia/src/gpu/gl/GrGLDefaultInterface_none.cpp',
'../deps/skia/src/gpu/gl/GrGLCreateNativeInterface_none.cpp',
],
'link_settings': {
'libraries': [
'-lGL',
'-lX11',
],
},
}],
[ 'skia_mesa and skia_os == "linux"', {
'link_settings': {
'libraries': [
'-lOSMesa',
],
},
}],
[ 'skia_os == "mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
],
},
'sources!': [
'../deps/skia/src/gpu/gl/GrGLDefaultInterface_none.cpp',
'../deps/skia/src/gpu/gl/GrGLCreateNativeInterface_none.cpp',
],
}],
[ 'skia_mesa and skia_os == "mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/usr/X11/lib/libOSMesa.dylib',
],
},
'include_dirs': [
'$(SDKROOT)/usr/X11/include/',
],
}],
[ 'not skia_mesa', {
'sources!': [
'../deps/skia/src/gpu/gl/mesa/GrGLCreateMesaInterface.cpp',
],
}],
[ 'skia_os == "win"', {
'sources!': [
'../deps/skia/src/gpu/gl/GrGLDefaultInterface_none.cpp',
'../deps/skia/src/gpu/gl/GrGLCreateNativeInterface_none.cpp',
],
}],
[ 'not skia_angle', {
'sources!': [
'../deps/skia/include/gpu/gl/SkANGLEGLContext.h',
'../deps/skia/src/gpu/gl/angle/GrGLCreateANGLEInterface.cpp',
'../deps/skia/src/gpu/gl/angle/SkANGLEGLContext.cpp',
],
}],
[ 'skia_os == "android"', {
'sources!': [
'../deps/skia/src/gpu/gl/GrGLDefaultInterface_none.cpp',
'../deps/skia/src/gpu/gl/GrGLCreateNativeInterface_none.cpp',
],
'link_settings': {
'libraries': [
'-lGLESv2',
'-lEGL',
],
},
}],
],
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
|
# -*- coding: utf-8 -*-
"""
Extract slides from a course video
Method: detect frame differences
Packages that need to be installed:
opencv:
  opt 1: conda install -c menpo opencv
  opt 2: conda install -c conda-forge opencv
Zhenhao Ge, 2020-04-15
"""
import os
import cv2
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import glob
from pathlib import Path
import img2pdf
import argparse
# setup parameters
pattern = 'slide-{0:03}.jpg'
blk_size = 500
def showimg(img):
    image = Image.fromarray(img, 'RGB')
image.show()
def imgs2gif(imgs, gifname):
images = [Image.open(img).convert('RGB') for img in imgs]
images[0].save(gifname, save_all=True, append_images=images[1:])
def imgs2pdf(imgs, pdfname, verbose=True):
with open(pdfname, 'wb') as f:
f.write(img2pdf.convert(imgs))
if verbose:
print('wrote images to {}'.format(pdfname))
def plot_diff(diffs, pngname, verbose=True):
plt.plot(diffs)
plt.xlabel('frame index (1 frame / sec)')
plt.ylabel('mean difference')
plt.title('frame differences')
plt.savefig(pngname)
#diffs_sorted = sorted(diffs)[::-1]
#plt.plot(diffs_sorted)
if verbose:
print('wrote diff plot to {}'.format(pngname))
def extract_slides(video_path, pdfname):
"""
check frames at rate of 1 frame per second, and if diff between previous and
current frame is greater than 0, extract that frame as slide, merge all
extracted slide images into pdf file.
"""
# get output dir
output_dir = os.path.splitext(video_path)[0]
# get video file handler
vidcap = cv2.VideoCapture(video_path)
nframes = int(vidcap.get(cv2.CAP_PROP_FRAME_COUNT))
fps = vidcap.get(cv2.CAP_PROP_FPS)
    idxs = list(range(0, nframes, int(fps)))  # sample 1 frame per second
nidxs = len(idxs)
# read the first image
success, img1 = vidcap.read()
#showimg(img1)
#height, width = img1.shape[:2]
# write the first slide
nslides = 0 # count #slides extracted
print('writing slide {} (frame {}) ...'.format(nslides, 0))
output_path = os.path.join(output_dir, pattern.format(nslides))
    cv2.imwrite(output_path, img1)
diffs = []
    for i in range(1, nidxs):
# track status
if i % blk_size == 1:
lower, upper = i, min(i+blk_size-1, nidxs)
print('processing: {}/{} ~ {}/{} ...'.format(lower, nidxs, upper, nidxs))
# extract frame with specific frame index
vidcap.set(cv2.CAP_PROP_POS_FRAMES, idxs[i])
        success, img2 = vidcap.read()
#showimg(img2)
        # skip black screens
        if np.max(img2) <= 1:
            continue
        # write frame as slide if mean diff > 0
        # use cv2.absdiff: plain uint8 subtraction would wrap around
        # (note: np.mean() averages over all pixels and channels)
        diff = np.mean(cv2.absdiff(img1, img2))
if diff > 0:
nslides += 1
print('writing slide {} (frame {}) ...'.format(nslides, idxs[i]))
output_path = os.path.join(output_dir, pattern.format(nslides))
            cv2.imwrite(output_path, img2)
        # post-processing
        diffs.append(diff)
        img1 = img2  # advance the reference frame
# get smallest non-zero diff value (diff between the 2 most similar slides)
    diffs_no_zeros = [d for d in diffs if d != 0]
print('smallest non-zero diff: {}'.format(min(diffs_no_zeros)))
# plot and save diff plot
pngname = os.path.join(output_dir, 'diff.png')
plot_diff(diffs, pngname)
    # merge slide images into a pdf file (sorted to keep page order)
    imgs = sorted(glob.glob(os.path.join(output_dir, 'slide*.jpg')))
    imgs2pdf(imgs, pdfname)
def parse_args():
    usage = "usage: extract slides from video frames by comparing the difference" \
        + " between adjacent frames"
parser = argparse.ArgumentParser(description=usage)
parser.add_argument('--video-dir', default=os.getcwd())
return parser.parse_args()
def main():
args = parse_args()
    # setup dir and file path (parameters need to be specified)
video_dir = args.video_dir
#video_dir = r'C:\Users\zge\Dropbox\Video\Courses\edX_IBM_DeepLearning2'
print('processing videos in video dir: {}'.format(video_dir))
video_paths = glob.glob(os.path.join(video_dir, '*.mp4'))
nvideos = len(video_paths)
for i, video_path in enumerate(video_paths):
print('[{}/{}] processing {} ...'.format(i+1, nvideos, video_path))
# specify the output dir
output_dir = os.path.splitext(video_path)[0]
if not os.path.isdir(output_dir):
print('creating dir: {}'.format(output_dir))
os.makedirs(output_dir)
# get the target pdf name
pdfname = os.path.join(str(Path(output_dir).parent),
'{}.pdf'.format(os.path.basename(output_dir)))
# extract slides if the target pdf file does not exist
if not os.path.isfile(pdfname):
extract_slides(video_path, pdfname)
else:
            print('{} already exists, skipping!'.format(pdfname))
if __name__ == '__main__':
main()
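# Example invocation (assuming this script is saved as extract_slides.py):
#   python extract_slides.py --video-dir /path/to/course/videos
# For each <name>.mp4 this writes <name>/slide-NNN.jpg and <name>/diff.png,
# then merges the slides into <name>.pdf next to that directory.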
|
"""
voltLib: Load fontFeatures objects from Microsoft VOLT
======================================================
This module is experimental and incomplete.
"""
import logging
import re
from io import StringIO
from fontTools.ttLib import TTFont, TTLibError
from fontTools.voltLib import ast as VAst
from fontTools.voltLib.parser import Parser
from fontTools.feaLib import ast  # several statements below still target the feaLib AST
from fontFeatures import ValueRecord, FontFeatures, Routine, Substitution, RoutineReference
log = logging.getLogger()
class Group:
def __init__(self, group):
self.name = group.name.lower()
self.groups = [
x.group.lower() for x in group.enum.enum if isinstance(x, VAst.GroupName)
]
def __lt__(self, other):
if self.name in other.groups:
return True
if other.name in self.groups:
return False
return self.name < other.name
class VoltParser:
_NOT_LOOKUP_NAME_RE = re.compile(r"[^A-Za-z_0-9.]")
_NOT_CLASS_NAME_RE = re.compile(r"[^A-Za-z_0-9.\-]")
def __init__(self, file_or_path, font=None):
self._file_or_path = file_or_path
self._font = font
self._glyph_map = {}
self._glyph_order = None
self._gdef = {}
self._glyphclasses = {}
self._features = {}
self._lookups = {}
self._marks = set()
self._ligatures = {}
self._markclasses = {}
self._anchors = {}
self._settings = {}
self._lookup_names = {}
self._class_names = {}
def _lookupName(self, name):
if name not in self._lookup_names:
res = self._NOT_LOOKUP_NAME_RE.sub("_", name)
while res in self._lookup_names.values():
res += "_"
self._lookup_names[name] = res
return self._lookup_names[name]
def _className(self, name):
if name not in self._class_names:
res = self._NOT_CLASS_NAME_RE.sub("_", name)
while res in self._class_names.values():
res += "_"
self._class_names[name] = res
return self._class_names[name]
def _collectStatements(self, doc):
        # Collect and sort group definitions first, to make sure a group
        # definition that references other groups comes after them, since VOLT
        # does not enforce such ordering and feature files require it.
groups = [s for s in doc.statements if isinstance(s, VAst.GroupDefinition)]
for statement in sorted(groups, key=lambda x: Group(x)):
self._groupDefinition(statement)
for statement in doc.statements:
if isinstance(statement, VAst.GlyphDefinition):
self._glyphDefinition(statement)
elif isinstance(statement, VAst.AnchorDefinition):
self._anchorDefinition(statement)
elif isinstance(statement, VAst.SettingDefinition):
self._settingDefinition(statement)
elif isinstance(statement, VAst.GroupDefinition):
pass # Handled above
elif isinstance(statement, VAst.ScriptDefinition):
self._scriptDefinition(statement)
elif not isinstance(statement, VAst.LookupDefinition):
raise NotImplementedError(statement)
# Lookup definitions need to be handled last as they reference glyph
# and mark classes that might be defined after them.
for statement in doc.statements:
if isinstance(statement, VAst.LookupDefinition):
self._lookupDefinition(statement)
        # Now rearrange features
        for feature, scripts in self._features.items():
            for script, langs in scripts.items():
                for lang, lookups in langs.items():
                    for lookup in lookups:
                        ref = RoutineReference(name=lookup)
                        ref.resolve(self.ff)
                        ref.language = (script, lang)
                        self.ff.addFeature(feature, [ref])
def convert(self):
doc = Parser(self._file_or_path).parse()
if self._font is not None:
self._glyph_order = self._font.getGlyphOrder()
self.ff = FontFeatures()
self._collectStatements(doc)
return self.ff
def _glyphName(self, glyph):
try:
name = glyph.glyph
except AttributeError:
name = glyph
return name
def _groupName(self, group):
try:
name = group.group
except AttributeError:
name = group
return self.ff.namedClasses[name.lower()]
def _coverage(self, coverage):
items = []
for item in coverage:
if isinstance(item, VAst.GlyphName):
items.append([self._glyphName(item)])
elif isinstance(item, VAst.GroupName):
items.append(self._groupName(item))
elif isinstance(item, VAst.Enum):
items.append(self._enum(item))
elif isinstance(item, VAst.Range):
items.append((item.start, item.end))
else:
raise NotImplementedError(item)
return items
def _enum(self, enum):
return self._coverage(enum.enum)
def _context(self, context):
out = []
for item in context:
coverage = self._coverage(item)
if not isinstance(coverage, (tuple, list)):
coverage = [coverage]
out.extend(coverage)
return out
def _groupDefinition(self, group):
name = self._className(group.name)
glyphs = [self._glyphName(x) for x in group.enum.enum]
self.ff.namedClasses[group.name.lower()] = glyphs
def _glyphDefinition(self, glyph):
try:
self._glyph_map[glyph.name] = self._glyph_order[glyph.id]
except TypeError:
pass
if glyph.type in ("BASE", "MARK", "LIGATURE", "COMPONENT"):
self.ff.glyphclasses[glyph.name] = glyph.type.lower()
if glyph.type == "MARK":
self._marks.add(glyph.name)
elif glyph.type == "LIGATURE":
self._ligatures[glyph.name] = glyph.components
def _scriptDefinition(self, script):
stag = script.tag
for lang in script.langs:
ltag = lang.tag
for feature in lang.features:
lookups = {l.split("\\")[0]: True for l in feature.lookups}
ftag = feature.tag
if ftag not in self._features:
self._features[ftag] = {}
if stag not in self._features[ftag]:
self._features[ftag][stag] = {}
assert ltag not in self._features[ftag][stag]
self._features[ftag][stag][ltag] = lookups.keys()
def _settingDefinition(self, setting):
if setting.name.startswith("COMPILER_"):
self._settings[setting.name] = setting.value
else:
log.warning(f"Unsupported setting ignored: {setting.name}")
def _adjustment(self, adjustment):
adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment
        adv_device = adv_adjust_by.items() if adv_adjust_by else None
        dx_device = dx_adjust_by.items() if dx_adjust_by else None
        dy_device = dy_adjust_by.items() if dy_adjust_by else None
return ValueRecord(
xPlacement=dx,
yPlacement=dy,
xAdvance=adv,
xPlaDevice=dx_device,
yPlaDevice=dy_device,
xAdvDevice=adv_device,
)
def _anchor(self, adjustment):
adv, dx, dy, adv_adjust_by, dx_adjust_by, dy_adjust_by = adjustment
return (dx or 0, dy or 0)
def _anchorDefinition(self, anchordef):
anchorname = anchordef.name
glyphname = anchordef.glyph_name
anchor = self._anchor(anchordef.pos)
if anchorname.startswith("MARK_"):
name = "_".join(anchorname.split("_")[1:])
markclass = self._className(name)
glyph = self._glyphName(glyphname)
self._markclasses[(glyphname, anchorname)] = markclass
else:
if glyphname not in self._anchors:
self._anchors[glyphname] = {}
if anchorname not in self._anchors[glyphname]:
self._anchors[glyphname][anchorname] = {}
self._anchors[glyphname][anchorname][anchordef.component] = anchor
def _gposLookup(self, lookup, fealookup):
statements = fealookup.statements
pos = lookup.pos
if isinstance(pos, VAst.PositionAdjustPairDefinition):
for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items():
coverage_1 = pos.coverages_1[idx1 - 1]
coverage_2 = pos.coverages_2[idx2 - 1]
                # If the two sides are not both groups, use “enum pos”,
                # otherwise makeotf will fail.
enumerated = False
for item in coverage_1 + coverage_2:
if not isinstance(item, VAst.GroupName):
enumerated = True
glyphs1 = self._coverage(coverage_1)
glyphs2 = self._coverage(coverage_2)
record1 = self._adjustment(pos1)
record2 = self._adjustment(pos2)
assert len(glyphs1) == 1
assert len(glyphs2) == 1
statements.append(
ast.PairPosStatement(
glyphs1[0], record1, glyphs2[0], record2, enumerated=enumerated
)
)
elif isinstance(pos, VAst.PositionAdjustSingleDefinition):
for a, b in pos.adjust_single:
glyphs = self._coverage(a)
record = self._adjustment(b)
assert len(glyphs) == 1
statements.append(
ast.SinglePosStatement([(glyphs[0], record)], [], [], False)
)
elif isinstance(pos, VAst.PositionAttachDefinition):
anchors = {}
for marks, classname in pos.coverage_to:
for mark in marks:
# Set actually used mark classes. Basically a hack to get
# around the feature file syntax limitation of making mark
# classes global and not allowing mark positioning to
# specify mark coverage.
for name in mark.glyphSet():
key = (name, "MARK_" + classname)
self._markclasses[key].used = True
markclass = ast.MarkClass(self._className(classname))
for base in pos.coverage:
for name in base.glyphSet():
if name not in anchors:
anchors[name] = []
if classname not in anchors[name]:
anchors[name].append(classname)
for name in anchors:
components = 1
if name in self._ligatures:
components = self._ligatures[name]
marks = []
for mark in anchors[name]:
markclass = ast.MarkClass(self._className(mark))
for component in range(1, components + 1):
if len(marks) < component:
marks.append([])
anchor = None
if component in self._anchors[name][mark]:
anchor = self._anchors[name][mark][component]
marks[component - 1].append((anchor, markclass))
base = self._glyphName(name)
if name in self._marks:
mark = ast.MarkMarkPosStatement(base, marks[0])
elif name in self._ligatures:
mark = ast.MarkLigPosStatement(base, marks)
else:
mark = ast.MarkBasePosStatement(base, marks[0])
statements.append(mark)
elif isinstance(pos, VAst.PositionAttachCursiveDefinition):
# Collect enter and exit glyphs
enter_coverage = []
for coverage in pos.coverages_enter:
for base in coverage:
for name in base.glyphSet():
enter_coverage.append(name)
exit_coverage = []
for coverage in pos.coverages_exit:
for base in coverage:
for name in base.glyphSet():
exit_coverage.append(name)
            # Write enter anchors; also check if the glyph has an exit anchor
            # and write it, too.
for name in enter_coverage:
glyph = self._glyphName(name)
entry = self._anchors[name]["entry"][1]
exit = None
if name in exit_coverage:
exit = self._anchors[name]["exit"][1]
                    exit_coverage.remove(name)
statements.append(ast.CursivePosStatement(glyph, entry, exit))
# Write any remaining exit anchors.
for name in exit_coverage:
glyph = self._glyphName(name)
exit = self._anchors[name]["exit"][1]
statements.append(ast.CursivePosStatement(glyph, None, exit))
else:
raise NotImplementedError(pos)
def _gposContextLookup(
self, lookup, prefix, suffix, ignore, fealookup, targetlookup
):
statements = fealookup.statements
assert not lookup.reversal
pos = lookup.pos
if isinstance(pos, VAst.PositionAdjustPairDefinition):
for (idx1, idx2), (pos1, pos2) in pos.adjust_pair.items():
glyphs1 = self._coverage(pos.coverages_1[idx1 - 1])
glyphs2 = self._coverage(pos.coverages_2[idx2 - 1])
assert len(glyphs1) == 1
assert len(glyphs2) == 1
glyphs = (glyphs1[0], glyphs2[0])
if ignore:
statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])
else:
lookups = (targetlookup, targetlookup)
statement = ast.ChainContextPosStatement(
prefix, glyphs, suffix, lookups
)
statements.append(statement)
elif isinstance(pos, VAst.PositionAdjustSingleDefinition):
glyphs = [ast.GlyphClass()]
for a, b in pos.adjust_single:
glyph = self._coverage(a)
glyphs[0].extend(glyph)
if ignore:
statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])
else:
statement = ast.ChainContextPosStatement(
prefix, glyphs, suffix, [targetlookup]
)
statements.append(statement)
elif isinstance(pos, VAst.PositionAttachDefinition):
glyphs = [ast.GlyphClass()]
for coverage, _ in pos.coverage_to:
glyphs[0].extend(self._coverage(coverage))
if ignore:
statement = ast.IgnorePosStatement([(prefix, glyphs, suffix)])
else:
statement = ast.ChainContextPosStatement(
prefix, glyphs, suffix, [targetlookup]
)
statements.append(statement)
else:
raise NotImplementedError(pos)
def _gsubLookup(self, lookup, prefix, suffix, ignore, chain, routine):
sub = lookup.sub
for key, val in sub.mapping.items():
if not key or not val:
path, line, column = sub.location
log.warning(f"{path}:{line}:{column}: Ignoring empty substitution")
continue
statement = None
glyphs = self._coverage(key)
replacements = self._coverage(val)
if ignore:
chain_context = (prefix, glyphs, suffix)
statement = ast.IgnoreSubstStatement([chain_context])
else:
statement = Substitution(
glyphs,
replacements,
precontext=prefix,
postcontext=suffix,
lookups=chain
)
if isinstance(sub, VAst.SubstitutionReverseChainingSingleDefinition):
statement.reversed = True
routine.rules.append(statement)
def _lookupDefinition(self, lookup):
        mark_attachment = None
mark_filtering = None
flags = 0
if lookup.direction == "RTL":
flags |= 1
if not lookup.process_base:
flags |= 2
# FIXME: Does VOLT support this?
# if not lookup.process_ligatures:
# flags |= 4
if not lookup.process_marks:
flags |= 8
elif isinstance(lookup.process_marks, str):
            mark_attachment = self._groupName(lookup.process_marks)
elif lookup.mark_glyph_set is not None:
mark_filtering = self._groupName(lookup.mark_glyph_set)
if "\\" in lookup.name:
# Merge sub lookups as subtables (lookups named “base\sub”),
# makeotf/feaLib will issue a warning and ignore the subtable
# statement if it is not a pairpos lookup, though.
name = lookup.name.split("\\")[0]
            if name.lower() not in self._lookups:
                fealookup = ast.LookupBlock(self._lookupName(name))
                # NOTE: lookup flags are not yet carried over for merged
                # sub-lookups in this port
                fealookup.statements.append(ast.Comment("# " + lookup.name))
else:
fealookup = self._lookups[name.lower()]
fealookup.statements.append(ast.SubtableStatement())
fealookup.statements.append(ast.Comment("# " + lookup.name))
self._lookups[name.lower()] = fealookup
else:
            routine = Routine(name=lookup.name, flags=flags,
                              markFilteringSet=mark_filtering,
                              markAttachmentSet=mark_attachment)
self.ff.routines.append(routine)
self._lookups[lookup.name.lower()] = routine
contexts = []
if lookup.context:
for context in lookup.context:
prefix = self._context(context.left)
suffix = self._context(context.right)
ignore = context.ex_or_in == "EXCEPT_CONTEXT"
contexts.append([prefix, suffix, ignore, False])
                # It seems that VOLT will create a contextual substitution
                # using only the input if there are no other contexts in
                # this lookup.
if ignore and len(lookup.context) == 1:
contexts.append([[], [], False, True])
else:
contexts.append([[], [], False, False])
targetlookup = None
for prefix, suffix, ignore, chain in contexts:
if lookup.sub is not None:
self._gsubLookup(lookup, prefix, suffix, ignore, chain, routine)
if lookup.pos is not None:
if self._settings.get("COMPILER_USEEXTENSIONLOOKUPS"):
fealookup.use_extension = True
if prefix or suffix or chain or ignore:
if not ignore and targetlookup is None:
targetname = self._lookupName(lookup.name + " target")
targetlookup = ast.LookupBlock(targetname)
fealookup.targets = getattr(fealookup, "targets", [])
fealookup.targets.append(targetlookup)
self._gposLookup(lookup, targetlookup)
self._gposContextLookup(
lookup, prefix, suffix, ignore, fealookup, targetlookup
)
# else:
# self._gposLookup(lookup, fealookup)
def main(args=None):
import argparse
parser = argparse.ArgumentParser(description="Convert VOLT/VTP to feature files.")
parser.add_argument("input", metavar="INPUT", help="input font/VTP file to process")
parser.add_argument("featurefile", metavar="FEATUEFILE", help="output feature file")
parser.add_argument(
"-q", "--quiet", action="store_true", help="Suppress non-error messages"
)
parser.add_argument(
"--traceback", action="store_true", help="Don’t catch exceptions"
)
options = parser.parse_args(args)
if options.quiet:
log.setLevel(logging.ERROR)
logging.basicConfig(format="%(levelname)s: %(message)s")
file_or_path = options.input
font = None
try:
font = TTFont(file_or_path)
if "TSIV" in font:
file_or_path = StringIO(font["TSIV"].data.decode("utf-8"))
else:
log.error('"TSIV" table is missing, font was not saved from VOLT?')
return 1
except TTLibError:
pass
converter = VoltParser(file_or_path, font)
try:
ff = converter.convert()
except NotImplementedError as e:
if options.traceback:
raise
location = getattr(e.args[0], "location", None)
message = f'"{e}" is not supported'
if location:
path, line, column = location
log.error(f"{path}:{line}:{column}: {message}")
else:
log.error(message)
return 1
print(ff.asFea())
if __name__ == "__main__":
import sys
sys.exit(main())
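# A minimal usage sketch (file names are hypothetical): convert a VOLT
# project file directly instead of going through main():
#
#   font = TTFont("MyFont.ttf")                    # font saved from VOLT
#   ff = VoltParser("MyFont.vtp", font).convert()  # returns a FontFeatures
#   print(ff.asFea())                              # dump as feature text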
|
import zlib
class StateManager:
def __init__(self):
self.saved = True
self.undo_stack = []
self.undo_ptr = -1
def compress_text(self, text):
return zlib.compress(text.encode())
def decompress_text(self, text):
return zlib.decompress(text).decode()
def push_state(self, caret, text):
self.saved = False
        self.undo_stack = self.undo_stack[:self.undo_ptr + 1]
self.undo_ptr += 1
self.undo_stack.append((caret.copy(), self.compress_text(text)))
    def undo(self):
        if not self.undo_stack:
            # nothing recorded yet; mirror redo()'s empty result
            return (None, None)
        self.saved = False
        self.undo_ptr = max(0, self.undo_ptr - 1)
        caret, compressed = self.undo_stack[self.undo_ptr]
        return (caret, self.decompress_text(compressed))
def redo(self):
if self.undo_ptr + 1 < len(self.undo_stack):
self.saved = False
self.undo_ptr += 1
caret, compressed = self.undo_stack[self.undo_ptr]
return (caret, self.decompress_text(compressed))
return (None, None)
def clear_stack(self):
self.undo_ptr = -1
self.undo_stack = []
self.saved = True
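# A quick usage sketch (illustrative; the list-based caret is an assumption,
# any object with a .copy() method works):
if __name__ == "__main__":
    sm = StateManager()
    sm.push_state([0, 0], "hello")
    sm.push_state([0, 5], "hello world")
    caret, text = sm.undo()       # back to "hello"
    assert text == "hello"
    caret, text = sm.redo()       # forward to "hello world"
    assert text == "hello world"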
|