#coding:utf-8
import re
import markdown
class MyMarkDown:
__table = None
__header = """
|Log Name|Log Type|Sub Log Type|Description|
|-|-|-|-|
"""
__line = "|%s|`%s`|%s|%s|\n"
    __log_enum_pattern = re.compile(r'^([a-zA-Z0-9_]+)\s*\(\s*([0-9]+)\s*,\s*([0-9]+)\s*,\s*[a-zA-Z0-9_]+\s*,\s*"(.+)"\s*[\),\s]+$')
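    # Capture groups: 1 = enum name, 2 = log type, 3 = sub log type, 4 = quoted description.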
__exts = ['markdown.extensions.extra', 'markdown.extensions.codehilite','markdown.extensions.tables','markdown.extensions.toc']
    def __init__(self):
        """Constructor."""
        self.__table = self.__header
    def addLine(self, name, type_name, sub_type="", desc=""):
        """Append one table row."""
        self.__table = self.__table + self.__line % (name, type_name, sub_type, desc)
    def addLineByString(self, data):
        """Append one row from a raw enum-definition string, splitting and parsing it here, e.g.
        data=AROUNDAPI_DEL_SUBWAY_LINE(39002, 2000, ERROR_SUB_LOG_TYPE, "Mercury API - delete subway line")
        """
        m = self.__log_enum_pattern.match(data)
        if m:
            self.addLine(m.group(1), m.group(2), m.group(3), m.group(4))
def getAll(self):
html_text = markdown.markdown(self.__table, extensions=self.__exts)
return html_text
if __name__ == '__main__':
s = MyMarkDown()
s.addLine("avc", "xxx", "x22", "asdasd")
s.addLine("a222vc", "xxx", "x22", "asdasd")
# s.addLineByString('AROUNDAPI_DEL_SUBWAY_LINE(39002, 2000, ERROR_SUB_LOG_TYPE, "test for show")')
print(s.getAll()) |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# The main branch of the program calls cylinder(), which computes the surface area of a cylinder.
# Inside cylinder() a function circle() is defined that computes the area of a circle (pi * R**2).
# cylinder() asks the user whether they want only the lateral surface area of the cylinder,
# computed as 2 * pi * R * H, or the full surface area. In the latter case, twice the result of
# circle() is added to the lateral surface area.
import math
def circle(R):
return R ** 2 * math.pi
def cylinder(R, H, full_S=True):
    S_cylinder = 2 * math.pi * R * H
    if full_S:
        return S_cylinder + 2 * circle(R)
    else:
        return S_cylinder
if __name__ == '__main__':
    r = float(input("Enter the radius: "))
    h = float(input("Enter the height: "))
    c = input("Lateral surface area (S_cylinder) or full surface area (full_S)? "
              "Type the name of what you want to compute (in English): ")
s = cylinder(r, h, full_S=(c == 'full_S'))
print(s) |
from worin.preproc import codify_letter
def test_codify_letter():
assert codify_letter('a') == (24, 37, 56, 77)
assert codify_letter('한') == (0, 56, 57, 81)
assert codify_letter('韓') == (35, 37, 56, 77) |
from calculate_anything.exceptions import ExtendedException
from typing import Any, Optional
__all__ = ['QueryResult']
class QueryResult:
def __init__(
self,
icon: str = '',
name: str = '',
description: str = '',
clipboard: Optional[str] = None,
value: Any = None,
error: Optional[ExtendedException] = None,
order: int = 0,
):
self.icon = icon
self.name = name
self.description = description
self.clipboard = clipboard
self.value = value
self.error = error
self.order = order
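
# Illustrative usage (a minimal sketch; these field values are hypothetical):
# result = QueryResult(name='42 + 1', description='Result of "42 + 1"',
#                      clipboard='43', value=43, order=0)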
|
from .card import Card
from .deck import Deck
class Rectangle:
def __init__(self, **kwargs):
self.card_id = kwargs.get("CardID", 0)
self.card_code = kwargs.get("CardCode", None)
self.top_left_x = kwargs.get("TopLeftX", 0)
self.top_left_y = kwargs.get("TopLeftY", 0)
self.width = kwargs.get("Width", 0)
self.height = kwargs.get("Height", 0)
self.isLocalPlayer = kwargs.get("LocalPlayer")
self.card = Card(self.card_code)
def __repr__(self):
return f"Rectangle(Card: {self.card_code})"
class Screen:
def __init__(self, **kwargs):
self.width = kwargs.get("ScreenWidth", 0)
self.height = kwargs.get("ScreenHeight", 0)
class GameFrame:
def __init__(self, **kwargs):
self.player = kwargs.get("PlayerName", "The Man With No Name")
self.opponent = kwargs.get("OpponentName", "The Man With No Name")
self.game_state = kwargs.get("GameState", "Menus")
        self.screen = Screen(**kwargs.get("Screen", {}))
self._rectangles = kwargs.get("Rectangles", [])
self.rectangles = self.parse_rectangles()
def parse_rectangles(self):
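        # Skip rectangles whose CardCode is "face" (not actual cards).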
rects = [
rect for rect in self._rectangles if rect["CardCode"] != "face"
]
return list(map(lambda x: Rectangle(**x), rects))
@property
def player_rects(self):
return filter(lambda x: x.isLocalPlayer, self.rectangles)
@property
def opponent_rects(self):
return filter(lambda x: not x.isLocalPlayer, self.rectangles)
class Game:
def __init__(self, player, opponent, screen, player_deck):
self.player = player
self.opponent = opponent
self.screen = screen
self.player_cards_used = Deck()
self.opponent_cards_used = Deck()
self.initial_player_deck = self.current_player_deck = player_deck
self.result = None
def process_frame(self, frame):
for rect in frame.player_rects:
if rect.card_id not in [
card.id for card in self.player_cards_used.cards
]:
current_card = Card(CardID=rect.card_id,
CardCode=rect.card_code)
self.player_cards_used.add_card(current_card)
                # TODO: remove the card from the current player deck
for rect in frame.opponent_rects:
if rect.card_id not in [
card.id for card in self.opponent_cards_used.cards
]:
current_card = Card(CardID=rect.card_id,
CardCode=rect.card_code)
self.opponent_cards_used.add_card(current_card)
class ExpeditionState:
def __init__(self, **kwargs):
self.is_active = kwargs.get("IsActive", False)
self.state = kwargs.get("State", "Inactive")
self.record = kwargs.get("Record", [])
self.draft_picks = kwargs.get("DraftPicks", [])
self.deck = kwargs.get("Deck") # TODO: convert to Deck instance
self.games_played = kwargs.get("Games", 0)
self.wins = kwargs.get("Wins", 0)
self.losses = kwargs.get("Losses", 0)
def __repr__(self):
return f"Expedition(State: {self.state}, Games Played: {self.games_played})" |
"""
Displaying a custom label for each individual point
===================================================
mpldatacursor's *point_labels* functionality can be emulated with an event
handler that sets the annotation text with a label selected from the target
index.
"""
import matplotlib.pyplot as plt
import mplcursors
import numpy as np
labels = ["a", "b", "c", "d", "e"]
x = np.array([0, 1, 2, 3, 4])
fig, ax = plt.subplots()
line, = ax.plot(x, x, "ro")
mplcursors.cursor(ax).connect(
"add", lambda sel: sel.annotation.set_text(labels[sel.target.index]))
plt.show()
|
#!/usr/bin/env python3
# Copyright (c) 2018 Dynamic Robotics Laboratory
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from cassiemujoco import *
from cassiemujoco_ctypes import joint_filter_t, drive_filter_t
import time
import numpy as np
import math
import matplotlib.pyplot as plt
"""
Test the end-effector Jacobian, don't care
1. dynamically consistent (Mx*J*Mx^-1);
2. closed-loop constraints;
3. rotational components.
Only to get the rough estimate of the Jacobian matrix on motors.
"""
# Initialize cassie simulation
sim = CassieSim("../model/cassie.xml")
vis = CassieVis(sim)
# Set control parameters
u = pd_in_t()
test_jac_control = True
test_random = True
if test_jac_control:
qpos = sim.qpos()
qpos[2] = 1.5
sim.set_qpos(qpos)
sim.hold()
# Record time
t = time.monotonic()
count = 0
ltarget = np.array([0, 0.13, -0.8])
rtarget = np.array([0, -0.13, -0.5])
kp = np.array([70, 70, 100, 100, 50])
kd = np.array([7.0, 7.0, 8.0, 8.0, 5.0])
# Run until window is closed or vis is quit
draw_state = vis.draw(sim)
vel_idx = [6, 7, 8, 12, 18, 19, 20, 21, 25, 31]
pos_idx = [7, 8, 9, 14, 20, 21, 22, 23, 28, 34]
ts_noise_up = np.array([[0, 0, 0.13, 0.13, 0],
[0.13, 0, 0.00, 0.00, 0],
[0, 0, 0.13, 0.13, 0]])
ts_noise = np.block([
[ts_noise_up, np.zeros((3,5))],
[np.zeros((3,5)), ts_noise_up]
])
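# ts_noise is block-diagonal: one 3x5 task-space noise block per leg (left, then right).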
offset = np.array([0.0045, 0.0, 0.4973, -1.1997, -1.5968, 0.0045, 0.0, 0.4973, -1.1997, -1.5968])
while draw_state:
if not vis.ispaused():
for i in range(60):
jacpl = sim.get_jacobian(name='left-foot').reshape(3, -1)
jacpr = sim.get_jacobian(name='right-foot').reshape(3, -1)
jacp = np.concatenate((jacpl, jacpr))
jacp_motor = jacp.take(vel_idx, axis=1)
jdag = np.linalg.pinv(jacp_motor)
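            # jdag: pseudo-inverse of the motor-joint Jacobian; maps Cartesian foot deltas to motor deltas.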
if test_jac_control:
lpos = np.array(sim.foot_pos()[0:3]) - np.array(sim.qpos()[0:3])
rpos = np.array(sim.foot_pos()[3:6]) - np.array(sim.qpos()[0:3])
dxl = ltarget - lpos
dxr = rtarget - rpos
# print(np.dot(jdag[:,0:3], dxl).shape)
# print(np.dot(jdag[:,3:6], dxr).shape)
dq = np.dot(jdag[:,0:3], dxl) + np.dot(jdag[:,3:6], dxr)
# print(dq.shape)
# print(dq)
# print(lpos)
qpos = sim.qpos()
mpos = [qpos[i] for i in pos_idx]
for i in range(5):
u.leftLeg.motorPd.pGain[i] = kp[i] * 0.1
u.rightLeg.motorPd.pGain[i] = kp[i] * 0.1
u.leftLeg.motorPd.dGain[i] = kd[i] * 0.1
u.rightLeg.motorPd.dGain[i] = kd[i] * 0.1
u.leftLeg.motorPd.torque[i] = 0 # Feedforward torque
u.rightLeg.motorPd.torque[i] = 0
u.leftLeg.motorPd.pTarget[i] = dq[i] + mpos[i]
u.rightLeg.motorPd.pTarget[i] = dq[i+5] + mpos[i+5]
u.leftLeg.motorPd.dTarget[i] = 0
u.rightLeg.motorPd.dTarget[i] = 0
y = sim.step_pd(u)
else:
action = np.random.uniform(-10, 10, size=10)
for i in range(5):
u.leftLeg.motorPd.pGain[i] = kp[i]
u.rightLeg.motorPd.pGain[i] = kp[i]
u.leftLeg.motorPd.dGain[i] = kd[i]
u.rightLeg.motorPd.dGain[i] = kd[i]
u.leftLeg.motorPd.torque[i] = 0 # Feedforward torque
u.rightLeg.motorPd.torque[i] = 0
u.leftLeg.motorPd.pTarget[i] = action[i] + offset[i]
u.rightLeg.motorPd.pTarget[i] = action[i+5] + offset[i+5]
u.leftLeg.motorPd.dTarget[i] = 0
u.rightLeg.motorPd.dTarget[i] = 0
y = sim.step_pd(u)
sd_final = np.matmul(jdag, ts_noise)
# print(sd_final.shape)
# print("new js noise matrix")
# for i in range(10):
# for j in range(10):
# print("{: 3.2f}".format(sd_final[i][j]), end=" ")
# print("\n")
# input()
draw_state = vis.draw(sim)
count += 1
while time.monotonic() - t < 60*0.0005:
time.sleep(0.0001)
t = time.monotonic()
|
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Dense, Add
from models.encoder import create_image_encoder, create_caption_encoder
from models.utils import load_pre_trained_image_model
def create_model(
image_model_weights, state_size, dropout,
word_idx, glove_file, mark_start, mark_end, vocab_size, max_tokens=16
):
# Load pre-trained image model
topic_model, feature_model = load_pre_trained_image_model(image_model_weights)
# Encode Images
feature_input, image_model_output = create_image_encoder(feature_model, state_size, dropout)
# Encode Captions
topic_input, caption_input, caption_model_output = create_caption_encoder(
topic_model, word_idx, glove_file, mark_start, mark_end, state_size, dropout, vocab_size, max_tokens
)
# merge encoders and create the decoder
merge_net = Add()([image_model_output, caption_model_output])
merge_net = Dense(state_size, activation='relu')(merge_net)
outputs = Dense(vocab_size, activation='softmax', name='caption_output')(merge_net)
# Define model
model = Model(
inputs=[feature_input, topic_input, caption_input],
outputs=outputs
)
    model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adam')
return model
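
# Illustrative call (hypothetical paths and hyperparameters, not from the original source):
# model = create_model('image_model_weights.h5', state_size=256, dropout=0.5,
#                      word_idx=word_idx, glove_file='glove.6B.300d.txt',
#                      mark_start='ssss ', mark_end=' eeee', vocab_size=10000)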
|
from anarcho import app, db
from anarcho.models.track import Track
from flask import request, jsonify
@app.route('/api/track/list', methods=['GET'])
def tracks_list():
result = Track.query.all()
return jsonify(data=[i.to_dict() for i in result])
@app.route('/api/track', methods=['POST'])
def track_post():
track_log = request.values.get("log", "").encode("utf-8")
track_time = request.values.get("time", "")
track = Track(track_log, track_time)
db.session.add(track)
db.session.commit()
return track.to_json()
|
import pika, sys, os
import time
time.sleep(10)
def main():
credentials = pika.PlainCredentials('admin', 'admin')
connection = pika.BlockingConnection(pika.ConnectionParameters(
'rabbitmq',
5672,
'/',
credentials))
channel = connection.channel()
channel.queue_declare(queue='hey')
def callback(ch, method, properties, body):
print(" [x] Received %r" % body)
channel.basic_consume(queue='hey', on_message_callback=callback, auto_ack=True)
print(' [*] Waiting for messages. To exit press CTRL+C')
channel.start_consuming()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print('Interrupted')
try:
sys.exit(0)
except SystemExit:
os._exit(0)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import ListManager as LM
class ModeManager(LM.ListManager):
MODE_SINGLE = u'選択再生モード'
MODE_CONTINUOUS = u'連続再生モード'
MODE_RANDOM = u'ランダム再生モード'
def __init__(self):
mode_list = [self.MODE_SINGLE,
self.MODE_CONTINUOUS,
self.MODE_RANDOM]
LM.ListManager.__init__(self, mode_list)
    def is_require_name_sort(self):
        name = self.get()['name']
        return (name == self.MODE_SINGLE or
                name == self.MODE_CONTINUOUS)
    def is_require_random_sort(self):
        name = self.get()['name']
        return name == self.MODE_RANDOM
    def is_auto_mode(self):
        name = self.get()['name']
        return (name == self.MODE_CONTINUOUS or
                name == self.MODE_RANDOM)
|
from .logerator import before_and_after
from .logerator import time_write
__all__ = ['before_and_after', 'time_write'] |
import logging
# adjust config to your own preferences
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(levelname)s %(message)s',
filename='myapp.log',
filemode='w')
class LoggingException(Exception):
logger = logging.getLogger()
logLevel = logging.ERROR
def __init__(self):
self.logger.log(self.logLevel, self.logMessage())
def logMessage(self):
        return 'Exception occurred'
if __name__ == '__main__':
class BlewIt(LoggingException):
logLevel = logging.WARNING
def logMessage(self):
return 'you blew it'
    try:
        raise BlewIt
    except BlewIt:
        pass
|
from keycloak_admin_aio.types import GroupRepresentation
from .... import KeycloakResource
class GroupsByIdChildren(KeycloakResource):
"""Get children of a group by id.
.. code:: python
from keycloak_admin_aio import KeycloakAdmin, GroupRepresentation
kc: KeycloakAdmin # must be instantiated
group_id: str # uuid
"""
def get_url(self) -> str:
return f"{self._get_parent_url()}/children"
async def create(self, group_representation: GroupRepresentation):
"""Create a parent/child group relationship.
.. code:: python
            group_representation = GroupRepresentation(name="child-group-name")
            await kc.groups.by_id(group_id).children.create(group_representation)
"""
connection = await self._get_connection()
await connection.post(self.get_url(), json=group_representation.to_dict())
|
class OperatorShader:
pass
|
#!/usr/bin/env python3
# gcp-ssh-key-adder.py
# Adds ssh-keys to GCP the easy way
# 2018, Sebastian Weigand || [email protected]
# Version 1.0.1
import subprocess
import yaml
import argparse
import os
import sys
import tempfile
import logging
# =============================================================================
# Initialization
# =============================================================================
parser = argparse.ArgumentParser(
description='Add SSH keys to Google Cloud, the easy way!',
epilog='A tdg script.')
parser.add_argument(
'ssh_key_files',
metavar='public-ssh-key-file',
nargs='+',
help='path to public SSH key file you wish to add')
parser.add_argument(
'-i',
'--info',
action="store_true",
default=False,
help='enable info logging mode')
parser.add_argument(
'-d',
'--debug',
action="store_true",
default=False,
help='enable debug logging mode')
args = parser.parse_args()
logger = logging.getLogger('gcp-ssh-key-adder')
logger.setLevel(logging.WARN)
formatter = logging.Formatter('[%(asctime)s] | %(levelname)-8s | %(message)s')
console = logging.StreamHandler()
console.setLevel(logging.WARN)
console.setFormatter(formatter)
logger.addHandler(console)
if args.info:
logger.setLevel(logging.INFO)
console.setLevel(logging.INFO)
if args.debug:
logger.setLevel(logging.DEBUG)
console.setLevel(logging.DEBUG)
# =============================================================================
# Helper functions
# =============================================================================
def eprint(*text):
print(*text, file=sys.stderr)
# =============================================================================
# New Key Parsing
# =============================================================================
# Path cleaning for multiple SSH keys:
ssh_key_file_paths = [
os.path.realpath(os.path.expanduser(path)) for path in args.ssh_key_files
]
# Path sanity:
logger.info('Checking SSH paths for existence and readability...')
for path in ssh_key_file_paths:
logger.debug('Processing: %s' % path)
if not os.path.exists(path):
exit('Could not read path: %s' % path)
if not os.access(path, os.R_OK):
        exit('Insufficient privileges to read path: %s' % path)
# Process new keys:
keys_ok = True
ssh_keys = []
logger.info('Reading in SSH keys from paths...')
for path in ssh_key_file_paths:
logger.debug('Processing: %s' % path)
with open(path, 'r') as f:
_ssh_key = f.read()
try:
_key_type, _key, _userhost = _ssh_key.split()
_user = _userhost.split('@')[0]
_new_key = '{user}:{key_type} {key} {userhost}'.format(
user=_user, key_type=_key_type, key=_key, userhost=_userhost)
ssh_keys.append(_new_key)
except ValueError as e:
eprint(
'Invalid SSH key format (expecting <key_type> <key> <user@host> per line): ',
_ssh_key)
keys_ok = False
if not keys_ok:
exit('Errors encountered while parsing SSH keys, so aborting.')
# =============================================================================
# Old Key Parsing
# =============================================================================
# Get current SSH keys:
metadata_command = subprocess.run(
['gcloud', 'compute', 'project-info', 'describe'], stdout=subprocess.PIPE)
if metadata_command.returncode != 0:
exit('gcloud command invocation error')
logger.debug('Parsing YAML from gcloud command...')
metadata = yaml.safe_load(metadata_command.stdout)
logger.debug('Reading keys from parsed YAML...')
for key in metadata['commonInstanceMetadata']['items']:
if key['key'] == 'ssh-keys':
ssh_keys += key['value'].splitlines()
break
# =============================================================================
# File Generation
# =============================================================================
# File creation:
fd, tempfile_path = tempfile.mkstemp()
logger.debug('Created temporary file: %s' % tempfile_path)
logger.debug('Note: Will not write exact duplicate keys...')
with open(tempfile_path, 'w') as f:
for ssh_key in set(ssh_keys):
f.write(ssh_key)
f.write('\n')
os.close(fd)
logger.debug('Wrote and closed temporary file.')
# =============================================================================
# Invocation
# =============================================================================
print('Updating keys in Google Cloud (this may take a few moments)...')
update_command = subprocess.run(
[
'gcloud', 'compute', 'project-info', 'add-metadata',
'--metadata-from-file', 'ssh-keys=' + tempfile_path
],
stdout=subprocess.PIPE)
os.remove(tempfile_path)
logger.debug('Removed temporary file.')
if update_command.returncode != 0:
exit('gcloud command invocation error')
|
from pytest import fixture, raises
from ebu_tt_live.config import AppConfig, UniversalNodes
import ebu_tt_live.config.node as node_config
import ebu_tt_live.config.carriage as carriage_config
import ebu_tt_live.node as processing_node
from ebu_tt_live.config.common import ConfigurableComponent
from ebu_tt_live.errors import ConfigurationError
@fixture(autouse=True)
def reset_backend():
    # This fixture makes sure the backend reference is deleted before every test
ConfigurableComponent._backend = None
def test_simple_producer():
val_source = {
"nodes": {
"node1": {
"id": "producer1",
"type": "simple-producer",
"show_time": True,
"sequence_identifier": "TestSequence1",
"output": {
"carriage": {
"type": "direct",
"id": "default"
}
}
}
},
"backend": {
"type": "dummy"
}
}
app = AppConfig(
values_source_list=[val_source]
)
app.start()
def test_handover_conf():
val_source = {
"nodes": {
"node1": {
"id": "handover1",
"type": "handover",
"authors_group_identifier": "TestGroup1",
"sequence_identifier": "TestSequence1",
"input": {
"carriage": {
"type": "direct",
"id": "default_in"
}
},
"output": {
"carriage": {
"type": "direct",
"id": "default_out"
}
}
}
},
"backend": {
"type": "dummy"
}
}
app = AppConfig(
values_source_list=[val_source]
)
app.start()
created_node_configurator = app.get_node("node1")
assert isinstance(created_node_configurator, node_config.Handover)
assert isinstance(created_node_configurator.component, processing_node.HandoverNode)
assert created_node_configurator.component._sequence_identifier == 'TestSequence1'
assert created_node_configurator.component._authors_group_identifier == 'TestGroup1'
assert isinstance(created_node_configurator._input.carriage, carriage_config.DirectInput)
assert isinstance(created_node_configurator.output.carriage, carriage_config.DirectOutput)
def test_handover_default_conf():
val_source = {
"nodes": {
"node1": {
"id": "handover1",
"type": "handover",
"input": {
"carriage": {
"type": "direct",
"id": "default_in"
}
},
"output": {
"carriage": {
"type": "direct",
"id": "default_out"
}
}
}
},
"backend": {
"type": "dummy"
}
}
app = AppConfig(
values_source_list=[val_source]
)
app.start()
created_node_configurator = app.get_node("node1")
assert isinstance(created_node_configurator, node_config.Handover)
assert isinstance(created_node_configurator.component, processing_node.HandoverNode)
assert created_node_configurator.component._sequence_identifier == 'HandoverSequence1'
assert created_node_configurator.component._authors_group_identifier == 'AuthorsGroup1'
assert isinstance(created_node_configurator._input.carriage, carriage_config.DirectInput)
assert isinstance(created_node_configurator.output.carriage, carriage_config.DirectOutput)
def test_simple_producer_wrong_backend():
val_source = {
"nodes": {
"node1": {
"id": "producer1",
"type": "simple-producer",
"show_time": True,
"sequence_identifier": "TestSequence1",
"output": {
"carriage": {
"type": "websocket",
"listen": "ws://localhost:9001"
}
}
}
},
"backend": {
"type": "dummy"
}
}
app = AppConfig(
values_source_list=[val_source]
)
with raises(AttributeError):
app.start()
def test_simple_consumer():
val_source = {
"nodes": {
"node1": {
"id": "consumer1",
"type": "simple-consumer",
"input": {
"carriage": {
"type": "direct",
"id": "default"
}
}
}
},
"backend": {
"type": "dummy"
}
}
app = AppConfig(
values_source_list=[val_source]
)
app.start()
def test_simple_consumer_wrong_backend():
val_source = {
"nodes": {
"node1": {
"id": "consumer1",
"type": "simple-consumer",
"input": {
"carriage": {
"type": "websocket",
"connect": ["ws://localhost:9000/TestSequence2/subscribe"]
}
}
}
},
"backend": {
"type": "dummy"
}
}
app = AppConfig(
values_source_list=[val_source]
)
with raises(AttributeError):
app.start()
def test_wrong_node_type():
val_source = {
"nodes": {
"node1": {
"id": "consumer1",
"type": "no-such-consumer",
"input": {
"carriage": {
"type": "websocket",
"connect": ["ws://localhost:9000/TestSequence2/subscribe"]
}
}
}
},
"backend": {
"type": "dummy"
}
}
with raises(ConfigurationError):
app = AppConfig(
values_source_list=[val_source]
)
app.start()
|
#!/usr/bin/env python3
import re
# Read input
with open('06_input.txt', 'r') as f:
points = [tuple(int(c) for c in re.findall(r"\d+", line)) for line in f]
# Define number of points, and coordinates of the bounding box
n = len(points)
w_min = min([p[0] for p in points])
h_min = min([p[1] for p in points])
w_max = max([p[0] for p in points])
h_max = max([p[1] for p in points])
# Helper Functions
def dist(p1, p2):
''' Computes the Manhattan distance between points p1 and p2 '''
return abs(p1[0]-p2[0]) + abs(p1[1]-p2[1])
def closest(w, h):
''' Find which landmarks are the closest to the given point '''
howfar = [dist((w, h), p) for p in points]
m = min(howfar)
return [i for i, d in enumerate(howfar) if d == m]
# Part 1
# Find which landmarks will produce infinite areas
# by considering all points on the bounding box
exclude = set()
for w in range(w_max):
c = closest(w, 0)
if len(c) == 1:
exclude.add(c[0])
c = closest(w, h_max)
if len(c) == 1:
exclude.add(c[0])
for h in range(h_max):
c = closest(0, h)
if len(c) == 1:
exclude.add(c[0])
c = closest(w_max, h)
if len(c) == 1:
exclude.add(c[0])
# For every point within the bounding box, record the closest landmark
tally = [0] * len(points)
for h in range(h_max):
for w in range(w_max):
c = closest(w, h)
if len(c) == 1:
tally[c[0]] += 1
tally = [t for i, t in enumerate(tally) if i not in exclude]
print("Part 1:", max(tally))
# Part 2
cnt = 0
N = 10000
# For n landmarks any point further away than (N // n) units will have
# the cumulative distance of at least N.
for w in range(w_min - N // n, w_max + N // n + 1):
for h in range(h_min - N // n, h_max + N // n + 1):
if sum([dist((w, h), p) for p in points]) < N:
cnt += 1
print("Part 2:", cnt)
|
import sys
import unittest
from pympler.util.compat import HTMLParser, HTTPConnection, StringIO
from pympler.util.compat import Request, urlopen, URLError
from socket import error as socket_error
from time import sleep
from pympler.classtracker import ClassTracker
from pympler.garbagegraph import start_debug_garbage, end_debug_garbage
from pympler.process import get_current_thread_id
from pympler.web import start_profiler, start_in_background
# Use separate process for server if available. Otherwise use a thread.
#try:
# from multiprocessing import Process
#except ImportError:
from threading import Thread as Process
_server = None
class Trash(object):
pass
class Server(Process):
def __init__(self):
super(Server, self).__init__()
self.daemon = True
def run(self):
"""
Redirect bottle logging messages so it doesn't clutter the test output
and start the web GUI.
"""
tracker = ClassTracker()
tracker.track_class(Trash)
tracked_trash = Trash()
tracker.create_snapshot()
sys.stdout = StringIO()
sys.stderr = StringIO()
start_profiler(debug=True, quiet=True, tracker=tracker)
class WebGuiTest(unittest.TestCase):
defaulthost = 'localhost:8090'
defaulturl = 'http://' + defaulthost
class LinkChecker(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.errors = 0
def follow(self, link):
if link.startswith('http://'):
return
conn = HTTPConnection(WebGuiTest.defaulthost)
conn.request("GET", link)
response = conn.getresponse()
body = response.read()
conn.close()
if response.status not in [200, 302, 303, 307]:
sys.stderr.write('\nLINK-ERROR: %s, %d, %s' % (link, response.status, response.reason))
if response.status == 500:
sys.stderr.write(body)
self.errors += 1
def handle_starttag(self, tag, attrs):
if tag == 'a':
for name, value in attrs:
if name == 'href':
self.follow(value)
def setUp(self):
"""Use the same server for all tests."""
global _server
if not _server:
_server = Server()
_server.start()
wait = 5
running = False
while not running and wait > 0:
try:
urlopen(WebGuiTest.defaulturl).read()
running = True
except (URLError, socket_error, IOError):
wait -= 1
sleep(1)
def get(self, url, status=200):
conn = HTTPConnection(self.defaulthost)
conn.request("GET", url)
response = conn.getresponse()
body = response.read()
conn.close()
self.assertEqual(response.status, status)
try:
body = body.decode()
except UnicodeDecodeError:
pass
return body
def test_overview(self):
"""Test availability of web gui."""
req = Request(self.defaulturl)
page = str(urlopen(req).read())
        self.assertTrue("Process overview" in page)
def test_links(self):
"""Test all linked pages are available."""
req = Request(self.defaulturl)
page = str(urlopen(req).read())
parser = self.LinkChecker()
parser.feed(page)
parser.close()
self.assertEqual(parser.errors, 0)
def test_static_files(self):
"""Test if static files are served."""
for filename in ['style.css', 'jquery.flot.min.js']:
self.get('/static/%s' % filename, status=200)
def test_traceback(self):
"""Test if stack traces can be viewed.
First test valid tracebacks, then the invalid ones.
Also check if we can expand the locals of the current stackframe and
access size information of local data (dummy).
"""
class Dummy(object):
pass
dummy = Dummy()
for threadid in sys._current_frames():
resp = self.get('/traceback/%d' % threadid, status=200)
if threadid == get_current_thread_id():
locals_id = id(locals())
self.assertTrue('id="%d' % locals_id in resp, resp)
resp = self.get('/objects/%d' % locals_id, status=200)
self.assertTrue('dummy' in resp, resp)
self.assertTrue('id="%d' % id(dummy) in resp, resp)
self.get('/objects/%d' % id(dummy), status=200)
self.get('/traceback/gabelstapler', status=500)
body = self.get('/traceback/12345', status=200)
self.assertTrue("Cannot retrieve stacktrace for thread 12345" in body, body)
def test_garbage(self):
"""Test if reference cycles can be viewed."""
start_debug_garbage()
try:
body = self.get('/garbage', status=200)
#self.assertTrue('0 reference cycles' in body, body)
cycle = ['spam', 'eggs']
cycle.append(cycle)
del cycle
body = self.get('/garbage', status=200)
#self.assertTrue('0 reference cycles' in body, body)
body = self.get('/refresh', status=303)
body = self.get('/garbage', status=200)
#self.assertTrue('1 reference cycle' in body, body)
self.assertTrue('/garbage/0' in body)
body = self.get('/garbage/0', status=200)
self.assertTrue('/garbage/graph/0' in body, body)
self.assertTrue('/garbage/graph/0?reduce=' in body, body)
body = self.get('/garbage/graph/0', status=200)
body = self.get('/garbage/graph/0?reduce=on', status=200)
finally:
end_debug_garbage()
def test_tracker(self):
resp = self.get('/tracker', status=200)
clsname = '%s.Trash' % (Trash.__module__)
self.assertTrue(clsname in resp, resp)
resp = self.get('/tracker/class/%s' % clsname, status=200)
self.assertTrue('1 instance' in resp, resp)
def test_start_in_background(self):
"""Test server can be started in background mode."""
tracker = ClassTracker()
thread = start_in_background(port=64546, stats=tracker.stats)
self.assertEqual(thread.daemon, True)
if __name__ == "__main__":
suite = unittest.TestSuite()
tclasses = [WebGuiTest,]
for tclass in tclasses:
names = unittest.getTestCaseNames(tclass, 'test_')
suite.addTests(map(tclass, names))
if not unittest.TextTestRunner().run(suite).wasSuccessful():
sys.exit(1)
|
# -*- coding: utf-8 -*-
# Author: XuMing <[email protected]>
# Brief:
import os
import tarfile
import keras
from keras.models import Model
from babi.util import get_stories
from babi.util import vectorize_stories
RNN = keras.layers.recurrent.LSTM
EMBED_HIDDEN_SIZE = 50
SENT_HIDDEN_SIZE = 100
QUERY_HIDDEN_SIZE = 100
BATCH_SIZE = 32
EPOCH = 2
save_model_path = 'babi_rnn_model.h5'
print("RNN,Embed,Sent,Query={},{},{},{}".format(RNN, EMBED_HIDDEN_SIZE, SENT_HIDDEN_SIZE, QUERY_HIDDEN_SIZE))
challenge = 'tasks_1-20_v1-2/en/qa2_two-supporting-facts_{}.txt'
pwd_path = os.path.abspath(os.path.dirname(__file__))
print('pwd_path:', pwd_path)
path = os.path.join(pwd_path, '../../data/babi_tasks_1-20_v1-2.tar.gz')
print('path:', path)
with tarfile.open(path) as tar:
train = get_stories(tar.extractfile(challenge.format('train')))
test = get_stories(tar.extractfile(challenge.format('test')))
vocab = set()
for story, q, a in train + test:
vocab |= set(story + q + [a])
vocab = sorted(vocab)
vocab_size = len(vocab) + 1
word_idx = dict((c, i + 1) for i, c in enumerate(vocab))
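# Index 0 is reserved for padding, hence vocab_size = len(vocab) + 1 and word indices start at 1.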
story_maxlen = max(map(len, (x for x, _, _ in train + test)))
query_maxlen = max(map(len, (x for _, x, _ in train + test)))
idx_story, idx_query, idx_answer = vectorize_stories(train, word_idx, story_maxlen, query_maxlen)
test_idx_story, test_idx_query, test_idx_answer = vectorize_stories(test, word_idx, story_maxlen, query_maxlen)
print('vocab:', vocab)
print('idx_story.shape:', idx_story.shape)
print('idx_query.shape:', idx_query.shape)
print('idx_answer.shape:', idx_answer.shape)
print('story max len:', story_maxlen)
print('query max len:', query_maxlen)
def train_model():
print('build model...')
sentence = keras.layers.Input(shape=(story_maxlen,), dtype='int32')
encoded_sentence = keras.layers.Embedding(vocab_size, EMBED_HIDDEN_SIZE)(sentence)
encoded_sentence = keras.layers.Dropout(0.3)(encoded_sentence)
question = keras.layers.Input(shape=(query_maxlen,), dtype='int32')
encoded_question = keras.layers.Embedding(vocab_size, EMBED_HIDDEN_SIZE)(question)
encoded_question = keras.layers.Dropout(0.3)(encoded_question)
encoded_question = RNN(EMBED_HIDDEN_SIZE)(encoded_question)
encoded_question = keras.layers.RepeatVector(story_maxlen)(encoded_question)
merged = keras.layers.add([encoded_sentence, encoded_question])
merged = RNN(EMBED_HIDDEN_SIZE)(merged)
merged = keras.layers.Dropout(0.3)(merged)
preds = keras.layers.Dense(vocab_size, activation='softmax')(merged)
model = Model([sentence, question], preds)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
print('training')
model.fit([idx_story, idx_query], idx_answer, batch_size=BATCH_SIZE, epochs=EPOCH, validation_split=0.05)
loss, acc = model.evaluate([test_idx_story, test_idx_query], test_idx_answer, batch_size=BATCH_SIZE)
print('Test loss / test accuracy= {:.4f} / {:.4f}'.format(loss, acc))
# loss: 1.6114 - acc: 0.3758 - val_loss: 1.6661 - val_acc: 0.3800
# Test loss / test accuracy= 1.6762 / 0.3050
model.save(save_model_path)
print('save model:', save_model_path)
if __name__ == '__main__':
    train_model()
|
# Copyright (c) 2019 Dubalu LLC
# Copyright (c) 2017 Elasticsearch
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to you under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .errors import BulkIndexError, ScanError
from .bulk import streaming_bulk, bulk, parallel_bulk, chunk_objects, process_bulk_chunk
from .reindex import scan, reindex
|
"""
Various utility functions used by main modules.
"""
import os
import re
from datetime import timedelta
from datetime import datetime
import pandas as pd
import numpy as np
import requests
from bs4 import BeautifulSoup
def walk_dir(directory):
"""
list recursively all files in directory
"""
files_list = []
for path, _, files in os.walk(directory):
for file in files:
files_list.append(os.path.join(path, file))
return files_list
def dist_to_holiday(date, holidays, direction='next'):
"""
find the next holiday and current delta between date and holiday.
Look for previous holiday if direction is 'previous'.
"""
for delta in range(366):
if direction == 'next':
day = holidays.get(date + timedelta(days=delta))
elif direction == 'previous':
day = holidays.get(date + timedelta(days=-delta))
if day:
break
return delta, day
def get_calendar_holidays(dt_series, holidays):
"""
given an input date series and holidays object,
return a dataframe with next and previous holidays type and delay to/from
"""
calendar = pd.DataFrame(dt_series, columns=['dt']).sort_values('dt')
def func(direction):
def myfunc(date):
return dist_to_holiday(
date,
holidays=holidays,
direction=direction)
return myfunc
next_h = np.array(list(calendar['dt'].map(func('next'))))
calendar['next_delay'] = next_h[:, 0]
calendar['next_holiday'] = next_h[:, 1]
prev_h = np.array(list(calendar['dt'].map(func('previous'))))
calendar['prev_delay'] = prev_h[:, 0]
calendar['prev_holiday'] = prev_h[:, 1]
return calendar
def search_config(**kwargs):
"""wrapper for html search criteria"""
config = {}
if 'text' in kwargs:
config['text'] = re.compile(str(kwargs.pop('text')))
if 'name' in kwargs:
config['name'] = kwargs.pop('name')
config['attrs'] = kwargs
return config
def html_doc(url, search_cfg):
"""
extract all html elements from web page satisfying a search criteria
"""
data = requests.get(url).text
docs = BeautifulSoup(data, "lxml").find_all(**search_cfg)
return docs
def href_from_doc_list(doc_list, tag='href'):
"""
extract a given tag from list of html elements.
"""
return [doc.get(tag) for doc in doc_list]
def text_from_doc_list(doc_list):
"""
extract a text from list of html elements.
"""
return [doc.text for doc in doc_list]
def docs_from_url(url, search_cfg):
"""
extract html elements from url satisfying a config dict.
"""
data = requests.get(url).text
docs = BeautifulSoup(data, "lxml").find_all(**search_cfg)
return docs
def href_from_url(url, search_cfg):
"""
extract hyperlinks from web page.
requires a search_cfg to determine which links to download.
"""
data = requests.get(url).text
docs = BeautifulSoup(data, "lxml").find_all(**search_cfg)
return [doc.get('href') for doc in docs]
def years_query(query):
"""
process a year query and returns the list of years integers
satisfying the query.
ex.
"2020,2021,2023" --> [2020, 2021, 2023]
"2020-2022" --> [2020, 2021, 2022]
"2020, 2021-2023" --> [2020, 2021, 2022, 2023]
"2020-" --> [2020, 2021, ..., datetime.now().year]
"""
    comma_sep = [comp.strip() for comp in query.split(",")]
    years_list = []
    for comp in comma_sep:
if comp.find('-') >= 0:
start, end = comp.split('-')
if len(start.strip()) == 0:
start = 2010
else:
start = int(start.strip())
if len(end.strip()) == 0:
end = datetime.now().year
else:
end = int(end.strip())
years_list += list(range(start, end + 1))
else:
if len(comp.strip()) > 0:
years_list.append(int(comp))
return list(set(years_list))
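
# Quick sanity check mirroring the docstring examples (illustrative only):
# assert sorted(years_query("2020, 2021-2023")) == [2020, 2021, 2022, 2023]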
def is_in_path(file_path, find_str):
"""
check if find_str is in filename given file path
"""
file_path = file_path.lower()
find_str = find_str.lower()
return file_path.split('/')[-1].find(find_str) >= 0
def format_column_names(df):
"""
standardize pandas df columns names.
"""
def reformat(txt):
txt = txt.lower()
txt = txt.replace(' ', '_')
txt = txt.replace('-', '_')
txt = txt.replace('.', '_')
return txt
columns = {col: reformat(col) for col in df.columns}
df.rename(columns, axis=1, inplace=True)
|
from pygate_grpc.client import PowerGateClient
from google.protobuf.json_format import MessageToDict
import os
import sqlite3
import csv
from datetime import datetime
from tabulate import tabulate
api = os.getenv('POWERGATE_API')
token = os.getenv('POWERGATE_TOKEN')
user = os.getenv('POWERGATE_USER')
powergate = PowerGateClient(api, is_secure=False)
jobs = powergate.admin.storage_jobs.executing(user_id=user, cids='')
jobs_dict = MessageToDict(jobs)
jobs = []
for storage_job in jobs_dict["storageJobs"]:
utc_date = datetime.utcfromtimestamp(int(storage_job["createdAt"]))
cid = storage_job["cid"]
abs_path = os.getcwd()
split = os.path.split(abs_path)
db_path = os.path.join(
split[0], "pipeline/deplatformr_open_images_workflow.sqlite")
workflow_db = sqlite3.connect(db_path)
cursor = workflow_db.cursor()
cursor.execute("SELECT name from packages where cid = ?", (cid,),)
filename = cursor.fetchone()
table = []
    try:
        for deal in storage_job["dealInfo"]:
            try:
                message = deal["message"]
            except KeyError:
                message = ""
            try:
                price = deal["pricePerEpoch"]
            except KeyError:
                price = 0
            try:
                id = deal["dealId"]
            except KeyError:
                id = "n/a"
            table += [(id, deal["stateName"], deal["miner"], price, message)]
    except Exception as e:
        print(e)
jobs.append({"filename": filename[0], "job_id": storage_job["id"], "CID": cid, "Date": str(utc_date), "Deals": table})
# sort by package name
jobs.sort(key=lambda x: x['filename'], reverse=False)
for job in jobs:
print(job["filename"])
print("Job: " + job["job_id"])
print("CID: " + job["CID"])
print(job["Date"])
print(tabulate(job["Deals"]))
print("")
print(str(len(jobs_dict["storageJobs"])) + " jobs currently executing.") |
# -*- coding: utf-8 -*-
from .intappwebsocketsclient import *
G=globals.G
g_local_ip=''
def get_local_ip():
"获取内网ip"
global g_local_ip
if g_local_ip:
return g_local_ip
try:
socket_objs = [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]
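        # "Connecting" a UDP socket sends no packets; it just makes the OS choose
        # the outbound interface, whose local address getsockname() then reports.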
ip_from_ip_port = [(s.connect(("8.8.8.8", 53)), s.getsockname()[0], s.close()) for s in socket_objs][0][1]
ip_from_host_name = [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2] if not ip.startswith("127.")][:1]
g_local_ip = [l for l in (ip_from_ip_port, ip_from_host_name) if l][0]
    except Exception as e:
print("get_local_ip found exception : %s" % e)
    return g_local_ip if g_local_ip else socket.gethostbyname(socket.gethostname())
global globalqueue
globalqueue = Queue()
def messagequeue():
    while True:
        if not globalqueue.empty():
            # print("processing queue item")
            value = globalqueue.get()
            if value['args']:
                try:
                    sqlite('task').where("key = '"+value['task']['key']+"' and code!=4").update({"code": 3, "res": "running"})
                    value['target'](*value['args'])
                except Exception:
                    sqlite('task').where("key = '"+value['task']['key']+"' and code!=4").update({"code": 1, "res": "failed"})
            else:
                try:
                    value['target']()
                except Exception:
                    sqlite('task').where("key = '"+value['task']['key']+"' and code!=4").update({"code": 1, "res": "failed"})
            sqlite('task').where("key = '"+value['task']['key']+"' and code!=4").update({"code": 4, "res": "completed"})
        else:
            time.sleep(1)
def add_queue(target, args=None, title="default task", describes=""):
    """Enqueue a task.
    target    callable to run (required)
    args      arguments for the callable (optional), e.g. a tuple
    title     task name
    describes task description
    msg       task execution result
    Usage: add_queue(target=aa, args=(1,))
    """
ttt=times()
task={"title":title,"describes":describes,"code":2,"key":md5(str(ttt)+str(random.randint(100000,999999))),"res":"","addtime":ttt}
sqlite('task').insert(task)
key={"target":target,"args":args,"task":task}
globalqueue.put(key)
def get_process_id(name):
try:
child = subprocess.Popen(['pgrep', '-f', name],stdout=subprocess.PIPE, shell=False)
response = child.communicate()[0]
return [int(pid) for pid in response.split()]
except:
return []
def is_url(url):
"判断url合法性"
if re.match(r'^https?:/{2}\w.+$', url):
return True
else:
return False
def returnjson(data=[], code=0, msg="success", status='200 ok'):
    """Write a wrapped JSON response to the browser.
    data   result payload, default []
    code   body status code, default 0
    msg    body status message, default "success"
    status HTTP status code, default '200 ok'
    Returns the JSON-encoded result set.
    """
res={
"code":code,
"msg":msg,
"time":times(),
"data":data
}
return json_encode(res),status,{"Content-Type":"application/json; charset=utf-8"}
def file_get_content(k):
"获取文件内容"
if os.path.isfile(k):
f=open(k,'r',encoding="utf-8")
con=f.read()
f.close()
else:
con=''
return con
def file_set_content(k,data):
f=open(k,'w',encoding="utf-8")
f.write(data)
f.close()
return True
def randoms(lens=6, types=1):
    """Generate a random string.
    lens   length
    types  1 = digits, 2 = letters, 3 = letters and digits
    """
strs="0123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM,!@#$%^&*()_+=-;',./:<>?"
if types==1:
strs="0123456789"
elif types==2:
strs="qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM"
elif types==3:
strs="0123456789qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM"
k=''
i=0
while i < lens:
k+=random.choice(strs)
i+=1
return k |
from flask import Flask
from flask import render_template
from flask import request
from energy_eval import energy_gpr_mix
import json
from layoutanalysis import layoutanalysis
from layoutservice2 import layoutservice2
from layoutservice3 import layoutservice3
from layoutservice_cube import layoutservice_cube
from desc_getter import desc_getter
import requests
import pdb
# Variables
app = Flask(__name__)
#app.config.update(MAX_CONTENT_LENGTH=20971520)
app.config['MAX_CONTENT_LENGTH'] = 20 * 1024 * 1024
# Main Page
@app.route("/")
def index():
return "Home Page"
# Testing Page: d3.js
# http://localhost:8000/service?url=https://qua-kit.ethz.ch/exercise/33/3485/geometry
# http://localhost:8000/test?url=https://qua-kit.ethz.ch/exercise/36/1686/geometry
# http://localhost:8000/test?url=https://qua-kit.ethz.ch/exercise/40/4575/geometry
# url = "https://qua-kit.ethz.ch/exercise/40/4575"
# url = request.args.get('url')
@app.route("/test")
def ppprint():
url = request.args.get('url')
print(url)
file = requests.get(url).text
#print(file)
b = json.loads(file) # load: convert json --> python list
f_o = json.dumps(layoutservice3(b))
#print("fo", f_o)
url_desc = url.replace('geometry', 'info')
desc_list = desc_getter(url_desc)['subInfoDescription']
return render_template("index_chi.html", data=f_o, addr_design=url, desc=desc_list) # data passed to a web page
#return render_template("index_s1.html", data=f_o) # data passed to a web page
@app.route("/service_cube", methods=['GET','POST'])
def cube():
if request.method == "POST":
print("POST DETECTED!")
geom = request.form.get('geometry')
geom_list = json.loads(geom)
f_o = json.dumps(layoutservice_cube(geom_list))
return render_template("index_cube.html", data=f_o) # data passed to a web page
elif request.method == "GET":
url = request.args.get('url')
print("GET detected!")
#print(url)
file = requests.get(url).text
# print(file)
b = json.loads(file)
f_o = json.dumps(layoutservice_cube(b))
return render_template("index_cube.html", data=f_o, addr_design=url) # data passed to a web page
@app.route("/service_3", methods=['GET','POST'])
def service_3():
if request.method == "POST":
print("POST DETECTED!")
# geom = request.value.get('geometry') # get all the parameters
#geom = request.get_json('url') # get the geometry in json format (if the previous line doesn't work)
geom = request.args.get('url')
print("POST DETECTED and url get!")
print(geom)
geom_list = json.loads(geom)
f_o = json.dumps(layoutservice3(geom_list))
return render_template("index_chi.html", data=f_o) # data passed to a web page
elif request.method == "GET":
url = request.args.get('url')
print("GET detected!")
print(url)
file = requests.get(url).text
print(file)
b = json.loads(file) # load: convert json --> python list
f_o = json.dumps(layoutservice3(b))
url_desc = url.replace('geometry', 'info')
desc_list = desc_getter(url_desc)['subInfoDescription']
return render_template("index_chi.html", data=f_o, addr_design=url, desc=desc_list) # data passed to a web page
@app.route("/service_1")
def service1():
url = request.args.get('url')
print(url)
file = requests.get(url).text
print(file)
b = json.loads(file) # load: convert json --> python list
f_o = json.dumps(layoutanalysis(b))
#print("fo", f_o)
url_desc = url.replace('geometry', 'info')
desc_list = desc_getter(url_desc)['subInfoDescription']
return render_template("index.html", data=f_o, addr_design=url, desc=desc_list) # data passed to a web page
#return render_template("index_s1.html", data=f_o) # data passed to a web page
#
@app.route("/service_2", methods=['GET','POST'])
def service_2():
if request.method == "POST":
print("POST DETECTED!")
# geom = request.value.get('geometry') # get all the parameters
#geom = request.get_json('url') # get the geometry in json format (if the previous line doesn't work)
geom = request.args.get('url')
print("POST DETECTED and url get!")
print(geom)
geom_list = json.loads(geom)
f_o = json.dumps(layoutservice2(geom_list))
return render_template("index_plab_slim.html", data=f_o) # data passed to a web page
elif request.method == "GET":
url = request.args.get('url')
print("GET detected!")
print(url)
file = requests.get(url).text
print(file)
b = json.loads(file) # load: convert json --> python list
f_o = json.dumps(layoutservice2(b))
url_desc = url.replace('geometry', 'info')
desc_list = desc_getter(url_desc)['subInfoDescription']
return render_template("index_plab.html", data=f_o, addr_design=url, desc=desc_list) # data passed to a web page
@app.route('/post', methods=('POST',))
def view_post():
print(request.form["geometry"])
return request.form["geometry"]
# def internal():
# url = request.args.get('url')
# print(url)
# f_o = json.dumps(layoutanalysis(url))
# #print(f_o)
# # data = requests.get(url).text
# return render_template("index.html", data=f_o, addr_design=url) # data passed to a web page
if __name__ == "__main__":
#app.run(host='129.132.32.168', port=5000, debug=True)
app.run(host='0.0.0.0',port=8000,debug=True)
|
"""
Class for interfacing with the Primesense RGBD sensor
Author: Jeff Mahler
"""
import logging
import numpy as np
import pylibfreenect2 as lf2
from autolab_core import (
CameraIntrinsics,
ColorImage,
DepthImage,
IrImage,
Image,
)
from autolab_core.constants import MM_TO_METERS
from .camera_sensor import CameraSensor
class Kinect2PacketPipelineMode:
"""Type of pipeline for Kinect packet processing."""
OPENGL = 0
CPU = 1
OPENCL = 2
AUTO = 3
class Kinect2FrameMode:
"""Type of frames that Kinect processes."""
COLOR_DEPTH = 0
COLOR_DEPTH_IR = 1
class Kinect2RegistrationMode:
"""Kinect registration mode."""
NONE = 0
COLOR_TO_DEPTH = 1
class Kinect2DepthMode:
"""Kinect depth mode setting."""
METERS = 0
MILLIMETERS = 1
class Kinect2Sensor(CameraSensor):
"""Class for interacting with a Kinect v2 RGBD sensor directly through
protonect driver. https://github.com/OpenKinect/libfreenect2
"""
# Constants for image height and width (in case they're needed somewhere)
COLOR_IM_HEIGHT = 1080
COLOR_IM_WIDTH = 1920
DEPTH_IM_HEIGHT = 424
DEPTH_IM_WIDTH = 512
def __init__(
self,
packet_pipeline_mode=Kinect2PacketPipelineMode.AUTO,
registration_mode=Kinect2RegistrationMode.COLOR_TO_DEPTH,
depth_mode=Kinect2DepthMode.METERS,
device_num=0,
frame=None,
):
"""Initialize a Kinect v2 sensor directly to the protonect driver with
the given configuration. When kinect is connected to the protonect
driver directly, the iai_kinect kinect_bridge cannot be run at the
same time.
Parameters
----------
packet_pipeline_mode : int
Either Kinect2PacketPipelineMode.OPENGL,
Kinect2PacketPipelineMode.OPENCL or
Kinect2PacketPipelineMode.CPU -- indicates packet processing type.
If not specified the packet pipeline will be determined
automatically.
registration_mode : int
Either Kinect2RegistrationMode.NONE or
Kinect2RegistrationMode.COLOR_TO_DEPT -- The mode for registering
a color image to the IR camera frame of reference.
depth_mode : int
Either Kinect2DepthMode.METERS or Kinect2DepthMode.MILLIMETERS --
the units for depths returned from the Kinect frame arrays.
device_num : int
The sensor's device number on the USB bus.
frame : :obj:`str`
The name of the frame of reference in which the sensor resides.
If None, this will be set to 'kinect2_num', where num is replaced
with the device number.
"""
self._device = None
self._running = False
self._packet_pipeline_mode = packet_pipeline_mode
self._registration_mode = registration_mode
self._depth_mode = depth_mode
self._device_num = device_num
self._frame = frame
if self._frame is None:
self._frame = "kinect2_%d" % (self._device_num)
self._color_frame = "%s_color" % (self._frame)
self._ir_frame = (
self._frame
) # same as color since we normally use this one
def __del__(self):
"""Automatically stop the sensor for safety."""
if self.is_running:
self.stop()
@property
def color_intrinsics(self):
""":obj:`CameraIntrinsics` : Color camera intrinsics of Kinect."""
if self._device is None:
raise RuntimeError(
"Kinect2 device not runnning. Cannot return color intrinsics"
)
camera_params = self._device.getColorCameraParams()
return CameraIntrinsics(
self._color_frame,
camera_params.fx,
camera_params.fy,
camera_params.cx,
camera_params.cy,
)
@property
def ir_intrinsics(self):
""":obj:`CameraIntrinsics` : IR camera intrinsics for the Kinect."""
if self._device is None:
raise RuntimeError(
"Kinect2 device not runnning. Cannot return IR intrinsics"
)
camera_params = self._device.getIrCameraParams()
return CameraIntrinsics(
self._ir_frame,
camera_params.fx,
camera_params.fy,
camera_params.cx,
camera_params.cy,
height=Kinect2Sensor.DEPTH_IM_HEIGHT,
width=Kinect2Sensor.DEPTH_IM_WIDTH,
)
@property
def is_running(self):
"""bool : True if the stream is running, or false otherwise."""
return self._running
@property
def frame(self):
""":obj:`str` : The reference frame of the sensor."""
return self._frame
@property
def color_frame(self):
""":obj:`str` : The reference frame of the color sensor."""
return self._color_frame
@property
def ir_frame(self):
""":obj:`str` : The reference frame of the IR sensor."""
return self._ir_frame
def start(self):
"""Starts the Kinect v2 sensor stream.
Raises
------
IOError
If the Kinect v2 is not detected.
"""
# setup logger
self._logger = lf2.createConsoleLogger(lf2.LoggerLevel.Warning)
lf2.setGlobalLogger(self._logger)
# open packet pipeline
self._pipeline = None
if (
self._packet_pipeline_mode == Kinect2PacketPipelineMode.OPENGL
or self._packet_pipeline_mode == Kinect2PacketPipelineMode.AUTO
):
# Try OpenGL packet pipeline first or if specified
try:
self._pipeline = lf2.OpenGLPacketPipeline()
except BaseException:
logging.warning(
"OpenGL not available. "
"Defaulting to CPU-based packet pipeline."
)
if self._pipeline is None and (
self._packet_pipeline_mode == Kinect2PacketPipelineMode.OPENCL
or self._packet_pipeline_mode == Kinect2PacketPipelineMode.AUTO
):
# Try OpenCL if available
try:
self._pipeline = lf2.OpenCLPacketPipeline()
except BaseException:
logging.warning(
"OpenCL not available. Defaulting to CPU packet pipeline."
)
if (
self._pipeline is None
or self._packet_pipeline_mode == Kinect2PacketPipelineMode.CPU
): # CPU packet pipeline
self._pipeline = lf2.CpuPacketPipeline()
# check devices
self._fn_handle = lf2.Freenect2()
self._num_devices = self._fn_handle.enumerateDevices()
if self._num_devices == 0:
raise IOError(
"Failed to start stream. No Kinect2 devices available!"
)
if self._num_devices <= self._device_num:
raise IOError(
"Failed to start stream. Device num %d unavailable!"
% (self._device_num)
)
# open device
self._serial = self._fn_handle.getDeviceSerialNumber(self._device_num)
self._device = self._fn_handle.openDevice(
self._serial, pipeline=self._pipeline
)
# add device sync modes
self._listener = lf2.SyncMultiFrameListener(
lf2.FrameType.Color | lf2.FrameType.Ir | lf2.FrameType.Depth
)
self._device.setColorFrameListener(self._listener)
self._device.setIrAndDepthFrameListener(self._listener)
# start device
self._device.start()
# open registration
self._registration = None
if self._registration_mode == Kinect2RegistrationMode.COLOR_TO_DEPTH:
logging.debug("Using color to depth registration")
self._registration = lf2.Registration(
self._device.getIrCameraParams(),
self._device.getColorCameraParams(),
)
self._running = True
def stop(self):
"""Stops the Kinect2 sensor stream.
Returns
-------
bool
True if the stream was stopped, False if the device was already
stopped or was not otherwise available.
"""
# check that everything is running
if not self._running or self._device is None:
logging.warning(
"Kinect2 device %d not runnning. Aborting stop"
% (self._device_num)
)
return False
# stop the device
self._device.stop()
self._device.close()
self._device = None
self._running = False
return True
def frames(self, skip_registration=False):
"""Retrieve a new frame from the Kinect and convert it to a
ColorImage and a DepthImage
Parameters
----------
skip_registration : bool
If True, the registration step is skipped.
Returns
-------
:obj:`tuple` of :obj:`ColorImage`, :obj:`DepthImage`
The ColorImage and DepthImage of the current frame.
Raises
------
RuntimeError
If the Kinect stream is not running.
"""
color_im, depth_im, _, _ = self._frames_and_index_map(
skip_registration=skip_registration
)
return color_im, depth_im
def median_depth_img(self, num_img=1):
"""Collect a series of depth images and return the median of the set.
Parameters
----------
num_img : int
The number of consecutive frames to process.
Returns
-------
:obj:`DepthImage`
The median DepthImage collected from the frames.
"""
depths = []
for _ in range(num_img):
_, depth, _ = self.frames()
depths.append(depth)
return Image.median_images(depths)
def _frames_and_index_map(self, skip_registration=False):
"""Retrieve a new frame from the Kinect and return a ColorImage,
DepthImage, IrImage, and a map from depth pixels to color
pixel indices.
Parameters
----------
skip_registration : bool
If True, the registration step is skipped.
Returns
-------
:obj:`tuple` of :obj:`ColorImage`, :obj:`DepthImage`,
:obj:`IrImage`, :obj:`numpy.ndarray`
The ColorImage, DepthImage, and IrImage of the
current frame, and an ndarray that maps pixels
of the depth image to the index of the
corresponding pixel in the color image.
Raises
------
RuntimeError
If the Kinect stream is not running.
"""
if not self._running:
raise RuntimeError(
"Kinect2 device %s not runnning. Cannot read frames"
% (self._device_num)
)
# read frames
frames = self._listener.waitForNewFrame()
unregistered_color = frames["color"]
distorted_depth = frames["depth"]
ir = frames["ir"]
# apply color to depth registration
color_frame = self._color_frame
color = unregistered_color
depth = distorted_depth
color_depth_map = (
np.zeros([depth.height, depth.width]).astype(np.int32).ravel()
)
if (
not skip_registration
and self._registration_mode
== Kinect2RegistrationMode.COLOR_TO_DEPTH
):
color_frame = self._ir_frame
depth = lf2.Frame(
depth.width, depth.height, 4, lf2.FrameType.Depth
)
color = lf2.Frame(
depth.width, depth.height, 4, lf2.FrameType.Color
)
self._registration.apply(
unregistered_color,
distorted_depth,
depth,
color,
color_depth_map=color_depth_map,
)
        # convert to array (copy needed to prevent referencing deleted data)
color_arr = np.copy(color.asarray())
color_arr[:, :, [0, 2]] = color_arr[:, :, [2, 0]] # convert BGR to RGB
color_arr[:, :, 0] = np.fliplr(color_arr[:, :, 0])
color_arr[:, :, 1] = np.fliplr(color_arr[:, :, 1])
color_arr[:, :, 2] = np.fliplr(color_arr[:, :, 2])
color_arr[:, :, 3] = np.fliplr(color_arr[:, :, 3])
depth_arr = np.fliplr(np.copy(depth.asarray()))
ir_arr = np.fliplr(np.copy(ir.asarray()))
# convert from mm to meters
if self._depth_mode == Kinect2DepthMode.METERS:
depth_arr = depth_arr * MM_TO_METERS
# Release and return
self._listener.release(frames)
return (
ColorImage(color_arr[:, :, :3], color_frame),
DepthImage(depth_arr, self._ir_frame),
IrImage(ir_arr.astype(np.uint16), self._ir_frame),
color_depth_map,
)
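# A minimal usage sketch (illustrative, not part of the original module): the
# methods above suggest a start/frames/stop lifecycle along these lines, where
# `Kinect2Sensor` is assumed to be the enclosing class defined earlier in the file.
#
#   sensor = Kinect2Sensor()
#   sensor.start()                                     # opens the device and listener
#   color_im, depth_im = sensor.frames()               # one registered frame pair
#   median_depth = sensor.median_depth_img(num_img=5)  # median over 5 depth frames
#   sensor.stop()                                      # releases the device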
|
from allauth.account.signals import user_signed_up
from django.conf import settings
from django.db import transaction
from django.db.models.signals import post_save
from django.dispatch import receiver
from rgd import models
from rgd.utility import skip_signal
@receiver(post_save, sender=models.ChecksumFile)
@skip_signal()
def _post_save_checksum_file(sender, instance, *args, **kwargs):
transaction.on_commit(lambda: instance._post_save_event_task(*args, **kwargs))
@receiver(user_signed_up)
def set_new_user_inactive(sender, **kwargs):
if getattr(settings, 'RGD_AUTO_APPROVE_SIGN_UP', None):
# Use setting `RGD_AUTO_APPROVE_SIGN_UP` to automatically approve all users
return
user = kwargs.get('user')
try:
models.WhitelistedEmail.objects.get(email=user.email)
user.is_active = True
except models.WhitelistedEmail.DoesNotExist:
user.is_active = False
user.save(update_fields=['is_active'])
|
"""
Baekjoon 12865: The Ordinary Knapsack (0/1 knapsack)
"""
from collections import deque
import sys
input = sys.stdin.readline
N, K = map(int, input().split())
things = deque()
for _ in range(N):
w, v = map(int, input().split())
things.append([w, v])
dp = [[0 for _ in range(K+1)] for _ in range(N)]
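# dp[i][j] = the best value achievable using items 0..i with capacity j:
#   if item i (weight w, value v) does not fit (j < w): dp[i][j] = dp[i-1][j]
#   otherwise: dp[i][j] = max(dp[i-1][j], v + dp[i-1][j-w])
# Worked example (illustrative): K=4 with items (w=3, v=6) and (w=2, v=5) gives
# dp[0] = [0, 0, 0, 6, 6] and dp[1][4] = max(6, 5 + dp[0][2]) = max(6, 5) = 6.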
for i in range(N):
w, v = things.popleft()
for j in range(K+1):
if i == 0:
if j >= w:
dp[i][j] = v
else:
if j < w:
dp[i][j] = dp[i-1][j]
else:
dp[i][j] = max(v + dp[i-1][j-w], dp[i-1][j])
print(dp[-1][-1]) |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from PIL import Image, ImageDraw
from .colormap import colormap
__all__ = ['visualize_results']
def visualize_results(image,
im_id,
catid2name,
threshold=0.5,
bbox_results=None,
mask_results=None,
lmk_results=None):
"""
Visualize bbox and mask results
"""
if mask_results:
image = draw_mask(image, im_id, mask_results, threshold)
if bbox_results:
image = draw_bbox(image, im_id, catid2name, bbox_results, threshold)
if lmk_results:
image = draw_lmk(image, im_id, lmk_results, threshold)
return image
def draw_mask(image, im_id, segms, threshold, alpha=0.7):
"""
Draw mask on image
"""
mask_color_id = 0
w_ratio = .4
color_list = colormap(rgb=True)
img_array = np.array(image).astype('float32')
for dt in np.array(segms):
if im_id != dt['image_id']:
continue
segm, score = dt['segmentation'], dt['score']
if score < threshold:
continue
import pycocotools.mask as mask_util
mask = mask_util.decode(segm) * 255
color_mask = color_list[mask_color_id % len(color_list), 0:3]
mask_color_id += 1
for c in range(3):
color_mask[c] = color_mask[c] * (1 - w_ratio) + w_ratio * 255
idx = np.nonzero(mask)
img_array[idx[0], idx[1], :] *= 1.0 - alpha
img_array[idx[0], idx[1], :] += alpha * color_mask
return Image.fromarray(img_array.astype('uint8'))
def draw_bbox(image, im_id, catid2name, bboxes, threshold):
"""
Draw bbox on image
"""
draw = ImageDraw.Draw(image)
catid2color = {}
color_list = colormap(rgb=True)[:40]
for dt in np.array(bboxes):
if im_id != dt['image_id']:
continue
catid, bbox, score = dt['category_id'], dt['bbox'], dt['score']
if score < threshold:
continue
xmin, ymin, w, h = bbox
xmax = xmin + w
ymax = ymin + h
if catid not in catid2color:
idx = np.random.randint(len(color_list))
catid2color[catid] = color_list[idx]
color = tuple(catid2color[catid])
# draw bbox
draw.line(
[(xmin, ymin), (xmin, ymax), (xmax, ymax), (xmax, ymin),
(xmin, ymin)],
width=2,
fill=color)
# draw label
text = "{} {:.2f}".format(catid2name[catid], score)
tw, th = draw.textsize(text)
draw.rectangle(
[(xmin + 1, ymin - th), (xmin + tw + 1, ymin)], fill=color)
draw.text((xmin + 1, ymin - th), text, fill=(255, 255, 255))
return image
def draw_lmk(image, im_id, lmk_results, threshold):
draw = ImageDraw.Draw(image)
catid2color = {}
color_list = colormap(rgb=True)[:40]
for dt in np.array(lmk_results):
lmk_decode, score = dt['landmark'], dt['score']
if im_id != dt['image_id']:
continue
if score < threshold:
continue
for j in range(5):
x1 = int(round(lmk_decode[2 * j]))
y1 = int(round(lmk_decode[2 * j + 1]))
draw.ellipse(
(x1, y1, x1 + 5, y1 + 5), fill='green', outline='green')
return image
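# A minimal usage sketch (the file names and results list below are illustrative
# assumptions, not part of this module): detections whose 'image_id' matches
# im_id and whose score clears the threshold are drawn onto the PIL image.
#
#   from PIL import Image
#   image = Image.open('demo.jpg').convert('RGB')
#   bbox_results = [{'image_id': 0, 'category_id': 1, 'bbox': [10, 20, 50, 80], 'score': 0.9}]
#   image = visualize_results(image, 0, {1: 'person'}, threshold=0.5, bbox_results=bbox_results)
#   image.save('demo_vis.jpg')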
|
from tkinter import *  # wildcard import so widgets and constants (Text, Canvas, WORD, NW, INSERT) are available directly
import tkinter  # tkinter is the main GUI module used here
import json  # lets the code read .json game files
import time  # imported for an earlier test and kept; not actually used
root = Tk()  # creates the main tkinter window
root.configure(background='gray')  # gray background for the root window
text = Text(root, wrap=WORD, cursor="arrow")  # text box in the root window; shows an arrow cursor on hover
root.title('Text Adventure')  # sets the window title to Text Adventure
root.geometry("1000x700")  # fixed pixel size for the root window
root.resizable(0, 0)  # disable resizing, since widgets are placed at absolute pixel coordinates
canvas = Canvas(root, width=700, height=700)  # canvas in the root window for shapes, colors, and images
canvas.pack(side="left")  # dock the canvas on the left side of the window
MapIm = PhotoImage(file='TownMap.png')  # the map image, kept in a variable so it can be drawn below
canvas.create_image(1, 1, image=MapIm, anchor=NW)  # draw the map from (1,1); the pixel grid starts at the top-left corner
MyOval = canvas.create_oval(450, 450, 475, 475, fill='red')  # red marker for the starting location (moving it each turn was planned but not finished)
current = ''  # global: the player's current room; check_input() reassigns it each turn, so it cannot live inside that function
count = 0  # global: one-shot flag used for the intro text and for setting current the first time
if count == 0:  # runs once at startup to show the instructions and the starting room
    text.insert(INSERT,"\n\nDirections: Click a direction then click continue. If your location repeats it means you can not go in the direction you have chosen. Scroll to read text. Main goal is to just explore the town. There is no automatic end so you can leave by closing the window or clicking on quit.\n\n")
    text.insert(INSERT,"\n\nYou are at Your House\n")
    text.insert(INSERT,"This is your house.")  # the first description shown in the text window
    text.insert(INSERT,'\nWhere would you like to go? ')  # prompt the player for a direction
def check_input(event):  # the game loop: every direction button sends its click event here
    gameFile = 'game.json'  # path to the game data file
    SB.place(bordermode=OUTSIDE, height=90, width=150, x=775, y=550)  # show the Continue button on screen
    B1.place(bordermode=OUTSIDE, height=30, width=50, x=40000, y=40000)  #
    B2.place(bordermode=OUTSIDE, height=30, width=50, x=40000, y=40060)  # park the direction buttons far off-screen
    B3.place(bordermode=OUTSIDE, height=30, width=50, x=35000, y=40030)  # instead of destroying and recreating them
    B4.place(bordermode=OUTSIDE, height=30, width=50, x=45000, y=43000)  # every turn
    game = {}  # dictionary that will hold the parsed game data
    with open(gameFile) as json_file:  # open the json file
        game = json.load(json_file)  # parse the game data into the dictionary
    global current  # use the module-level current
    global count  # use the module-level count
    if count == 0:  # one-shot: set the starting room exactly once
        current = 'Your House'
        count = count + 1  # bump the flag so no count-guarded block runs again
    r = game['rooms']  # shorthand for the rooms dictionary
    c = r[current]  # data for the current room
    verbs = game['verbs']  # verb table from the game data, for easy lookup
    toReturn = event.widget['text']  # the label of the clicked button (NORTH/SOUTH/WEST/EAST)
    for v in verbs:  # map the button label onto the movement verb it represents
        if toReturn == v['v']:
            toReturn = v['map']
    for e in game['rooms'][current]['exits']:  # look for an exit of the current room matching the chosen direction
        if toReturn == e['verb'] and e['target'] != 'NoExit':
            current = e['target']  # move to that exit's target room (taken from the .json file)
            return current  # hand the updated room back to the global
#if current=='Your House':
# MyOval = canvas.create_oval(450,450,475,475, fill='red')
#elif current=='the dirt road':
# MyOval = canvas.create_oval(500,420,525,445, fill='red')
#elif current=="Faendal's House":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=='the center of town':
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=='the local inn':
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="the town's shopping district":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="Sven and Hilde's House":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="the town's southern wall":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="the archery range":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="the town's lumber mill":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="the lumber yard":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="the town's northern wall":
# MyOval = canvas.create_oval(0,0,0,0, fill='red')
#elif current=="the town's eastern wall":
    # MyOval = canvas.create_oval(0,0,0,0, fill='red')  # unfinished feature: this block was meant to move the marker to show the player's location, but time ran out
def TextBlock(event):  # prints the room description; runs after check_input so current is already updated
    global current  # use the module-level current
    gameFile = 'game.json'  # path to the game data file
    canvas.delete(MyOval)  # remove the starting-location marker
    game = {}  # dictionary that will hold the parsed game data
    with open(gameFile) as json_file:  # open the json file
        game = json.load(json_file)  # parse the game data into the dictionary
    r = game['rooms']  # shorthand for the rooms dictionary
    c = r[current]  # data for the current room
    text.insert(INSERT, '\n\nYou are at ' + current + "\n")
    text.insert(INSERT, c['desc'])  # insert the room description into the text box
    text.insert(INSERT, '\nWhere would you like to go? ')
    SB.place(bordermode=OUTSIDE, height=90, width=150, x=50000, y=40000)  # park the Continue button off-screen
    B1.place(bordermode=OUTSIDE, height=30, width=50, x=825, y=430)  #
    B2.place(bordermode=OUTSIDE, height=30, width=50, x=825, y=490)  # bring the direction buttons back on-screen
    B3.place(bordermode=OUTSIDE, height=30, width=50, x=775, y=460)  #
    B4.place(bordermode=OUTSIDE, height=30, width=50, x=875, y=460)  #
def Quit(event):  # the Quit button sends its click event here to end the program
    root.destroy()  # close the root window
B1 = tkinter.Button(root, text="NORTH")  #
B2 = tkinter.Button(root, text="SOUTH")  # the four direction buttons, attached to the root window
B3 = tkinter.Button(root, text="WEST")  #
B4 = tkinter.Button(root, text="EAST")  #
SB = tkinter.Button(root, text='Continue')
text.pack()
Quitb = tkinter.Button(root, text="Quit")
Quitb.bind('<Button-1>', Quit)  # wire the Quit button to Quit()
Quitb.pack()
Quitb.place(bordermode=OUTSIDE, height=30, width=50, x=950, y=5)  # button placement and sizing
SB.bind('<Button-1>', TextBlock)  # Continue prints the next room description
SB.pack()
SB.place(bordermode=OUTSIDE, height=90, width=150, x=775, y=550)
B1.bind('<Button-1>', check_input)  # each direction button feeds its click into check_input()
B1.pack()
B1.place(bordermode=OUTSIDE, height=30, width=50, x=825, y=430)
B2.bind('<Button-1>', check_input)
B2.pack()
B2.place(bordermode=OUTSIDE, height=30, width=50, x=825, y=490)
B3.bind('<Button-1>', check_input)
B3.pack()
B3.place(bordermode=OUTSIDE, height=30, width=50, x=775, y=460)
B4.bind('<Button-1>', check_input)
B4.pack()
B4.place(bordermode=OUTSIDE, height=30, width=50, x=875, y=460)
root.mainloop()  # event loop: keeps the window alive and dispatches button clicks
# All of this code is original work apart from roughly ten lines reused from the provided starter code; it took about a week of free time to build.
lista = []
dados = []
oper = ''
maior = menor = 0
nomeMaior = ''
nomeMenor = ''
while oper != 'quit':
    dados.append(str(input('Enter your name: ')))
    dados.append(int(input('Enter your weight: ')))
if len(lista) == 0:
        maior = menor = dados[1]
        nomeMaior = nomeMenor = dados[0]  # the first entry seeds both the heaviest and lightest records
else:
if dados[1] > maior:
maior = dados[1]
nomeMaior = dados[0]
if dados[1] < menor:
menor = dados[1]
nomeMenor = dados[0]
lista.append(dados[:])
dados.clear()
    oper = str(input('Do you want to continue? [Yes/Quit] ')).strip().lower()
    if oper == 'quit':
        break
print('-=' * 30)
print(f'A total of {len(lista)} people were registered.')
print(f'The heaviest weight was {maior}Kg, from {nomeMaior}')
print(f'The lightest weight was {menor}Kg, from {nomeMenor}')
|
from django.contrib import admin
from atm_analytics.analytics.models import Case, AtmCase, AtmErrorEventViewer, AtmErrorXFS, AtmJournal
class AtmJournalInline(admin.TabularInline):
classes = ('grp-collapse grp-open',)
model = AtmJournal
extra = 0
class AtmCaseAdmin(admin.ModelAdmin):
inlines = (AtmJournalInline,)
class AtmCaseInline(admin.TabularInline):
classes = ('grp-collapse grp-open',)
model = AtmCase
extra = 0
class CaseAdmin(admin.ModelAdmin):
inlines = (AtmCaseInline,)
admin.site.register(AtmCase, AtmCaseAdmin)
admin.site.register(Case, CaseAdmin)
admin.site.register(AtmErrorXFS)
admin.site.register(AtmErrorEventViewer)
|
from unittest.mock import Mock, PropertyMock, patch
import pytest
import factories_v5
from zeep import exceptions as zeep_exceptions
from zeep import xsd
from aeat import Config, Controller, wsdl
def test_config_as_str():
config = Config('ens_presentation', test_mode=True)
assert 'Servicio de Presentación ENS V5.0' in config.__str__()
@pytest.mark.parametrize('signed', [True, False])
@patch('aeat.Client')
def test_controller_is_built_from_config_obj(client, signed):
config = Mock(signed=signed)
ctrl = Controller.build_from_config(config, Mock(), Mock())
assert isinstance(ctrl, Controller)
assert signed is ctrl.config.signed
@pytest.mark.parametrize('test_mode,expected_port', [
(True, 'IE315V5Pruebas'),
(False, 'IE315V5'),
])
def test_config_is_built_from_service_name(test_mode, expected_port):
config = Config('ens_presentation', test_mode=test_mode)
assert config.wsdl.endswith('IE315V5.wsdl')
assert 'IE315V5' == config.operation
assert 'IE315V5Service' == config.service
assert expected_port == config.port
@patch('aeat.Controller.operation')
def test_controller_marks_signature_as_skip_if_config_is_signed(operation_patch):
config = Mock(signed=True)
ctrl = Controller(Mock(), config)
ctrl.request({'arg': 'x'})
operation_patch.assert_called_with(arg='x', Signature=xsd.SkipValue)
@patch('aeat.Controller.operation', new_callable=PropertyMock)
def test_controller_with_99999_error(operation_patch, zeep_response):
def response():
return zeep_response('enswsv5', 'ens_presentation_IE315V5.wsdl',
'ens_presentation_error_99999.xml', 'IE315V5')
operation_patch.return_value = lambda **kwargs: response()
ctrl = Controller(Mock(), Mock())
result = ctrl.request(factories_v5.ENSPresentationFactory())
assert not result.valid
assert 'Mensaje REENVIABLE. Codigo[99999].' == result.error
assert result.data is None
@patch('aeat.Controller.operation', new_callable=PropertyMock)
def test_controller_with_html_error(operation_patch, zeep_response):
def response():
return zeep_response('enswsv5', 'ens_presentation_IE315V5.wsdl',
'unknown_certificate.html',
'IE315V5')
operation_patch.return_value = lambda **kwargs: response()
ctrl = Controller(Mock(), Mock())
result = ctrl.request(factories_v5.ENSPresentationFactory())
assert not result.valid
assert 'Wrong AEAT response' == result.error
assert result.data is None
def test_controller_operation():
service = Mock()
service.myoperation = Mock()
ctrl = Controller(Mock(service=service), Mock(operation='myoperation'))
assert service.myoperation == ctrl.operation
@pytest.mark.parametrize('service_name', [
name for name, _ in wsdl.ADUANET_SERVICES.items()
])
def test_aduanet_services_configuration(service_name):
config = Config(service_name, True)
assert isinstance(config, Config)
assert config.verbose_name is not None
assert config.wsdl is not None
assert config.operation is not None
assert config.port is not None
assert config.service is not None
assert config.signed is not None
assert config.port.endswith('Pruebas')
@pytest.mark.parametrize('detail,exception_cls', [
('Wrong AEAT response', zeep_exceptions.XMLSyntaxError),
('Wrong AEAT response', zeep_exceptions.ValidationError),
('Unknown error', Exception),
])
@patch('aeat.Controller.operation', new_callable=PropertyMock)
def test_controller_operation_request_exception_handling(operation_patch, detail, exception_cls):
def operation(arg, Signature):
raise exception_cls
operation_patch.return_value = operation
config = Mock(signed=True)
ctrl = Controller(Mock(), config)
result = ctrl.request({'arg': 'x'})
assert not result.valid
assert detail == result.error
@patch('aeat.Controller.operation', new_callable=PropertyMock)
def test_controller_with_ens_presentation_success_message(operation_patch, zeep_response):
def response():
return zeep_response('enswsv5', 'ens_presentation_IE315V5.wsdl',
'ens_presentation_success_IE328V5Sal.xml', 'IE315V5')
operation_patch.return_value = lambda **kwargs: response()
ctrl = Controller(Mock(), Mock(operation='IE315V4'))
result = ctrl.request(factories_v5.ENSPresentationFactory())
assert result.valid
@patch('aeat.Controller.operation', new_callable=PropertyMock)
def test_controller_result_includes_raw_request_and_response(operation_patch, zeep_response):
def response():
        return zeep_response('enswsv5', 'ens_presentation_IE315V5.wsdl',
'ens_presentation_success_IE328V5Sal.xml', 'IE315V5')
operation_patch.return_value = lambda **kwargs: response()
history_plugin = Mock(last_sent='xyz', last_received='zyx')
ctrl = Controller(Mock(), Mock(operation='IE315V4'), history_plugin)
result = ctrl.request(factories_v5.ENSPresentationFactory())
assert 'xyz' == result.raw_request
assert 'zyx' == result.raw_response
@pytest.mark.parametrize('response_xml', [
'ens_presentation_error_IE316V5Sal.xml',
'ens_presentation_error_IE917V5Sal.xml',
])
@patch('aeat.Controller.operation', new_callable=PropertyMock)
def test_controller_with_incorrect_responses(operation_patch, zeep_response, response_xml):
def response():
return zeep_response('enswsv5', 'ens_presentation_IE315V5.wsdl',
response_xml, 'IE315V5')
operation_patch.return_value = lambda **kwargs: response()
ctrl = Controller(Mock(), Mock(operation='IE315V5'))
result = ctrl.request(factories_v5.ENSPresentationFactory())
# Response is Valid
assert result.valid
assert result.data is not None
assert not result.error
|
import time
import torch
from PIL import Image
from comparisons import *
from depth import *
from model import *
from resize_test import *
from torch.autograd import Variable
from torchvision.utils import save_image
from utils import *
from weights import *
class DepthPrediction:
def __init__(self, weight_file, batch_size):
self.weight_file = weight_file
self.model = Model(batch_size)
self.model_gpu = self.model.cuda()
self.dtype = torch.cuda.FloatTensor
self.model_gpu.load_state_dict(load_weights(self.model_gpu, self.weight_file, self.dtype))
print("Model on cuda? {0}".format(next(self.model_gpu.parameters()).is_cuda))
def print_model(self):
print(self.model)
def predict(self, img):
resize_img = down_size(img,320,240)
resize_img_save = torch.from_numpy(resize_img).permute(2, 0, 1).unsqueeze(0).float()
save_image(resize_img_save, "resize_image.jpg")
cropped_img = center_crop(resize_img, 304, 228)
cropped_img_save = torch.from_numpy(cropped_img).permute(2, 0, 1).unsqueeze(0).float()
save_image(cropped_img_save, "cropped_img.jpg")
pytorch_img = torch.from_numpy(cropped_img).permute(2,0,1).unsqueeze(0).float()
save_image(pytorch_img, "input_image.jpg")
pytorch_img = pytorch_img.cuda()
pytorch_input = Variable(pytorch_img)
t = time.time()
out_img_pred = self.model_gpu(pytorch_input)
print("Finished image in {0} s".format(time.time() - t))
out_img_pred_ = torch.squeeze(out_img_pred)
out_img_pred_ = out_img_pred_.detach()
out_img_pred_np = out_img_pred_.cpu().numpy()
return out_img_pred_np
def export_model(self):
x = Variable(torch.randn(1, 3, 228, 304), requires_grad=True).cuda()
# Export the model
torch.onnx._export(self.model, x.long(), "depth_pred.onnx", export_params=True) # model being run
def main_mod(): #(test_image_no)
# if __name__ == '__main__':
# depth_gt_out = depth_gt(int(input("Enter image number from 0 to 1448")))
# depth_gt_out = depth_gt(int(test_image_no))
# depth_gt_out = depth_gt(test_image_no)
prediction = DepthPrediction('NYU_ResNet-UpProj.npy', 1)
# print('depth_gt_out')
# print(depth_gt_out, depth_gt_out.shape)
    # img = img_as_float(imread(sys.argv[1]))  # original
img = Image.open("test_image.png")
depth_gt_img = Image.open("depth_gt.png")
depth_gt_out = depth_gt(depth_gt_img)
out_img_pred_np = prediction.predict(img)
# print("Predicted depth values of size 160,128 {0}".format(out_img_pred_np))
# print('shape')
# print(out_img_pred_np.shape)
# print("Ground truth values of size 608,456 {0}".format(depth_gt_out))
# print('shape')
# print(depth_gt_out.shape)
# print(depth_gt_out.size)
depth_pred_inter = resize_depth_pred(out_img_pred_np)
# print('depth_pred_inter')
# print(depth_pred_inter, depth_pred_inter.shape)
# print(list(depth_gt_out.shape))
# print("GT Depth values {0}" .format(depth_gt_out))
# delta_percent_1, delta_percent_2, delta_percent_3 = delta_calculate(depth_pred_inter, depth_gt_out)
#
# abs_rel = abs_rel_diff(depth_pred_inter, depth_gt_out)
#
# sqr_rel = sqr_rel_diff(depth_pred_inter, depth_gt_out)
#
# rmse_lin = rmse_linear(depth_pred_inter, depth_gt_out)
#
# rmse_l = rmse_log(depth_pred_inter, depth_gt_out)
# benchmarks = [delta_percent_1, delta_percent_2, delta_percent_3, abs_rel, sqr_rel, rmse_lin, rmse_l]
# # prediction.print_model()
#
# # prediction.export_model()
#
# return delta_percent_1, delta_percent_2, delta_percent_3, abs_rel, sqr_rel, rmse_lin, rmse_l
|
# -*- coding: utf-8 -*-
# pragma pylint: disable=unused-argument, no-self-use
#
# (c) Copyright IBM Corp. 2010, 2019. All Rights Reserved.
#
#
# Utils collection for MitreAttack
# --------------------------------
#
from fn_mitre_integration.lib.mitre_attack import MitreAttack
from fn_mitre_integration.lib.mitre_attack import MitreAttackTactic
def get_techniques(tactic_names=None, tactic_ids=None):
"""
Get techniques for all input tactics
:param tactic_names: string of tactic names separated by comma
:param tactic_ids: string of tactic ids separated by comma
:return: techniques
"""
mitre_attack = MitreAttack()
tactics = []
if tactic_names is not None:
tactics = tactic_names.split(', ')
elif tactic_ids is not None:
t_ids = tactic_ids.split(', ')
        tactics = [MitreAttackTactic.get_name(tid) for tid in t_ids]
ret = []
for tactic in tactics:
techs = mitre_attack.get_tactic_techniques(tactic_name=tactic)
tactic_dict = {
"tactic_name": tactic,
"tactic_id": MitreAttackTactic.get_id(tactic),
"tactic_ref": MitreAttack.get_tactic_url(tactic),
"techs": techs
}
ret.append(tactic_dict)
return ret
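# A minimal usage sketch (the tactic names below are illustrative; valid values
# come from the MitreAttackTactic data in fn_mitre_integration.lib.mitre_attack):
#
#   tactics = get_techniques(tactic_names="Persistence, Privilege Escalation")
#   for t in tactics:
#       print(t["tactic_name"], len(t["techs"]))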
|
import os
from typing import List
from aws_cdk import (
aws_lambda as lambda_,
aws_dynamodb as dynamodb,
core,
)
import boto3
from parallelagram.config_parser import read_config, ParallelagramConfig
project_dir = os.path.dirname(os.path.realpath(os.path.dirname(__file__)))
config_path = os.path.join(project_dir, "parallel-config.json")
def load_code(code_path: str):
if code_path.startswith("s3://"):
s3 = boto3.client("s3")
path = code_path.split("://")[1]
bucket = path.split("/")[0]
key = "/".join(path.split("/")[1:])
handler_code = (
s3.get_object(Bucket=bucket, Key=key)["Body"].read().decode("utf-8")
)
else:
with open(code_path, "r") as code_in:
handler_code = code_in.read()
return handler_code
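# For example (bucket and key below are illustrative): load_code("s3://my-bucket/handlers/foo.py")
# fetches the object via boto3, while load_code("handlers/foo.py") reads the local file.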
def get_trace_value(config_trace: bool) -> lambda_.Tracing:
if config_trace:
tracing_value = lambda_.Tracing.ACTIVE
else:
tracing_value = lambda_.Tracing.DISABLED
return tracing_value
def get_runtime(config_runtime: str) -> lambda_.Runtime:
runtimes = {
"python3.8": lambda_.Runtime.PYTHON_3_8,
"python3.7": lambda_.Runtime.PYTHON_3_7,
"python3.6": lambda_.Runtime.PYTHON_3_6,
"python2.7": lambda_.Runtime.PYTHON_2_7,
"node12.x": lambda_.Runtime.NODEJS_12_X,
"node10.x": lambda_.Runtime.NODEJS_10_X,
"java11": lambda_.Runtime.JAVA_11,
"java8": lambda_.Runtime.JAVA_8,
"java8_corretto": lambda_.Runtime.JAVA_8_CORRETTO,
}
try:
return runtimes[config_runtime]
except KeyError as ke:
print(
f"No lambda runtime exists for configured value {ke.args[0]}."
f"Valid configurable runtimes: {[runtimes.keys()]}"
)
class LambdaStack(core.Stack):
def __init__(self, scope: core.Stack, id: str, **kwargs):
super().__init__(scope, id)
config = read_config(config_path)
self.make_lambda_stack(config)
    def make_lambda_stack(self, config: ParallelagramConfig) -> List[lambda_.Function]:
lambda_list = []
existing_tables = {}
for i, l in enumerate(config.lambdas):
table_name = f"table_{i}"
if table_name not in existing_tables:
table = dynamodb.Table(
self,
table_name,
partition_key=dynamodb.Attribute(
name="response_id", type=dynamodb.AttributeType.STRING
),
read_capacity=l.response_table_read_capacity,
write_capacity=l.response_table_write_capacity,
time_to_live_attribute="ttl",
)
existing_tables.update({table_name: table})
fn = lambda_.Function(
self,
l.lambda_name,
code=lambda_.Code.asset(l.code_path),
handler=l.lambda_handler,
timeout=core.Duration.seconds(l.timeout),
memory_size=l.memory_size,
runtime=get_runtime(l.runtime),
tracing=get_trace_value(l.tracing),
)
            existing_tables.get(table_name).grant_read_write_data(fn)
            lambda_list.append(fn)  # collect the function so the method returns what its signature promises
        return lambda_list
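# A minimal synthesis sketch (illustrative; the stack id 'parallelagram-lambdas'
# is an assumption, and parallel-config.json must exist at config_path):
#
#   app = core.App()
#   LambdaStack(app, 'parallelagram-lambdas')
#   app.synth()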
|
from django.core.urlresolvers import reverse
from django.test import SimpleTestCase
from mock import patch
from mockpay import views, access_settings
class ViewsTests(SimpleTestCase):
@patch('mockpay.views.requests')
def test_entry_requirements(self, requests):
"""Entry point only accepts POSTs, and needs two fields set"""
response = self.client.get(reverse('entry'))
self.assertEqual(405, response.status_code)
message = {}
response = self.client.post(reverse('entry'), message)
self.assertEqual(400, response.status_code)
message = {'agency_id': 'AGENCY'}
response = self.client.post(reverse('entry'), message)
self.assertEqual(400, response.status_code)
message = {'agency_tracking_id': 'TRACKTRACK'}
response = self.client.post(reverse('entry'), message)
self.assertEqual(400, response.status_code)
# None of the tests thus far should have made an http request
self.assertFalse(requests.post.called)
@patch('mockpay.views.requests')
def test_entry_requirements_valid(self, requests):
"""A successful POST from the browser should result in a POST to the
configured url with the provided agency_/tracking_id"""
mock_config = {"AGENCY": {"transaction_url": "exexex"}}
with self.settings(AGENCY_CONFIG=mock_config):
message = {'agency_id': 'AGENCY', 'agency_tracking_id': 'TRA'}
self.client.post(reverse('entry'), message)
self.assertTrue(requests.post.called)
self.assertEqual(requests.post.call_args[0][0], 'exexex')
self.assertEqual(requests.post.call_args[1]['data'], message)
def test_agency_response_to_dict(self):
"""This function should return error strings if given XML, duplicate
keys, or missing keys. Otherwise, we should parse a nice dictionary"""
responses = set()
clean = views.agency_response_to_dict("<xml>")
self.assertTrue(isinstance(clean, str))
responses.add(clean)
clean = views.agency_response_to_dict("key=value\nkey=other")
self.assertTrue(isinstance(clean, str))
responses.add(clean)
clean = views.agency_response_to_dict("key=")
self.assertTrue(isinstance(clean, str))
responses.add(clean)
self.assertEqual(len(responses), 3) # different error messages
clean = views.agency_response_to_dict("key1=value1\nkey2=value2\r\n"
+ "key3=value3")
self.assertEqual(clean, {"key1": "value1", "key2": "value2",
"key3": "value3"})
def test_generate_form_no_form(self):
"""The form is looked up; if it's not present, we get an error"""
with self.settings(FORM_CONFIGS={"111": []}):
response = views.generate_form(None, None, None,
{"form_id": "2222"})
self.assertEqual(response.status_code, 400)
def test_generate_form(self):
"""The generated form should include only fields in the form config"""
config = {"111": [{"name": "field1", "status": "editable"},
{"name": "field2", "status": "locked"},
{"name": "field3", "status": "hidden"}]}
params = {"form_id": "111", "field2": "value2", "field4": "value4"}
with self.settings(FORM_CONFIGS=config):
response = views.generate_form(None, "AGE", "APP", params)
self.assertContains(response, "agency_id")
self.assertContains(response, "AGE")
self.assertContains(response, "app_name")
self.assertContains(response, "APP")
self.assertContains(response, "field1")
self.assertContains(response, "field2")
self.assertContains(response, "field3")
self.assertContains(response, "value2")
self.assertNotContains(response, "field4")
self.assertNotContains(response, "value4")
@patch('mockpay.views.send_status_to_agency')
def test_exit_redirect(self, send_status_to_agency):
"""Test first a successful redirect, then a canceled redirect, then an
        error redirect (bad info from the agency server)"""
send_status_to_agency.return_value = {'response_message': 'OK'}
data = {'failure_return_url': 'FFFF', 'success_return_url': 'SSSS',
'agency_id': 'AGAGAG'}
response = self.client.post(reverse('redirect'), data=data)
self.assertNotContains(response, 'FFFF')
self.assertContains(response, 'SSSS')
data['cancel'] = 'Cancel'
response = self.client.post(reverse('redirect'), data=data)
self.assertContains(response, 'FFFF')
self.assertNotContains(response, 'SSSS')
del data['cancel']
send_status_to_agency.return_value = "error occurred"
response = self.client.post(reverse('redirect'), data=data)
self.assertContains(response, 'FFFF')
self.assertNotContains(response, 'SSSS')
send_status_to_agency.return_value = {'response_message': 'Error'}
response = self.client.post(reverse('redirect'), data=data)
self.assertContains(response, 'FFFF')
self.assertNotContains(response, 'SSSS')
class AccessSettingsTests(SimpleTestCase):
def test_lookup_config(self):
"""Fetching a key should first look in the app info and cascade up"""
mock_config = {"AGENCY": {"key_a": "value_1", "key_b": "value_2",
"apps": {"APPNAME": {"key_b": "value_3"}}}}
with self.settings(AGENCY_CONFIG=mock_config):
# Agency not present in config
self.assertEqual(
access_settings.lookup_config("key_b", "other", None), None)
# App not present in config
self.assertEqual(
access_settings.lookup_config("key_b", "AGENCY", "NONAPP"),
None)
# Key not present in config
self.assertEqual(
access_settings.lookup_config("key_z", "AGENCY", "APPNAME"),
None)
# App overrides Agency setting
self.assertEqual(
access_settings.lookup_config("key_b", "AGENCY", "APPNAME"),
"value_3")
# Final parameter overrides everything
self.assertEqual(
access_settings.lookup_config("key_b", "AGENCY", "APPNAME",
{"key_b": "value_10"}),
"value_10")
# Falls back to Agency setting
self.assertEqual(
access_settings.lookup_config("key_a", "AGENCY", "APPNAME"),
"value_1")
self.assertEqual(
access_settings.lookup_config("key_b", "AGENCY", None),
"value_2")
def test_clean_response(self):
"""Verify that required fields must be present, and that no additional
fields are accepted"""
nodata = access_settings.clean_response({})
self.assertTrue(isinstance(nodata, str))
data = {'protocol_version': 'pv', 'response_message': 'rm',
'action': 'a', 'form_id': 'fi', 'agency_tracking_id': 'ati',
'invalid_field': 'if'}
new_field = access_settings.clean_response(data)
self.assertTrue(isinstance(new_field, str))
self.assertNotEqual(nodata, new_field) # different errors
del(data['invalid_field'])
data['payment_amount'] = '20.25'
result = access_settings.clean_response(data)
self.assertEqual(data, result)
self.assertNotEqual(id(data), id(result))
|
import arcpy
# get passed in arguments
mapDoc = arcpy.GetParameterAsText(0)
wrkspc = arcpy.GetParameterAsText(1)
datasetName = arcpy.GetParameterAsText(2)
#wrkspc = r"C:\Data\OSM\Mxds\NewOSMDEV.sde\sde.SDE.TempTest08"
# set mxd
mxd = arcpy.mapping.MapDocument(mapDoc)
# change data source locations
for lyr in arcpy.mapping.ListLayers(mxd):
if lyr.supports("DATASOURCE"):
        print(lyr.dataSource)
lyrDs = lyr.dataSource
i = lyrDs.rfind("_osm_")
if i > 0:
fCNameExt = lyrDs[i:]
newFCName = datasetName + fCNameExt
            print(newFCName)
lyr.replaceDataSource(wrkspc, "SDE_WORKSPACE", newFCName)
            print(lyr.dataSource)
# find any broken data sources and delete layer
for df in arcpy.mapping.ListDataFrames(mxd):
for lyr in arcpy.mapping.ListLayers(mxd, "", df):
for brklyr in arcpy.mapping.ListBrokenDataSources(lyr):
            print('Removing layer ' + brklyr.name + ' due to broken data source.')
arcpy.mapping.RemoveLayer(df, brklyr)
# Set data frame extent
df = arcpy.mapping.ListDataFrames(mxd)[0]
desc = arcpy.Describe(wrkspc + '\\' + datasetName)
df.extent = desc.extent
# Save mxd
mxd.save() |
from datetime import datetime
from functools import partial
import pandas as pd
from matplotlib import pyplot as plt
from src.backtester import BackTester
from src.pricer import read_price_df
from src.finta.ta import TA
from src.orders.order import Order, OrderStatus, OrderSide
# Strategy rules:
# Buy signal:
# 1. close price crosses the 50 SMMA from below
# 2. price above the 200 SMMA
# 3. SL 1.5 * ATR
# 4. TP 3 * ATR
# Sell signal:
# 1. close price crosses the 50 SMMA from above
# 2. price below the 200 SMMA
# 3. SL 1.5 * ATR
# 4. TP 3 * ATR
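# signal() below encodes the cross bar-by-bar: a bullish signal is a bar that
# opens below the short SMMA and closes above it, with both the close and the
# short SMMA above the long SMMA; the bearish case mirrors it.
# Returns 1 = buy, -1 = sell, 0 = no signal.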
def signal(short_win, long_win, row):
if row[f'smma_{long_win}'] < row[f'smma_{short_win}'] < row['close'] and row['open'] < row[f'smma_{short_win}']:
return 1
if row[f'smma_{long_win}'] > row[f'smma_{short_win}'] > row['close'] and row['open'] > row[f'smma_{short_win}']:
return -1
return 0
def plot(df, short_win=50, long_win=200):
    # expects the smma_{short_win}/smma_{long_win} and signal columns produced by sample_data()
    fig = plt.figure()
    ax1 = fig.add_subplot(111, ylabel='Price in $')
    df['close'].plot(ax=ax1, color='r', lw=2.)
    df[[f'smma_{short_win}', f'smma_{long_win}']].plot(ax=ax1, lw=2.)
    ax1.plot(df.loc[df.signal == 1.0].index, df[f'smma_{short_win}'][df.signal == 1.0], '^', markersize=10, color='m')
    ax1.plot(df.loc[df.signal == -1.0].index, df[f'smma_{short_win}'][df.signal == -1.0], 'v', markersize=10, color='k')
    plt.show()
def sample_data(instrument: str, start: datetime, end: datetime, short_window: int = 100, long_window: int = 350) -> pd.DataFrame:
price_feed = read_price_df(instrument=instrument, granularity='D', start=start, end=end)
price_feed[f'smma_{short_window}'] = TA.SMMA(price_feed, period=short_window, adjust=False)
price_feed[f'smma_{long_window}'] = TA.SMMA(price_feed, period=long_window, adjust=False)
price_feed['atr'] = TA.ATR(price_feed[['high', 'low', 'close']])
price_feed['signal'] = price_feed.apply(partial(signal, short_window, long_window), axis=1)
return price_feed
def create_orders(instrument: str, ohlc: pd.DataFrame, sl_multiplier: float, tp_multiplier: float) -> list:
price_data = ohlc.reset_index().to_dict('records')
orders = []
next_buy = False
next_sell = False
for el in price_data:
if next_buy:
orders.append(
Order(order_date=el['time'], side=OrderSide.LONG, instrument=instrument, entry=el['open'],
sl=el['open'] - el['atr'] * sl_multiplier, tp=el['open'] + el['atr'] * tp_multiplier, status=OrderStatus.FILLED))
next_buy = False
elif next_sell:
orders.append(
Order(order_date=el['time'], side=OrderSide.SHORT, instrument=instrument, entry=el['open'],
sl=el['open'] + el['atr'] * sl_multiplier, tp=el['open'] - el['atr'] * tp_multiplier, status=OrderStatus.FILLED))
next_sell = False
if el['signal'] == 1:
next_buy = True
elif el['signal'] == -1:
next_sell = True
return orders
if __name__ == '__main__':
dfs = []
instruments = [('XAU_USD', 100), ('GBP_USD', 10000), ('EUR_USD', 10000), ('GBP_AUD', 10000),
('USD_JPY', 1000), ('AUD_USD', 10000), ('USD_SGD', 10000)]
back_tester = BackTester(strategy='MA with ATR exit')
for instrument, lot_size in instruments:
df = sample_data(instrument=instrument, start=datetime(2005, 1, 1), end=datetime(2020, 3, 31), short_window=50, long_window=200)
orders = create_orders(instrument, df, 1.5, 3)
back_tester.lot_size = lot_size
print(f"{'-' * 10} {instrument} {'-' * 10}")
p = back_tester.run(df, orders, suffix=f'_{instrument}')
dfs.append(p)
back_tester.plot_chart(dfs)
|
# -*- coding:utf-8 -*-
"""
huobi Trade module.
https://huobiapi.github.io/docs/spot/v1/cn
Project: alphahunter
Author: HJQuant
Description: Asynchronous driven quantitative trading framework
"""
import json
import hmac
import copy
import gzip
import base64
import urllib
import hashlib
import datetime
from urllib import parse
from urllib.parse import urljoin
from collections import defaultdict, deque
from typing import DefaultDict, Deque, List, Dict, Tuple, Optional, Any
from quant.gateway import ExchangeGateway
from quant.state import State
from quant.utils import tools, logger
from quant.const import MARKET_TYPE_KLINE, INDICATE_ORDER, INDICATE_ASSET, INDICATE_POSITION
from quant.order import Order, Fill, SymbolInfo
from quant.position import Position
from quant.asset import Asset
from quant.tasks import SingleTask, LoopRunTask
from quant.utils.websocket import Websocket
from quant.utils.decorator import async_method_locker
from quant.utils.http_client import AsyncHttpRequests
from quant.order import ORDER_ACTION_BUY, ORDER_ACTION_SELL
from quant.order import ORDER_TYPE_LIMIT, ORDER_TYPE_MARKET, ORDER_TYPE_IOC
from quant.order import LIQUIDITY_TYPE_MAKER, LIQUIDITY_TYPE_TAKER
from quant.order import ORDER_STATUS_SUBMITTED, ORDER_STATUS_PARTIAL_FILLED, ORDER_STATUS_FILLED, ORDER_STATUS_CANCELED, ORDER_STATUS_FAILED
from quant.market import Kline, Orderbook, Trade, Ticker
from quant.trader import Trader
__all__ = ("HuobiRestAPI", "HuobiTrader", )
class HuobiRestAPI:
""" huobi REST API 封装
"""
def __init__(self, host, access_key, secret_key):
""" 初始化
@param host 请求host
@param access_key API KEY
@param secret_key SECRET KEY
"""
self._host = host
self._access_key = access_key
self._secret_key = secret_key
self._account_id = None
async def get_symbols_info(self):
""" 获取所有交易对基础信息
@return data list 所有交易对基础信息
"""
return await self.request("GET", "/v1/common/symbols")
async def get_server_time(self):
""" 获取服务器时间
@return data int 服务器时间戳(毫秒)
"""
return await self.request("GET", "/v1/common/timestamp")
async def get_user_accounts(self):
""" 获取账户信息
"""
return await self.request("GET", "/v1/account/accounts", auth=True)
async def get_account_id(self):
""" 获取账户id
"""
if self._account_id:
return self._account_id
success, error = await self.get_user_accounts()
if error:
return None
for item in success:
if item["type"] == "spot":
self._account_id = str(item["id"])
return self._account_id
return None
async def get_account_balance(self):
""" 获取账户信息
"""
account_id = await self.get_account_id()
uri = "/v1/account/accounts/{account_id}/balance".format(account_id=account_id)
return await self.request("GET", uri, auth=True)
async def get_balance_all(self):
""" 母账户查询其下所有子账户的各币种汇总余额
"""
return await self.request("GET", "/v1/subuser/aggregate-balance", auth=True)
async def create_order(self, symbol, price, quantity, order_type):
""" 创建订单
@param symbol 交易对
@param quantity 交易量
@param price 交易价格
@param order_type 订单类型 buy-market, sell-market, buy-limit, sell-limit
@return order_no 订单id
"""
account_id = await self.get_account_id()
info = {
"account-id": account_id,
"price": price,
"amount": quantity,
"source": "api",
"symbol": symbol,
"type": order_type
}
if order_type == "buy-market" or order_type == "sell-market":
info.pop("price")
return await self.request("POST", "/v1/order/orders/place", body=info, auth=True)
async def revoke_order(self, order_no):
""" 撤销委托单
@param order_no 订单id
@return True/False
"""
uri = "/v1/order/orders/{order_no}/submitcancel".format(order_no=order_no)
return await self.request("POST", uri, auth=True)
async def revoke_orders(self, order_nos):
""" 批量撤销委托单
@param order_nos 订单列表
* NOTE: 单次不超过50个订单id
"""
body = {
"order-ids": order_nos
}
return await self.request("POST", "/v1/order/orders/batchcancel", body=body, auth=True)
async def get_open_orders(self, symbol):
""" 获取当前还未完全成交的订单信息
@param symbol 交易对
* NOTE: 查询上限最多500个订单
"""
account_id = await self.get_account_id()
params = {
"account-id": account_id,
"symbol": symbol,
"size": 500
}
return await self.request("GET", "/v1/order/openOrders", params=params, auth=True)
async def get_order_status(self, order_no):
""" 获取订单的状态
@param order_no 订单id
"""
uri = "/v1/order/orders/{order_no}".format(order_no=order_no)
return await self.request("GET", uri, auth=True)
async def request(self, method, uri, params=None, body=None, auth=False):
""" 发起请求
@param method 请求方法 GET POST
@param uri 请求uri
@param params dict 请求query参数
@param body dict 请求body数据
"""
url = urljoin(self._host, uri)
params = params if params else {}
if auth:
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
params.update({"AccessKeyId": self._access_key,
"SignatureMethod": "HmacSHA256",
"SignatureVersion": "2",
"Timestamp": timestamp})
host_name = urllib.parse.urlparse(self._host).hostname.lower()
params["Signature"] = self.generate_signature(method, params, host_name, uri)
if method == "GET":
headers = {
"Content-type": "application/x-www-form-urlencoded",
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/39.0.2171.71 Safari/537.36"
}
else:
headers = {
"Accept": "application/json",
"Content-type": "application/json"
}
_, success, error = await AsyncHttpRequests.fetch(method, url, params=params, data=json.dumps(body), headers=headers, timeout=10)
if error:
return success, error
if success.get("status") != "ok":
return None, success
return success.get("data"), None
def generate_signature(self, method, params, host_url, request_path):
""" 创建签名
"""
query = "&".join(["{}={}".format(k, parse.quote(str(params[k]))) for k in sorted(params.keys())])
payload = [method, host_url, request_path, query]
payload = "\n".join(payload)
payload = payload.encode(encoding="utf8")
secret_key = self._secret_key.encode(encoding="utf8")
digest = hmac.new(secret_key, payload, digestmod=hashlib.sha256).digest()
signature = base64.b64encode(digest)
signature = signature.decode()
return signature
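    # For reference, the signed payload is the four parts joined by "\n"
    # (values below are illustrative):
    #   GET
    #   api.huobi.me
    #   /v1/account/accounts
    #   AccessKeyId=...&SignatureMethod=HmacSHA256&SignatureVersion=2&Timestamp=...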
class HuobiTrader(Websocket, ExchangeGateway):
""" huobi Trade模块
"""
def __init__(self, **kwargs):
"""Initialize."""
self.cb = kwargs["cb"]
state = None
self._platform = kwargs.get("platform")
self._symbols = kwargs.get("symbols")
self._strategy = kwargs.get("strategy")
self._account = kwargs.get("account")
self._access_key = kwargs.get("access_key")
self._secret_key = kwargs.get("secret_key")
if not self._platform:
state = State(self._platform, self._account, "param platform miss")
elif self._account and (not self._access_key or not self._secret_key):
state = State(self._platform, self._account, "param access_key or secret_key miss")
elif not self._strategy:
state = State(self._platform, self._account, "param strategy miss")
elif not self._symbols:
state = State(self._platform, self._account, "param symbols miss")
if state:
logger.error(state, caller=self)
SingleTask.run(self.cb.on_state_update_callback, state)
return
self._host = "https://api.huobi.me"
self._wss = "wss://api.huobi.me"
url = self._wss + "/ws/v1"
super(HuobiTrader, self).__init__(url, send_hb_interval=0, **kwargs)
#self.heartbeat_msg = "ping"
# Initializing our REST API client.
self._rest_api = HuobiRestAPI(self._host, self._access_key, self._secret_key)
self._account_id = None
        self._syminfo: DefaultDict[str, Dict[str, Any]] = defaultdict(dict)
        self._orders: DefaultDict[str, Dict[str, Order]] = defaultdict(dict)
        # e.g. {"BTC": {"free": 1.1, "locked": 2.2, "total": 3.3}, ... }
        self._assets: DefaultDict[str, Dict[str, float]] = defaultdict(lambda: {k: 0.0 for k in {'free', 'locked', 'total'}})
"""
可以订阅两种订单更新频道,新方式和旧方式
新方式:
优点:延时小,大约100毫秒,不乱序,不丢包.
缺点:包含信息量不全面,需要程序自己维护上下文状态才能获取完整信息.
旧方式:
优点:信息包含全面,程序不需要自己维护上下文状态.
缺点:延时大,大约270毫秒,乱序,可能丢包(比如服务重启的时候).
"""
        self._use_old_style_order_channel = False  # subscribe using the new style by default
        self._pending_order_infos = []  # the order-placing call and the websocket order notification can arrive in either order, so this list helps assemble a complete order notification for the upper strategy layer
self._order_channel = []
for sym in self._symbols:
if self._use_old_style_order_channel:
self._order_channel.append("orders.{}".format(sym))
else:
self._order_channel.append("orders.{}.update".format(sym))
if self._account != None:
self.initialize()
        # if all four market-data callbacks are empty, none of the market-data code needs to run
if (self.cb.on_kline_update_callback or
self.cb.on_orderbook_update_callback or
self.cb.on_trade_update_callback or
self.cb.on_ticker_update_callback):
            # market data feed
HuobiMarket(**kwargs)
async def create_order(self, symbol, action, price, quantity, order_type=ORDER_TYPE_LIMIT):
""" 创建订单
@param symbol 交易对
@param action 交易方向 BUY / SELL
@param price 委托价格
@param quantity 委托数量
@param order_type 委托类型 LIMIT / MARKET
"""
if action == ORDER_ACTION_BUY:
if order_type == ORDER_TYPE_LIMIT:
t = "buy-limit"
elif order_type == ORDER_TYPE_MARKET:
t = "buy-market"
elif order_type == ORDER_TYPE_IOC:
t = "buy-ioc"
else:
logger.error("order_type error! order_type:", order_type, caller=self)
return None, "order type error"
elif action == ORDER_ACTION_SELL:
if order_type == ORDER_TYPE_LIMIT:
t = "sell-limit"
elif order_type == ORDER_TYPE_MARKET:
t = "sell-market"
elif order_type == ORDER_TYPE_IOC:
t = "sell-ioc"
else:
logger.error("order_type error! order_type:", order_type, caller=self)
return None, "order type error"
else:
logger.error("action error! action:", action, caller=self)
return None, "action error"
price = tools.float_to_str(price)
quantity = tools.float_to_str(quantity)
result, error = await self._rest_api.create_order(symbol, price, quantity, t)
#=====================================================
        # are we subscribed to the new-style order-update channel?
if not self._use_old_style_order_channel:
            # if the order was placed successfully, cache the new order
if error == None:
order_no = result
tm = tools.get_cur_timestamp_ms()
o = {
"platform": self._platform,
"account": self._account,
"strategy": self._strategy,
"order_no": order_no,
"action": action,
"symbol": symbol,
"price": float(price),
"quantity": float(quantity),
"remain": float(quantity),
"status": ORDER_STATUS_SUBMITTED,
"order_type": order_type,
"ctime": tm,
"utime": tm
#avg_price
}
order = Order(**o)
self._orders[symbol][order_no] = order
                # if the returned order id is already in _pending_order_infos, the websocket notification arrived before the order-placing call returned
msgs = [i for i in self._pending_order_infos if (i["data"]["symbol"]==order.symbol and str(i["data"]["order-id"])==order.order_no)]
if len(msgs) >= 1:
msg = msgs[0]
                    self._update_order_and_fill(msg)  # re-apply the update here, because the earlier attempt in the websocket callback could not succeed (see the surrounding logic)
                    self._pending_order_infos.remove(msg)  # drop the already-processed order data from _pending_order_infos
#=====================================================
return result, error
async def revoke_order(self, symbol, *order_nos):
""" 撤销订单
@param symbol 交易对
@param order_nos 订单号列表,可传入任意多个,如果不传入,那么就撤销所有订单
备注:关于批量删除订单函数返回值格式,如果函数调用失败了那肯定是return None, error
如果函数调用成功,但是多个订单有成功有失败的情况,比如输入3个订单id,成功2个,失败1个,那么
返回值统一都类似:
return [(成功订单ID, None),(成功订单ID, None),(失败订单ID, "失败原因")], None
"""
        # if order_nos is empty, cancel all open orders
if len(order_nos) == 0:
orders, error = await self.get_orders(symbol)
if error:
return [], error
if not orders:
return [], None
order_nos = [o.order_no for o in orders]
        # if a single order id is passed, cancel just that one order
if len(order_nos) == 1:
success, error = await self._rest_api.revoke_order(order_nos[0])
if error:
return order_nos[0], error
else:
return order_nos[0], None
        # if more than one order id is passed, cancel them in batch
if len(order_nos) > 1:
"""
{
"status": "ok",
"data": {
"success": [
"5983466"
],
"failed": [
{
"err-msg": "Incorrect order state",
"order-state": 7,
"order-id": "",
"err-code": "order-orderstate-error",
"client-order-id": "first"
},
{
"err-msg": "The record is not found.",
"order-id": "",
"err-code": "base-not-found",
"client-order-id": "second"
}
]
}
}
"""
s, e = await self._rest_api.revoke_orders(order_nos)
if e:
return [], e
result = []
for x in s["success"]:
result.append((x, None))
for x in s["failed"]:
result.append((x["order-id"], x["err-msg"]))
return result, None
async def get_orders(self, symbol):
""" 获取当前挂单列表
Args:
symbol: Trade target
Returns:
orders: Order list if successfully, otherwise it's None.
error: Error information, otherwise it's None.
"""
#{"status": "ok", "data": [{"filled-cash-amount": "0.0", "filled-fees": "0.0", "filled-amount": "0.0", "symbol": "trxeth", "source": "web", "created-at": 1575100309209, "amount": "17.000000000000000000", "account-id": 11261082, "price": "0.000100000000000000", "id": 58040174635, "state": "submitted", "type": "buy-limit"}, {"filled-cash-amount": "0.0", "filled-fees": "0.0", "filled-amount": "0.0", "symbol": "trxeth", "source": "web", "created-at": 1575018429010, "amount": "10.000000000000000000", "account-id": 11261082, "price": "0.000100000000000000", "id": 57906933472, "state": "submitted", "type": "buy-limit"}]}
success, error = await self._rest_api.get_open_orders(symbol)
if error:
return None, error
else:
orders:List[Order] = []
for order_info in success:
order = self._convert_order_format(order_info)
orders.append(order)
return orders, None
async def get_assets(self):
""" 获取交易账户资产信息
Args:
None
Returns:
assets: Asset if successfully, otherwise it's None.
error: Error information, otherwise it's None.
"""
#{"status": "ok", "data": {"id": 11261082, "type": "spot", "state": "working", "list": [{"currency": "lun", "type": "trade", "balance": "0"}, {"currency": "lun", "type": "frozen", "balance": "0"}]}}
success, error = await self._rest_api.get_account_balance()
if error:
return None, error
        assets: DefaultDict[str, Dict[str, float]] = defaultdict(lambda: {k: 0.0 for k in {'free', 'locked', 'total'}})
for d in success["list"]:
b = d["balance"]
if b == "0": continue
c = d["currency"]
t = d["type"]
if t == "trade":
assets[c]["free"] = float(b)
elif t == "frozen":
assets[c]["locked"] = float(b)
for (_, v) in assets.items():
v["total"] = v["free"] + v["locked"]
ast = Asset(self._platform, self._account, assets, tools.get_cur_timestamp_ms(), True)
return ast, None
async def get_position(self, symbol):
""" 获取当前持仓
Args:
symbol: Trade target
Returns:
position: Position if successfully, otherwise it's None.
error: Error information, otherwise it's None.
"""
raise NotImplementedError
async def get_symbol_info(self, symbol):
""" 获取指定符号相关信息
Args:
symbol: Trade target
Returns:
symbol_info: SymbolInfo if successfully, otherwise it's None.
error: Error information, otherwise it's None.
"""
"""
[{
symbol-partition = "main", #交易区,可能值: [main,innovation]
symbol = "trxeth", #交易对
state = "online", #交易对状态;可能值: [online,offline,suspend] online - 已上线;offline - 交易对已下线,不可交易;suspend -- 交易暂停
base-currency = "trx", #交易对中的基础币种
quote-currency = "eth", #交易对中的报价币种
price-precision = 8, #交易对报价的精度(小数点后位数)
amount-precision = 2, #交易对基础币种计数精度(小数点后位数)
value-precision = 8, #交易对交易金额的精度(小数点后位数)
min-order-amt = 1, #交易对最小下单量 (下单量指当订单类型为限价单或sell-market时,下单接口传的'amount')
max-order-amt = 10000000, #交易对最大下单量
min-order-value = 0.001, #最小下单金额 (下单金额指当订单类型为限价单时,下单接口传入的(amount * price)。当订单类型为buy-market时,下单接口传的'amount')
#"leverage-ratio":4 #交易对杠杆最大倍数(杠杆交易才有这个字段)
},]
"""
info = self._syminfo[symbol]
if not info:
return None, "Symbol not exist"
price_tick = 1/float(10**info["price-precision"])
size_tick = 1/float(10**info["amount-precision"])
size_limit = info["min-order-amt"]
value_tick = 1/float(10**info["value-precision"])
value_limit = info["min-order-value"]
base_currency = info["base-currency"]
quote_currency = info["quote-currency"]
settlement_currency = info["quote-currency"]
symbol_type = "spot"
is_inverse = False
multiplier = 1
syminfo = SymbolInfo(self._platform, symbol, price_tick, size_tick, size_limit, value_tick, value_limit, base_currency, quote_currency, settlement_currency, symbol_type, is_inverse, multiplier)
return syminfo, None
async def invalid_indicate(self, symbol, indicate_type):
""" update (an) callback function.
Args:
symbol: Trade target
indicate_type: INDICATE_ORDER, INDICATE_ASSET, INDICATE_POSITION
Returns:
success: If execute successfully, return True, otherwise it's False.
error: If execute failed, return error information, otherwise it's None.
"""
async def _task():
if indicate_type == INDICATE_ORDER and self.cb.on_order_update_callback:
success, error = await self.get_orders(symbol)
if error:
state = State(self._platform, self._account, "get_orders error: {}".format(error), State.STATE_CODE_GENERAL_ERROR)
SingleTask.run(self.cb.on_state_update_callback, state)
return
for order in success:
SingleTask.run(self.cb.on_order_update_callback, order)
elif indicate_type == INDICATE_ASSET and self.cb.on_asset_update_callback:
success, error = await self.get_assets()
if error:
state = State(self._platform, self._account, "get_assets error: {}".format(error), State.STATE_CODE_GENERAL_ERROR)
SingleTask.run(self.cb.on_state_update_callback, state)
return
SingleTask.run(self.cb.on_asset_update_callback, success)
if indicate_type == INDICATE_ORDER or indicate_type == INDICATE_ASSET:
SingleTask.run(_task)
return True, None
elif indicate_type == INDICATE_POSITION:
raise NotImplementedError
else:
logger.error("indicate_type error! indicate_type:", indicate_type, caller=self)
return False, "indicate_type error"
@property
def rest_api(self):
return self._rest_api
async def connected_callback(self):
""" 建立连接之后,授权登陆,然后订阅相关频道等
"""
#进行登录认证,然后订阅需要登录后才能订阅的私有频道
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
params = {
"AccessKeyId": self._access_key,
"SignatureMethod": "HmacSHA256",
"SignatureVersion": "2",
"Timestamp": timestamp
}
signature = self._rest_api.generate_signature("GET", params, "api.huobi.me", "/ws/v1")
params["op"] = "auth"
params["Signature"] = signature
await self.send_json(params)
async def _auth_success_callback(self):
""" 授权成功之后回调
"""
#获取现货账户ID
self._account_id = await self._rest_api.get_account_id()
if self._account_id is None:
state = State(self._platform, self._account, "get_account_id error", State.STATE_CODE_GENERAL_ERROR)
SingleTask.run(self.cb.on_state_update_callback, state)
# An error occurred during initialization: close the connection to trigger the reconnect mechanism.
await self.socket_close()
return
# Fetch symbol information.
success, error = await self._rest_api.get_symbols_info()
if error:
state = State(self._platform, self._account, "get_symbols_info error: {}".format(error), State.STATE_CODE_GENERAL_ERROR)
SingleTask.run(self.cb.on_state_update_callback, state)
# An error occurred during initialization: close the connection to trigger the reconnect mechanism.
await self.socket_close()
return
for info in success:
self._syminfo[info["symbol"]] = info #符号信息一般不变,获取一次保存好,其他地方要用直接从这个变量获取就可以了
# Fetch account balances and update assets.
#{"status": "ok", "data": {"id": 11261082, "type": "spot", "state": "working", "list": [{"currency": "lun", "type": "trade", "balance": "0"}, {"currency": "lun", "type": "frozen", "balance": "0"}]}}
success, error = await self._rest_api.get_account_balance()
if error:
state = State(self._platform, self._account, "get_account_balance error: {}".format(error), State.STATE_CODE_GENERAL_ERROR)
SingleTask.run(self.cb.on_state_update_callback, state)
# An error occurred during initialization: close the connection to trigger the reconnect mechanism.
await self.socket_close()
return
for d in success["list"]:
b = d["balance"]
if b == "0": continue
c = d["currency"]
t = d["type"]
if t == "trade":
self._assets[c]["free"] = float(b)
#elif t == "frozen":
# self._assets[c]["locked"] = b
ast = Asset(self._platform, self._account, self._assets, tools.get_cur_timestamp_ms(), True)
if self.cb.on_asset_update_callback:
SingleTask.run(self.cb.on_asset_update_callback, ast)
# Fetch currently open orders.
for sym in self._symbols:
success, error = await self.get_orders(sym)
if error:
state = State(self._platform, self._account, "get_orders error: {}".format(error), State.STATE_CODE_GENERAL_ERROR)
SingleTask.run(self.cb.on_state_update_callback, state)
# An error occurred during initialization: close the connection to trigger the reconnect mechanism.
await self.socket_close()
return
for order in success:
# Cache the order only when subscribed to the new-style order update channel.
if not self._use_old_style_order_channel:
self._orders[sym][order.order_no] = order
if self.cb.on_order_update_callback:
SingleTask.run(self.cb.on_order_update_callback, order)
# Subscribe to account asset updates.
if self.cb.on_asset_update_callback:
params = {
"op": "sub",
"topic": "accounts",
"model": "0"
}
await self.send_json(params)
# Subscribe to the order update channels.
if self.cb.on_order_update_callback or self.cb.on_fill_update_callback:
for ch in self._order_channel:
params = {
"op": "sub",
"topic": ch
}
await self.send_json(params)
# Initialize the subscribe-response counter to zero.
self._subscribe_response_count = 0
async def process_binary(self, raw):
""" 处理websocket上接收到的消息
@param raw 原始的压缩数据
"""
#{'op': 'error', 'ts': 1575003013045, 'err-code': 1002, 'err-msg': 'internal error : auth not received.'}
#{'op': 'close', 'ts': 1575003013045}
#{'op': 'auth', 'ts': 1575003739511, 'err-code': 0, 'data': {'user-id': 12053842}}
#{'op': 'ping', 'ts': 1575003876880}
#{'op': 'sub', 'ts': 1575003877414, 'topic': 'orders.eoseth.update', 'err-code': 0}
#{'op': 'sub', 'ts': 1575003882668, 'topic': 'orders.trxeth.update', 'err-code': 0}
#{'op': 'sub', 'ts': 1575003888499, 'topic': 'accounts', 'err-code': 0}
#== Order creation:
#{'op': 'notify', 'ts': 1575004328706, 'topic': 'accounts', 'data': {'event': 'order.place', 'list': [{'account-id': 10432498, 'currency': 'eth', 'type': 'trade', 'balance': '0.71662865'}]}}
#{'op': 'notify', 'ts': 1575004328733, 'topic': 'orders.trxeth.update', 'data': {'role': 'taker', 'match-id': 100413368307, 'filled-cash-amount': '0', 'filled-amount': '0', 'price': '0.0001', 'order-id': 57886011451, 'client-order-id': '', 'order-type': 'buy-limit', 'unfilled-amount': '10', 'symbol': 'trxeth', 'order-state': 'submitted'}}
#== Order cancellation:
#{'op': 'notify', 'ts': 1575004686930, 'topic': 'orders.trxeth.update', 'data': {'role': 'taker', 'match-id': 100413372769, 'filled-cash-amount': '0', 'filled-amount': '0', 'price': '0.0001', 'order-id': 57886011451, 'client-order-id': '', 'order-type': 'buy-limit', 'unfilled-amount': '10', 'symbol': 'trxeth', 'order-state': 'canceled'}}
#{'op': 'notify', 'ts': 1575004687037, 'topic': 'accounts', 'data': {'event': 'order.cancel', 'list': [{'account-id': 10432498, 'currency': 'eth', 'type': 'trade', 'balance': '0.71762865'}]}}
msg = json.loads(gzip.decompress(raw).decode())
logger.debug("msg:", msg, caller=self)
op = msg.get("op")
if op == "auth": # 授权
if msg["err-code"] != 0:
state = State(self._platform, self._account, "Websocket connection authorized failed: {}".format(msg), State.STATE_CODE_GENERAL_ERROR)
logger.error(state, caller=self)
SingleTask.run(self.cb.on_state_update_callback, state)
return
logger.info("Websocket connection authorized successfully.", caller=self)
await self._auth_success_callback()
elif op == "error": # error
state = State(self._platform, self._account, "Websocket error: {}".format(msg), State.STATE_CODE_GENERAL_ERROR)
logger.error(state, caller=self)
SingleTask.run(self.cb.on_state_update_callback, state)
elif op == "close": # close
return
elif op == "ping": # ping
params = {
"op": "pong",
"ts": msg["ts"]
}
await self.send_json(params)
elif op == "sub": # 返回订阅操作是否成功
exist = False
for ch in self._order_channel:
if msg["topic"] == ch:
exist = True
break
if msg["topic"] == "accounts":
exist = True
if not exist:
return
if msg["err-code"] == 0:
self._subscribe_response_count += 1 # increment for each subscribe acknowledgement
count = len(self._order_channel)+1 # expected number of subscribe acknowledgements
if self._subscribe_response_count == count: # all subscriptions succeeded; tell the upper layer everything is ready
state = State(self._platform, self._account, "Environment ready", State.STATE_CODE_READY)
SingleTask.run(self.cb.on_state_update_callback, state)
else:
state = State(self._platform, self._account, "subscribe event error: {}".format(msg), State.STATE_CODE_GENERAL_ERROR)
logger.error(state, caller=self)
SingleTask.run(self.cb.on_state_update_callback, state)
elif op == "notify": # 频道更新通知
if msg["topic"] == "accounts":
self._update_asset(msg)
else:
for ch in self._order_channel:
if msg["topic"] == ch:
if self._use_old_style_order_channel:
self._update_order_and_fill_old_style(msg)
else:
self._update_order_and_fill(msg)
break
def _convert_order_format(self, order_info):
symbol = order_info["symbol"]
order_no = str(order_info["id"])
remain = float(order_info["amount"]) - float(order_info["filled-amount"])
action = ORDER_ACTION_BUY if order_info["type"] in ["buy-market", "buy-limit", "buy-ioc", "buy-limit-maker", "buy-stop-limit"] else ORDER_ACTION_SELL
if order_info["type"] in ["buy-market", "sell-market"]:
order_type = ORDER_TYPE_MARKET
elif order_info["type"] in ["buy-ioc", "sell-ioc"]:
order_type = ORDER_TYPE_IOC
else:
order_type = ORDER_TYPE_LIMIT
ctime = order_info["created-at"]
utime = order_info["created-at"]
state = order_info["state"]
if state == "canceled":
status = ORDER_STATUS_CANCELED
elif state == "partial-canceled":
status = ORDER_STATUS_CANCELED
elif state == "created":
status = ORDER_STATUS_SUBMITTED
elif state == "submitting":
status = ORDER_STATUS_SUBMITTED
elif state == "submitted":
status = ORDER_STATUS_SUBMITTED
elif state == "partical-filled":
status = ORDER_STATUS_PARTIAL_FILLED
elif state == "filled":
status = ORDER_STATUS_FILLED
else:
logger.error("status error! order_info:", order_info, caller=self)
status = ORDER_STATUS_NONE
info = {
"platform": self._platform,
"account": self._account,
"strategy": self._strategy,
"order_no": order_no,
"action": action,
"symbol": symbol,
"price": float(order_info["price"]),
"quantity": float(order_info["amount"]),
"remain": remain,
"status": status,
"order_type": order_type,
"ctime": ctime,
"utime": utime
#avg_price
}
return Order(**info)
def _update_fill(self, order_info, ctime):
"""处理成交通知
"""
symbol = order_info["symbol"]
order_no = str(order_info["order-id"])
fill_no = str(order_info["match-id"])
price = float(order_info["price"]) #成交价格
size = float(order_info["filled-amount"]) #成交数量
side = ORDER_ACTION_BUY if order_info["order-type"] in ["buy-market", "buy-limit", "buy-ioc", "buy-limit-maker", "buy-stop-limit"] else ORDER_ACTION_SELL
liquidity = LIQUIDITY_TYPE_TAKER if order_info["role"]=="taker" else LIQUIDITY_TYPE_MAKER
f = {
"platform": self._platform,
"account": self._account,
"strategy": self._strategy,
"fill_no": fill_no,
"order_no": order_no,
"side": side, #成交方向,买还是卖
"symbol": symbol,
"price": price, #成交价格
"quantity": size, #成交数量
"liquidity": liquidity, #maker成交还是taker成交
#"fee": fee, #通知里没提供,所以只能注释,或者也可以自己算
"ctime": ctime
}
fill = Fill(**f)
if self.cb.on_fill_update_callback:
SingleTask.run(self.cb.on_fill_update_callback, fill)
def _update_order_and_fill(self, msg):
"""
{
'op': 'notify',
'ts': 1575004328733,
'topic': 'orders.trxeth.update',
'data': {
'role': 'taker', # role of the latest trade (defaults to taker when order-state = submitted, canceled, partial-canceled; taker or maker when order-state = filled, partial-filled)
'match-id': 100413368307, # latest match id (a message sequence number when order-state = submitted, canceled, partial-canceled; the latest match id when order-state = filled, partial-filled)
'filled-cash-amount': '0', # latest filled value
'filled-amount': '0', # latest filled quantity
'price': '0.0001', # latest price (the order price when order-state = submitted; zero when order-state = canceled, partial-canceled; the latest fill price when order-state = filled, partial-filled)
'order-id': 57886011451, # order id
'client-order-id': '', # client-defined order id
'order-type': 'buy-limit', # order type: buy-market, sell-market, buy-limit, sell-limit, buy-ioc, sell-ioc, buy-limit-maker, sell-limit-maker, buy-stop-limit, sell-stop-limit
'unfilled-amount': '10', # latest unfilled quantity (the original order quantity when order-state = submitted; the unfilled quantity when order-state = canceled OR partial-canceled; when order-state = filled it may be a tiny residual for buy-market orders and zero otherwise; the unfilled quantity when order-state = partial-filled, for both taker and maker roles)
'symbol': 'trxeth', # trading pair
'order-state': 'submitted' # order state, valid values: submitted, partial-filled, filled, canceled, partial-canceled
}
}
"""
order_info = msg["data"]
state = order_info["order-state"]
if state == "canceled":
status = ORDER_STATUS_CANCELED
elif state == "partial-canceled":
status = ORDER_STATUS_CANCELED
elif state == "created":
status = ORDER_STATUS_SUBMITTED
elif state == "submitting":
status = ORDER_STATUS_SUBMITTED
elif state == "submitted":
status = ORDER_STATUS_SUBMITTED
elif state == "partical-filled":
status = ORDER_STATUS_PARTIAL_FILLED
elif state == "filled":
status = ORDER_STATUS_FILLED
else:
logger.error("status error! order_info:", order_info, caller=self)
return
symbol = order_info["symbol"]
order_no = str(order_info["order-id"])
action = ORDER_ACTION_BUY if order_info["order-type"] in ["buy-market", "buy-limit", "buy-ioc", "buy-limit-maker", "buy-stop-limit"] else ORDER_ACTION_SELL
if order_info["order-type"] in ["buy-market", "sell-market"]:
order_type = ORDER_TYPE_MARKET
elif order_info["order-type"] in ["buy-ioc", "sell-ioc"]:
order_type = ORDER_TYPE_IOC
else:
order_type = ORDER_TYPE_LIMIT
#tm = msg["ts"]
tm = tools.get_cur_timestamp_ms()
order = self._orders[symbol].get(order_no)
if order is None:
# Reaching here can happen in a few ways: the order was placed on the web site, or our own
# strategy placed it but the websocket notification arrived before the order-placing call returned.
# Either way, keep only the 10 most recent pending notifications: beyond 10, drop the oldest.
if len(self._pending_order_infos) > 10:
self._pending_order_infos.pop() # drop the oldest element at the end of the list
self._pending_order_infos.insert(0, msg) # store the newest at the front of the list
return # ignore notifications for orders that are not in the cache
if action == ORDER_ACTION_BUY and order_type == ORDER_TYPE_MARKET:
order.remain = order.quantity - float(order_info["filled-cash-amount"]) # market buy orders are specified by value (quote amount), so the remainder must be a value too
else:
order.remain = float(order_info["unfilled-amount"])
order.status = status
order.utime = tm
if self.cb.on_order_update_callback:
SingleTask.run(self.cb.on_order_update_callback, order)
if status in [ORDER_STATUS_CANCELED, ORDER_STATUS_FILLED]:
self._orders[symbol].pop(order_no) # the order is finished; remove it from the cache
# If this is a fill notification, run the fill callback.
if status == ORDER_STATUS_PARTIAL_FILLED or status == ORDER_STATUS_FILLED:
self._update_fill(order_info, tm)
def _update_order_and_fill_old_style(self, msg):
""" 更新订单信息
@param msg 订单信息
"""
#{'op': 'notify', 'ts': 1575268899866, 'topic': 'orders.trxeth', 'data': {'seq-id': 100418110944, 'order-id': 58326818953, 'symbol': 'trxeth', 'account-id': 11261082, 'order-amount': '10', 'order-price': '0.000104', 'created-at': 1575268899682, 'order-type': 'buy-limit', 'order-source': 'spot-web', 'order-state': 'filled', 'role': 'taker', 'price': '0.00010399', 'filled-amount': '10', 'unfilled-amount': '0', 'filled-cash-amount': '0.0010399', 'filled-fees': '0.02'}}
#{'op': 'notify', 'ts': 1575269220762, 'topic': 'orders.trxeth', 'data': {'seq-id': 100418116512, 'order-id': 58324882527, 'symbol': 'trxeth', 'account-id': 11261082, 'order-amount': '10', 'order-price': '0.00010376', 'created-at': 1575269220597, 'order-type': 'buy-limit', 'order-source': 'spot-web', 'order-state': 'canceled', 'role': 'taker', 'price': '0.00010376', 'filled-amount': '0', 'unfilled-amount': '10', 'filled-cash-amount': '0', 'filled-fees': '0'}}
#{'op': 'notify', 'ts': 1575269259564, 'topic': 'orders.trxeth', 'data': {'seq-id': 100418116991, 'order-id': 58327457834, 'symbol': 'trxeth', 'account-id': 11261082, 'order-amount': '9.98', 'order-price': '0', 'created-at': 1575269259451, 'order-type': 'sell-market', 'order-source': 'spot-web', 'order-state': 'filled', 'role': 'taker', 'price': '0.00010407', 'filled-amount': '9.98', 'unfilled-amount': '0', 'filled-cash-amount': '0.0010386186', 'filled-fees': '0.0000020772372'}}
#{'op': 'notify', 'ts': 1575269323862, 'topic': 'orders.trxeth', 'data': {'seq-id': 100418118242, 'order-id': 58327583981, 'symbol': 'trxeth', 'account-id': 11261082, 'order-amount': '0.001', 'order-price': '0', 'created-at': 1575269323654, 'order-type': 'buy-market', 'order-source': 'spot-web', 'order-state': 'filled', 'role': 'taker', 'price': '0.00010425', 'filled-amount': '9.59232613908872901', 'unfilled-amount': '0', 'filled-cash-amount': '0.000999999999999999', 'filled-fees': '0.019184652278177458'}}
"""
{
'op': 'notify',
'ts': 1575269323862,
'topic': 'orders.trxeth',
'data': {
'seq-id': 100418118242,
'order-id': 58327583981,
'symbol': 'trxeth',
'account-id': 11261082,
'order-amount': '0.001',
'order-price': '0',
'created-at': 1575269323654,
'order-type': 'buy-market',
'order-source': 'spot-web',
'order-state': 'filled',
'role': 'taker',
'price': '0.00010425',
'filled-amount': '9.59232613908872901',
'unfilled-amount': '0',
'filled-cash-amount': '0.000999999999999999',
'filled-fees': '0.019184652278177458'}
}
"""
tm = msg["ts"]
order_info = msg["data"]
symbol = order_info["symbol"]
order_no = str(order_info["order-id"])
action = ORDER_ACTION_BUY if order_info["order-type"] in ["buy-market", "buy-limit", "buy-ioc", "buy-limit-maker", "buy-stop-limit"] else ORDER_ACTION_SELL
if order_info["order-type"] in ["buy-market", "sell-market"]:
order_type = ORDER_TYPE_MARKET
elif order_info["order-type"] in ["buy-ioc", "sell-ioc"]:
order_type = ORDER_TYPE_IOC
else:
order_type = ORDER_TYPE_LIMIT
quantity = float(order_info["order-amount"])
if action == ORDER_ACTION_BUY and order_type == ORDER_TYPE_MARKET:
remain = quantity - float(order_info["filled-cash-amount"]) # market buy orders are specified by value (quote amount), so the remainder must be a value too
else:
remain = float(order_info["unfilled-amount"])
ctime = order_info["created-at"]
utime = tm
state = order_info["order-state"]
if state == "canceled":
status = ORDER_STATUS_CANCELED
elif state == "partial-canceled":
status = ORDER_STATUS_CANCELED
elif state == "created":
status = ORDER_STATUS_SUBMITTED
elif state == "submitting":
status = ORDER_STATUS_SUBMITTED
elif state == "submitted":
status = ORDER_STATUS_SUBMITTED
elif state == "partical-filled":
status = ORDER_STATUS_PARTIAL_FILLED
elif state == "filled":
status = ORDER_STATUS_FILLED
else:
logger.error("status error! order_info:", order_info, caller=self)
return
info = {
"platform": self._platform,
"account": self._account,
"strategy": self._strategy,
"order_no": order_no,
"action": action,
"symbol": symbol,
"price": float(order_info["order-price"]),
"quantity": quantity,
"remain": remain,
"status": status,
"order_type": order_type,
"ctime": ctime,
"utime": utime
#avg_price
}
order = Order(**info)
if self.cb.on_order_update_callback:
SingleTask.run(self.cb.on_order_update_callback, order)
#=====================================================================================
# Next, handle the fill callback.
if status == ORDER_STATUS_PARTIAL_FILLED or status == ORDER_STATUS_FILLED:
fill_no = str(order_info["seq-id"])
price = float(order_info["price"]) #成交价格
size = float(order_info["filled-amount"]) #成交数量
side = action
liquidity = LIQUIDITY_TYPE_TAKER if order_info["role"]=="taker" else LIQUIDITY_TYPE_MAKER
fee = float(order_info["filled-fees"])
f = {
"platform": self._platform,
"account": self._account,
"strategy": self._strategy,
"fill_no": fill_no,
"order_no": order_no,
"side": side, #成交方向,买还是卖
"symbol": symbol,
"price": price, #成交价格
"quantity": size, #成交数量
"liquidity": liquidity, #maker成交还是taker成交
"fee": fee,
"ctime": tm
}
fill = Fill(**f)
if self.cb.on_fill_update_callback:
SingleTask.run(self.cb.on_fill_update_callback, fill)
def _update_asset(self, msg):
"""
{
'op': 'notify',
'ts': 1575004687037,
'topic': 'accounts',
'data': {
'event': 'order.cancel', # event behind the asset change, e.g. order creation (order.place), order match (order.match), match refund (order.refund), order cancellation (order.cancel), fee deduction via point card (order.fee-refund), margin transfer (margin.transfer), margin loan principal (margin.loan), margin interest accrual (margin.interest), loan repayment (margin.repay), other asset changes (other)
'list': [
{
'account-id': 10432498, # account id
'currency': 'eth', # currency
'type': 'trade', # type: trade, loan, or interest
'balance': '0.71762865' # account balance (available balance when subscribed with model=0; total balance with model=1)
}
]
}
}
"""
tm = msg["ts"]
account_info = msg["data"]
for d in account_info["list"]:
b = d["balance"]
c = d["currency"]
self._assets[c]["free"] = float(b)
ast = Asset(self._platform, self._account, self._assets, tm, True)
SingleTask.run(self.cb.on_asset_update_callback, ast)
@staticmethod
def mapping_layer():
""" 获取符号映射关系.
Returns:
layer: 符号映射关系
"""
layer = Trader.MAPPING_LAYER()
layer.is_upper = True
layer.map_dict = {
"BTC/USDT": "btcusdt",
"ETH/USDT": "ethusdt",
"EOS/USDT": "eosusdt",
"BCH/USDT": "bchusdt",
"BSV/USDT": "bsvusdt",
"LTC/USDT": "ltcusdt",
"XRP/USDT": "xrpusdt",
"ADA/USDT": "adausdt",
"TRX/USDT": "trxusdt",
#
"ETH/BTC": "ethbtc",
"EOS/BTC": "eosbtc",
"BCH/BTC": "bchbtc",
"BSV/BTC": "bsvbtc",
"LTC/BTC": "ltcbtc",
"XRP/BTC": "xrpbtc",
"ADA/BTC": "adabtc",
"TRX/BTC": "trxbtc",
#
"EOS/ETH": "eoseth",
"TRX/ETH": "trxeth"
}
return layer
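# Usage sketch (the enclosing trader class name is assumed, not shown here): the
# mapping layer translates unified symbol names to Huobi's lowercase concatenated
# form, e.g. layer.map_dict["ETH/USDT"] -> "ethusdt".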
class HuobiMarket(Websocket):
""" Huobi Market Server.
"""
def __init__(self, **kwargs):
self.cb = kwargs["cb"]
self._platform = kwargs["platform"]
self._symbols = kwargs["symbols"]
self._wss = "wss://api.huobi.me"
url = self._wss + "/ws"
super(HuobiMarket, self).__init__(url, send_hb_interval=0, **kwargs)
#self.heartbeat_msg = "ping"
self._c_to_s = {} # {"channel": "symbol"}
self._prev_kline_map = defaultdict(lambda:None)
self.initialize()
async def connected_callback(self):
"""After create Websocket connection successfully, we will subscribing orderbook/trade/kline events."""
if self.cb.on_kline_update_callback:
for symbol in self._symbols:
channel = self._symbol_to_channel(symbol, "kline")
if channel:
data = {"sub": channel}
await self.send_json(data)
if self.cb.on_orderbook_update_callback:
for symbol in self._symbols:
channel = self._symbol_to_channel(symbol, "depth")
if channel:
data = {"sub": channel}
await self.send_json(data)
if self.cb.on_trade_update_callback:
for symbol in self._symbols:
channel = self._symbol_to_channel(symbol, "trade")
if channel:
data = {"sub": channel}
await self.send_json(data)
async def process_binary(self, raw):
""" Process binary message that received from Websocket connection.
Args:
raw: Binary message received from Websocket connection.
"""
data = json.loads(gzip.decompress(raw).decode())
logger.debug("data:", json.dumps(data), caller=self)
channel = data.get("ch")
if not channel:
if data.get("ping"):
hb_msg = {"pong": data.get("ping")}
await self.send_json(hb_msg)
return
symbol = self._c_to_s[channel]
if channel.find("kline") != -1:
cur_kline = data["tick"]
if self._prev_kline_map[symbol]: # if we have seen a previous kline for this symbol
prev_kline = self._prev_kline_map[symbol]
if prev_kline["id"] != cur_kline["id"]: #前一根k线的开始时间与当前k线开始时间不同,意味着前一根k线已经统计完毕,通知上层策略
info = {
"platform": self._platform,
"symbol": symbol,
"open": prev_kline["open"],
"high": prev_kline["high"],
"low": prev_kline["low"],
"close": prev_kline["close"],
"volume": prev_kline["amount"], #火币现货接口居然用amount表示成交量,vol表示成交额,也是晕了.
"timestamp": prev_kline["id"]*1000, #id字段表示以秒为单位的开始时间,转换为毫秒为单位
"kline_type": MARKET_TYPE_KLINE
}
kline = Kline(**info)
SingleTask.run(self.cb.on_kline_update_callback, kline)
self._prev_kline_map[symbol] = cur_kline
elif channel.find("depth") != -1:
d = data["tick"]
asks = d["asks"][:20] #[[price, quantity],....]
bids = d["bids"][:20]
info = {
"platform": self._platform,
"symbol": symbol,
"asks": asks,
"bids": bids,
"timestamp": d["ts"]
}
ob = Orderbook(**info)
SingleTask.run(self.cb.on_orderbook_update_callback, ob)
elif channel.find("trade") != -1:
tick = data["tick"]
for t in tick["data"]:
info = {
"platform": self._platform,
"symbol": symbol,
"action": ORDER_ACTION_BUY if t["direction"] == "buy" else ORDER_ACTION_SELL,
"price": t["price"],
"quantity": t["amount"],
"timestamp": t["ts"]
}
trade = Trade(**info)
SingleTask.run(self.cb.on_trade_update_callback, trade)
def _symbol_to_channel(self, symbol, channel_type):
""" Convert symbol to channel.
Args:
symbol: Trade pair name.
channel_type: channel name, kline / trade / depth.
"""
if channel_type == "kline":
channel = "market.{s}.kline.1min".format(s=symbol)
elif channel_type == "depth":
channel = "market.{s}.depth.step0".format(s=symbol)
elif channel_type == "trade":
channel = "market.{s}.trade.detail".format(s=symbol)
else:
logger.error("channel type error! channel type:", channel_type, calle=self)
return None
self._c_to_s[channel] = symbol
return channel
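# For example, for symbol "trxeth" the channels built above would be:
#   market.trxeth.kline.1min, market.trxeth.depth.step0, market.trxeth.trade.detail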
|
import chainer
from chainer.backends import cuda
from chainercv import utils
import numpy as np
from net import Generator
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--model_path', default='./progressive_growing_of_gans/Gs_chainer.npz')
args = parser.parse_args()
chainer.config.train = False
latent = np.random.randn(4, 512).astype(np.float32)
generator = Generator()
chainer.serializers.load_npz(args.model_path, generator)
with chainer.no_backprop_mode():
img = generator(latent)
print(img.shape)
# [-1, 1] -> [0, 255]
image = cuda.to_cpu(img.array) * 127.5 + 127.5
image = image.clip(0.0, 255.0).astype(np.float32)
utils.write_image(utils.tile_images(image, 2), 'out.png')
if __name__ == "__main__":
main()
|
grocery = ("apple", "banana", "cherry")
fruit = iter(grocery)
print(next(fruit))
print(next(fruit))
print(next(fruit))
simpleString = "Hello"
breakdown = iter(simpleString)
print(next(breakdown))
print(next(breakdown))
print(next(breakdown))
print(next(breakdown))
print(next(breakdown))
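# Note: one more next() call here would raise StopIteration, because the
# 5-character string is exhausted. A sketch of guarding against that:
# letter = next(breakdown, None) # the default is returned instead of raising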
|
class Solution:
# @return a string
def fractionToDecimal(self, numerator, denominator):
dic = {}
res = []
res_first = 0
prefix = ''
if numerator * denominator < 0:
prefix = "-"
numerator = abs(numerator)
denominator = abs(denominator)
if numerator > denominator:
res_first = numerator // denominator
numerator %= denominator
while numerator and numerator not in dic:
numerator *= 10
res.append(numerator // denominator)
dic[numerator // 10] = len(res) - 1
numerator = numerator % denominator
res2 = 0
print(res)
if len(res) == 0:
return prefix + str(res_first)
if not numerator:
res2 = "".join(str(c) for c in res[:])
return prefix + str(res_first) + '.' + res2
else:
repeat = dic[numerator]
res2 = "".join(str(c) for c in res[:repeat])
res_rest = "".join([str(c) for c in res[repeat:]])
dot = '.'
return prefix + str(res_first) + dot + res2 + "(" + res_rest + ")"
if __name__ == "__main__":
s = Solution()
tests = [(1, 2), (2, 1), (0, 3), (7, 3), (1, 90), (-22, -2), (-2, 3), (1, 214748364), (-1, -2147483648)]
for test in tests:
print(s.fractionToDecimal(*test))
|
# @Author: Manuel Rodriguez <vallemrv>
# @Date: 29-Aug-2017
# @Email: [email protected]
# @Last modified by: valle
# @Last modified time: 27-Feb-2018
# @License: Apache license vesion 2.0
from .constant import constant
from .fields import *
from .relatedfields import *
from .model import Model
CASCADE = constant.CASCADE
SET_NULL = constant.SET_NULL
SET_DEFAUT = constant.SET_NULL
NO_ACTION = constant.NO_ACTION
|
__author__ = 'chuqiao'
import EventsPortal
EventsPortal.deleteDataInSolr("http://139.162.217.53:8983/solr/eventsportal")
|
from datetime import datetime, timezone, timedelta
class DateTime:
"""
Date-time class supporting automatic conversions to epoch time.
"""
_date_time = None # datetime object
def __init__(self, year=1970, month=1, day=1,
hour=0, minute=0, second=0, tzinfo=timezone.utc,
epoch=-1,
date_str='', time_str='', datetime_obj=None):
"""
Initialize with epoch time if given, otherwise
with date and time strings if given, otherwise
with datetime_obj if given, otherwise
year, month, day, hour, minute, second.
:param year: year.
:param month: month.
:param day: day.
:param hour: hour.
:param minute: minute.
:param second: second.
:param tzinfo: timezone.
:param epoch: epoch time.
:param date_str: date in the format year-month-day = %Y-%m-%d.
:param time_str: time in the format hour:minute:second
= %H:%M:%S
:param datetime_obj: datetime object
"""
if epoch >= 0:
self._date_time = datetime.fromtimestamp(epoch, tzinfo)
elif date_str and time_str:
d = date_str.strip() + ' ' + time_str.strip()
self._date_time = datetime.strptime(d, '%Y-%m-%d %H:%M:%S')
elif datetime_obj is not None:
self._date_time = datetime_obj
else:
self._date_time = datetime(year, month, day,
hour, minute, second, tzinfo=tzinfo)
@property
def epoch(self):
"""
Epoch time.
:return: epoch time in seconds.
"""
return self._date_time.timestamp()
def __str__(self):
"""
String representation.
:return: string representation.
"""
return '{:%Y-%m-%d %H:%M:%S}'.format(self._date_time)
def __add__(self, other):
"""
Add a time delta.
:param other: seconds or datetime.timedelta object.
"""
if isinstance(other, timedelta):
shifted = self._date_time + other
else:
shifted = self._date_time + timedelta(seconds=other)
return DateTime(datetime_obj=shifted)
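# A small usage sketch of the class above (runs only when this module is executed directly):
if __name__ == '__main__':
    t0 = DateTime(epoch=0)          # 1970-01-01 00:00:00 UTC
    print(t0, t0.epoch)             # -> 1970-01-01 00:00:00 0.0
    t1 = t0 + timedelta(hours=1)    # shift by a timedelta ...
    t2 = t0 + 3600                  # ... or by plain seconds
    print(t1, t2)                   # both -> 1970-01-01 01:00:00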
|
from socket import *
serverSocket = socket(AF_INET, SOCK_STREAM)
host = "10.205.9.45"
port = 1234
bandwidth = 20*(10**6) #~20MB
fileName = "message(recv).txt"
receiveFile = open(fileName, 'wb+')
serverSocket.bind((host, port))
serverSocket.listen(5)
print('Ready to receive')
connectionSocket, address = serverSocket.accept()
receiveBytes = connectionSocket.recv(bandwidth)
while (receiveBytes):
print("Receiving...")
receiveFile.write(receiveBytes)
receiveBytes = connectionSocket.recv(bandwidth)
receiveFile.close()
print("Done Receiving")
connectionSocket.send('Thank you for the file'.encode())
connectionSocket.close() |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import inspect
import time
from grakn.client import GraknClient
from kglib.kgcn.pipeline.pipeline import pipeline
from kglib.utils.grakn.synthetic.examples.diagnosis.generate import generate_example_graphs
from kglib.utils.grakn.type.type import get_thing_types, get_role_types
from kglib.utils.graph.iterate import multidigraph_data_iterator
from kglib.utils.graph.query.query_graph import QueryGraph
from kglib.utils.graph.thing.queries_to_graph import build_graph_from_queries
KEYSPACE = "diagnosis"
URI = "localhost:48555"
# Existing elements in the graph are those that pre-exist in the graph, and should be predicted to continue to exist
PREEXISTS = 0
# Candidates are neither present in the input nor in the solution, they are negative samples
CANDIDATE = 1
# Elements to infer are the graph elements whose existence we want to predict to be true, they are positive samples
TO_INFER = 2
# Categorical Attribute types and the values of their categories
CATEGORICAL_ATTRIBUTES = {'name': ['Diabetes Type II', 'Multiple Sclerosis', 'Blurred vision', 'Fatigue', 'Cigarettes',
'Alcohol']}
# Continuous Attribute types and their min and max values
CONTINUOUS_ATTRIBUTES = {'severity': (0, 1), 'age': (7, 80), 'units-per-week': (3, 29)}
TYPES_TO_IGNORE = ['candidate-diagnosis', 'example-id', 'probability-exists', 'probability-non-exists', 'probability-preexists']
ROLES_TO_IGNORE = ['candidate-patient', 'candidate-diagnosed-disease']
# The learner should see candidate relations the same as the ground truth relations, so adjust these candidates to
# look like their ground truth counterparts
TYPES_AND_ROLES_TO_OBFUSCATE = {'candidate-diagnosis': 'diagnosis',
'candidate-patient': 'patient',
'candidate-diagnosed-disease': 'diagnosed-disease'}
def diagnosis_example(num_graphs=200,
num_processing_steps_tr=5,
num_processing_steps_ge=5,
num_training_iterations=1000,
keyspace=KEYSPACE, uri=URI):
"""
Run the diagnosis example from start to finish, including traceably ingesting predictions back into Grakn
Args:
num_graphs: Number of graphs to use for training and testing combined
num_processing_steps_tr: The number of message-passing steps for training
num_processing_steps_ge: The number of message-passing steps for testing
num_training_iterations: The number of training epochs
keyspace: The name of the keyspace to retrieve example subgraphs from
uri: The uri of the running Grakn instance
Returns:
Final accuracies for training and for testing
"""
tr_ge_split = int(num_graphs*0.5)
generate_example_graphs(num_graphs, keyspace=keyspace, uri=uri)
client = GraknClient(uri=uri)
session = client.session(keyspace=keyspace)
graphs = create_concept_graphs(list(range(num_graphs)), session)
with session.transaction().read() as tx:
# Change the terminology here onwards from thing -> node and role -> edge
node_types = get_thing_types(tx)
for el in TYPES_TO_IGNORE:
node_types.remove(el)
edge_types = get_role_types(tx)
for el in ROLES_TO_IGNORE:
edge_types.remove(el)
print(f'Found node types: {node_types}')
print(f'Found edge types: {edge_types}')
ge_graphs, solveds_tr, solveds_ge = pipeline(graphs,
tr_ge_split,
node_types,
edge_types,
num_processing_steps_tr=num_processing_steps_tr,
num_processing_steps_ge=num_processing_steps_ge,
num_training_iterations=num_training_iterations,
continuous_attributes=CONTINUOUS_ATTRIBUTES,
categorical_attributes=CATEGORICAL_ATTRIBUTES,
output_dir=f"./events/{time.time()}/")
with session.transaction().write() as tx:
write_predictions_to_grakn(ge_graphs, tx)
session.close()
client.close()
return solveds_tr, solveds_ge
def create_concept_graphs(example_indices, grakn_session):
"""
Builds an in-memory graph for each example, with an example_id as an anchor for each example subgraph.
Args:
example_indices: The values used to anchor the subgraph queries within the entire knowledge graph
grakn_session: Grakn Session
Returns:
In-memory graphs of Grakn subgraphs
"""
graphs = []
infer = True
for example_id in example_indices:
print(f'Creating graph for example {example_id}')
graph_query_handles = get_query_handles(example_id)
with grakn_session.transaction().read() as tx:
# Build a graph from the queries, samplers, and query graphs
graph = build_graph_from_queries(graph_query_handles, tx, infer=infer)
obfuscate_labels(graph, TYPES_AND_ROLES_TO_OBFUSCATE)
graph.name = example_id
graphs.append(graph)
return graphs
def obfuscate_labels(graph, types_and_roles_to_obfuscate):
# Remove label leakage - change type labels that indicate candidates into non-candidates
for data in multidigraph_data_iterator(graph):
for label_to_obfuscate, with_label in types_and_roles_to_obfuscate.items():
if data['type'] == label_to_obfuscate:
data.update(type=with_label)
break
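# For example: a relation node labelled 'candidate-diagnosis' is relabelled 'diagnosis',
# so the learner cannot tell negative samples apart from ground truth by type name alone.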
def get_query_handles(example_id):
"""
Creates an iterable, each element containing a Graql query, a function to sample the answers, and a QueryGraph
object which must be the Grakn graph representation of the query. This tuple is termed a "query_handle"
Args:
example_id: A uniquely identifiable attribute value used to anchor the results of the queries to a specific
subgraph
Returns:
query handles
"""
# === Hereditary Feature ===
hereditary_query = inspect.cleandoc(f'''match
$p isa person, has example-id {example_id};
$par isa person;
$ps(child: $p, parent: $par) isa parentship;
$diag(patient:$par, diagnosed-disease: $d) isa diagnosis;
$d isa disease, has name $n;
get;''')
vars = p, par, ps, d, diag, n = 'p', 'par', 'ps', 'd', 'diag', 'n'
hereditary_query_graph = (QueryGraph()
.add_vars(vars, PREEXISTS)
.add_role_edge(ps, p, 'child', PREEXISTS)
.add_role_edge(ps, par, 'parent', PREEXISTS)
.add_role_edge(diag, par, 'patient', PREEXISTS)
.add_role_edge(diag, d, 'diagnosed-disease', PREEXISTS)
.add_has_edge(d, n, PREEXISTS))
# === Consumption Feature ===
consumption_query = inspect.cleandoc(f'''match
$p isa person, has example-id {example_id};
$s isa substance, has name $n;
$c(consumer: $p, consumed-substance: $s) isa consumption,
has units-per-week $u; get;''')
vars = p, s, n, c, u = 'p', 's', 'n', 'c', 'u'
consumption_query_graph = (QueryGraph()
.add_vars(vars, PREEXISTS)
.add_has_edge(s, n, PREEXISTS)
.add_role_edge(c, p, 'consumer', PREEXISTS)
.add_role_edge(c, s, 'consumed-substance', PREEXISTS)
.add_has_edge(c, u, PREEXISTS))
# === Age Feature ===
person_age_query = inspect.cleandoc(f'''match
$p isa person, has example-id {example_id}, has age $a;
get;''')
vars = p, a = 'p', 'a'
person_age_query_graph = (QueryGraph()
.add_vars(vars, PREEXISTS)
.add_has_edge(p, a, PREEXISTS))
# === Risk Factors Feature ===
risk_factor_query = inspect.cleandoc(f'''match
$d isa disease;
$p isa person, has example-id {example_id};
$r(person-at-risk: $p, risked-disease: $d) isa risk-factor;
get;''')
vars = p, d, r = 'p', 'd', 'r'
risk_factor_query_graph = (QueryGraph()
.add_vars(vars, PREEXISTS)
.add_role_edge(r, p, 'person-at-risk', PREEXISTS)
.add_role_edge(r, d, 'risked-disease', PREEXISTS))
# === Symptom ===
vars = p, s, sn, d, dn, sp, sev, c = 'p', 's', 'sn', 'd', 'dn', 'sp', 'sev', 'c'
symptom_query = inspect.cleandoc(f'''match
$p isa person, has example-id {example_id};
$s isa symptom, has name $sn;
$d isa disease, has name $dn;
$sp(presented-symptom: $s, symptomatic-patient: $p) isa symptom-presentation, has severity $sev;
$c(cause: $d, effect: $s) isa causality;
get;''')
symptom_query_graph = (QueryGraph()
.add_vars(vars, PREEXISTS)
.add_has_edge(s, sn, PREEXISTS)
.add_has_edge(d, dn, PREEXISTS)
.add_role_edge(sp, s, 'presented-symptom', PREEXISTS)
.add_has_edge(sp, sev, PREEXISTS)
.add_role_edge(sp, p, 'symptomatic-patient', PREEXISTS)
.add_role_edge(c, s, 'effect', PREEXISTS)
.add_role_edge(c, d, 'cause', PREEXISTS))
# === Diagnosis ===
diag, d, p, dn = 'diag', 'd', 'p', 'dn'
diagnosis_query = inspect.cleandoc(f'''match
$p isa person, has example-id {example_id};
$d isa disease, has name $dn;
$diag(patient: $p, diagnosed-disease: $d) isa diagnosis;
get;''')
diagnosis_query_graph = (QueryGraph()
.add_vars([diag], TO_INFER)
.add_vars([d, p, dn], PREEXISTS)
.add_role_edge(diag, d, 'diagnosed-disease', TO_INFER)
.add_role_edge(diag, p, 'patient', TO_INFER))
# === Candidate Diagnosis ===
candidate_diagnosis_query = inspect.cleandoc(f'''match
$p isa person, has example-id {example_id};
$d isa disease, has name $dn;
$diag(candidate-patient: $p, candidate-diagnosed-disease: $d) isa candidate-diagnosis;
get;''')
candidate_diagnosis_query_graph = (QueryGraph()
.add_vars([diag], CANDIDATE)
.add_vars([d, p, dn], PREEXISTS)
.add_role_edge(diag, d, 'candidate-diagnosed-disease', CANDIDATE)
.add_role_edge(diag, p, 'candidate-patient', CANDIDATE))
return [
(symptom_query, lambda x: x, symptom_query_graph),
(diagnosis_query, lambda x: x, diagnosis_query_graph),
(candidate_diagnosis_query, lambda x: x, candidate_diagnosis_query_graph),
(risk_factor_query, lambda x: x, risk_factor_query_graph),
(person_age_query, lambda x: x, person_age_query_graph),
(consumption_query, lambda x: x, consumption_query_graph),
(hereditary_query, lambda x: x, hereditary_query_graph)
]
def write_predictions_to_grakn(graphs, tx):
"""
Take predictions from the ML model, and insert representations of those predictions back into the graph.
Args:
graphs: graphs containing the concepts, with their class predictions and class probabilities
tx: Grakn write transaction to use
Returns: None
"""
for graph in graphs:
for node, data in graph.nodes(data=True):
if data['prediction'] == 2:
concept = data['concept']
concept_type = concept.type_label
if concept_type == 'diagnosis' or concept_type == 'candidate-diagnosis':
neighbours = graph.neighbors(node)
for neighbour in neighbours:
concept = graph.nodes[neighbour]['concept']
if concept.type_label == 'person':
person = concept
else:
disease = concept
p = data['probabilities']
query = (f'match '
f'$p id {person.id}; '
f'$d id {disease.id}; '
f'$kgcn isa kgcn; '
f'insert '
f'$pd(patient: $p, diagnosed-disease: $d, diagnoser: $kgcn) isa diagnosis, '
f'has probability-exists {p[2]:.3f}, '
f'has probability-non-exists {p[1]:.3f}, '
f'has probability-preexists {p[0]:.3f};')
tx.query(query)
tx.commit()
if __name__ == "__main__":
diagnosis_example()
|
#!/usr/bin/env python3
import sys # we will need the several modules later
import math
# Let's play more with numbers.
# To determine a type of any object, we can use the builtin function type().
###########################################
# #
# ints #
# #
###########################################
print(type(1)) # The type of 1 is int.
# OK, int is a class. But isn't it inefficient for even small integers to be instances of a class?
# No, it is not. Internally, Python uses a "primitive" type to hold integer values.
print(type(2 ** 1000)) # The type of "large" integers is also int, even though such values definitely cannot be stored in a "primitive" int.
# For "large" integers, Python (transparently) uses a different internal representation (a sequence of numbers).
# Where is the boundary between "regular" and "large" integers?
print(sys.maxsize) # The largest of the regular integer values
# How many bits the regular integers occupy?
print(math.log(sys.maxsize, 2))
# Is not computing with int objects inefficient?
# For the commonly used numbers (-5 to 256) Python creates a pool of objects that are reused
# Let's check it with the id() function
# Note: id() returns a unique identification of an object
# In CPython it is the address of the object in memory
print(f'id(1) = {id(1)}')
print(f'id(2) = {id(2)}')
print('id(1 + 1) has to be the same value as id(2)')
print(f'id(1 + 1) = {id(1 + 1)}')
print('But it is not true for ints outside -5..256 interval')
print(f'id(257) = {id(257)}')
print(f'id(256 + 1) = {id(256 + 1)}')
###########################################
# #
# Decimal and Fraction #
# #
###########################################
# Floats are nice but inherently imprecise. Consider the following comparison.
# It should evaluate to True, but it does not (0.1 cannot be exactly represented as a float)
print(0.1 + 0.1 + 0.1 == 0.3)
print(1/10 + 1/10 + 1/10 == 3/10) # Even this is not true.
# 0.1 cannot be precisely represented
# Let's try to print 0.1
print(f'Printing 0.1: {0.1}')
# It prints 0.1, but by default, only a limited number of significant digits is printed
# Let's try to print 0.1 to 20 significant digits
print(f'Printing 0.1 to 20 significant digits: {0.1:.20g}')
# But sometimes precise arithmetic is needed (e.g., counting money).
# Let's use Decimal and Fraction types.
# They are not builtin types, i.e., they need to be imported, but they are in the Python standard library.
from decimal import Decimal # Decimal - precise float-point numbers
# from - import binds the given entity so it can be used by its 'short' name only
# We could also write 'import decimal', but then it would be necessary to use the long name decimal.Decimal
# and, even more, it could have performance penalties
print(Decimal('0.1') + Decimal('0.1') + Decimal('0.1') == Decimal('0.3')) # Now, it is true
from fractions import Fraction # Fraction - precise fractions
print(Fraction(1, 10) + Fraction(1, 10) + Fraction(1, 10) == Fraction(3, 10)) # This is also true
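# One more illustration of the difference: Decimal keeps significance, Fraction stays exact.
print(Decimal('1.30') + Decimal('1.20')) # 2.50 - the trailing zero is preserved
print(Fraction(1, 3) + Fraction(1, 6)) # 1/2 - an exact rational result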
|
import pytest
from apsis.sqlite import SqliteDB
from apsis.program import OutputMetadata, Output
#-------------------------------------------------------------------------------
def test0():
db = SqliteDB.create(path=None).output_db
len(db.get_metadata("r42")) == 0
with pytest.raises(LookupError):
db.get_data("r42", "output")
data = b"The quick brown fox jumped over the lazy dogs.\x01\x02\x03"
output = Output(OutputMetadata("combined output", len(data)), data)
db.add("r42", "output", output)
meta = db.get_metadata("r42")
assert list(meta.keys()) == ["output"]
assert meta["output"].name == "combined output"
assert db.get_data("r42", "output") == data
|
"""
API for attaching, detaching, and reading extended metadata to HopsFS files/directories.
It uses the Hopsworks /xattrs REST API
"""
from hops import constants, util, hdfs
from hops.exceptions import RestAPIError
import urllib
def set_xattr(hdfs_path, xattr_name, value):
"""
Attach an extended attribute to an hdfs_path
Args:
:hdfs_path: path of a file or directory
:xattr_name: name of the extended attribute
:value: value of the extended attribute
Returns:
None
"""
value = str(value)
hdfs_path = urllib.parse.quote(hdfs._expand_path(hdfs_path))
headers = {constants.HTTP_CONFIG.HTTP_CONTENT_TYPE: constants.HTTP_CONFIG.HTTP_APPLICATION_JSON}
method = constants.HTTP_CONFIG.HTTP_PUT
resource_url = constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_REST_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_PROJECT_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs.project_id() + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_XATTR_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs_path + constants.DELIMITERS.QUESTION_MARK_DELIMITER + constants.XATTRS.XATTRS_PARAM_NAME + \
constants.DELIMITERS.JDBC_CONNECTION_STRING_VALUE_DELIMITER + xattr_name
response = util.send_request(method, resource_url, data=value, headers=headers)
response_object = response.json()
if response.status_code >= 400:
error_code, error_msg, user_msg = util._parse_rest_error(response_object)
raise RestAPIError("Could not attach extened attributes from a path (url: {}), server response: \n " \
"HTTP code: {}, HTTP reason: {}, error code: {}, error msg: {}, user msg: {}".format(
resource_url, response.status_code, response.reason, error_code, error_msg, user_msg))
def get_xattr(hdfs_path, xattr_name=None):
"""
Get the extended attribute attached to an hdfs_path.
Args:
:hdfs_path: path of a file or directory
:xattr_name: name of the extended attribute
Returns:
A dictionary with the extended attribute(s) as key value pair(s). If the :xattr_name is None,
the API returns all associated extended attributes.
"""
hdfs_path = urllib.parse.quote(hdfs._expand_path(hdfs_path))
headers = {constants.HTTP_CONFIG.HTTP_CONTENT_TYPE: constants.HTTP_CONFIG.HTTP_APPLICATION_JSON}
method = constants.HTTP_CONFIG.HTTP_GET
resource_url = constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_REST_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_PROJECT_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs.project_id() + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_XATTR_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs_path
if xattr_name is not None:
resource_url += constants.DELIMITERS.QUESTION_MARK_DELIMITER + constants.XATTRS.XATTRS_PARAM_NAME + \
constants.DELIMITERS.JDBC_CONNECTION_STRING_VALUE_DELIMITER + xattr_name
response = util.send_request(method, resource_url, headers=headers)
response_object = response.json()
if response.status_code >= 400:
error_code, error_msg, user_msg = util._parse_rest_error(response_object)
raise RestAPIError("Could not get extened attributes attached to a path (url: {}), server response: \n " \
"HTTP code: {}, HTTP reason: {}, error code: {}, error msg: {}, user msg: {}".format(
resource_url, response.status_code, response.reason, error_code, error_msg, user_msg))
results = {}
for item in response_object["items"]:
results[item["name"]] = item["value"]
return results
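# Usage sketch (the path and attribute name below are hypothetical):
#   set_xattr("Resources/README.md", "quality", "gold")
#   get_xattr("Resources/README.md")  # -> {"quality": "gold"}
#   remove_xattr("Resources/README.md", "quality")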
def remove_xattr(hdfs_path, xattr_name):
"""
Remove an extended attribute attached to an hdfs_path
Args:
:hdfs_path: path of a file or directory
:xattr_name: name of the extended attribute
Returns:
None
"""
hdfs_path = urllib.parse.quote(hdfs._expand_path(hdfs_path))
headers = {constants.HTTP_CONFIG.HTTP_CONTENT_TYPE: constants.HTTP_CONFIG.HTTP_APPLICATION_JSON}
method = constants.HTTP_CONFIG.HTTP_DELETE
resource_url = constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_REST_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_PROJECT_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs.project_id() + constants.DELIMITERS.SLASH_DELIMITER + \
constants.REST_CONFIG.HOPSWORKS_XATTR_RESOURCE + constants.DELIMITERS.SLASH_DELIMITER + \
hdfs_path + constants.DELIMITERS.QUESTION_MARK_DELIMITER + constants.XATTRS.XATTRS_PARAM_NAME + \
constants.DELIMITERS.JDBC_CONNECTION_STRING_VALUE_DELIMITER + xattr_name
response = util.send_request(method, resource_url, headers=headers)
if response.status_code >= 400:
response_object = response.json()
error_code, error_msg, user_msg = util._parse_rest_error(response_object)
raise RestAPIError("Could not remove extened attributes from a path (url: {}), server response: \n " \
"HTTP code: {}, HTTP reason: {}, error code: {}, error msg: {}, user msg: {}".format(
resource_url, response.status_code, response.reason, error_code, error_msg, user_msg)) |
'''OpenGL extension ARB.texture_query_lod
This module customises the behaviour of the
OpenGL.raw.GL.ARB.texture_query_lod to provide a more
Python-friendly API
Overview (from the spec)
This extension provides a new set of fragment shader texture
functions (textureLOD) that return the results of automatic
level-of-detail computations that would be performed if a texture
lookup were performed.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/texture_query_lod.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.ARB.texture_query_lod import *
### END AUTOGENERATED SECTION |
import util
import numpy as np
import sys
import random
PRINT = True
###### DON'T CHANGE THE SEEDS ##########
random.seed(42)
np.random.seed(42)
def small_classify(y):
classifier, data = y
return classifier.classify(data)
class AdaBoostClassifier:
"""
AdaBoost classifier.
Note that the variable 'datum' in this code refers to a counter of features
(not to a raw samples.Datum).
"""
def __init__( self, legalLabels, max_iterations, weak_classifier, boosting_iterations):
self.legalLabels = legalLabels
self.boosting_iterations = boosting_iterations
self.classifiers = [weak_classifier(legalLabels, max_iterations) for _ in range(self.boosting_iterations)]
self.alphas = [0]*self.boosting_iterations
def train( self, trainingData, trainingLabels):
"""
The training loop trains weak learners with weights sequentially.
The self.classifiers are updated in each iteration and also the self.alphas
"""
self.features = trainingData[0].keys()
# "*** YOUR CODE HERE ***"
n = len(trainingData)
trainingLabels = np.array(trainingLabels)
w = np.ones(n) / n # start with uniform sample weights
for m in range(self.boosting_iterations):
self.classifiers[m].train(trainingData, trainingLabels, w)
error = 0.0
pred = np.array(self.classifiers[m].classify(trainingData))
error += w[trainingLabels != pred].sum() # weighted error of this weak learner
f = error / (1 - error) # weight-update factor beta = err / (1 - err)
w[trainingLabels == pred] *= f # down-weight correctly classified samples
w /= w.sum() # renormalize so the weights form a distribution
self.alphas[m] = -np.log(f) # classifier vote weight alpha = ln(1 / beta)
# util.raiseNotDefined()
def classify( self, data):
"""
Classifies each datum as the label that most closely matches the prototype vector
for that label. This is done by taking a polling over the weak classifiers already trained.
See the assignment description for details.
Recall that a datum is a util.counter.
The function should return a list of labels where each label should be one of legaLabels.
"""
# "*** YOUR CODE HERE ***"
poll = 0
for m in range(self.boosting_iterations):
pred = np.array(self.classifiers[m].classify(data))
poll = np.add(poll, pred * self.alphas[m]) # weighted vote of each weak learner
poll = np.sign(poll) # the sign of the weighted vote decides the label (+1 / -1)
poll[poll == 0] = np.random.choice([-1, 1], np.count_nonzero(poll == 0)) # break ties randomly
return list(poll)
# util.raiseNotDefined() |
# Created by Arno on 02/12/2019
# Source: https://stackoverflow.com/questions/3368969/find-string-between-two-substrings
import re
# Source: https://stackoverflow.com/questions/54059917/generate-all-length-n-permutations-of-true-false
import itertools
import sys
import escape_helpers
class Unit:
name = ""
notation = ""
uri = ""
definition = ""
supported_units = (
"cm^3", "d", "degC", "degF", "degree_east", "degree_north", "g",
"kg", "km", "l", "yr")
# html = open(sys.argv[1]) # The command line argument should point to this file: "units.html"
html = open("units.html")
unit_list = set()
lastReadUnit = None
sectionName = ""
prefPassed = False
failedlines = ""
failed = False
for line in html:
if '<td class="conceptURI">' in line:
lastReadUnit = Unit()
parsed = re.search('<td class="conceptURI"><a href="(.*)">', line)
try:
lastReadUnit.uri = parsed.group(1)
except:
failed = True
failedlines += line
if '<td class="SN">' in line:
parsed = re.search('<td class="SN">(.*)<a href=""></a></td>', line)
try:
lastReadUnit.definition = parsed.group(1)
except:
pass
if '<td class="PREF">' in line:
prefPassed = True
parsed = re.search('<td class="PREF">(.*)<a href=""></a></td>', line)
try:
lastReadUnit.name = parsed.group(1)
except:
failed = True
failedlines += line
elif prefPassed and '<td class="ALT">' in line:
prefPassed = False
parsed = re.search('<td class="ALT">(.*)<a href=""></a></td>', line)
try:
lastReadUnit.notation = parsed.group(1)
except:
failed = True
failedlines += line
if not failed:
if lastReadUnit.notation in supported_units:
unit_list.add(lastReadUnit)
else:
failed = False
html.close()
def str_query(uri, relation, value):
if value is not None:
uri_relations = ("ext:unitUri")
escaped_value = ""
if relation in uri_relations:
escaped_value = escape_helpers.sparql_escape_uri(value)
else:
escaped_value = escape_helpers.sparql_escape(value)
if isinstance(value, bool):
# Fix for weird problem with booleans
escaped_value = escaped_value.replace("False", "false")
escaped_value = escaped_value.replace("True", "true")
escaped_value = escaped_value.replace("^^xsd:boolean",
"^^<http://mu.semte.ch/vocabularies/typed-literals/boolean>")
return "\t\t{uri} {relation} {value} . \n".format(uri=uri, relation=relation,
value=escaped_value)
return ""
# base_uri = "http://vocab.nerc.ac.uk/collection/P06/current/{id}/"
query_str = "INSERT DATA { \n"
query_str += "\tGRAPH {app_uri} {{ \n".format(
app_uri=escape_helpers.sparql_escape_uri("http://mu.semte.ch/application"))
for unit in unit_list:
uri = escape_helpers.sparql_escape_uri(unit.uri)
print(unit.name, unit.notation, unit.uri)
query_str += "\t\t{uri} a ext:Unit . \n".format(uri=uri)
relation = "ext:unitName"
query_str += str_query(uri, relation, unit.name)
relation = "ext:unitNotation"
query_str += str_query(uri, relation, unit.notation)
relation = "ext:unitUri"
query_str += str_query(uri, relation, unit.uri)
relation = "ext:unitDefinition"
query_str += str_query(uri, relation, unit.definition)
relation = "mu:uuid"
query_str += str_query(uri, relation, unit.uri[-5:-1])
query_str += "\t}\n"
query_str += "}\n"
prefixes = "PREFIX ext: {uri}\n".format(
uri=escape_helpers.sparql_escape_uri("http://mu.semte.ch/vocabularies/ext/"))
prefixes += "PREFIX mu: {uri}\n".format(
uri=escape_helpers.sparql_escape_uri("http://mu.semte.ch/vocabularies/core/"))
query_str = prefixes + query_str
# f = open(sys.argv[2], "w+") # argv[2] should be cpp.json
f = open("query.sparql", "w+")
f.write(query_str)
f.close()
fails = open("error.log", "w+")
fails.write(failedlines)
fails.close()
|
# projects/views.py
from rest_framework import generics
from .models import Project
from .serializers import ProjectSerializer
from .serializers import ProjectCreateSerializer
class ProjectListView(generics.ListAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectCreate(generics.CreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectCreateSerializer
class ProjectRetrieve(generics.RetrieveAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectUpdate(generics.UpdateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDelete(generics.DestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
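# A sketch of how these views might be wired up in a urls.py (not part of this file):
# from django.urls import path
# urlpatterns = [
#     path("projects/", ProjectListView.as_view()),
#     path("projects/create/", ProjectCreate.as_view()),
#     path("projects/<int:pk>/", ProjectRetrieve.as_view()),
# ]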
|
from pydantic import BaseModel
from datetime import date
from typing import Optional, List
class OrderBase(BaseModel):
date: date
total: str
class OrderCreate(OrderBase):
pass
class Order(OrderBase):
id: int
customer_id: int
class Config:
orm_mode = True
class CustomerBase(BaseModel):
name: str
email: str
birthday: Optional[date]
class CustomerCreate(CustomerBase):
pass
class Customer(CustomerBase):
id: int
is_active: bool
orders: List[Order] = []
class Config:
orm_mode = True
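# A quick validation sketch (assumes pydantic v1, matching the orm_mode config above);
# the sample values are made up for illustration.
if __name__ == "__main__":
    customer = Customer.parse_obj({
        "id": 1, "name": "Ada", "email": "ada@example.com",
        "birthday": "1815-12-10", "is_active": True,
        "orders": [{"id": 7, "customer_id": 1, "date": "2020-01-02", "total": "19.99"}],
    })
    print(customer.orders[0].date)  # nested Order models are validated and coerced too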
|
import os
from glob import glob
import pandas as pd
import numpy as np
from srp.config import C
def output_mean_variance_table(root):
index_label = []
rows = []
col_names = None
for subdir in os.listdir(root):
if subdir.startswith(('.', '_')):
continue
index_label.append(subdir)
csvs = glob(os.path.join(root, subdir, '*.csv'))
metrics = []
for csv in csvs:
if not col_names:
col_names = pd.read_csv(csv).columns[1:].tolist()
history = pd.read_csv(csv)
metrics.append(history.iloc[len(history)-5, 1:].values)
# print (index_label)
mean = np.mean(metrics, axis=0)
std = np.std(metrics, axis=0)
rows.append(["{:.3f}±{:.3f}".format(m,s) for (m, s) in zip(mean, std)])
return pd.DataFrame(data=np.array(rows), columns=col_names, index=index_label)
def output_folds_table(root):
csvs = glob(os.path.join(root, '*/*.csv'))
useful_cols = pd.read_csv(csvs[0]).columns[1:].tolist()
rows = []
index_label = []
for exp in csvs:
history = pd.read_csv(exp)
assert history.columns[1:].tolist() == useful_cols
rows.append(history.iloc[len(history)-5, 1:].values)
index_label.append(os.path.basename(os.path.dirname(exp))+'_'+os.path.basename(exp)[-5:-4])
assert len(useful_cols) == len(rows[0])
return pd.DataFrame(data=np.array(rows), columns=useful_cols, index=index_label)
if __name__ == '__main__':
experiment_root = os.path.join(C.DATA, 'experiments')
roots = [exp for exp in os.listdir(experiment_root) if not exp.startswith(('.', '_'))]
for r in roots:
df = output_folds_table(os.path.join(experiment_root, r))
df.to_csv('folds_{}_c{}r{}.csv'.format(r, C.TRAIN.REGRESSION_WEIGHT, C.TRAIN.CLASSIFICATION_WEIGHT))
df = output_mean_variance_table(os.path.join(experiment_root, r))
df.to_csv('summary_{}_c{}r{}.csv'.format(r, C.TRAIN.REGRESSION_WEIGHT, C.TRAIN.CLASSIFICATION_WEIGHT))
|
import os, sys, subprocess, json
import pprint
def get_os_env(env_file=os.environ['HOME'] + "/.bashrc"):
'''
Return all the os variables in the environment profile file
:param env_file: the profile file to source. Default: $HOME/.bashrc
:return: dict with the environment variables visible after sourcing env_file
'''
source = "source " + env_file
# Note: the dump command assumes /usr/bin/python is Python 2 (print statement syntax).
dump = '/usr/bin/python -c "import os, json;print json.dumps(dict(os.environ))"'
pipe = subprocess.Popen(['/bin/bash', '-c', '%s && %s' %(source,dump)], stdout=subprocess.PIPE)
if sys.version_info.major == 3:
env = json.loads(str(pipe.stdout.read(),'utf-8'))
else:
env = json.loads(pipe.stdout.read())
return env
if __name__ == '__main__':
my_env = get_os_env()
pprint.pprint(my_env)
|
# Exercise 008: Measurement converter
"""Create a program that reads a value in meters and display it converted to centimeters and millimeters. """
# First we ask for the value
# Since we're working with sizes, we cast the input to a float
meters = float(input("Enter a value: "))
# A cool thing we can do with f-strings
# is do the calculation right inside the print u.u
print(
    f"The multiples of {meters}m are: {meters / 10}dam, {meters / 100}hm and {meters / 1000}km"
)
print(
    f"The submultiples of {meters}m are: {meters * 10}dm, {meters * 100}cm and {meters * 1000}mm"
)
|
import behave
import os
import json
@behave.when(u'I update ontology with labels from file "{file_path}"')
def step_impl(context, file_path):
file_path = os.path.join(os.environ['DATALOOP_TEST_ASSETS'], file_path)
with open(file_path) as f:
context.labels = json.load(f)
context.ontology.add_labels(context.labels)
context.recipe.ontologies.update(context.ontology)
@behave.when(u'I update ontology attributes to "{attribute1}", "{attribute2}"')
def step_impl(context, attribute1, attribute2):
context.ontology.attributes = [attribute1, attribute2]
context.recipe.ontologies.update(context.ontology)
@behave.when(u'I update ontology system metadata')
def step_impl(context):
context.ontology.metadata['system']['something'] = 'value'
context.recipe.ontologies.update(ontology=context.ontology, system_metadata=True)
|
import requests
import base64
from Crypto.Util.number import long_to_bytes, bytes_to_long
import hashlib
from Crypto.Cipher import AES
import zlib
import sys
#refer to PGP ASCII ARMOR and Radix-64 (https://tools.ietf.org/html/rfc4880#section-6.2)
def getPackets(m):
m = m.split("\n")
f = 0
r = ""
for l in m:
if len(l) == 0:
f = 1
continue
if (f == 0): continue
if l[0] == "=": break
r = r + l
return base64.b64decode(r)
def getCompressedLength(msg):
url = "http://drinks.teaser.insomnihack.ch/generateEncryptedVoucher"
data = {"recipientName" : msg, "drink" : "beer"}
headers = {"Content-type" : "application/json"}
resp = requests.post(url, json=data, headers=headers)
m = getPackets(resp.text)
return ord(m[16])
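# Why length works as an oracle (sketch): the server compresses recipientName
# together with the secret passphrase before encrypting, so a guess repeating
# a correct substring of the secret compresses better and yields a shorter
# packet; the byte read above by getCompressedLength reflects that length
# (a CRIME-style compression side channel).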
#refer to https://www.rogdham.net/2018/09/17/csaw-ctf-2018-write-ups.en
def solve(oracle, suffix, charset):
out = []
for c in charset:
#data = c + suffix
data = suffix + c
data *= 5
while len(data) < 20:
data = '<' + data # pad
out.append((c, oracle(data)))
max_value = max(out, key=lambda o: o[1])[1]
return [o[0] for o in out if o[1] != max_value]
def solve_all(oracle):
suffixes = ['||']
charset = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_-|!@#$*.,?/=\\ "
while suffixes:
new_suffixes = []
for suffix in suffixes:
if suffix:
# skip loops at the right of suffix
if suffix.endswith(suffix[-1:] * 3):
continue
if suffix.endswith(suffix[-2:] * 3):
continue
chars = solve(oracle, suffix, charset)
            if not chars:
yield suffix
continue
for char in chars:
#new_suffixes.append(char + suffix)
new_suffixes.append(suffix + char)
print suffixes
suffixes = new_suffixes
for solved in solve_all(getCompressedLength):
print solved
'''
print "###############"
print "# Failed Try #"
print "###############"
#refer to https://github.com/EmpireCTF/empirectf/blob/master/writeups/2018-09-14-CSAW-CTF-Quals/scripts/flatcrypt.py
#padding = b"1234567890ABCDEFGHIJ"
padding = b"abcdefghijklmnopqrst"
known_flag = b"||G1MME_B33R_PLZ_1MM_S0_V3RY_TH1RSTY"
alphabet = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_-|!@#$*.<>,?/=\\ "
while True:
bestCandidate = None
bestLen = 10000
worstLen = -1
for candidate in alphabet:
check = padding + known_flag + candidate + padding
resultLen = getCompressedLength(check)
# update best and worst results
if resultLen < bestLen:
bestLen = resultLen
bestCandidate = candidate
worstLen = max(worstLen, resultLen)
sys.stdout.write('.')
sys.stdout.flush()
if worstLen == bestLen:
print "done!"
break
known_flag = known_flag + bestCandidate
print known_flag
#'''
'''
url = "http://drinks.teaser.insomnihack.ch/generateEncryptedVoucher"
data = {"recipientName" : "matta", "drink" : "beer"}
headers = {"Content-type" : "application/json"}
voucher = (requests.post(url, json=data, headers=headers)).text
print voucher
url = "http://drinks.teaser.insomnihack.ch/redeemEncryptedVoucher"
data = {"encryptedVoucher" : voucher, "passphrase" : known_flag[2:]}
headers = {"Content-type" : "application/json"}
resp = requests.post(url, json=data, headers=headers)
print resp.text
'''
|
import re
text=""
while True:
usrInput=input()
if usrInput!="":
text+=usrInput+"\n"
else:
break
lookForNumbers=re.compile(r"\d+")
nums=lookForNumbers.findall(text)
print(' '.join(nums))
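# example: entering "a1 b22" and then an empty line prints "1 22"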
|
from binaryninja import *
import re
def create_tmp(base, depth):
"""
Split the base file into several smaller ones based on depth.
For some reason Binary Ninja does not want to process the base
file correctly if it is too big.
"""
pass
def import_map(bv,function):
map_path = get_open_filename_input("MAP file...")
if not map_path:
log_error("No MAP file selected")
return
with open(map_path, "r") as f_map:
# First line is empty
if f_map.readline().strip():
log_error("Invalid MAP file format")
return
# Second line is field values
if not re.match(r"^Start\s+Length\s+Name\s+Class$", f_map.readline().strip()):
log_error("Invalid MAP file format")
return
# Read lines until the end and create symbols for valid functions
for current_line in f_map.readlines():
match = re.match(r"^(?P<address>[a-fA-F0-9]{5,16})\s+(function)\s+(?P<library>[\w]+)\.(?P<procedure>[\w]+):(?P<return>[\w]+);\s+[\w]+;$", current_line.strip())
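            # Example of a MAP line this regex accepts (hypothetical data):
            #   0046DF10       function   Unit1.FormCreate:Integer; public;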
if match:
address = int(match.group("address"), 16)
if not bv.get_function_at(address):
bv.add_function(address)
if not bv.get_symbol_at(address):
bv.define_user_symbol(Symbol(SymbolType.FunctionSymbol, address, match.group("procedure")))
PluginCommand.register_for_address("Import MAP from IDR...", "Import MAP", import_map)
|
'''
Welcome to yet another attempt at being a bad programmer, brought to you by
the rainy rain. Today, we're gonna try to implement the vigenere cypher, or
as they call it "Le Chiffrement de Vigenère". So come follow my adventures.
'''
#Step rei : create an alphabet.
import string
alphabet = list(string.ascii_lowercase)
#Step one : get the message to cypher.
missive = input("Please enter the message you wanna cypher, general : ")
#Step two : make a cypher key (it'll be randomly generated, you just enter the
#length of the key).
import random
key = int(input("please enter the length of the key you wanna use : "))
cle = ''
for i in range(key):
cle += alphabet[random.randint(0,25)]
#Step three : create the vigenere cypher
#Many problems have to be solved here.
def chiffre_vigenere(missive):
    missiveChiffre = ''
    for i in range(len(missive)):
        if not missive[i].isalpha():
            missiveChiffre += missive[i]
        else:
            # walk the whole key, not a hardcoded length of 3,
            # and wrap modulo 26 (25 would skip 'z')
            rotation = cle[i % len(cle)]
            pos = alphabet.index(missive[i].lower()) + alphabet.index(rotation)
            missiveChiffre += alphabet[pos % 26].upper()
    return missiveChiffre
#Step Four : test the vigenere cypher.
cypher = chiffre_vigenere(missive)
print(cypher)
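#Bonus step (sketch) : decryption is the same walk with the shifts subtracted.
#The result comes back lowercase, since the cypher uppercases everything.
def dechiffre_vigenere(cypher):
    missiveClaire = ''
    for i in range(len(cypher)):
        if not cypher[i].isalpha():
            missiveClaire += cypher[i]
        else:
            rotation = cle[i % len(cle)]
            pos = alphabet.index(cypher[i].lower()) - alphabet.index(rotation)
            missiveClaire += alphabet[pos % 26]
    return missiveClaire
print(dechiffre_vigenere(cypher))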
|
import os
import sys
import glob
import numpy as np
import pickle
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import torch
def main():
out_dir = sys.argv[1]
img_dir = sys.argv[2]
gt_file = sys.argv[3]
categories = sys.argv[4].split()
color = sys.argv[5]
    try:
        hyp_dir = sys.argv[6]
    except IndexError:
        hyp_dir = None
    try:
        hyp2_dir = sys.argv[7]
    except IndexError:
        hyp2_dir = None
if hyp_dir and hyp2_dir:
hier = True
else:
hier = False
c_map = {'color':color, 'marker':'x'}
gt_data, gt_relatives, gt_order, _, _ = torch.load(gt_file)
id_to_idx = {}
for idx, d in enumerate(gt_data):
id_to_idx[d["parent"] + d["id"]] = idx
# img_path = []
# img_names = []
# for ext in ['tif', 'jpg','jpeg','png']:
# ext_paths = glob.glob(os.path.join(img_dir, "*."+ext))
# img_paths.append(ext_paths)
# img_names.append(
# [os.path.basename(x).strip("."+ext)for x in ext_paths]
# )
c_pos = len(categories)
for img_name in gt_order.keys():
# --- TODO: change to any extension, no just tif
for ext in ["tif", "jpg", "JPG", "png", "JPEG", "jpeg"]:
if os.path.isfile(os.path.join(img_dir, img_name + "." + ext)):
img_path = os.path.join(img_dir, img_name + "." + ext)
break
print("Image not found for file", img_name)
img = mpimg.imread(img_path)
if len(img.shape) == 2:
plt.imshow(img, cmap='gray')
else:
plt.imshow(img)
z = gt_order[img_name]
points = np.zeros((len(z), 2))
for j, element in enumerate(z):
feats = gt_data[id_to_idx[img_name + element]]["features"]
center = np.array([feats[-2]+(feats[-1]-feats[-2])/2, feats[-4]+(feats[-3]-feats[-4])/2])
            points[j] = (center*(img.shape[0:2])).astype(int)[::-1] - np.array([30, 0])
#points[j] = (
# gt_data[id_to_idx[img_name + element]]["features"][c_pos:c_pos+2].numpy()
# * (img.shape[0:2][::-1])
#).astype(np.int) - np.array([30, 0])
#plt.plot(points[:, 0], points[:, 1], "go-")
plt.plot(points[:, 0], points[:, 1],
color='green',
marker='o',
markeredgecolor='black',
markerfacecolor='None',
)
if hyp_dir and not hyp2_dir:
points = np.zeros((len(z), 2))
with open(os.path.join(hyp_dir, img_name + ".pickle"), "rb") as fh:
s = pickle.load(fh)[0]
for j, element in enumerate(s):
feats = gt_data[id_to_idx[img_name + z[element]]]["features"]
center = np.array([feats[-2]+(feats[-1]-feats[-2])/2, feats[-4]+(feats[-3]-feats[-4])/2])
                points[j] = (center*(img.shape[0:2])).astype(int)[::-1] + np.array([30, 0])
#points[j] = (
# gt_data[id_to_idx[img_name + z[element]]]["features"][
# c_pos:c_pos+2
# ].numpy()
# * (img.shape[0:2][::-1])
#).astype(np.int) + np.array([30, 0])
plt.plot(points[:, 0], points[:, 1], **c_map)
plt.axis("off")
plt.tight_layout()
plt.savefig(
out_dir + "/" + img_name + ".svg", dpi=150, bbox_inches="tight"
)
plt.close()
if __name__ == "__main__":
main()
|
# Copyright (c) 2020 DeNA Co., Ltd.
# Licensed under The MIT License [see LICENSE for details]
# evaluation of policies or planning algorithms
import random
import time
import multiprocessing as mp
from .environment import prepare_env, make_env
from .connection import send_recv, accept_socket_connections, connect_socket_connection
from .agent import RandomAgent, RuleBasedAgent, Agent, EnsembleAgent, SoftAgent
network_match_port = 9876
def view(env, player=None):
if hasattr(env, 'view'):
env.view(player=player)
else:
print(env)
def view_transition(env):
if hasattr(env, 'view_transition'):
env.view_transition()
else:
pass
class NetworkAgentClient:
def __init__(self, agent, env, conn):
self.conn = conn
self.agent = agent
self.env = env
def run(self):
while True:
command, args = self.conn.recv()
if command == 'quit':
break
elif command == 'outcome':
print('outcome = %f' % args[0])
elif hasattr(self.agent, command):
if command == 'action' or command == 'observe':
view(self.env)
ret = getattr(self.agent, command)(self.env, *args, show=True)
if command == 'action':
player = args[0]
ret = self.env.action2str(ret, player)
else:
ret = getattr(self.env, command)(*args)
if command == 'update':
reset = args[1]
if reset:
self.agent.reset(self.env, show=True)
view_transition(self.env)
self.conn.send(ret)
class NetworkAgent:
def __init__(self, conn):
self.conn = conn
def update(self, data, reset):
return send_recv(self.conn, ('update', [data, reset]))
def outcome(self, outcome):
return send_recv(self.conn, ('outcome', [outcome]))
def action(self, player):
return send_recv(self.conn, ('action', [player]))
def observe(self, player):
return send_recv(self.conn, ('observe', [player]))
def exec_match(env, agents, critic, show=False, game_args={}):
''' match with shared game environment '''
if env.reset(game_args):
return None
for agent in agents.values():
agent.reset(env, show=show)
while not env.terminal():
if show:
view(env)
if show and critic is not None:
print('cv = ', critic.observe(env, None, show=False)[0])
turn_players = env.turns()
actions = {}
for p, agent in agents.items():
if p in turn_players:
actions[p] = agent.action(env, p, show=show)
else:
agent.observe(env, p, show=show)
if env.step(actions):
return None
if show:
view_transition(env)
outcome = env.outcome()
if show:
print('final outcome = %s' % outcome)
return outcome
def exec_network_match(env, network_agents, critic, show=False, game_args={}):
''' match with divided game environment '''
if env.reset(game_args):
return None
for p, agent in network_agents.items():
info = env.diff_info(p)
agent.update(info, True)
while not env.terminal():
if show:
view(env)
if show and critic is not None:
print('cv = ', critic.observe(env, None, show=False)[0])
turn_players = env.turns()
actions = {}
for p, agent in network_agents.items():
if p in turn_players:
action = agent.action(p)
actions[p] = env.str2action(action, p)
else:
agent.observe(p)
if env.step(actions):
return None
for p, agent in network_agents.items():
info = env.diff_info(p)
agent.update(info, False)
outcome = env.outcome()
for p, agent in network_agents.items():
agent.outcome(outcome[p])
return outcome
def build_agent(raw, env):
if raw == 'random':
return RandomAgent()
elif raw == 'rulebase':
return RuleBasedAgent()
return None
class Evaluator:
def __init__(self, env, args):
self.env = env
self.args = args
self.default_opponent = 'random'
def execute(self, models, args):
opponents = self.args.get('eval', {}).get('opponent', [])
if len(opponents) == 0:
opponent = self.default_opponent
else:
opponent = random.choice(opponents)
agents = {}
for p, model in models.items():
if model is None:
agents[p] = build_agent(opponent, self.env)
else:
agents[p] = Agent(model, self.args['observation'])
outcome = exec_match(self.env, agents, None)
if outcome is None:
print('None episode in evaluation!')
return None
return {'args': args, 'result': outcome, 'opponent': opponent}
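# wp_func maps outcomes in [-1, 1] to a win rate: each outcome k contributes
# (k + 1) / 2 of a win, weighted by how many games ended with that outcome.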
def wp_func(results):
games = sum([v for k, v in results.items() if k is not None])
win = sum([(k + 1) / 2 * v for k, v in results.items() if k is not None])
if games == 0:
return 0.0
return win / games
def eval_process_mp_child(agents, critic, env_args, index, in_queue, out_queue, seed, show=False):
random.seed(seed + index)
env = make_env({**env_args, 'id': index})
while True:
args = in_queue.get()
if args is None:
break
g, agent_ids, pat_idx, game_args = args
print('*** Game %d ***' % g)
agent_map = {env.players()[p]: agents[ai] for p, ai in enumerate(agent_ids)}
if isinstance(list(agent_map.values())[0], NetworkAgent):
outcome = exec_network_match(env, agent_map, critic, show=show, game_args=game_args)
else:
outcome = exec_match(env, agent_map, critic, show=show, game_args=game_args)
out_queue.put((pat_idx, agent_ids, outcome))
out_queue.put(None)
def evaluate_mp(env, agents, critic, env_args, args_patterns, num_process, num_games, seed):
in_queue, out_queue = mp.Queue(), mp.Queue()
args_cnt = 0
total_results, result_map = [{} for _ in agents], [{} for _ in agents]
print('total games = %d' % (len(args_patterns) * num_games))
time.sleep(0.1)
for pat_idx, args in args_patterns.items():
for i in range(num_games):
if len(agents) == 2:
# When playing two player game,
# the number of games with first or second player is equalized.
first_agent = 0 if i < (num_games + 1) // 2 else 1
tmp_pat_idx, agent_ids = (pat_idx + '-F', [0, 1]) if first_agent == 0 else (pat_idx + '-S', [1, 0])
else:
tmp_pat_idx, agent_ids = pat_idx, random.sample(list(range(len(agents))), len(agents))
in_queue.put((args_cnt, agent_ids, tmp_pat_idx, args))
for p in range(len(agents)):
result_map[p][tmp_pat_idx] = {}
args_cnt += 1
network_mode = agents[0] is None
if network_mode: # network battle mode
agents = network_match_acception(num_process, env_args, len(agents), network_match_port)
else:
agents = [agents] * num_process
for i in range(num_process):
in_queue.put(None)
args = agents[i], critic, env_args, i, in_queue, out_queue, seed
if num_process > 1:
mp.Process(target=eval_process_mp_child, args=args).start()
if network_mode:
for agent in agents[i]:
agent.conn.close()
else:
eval_process_mp_child(*args, show=True)
finished_cnt = 0
while finished_cnt < num_process:
ret = out_queue.get()
if ret is None:
finished_cnt += 1
continue
pat_idx, agent_ids, outcome = ret
if outcome is not None:
for idx, p in enumerate(env.players()):
agent_id = agent_ids[idx]
oc = outcome[p]
result_map[agent_id][pat_idx][oc] = result_map[agent_id][pat_idx].get(oc, 0) + 1
total_results[agent_id][oc] = total_results[agent_id].get(oc, 0) + 1
for p, r_map in enumerate(result_map):
print('---agent %d---' % p)
for pat_idx, results in r_map.items():
print(pat_idx, {k: results[k] for k in sorted(results.keys(), reverse=True)}, wp_func(results))
print('total', {k: total_results[p][k] for k in sorted(total_results[p].keys(), reverse=True)}, wp_func(total_results[p]))
def network_match_acception(n, env_args, num_agents, port):
waiting_conns = []
accepted_conns = []
for conn in accept_socket_connections(port):
if len(accepted_conns) >= n * num_agents:
break
waiting_conns.append(conn)
if len(waiting_conns) == num_agents:
conn = waiting_conns[0]
accepted_conns.append(conn)
waiting_conns = waiting_conns[1:]
conn.send(env_args) # send accept with environment arguments
agents_list = [
[NetworkAgent(accepted_conns[i * num_agents + j]) for j in range(num_agents)]
for i in range(n)
]
return agents_list
def get_model(env, model_path):
import torch
from .model import ModelWrapper
model = env.net()()
model.load_state_dict(torch.load(model_path))
model.eval()
return ModelWrapper(model)
def client_mp_child(env_args, model_path, conn):
env = make_env(env_args)
model = get_model(env, model_path)
NetworkAgentClient(Agent(model), env, conn).run()
def eval_main(args, argv):
env_args = args['env_args']
prepare_env(env_args)
env = make_env(env_args)
model_path = argv[0] if len(argv) >= 1 else 'models/latest.pth'
num_games = int(argv[1]) if len(argv) >= 2 else 100
num_process = int(argv[2]) if len(argv) >= 3 else 1
agent1 = Agent(get_model(env, model_path))
critic = None
print('%d process, %d games' % (num_process, num_games))
    seed = random.randrange(10 ** 8)
print('seed = %d' % seed)
agents = [agent1] + [RandomAgent() for _ in range(len(env.players()) - 1)]
evaluate_mp(env, agents, critic, env_args, {'default': {}}, num_process, num_games, seed)
def eval_server_main(args, argv):
print('network match server mode')
env_args = args['env_args']
prepare_env(env_args)
env = make_env(env_args)
num_games = int(argv[0]) if len(argv) >= 1 else 100
num_process = int(argv[1]) if len(argv) >= 2 else 1
print('%d process, %d games' % (num_process, num_games))
    seed = random.randrange(10 ** 8)
print('seed = %d' % seed)
evaluate_mp(env, [None] * len(env.players()), None, env_args, {'default': {}}, num_process, num_games, seed)
def eval_client_main(args, argv):
print('network match client mode')
while True:
try:
host = argv[1] if len(argv) >= 2 else 'localhost'
conn = connect_socket_connection(host, network_match_port)
env_args = conn.recv()
except EOFError:
break
model_path = argv[0] if len(argv) >= 1 else 'models/latest.pth'
mp.Process(target=client_mp_child, args=(env_args, model_path, conn)).start()
conn.close()
|
import plotly.plotly as py
import plotly.graph_objs as go
# lon/lat were swapped (Rome sits near lat 42, lon 12) and 'marker' expects a
# style dict rather than labels, so the labels go in 'text' instead.
trace = dict(
    type='scattergeo',
    lon = [12, 22], lat = [42, 39],
    text = ['Rome', 'Greece'],
    mode = 'markers')
py.iplot([trace])
import dsz, dsz.version.checks.windows
import ops, ops.cmd
import os.path
def getregvalue(hive, key, value):
cmd = ops.cmd.getDszCommand('registryquery')
cmd.hive = hive
cmd.key = key
if (value != ''):
cmd.value = value
obj = cmd.execute()
if cmd.success:
if (value == ''):
for key in obj.key:
for value in key.value:
if (value.name == ''):
return (key.updatedate, key.updatetime, value.value)
else:
return (obj.key[0].updatedate, obj.key[0].updatetime, obj.key[0].value[0].value)
else:
return (None, None, None)
def getdirinfo(pathtocheck):
cmd = ops.cmd.getDszCommand('dir', path=('"%s"' % os.path.dirname(pathtocheck)), mask=('"%s"' % os.path.basename(pathtocheck)))
obj = cmd.execute()
if cmd.success:
try:
return (obj.diritem[0].fileitem[0].filetimes.accessed.time, obj.diritem[0].fileitem[0].filetimes.created.time, obj.diritem[0].fileitem[0].filetimes.modified.time)
except:
pass
return (None, None, None)
def checkmvinprocserver():
(moddate, modtime, value) = getregvalue('l', 'SOFTWARE\\Classes\\CLSID\\{1945f23e-0573-4e7e-9641-37215654bce4}', '')
if (value == 'Internet Traffic Handler'):
dsz.ui.Echo(('Internet Traffic Handler key found [%s %s]' % (moddate, modtime)), dsz.GOOD)
else:
dsz.ui.Echo('Internet Traffic Handler key not found', dsz.ERROR)
return
(moddate, modtime, value) = getregvalue('l', 'SOFTWARE\\Classes\\CLSID\\{1945f23e-0573-4e7e-9641-37215654bce4}\\InprocServer32', '')
if (value is not None):
dsz.ui.Echo(('InProcServer32 key found [%s %s]' % (moddate, modtime)), dsz.GOOD)
(fileaccessed, filecreated, filemodified) = getdirinfo(value)
if (fileaccessed is not None):
dsz.ui.Echo(('Found %s [a:%s , c:%s , m:%s]' % (value, fileaccessed, filecreated, filemodified)), dsz.GOOD)
else:
dsz.ui.Echo(('Did not find %s' % value), dsz.ERROR)
else:
dsz.ui.Echo('InProcServer32 key not found', dsz.ERROR)
(moddate, modtime, value) = getregvalue('l', 'SOFTWARE\\Classes\\CLSID\\{1945f23e-0573-4e7e-9641-37215654bce4}\\InprocServer32', 'ThreadingModel')
if (value is not None):
dsz.ui.Echo(('ThreadingModel key found (%s) [%s %s]' % (value, moddate, modtime)), dsz.GOOD)
else:
dsz.ui.Echo('ThreadingModel key not found', dsz.ERROR)
(moddate, modtime, value) = getregvalue('l', 'SOFTWARE\\Classes\\Protocols\\Filter\\text/html', 'CLSID')
if (value is not None):
dsz.ui.Echo(('text/html key found (%s) [%s %s]' % (value, moddate, modtime)), dsz.GOOD)
else:
dsz.ui.Echo('text/html key not found', dsz.ERROR)
def checkvalinprocserver():
    if dsz.version.checks.windows.IsVistaOrGreater():
        (moddate, modtime, value) = getregvalue('l', 'SOFTWARE\\Classes\\CLSID\\{C90250F3-4D7D-4991-9B69-A5C5BC1C2AE6}\\InProcServer32', '')
    else:
        (moddate, modtime, value) = getregvalue('l', 'SOFTWARE\\Classes\\CLSID\\{B8DA6310-E19B-11D0-933C-00A0C90DCAA9}\\InProcServer32', '')
if (value is not None):
dsz.ui.Echo(('InProcServer32 key found [%s %s]' % (moddate, modtime)), dsz.GOOD)
(fileaccessed, filecreated, filemodified) = getdirinfo(value)
if (fileaccessed is not None):
dsz.ui.Echo(('Found %s [a:%s , c:%s , m:%s]' % (value, fileaccessed, filecreated, filemodified)), dsz.GOOD)
else:
dsz.ui.Echo(('Did not find %s' % value), dsz.ERROR)
else:
dsz.ui.Echo('InProcServer32 key not found', dsz.ERROR)
def checkstate(guid):
(moddate, modtime, value) = getregvalue('l', ('SOFTWARE\\Classes\\CLSID\\{%s}\\TypeLib' % guid), 'DigitalProductId')
if (value is not None):
dsz.ui.Echo(('State information found (DigitalProductId) [%s %s]' % (moddate, modtime)), dsz.GOOD)
dsz.ui.Echo(('State information is %s bytes in length' % (len(value) / 2)), dsz.GOOD)
else:
dsz.ui.Echo(('State information not found in %s' % guid), dsz.ERROR)
def checkclientid(guid):
(moddate, modtime, value) = getregvalue('l', ('SOFTWARE\\Classes\\CLSID\\{%s}\\TypeLib' % guid), '')
if (value is not None):
dsz.ui.Echo(('Client ID found (%s) [%s %s]' % (value, moddate, modtime)), dsz.GOOD)
dsz.ui.Echo(('Client ID: %s' % int(decodeguid(value, '8C936AF9243D11D08ED400C04FC2C17B'), 16)), dsz.GOOD)
else:
dsz.ui.Echo(('Client ID not found in %s' % guid), dsz.ERROR)
def checkversion(guid):
(moddate, modtime, value) = getregvalue('l', ('SOFTWARE\\Classes\\CLSID\\{%s}\\Version' % guid), '')
if (value is not None):
dsz.ui.Echo(('Version found (%s) [%s %s]' % (value, moddate, modtime)), dsz.GOOD)
else:
dsz.ui.Echo(('Version not found in %s' % guid), dsz.ERROR)
def checkselfdelete(guid):
(moddate, modtime, value) = getregvalue('l', ('SOFTWARE\\Classes\\CLSID\\{%s}\\MiscStatus' % guid), '')
if (value is not None):
dsz.ui.Echo(('Self-delete found (%s) [%s %s]' % (value, moddate, modtime)), dsz.GOOD)
if (value == '0'):
dsz.ui.Echo('Self-delete reports 0x0', dsz.GOOD)
else:
dsz.ui.Echo(('Self-delete reports 0x%s' % decodeguid(value, 'ce0f73870bb5e60b8b4e25c48cebf039')), dsz.ERROR)
else:
dsz.ui.Echo(('Self-delete not found in %s' % guid), dsz.ERROR)
def decodeguid(guid, key):
guid = guid.replace('-', '').replace('{', '').replace('}', '')
decryptleft = int(guid[0:16], 16)
decryptright = int(guid[16:32], 16)
leftkey = int(key[0:16], 16)
rightkey = int(key[16:32], 16)
return ('%016X%016X' % ((decryptleft ^ leftkey), (decryptright ^ rightkey)))
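# Note: decodeguid XORs each 64-bit half of the GUID with the matching half of
# the fixed key, so decoding an all-zero GUID would return the key itself.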
def main():
dsz.ui.Echo('==================================')
dsz.ui.Echo('=============== VAL ==============')
dsz.ui.Echo('==================================')
dsz.ui.Echo('Checking for location on disk')
checkvalinprocserver()
dsz.ui.Echo('')
dsz.ui.Echo('Checking state information')
checkstate('6AF33D21-9BC5-4F65-8654-B8059B822D91')
dsz.ui.Echo('')
dsz.ui.Echo('Checking client ID')
checkclientid('77032DAA-B7F2-101B-A1F0-01C29183BCA1')
dsz.ui.Echo('')
dsz.ui.Echo('Checking version')
checkversion('77032DAA-B7F2-101B-A1F0-01C29183BCA1')
dsz.ui.Echo('')
dsz.ui.Echo('Checking self-deletion')
checkselfdelete('77032DAA-B7F2-101B-A1F0-01C29183BCA1')
dsz.ui.Echo('')
dsz.ui.Echo('==================================')
dsz.ui.Echo('=============== MV ===============')
dsz.ui.Echo('==================================')
dsz.ui.Echo('Checking for location on disk')
checkmvinprocserver()
dsz.ui.Echo('')
dsz.ui.Echo('Checking state information')
checkstate('B812789D-6FDF-97AB-834B-9F4376B2C8E1')
dsz.ui.Echo('')
dsz.ui.Echo('Checking client ID')
checkclientid('B812789D-6FDF-97AB-834B-9F4376B2C8E1')
dsz.ui.Echo('')
dsz.ui.Echo('Checking version')
checkversion('B812789D-6FDF-97AB-834B-9F4376B2C8E1')
dsz.ui.Echo('')
dsz.ui.Echo('Checking self-deletion')
checkselfdelete('B812789D-6FDF-97AB-834B-9F4376B2C8E1')
dsz.ui.Echo('')
if (__name__ == '__main__'):
try:
main()
except RuntimeError as e:
dsz.ui.Echo(('\nCaught RuntimeError: %s' % e), dsz.ERROR) |
# -*- coding: utf-8 -*-
"""
@author: Uwe Ziegenhagen, [email protected]
"""
import toml
import pandas as pd
import numpy as np
from Transaction import Transaction
from Classification import Classification
from Category import Category
from Account import Account
import sqlalchemy
t = Transaction()
temp_classification = Classification()
temp_category = Category()
temp_account = Account()
configuration = toml.load('settings.toml')
db_database = configuration["database"]
db_user = configuration["user"]
db_password = configuration["password"]
db_host=configuration["host"]
database_connection = sqlalchemy.create_engine('mysql+mysqlconnector://{0}:{1}@{2}/{3}'.
format(db_user, db_password,
db_host, db_database))
class PyQifParser():
"""
Parses a QIF File generated by (German) Quicken 2019.
"""
def __init__(self, qiffile, encoding='cp1252'):
self.transactions = pd.DataFrame(columns=('Date', 'Amount',
'Category', 'Reference',
'Payee', 'Description',
'Cleared'))
self.classifications = pd.DataFrame(columns=('Label', 'Description'))
self.categories = pd.DataFrame(columns=('Label', 'Description',
'Parent', 'Type'))
self.accounts = pd.DataFrame(columns=('Label', 'Description', 'Type'))
self.__qiffile = qiffile
self.__mode = None
self.encoding = encoding
self.dateformat = '%m.%d.%y'
self.__autoswitch = None
def get_transactions(self):
"""
Returns the dataframe for the transactions, sets columns 'Month'
and 'Year' based on the transaction date
"""
# add columns for Month and Year
self.transactions['Reference'] = self.transactions['Reference'].fillna(value=0)
self.transactions['Year'] = self.transactions['Date'].dt.year
self.transactions['Month'] = self.transactions['Date'].dt.month
return self.transactions
def df_memberpayments(self, outputfile):
"""
calculates a Pivot of payments from the transactions for
a specific year. This is specific for usage at Dingfabrik
"""
t = self.get_transactions()
t = t[t['Category'].str.startswith('Mitgliedsbeitrag_2110', na=False)]
t['Mitglied'] = t['Category'].str.split("/",expand=True)[1] # + ' - ' + t['Payee']
table = pd.pivot_table(t, values='Amount', index=['Mitglied'], columns=['Year', 'Month'], aggfunc=np.sum)
# df['index1'] = df.index
table.to_excel(outputfile)
def transactions_to_pickle(self, path):
"""
Exports the transactions into a pickle file
"""
# add columns for Month and Year
self.transactions['Year'] = self.transactions['Date'].dt.year
self.transactions['Month'] = self.transactions['Date'].dt.month
self.transactions.to_pickle(path)
def to_excel(self, outputfile):
"""
Exports the transactions, accounts, classifications and
categories into an Excel-file
"""
with pd.ExcelWriter(outputfile, engine='xlsxwriter') as writer:
self.transactions.to_excel(writer, sheet_name='Transactions', index=False)
self.accounts.to_excel(writer, sheet_name='Accounts', index=False)
self.classifications.to_excel(writer, sheet_name='Classifications', index=False)
self.categories.to_excel(writer, sheet_name='Categories', index=False)
def to_beancount(self, outputfile):
"""
        Exports the transactions into a beancount-compatible text file
@TODO: waits for detailed specifications, currency is still hardwired
"""
self.transactions['bcdate'] = self.transactions['Date'].dt.strftime('%Y-%m-%d')
with open(outputfile, "w", encoding="utf-8") as writer:
for entry in self.transactions.index:
writer.write(self.transactions['bcdate'][entry] + ' * "' + str(self.transactions['Description'][entry])[:50] + '"\n')
writer.write('\t' + self.transactions['Category'][entry] + '\t'*10 + str(self.transactions['Amount'][entry]) + ' EUR\n')
writer.write('\t' + self.transactions['Category'][entry] + '\t'*10 + str(-1 * self.transactions['Amount'][entry]) + ' EUR\n\n')
def mode(self, mode):
"""
The parser can be in one of several modes. The mode determines,
how the individual lines are handled.
"""
if mode not in ('classifications', 'categories', 'transactions',
'accounts', 'other', 'options'):
raise ValueError('Unknown mode "' + mode + '"')
else:
self.__mode = mode
def handle_option(self, line):
"""
sets the date format and the autoswitch (used with list of accounts)
MDY could be different for other languages
"""
splits = line.split(':')
if splits[1][:-1] == 'MDY':
self.dateformat = '%m.%d.%y'
elif splits[1][:-1] == 'AutoSwitch':
self.__autoswitch = True # we are in the list of accounts
def handle_other(self, line):
"""
handles the individual lines of the file
based on the current mode
"""
if self.__mode == 'classifications':
if line.startswith('N'):
temp_classification.clear()
temp_classification.label = line[1:-1]
elif line.startswith('D'):
temp_classification.description = line[1:-1]
elif line.startswith('^'):
self.classifications = self.classifications.append(
{'Label': temp_classification.label,
'Description': temp_classification.description},
ignore_index=True)
elif self.__mode == 'categories':
if line.startswith('N'):
temp_category.clear()
temp_category.label = line[1:-1]
            elif line.startswith('S'):
                temp_category.parent = line[1:-1]
            elif line.startswith('D'):
                # strip the leading 'D' marker, as the other handlers do
                temp_category.description = line[1:-1]
elif line[:-1] in ['I', 'E']:
temp_category.type = line[:-1]
elif line.startswith('^'):
self.categories = self.categories.append(
{'Label': temp_category.label,
'Parent': temp_category.parent,
'Description': temp_category.description,
'Type':temp_category.type},
ignore_index=True)
elif self.__mode == 'accounts':
if line.startswith('N'):
temp_account.clear()
temp_account.label = line[1:-1]
elif line.startswith('T'):
temp_account.type = line[1:-1]
elif line.startswith('D'):
temp_account.description = line[1:-1]
elif line.startswith('^'):
self.accounts = self.accounts.append(
{'Label': temp_account.label,
'Type' : temp_account.type,
'Description': temp_account.description},
ignore_index=True)
elif self.__mode == 'transactions':
if line.startswith('D'):
t.clear()
t.date = line[1:-1]
elif line.startswith('T'):
t.amount = line[1:-1]
elif line.startswith('C'):
t.cleared = line[1:-1]
elif line.startswith('N'):
t.reference = line[1:-1]
elif line.startswith('P'):
t.payee = line[1:-1]
elif line.startswith('M'):
t.description = line[1:-1]
elif line.startswith('L'):
t.category = line[1:-1]
elif line.startswith('^'):
self.transactions = self.transactions.append(
{'Date': pd.to_datetime(t.date, format=self.dateformat),
'Amount': t.amount, 'Cleared': t.cleared,
'Category': t.category, 'Payee': t.payee,
'Reference': t.reference,
'Description': t.description},
ignore_index=True)
def parse(self):
"""
parses the QIF file and fills the internal data structures
"""
with open(self.__qiffile, encoding=self.encoding) as inputfile:
for line in inputfile:
if line.startswith('!Option'):
self.mode('options')
self.handle_option(line)
elif line.startswith('!Type:Class'):
self.mode('classifications')
elif line.startswith('!Type:Cat'):
self.mode('categories')
elif line.startswith('!Type:Bank'):
self.mode('transactions')
elif line.startswith('!Account'):
if self.__autoswitch:
self.mode('accounts')
elif line.startswith('!Clear:AutoSwitch'):
self.__autoswitch = False
elif line.startswith('!'):
self.mode('other')
else:
self.handle_other(line)
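# Sample of the QIF structure parse() expects (hypothetical data):
#   !Type:Bank
#   D12.31.20
#   T-42.00
#   PSupermarket
#   MWeekly shopping
#   LGroceries
#   ^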
p = PyQifParser(r'C:\Users\Uwe\Nextcloud\WG\WG-Alles_20210321_145112.QIF')
p.parse()
#p.df_memberpayments(r'O:\DF\Pivot_Buchungen_2020.xlsx')
p.to_excel(r'C:\Users\Uwe\Nextcloud\WG\WG-Aaa.xlsx')
p.get_transactions().to_sql(con=database_connection, name='Buchungen', if_exists='replace') |
# Copyright 2021
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.urls import path
#
from archive.views import (
delete_collection,
delete_resource,
download,
edit_collection,
edit_resource,
home,
new_collection,
new_resource,
preview,
search,
view_collection,
view_resource,
)
app_name = "archive"
urlpatterns = [
path("", home, name="home"),
path("search", search, name="search"),
path("resource<path:path>", view_resource, name="resource_view"),
path("resource", view_resource, name="resource_view"),
path("new/collection<path:parent>", new_collection, name="new_collection"),
path("edit/collection<path:path>", edit_collection, name="edit_collection"),
path("delete/collection<path:path>", delete_collection, name="delete_collection"),
path("new/resource<path:parent>", new_resource, name="new_resource"),
path("edit/resource<path:path>", edit_resource, name="edit_resource"),
path("delete/resource<path:path>", delete_resource, name="delete_resource"),
path("view<path:path>", view_collection, name="view"),
path("view", view_collection, name="view"),
path("download<path:path>", download, name="download"),
path("preview<path:path>", preview, name="preview"),
]
|
import codecs
import pkg_resources
import cryptopals.challenge_1 as challenge_1
import cryptopals.challenge_2 as challenge_2
import cryptopals.challenge_3 as challenge_3
import cryptopals.challenge_4 as challenge_4
import cryptopals.challenge_5 as challenge_5
import cryptopals.challenge_7 as challenge_7
def test_challenge_1():
message = '49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d'
expected_output = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
actual_output = challenge_1.base64_from_hex(message)
assert expected_output == actual_output
def test_challenge_2():
key = '686974207468652062756c6c277320657965'
encrypted_message = '1c0111001f010100061a024b53535009181c'
expected_output = '746865206b696420646f6e277420706c6179'
actual_output = challenge_2.decrypt_xor_encrypted_message(key, encrypted_message)
assert expected_output == actual_output
def test_challenge_3():
encrypted_message = '1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'
expected_output = "Cooking MC's like a pound of bacon"
actual_output = challenge_3.decrypt_xor_encrypted_message(encrypted_message)
assert expected_output == actual_output
def test_challenge_4():
encrypted_messages = pkg_resources.resource_string('cryptopals.resources', '4.txt')
encrypted_messages = codecs.decode(encrypted_messages, 'utf').split('\n')
expected_output = 'Now that the party is jumping'
actual_output = challenge_4.find_xor_encrypted_message(encrypted_messages)
assert expected_output == actual_output
def test_challenge_5():
message = "Burning 'em, if you ain't quick and nimble\nI go crazy when I hear a cymbal"
expected_output = ('0b3637272a2b2e63622c2e69692a23693a2a3c6324202d623d63343c2a26226324272765272'
'a282b2f20430a652e2c652a3124333a653e2b2027630c692b20283165286326302e27282f')
actual_output = challenge_5.xor_with_repeating_key('ICE', message)
assert expected_output == actual_output
def test_challenge_7():
key = 'YELLOW SUBMARINE'
encrypted_message = pkg_resources.resource_string('cryptopals.resources', '7.txt')
expected_output = pkg_resources.resource_string('cryptopals.resources', '7_decrypted.txt')
actual_output = challenge_7.decrypt_using_aes_ecb(key, encrypted_message)
assert expected_output == actual_output
|
#!/bin/python3
"""
Task:
Modify the previous program such that only the users Alice and Bob are greeted with their names.
"""
nameBuffer=input("State your name: ")
if((nameBuffer=="Bob") or (nameBuffer=="Alice")):
print(f"Greetings {nameBuffer}")
else:
exit() |
from urllib.parse import urlencode
import pytest
from arq import Worker
from pytest_toolbox.comparison import CloseToNow
from em2.core import Action, ActionTypes, get_flag_counts
from .conftest import Factory
@pytest.mark.parametrize(
'user_actions',
[
[
{'conv': 'sent', 'flag': 'delete', 'change': {'sent': -1, 'deleted': 1}},
{'conv': 'sent', 'flag': 'restore', 'change': {'sent': 1, 'deleted': -1}},
],
[
{'conv': 'draft', 'flag': 'delete', 'change': {'draft': -1, 'deleted': 1}},
{'conv': 'draft', 'flag': 'restore', 'change': {'draft': 1, 'deleted': -1}},
],
[
{'conv': 'sent', 'flag': 'inbox', 'change': {'inbox': 1}},
{'conv': 'sent', 'flag': 'archive', 'change': {'inbox': -1}},
],
[{'conv': 'sent', 'flag': 'spam', 'status': 400, 'message': 'you cannot spam your own conversations'}],
[{'conv': 'inbox', 'flag': 'ham', 'status': 409, 'message': 'conversation not spam'}],
[
{'conv': 'inbox', 'flag': 'spam', 'change': {'inbox': -1, 'spam': 1}},
{'conv': 'inbox', 'flag': 'delete', 'change': {'deleted': 1, 'spam': -1}},
{'conv': 'inbox', 'flag': 'restore', 'change': {'deleted': -1, 'spam': 1}},
],
[
{'conv': 'inbox_unseen', 'flag': 'delete', 'change': {'deleted': 1, 'inbox': -1, 'unseen': -1}},
{'conv': 'inbox_unseen', 'flag': 'spam', 'change': {}},
{'conv': 'inbox_unseen', 'flag': 'ham', 'change': {}},
],
[
{'conv': 'inbox', 'flag': 'spam', 'change': {'inbox': -1, 'spam': 1}},
{'conv': 'inbox', 'flag': 'ham', 'change': {'inbox': 1, 'spam': -1}},
],
[
{'conv': 'inbox_unseen', 'flag': 'archive', 'change': {'archive': 1, 'inbox': -1, 'unseen': -1}},
{'conv': 'inbox_unseen', 'flag': 'spam', 'change': {'archive': -1, 'spam': 1}},
{'conv': 'inbox_unseen', 'flag': 'ham', 'change': {'archive': 1, 'spam': -1}},
],
[
{'conv': 'inbox_unseen', 'flag': 'archive', 'change': {'archive': 1, 'inbox': -1, 'unseen': -1}},
{'conv': 'inbox_unseen', 'flag': 'spam', 'change': {'archive': -1, 'spam': 1}},
{'conv': 'inbox_unseen', 'flag': 'spam', 'status': 409, 'message': 'conversation already spam'},
],
[
{'conv': 'inbox', 'flag': 'archive', 'change': {'archive': 1, 'inbox': -1}},
{'conv': 'inbox', 'flag': 'archive', 'status': 409, 'message': 'conversation not in inbox'},
],
[{'conv': 'inbox', 'flag': 'inbox', 'status': 409, 'message': 'conversation already in inbox'}],
[{'conv': 'inbox', 'flag': 'restore', 'status': 409, 'message': 'conversation not deleted'}],
[{'conv': 'inbox', 'flag': 'bad', 'status': 400, 'message': 'Invalid query data'}],
[
{'conv': 'inbox', 'flag': 'delete', 'change': {'deleted': 1, 'inbox': -1}},
{'conv': 'inbox', 'flag': 'inbox', 'status': 400, 'message': 'deleted, spam or draft conversation cannot'},
],
[
{'conv': 'inbox', 'flag': 'delete', 'change': {'deleted': 1, 'inbox': -1}},
{'conv': 'inbox', 'flag': 'delete', 'status': 409, 'message': 'conversation already deleted'},
],
[
{'conv': 'inbox', 'flag': 'archive', 'change': {'archive': 1, 'inbox': -1}},
{'conv': 'inbox', 'flag': 'delete', 'change': {'archive': -1, 'deleted': 1}},
{'conv': 'inbox', 'flag': 'restore', 'change': {'archive': 1, 'deleted': -1}},
],
[
{'conv': 'sent', 'flag': 'inbox', 'change': {'inbox': 1}},
{'conv': 'sent', 'flag': 'archive', 'change': {'inbox': -1}},
],
[{'conv': 'draft', 'flag': 'inbox', 'status': 400, 'message': 'deleted, spam or draft conversation cannot'}],
[{'conv': 'draft', 'flag': 'archive', 'status': 409, 'message': 'conversation not in inbox'}],
[{'conv': 'inbox_unseen', 'mark_seen': True, 'change': {'unseen': -1}}],
[
{'conv': 'inbox_unseen', 'flag': 'archive', 'change': {'inbox': -1, 'unseen': -1, 'archive': 1}},
{'conv': 'inbox_unseen', 'mark_seen': True},
],
],
)
async def test_set_flag(cli, factory: Factory, conns, user_actions):
user = await factory.create_user()
other_user = await factory.create_user()
p = [{'email': user.email}]
convs = {
'draft': await factory.create_conv(),
'sent': await factory.create_conv(publish=True),
'inbox_unseen': await factory.create_conv(publish=True, session_id=other_user.session_id, participants=p),
'inbox': await factory.create_conv(publish=True, session_id=other_user.session_id, participants=p),
}
await factory.act(convs['inbox'].id, Action(actor_id=user.id, act=ActionTypes.seen))
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 2, 'unseen': 1, 'draft': 1, 'sent': 1, 'archive': 0, 'all': 4, 'spam': 0, 'deleted': 0}
for i, action in enumerate(user_actions):
conv = action['conv']
if action.get('mark_seen'):
await factory.act(convs[conv].id, Action(actor_id=user.id, act=ActionTypes.seen))
counts_new = await get_flag_counts(conns, user.id)
else:
flag = action['flag']
r = await cli.post_json(
factory.url('ui:set-conv-flag', conv=convs[conv].key, query={'flag': flag}),
status=action.get('status', 200),
)
obj = await r.json()
if r.status != 200:
assert obj['message'].startswith(action['message']), i
continue
counts_new = obj['counts']
changes = {}
for k, v in counts_new.items():
diff = v - counts[k]
if diff:
changes[k] = diff
assert changes == action.get('change', {}), (i, counts_new)
counts = counts_new
true_counts = await get_flag_counts(conns, factory.user.id, force_update=True)
assert true_counts == counts
@pytest.fixture(name='conv')
async def _fix_conv(cli, factory: Factory, db_conn):
await factory.create_user()
creator = await factory.create_user()
prts = [{'email': factory.user.email}]
conv = await factory.create_conv(session_id=creator.session_id, publish=True, participants=prts)
assert 2 == await db_conn.fetchval('select count(*) from participants')
return conv
async def test_flags_inbox(cli, factory: Factory, conv, db_conn, conns):
u_id = factory.user.id
flags = await get_flag_counts(conns, u_id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
assert await db_conn.fetchval('select inbox from participants where user_id=$1', u_id) is True
r = await cli.post_json(factory.url('ui:set-conv-flag', conv=conv.key, query=dict(flag='archive')))
assert await r.json() == {
'conv_flags': {'inbox': False, 'archive': True, 'deleted': False, 'spam': False},
'counts': {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 1, 'all': 1, 'spam': 0, 'deleted': 0},
}
assert await db_conn.fetchval('select inbox from participants where user_id=$1', u_id) is None
r = await cli.post_json(factory.url('ui:set-conv-flag', conv=conv.key, query=dict(flag='inbox')))
assert await r.json() == {
'conv_flags': {'inbox': True, 'archive': False, 'deleted': False, 'spam': False},
'counts': {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0},
}
assert await db_conn.fetchval('select inbox from participants where user_id=$1', u_id) is True
async def test_flags_deleted(cli, factory: Factory, db_conn, conv):
u_id = factory.user.id
assert await db_conn.fetchval('select deleted from participants where user_id=$1', u_id) is None
assert await db_conn.fetchval('select deleted_ts from participants where user_id=$1', u_id) is None
r = await cli.post_json(factory.url('ui:set-conv-flag', conv=conv.key, query=dict(flag='delete')))
assert await r.json() == {
'conv_flags': {'inbox': False, 'archive': False, 'deleted': True, 'spam': False},
'counts': {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 1},
}
assert await db_conn.fetchval('select deleted from participants where user_id=$1', u_id) is True
assert await db_conn.fetchval('select deleted_ts from participants where user_id=$1', u_id) == CloseToNow()
r = await cli.post_json(factory.url('ui:set-conv-flag', conv=conv.key, query=dict(flag='restore')))
assert await r.json() == {
'conv_flags': {'inbox': True, 'archive': False, 'deleted': False, 'spam': False},
'counts': {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0},
}
assert await db_conn.fetchval('select deleted from participants where user_id=$1', u_id) is None
assert await db_conn.fetchval('select deleted_ts from participants where user_id=$1', u_id) is None
async def test_seen(factory: Factory, conv, conns):
flags = await get_flag_counts(conns, factory.user.id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
await factory.act(conv.id, Action(actor_id=factory.user.id, act=ActionTypes.seen))
flags = await get_flag_counts(conns, factory.user.id)
assert flags == {'inbox': 1, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
# already seen, shouldn't change seen count
await factory.act(conv.id, Action(actor_id=factory.user.id, act=ActionTypes.msg_add, body='testing'))
flags = await get_flag_counts(conns, factory.user.id)
assert flags == {'inbox': 1, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
flags_empty = {
'flags': {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 0, 'spam': 0, 'deleted': 0},
'labels': [],
}
async def test_flag_counts_blank(cli, factory: Factory):
await factory.create_user()
obj = await cli.get_json(factory.url('ui:conv-counts'))
assert obj == flags_empty
async def test_flag_counts_publish(cli, factory: Factory):
await factory.create_user()
u2 = await factory.create_user()
obj = await cli.get_json(factory.url('ui:conv-counts'))
assert obj == flags_empty
await cli.get_json(factory.url('ui:conv-counts', session_id=u2.session_id))
await factory.create_conv(publish=True, participants=[{'email': u2.email}])
obj = await cli.get_json(factory.url('ui:conv-counts'))
assert obj == {
'flags': {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 1, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0},
'labels': [],
}
obj = await cli.get_json(factory.url('ui:conv-counts', session_id=u2.session_id))
assert obj == {
'flags': {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0},
'labels': [],
}
async def test_send_reply(factory: Factory, conns):
user = await factory.create_user()
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 0, 'spam': 0, 'deleted': 0}
other_user = await factory.create_user()
conv = await factory.create_conv(publish=True, participants=[{'email': other_user.email}])
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 1, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
await factory.act(conv.id, Action(actor_id=other_user.id, act=ActionTypes.msg_add, body='testing'))
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 1, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
async def test_send_delete_reply(cli, factory: Factory, conns):
user = await factory.create_user()
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 0, 'spam': 0, 'deleted': 0}
other_user = await factory.create_user()
conv = await factory.create_conv(publish=True, participants=[{'email': other_user.email}])
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 1, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
await cli.post_json(factory.url('ui:set-conv-flag', conv=conv.key, query={'flag': 'delete'}))
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 1}
await factory.act(conv.id, Action(actor_id=other_user.id, act=ActionTypes.msg_add, body='testing'))
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 1, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
async def test_flag_counts_draft_publish(cli, factory: Factory, db_conn):
await factory.create_user()
u2 = await factory.create_user()
obj = await cli.get_json(factory.url('ui:conv-counts'))
assert obj == flags_empty
await cli.get_json(factory.url('ui:conv-counts', session_id=u2.session_id))
await factory.create_conv(participants=[{'email': u2.email}])
assert 1 == await db_conn.fetchval('select count(*) from participants where user_id=$1', u2.id)
assert True is await db_conn.fetchval('select inbox from participants where user_id=$1', u2.id)
assert None is await db_conn.fetchval('select seen from participants where user_id=$1', u2.id)
obj = await cli.get_json(factory.url('ui:conv-counts'))
assert obj == {
'flags': {'inbox': 0, 'unseen': 0, 'draft': 1, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0},
'labels': [],
}
obj = await cli.get_json(factory.url('ui:conv-counts', session_id=u2.session_id))
assert obj == flags_empty
await cli.post_json(factory.url('ui:publish', conv=factory.conv.key), {'publish': True})
obj = await cli.get_json(factory.url('ui:conv-counts'))
assert obj == {
'flags': {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 1, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0},
'labels': [],
}
assert 1 == await db_conn.fetchval('select count(*) from participants where user_id=$1', u2.id)
assert True is await db_conn.fetchval('select inbox from participants where user_id=$1', u2.id)
assert None is await db_conn.fetchval('select seen from participants where user_id=$1', u2.id)
async def test_flag_counts(cli, factory: Factory, db_conn, redis):
await factory.create_user()
await factory.create_conv()
conv_inbox_unseen = await factory.create_conv(publish=True)
conv_inbox_seen = await factory.create_conv(publish=True)
conv_inbox_seen2 = await factory.create_conv(publish=True)
conv_archive = await factory.create_conv(publish=True)
conv_inbox_deleted = await factory.create_conv(publish=True)
conv_arch_spam = await factory.create_conv(publish=True)
conv_arch_spam_unseen = await factory.create_conv(publish=True)
new_user = await factory.create_user()
for r in await db_conn.fetch('select id from conversations'):
await factory.act(r[0], Action(actor_id=factory.user.id, act=ActionTypes.prt_add, participant=new_user.email))
await db_conn.execute('update participants set inbox=true, seen=null where conv=$1', conv_inbox_unseen.id)
await db_conn.execute('update participants set inbox=true, seen=true where conv=$1', conv_inbox_seen.id)
await db_conn.execute('update participants set inbox=true, seen=true where conv=$1', conv_inbox_seen2.id)
await db_conn.execute('update participants set inbox=null where conv=$1', conv_archive.id)
await db_conn.execute('update participants set deleted=true where conv=$1', conv_inbox_deleted.id)
await db_conn.execute('update participants set inbox=null, spam=true, seen=true where conv=$1', conv_arch_spam.id)
await db_conn.execute('update participants set spam=true, seen=null where conv=$1', conv_arch_spam_unseen.id)
await redis.delete('conv-counts*')
obj = await cli.get_json(factory.url('ui:conv-counts', session_id=new_user.session_id))
assert obj == {
'flags': {'inbox': 3, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 1, 'all': 7, 'spam': 2, 'deleted': 1},
'labels': [],
}
def query_display(v):
try:
return urlencode(v)
except TypeError:
return ','.join(v)
@pytest.mark.parametrize(
'query, expected',
[
({}, ['george', 'fred', 'ed', 'dave', 'charlie', 'ben', 'anne']),
({'flag': 'inbox'}, ['george', 'charlie']),
({'flag': 'spam'}, ['anne']),
({'flag': 'archive'}, ['ben']),
({'flag': 'unseen'}, ['charlie']),
({'flag': 'deleted'}, ['dave']),
({'flag': 'draft'}, ['ed']),
({'flag': 'sent'}, ['fred']),
],
ids=query_display,
)
async def test_filter_flags_conv_list(cli, factory: Factory, db_conn, query, expected):
await factory.create_user()
test_user = await factory.create_user()
prts = [{'email': test_user.email}]
conv_anne = await factory.create_conv(subject='anne', participants=prts, publish=True)
await db_conn.execute('update participants set spam=true where conv=$1', conv_anne.id)
conv_ben = await factory.create_conv(subject='ben', participants=prts, publish=True)
await db_conn.execute('update participants set inbox=false where conv=$1', conv_ben.id)
conv_charlie = await factory.create_conv(subject='charlie', participants=prts, publish=True)
await db_conn.execute('update participants set seen=false where conv=$1', conv_charlie.id)
conv_dave = await factory.create_conv(subject='dave', participants=prts, publish=True)
await db_conn.execute('update participants set deleted=true where conv=$1', conv_dave.id)
await factory.create_conv(subject='ed', session_id=test_user.session_id)
await factory.create_conv(subject='fred', session_id=test_user.session_id, publish=True)
conv_george = await factory.create_conv(subject='george', participants=prts, publish=True)
await db_conn.execute('update participants set seen=true where conv=$1', conv_george.id)
assert 7 == await db_conn.fetchval('select count(*) from conversations')
assert 7 == await db_conn.fetchval('select count(*) from participants where user_id=$1', test_user.id)
url = factory.url('ui:list', session_id=test_user.session_id, query=query)
data = await cli.get_json(url)
response = [c['details']['sub'] for c in data['conversations']]
assert response == expected, f'url: {url}, response: {response}'
data2 = await cli.get_json(url)
assert data == data2
if query:
return
# all case check results
d = {
c['details']['sub']: {
'seen': int(c['seen']), # use ints so the below fits on one line each
'inbox': int(c['inbox']),
'archive': int(c['archive']),
'deleted': int(c['deleted']),
'spam': int(c['spam']),
'draft': int(c['draft']),
'sent': int(c['sent']),
}
for c in data['conversations']
}
assert d == {
'anne': {'seen': 0, 'inbox': 0, 'archive': 0, 'deleted': 0, 'spam': 1, 'draft': 0, 'sent': 0},
'ben': {'seen': 0, 'inbox': 0, 'archive': 1, 'deleted': 0, 'spam': 0, 'draft': 0, 'sent': 0},
'charlie': {'seen': 0, 'inbox': 1, 'archive': 0, 'deleted': 0, 'spam': 0, 'draft': 0, 'sent': 0},
'dave': {'seen': 0, 'inbox': 0, 'archive': 0, 'deleted': 1, 'spam': 0, 'draft': 0, 'sent': 0},
'ed': {'seen': 1, 'inbox': 0, 'archive': 0, 'deleted': 0, 'spam': 0, 'draft': 1, 'sent': 0},
'fred': {'seen': 1, 'inbox': 0, 'archive': 0, 'deleted': 0, 'spam': 0, 'draft': 0, 'sent': 1},
'george': {'seen': 1, 'inbox': 1, 'archive': 0, 'deleted': 0, 'spam': 0, 'draft': 0, 'sent': 0},
}
async def test_draft_counts(factory: Factory, conns):
user = await factory.create_user()
user2 = await factory.create_user()
await factory.create_conv(participants=[{'email': user2.email}])
flags = await get_flag_counts(conns, user.id)
assert flags == {'inbox': 0, 'unseen': 0, 'draft': 1, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 0, 'spam': 0, 'deleted': 0}
async def test_published_counts(factory: Factory, conns):
user = await factory.create_user()
user2 = await factory.create_user()
await factory.create_conv(publish=True, participants=[{'email': user2.email}])
flags = await get_flag_counts(conns, user.id)
assert flags == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 1, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
async def test_add_prt(factory: Factory, conns):
user = await factory.create_user()
conv = await factory.create_conv(publish=True)
user2 = await factory.create_user()
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 0, 'spam': 0, 'deleted': 0}
await factory.act(conv.id, Action(actor_id=user.id, act=ActionTypes.prt_add, participant=user2.email))
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
await factory.act(conv.id, Action(actor_id=factory.user.id, act=ActionTypes.msg_add, body='testing'))
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
async def test_add_remove_add_prt(factory: Factory, conns):
user = await factory.create_user()
conv = await factory.create_conv(publish=True)
user2 = await factory.create_user()
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 0, 'spam': 0, 'deleted': 0}
f = await factory.act(conv.id, Action(actor_id=user.id, act=ActionTypes.prt_add, participant=user2.email))
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
await factory.act(
conv.id, Action(actor_id=user.id, act=ActionTypes.prt_remove, participant=user2.email, follows=f[0])
)
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
await factory.act(conv.id, Action(actor_id=user.id, act=ActionTypes.prt_add, participant=user2.email))
flags = await get_flag_counts(conns, user2.id)
assert flags == {'inbox': 1, 'unseen': 1, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 0, 'deleted': 0}
async def test_spam_seen(factory: Factory, conns, cli, url, create_ses_email, worker: Worker):
user = await factory.create_user()
msg = create_ses_email(to=(user.email,), receipt_extra=dict(spamVerdict={'status': 'FAIL'}))
r = await cli.post(url('protocol:webhook-ses', token='testing'), json=msg)
assert await worker.run_check() == 2
assert r.status == 204, await r.text()
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 1, 'deleted': 0}
conv_id = await conns.main.fetchval('select id from conversations')
await factory.act(conv_id, Action(actor_id=user.id, act=ActionTypes.seen))
counts = await get_flag_counts(conns, user.id)
assert counts == {'inbox': 0, 'unseen': 0, 'draft': 0, 'sent': 0, 'archive': 0, 'all': 1, 'spam': 1, 'deleted': 0}
|
import socket
# host address
HOST = '127.0.0.1'
# port number; use a value greater than 1023
PORT = 65431
# create a UDP socket object
udp_server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# bind the socket to the address
udp_server.bind((HOST, PORT))
print("UDP Server is working...")
while True:
    # block until data is received from a client
    data, addr = udp_server.recvfrom(1024)
    print('Received %s from %s:%s.' % (data, addr[0], addr[1]))
    if data == b'close':  # if 'close' is received
        print('Server is closed!')
        break
    udp_server.sendto(data, addr)  # echo the received data back
udp_server.close()  # close the udp_server socket
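# A minimal client-side sketch (not part of the original file; it assumes the same
# HOST/PORT the server above binds to) showing how the echo loop and the b'close'
# shutdown command are exercised:
#
#   import socket
#   client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#   client.sendto(b'hello', ('127.0.0.1', 65431))
#   data, _ = client.recvfrom(1024)                 # the server echoes b'hello' back
#   client.sendto(b'close', ('127.0.0.1', 65431))   # asks the server to shut down
#   client.close()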
|
from asciitranslator import *
from caesartranslator import *
from numberedalphabettranslator import *
from substitutiontranslator import *
from morsecodetranslator import *
from brailletranslator import *
from semaphoretranslator import *
from binarytranslator import *
from vigeneretranslator import *
from polishcrosstranslator import *
from xortranslator import *
from translator import * |
import RPi.GPIO as gpio
import time
#curses is used to capture the keyboard input
import curses
import pygame
from random import randint
#TODO: re-enable pylinter and set it up properly
#TODO: set up a finally clause that always cleans up the GPIOs instead of only after a KeyboardInterrupt (ctrl-c)
class Propulsion(object):
def __init__(self):
self.stdscr = curses.initscr()
curses.endwin()
#Disable warnings for the GPIOs being in use
gpio.setwarnings(False)
#Set the pinmode to BCM
gpio.setmode(gpio.BCM)
gpio.setup(18, gpio.OUT)
gpio.setup(13, gpio.OUT)
gpio.setup(17, gpio.OUT)
gpio.setup(22, gpio.OUT)
gpio.setup(23, gpio.OUT)
gpio.setup(24, gpio.OUT)
#Set pulse modulation to 50hz for pin 18 and store that initialization in a variable
self.pwm1 = gpio.PWM(18, 50)
self.pwm2 = gpio.PWM(13, 50)
#Start the pwm at 0% duty cycle
self.pwm1.start(0)
self.pwm2.start(0)
#The normal speed for forwards and backwards momentum
self.cruisingSpeed = 60
self.speedRight = 60
self.speedLeft = 60
#Initialize the state of the propulsion
self.state = "awake"
# The higher the steerforce, the more extreme the steering effect will be (1/2, 1/3 * power of other wheel)
self.steerForce = 4
self.quickTurn = True
#self.laugh()
def laugh(self):
pygame.mixer.init()
pygame.mixer.music.load("laugh.mp3")
pygame.mixer.music.play()
        while pygame.mixer.music.get_busy():
continue
def forward(self):
print("inching forward with speed "+ str(self.cruisingSpeed))
# Reset the speed of both wheels to cruising speed to disable left and right modes
#TODO: put the following 2 lines in a function so the code is more DRY
self.speedRight = self.speedLeft = self.cruisingSpeed
self.updateCycle()
self.state = "forward"
self.turnWheel("right", "forward")
self.turnWheel("left", "forward")
def reverse(self):
print("awkwardly backing away with speed "+ str(self.cruisingSpeed))
self.speedRight = self.speedLeft = self.cruisingSpeed
self.updateCycle()
self.state = "reverse"
self.turnWheel("right", "reverse")
self.turnWheel("left", "reverse")
def left(self):
#When driving forward, slow down the left wheel to turn left
#As of right now, the state has no 'forward left' so it will just output left
if self.state == "forward" and self.state != "left":
print("forward left")
            #TODO: keep separate states for forward-left and forward-right because this isn't DRY
            #Here the wheel speeds are reset because if you turn forward-right straight after forward-left,
            # both wheel speeds would otherwise end up divided by steerForce
self.speedRight = self.speedLeft = self.cruisingSpeed
self.speedLeft /= self.steerForce
self.updateCycle()
# The next mode is for when the bot stands still
elif self.state != "forward" and self.state != "reverse":
print("to the left from " + self.state)
self.speedRight = self.speedLeft = self.cruisingSpeed
self.updateCycle()
self.state = "left"
# This is a nifty trick to control quickturning. If quickturn is true, the left wheel turns backward,
# the right wheel forward; if quickturn is false, the left wheel stops (false, false) the right wheel turns forward.
self.turnWheel("left", "reverse") if self.quickTurn else self.turnWheel("left", "stop")
self.turnWheel("right", "forward")
def right(self):
print("to the right ")
if self.state == "forward" and self.state != "right":
print("forward right")
            #TODO: move speed into the turnWheel function as well; that makes for easier code when turning while reversing
            #Maybe it's best to turn the wheels into objects and have the rest of the script only track whether the bot is moving forwards or backwards
self.speedRight = self.speedLeft = self.cruisingSpeed
self.speedRight /= self.steerForce
self.updateCycle()
# The next mode is for when the bot stands still
elif self.state != "forward" and self.state != "reverse":
print("to the right from " + self.state)
#reset the speed of both wheels before turning.
self.speedRight = self.speedLeft = self.cruisingSpeed
self.updateCycle()
self.state = "right"
self.turnWheel("right", "reverse") if self.quickTurn else self.turnWheel("right", "stop")
self.turnWheel("left", "forward")
def goDuckYour(self):
        '''This function selects a random sequence and executes it.
        Each sequence contains 1 or more instructions like "left" or "forward". Each instruction is coupled to a function;
        each function sets up the GPIOs. After each instruction the sequence will wait for a number of seconds.
        The coding in this function may look a bit ugly, but that's what you get with custom sequences...'''
print("Loose yourself (autonomous mode)")
testMoves = [self.left, self.right, self.reverse]
waggleMoves = [self.left, self.right]
sequence2 = ["enableQuickTurn"]
waggleMode = True
def test():
            '''This mode is for testing out fun sequences. Just pop em in testMoves and watch her go.
            If you find a nice combo, save it in an array, give it a name in the modeMapping object and gooo'''
print("Executing testmoves")
for instruction in testMoves:
instruction()
time.sleep(3)
def spin():
            '''Time to go crazy'''
print("WUBBA LUBBA DUB DUUUUB")
#TODO: change this after I fix the math for changeSpeed so it accepts an absolute number
self.changeSpeed(30)
self.left()
time.sleep(.5)
while (self.cruisingSpeed <= 90):
self.changeSpeed(self.cruisingSpeed + 10)
self.left()
time.sleep(.5)
self.right()
time.sleep(2)
def waggle():
            '''Waggle works like this: left, right, left, random quickturn; repeat the pattern with a % chance through recursion'''
print("Executing waggle")
#TODO: left and right should listen to enable quickturn instead of detecting if the bot is not moving
self.enableQuickTurn(False)
for i in range(3):
waggleMoves[0]()
time.sleep(.3)
waggleMoves[1]()
time.sleep(.3)
self.enableQuickTurn(True)
waggleMoves[randint(0,1)]()
time.sleep(1)
if (randint(0,9) >= 4):
waggle()
#TODO: find a way to interrupt sleep so this process can be stopped by the user
        #This dict holds the different modes for autonomous movement
modeMapping = {
"1" : test,
"2" : waggle,
"3" : spin
}
option = chr(self.stdscr.getch())
curses.endwin()
modeMapping[option]()
self.stop()
def start(self):
try:
while True:
curses.cbreak()
                #get the ASCII integer of the single entered character and convert it to its string equivalent
option = chr(self.stdscr.getch())
curses.endwin()
print("input is: " + str(option))
                #we could convert this to a dictionary mapping but that wouldn't work for the more complex options
if option == "w":
self.forward()
elif option == "s":
self.reverse()
elif option == "a":
self.left()
elif option == "d":
self.right()
elif option == "b":
self.stop()
elif option == "0":
self.goDuckYour()
elif option=="k":
self.changeSpeed(self.cruisingSpeed + 10)
elif option=="m":
self.changeSpeed(self.cruisingSpeed - 10)
#build in a bit of a delay to not overwork the processor too much
time.sleep(.01)
except KeyboardInterrupt:
self.quit()
def changeSpeed(self, value):
'''Accepts an absolute speed between 0-100 and changes the wheels' speed to the given value. This does mean
that changing the speed while turning right or left will set the bot on a forward path'''
# TODO: it might be better to have a function that sets both the wheel direction and the speed. Then,
# The wheels can be objects with direction and speed as properties, yay OOP
print("changeSpeed called with: " + str(value) + " cruisingspeed: " + str(self.cruisingSpeed))
oldSpeed = self.cruisingSpeed
if ( value <= 100 and value >= 0 ):
self.cruisingSpeed = value
self.speedRight = self.speedLeft = self.cruisingSpeed
else:
print("Value out of range: " + str(value))
self.updateCycle()
def stop(self):
print("Stop! Hammertime")
self.state = "still"
self.turnWheel("left", "stop")
self.turnWheel("right", "stop")
def turnWheel(self, wheel, direction):
if wheel == "left":
if direction == "forward":
gpio.output(23, True)
gpio.output(24, False)
elif direction == "reverse":
gpio.output(23, False)
gpio.output(24, True)
elif direction == "stop":
gpio.output(23, False)
gpio.output(24, False)
if wheel == "right":
if direction == "forward":
gpio.output(17, True)
gpio.output(22, False)
elif direction == "reverse":
gpio.output(17, False)
gpio.output(22, True)
elif direction == "stop":
gpio.output(17, False)
gpio.output(22, False)
def enableQuickTurn(self, value):
self.quickTurn = value
def updateCycle(self):
self.pwm1.ChangeDutyCycle(self.speedRight)
self.pwm2.ChangeDutyCycle(self.speedLeft)
def quit(self):
print("quitting program")
self.pwm1.ChangeDutyCycle(0)
self.pwm2.ChangeDutyCycle(0)
gpio.cleanup()
curses.endwin()
propulsion = Propulsion()
propulsion.start()
|
import numpy as np
import numpy.fft as nfft
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.colors as col
from .config import FT, IFT, FTFREQ, SHIFT
def spectrogram(t, w, ut, t_lim=None, Nt=1000, s0=20.0):
"""Compute spectrogram for time-domain input signal.
Computes spectrogram of a time-domain input signal via short time Fourier
transform employing a Gaussian window function.
Args:
t (:obj:`numpy.array`, 1-dim):
Temporal grid.
w (:obj:`numpy.array`, 1-dim):
Angular-frequency grid.
        ut (:obj:`numpy.array`, 1-dim):
Time-domain representation of analytic signal.
t_lim (:obj:`list`):
Delay time bounds for temporal axis considered for constructing
the spectrogram (tMin, tMax), default is (min(t),max(t)).
Nt (:obj:`int`):
Number of delay times samples in [tMin, tMax], used for signal
localization (default: Nt=1000).
s0 (:obj:`float`):
Root-mean-square width of Gaussian function used for signal
localization (default: s0=20.0).
Returns:
        :obj:`list`: (t_seq, w_spec, P_tw), where `t_seq`
        (:obj:`numpy.ndarray`, 1-dim) are delay times, `w_spec`
        (:obj:`numpy.ndarray`, 1-dim) are angular frequencies, and `P_tw`
        (:obj:`numpy.ndarray`, 2-dim) is the spectrogram.
"""
    if t_lim is None:
t_min, t_max = np.min(t), np.max(t)
else:
t_min, t_max = t_lim
# -- DELAY TIMES
t_seq = np.linspace(t_min, t_max, Nt)
# -- WINDOW FUNCTION
h = lambda t: np.exp(-(t ** 2) / 2 / s0 / s0) / np.sqrt(2.0 * np.pi * s0 * s0)
# -- COMPUTE TIME-FREQUENCY RESOLVED CONTENT OF INPUT FIELD
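    #    (broadcasting h(t - t_seq[:, np.newaxis]) builds an (Nt, t.size) array of
    #    windowed copies of ut; FT along the last axis then yields one spectrum per delay time)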
P = np.abs(FT(h(t - t_seq[:, np.newaxis]) * ut[np.newaxis, :], axis=-1)) ** 2
return t_seq, SHIFT(w), np.swapaxes(SHIFT(P, axes=-1), 0, 1)
def plot_spectrogram(z_pos, t_delay, w_opt, P_tw, t_lim = None, w_lim = None, o_name = None):
r"""Generate a figure of a spectrogram.
Generate figure showing the intensity normalized spectrogram. Scales the
spectrogram data so that maximum intensity per time and frequency is unity.
    Args:
        z_pos (:obj:`float`): Propagation distance used in the figure label.
        t_delay (:obj:`numpy.ndarray`, 1-dim): Delay time grid.
        w_opt (:obj:`numpy.ndarray`, 1-dim): Angular-frequency grid.
        P_tw (:obj:`numpy.ndarray`, 2-dim): Spectrogram data.
        t_lim (:obj:`list`, optional): Bounds (t_min, t_max) for the delay-time axis.
        w_lim (:obj:`list`, optional): Bounds (w_min, w_max) for the angular-frequency axis.
        o_name (:obj:`str`, optional): If given, the figure is saved as `o_name`.png instead of shown.
"""
    if t_lim is None:
        t_min, t_max = t_delay[0], t_delay[-1]
    else:
        t_min, t_max = t_lim
    if w_lim is None:
        w_min, w_max = w_opt[0], w_opt[-1]
    else:
        w_min, w_max = w_lim
f, ax1 = plt.subplots(1, 1, sharey=True, figsize=(4, 3))
plt.subplots_adjust(left=0.15, right=0.95, bottom=0.15, top=0.78)
cmap = mpl.cm.get_cmap("jet")
def _setColorbar(im, refPos):
"""colorbar helper"""
x0, y0, w, h = refPos.x0, refPos.y0, refPos.width, refPos.height
cax = f.add_axes([x0, y0 + 1.02 * h, w, 0.05 * h])
cbar = f.colorbar(im, cax=cax, orientation="horizontal")
cbar.ax.tick_params(
color="k",
labelcolor="k",
bottom=False,
direction="out",
labelbottom=False,
labeltop=True,
top=True,
size=4,
pad=0,
)
cbar.ax.tick_params(which="minor", bottom=False, top=False)
return cbar
_truncate = lambda I: np.where(I>I.max()*1e-5, I , I.max()*1e-5)
I = _truncate(P_tw[:-1, :-1] / P_tw.max())
im1 = ax1.pcolorfast(
t_delay,
w_opt,
I,
norm=col.LogNorm(vmin=1e-5 * I.max(), vmax=I.max()),
cmap=cmap,
)
cbar1 = _setColorbar(im1, ax1.get_position())
cbar1.ax.set_title(r"$P_S(t, \omega)$", color="k", y=3.5)
ax1.set_xlim(t_min, t_max)
ax1.set_ylim(w_min, w_max)
ax1.tick_params(axis="y", length=2.0, direction="out")
ax1.tick_params(axis="x", length=2.0, direction="out")
ax1.set_xlabel(r"Delay time $t$")
ax1.set_ylabel(r"Angular frequency $\omega$")
ax1.text(0., 0., r'$z = %3.2lf$'%(z_pos), horizontalalignment='left', color='white',
verticalalignment='bottom', transform=ax1.transAxes)
if o_name:
plt.savefig(o_name + ".png", format="png", dpi=600)
plt.close()
else:
plt.show()
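# Minimal usage sketch (not part of the original module; it assumes FT/SHIFT from
# .config wrap numpy.fft.fft/fftshift and uses an illustrative Gaussian pulse):
#
#   t = np.linspace(-500.0, 500.0, 2**12)
#   w = nfft.fftfreq(t.size, d=t[1] - t[0]) * 2 * np.pi
#   ut = np.exp(-t**2 / (2.0 * 50.0**2)) * np.exp(1j * 0.3 * t)
#   t_spec, w_spec, P_tw = spectrogram(t, w, ut, Nt=500, s0=20.0)
#   plot_spectrogram(0.0, t_spec, w_spec, P_tw)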
|
print("What is your favourite food?")
answer = input()
print("Yummm...", answer)
|
"""Contains functions for making alignments between functional data and the surface, or, finding where the brain is.
"""
import os
import numpy as np
from builtins import input
def manual(subject, xfmname, reference=None, **kwargs):
"""Open GUI for manually aligning a functional volume to the cortical surface for `subject`. This
creates a new transform called `xfm`. The name of a nibabel-readable file (e.g. nii) should be
supplied as `reference`. This image will be copied into the database.
To modify an existing functional-anatomical transform, `reference` can be left blank, and the
previously used reference will be loaded.
<<ADD DETAILS ABOUT TRANSFORMATION MATRIX FORMAT HERE>>
When the GUI is closed, the transform will be saved into the pycortex database. The GUI requires
Mayavi support.
Parameters
----------
subject : str
Subject identifier.
xfmname : str
String identifying the transform to be created or loaded.
reference : str, optional
Path to a nibabel-readable image that will be used as the reference for this transform.
If given the default value of None, this function will attempt to load an existing reference
image from the database.
kwargs : dict
Passed to mayavi_aligner.get_aligner.
Returns
-------
m : 2D ndarray, shape (4, 4)
Transformation matrix.
"""
from .database import db
from .mayavi_aligner import get_aligner
def save_callback(aligner):
db.save_xfm(subject, xfmname, aligner.get_xfm("magnet"), xfmtype='magnet', reference=reference)
print("saved xfm")
def view_callback(aligner):
print('view-only mode! ignoring changes')
# Check whether transform w/ this xfmname already exists
view_only_mode = False
try:
db.get_xfm(subject, xfmname)
# Transform exists, make sure that reference is None
if reference is not None:
raise ValueError('Refusing to overwrite reference for existing transform %s, use reference=None to load stored reference' % xfmname)
# if masks have been cached, quit! user must remove them by hand
from glob import glob
if len(glob(db.get_paths(subject)['masks'].format(xfmname=xfmname, type='*'))):
print('Refusing to overwrite existing transform %s because there are cached masks. Delete the masks manually if you want to modify the transform.' % xfmname)
checked = False
while not checked:
resp = input("Do you want to continue in view-only mode? (Y/N) ").lower().strip()
if resp in ["y", "yes", "n", "no"]:
checked = True
if resp in ["y", "yes"]:
view_only_mode = True
print("Continuing in view-only mode...")
else:
raise ValueError("Exiting...")
else:
print("Didn't get that, please try again..")
except IOError:
# Transform does not exist, make sure that reference exists
if reference is None or not os.path.exists(reference):
raise ValueError('Reference image file (%s) does not exist' % reference)
m = get_aligner(subject, xfmname, epifile=reference, **kwargs)
m.save_callback = view_callback if view_only_mode else save_callback
m.configure_traits()
return m
def manualnhp(subject, xfmname, reference=None, **kwargs):
"""Open GUI for manually aligning a functional volume to the cortical surface for `subject`. This
creates a new transform called `xfm`. The name of a nibabel-readable file (e.g. nii) should be
supplied as `reference`. This image will be copied into the database.
To modify an existing functional-anatomical transform, `reference` can be left blank, and the
previously used reference will be loaded.
<<ADD DETAILS ABOUT TRANSFORMATION MATRIX FORMAT HERE>>
When the GUI is closed, the transform will be saved into the pycortex database. The GUI requires
Mayavi support.
Parameters
----------
subject : str
Subject identifier.
xfmname : str
String identifying the transform to be created or loaded.
reference : str, optional
Path to a nibabel-readable image that will be used as the reference for this transform.
If given the default value of None, this function will attempt to load an existing reference
image from the database.
kwargs : dict
Passed to mayavi_aligner.get_aligner.
Returns
-------
m : 2D ndarray, shape (4, 4)
Transformation matrix.
"""
from .database import db
from .mayavi_aligner import get_aligner
def save_callback(aligner):
db.save_xfm(subject, xfmname, aligner.get_xfm("magnet"), xfmtype='magnet', reference=reference)
print("saved xfm")
def view_callback(aligner):
print('view-only mode! ignoring changes')
# Check whether transform w/ this xfmname already exists
view_only_mode = False
try:
db.get_xfm(subject, xfmname)
# Transform exists, make sure that reference is None
if reference is not None:
raise ValueError('Refusing to overwrite reference for existing transform %s, use reference=None to load stored reference' % xfmname)
# if masks have been cached, quit! user must remove them by hand
from glob import glob
if len(glob(db.get_paths(subject)['masks'].format(xfmname=xfmname, type='*'))):
print('Refusing to overwrite existing transform %s because there are cached masks. Delete the masks manually if you want to modify the transform.' % xfmname)
checked = False
while not checked:
resp = input("Do you want to continue in view-only mode? (Y/N) ").lower().strip()
if resp in ["y", "yes", "n", "no"]:
checked = True
if resp in ["y", "yes"]:
view_only_mode = True
print("Continuing in view-only mode...")
else:
raise ValueError("Exiting...")
else:
print("Didn't get that, please try again..")
except IOError:
# Transform does not exist, make sure that reference exists
if reference is None or not os.path.exists(reference):
raise ValueError('Reference image file (%s) does not exist' % reference)
m = get_aligner(subject, xfmname, epifile=reference, **kwargs)
m.save_callback = view_callback if view_only_mode else save_callback
m.configure_traits()
return m
def fs_manual(subject, xfmname, output_name="register.lta", wm_color="yellow",
pial_color="blue", wm_surface='white', noclean=False, reference=None, inspect_only=False):
"""Open Freesurfer FreeView GUI for manually aligning/adjusting a functional
volume to the cortical surface for `subject`. This creates a new transform
called `xfmname`. The name of a nibabel-readable file (e.g. NIfTI) should be
supplied as `reference`. This image will be copied into the database.
IMPORTANT: This function assumes that the resulting .lta file is saved as:
"{default folder chosen by FreeView (should be /tmp/fsalign_xxx)}/{output_name}".
NOTE: Half-fixed some potential bugs in here, related to assumptions about how
results from mri_info calls would be formatted. IFF .dat files are written
based on nii files that have been stripped of their headers, then there will
be an extra line at the top stating that the coordinates are assumed to be in mm.
Without this line, the code here fails. Seems brittle, ripe for future bugs.
ALSO: all the freesurfer environment stuff shouldn't be necessary, except that
I don't know what vox2ras-tkr is doing.
Parameters
----------
subject : str
Subject identifier.
xfmname : str
The name of the transform to be modified.
output_name : str
The name of the .lta file generated after FreeView editing.
    wm_color : str | "yellow"
        Color of the white matter surface. Default is "yellow". This can
        also be adjusted in the FreeView GUI.
    pial_color : str | "blue"
        Color of the pial surface. Default is "blue". This can also be adjusted
        in the FreeView GUI.
noclean : boolean | False
If True, intermediate files will not be removed from /tmp/fsalign_xxx
(this is useful for debugging things), and the returned value will be
the name of the temp directory. Default False.
reference : str
name of reference (generally, functional) volume. Only provide this if
you are working from scratch (if no transform exists already), else
it will throw an error.
inspect_only : boolean | False
Whether to open transform to view only (if True, nothing is saved
when freeview is closed)
wm_surface : string
name for white matter surface to use. 'white' or 'smoothwm'
Returns
-------
Nothing unless noclean is true.
"""
import subprocess as sp
import tempfile
import shutil
from .xfm import Transform
from .database import db
retval = None
try:
try:
cache = tempfile.mkdtemp(prefix="fsalign_")
sub_xfm = db.get_xfm(subject, xfmname)
# if masks have been cached, quit! user must remove them by hand
from glob import glob
masks_exist = len(glob(db.get_paths(subject)['masks'].format(xfmname=xfmname, type='*')))
if masks_exist and not inspect_only:
print('Refusing to overwrite existing transform %s because there are cached masks. Delete the masks manually if you want to modify the transform.' % xfmname)
raise ValueError('Exiting...')
if reference is not None:
raise ValueError('Refusing to overwrite extant reference for transform')
except IOError:
if reference is None:
print("Transform does not exist!")
if reference is None:
            # Load extant transform-relevant things
reference = sub_xfm.reference.get_filename()
_ = sub_xfm.to_freesurfer(os.path.join(cache, "register.dat"), subject) # Transform in freesurfer .dat format
# Command for FreeView and run
cmd = ("freeview -v $SUBJECTS_DIR/{sub}/mri/orig.mgz "
"{ref}:reg={reg} "
"-f $SUBJECTS_DIR/{sub}/surf/lh.{wms}:edgecolor={wmc} $SUBJECTS_DIR/{sub}/surf/rh.{wms}:edgecolor={wmc} "
"$SUBJECTS_DIR/{sub}/surf/lh.pial:edgecolor={pialc} $SUBJECTS_DIR/{sub}/surf/rh.pial:edgecolor={pialc}")
cmd = cmd.format(sub=subject, ref=reference, reg=os.path.join(cache, "register.dat"),
wmc=wm_color, pialc=pial_color, wms=wm_surface)
print('=== Calling (NO REFERENCE PROVIDED): ===')
print(cmd)
else:
# Command for FreeView and run
cmd = ("freeview -v $SUBJECTS_DIR/{sub}/mri/orig.mgz "
"{ref} "
"-f $SUBJECTS_DIR/{sub}/surf/lh.{wms}:edgecolor={wmc} $SUBJECTS_DIR/{sub}/surf/rh.{wms}:edgecolor={wmc} "
"$SUBJECTS_DIR/{sub}/surf/lh.pial:edgecolor={pialc} $SUBJECTS_DIR/{sub}/surf/rh.pial:edgecolor={pialc}")
cmd = cmd.format(sub=subject, ref=reference,
wmc=wm_color, pialc=pial_color,
wms=wm_surface)
print('=== Calling: ===')
print(cmd)
if not inspect_only:
sfile = os.path.join(cache, output_name)
print('\nREGISTRATION MUST BE SAVED AS:\n\n{}'.format(sfile))
# Run and save transform when user is done editing
if sp.call(cmd, shell=True) != 0:
raise IOError("Problem with FreeView!")
else:
if not inspect_only:
# Convert transform into .dat format
# Unclear why we're not just saving in .dat format above...?
reg_dat = os.path.join(cache, os.path.splitext(output_name)[0] + ".dat")
cmd = "lta_convert --inlta {inlta} --outreg {regdat}"
cmd = cmd.format(inlta=os.path.join(cache, output_name), regdat=reg_dat)
if sp.call(cmd, shell=True) != 0:
raise IOError("Error converting lta into dat!")
# Save transform to pycortex
xfm = Transform.from_freesurfer(reg_dat, reference, subject)
db.save_xfm(subject, xfmname, xfm.xfm, xfmtype='coord', reference=reference)
print("saved xfm")
except Exception as e:
raise(e)
finally:
if not noclean:
shutil.rmtree(cache)
else:
retval = cache
return retval
def automatic(subject, xfmname, reference, noclean=False, bbrtype="signed", pre_flirt_args='', use_fs_bbr=False):
"""Create an automatic alignment using the FLIRT boundary-based alignment (BBR) from FSL.
If `noclean`, intermediate files will not be removed from /tmp. The `reference` image and resulting
transform called `xfmname` will be automatically stored in the database.
It's good practice to open up this transform afterward in the manual aligner and check how it worked.
Do that using the following (with the same `subject` and `xfmname` used here, no need for `reference`):
> align.manual(subject, xfmname)
If automatic alignment gives you a very bad answer, you can try giving the pre-BBR FLIRT
some hints by passing '-usesqform' in as `pre_flirt_args`.
Parameters
----------
subject : str
Subject identifier.
xfmname : str
String identifying the transform to be created.
reference : str
Path to a nibabel-readable image that will be used as the reference for this transform.
Usually, this is a single (3D) functional data volume.
noclean : bool, optional
If True intermediate files will not be removed from /tmp (this is useful for debugging things),
and the returned value will be the name of the temp directory. Default False.
bbrtype : str, optional
The 'bbrtype' argument that is passed to FLIRT.
pre_flirt_args : str, optional
Additional arguments that are passed to the FLIRT pre-alignment step (not BBR).
use_fs_bbr : bool, optional
If True will use freesurfer bbregister instead of FSL BBR.
Returns
-------
Nothing unless `noclean` is True.
"""
import shlex
import shutil
import tempfile
import subprocess as sp
from .database import db
from .xfm import Transform
from .options import config
fsl_prefix = config.get("basic", "fsl_prefix")
schfile = os.path.join(os.path.split(os.path.abspath(__file__))[0], "bbr.sch")
retval = None
try:
cache = tempfile.mkdtemp()
absreference = os.path.abspath(reference)
if use_fs_bbr:
print('Running freesurfer BBR')
cmd = 'bbregister --s {sub} --mov {absref} --init-fsl --reg {cache}/register.dat --t2'
cmd = cmd.format(sub=subject, absref=absreference, cache=cache)
if sp.call(cmd, shell=True) != 0:
raise IOError('Error calling freesurfer BBR!')
xfm = Transform.from_freesurfer(os.path.join(cache, "register.dat"), absreference, subject)
else:
raw = db.get_anat(subject, type='raw').get_filename()
bet = db.get_anat(subject, type='brainmask').get_filename()
wmseg = db.get_anat(subject, type='whitematter').get_filename()
#Compute anatomical-to-epi transform
print('FLIRT pre-alignment')
cmd = '{fslpre}flirt -in {epi} -ref {bet} -dof 6 {pre_flirt_args} -omat {cache}/init.mat'.format(
fslpre=fsl_prefix, cache=cache, epi=absreference, bet=bet, pre_flirt_args=pre_flirt_args)
if sp.call(cmd, shell=True) != 0:
raise IOError('Error calling initial FLIRT')
print('Running BBR')
# Run epi-to-anat transform (this is more stable than anat-to-epi in FSL!)
cmd = '{fslpre}flirt -in {epi} -ref {raw} -dof 6 -cost bbr -wmseg {wmseg} -init {cache}/init.mat -omat {cache}/out.mat -schedule {schfile} -bbrtype {bbrtype}'
cmd = cmd.format(fslpre=fsl_prefix, cache=cache, raw=bet, wmseg=wmseg, epi=absreference, schfile=schfile, bbrtype=bbrtype)
if sp.call(cmd, shell=True) != 0:
raise IOError('Error calling BBR flirt')
x = np.loadtxt(os.path.join(cache, "out.mat"))
# Pass transform as FROM epi TO anat; transform will be inverted
# back to anat-to-epi, standard direction for pycortex internal
# storage by from_fsl
xfm = Transform.from_fsl(x,absreference,raw)
# Save as pycortex 'coord' transform
xfm.save(subject,xfmname,'coord')
print('Success')
finally:
if not noclean:
shutil.rmtree(cache)
else:
retval = cache
return retval
def automaticnhp(subject, xfmname, reference, noclean=False, bbrtype="signed", pre_flirt_args='', use_fs_bbr=False):
"""Create an automatic alignment using the FLIRT boundary-based alignment (BBR) from FSL.
If `noclean`, intermediate files will not be removed from /tmp. The `reference` image and resulting
transform called `xfmname` will be automatically stored in the database.
It's good practice to open up this transform afterward in the manual aligner and check how it worked.
Do that using the following (with the same `subject` and `xfmname` used here, no need for `reference`):
> align.manual(subject, xfmname)
If automatic alignment gives you a very bad answer, you can try giving the pre-BBR FLIRT
some hints by passing '-usesqform' in as `pre_flirt_args`.
Parameters
----------
subject : str
Subject identifier.
xfmname : str
String identifying the transform to be created.
reference : str
Path to a nibabel-readable image that will be used as the reference for this transform.
Usually, this is a single (3D) functional data volume.
noclean : bool, optional
If True intermediate files will not be removed from /tmp (this is useful for debugging things),
and the returned value will be the name of the temp directory. Default False.
bbrtype : str, optional
The 'bbrtype' argument that is passed to FLIRT.
pre_flirt_args : str, optional
Additional arguments that are passed to the FLIRT pre-alignment step (not BBR).
use_fs_bbr : bool, optional
If True will use freesurfer bbregister instead of FSL BBR.
Returns
-------
Nothing unless `noclean` is True.
"""
import shlex
import shutil
import tempfile
import subprocess as sp
from .database import db
from .xfm import Transform
from .options import config
fsl_prefix = config.get("basic", "fsl_prefix")
schfile = os.path.join(os.path.split(os.path.abspath(__file__))[0], "bbr.sch")
retval = None
try:
cache = tempfile.mkdtemp()
absreference = os.path.abspath(reference)
if use_fs_bbr:
print('Running freesurfer BBR')
cmd = 'bbregister --s {sub} --mov {absref} --init-fsl --reg {cache}/register.dat --t2'
cmd = cmd.format(sub=subject, absref=absreference, cache=cache)
if sp.call(cmd, shell=True) != 0:
raise IOError('Error calling freesurfer BBR!')
xfm = Transform.from_freesurfer(os.path.join(cache, "register.dat"), absreference, subject)
else:
raw = db.get_anat(subject, type='raw').get_filename()
bet = db.get_anat(subject, type='brainmask').get_filename()
wmseg = db.get_anat(subject, type='whitematter').get_filename()
#Compute anatomical-to-epi transform
#print('FLIRT pre-alignment')
#cmd = '{fslpre}flirt -in {epi} -ref {bet} -dof 6 {pre_flirt_args} -omat {cache}/init.mat'.format(
# fslpre=fsl_prefix, cache=cache, epi=absreference, bet=bet, pre_flirt_args=pre_flirt_args)
#if sp.call(cmd, shell=True) != 0:
# raise IOError('Error calling initial FLIRT')
print('Running FLIRT')
# Run epi-to-anat transform (this is more stable than anat-to-epi in FSL!)
cmd = 'flirt -in {epi} -ref {raw} -dof 12 -cost corratio -wmseg {wmseg} -omat {cache}/out.mat -pedir -2'
cmd = cmd.format(cache=cache, raw=bet, wmseg=wmseg, epi=absreference)
print(cmd)
if sp.call(cmd, shell=True) != 0:
raise IOError('Error calling FLIRT')
x = np.loadtxt(os.path.join(cache, "out.mat"))
# Pass transform as FROM epi TO anat; transform will be inverted
# back to anat-to-epi, standard direction for pycortex internal
# storage by from_fsl
xfm = Transform.from_fsl(x,absreference,raw)
# Save as pycortex 'coord' transform
xfm.save(subject,xfmname,'coord')
print('Success')
finally:
if not noclean:
shutil.rmtree(cache)
else:
retval = cache
return retval
def autotweak(subject, xfmname):
"""Tweak an alignment using the FLIRT boundary-based alignment (BBR) from FSL.
Ideally this function should actually use a limited search range, but it doesn't.
It's probably not very useful.
Parameters
----------
subject : str
Subject identifier.
xfmname : str
String identifying the transform to be tweaked.
"""
import shlex
import shutil
import tempfile
import subprocess as sp
from .database import db
from .xfm import Transform
from .options import config
fsl_prefix = config.get("basic", "fsl_prefix")
schfile = os.path.join(os.path.split(os.path.abspath(__file__))[0], "bbr.sch")
magnet = db.get_xfm(subject, xfmname, xfmtype='magnet')
try:
cache = tempfile.mkdtemp()
epifile = magnet.reference.get_filename()
raw = db.get_anat(subject, type='raw').get_filename()
bet = db.get_anat(subject, type='brainmask').get_filename()
wmseg = db.get_anat(subject, type='whitematter').get_filename()
initmat = magnet.to_fsl(db.get_anat(subject, 'raw').get_filename())
with open(os.path.join(cache, 'init.mat'), 'w') as fp:
np.savetxt(fp, initmat, fmt='%f')
print('Running BBR')
cmd = '{fslpre}flirt -in {epi} -ref {raw} -dof 6 -cost bbr -wmseg {wmseg} -init {cache}/init.mat -omat {cache}/out.mat -schedule {schfile}'
        cmd = cmd.format(fslpre=fsl_prefix, schfile=schfile, cache=cache, raw=raw, wmseg=wmseg, epi=epifile)
if sp.call(cmd, shell=True) != 0:
raise IOError('Error calling BBR flirt')
x = np.loadtxt(os.path.join(cache, "out.mat"))
# Pass transform as FROM epi TO anat; transform will be inverted
# back to anat-to-epi, standard direction for pycortex internal
# storage by from_fsl
Transform.from_fsl(x, epifile, raw).save(subject, xfmname+"_auto", 'coord')
print('Saved transform as (%s, %s)'%(subject, xfmname+'_auto'))
finally:
shutil.rmtree(cache)
|
"""
NAME: Asim Soylu
MATRIKELNUMMER: 108019256229
"""
"""WAS TUT DAS PROGRAMM?
Das Programm liest und tokenisiert den Lexikons und den vom Benutzer eingegebenen Text (Input-Text).
Dann vergleicht es die Token des Benutzers mit den Token des Lexikons,
und bestimmt die Punktzahl für jedes Lexikon der Sprachen.
Schließlich ordnet es die Sprachen und gibt die Sprache mit der höchsten Punktzahl
als die wahrscheinliche Sprache des vom Benutzer eingegebenen Textes aus.
"""
################################
# Functions
################################
################################
# Read input from the lexicon
################################
def read_text(filename):
"""Funktion, die einen Dateinamen als Input nimmt
und den Inhalt dieser Datei in ein geeignetes Format einliest:
Input: filename (str): Dateinamen
Output: lexicon_tokens (list): Liste von Tokens"""
#öffnet, liest und schließt die Datei
with open (filename, 'r', encoding="utf-8") as f:
source_text = f.read()
#ruft die Funktion "wordpunct_tokenize" aus der Bibliothek "nltk" auf
from nltk import wordpunct_tokenize
#Tokenisierung
tokens = wordpunct_tokenize(source_text)
#macht die Lexicon-Tokens kleiner
lexicon_tokens = [words.lower() for words in tokens]
return lexicon_tokens
################################
# Take input from the user
################################
def take_input_tokenize():
"""Funktion, die den Benutzer nach einem Input-Text fragt,
diesen tokenisiert und verkleinert:
Input: -----
Return: user_ngrams (list): Liste der Benutzer-Ngramme in Kleinbuchstaben"""
#fragt den Benutzer nach einem Eingabe-Text
input_text = input("Bitte geben Sie einen Text:")
#ruft die Funktion "wordpunct_tokenize" aus der Bibliothek "nltk" auf
from nltk import wordpunct_tokenize
#tokenisierung
input_tokens = wordpunct_tokenize(input_text)
#macht die eingegebenen Wörter kleiner
user_tokens = [words.lower() for words in input_tokens]
return user_tokens
def compare_user_with_lexicon(user_tokens, lexicon_tokens):
"""Function that compares input tokens with the resource tokens
from 1000 most common words with the help of two for-loop:
Input:
1. user_tokens (list): Liste der Benutzer-Tokens
2. lexicon_tokens (list): Liste der Lexicon-Tokens
Output:
1. scores (list): Die Anzahl der übereinstimmenden Tokens """
#die Variable "scores" auf "0" setzen
scores = 0
#sucht das Element der user_tokens
for user_words in user_tokens:
#mit den Elementen der lexicon_tokens
for lexicon_words in lexicon_tokens:
#wenn es Übereinstimmungen zwischen zwei Daten gibt
if user_words == lexicon_words:
#erhöht die Punktzahl um +1
scores +=1
return scores
def rank(langscore):
"""Function that asks from user for an input-text
tokenizes and makes them lowered:
Input: langscore (dict): Dictionary der Sprachen und ihrer unsortierten Punktezahl
Output: lang_sorted (dict): Dictionary der Sprachen und ihrer sortierten Punktezahl"""
#definiert ein Dictionary namens "lang_sorted"
lang_sorted = {}
#definiert die Variable "listSorted" und sortiert das Dictionary langscore nach ihren "values" und kehrt sie um
listSorted = sorted(langscore, key=langscore.get, reverse=True)
#für jedes Element der Liste listSorted
for elt_sorted in listSorted:
#weist die sortierten Elemente der Liste "listSorted" dem Dictionary "lang_sorted" als Keys zu
#und die sortierten "values" des Dictionarys "langscore" in das Dictionary "lang_sorted" als "values"
lang_sorted[elt_sorted] = langscore[elt_sorted]
return lang_sorted
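#e.g. rank({"Deutsch": 2, "Englisch": 5}) returns {"Englisch": 5, "Deutsch": 2} (illustrative values)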
def run_script(filename):
"""Funktion, die alle weiteren Funktionen aufruft
Input:
1. filename (str): Dateinamen der einzulesenden Textdateien
Return: --
kein Rueckgabewert, aber Ausgabe der Ergebnisse auf der Konsole (mit prints)"""
#macht die User-Input zu Tokens
user_tokens = take_input_tokenize()
#definiert ein Dictionary namens "langscore" für jede Sprache und ihre Punktzahl
langscores = {}
#liest jeden Lexicontext jeder Sprache ein, macht sie zu Tokens
for lang in filename.keys():
lexicon_tokens = read_text(filename[lang])
#vergleicht Tokens vom User und Tokens aus den Lexicontexten und weist jeder einzelnen Sprache eine Punktzahl im Dictionary "langscores" zu
langscores[lang] = compare_user_with_lexicon(user_tokens, lexicon_tokens)
#ordnet die Punktzahlen der Sprachen ein
ranked_langscores = rank(langscores)
#prints die Sprachen und ihre Punktzahlen
print(ranked_langscores)
#gibt das erste Element des Dictionarys "ranked_langscores" aus
print("Nach den Ergebnissen ist die wahrscheinliche Sprache: "+ list(ranked_langscores.keys())[0])
return
################################
# Main program
################################
if __name__ == "__main__":
    #defines the dictionary "filename", which consists of the file paths
    filename = {
        "Deutsch": "resources/de/lexicon-de.txt",
        "Englisch": "resources/en/lexicon-en.txt",
        "Spanisch": "resources/es/lexicon-es.txt",
        "Niederländisch": "resources/nl/lexicon-nl.txt",
        "Polnisch": "resources/pl/lexicon-pl.txt"
    }
    #this function calls all further functions
run_script(filename)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='o3',
packages=find_packages(include=('o3',)),
url='https://github.com/mblomdahl/o3',
license='The Unlicense',
description='Hadoop-Airflow analytics',
# https://pypi.python.org/pypi?:action=list_classifiers
install_requires=[
'psycopg2',
'hdfs3',
'apache-airflow',
'ansible',
'netaddr',
'ipython',
'pandas',
'fastavro',
'pyhive'
],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3.6'
],
platforms=['POSIX']
)
|
# -*- coding: utf-8 -*-
import numpy as np
class Env(object):
def reset(self):
raise NotImplementedError()
def observation(self):
raise NotImplementedError()
def step(self):
raise NotImplementedError()
def reward(self):
raise NotImplementedError()
# physical/external base state of all entities
class EntityState(object):
def __init__(self):
# physical position
self.p_pos = None
# physical velocity
self.p_vel = None
# state of agents (including communication and internal/mental state)
class AgentState(EntityState):
def __init__(self):
super(AgentState, self).__init__()
# communication utterance
self.c = None
# action of the agent
class Action(object):
def __init__(self):
# physical action
self.u = None
# communication action
self.c = None
# properties and state of physical world entity
class Entity(object):
def __init__(self):
# name
self.name = ''
# properties:
self.size = 0.050
# entity can move / be pushed
self.movable = False
# entity collides with others
self.collide = True
# material density (affects mass)
self.density = 25.0
# color
self.color = None
# max speed and accel
self.max_speed = None
self.accel = None
# state
self.state = EntityState()
# mass
self.initial_mass = 1.0
@property
def mass(self):
return self.initial_mass
# properties of landmark entities
class Landmark(Entity):
def __init__(self):
super(Landmark, self).__init__()
# properties of agent entities
class Agent(Entity):
def __init__(self):
super(Agent, self).__init__()
# agents are movable by default
self.movable = True
self.collide_walls = False
self.collide_agents = False
# cannot send communication signals
self.silent = False
# cannot observe the world
self.blind = False
# physical motor noise amount
self.u_noise = None
# communication noise amount
self.c_noise = None
# control range
self.u_range = 1.0
# state
self.state = AgentState()
# action
self.action = Action()
# script behavior to execute
self.action_callback = None
class Wall(object):
    def __init__(self):
pass
class Map(object):
def __init__(self):
self.sizeX = None
self.sizeY = None
self.matrix = None
self.agents = []
self.landmarks = []
# multi-agent world
class World(object):
def __init__(self):
# list of agents and entities (can change at execution-time!)
self.agents = []
self.landmarks = []
# communication channel dimensionality
self.dim_c = 0
# position dimensionality
self.dim_p = 2
# color dimensionality
self.dim_color = 3
# simulation timestep
self.dt = 0.1
# physical damping
self.damping = 0.25
# contact response parameters
self.contact_force = 1e+2
self.contact_margin = 1e-3
# map of this world
self.map = None
# return all entities in the world
@property
def entities(self):
return self.agents + self.landmarks
# return all agents controllable by external policies
@property
def policy_agents(self):
return [agent for agent in self.agents if agent.action_callback is None]
# return all agents controlled by world scripts
@property
def scripted_agents(self):
return [agent for agent in self.agents if agent.action_callback is not None]
# update state of the world
def step(self):
# set actions for scripted agents
for agent in self.scripted_agents:
agent.action = agent.action_callback(agent, self)
# gather forces applied to entities
p_force = [None] * len(self.entities)
# apply agent physical controls
p_force = self.apply_action_force(p_force)
# apply environment forces
p_force = self.apply_environment_force(p_force)
# integrate physical state
self.integrate_state(p_force)
# update agent state
for agent in self.agents:
self.update_agent_state(agent)
# gather agent action forces
def apply_action_force(self, p_force):
# set applied forces
for i,agent in enumerate(self.agents):
if agent.movable:
#noise = np.random.randn(*agent.action.u.shape) * agent.u_noise if agent.u_noise else 0.0
p_force[i] = agent.action.u #+ noise
return p_force
# gather physical forces acting on entities
def apply_environment_force(self, p_force):
for agent_id, agent in enumerate(self.agents):
next_pos = agent.state.p_pos + p_force[agent_id].astype(np.int64)
for i, a in enumerate(self.agents):
if agent_id == i:
continue
if all(a.state.p_pos == next_pos):
p_force[agent_id] = np.zeros(self.dim_p)
agent.collide_agents = True
break
next_pos = self.map.coord2ind(next_pos, self.map.SIZE_X, self.map.SIZE_Y)
if self.map.matrix[next_pos[0], next_pos[1], 0] == 1:
p_force[agent_id] = np.zeros(self.dim_p)
agent.collide_walls = True
return p_force
# integrate physical state
def integrate_state(self, p_force):
for i,entity in enumerate(self.entities):
if not entity.movable: continue
entity.state.p_pos += p_force[i].astype(np.int64)
def update_agent_state(self, agent):
# set communication state (directly for now)
if agent.silent:
agent.state.c = np.zeros(self.dim_c)
else:
noise = np.random.randn(*agent.action.c.shape) * agent.c_noise if agent.c_noise else 0.0
agent.state.c = agent.action.c + noise
# get collision forces for any contact between two entities
def get_collision_force(self, entity_a, entity_b):
if (not entity_a.collide) or (not entity_b.collide):
return [None, None] # not a collider
if (entity_a is entity_b):
return [None, None] # don't collide against itself
# compute actual distance between entities
delta_pos = entity_a.state.p_pos - entity_b.state.p_pos
dist = np.sqrt(np.sum(np.square(delta_pos)))
# minimum allowable distance
dist_min = entity_a.size + entity_b.size
# softmax penetration
k = self.contact_margin
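        # logaddexp gives a smooth approximation of max(0, dist_min - dist): near zero
        # when the entities are separated, roughly the overlap depth when they penetrate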
penetration = np.logaddexp(0, -(dist - dist_min)/k)*k
force = self.contact_force * delta_pos / dist * penetration
force_a = +force if entity_a.movable else None
force_b = -force if entity_b.movable else None
return [force_a, force_b] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-06-08 22:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('gymkhana', '0006_form_available'),
]
operations = [
migrations.RemoveField(
model_name='form_available',
name='id',
),
migrations.AddField(
model_name='form_available',
name='roll',
field=models.CharField(default=2016001, max_length=7, primary_key=True, serialize=False),
),
]
|
# BSD 3 - Clause License
# Copyright(c) 2021, Zenotech
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and / or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import re
import math
import subprocess
import datetime
from .base import Scheduler
from mycluster.exceptions import SchedulerException, ConfigurationException
class SGE(Scheduler):
def scheduler_type(self):
return "sge"
def name(self):
return os.getenv("SGE_CLUSTER_NAME")
def queues(self):
# list all parallel env
# for parallel_env list queues associated
# Find first node with queue and record node config
queue_list = []
parallel_env_list = []
with os.popen("qconf -spl") as f:
for line in f:
parallel_env_list.append(line.strip())
for parallel_env in parallel_env_list:
with os.popen("qstat -pe " + parallel_env + " -U `whoami` -g c") as f:
f.readline() # read header
f.readline() # read separator
for line in f:
queue_name = line.split(" ")[0].strip()
# Check if user has permission to use queue
with os.popen("qstat -g c -U `whoami` -q " + queue_name) as f2:
try:
f2.readline()
f2.readline()
if len(f2.readline()):
queue_list.append(parallel_env + ":" + queue_name)
except:
pass
return queue_list
def node_config(self, queue_id):
# Find first node with queue and record node config
parallel_env = queue_id.split(":")[0]
queue_name = queue_id.split(":")[1]
host_group = 0
with os.popen("qconf -sq " + queue_name) as f:
for line in f:
if line.split(" ")[0] == "hostlist":
new_line = re.sub(" +", " ", line)
host_group = new_line.split(" ")[1]
config = {}
host_name = ""
found = False
        if host_group[0] == "@":
# Is a host group
with os.popen("qconf -shgrp_resolved " + host_group) as f:
for line in f:
for host_name in line.split(" "):
with os.popen("qhost -q -h " + host_name) as f:
header = f.readline() # read header
f.readline() # read separator
new_header = re.sub(" +", " ", header).strip()
# sge <=6.2u4 style
if (new_header.split(" ")[3]) == "LOAD":
for line in f:
if line[0] != " ":
name = line.split(" ")[0]
if name != "global":
new_line = re.sub(" +", " ", line).strip()
if new_line.split(" ")[3] != "-":
config["max task"] = int(
new_line.split(" ")[2]
)
config["max thread"] = int(
new_line.split(" ")[2]
)
config["max memory"] = new_line.split(
" "
)[4]
found = True
break
else:
for line in f:
if line[0] != " ":
name = line.split(" ")[0]
if name != "global":
new_line = re.sub(" +", " ", line).strip()
if new_line.split(" ")[3] != "-":
config["max task"] = int(
new_line.split(" ")[4]
)
config["max thread"] = int(
new_line.split(" ")[5]
)
config["max memory"] = new_line.split(
" "
)[7]
found = True
break
if found:
break
else:
# Is a host
host_name = host_group
with os.popen("qhost -q -h " + host_name) as f:
header = f.readline() # read header
f.readline() # read separator
new_header = re.sub(" +", " ", header).strip()
if (new_header.split(" ")[3]) == "LOAD": # sge <=6.2u4 style
for line in f:
if line[0] != " ":
name = line.split(" ")[0]
if name != "global":
new_line = re.sub(" +", " ", line).strip()
if new_line.split(" ")[3] != "-":
config["max task"] = int(new_line.split(" ")[2])
config["max thread"] = int(new_line.split(" ")[2])
config["max memory"] = new_line.split(" ")[4]
else:
config["max task"] = 0
config["max thread"] = 0
config["max memory"] = 0
else:
for line in f:
if line[0] != " ":
name = line.split(" ")[0]
if name != "global":
new_line = re.sub(" +", " ", line).strip()
if new_line.split(" ")[3] != "-":
config["max task"] = int(new_line.split(" ")[4])
config["max thread"] = int(new_line.split(" ")[5])
config["max memory"] = new_line.split(" ")[7]
else:
config["max task"] = 0
config["max thread"] = 0
config["max memory"] = 0
return config
def tasks_per_node(self, queue_id):
parallel_env = queue_id.split(":")[0]
queue_name = queue_id.split(":")[1]
tasks = 0
with os.popen("qconf -sq " + queue_name) as f:
for line in f:
if line.split(" ")[0] == "slots":
                    tasks = int(re.split(r"\W+", line)[1])
pe_tasks = tasks
with os.popen("qconf -sp " + parallel_env) as f:
try:
for line in f:
if line.split(" ")[0] == "allocation_rule":
try:
# This may throw exception as allocation rule
# may not always be an integer
                            pe_tasks = int(re.split(r"\W+", line)[1])
except ValueError as e:
raise SchedulerException("Error parsing SGE output")
except:
pass
return min(tasks, pe_tasks)
def available_tasks(self, queue_id):
# split queue id into queue and parallel env
# list free slots
free_tasks = 0
max_tasks = 0
parallel_env = queue_id.split(":")[0]
queue_name = queue_id.split(":")[1]
with os.popen(" qstat -pe " + parallel_env + " -U `whoami` -g c") as f:
f.readline() # read header
f.readline() # read separator
for line in f:
# remove multiple white space
new_line = re.sub(" +", " ", line)
qn = new_line.split(" ")[0]
if qn == queue_name:
free_tasks = int(new_line.split(" ")[4])
max_tasks = int(new_line.split(" ")[5])
return {"available": free_tasks, "max tasks": max_tasks}
def _min_tasks_per_node(self, queue_id):
"""
This function is used when requesting non exclusive use
as the parallel environment might enforce a minimum number
of tasks
"""
parallel_env = queue_id.split(":")[0]
queue_name = queue_id.split(":")[1]
tasks = 1
pe_tasks = tasks
with os.popen("qconf -sp " + parallel_env) as f:
try:
for line in f:
if line.split(" ")[0] == "allocation_rule":
# This may throw exception as allocation rule
# may not always be an integer
                        pe_tasks = int(re.split(r"\W+", line)[1])
except:
pass
return max(tasks, pe_tasks)
def create_submit(
self,
queue_id,
num_tasks,
job_name,
job_script,
wall_clock,
openmpi_args="-bysocket -bind-to-socket",
project_name="default",
tasks_per_node=None,
threads_per_task=1,
user_email=None,
qos=None,
exclusive=True,
output_name=None,
):
parallel_env = queue_id.split(":")[0]
queue_name = queue_id.split(":")[1]
if tasks_per_node is None:
tasks_per_node = self.tasks_per_node(queue_id)
num_nodes = int(math.ceil(float(num_tasks) / float(tasks_per_node)))
if threads_per_task is None:
threads_per_task = 1
if ":" not in wall_clock:
wall_clock = wall_clock + ":00:00"
if "mycluster-" in job_script:
job_script = self._get_data(job_script)
if output_name is None:
output_name = job_name + ".out"
# For exclusive node use total number of slots required
# is number of nodes x number of slots offer by queue
num_queue_slots = num_nodes * self.tasks_per_node(queue_id)
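        # e.g. 3 nodes on a queue offering 16 slots per node -> request 48 queue slots (illustrative numbers)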
if not exclusive:
if num_nodes == 1: # Assumes fill up rule
num_queue_slots = max(
tasks_per_node, self._min_tasks_per_node(queue_id)
)
template = self._load_template("sge.jinja")
script_str = template.render(
my_name=job_name,
my_script=job_script,
my_output=output_name,
user_email=user_email,
queue_name=queue_name,
parallel_env=parallel_env,
num_queue_slots=num_queue_slots,
num_tasks=num_tasks,
tpn=tasks_per_node,
num_threads_per_task=threads_per_task,
num_nodes=num_nodes,
project_name=project_name,
wall_clock=wall_clock,
openmpi_args=openmpi_args,
qos=qos,
exclusive=exclusive,
)
return script_str
def submit(
self, script_name, immediate=False, depends_on=None, depends_on_always_run=False
):
job_id = None
output = subprocess.run(
f"qsub -V -terse {script_name}",
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
)
if output.returncode == 0:
job_id = 0
out = output.stdout.decode("utf-8")
try:
                # qsub -terse prints just the job id on the first line of stdout
                job_id = int(out.strip().splitlines()[0])
return job_id
except:
raise SchedulerException("Error submitting job to SGE")
else:
raise SchedulerException(
f"Error submitting job to SGE: {output.stderr.decode('utf-8')}"
)
def list_current_jobs(self):
jobs = []
output = subprocess.run(
"qstat -u `whoami`",
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
)
if output.returncode == 0:
for line in output.stdout.decode("utf-8").splitlines():
if line.startswith("job-ID") or line.startswith("---"):
continue
job_info = re.sub(" +", " ", line.strip()).split(" ")
jobs.append(
{
"id": int(job_info[0]),
"queue": job_info[6],
"name": job_info[2],
"state": job_info[4],
}
)
else:
raise SchedulerException("Error fetching job queue listing")
return jobs
def get_job_details(self, job_id):
"""
Get full job and step stats for job_id
First check using sacct, then fallback to squeue
"""
stats_dict = {}
output = {}
with os.popen("qacct -j " + str(job_id)) as f:
try:
f.readline() # read header
for line in f:
new_line = re.sub(" +", " ", line.strip())
output[new_line.split(" ")[0]] = new_line.split(" ", 1)[1]
except:
pass
stats_dict["wallclock"] = datetime.timedelta(
seconds=int(output["ru_wallclock"])
)
stats_dict["mem"] = output["mem"]
stats_dict["cpu"] = datetime.timedelta(seconds=int(output["cpu"].split(".")[0]))
stats_dict["queue"] = output["granted_pe"] + ":" + output["qname"]
return stats_dict
def delete(self, job_id):
cmd = f"qdel {job_id}"
output = subprocess.run(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
)
if output.returncode != 0:
raise SchedulerException(f"Error cancelling job {job_id}")
|
# -*- coding: utf-8 -*-
import sys
import os
import subprocess
import time
def main(argv):
    pwd = os.path.curdir
    inputFileName = os.path.join(pwd, "input.txt")
    if len(argv) >= 1 and os.path.isdir(argv[0]):
        pwd = argv[0]
    if len(argv) >= 2 and os.path.isfile(argv[1]):
        inputFileName = argv[1]
    print("start to : " + pwd)
    executelog = open("executelog" + str(pwd) + str(int(time.time())) + ".txt", "w")
    hwfiles = os.listdir(pwd)
    for hwfile in hwfiles:
        if ".out" in hwfile:
            print(hwfile)
            executelog.write("start execute : " + hwfile + "\n")
            # run each compiled submission with the shared input file
            ll = subprocess.getoutput(str(os.path.join(pwd, hwfile)) + " < " + inputFileName)
            print(ll)
            executelog.write(ll + "\n")
            executelog.write("\n\n")
    executelog.close()
if __name__ == "__main__":
    main(sys.argv[1:])
|
from adafruit_blinka.microcontroller.generic_linux.libgpiod_pin import Pin
PA0 = Pin(0)
UART2_TX = PA0
PA1 = Pin(1)
UART2_RX = PA1
PA2 = Pin(2)
PA3 = Pin(3)
PA6 = Pin(6)
PA7 = Pin(7)
PA8 = Pin(8)
PA9 = Pin(9)
PA10 = Pin(10)
PA11 = Pin(11)
TWI0_SCL = PA11
PA12 = Pin(12)
TWI0_SDA = PA12
PA13 = Pin(13)
UART3_TX = PA13
PA14 = Pin(14)
UART3_RX = PA14
SPI1_SCLK = PA14
PA15 = Pin(15)
SPI1_MOSI = PA15
PA16 = Pin(16)
SPI1_MISO = PA16
PA17 = Pin(17)
PA18 = Pin(18)
PA19 = Pin(19)
PA20 = Pin(20)
PA21 = Pin(21)
PC0 = Pin(64)
SPI0_MOSI = PC0
PC1 = Pin(65)
SPI0_MISO = PC1
PC2 = Pin(66)
SPI0_SCLK = PC2
PC3 = Pin(67)
SPI0_CS = PC3
PC4 = Pin(68)
PC7 = Pin(71)
PD14 = Pin(110)
PG6 = Pin(198)
UART1_TX = PG6
PG7 = Pin(199)
UART1_RX = PG7
PG8 = Pin(200)
PG9 = Pin(201)
PG10 = Pin(202)
PG11 = Pin(203)
PG12 = Pin(204)
PG13 = Pin(205)
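# ordered as i2cId, sclId, sdaId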
i2cPorts = (
(0, TWI0_SCL, TWI0_SDA),
)
# ordered as spiId, sckId, mosiId, misoId
spiPorts = (
(0, SPI0_SCLK, SPI0_MOSI, SPI0_MISO),
(1, SPI1_SCLK, SPI1_MOSI, SPI1_MISO),
)
# ordered as uartId, txId, rxId
uartPorts = (
(3, UART3_TX, UART3_RX),
)
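# Usage sketch (assumption: this pin module is wired up as the active `board`
# module for an Adafruit Blinka port on this Allwinner SoC):
#
#   import board
#   import busio
#   spi = busio.SPI(board.SPI0_SCLK, MOSI=board.SPI0_MOSI, MISO=board.SPI0_MISO)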
|
import pytest
@pytest.fixture()
def pg_db(request):
import getpass
import orb
db = orb.Database('Postgres')
db.setName('orb_testing')
db.setHost('localhost')
db.setUsername(getpass.getuser())
db.setPassword('')
db.activate()
def fin():
db.disconnect()
request.addfinalizer(fin)
return db
@pytest.fixture()
def pg_sql(pg_db):
import orb
return orb.Connection.byName('Postgres')
@pytest.fixture(scope='session')
def pg_all_column_record(orb, TestAllColumns):
record = TestAllColumns(password='T3st1ng!')
return record
@pytest.fixture()
def pg_last_column_record(orb, TestAllColumns):
record = TestAllColumns.select().last()
return record
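# Usage sketch (assumptions: these fixtures live in a conftest.py so pytest
# injects them by argument name; `orb` and `TestAllColumns` are fixtures
# defined elsewhere in the suite):
#
# def test_pg_db_activates(pg_db):
#     assert pg_db is not None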
|
from typing import Optional
class Trie:
def __init__(self):
self.root = {}
def insert(self, word: str) -> None:
node = self.root
for c in word:
if c not in node:
node[c] = {}
node = node[c]
node['isWord'] = True
def search(self, word: str) -> bool:
node = self.find(word)
return node is not None and 'isWord' in node
def startsWith(self, prefix: str) -> bool:
return self.find(prefix) is not None
    def find(self, prefix: str) -> Optional[dict]:
node = self.root
for c in prefix:
if c not in node:
return None
node = node[c]
return node
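# Usage example for the Trie above:
if __name__ == '__main__':
    trie = Trie()
    trie.insert('apple')
    print(trie.search('apple'))    # True
    print(trie.search('app'))      # False: 'app' itself was never inserted
    print(trie.startsWith('app'))  # True: 'apple' shares the prefix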
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from exhibitionist.isubscriber import ISubscriber
from exhibitionist.mixins.pubsub import PubSubMixin
import unittest
class TestPubSub(unittest.TestCase):
def setUp(self):
self.pubsub = PubSubMixin()
def tearDown(self):
pass
@staticmethod
def pick_attr(l, attr):
return [getattr(x, attr) for x in l]
def test_sub(self):
# basic sub
h = ISubscriber()
self.pubsub.subscribe(h, "ch1")
self.assertTrue(h in self.pubsub.list_subs("ch1"))
def test_resub_no_dupe(self):
# basic sub
h = ISubscriber()
self.pubsub.subscribe(h, "ch1")
self.pubsub.subscribe(h, "ch1")
self.assertTrue(h in self.pubsub.list_subs("ch1"))
self.assertTrue(len(self.pubsub.list_subs("ch1")) == 1)
def test_sub_multi_channels(self):
# basic sub
h = ISubscriber()
h2 = ISubscriber()
self.pubsub.subscribe(h, "ch1")
self.pubsub.subscribe(h, "ch2")
self.assertTrue(h in self.pubsub.list_subs("ch1"))
self.assertTrue(h in self.pubsub.list_subs("ch2"))
self.pubsub.subscribe(h2, "ch1")
self.pubsub.subscribe(h2, "ch2")
self.assertTrue(h in self.pubsub.list_subs("ch1"))
self.assertTrue(h2 in self.pubsub.list_subs("ch1"))
self.assertTrue(h2 in self.pubsub.list_subs("ch2"))
def test_unsub(self):
h = ISubscriber()
h2 = ISubscriber()
self.pubsub.subscribe(h, "ch1")
self.pubsub.subscribe(h2, "ch1")
self.pubsub.subscribe(h, "ch2")
self.pubsub.subscribe(h, "ch3")
self.assertTrue(h in self.pubsub.list_subs("ch1"))
self.assertTrue(h in self.pubsub.list_subs("ch2"))
self.assertTrue(h in self.pubsub.list_subs("ch3"))
# specific ch
self.pubsub.unsubscribe(h, "ch1")
self.assertFalse(h in self.pubsub.list_subs("ch1"))
self.assertTrue(h in self.pubsub.list_subs("ch2"))
self.assertTrue(h2 in self.pubsub.list_subs("ch1"))
        # all channels (no channel argument unsubscribes everywhere)
self.pubsub.unsubscribe(h)
self.assertFalse(h in self.pubsub.list_subs("ch1"))
self.assertFalse(h in self.pubsub.list_subs("ch2"))
self.assertFalse(h in self.pubsub.list_subs("ch3"))
self.assertTrue(h2 in self.pubsub.list_subs("ch1"))
def test_list_channels(self):
h = ISubscriber()
h2 = ISubscriber()
self.pubsub.subscribe(h, "ch1")
self.assertTrue("ch1" in self.pubsub.list_channels())
self.assertFalse("ch2" in self.pubsub.list_channels())
self.pubsub.subscribe(h, "ch2")
self.assertTrue("ch1" in self.pubsub.list_channels())
self.assertTrue("ch2" in self.pubsub.list_channels())
|
"""structured organization of world release regions."""
import re
REGION_TREE = {
"WW": {
"aliases": ["worldwide", "world-wide", "int", "global"],
"sub": {
"NA": {
"aliases": ["north-america"],
"sub": {
"US": {"aliases": ["united-states", "usa"], "sub": {}},
"CA": {"aliases": ["canada", "can"], "sub": {}},
}
},
"SA": {
"aliases": ["south-america"],
"sub": {
"BR": {"aliases": ["bra", "brasil", "brazil"], "sub": {}},
"AR": {"aliases": ["arg", "argentina"], "sub": {}},
"MX": {"aliases": ["mex", "mexico"], "sub": {}},
}
},
"EU": {
"aliases": ["europe", "pal", "eur"],
"sub": {
"GR": {"aliases": ["germany", "ger", "de", "deu"], "sub": {}},
"PL": {"aliases": ["pol", "poland"], "sub": {}},
"FR": {"aliases": ["fra", "france"], "sub": {}},
"UK": {"aliases": ["gb", "united-kingdom", "great-britain", "england", "en"], "sub": {}},
"ES": {"aliases": ["spain", "esp"], "sub": {}},
"IT": {"aliases": ["ita", "italy"], "sub": {}},
"CZ": {"aliases": ["cze", "czech-republic"], "sub": {}},
"AS": {"aliases": ["austria", "aut"], "sub": {}},
"RS": {"aliases": ["russia", "rus"], "sub": {}},
"FI": {"aliases": ["fin", "finland"], "sub": {}},
"PT": {"aliases": ["prt", "portugal"], "sub": {}},
"IL": {"aliases": ["isr", "israel"], "sub": {}},
"SE": {"aliases": ["swe", "sweden"], "sub": {}},
}
},
"AS": {
"aliases": ["asia", "sea", "australasia"],
"sub": {
"TK": {"aliases": ["tur", "turkey"], "sub": {}},
"JP": {"aliases": ["japan", "jpn"], "sub": {}},
"KR": {"aliases": ["south-korea", "korea", "kor", "ko", "sk", "rok"], "sub": {}},
"CH": {"aliases": ["chn", "china"], "sub": {}},
"HK": {"aliases": ["hkg", "hong-kong"], "sub": {}},
"TL": {"aliases": ["thailand", "tha"], "sub": {}},
"TW": {"aliases": ["taiwan", "twn"], "sub": {}},
"IN": {"aliases": ["ind", "india"], "sub": {}},
"KP": {"aliases": ["prk", "north-korea"], "sub": {}},
}
},
"OC": {
"aliases": ["oceania", "australasia"],
"sub": {
"AU": {"aliases": ["aus", "australia"], "sub": {}},
"NZ": {"aliases": ["nzl", "new zealand"], "sub": {}},
}
},
"AF": {
"aliases": ["africa"],
"sub": {
"SA": {"aliases": ["south-africa"], "sub": {}},
}
}
}
},
"sub": {},
"aliases": [],
}
slug = re.compile("\s|_")
def search_region_tree(inp, curr_key=None, curr_node=None):
inp_u = inp.upper()
inp_l = slug.sub("-", inp).lower()
if not curr_node or not curr_key:
curr_key = "WW"
curr_node = REGION_TREE["WW"]
if inp_u == curr_key:
return curr_key
if inp_l in curr_node.get("aliases", []):
return curr_key
for k, sb in curr_node["sub"].items():
out = search_region_tree(inp, k, sb)
if out:
return out
return None
if __name__ == "__main__":
print(search_region_tree("BRA"))
print(search_region_tree("united STATES"))
print(search_region_tree("united KINGDOM"))
|
from math import sin, cos, tan, sinh, cosh, tanh, pi
import matplotlib.pyplot as plt
# First initialise the variables
# We want to go from -2pi to 2pi in 1000+1 steps
n = 1000
start = -2.0 * pi
step = 4.0 * pi / n
xtab = []
y1tab = []
y2tab = []
y3tab = []
y4tab = []
y5tab = []
y6tab = []
# Then generate the data
for i in range(n + 1):
x = start + i * step
xtab.append(x)
y1tab.append(sin(x))
y2tab.append(cos(x))
y3tab.append(tan(x))
y4tab.append(sinh(x))
y5tab.append(cosh(x))
y6tab.append(tanh(x))
# Generate the plots
# Make 2 rows of 3 graphs; subplot(231) means 2 rows, 3 columns, panel 1
plt.subplot(231)
plt.plot(xtab, y1tab)
plt.title('sin')
plt.subplot(232)
plt.plot(xtab, y2tab)
plt.title('cos')
plt.subplot(233)
plt.plot(xtab, y3tab)
plt.axis([-2.0 * pi, 2.0 * pi, -5.0, 5.0]) # setting scale:xmin,xmax,ymin,ymax
plt.title('tan')
# Second row: plot 4-6
plt.subplot(234)
plt.plot(xtab, y4tab)
plt.title('sinh')
plt.subplot(235)
plt.plot(xtab, y5tab)
plt.title('cosh')
plt.subplot(236)
plt.plot(xtab, y6tab)
plt.title('tanh')
# Show plot window
plt.show()
|
"""API client admin tools."""
import json
import requests
from tsdapiclient.client_config import ENV
from tsdapiclient.tools import handle_request_errors
@handle_request_errors
def get_tsd_api_key(
env: str,
pnum: str,
user_name: str,
password: str,
otp: str
) -> str:
headers = {'Content-Type': 'application/json'}
data = {'user_name': user_name, 'password': password, 'otp': otp}
url = '{0}/{1}/auth/tsd/api_key'.format(ENV[env], pnum)
print('GET: {0}'.format(url))
resp = requests.get(url, headers=headers, data=json.dumps(data))
return json.loads(resp.text)['api_key']
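# Usage sketch (assumptions: 'test' is a configured key in ENV and 'p11' a
# valid project number; neither is confirmed by this module):
#
# api_key = get_tsd_api_key('test', 'p11', 'p11-user', 'password', '123456')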
|
#!/usr/bin/python
import requests
from nagios.checkPlugin import CheckPlugin
from nagios.nagiosReturnValues import NagiosReturnValues
class CheckWebsite(CheckPlugin):
def check_snmp(self, settings):
# ignore snmp check
return True
@staticmethod
def make_request(url, request_type):
        if not url.startswith(("http://", "https://")):
            url = 'http://' + url
if request_type == 'GET' or request_type == '' or request_type is None:
result = requests.get(url)
elif request_type == 'POST':
result = requests.post(url)
else:
return NagiosReturnValues.state_unknown
if result.status_code == 200:
return NagiosReturnValues.state_ok
else:
            print('%s, Status: %d' % (url, result.status_code))
return NagiosReturnValues.state_critical
def check_website(self, host_address, website_settings):
if 'port' in website_settings:
port = website_settings['port']
host_address = '%s:%d' % (host_address, port)
if 'url' in website_settings:
request_url = website_settings['url']
            request_method = website_settings.get('method')
if not request_url.startswith('/'):
request_url = '/' + request_url
return self.make_request(host_address + request_url, request_method)
else:
return self.make_request(host_address, 'GET')
def check(self, settings):
websites = settings.get_http()
statuses = []
device_status = NagiosReturnValues.state_ok
host_address = settings.device['node']['ipaddr']
if not websites:
return NagiosReturnValues.state_ok
for website_settings in websites:
status = self.check_website(host_address, website_settings)
statuses.append(status)
if len(statuses) > 0:
device_status = self.get_device_status(statuses)
if device_status == NagiosReturnValues.state_ok:
            print('HTTP OK')
return device_status
if __name__ == '__main__':
CheckWebsite().start()
|
#
# PySNMP MIB module MICOM-MPANL-SIGNALING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MICOM-MPANL-SIGNALING-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:12:23 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection")
micom_oscar, = mibBuilder.importSymbols("MICOM-OSCAR-MIB", "micom-oscar")
mcmSysAsciiTimeOfDay, = mibBuilder.importSymbols("MICOM-SYS-MIB", "mcmSysAsciiTimeOfDay")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ModuleIdentity, Unsigned32, ObjectIdentity, Bits, Counter32, Integer32, TimeTicks, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, NotificationType, NotificationType, iso, Counter64, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Unsigned32", "ObjectIdentity", "Bits", "Counter32", "Integer32", "TimeTicks", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "NotificationType", "NotificationType", "iso", "Counter64", "MibIdentifier")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
micom_msm = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 19)).setLabel("micom-msm")
configuration = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1))
control = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 2))
statistics = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3))
status = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4))
mcmMSMProfileCfgGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 1))
mcmMSMProfileCfgGroupNodeID = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMProfileCfgGroupNodeID.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMProfileCfgGroupNodeID.setDescription('NAME = ; DESC = The value of this object represents a unique \\ identifier for an access device. (Operational); HELP = ; CAPABILITIES = NET_DISP, VPN_DISP;')
mcmMSMProfileCfgGroupCustomerID = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMProfileCfgGroupCustomerID.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMProfileCfgGroupCustomerID.setDescription('NAME = ; DESC = The value of this object uniquely identifies \\ a customer to which a given access device belongs. \\ (Operational); HELP = ; CAPABILITIES = NET_DISP, VPN_DISP;')
mcmMSMProfileCfgGroupDNAPrefix = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMProfileCfgGroupDNAPrefix.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMProfileCfgGroupDNAPrefix.setDescription('NAME = ; DESC = The value of this object specifies the prefix of the \\ Directory Network Address which uniquely identifies \\ the unit within the network topology. (Operational); HELP = ; CAPABILITIES = NET_DISP, VPN_DISP;')
nvmMSMProfileCfgGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 2))
nvmMSMProfileCfgGroupNodeID = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMProfileCfgGroupNodeID.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMProfileCfgGroupNodeID.setDescription('NAME = ; DESC = The value of this object represents a unique \\ identifier for an access device. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_DISP;')
nvmMSMProfileCfgGroupCustomerID = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMProfileCfgGroupCustomerID.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMProfileCfgGroupCustomerID.setDescription('NAME = ; DESC = The value of this object uniquely identifies \\ a customer to which a given access device belongs. \\ Also, this is used by the NAC in registering the \\ Egress table. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_DISP;')
nvmMSMProfileCfgGroupDNAPrefix = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 2, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMProfileCfgGroupDNAPrefix.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMProfileCfgGroupDNAPrefix.setDescription('NAME = ; DESC = The value of this object specifies the prefix of the \\ Directory Network Address which uniquely identifies \\ the unit within the network topology. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_DISP;')
mcmMSMDTELinkCfgTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3), )
if mibBuilder.loadTexts: mcmMSMDTELinkCfgTable.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgTable.setDescription('NAME = ; DESC = The MPANL DTE Signaling Line Parameters \\ table. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_DISP;')
mcmMSMDTELinkCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "mcmMSMDTELinkCfgIfIndex"))
if mibBuilder.loadTexts: mcmMSMDTELinkCfgEntry.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgEntry.setDescription('NAME = ; DESC = An entry in the MPANL DTE Signaling Line Parameters \\ table. (Operational); HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkCfgIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkCfgIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgIfIndex.setDescription('NAME = ; DESC = Ifindex associated with the MPANL DTE line \\ parameters.; HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkCfgMaxSubChannelRange = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(17, 255)).clone(63)).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkCfgMaxSubChannelRange.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgMaxSubChannelRange.setDescription('NAME = ; DESC = The value of this object represents the highest \\ DLCI value to be used for sub-channel (CID) allocation. \\ (Note that the current default value, 63, may change \\ depending on the VoFR agreements.) Note that user \\ DLCIs on the MPANL start at DLCI-17. This \\ information is signaled from the DTE to the DCE. \\ (Operational); HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkCfgDTEReceiverBW = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16000, 2048000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDTEReceiverBW.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDTEReceiverBW.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ throughput (in bits/sec) that the MPANL DCE is \\ capable of transmitting(not less than 16Kbs \\ and not exceeding the Access Link bandwidth). \\ Several SVCs inside the netlink may use this \\ bandwidth. Each SVC will consume a part of \\ this bandwidth.\\ (Operational); HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkCfgDCEReceiverBW = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16000, 2048000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDCEReceiverBW.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDCEReceiverBW.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ throughput (in bits/sec) that the MPANL DTE is \\ capable of transmitting(not less than 16Kbs \\ and not exceeding the Access Link bandwidth). \\ Several SVCs inside the netlink may use this \\ bandwidth. Each SVC will consume a part of this \\ bandwidth.\\ (Operational); HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkCfgDTEMaxFrameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 4100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDTEMaxFrameSize.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDTEMaxFrameSize.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ frame size (in bytes) that the MPANL DCE is capable \\ of transmitting. This value is signaled from the \\ DTE to the DCE where it is used by the VoFR procedure \\ in the wan driver to manage frame delay by fragmenting \\ VoFR frames. Multiple VoFR frames can constitute a frame \\ relay frame.\\ (Operational); HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkCfgDCEMaxFrameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 3, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 4100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDCEMaxFrameSize.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDTELinkCfgDCEMaxFrameSize.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ frame size (in bytes) that the MPANL DTE is capable \\ of transmitting. This value is signaled from the \\ DTE to the DCE where it is used by the VoFR procedure \\ in the wan driver to manage frame delay by fragmenting \\ VoFR frames. Multiple VoFR frames can constitute a frame \\ relay frame.\\ (Operational); HELP = ; CAPABILITIES = ;')
nvmMSMDTELinkCfgTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4), )
if mibBuilder.loadTexts: nvmMSMDTELinkCfgTable.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgTable.setDescription('NAME = ; DESC = The MPANL DTE Signaling Line Parameters \\ table. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_DISP;')
nvmMSMDTELinkCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "nvmMSMDTELinkCfgIfIndex"))
if mibBuilder.loadTexts: nvmMSMDTELinkCfgEntry.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgEntry.setDescription('NAME = ; DESC = An entry in the MPANL DTE Signaling Line Parameters \\ table. (Configuration); HELP = ; CAPABILITIES = ;')
nvmMSMDTELinkCfgIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMDTELinkCfgIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgIfIndex.setDescription('NAME = ; DESC = Ifindex associated with the MPANL DTE line \\ parameters.; HELP = ; CAPABILITIES = ;')
nvmMSMDTELinkCfgMaxSubChannelRange = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(17, 255)).clone(63)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMDTELinkCfgMaxSubChannelRange.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgMaxSubChannelRange.setDescription('NAME = ; DESC = The value of this object represents the highest \\ DLCI value to be used for sub-channel allocation. \\ (Note that the current default value, 63, may change \\ depending on the VoFR agreements.) Note that user \\ DLCIs on the MPANL start at DLCI-17. This \\ information is signaled from the DTE to the DCE. \\ (Configuration); HELP = ; CAPABILITIES = ;')
nvmMSMDTELinkCfgDTEReceiverBW = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16000, 2048000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDTEReceiverBW.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDTEReceiverBW.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ throughput (in bits/sec) that the MPANL DCE is \\ capable of transmitting(not less than 16Kbs \\ and not exceeding the Access Link bandwidth). \\ Several SVCs inside the netlink may use this \\ bandwidth. Each SVC will consume a part of \\ this bandwidth.\\ (Configuration); HELP = ; CAPABILITIES = ;')
nvmMSMDTELinkCfgDCEReceiverBW = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(16000, 2048000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDCEReceiverBW.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDCEReceiverBW.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ throughput (in bits/sec) that the MPANL DTE is \\ capable of transmitting(not less than 16Kbs \\ and not exceeding the Access Link bandwidth). \\ Several SVCs inside the netlink may use this \\ bandwidth. Each SVC will consume a part of this \\ bandwidth.\\ (Configuration); HELP = ; CAPABILITIES = ;')
nvmMSMDTELinkCfgDTEMaxFrameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 4100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDTEMaxFrameSize.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDTEMaxFrameSize.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ frame size (in bytes) that the MPANL DCE is capable \\ of transmitting. This value is signaled from the \\ DTE to the DCE where it is used by the VoFR procedure \\ in the wan driver to manage frame delay by fragmenting \\ VoFR frames. Multiple VoFR frames can constitute a frame \\ relay frame.\\ (Configuration); HELP = ; CAPABILITIES = ;')
nvmMSMDTELinkCfgDCEMaxFrameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 4100))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDCEMaxFrameSize.setStatus('mandatory')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgDCEMaxFrameSize.setDescription('NAME = ; DESC = The value of this object represents the maximum \\ frame size (in bytes) that the MPANL DTE is capable \\ of transmitting. This value is signaled from the \\ DTE to the DCE where it is used by the VoFR procedure \\ in the wan driver to manage frame delay by fragmenting \\ VoFR frames. Multiple VoFR frames can constitute a frame \\ relay frame.\\ (Configuration); CAPABILITIES = ;')
nvmMSMDTELinkCfgEntryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 1, 4, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("modify", 1)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: nvmMSMDTELinkCfgEntryStatus.setStatus('obsolete')
if mibBuilder.loadTexts: nvmMSMDTELinkCfgEntryStatus.setDescription('NAME = ; DESC = This object was obsoleted as of release \\ 3.0; and is included here only to support \\ backwards compatibility for software \\ versions prior to release 3.0. \\ This object is used in the MSM profile table \\ to add new rows, modify existing rows, \\ and delete existing rows. (Configuration); HELP = ; CAPABILITIES = ;')
mcmMSMStatsLAPFConnTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1), )
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnTable.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnTable.setDescription('NAME = ; DESC = The MPANL Signaling Statistics for LAP-F connections \\ table.; \\ HELP = ; CAPABILITIES = NET_CFG, VPN_DISP;')
mcmMSMStatsLAPFConnEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "mcmMSMStatsLAPFConnIfIndex"))
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnEntry.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnEntry.setDescription('NAME = ; DESC = An entry in the MPANL Signaling Statistics for LAP-F \\ connections table.; \\ HELP = ; CAPABILITIES = ;')
mcmMSMStatsLAPFConnIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnIfIndex.setDescription('NAME = ; DESC = The value of this object corresponds to a value \\ of IfIndex, and is used as the index into the \\ MSM Statistics for LAP-F connections table.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsLAPFConnReestablished = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnReestablished.setStatus('obsolete')
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnReestablished.setDescription('NAME = ; DESC = This object was obsoleted as of release \\ 3.0; and is included here only to support \\ backwards compatibility for software \\ versions prior to release 3.0. \\ Counter representing the number of times the LAP-F \\ connection was attempted to be re-established. This counter \\ indicates the number of CONNECT_REQ messages \\ sent to the LAP-F module.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsLAPFConnEstablished = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnEstablished.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnEstablished.setDescription('NAME = ; DESC = Counter representing the number of times the LAP-F \\ connection was established. This counter indicates \\ the number of CONNECT_IND messages sent by \\ the LAP-F module.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsLAPFConnDisconnects = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnDisconnects.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsLAPFConnDisconnects.setDescription('NAME = ; DESC = Counter representing the number of times a LAP-F \\ connection was broken. This counter indicates \\ the number of DISCONNECT_IND messages sent by \\ the LAP-F module.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsProfileTxCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsProfileTxCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsProfileTxCnt.setDescription('NAME = ; DESC = Counter represents the number of profiles \\ sent without the occurrence of a restart request.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsProfileRxCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsProfileRxCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsProfileRxCnt.setDescription('NAME = ; DESC = Counter represents the number of profiles \\ received without the occurrence of a restart request.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsRestartReqTxCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsRestartReqTxCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsRestartReqTxCnt.setDescription('NAME = ; DESC = Counter representing the number \\ of restart requests sent.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsRestartReqRxCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsRestartReqRxCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsRestartReqRxCnt.setDescription('NAME = ; DESC = Counter representing the number \\ of restart requests received.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsDnaAssociationCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsDnaAssociationCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsDnaAssociationCnt.setDescription('NAME = ; DESC = Counter representing the number \\ of DNA prefix associations.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsDnaDeassociationCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsDnaDeassociationCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsDnaDeassociationCnt.setDescription('NAME = ; DESC = Counter representing the number \\ of DNA prefix de-associations.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsPANLInfoElementsTxCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsPANLInfoElementsTxCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsPANLInfoElementsTxCnt.setDescription('NAME = ; DESC = Counter representing the number \\ of PANL information elements sent.; HELP = ; CAPABILITIES = ;')
mcmMSMStatsPANLInfoElementsRxCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMStatsPANLInfoElementsRxCnt.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMStatsPANLInfoElementsRxCnt.setDescription('NAME = ; DESC = Counter representing the number \\ of PANL information elements received.; HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkStatsTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 2), )
if mibBuilder.loadTexts: mcmMSMDTELinkStatsTable.setStatus('deprecated')
if mibBuilder.loadTexts: mcmMSMDTELinkStatsTable.setDescription('NAME = ; DESC = This table was deprecated as of release \\ 3.0; and is included here only to support \\ backwards compatibility for software \\ versions prior to release 3.0. \\ The MPANL DTE Signaling Line Parameters \\ table. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_DISP;')
mcmMSMDTELinkStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 2, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "mcmMSMDTELinkStatsIfIndex"))
if mibBuilder.loadTexts: mcmMSMDTELinkStatsEntry.setStatus('deprecated')
if mibBuilder.loadTexts: mcmMSMDTELinkStatsEntry.setDescription('NAME = ; DESC = An entry in the MPANL DTE Signaling Line Parameters \\ table. (Operational); HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkStatsIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkStatsIfIndex.setStatus('deprecated')
if mibBuilder.loadTexts: mcmMSMDTELinkStatsIfIndex.setDescription('NAME = ; DESC = Ifindex associated with the MPANL DTE line \\ parameter statistics and status.; HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkStatsStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkStatsStatus.setStatus('deprecated')
if mibBuilder.loadTexts: mcmMSMDTELinkStatsStatus.setDescription('NAME = ; DESC = MPANL Signaling protocol status of the DTE Line.; HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkStatsLocalCompName = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 33))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkStatsLocalCompName.setStatus('deprecated')
if mibBuilder.loadTexts: mcmMSMDTELinkStatsLocalCompName.setDescription('NAME = ; DESC = Local sites Component name; HELP = ; CAPABILITIES = ;')
mcmMSMDTELinkStatsRemoteCompName = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 3, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 33))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDTELinkStatsRemoteCompName.setStatus('deprecated')
if mibBuilder.loadTexts: mcmMSMDTELinkStatsRemoteCompName.setDescription('NAME = ; DESC = Remote sites Component name; HELP = ; CAPABILITIES = ;')
mcmMSMLinkStatusTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1), )
if mibBuilder.loadTexts: mcmMSMLinkStatusTable.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusTable.setDescription('NAME = ; DESC = The PANL Link Status Parameters \\ table.; HELP = ; CAPABILITIES = NET_DISP, VPN_DISP;')
mcmMSMLinkStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "mcmMSMLinkStatusIfIndex"))
if mibBuilder.loadTexts: mcmMSMLinkStatusEntry.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusEntry.setDescription('NAME = ; DESC = An entry in the PANL Link Status Parameters \\ table.; HELP = ; CAPABILITIES = ;')
mcmMSMLinkStatusIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLinkStatusIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusIfIndex.setDescription('NAME = ; DESC = Ifindex associated with the PANL link \\ parameter status table.; HELP = ; CAPABILITIES = ;')
mcmMSMLinkStatusInterfaceType = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dte", 1), ("dce", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLinkStatusInterfaceType.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusInterfaceType.setDescription('NAME = ; DESC = Indicates the link status interface type.; HELP = ; CAPABILITIES = ;')
mcmMSMLinkStatusLAPFStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLinkStatusLAPFStatus.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusLAPFStatus.setDescription('NAME = ; DESC = The status of the LAPF link connection \\ for the PANL signaling protocol.; HELP = ; CAPABILITIES = ;')
mcmMSMLinkStatusLocalCompName = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 33))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLinkStatusLocalCompName.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusLocalCompName.setDescription("NAME = ; DESC = Local site's Component name. \\ This is a function of the MIB II \\ sysName and a unique PANL ifindex; HELP = ; CAPABILITIES = ;")
mcmMSMLinkStatusRemoteCompName = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 33))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLinkStatusRemoteCompName.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusRemoteCompName.setDescription("NAME = ; DESC = Remote site's Component name; HELP = ; CAPABILITIES = ;")
mcmMSMLinkStatusRemoteGenCfgType = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("micomAccessDevice", 1), ("nortelAccessDevice", 2), ("passportSwitch", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLinkStatusRemoteGenCfgType.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusRemoteGenCfgType.setDescription('NAME = ; DESC = Indicates the device type of the remote unit. \\ (i.e. The type of the unit directly connected \\ to this particular netlink DTE/DCE interface.; HELP = ; CAPABILITIES = ;')
mcmMSMLinkStatusPANLStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("down", 1), ("up", 2), ("restart", 3), ("incompatible", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLinkStatusPANLStatus.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMLinkStatusPANLStatus.setDescription('NAME = ; DESC = Indicates the current PANL status.; HELP = ; CAPABILITIES = ;')
mcmMSMDCELinkStatusTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2), )
if mibBuilder.loadTexts: mcmMSMDCELinkStatusTable.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusTable.setDescription('NAME = ; DESC = The PANL DCE Link Status Parameters \\ table.; HELP = ; CAPABILITIES = NET_DISP, VPN_DISP;')
mcmMSMDCELinkStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "mcmMSMDCELinkStatusIfIndex"))
if mibBuilder.loadTexts: mcmMSMDCELinkStatusEntry.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusEntry.setDescription('NAME = ; DESC = An entry in the PANL DCE Link Status Parameters \\ table.; HELP = ; CAPABILITIES = ;')
mcmMSMDCELinkStatusIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDCELinkStatusIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusIfIndex.setDescription('NAME = ; DESC = Ifindex associated with the PANL DCE link \\ parameter status.; HELP = ; CAPABILITIES = ;')
mcmMSMDCELinkStatusRemoteNodeId = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteNodeId.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteNodeId.setDescription('NAME = ; DESC = DCE Remote Node Id.; HELP = ; CAPABILITIES = ;')
mcmMSMDCELinkStatusRemoteCustId = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteCustId.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteCustId.setDescription('NAME = ; DESC = DCE Remote Customer Id.; HELP = ; CAPABILITIES = ;')
mcmMSMDCELinkStatusRemoteRxBw = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1600, 2048000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteRxBw.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteRxBw.setDescription('NAME = ; DESC = DCE Remote Receiving Bandwidth.; HELP = ; CAPABILITIES = ;')
mcmMSMDCELinkStatusRemoteMaxFrameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 4100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteMaxFrameSize.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteMaxFrameSize.setDescription('NAME = ; DESC = DCE Remote Maximum Frame Size.; HELP = ; CAPABILITIES = ;')
mcmMSMDCELinkStatusRemoteDLCIRange = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(17, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteDLCIRange.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDCELinkStatusRemoteDLCIRange.setDescription('NAME = ; DESC = The highest DLCI value to be used for the \\ remote side.; HELP = ; CAPABILITIES = ;')
mcmMSMDNAStatusTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 3), )
if mibBuilder.loadTexts: mcmMSMDNAStatusTable.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDNAStatusTable.setDescription('NAME = ; DESC = The Associated DNA Status Parameters \\ table.; HELP = ; CAPABILITIES = NET_DISP, VPN_DISP;')
mcmMSMDNAStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 3, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "mcmMSMDNAStatusPrefixNumber"))
if mibBuilder.loadTexts: mcmMSMDNAStatusEntry.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDNAStatusEntry.setDescription('NAME = ; DESC = An entry in the associated DNA status \\ table.; HELP = ; CAPABILITIES = ;')
mcmMSMDNAStatusPrefixNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 3, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 31))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDNAStatusPrefixNumber.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDNAStatusPrefixNumber.setDescription('NAME = ; DESC = DNA Prefix number of the attached device \\ (directly or cascaded).; HELP = ; CAPABILITIES = ;')
mcmMSMDNAStatusIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDNAStatusIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDNAStatusIfIndex.setDescription('NAME = ; DESC = Ifindex value showing the appropriate DCE \\ PANL interface corresponding to the specified \\ DNA prefix.; HELP = ; CAPABILITIES = ;')
mcmMSMDNAStatusAssociation = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 4, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("associatedDNA", 1), ("deassociatedDNA", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMDNAStatusAssociation.setStatus('mandatory')
if mibBuilder.loadTexts: mcmMSMDNAStatusAssociation.setDescription('NAME = ; DESC = DNA Association Status corresponding to \\ the specified DNA Prefix.; HELP = ; CAPABILITIES = ;')
mcmMSMLAPFConnectionsCntrTable = MibTable((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 2, 1), )
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrTable.setStatus('obsolete')
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrTable.setDescription('NAME = ; DESC = This table was obsoleted as of release \\ 3.0; and is included here only to support \\ backwards compatibility for software \\ versions prior to release 3.0. \\ The MSM LAP-F connections counter reset table.; HELP = ; CAPABILITIES = NET_OPER, VPN_OPER;')
mcmMSMLAPFConnectionsCntrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 2, 1, 1), ).setIndexNames((0, "MICOM-MPANL-SIGNALING-MIB", "mcmMSMLAPFConnectionsCntrIndex"))
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrEntry.setStatus('obsolete')
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrEntry.setDescription('NAME = ; DESC = An entry in the MSM LAP-F connections counter \\ reset table.; HELP = ; CAPABILITIES = ;')
mcmMSMLAPFConnectionsCntrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrIndex.setStatus('obsolete')
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrIndex.setDescription('NAME = ; DESC = The value of this object corresponds to a value \\ of IfIndex, and is used as the index into the \\ MSM LAP-F connections counter reset table. \\ This index specifies the particular row of counter \\ objects to be reset, and has the identical value as \\ mcmMSMStatsLAPFConnIfIndex.; HELP = ; CAPABILITIES = ;')
mcmMSMLAPFConnectionsCntrAction = MibTableColumn((1, 3, 6, 1, 4, 1, 335, 1, 4, 19, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("reset", 1)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrAction.setStatus('obsolete')
if mibBuilder.loadTexts: mcmMSMLAPFConnectionsCntrAction.setDescription('NAME = ; DESC = Setting this object to reset(1) will zero \\ the values of a specific row of counters in \\ the MPANL Signaling Statistics for LAP-F \\ connections table.; HELP = When set to reset(1), a row containing \\ the following counter objects are reset: \\ mcmMSMStatsLAPFConnReestablished \\ mcmMSMStatsLAPFConnEstablished \\ mcmMSMStatsLAPFConnDisconnects; CAPABILITIES = ;')
mcmMSMProfileReceivedFromPassport = NotificationType((1, 3, 6, 1, 4, 1, 335, 1, 4, 19) + (0,1)).setObjects(("MICOM-SYS-MIB", "mcmSysAsciiTimeOfDay"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMLinkStatusIfIndex"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMDTELinkStatsRemoteCompName"))
if mibBuilder.loadTexts: mcmMSMProfileReceivedFromPassport.setDescription('NAME = ; DESC = PANL Signaling Protocol established. \\ Severity Level: INFORMATIVE.; HELP = Profile is received from Passport. This is a Normal\\ condition.;')
mcmMpanlInterfaceLinkUp = NotificationType((1, 3, 6, 1, 4, 1, 335, 1, 4, 19) + (0,2)).setObjects(("MICOM-SYS-MIB", "mcmSysAsciiTimeOfDay"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMLinkStatusIfIndex"))
if mibBuilder.loadTexts: mcmMpanlInterfaceLinkUp.setDescription('NAME = ; DESC = This trap will be generated when the LAPF link \\ connection for the PANL signaling comes up. \\ Severity Level: INFORMATIVE.; HELP = The MSM LAPF link connection Is up. Normal MSM LAPF \\ Link establishment.;')
mcmMpanlInterfaceLinkDown = NotificationType((1, 3, 6, 1, 4, 1, 335, 1, 4, 19) + (0,3)).setObjects(("MICOM-SYS-MIB", "mcmSysAsciiTimeOfDay"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMLinkStatusIfIndex"))
if mibBuilder.loadTexts: mcmMpanlInterfaceLinkDown.setDescription("NAME = ; DESC = This trap will be generated when the LAPF link \\ connection for the PANL signaling goes down. \\ Severity Level: CRITICAL.; HELP = Please check device configuration, and connection.\\ If problem persist, You may need field \\ personnel's assistance.;")
mcmMpanlPrefixDNAhasNotBeenConfigured = NotificationType((1, 3, 6, 1, 4, 1, 335, 1, 4, 19) + (0,4)).setObjects(("MICOM-SYS-MIB", "mcmSysAsciiTimeOfDay"))
if mibBuilder.loadTexts: mcmMpanlPrefixDNAhasNotBeenConfigured.setDescription('NAME = ; DESC = This trap will be generated from the MPANL \\ signaling module. It will trigger if the \\ DNA has not been configured. \\ Severity Level: CRITICAL.; HELP = Please provision(configure) the DNA address \\ of the access device. This may imply the DNA address \\ is missing.;')
mcmMpanlPrefixDNAChangedWithoutDeassociation = NotificationType((1, 3, 6, 1, 4, 1, 335, 1, 4, 19) + (0,5)).setObjects(("MICOM-SYS-MIB", "mcmSysAsciiTimeOfDay"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMDNAStatusPrefixNumber"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMDNAStatusIfIndex"))
if mibBuilder.loadTexts: mcmMpanlPrefixDNAChangedWithoutDeassociation.setDescription('NAME = ; DESC = This trap will be generated from the PANL \\ signaling module. It will trigger if the \\ latest association will displace the earlier \\ association with a different netlink. \\ Severity Level: INFORMATIVE.; HELP = Please verify the DNA association number.;')
mcmMpanlIncompatibleType = NotificationType((1, 3, 6, 1, 4, 1, 335, 1, 4, 19) + (0,6)).setObjects(("MICOM-SYS-MIB", "mcmSysAsciiTimeOfDay"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMLinkStatusIfIndex"), ("MICOM-MPANL-SIGNALING-MIB", "mcmMSMLinkStatusRemoteGenCfgType"))
if mibBuilder.loadTexts: mcmMpanlIncompatibleType.setDescription('NAME = ; DESC = This trap will be generated from the PANL \\ signaling module. It will trigger if a micom \\ access device tries to attempt to communicate \\ with a nortel access device, which is prohibited. \\ Severity Level: CRITICAL.; HELP = It is noted that micom access device does not \\ communicate with the nortel access device.;')
mibBuilder.exportSymbols("MICOM-MPANL-SIGNALING-MIB", mcmMSMProfileCfgGroup=mcmMSMProfileCfgGroup, statistics=statistics, mcmMSMDCELinkStatusRemoteCustId=mcmMSMDCELinkStatusRemoteCustId, mcmMSMStatsDnaDeassociationCnt=mcmMSMStatsDnaDeassociationCnt, mcmMSMDTELinkCfgDTEMaxFrameSize=mcmMSMDTELinkCfgDTEMaxFrameSize, mcmMSMLinkStatusLocalCompName=mcmMSMLinkStatusLocalCompName, mcmMSMDCELinkStatusTable=mcmMSMDCELinkStatusTable, mcmMSMDTELinkCfgDCEReceiverBW=mcmMSMDTELinkCfgDCEReceiverBW, mcmMpanlIncompatibleType=mcmMpanlIncompatibleType, mcmMSMDTELinkCfgEntry=mcmMSMDTELinkCfgEntry, mcmMSMProfileCfgGroupCustomerID=mcmMSMProfileCfgGroupCustomerID, mcmMSMStatsPANLInfoElementsRxCnt=mcmMSMStatsPANLInfoElementsRxCnt, mcmMpanlInterfaceLinkUp=mcmMpanlInterfaceLinkUp, nvmMSMDTELinkCfgTable=nvmMSMDTELinkCfgTable, mcmMpanlInterfaceLinkDown=mcmMpanlInterfaceLinkDown, mcmMSMDTELinkStatsRemoteCompName=mcmMSMDTELinkStatsRemoteCompName, nvmMSMProfileCfgGroupDNAPrefix=nvmMSMProfileCfgGroupDNAPrefix, mcmMSMLAPFConnectionsCntrTable=mcmMSMLAPFConnectionsCntrTable, mcmMSMDCELinkStatusIfIndex=mcmMSMDCELinkStatusIfIndex, mcmMSMLinkStatusEntry=mcmMSMLinkStatusEntry, mcmMSMStatsLAPFConnEntry=mcmMSMStatsLAPFConnEntry, mcmMSMLinkStatusInterfaceType=mcmMSMLinkStatusInterfaceType, mcmMSMDNAStatusIfIndex=mcmMSMDNAStatusIfIndex, mcmMSMProfileCfgGroupNodeID=mcmMSMProfileCfgGroupNodeID, mcmMSMLinkStatusLAPFStatus=mcmMSMLinkStatusLAPFStatus, nvmMSMProfileCfgGroupCustomerID=nvmMSMProfileCfgGroupCustomerID, mcmMSMDTELinkCfgTable=mcmMSMDTELinkCfgTable, mcmMSMDCELinkStatusEntry=mcmMSMDCELinkStatusEntry, mcmMSMLinkStatusTable=mcmMSMLinkStatusTable, mcmMSMProfileReceivedFromPassport=mcmMSMProfileReceivedFromPassport, control=control, mcmMSMDNAStatusEntry=mcmMSMDNAStatusEntry, mcmMSMLinkStatusRemoteGenCfgType=mcmMSMLinkStatusRemoteGenCfgType, mcmMSMLinkStatusIfIndex=mcmMSMLinkStatusIfIndex, mcmMSMStatsLAPFConnReestablished=mcmMSMStatsLAPFConnReestablished, mcmMSMProfileCfgGroupDNAPrefix=mcmMSMProfileCfgGroupDNAPrefix, mcmMSMDCELinkStatusRemoteDLCIRange=mcmMSMDCELinkStatusRemoteDLCIRange, mcmMpanlPrefixDNAChangedWithoutDeassociation=mcmMpanlPrefixDNAChangedWithoutDeassociation, mcmMSMDTELinkStatsStatus=mcmMSMDTELinkStatsStatus, mcmMSMDTELinkStatsTable=mcmMSMDTELinkStatsTable, mcmMSMDNAStatusAssociation=mcmMSMDNAStatusAssociation, mcmMSMLAPFConnectionsCntrIndex=mcmMSMLAPFConnectionsCntrIndex, mcmMSMDTELinkCfgIfIndex=mcmMSMDTELinkCfgIfIndex, nvmMSMDTELinkCfgMaxSubChannelRange=nvmMSMDTELinkCfgMaxSubChannelRange, mcmMSMDNAStatusTable=mcmMSMDNAStatusTable, mcmMSMDTELinkStatsLocalCompName=mcmMSMDTELinkStatsLocalCompName, mcmMSMStatsLAPFConnDisconnects=mcmMSMStatsLAPFConnDisconnects, mcmMSMStatsProfileTxCnt=mcmMSMStatsProfileTxCnt, mcmMSMDTELinkCfgDTEReceiverBW=mcmMSMDTELinkCfgDTEReceiverBW, status=status, nvmMSMDTELinkCfgDCEReceiverBW=nvmMSMDTELinkCfgDCEReceiverBW, mcmMSMStatsLAPFConnEstablished=mcmMSMStatsLAPFConnEstablished, mcmMSMDTELinkCfgDCEMaxFrameSize=mcmMSMDTELinkCfgDCEMaxFrameSize, mcmMSMDCELinkStatusRemoteNodeId=mcmMSMDCELinkStatusRemoteNodeId, mcmMSMStatsLAPFConnTable=mcmMSMStatsLAPFConnTable, mcmMSMStatsRestartReqTxCnt=mcmMSMStatsRestartReqTxCnt, mcmMSMDNAStatusPrefixNumber=mcmMSMDNAStatusPrefixNumber, mcmMSMDCELinkStatusRemoteRxBw=mcmMSMDCELinkStatusRemoteRxBw, mcmMSMLinkStatusPANLStatus=mcmMSMLinkStatusPANLStatus, nvmMSMDTELinkCfgDCEMaxFrameSize=nvmMSMDTELinkCfgDCEMaxFrameSize, mcmMSMStatsLAPFConnIfIndex=mcmMSMStatsLAPFConnIfIndex, nvmMSMProfileCfgGroupNodeID=nvmMSMProfileCfgGroupNodeID, 
nvmMSMProfileCfgGroup=nvmMSMProfileCfgGroup, nvmMSMDTELinkCfgIfIndex=nvmMSMDTELinkCfgIfIndex, mcmMSMStatsDnaAssociationCnt=mcmMSMStatsDnaAssociationCnt, nvmMSMDTELinkCfgEntryStatus=nvmMSMDTELinkCfgEntryStatus, mcmMSMDCELinkStatusRemoteMaxFrameSize=mcmMSMDCELinkStatusRemoteMaxFrameSize, mcmMSMLAPFConnectionsCntrAction=mcmMSMLAPFConnectionsCntrAction, mcmMSMLinkStatusRemoteCompName=mcmMSMLinkStatusRemoteCompName, mcmMSMDTELinkStatsIfIndex=mcmMSMDTELinkStatsIfIndex, micom_msm=micom_msm, mcmMSMStatsPANLInfoElementsTxCnt=mcmMSMStatsPANLInfoElementsTxCnt, mcmMSMDTELinkStatsEntry=mcmMSMDTELinkStatsEntry, mcmMSMDTELinkCfgMaxSubChannelRange=mcmMSMDTELinkCfgMaxSubChannelRange, nvmMSMDTELinkCfgDTEReceiverBW=nvmMSMDTELinkCfgDTEReceiverBW, mcmMpanlPrefixDNAhasNotBeenConfigured=mcmMpanlPrefixDNAhasNotBeenConfigured, mcmMSMStatsRestartReqRxCnt=mcmMSMStatsRestartReqRxCnt, mcmMSMLAPFConnectionsCntrEntry=mcmMSMLAPFConnectionsCntrEntry, nvmMSMDTELinkCfgEntry=nvmMSMDTELinkCfgEntry, mcmMSMStatsProfileRxCnt=mcmMSMStatsProfileRxCnt, nvmMSMDTELinkCfgDTEMaxFrameSize=nvmMSMDTELinkCfgDTEMaxFrameSize, configuration=configuration)
|
import re
from operator import itemgetter
from collections import namedtuple, defaultdict
from .dumper_interface import Dumper as BaseDumper
class Dumper(BaseDumper):
__ignore_list__ = 'migrations'
column_record = namedtuple('Record', ['name', 'ttype', 'precision',
'nullable', 'default'])
foreign_key_record = namedtuple('Record', ['to_table', 'column',
'ref_key', 'name',
'on_update', 'on_delete'])
mapping = {
'BIGINT': 'big_integer',
'BLOB': 'binary',
'BOOLEAN': 'boolean',
'CHAR': 'char',
'DATE': 'date',
'DATETIME': 'datetime',
'DECIMAL': 'decimal',
'DOUBLE': 'double',
'ENUM': 'enum',
'FLOAT': 'float',
'INTEGER': 'integer',
'JSON': 'json',
'LONGTEXT': 'long_text',
'MEDIUMINT': 'medium_int',
'MEDIUMTEXT': 'medium_text',
'SMALLINT': 'small_int',
'TEXT': 'text',
'TIME': 'time',
'TINYINT': 'tiny_int',
'TIMESTAMP WITHOUT TIME ZONE': 'timestamp',
'CHARACTER VARYING': 'string',
'BIGSERIAL': 'big_increments',
'SERIAL': 'increments'
}
def handle_column(self, columns):
statements = []
for column in columns:
column_buffer = []
name = column.name
ttype = self.mapping[column.ttype.upper()]
precision = column.precision
nullable = column.nullable
default = column.default
# handle auto increments
pk = False
v = re.match(r"nextval\('(\w+)'::regclass\)", default or '')
if v and v.groups()[0].endswith(name + '_seq'):
pk = True
ttype = 'increments'
# dump to orator schema syntax
if not pk and precision:
column_buffer.append(
'table.{ttype}({name}, {precision})'.format(
ttype=ttype, name=repr(name),
precision=repr(precision)))
else:
column_buffer.append('table.{ttype}({name})'.format(
ttype=ttype, name=repr(name)))
if nullable != 'NO':
column_buffer.append('.nullable()')
if default is not None:
flag = True
# ignore timestamp type default value CURRENT_TIMESTAMP(6)
if ttype == 'timestamp' and 'timestamp' in default:
flag = False
                # skip function-call defaults such as now() or nextval(...)
                if re.match(r'\w+\(.*\)', default):
flag = False
if flag:
default = default.split(':')[0].strip("''")
if default.isdigit():
default = int(default)
                    elif re.match(r"^\d+?\.\d+?$", default) is not None:
default = float(default)
else:
default = "'{}'".format(default)
column_buffer.append('.default({})'.format(default))
statements.append(''.join(column_buffer))
return statements
def handle_index(self, indexes):
statements = []
for name, index in sorted(indexes.items(), key=itemgetter(0)):
ttype = index['ttype']
if ttype == 'primary':
name = None
statements.append(
'table.{}({}, name={})'.format(ttype, repr(index['columns']),
repr(name)))
return statements
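    # Illustrative output (a sketch, assuming the index shapes built by
    # list_indexes below): an entry like
    #   {'users_pkey': {'columns': ['id'], 'ttype': 'primary'}}
    # is rendered by handle_index() as "table.primary(['id'], name=None)".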
def handle_foreign_key(self, foreign_keys):
statements = []
for foreign_key in foreign_keys:
local_key = foreign_key.column
ref_key = foreign_key.ref_key
to_table = foreign_key.to_table
on_update = foreign_key.on_update
on_delete = foreign_key.on_delete
statement = 'table.foreign({}).references({}).on({})'.format(
repr(local_key), repr(ref_key), repr(to_table))
            # PostgreSQL reports referential actions as one-letter codes
            # ('a' no action, 'r' restrict, 'c' cascade); 'c' is assumed here
            if on_update == 'c':
                statement += ".on_update('cascade')"
            if on_delete == 'c':
                statement += ".on_delete('cascade')"
statements.append(statement)
return statements
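    # Illustrative output (a sketch with hypothetical names): a
    # foreign_key_record with column='user_id', ref_key='id',
    # to_table='users', on_update='c', on_delete='c' is rendered as
    #   "table.foreign('user_id').references('id').on('users')"
    # followed by ".on_update('cascade').on_delete('cascade')".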
def list_tables(self):
"""list all table_names from specified database
rtype [str]
"""
sql = self._grammar._list_tables()
result = self._conn.select(sql)
return filter(
lambda table_name: table_name not in self.__ignore_list__,
map(itemgetter(0), result))
def list_columns(self, table_name):
"""list column in table
rtype [namedtuple]"""
sql = self._grammar._list_columns(table_name)
result = self._conn.select(sql)
return [self.column_record(*r) for r in result]
def list_indexes(self, table_name):
"""list index in table"""
sql = self._grammar._list_indexes(table_name)
result = self._conn.select(sql)
indexes = defaultdict(lambda: {'columns': [], 'ttype': 'index'})
for r in result:
index = indexes[r[0]]
columns = self._conn.select(self._grammar._show_index(r[0]))
            # each result row is assumed to carry the column name in its first field
            index['columns'].extend(r[0] for r in columns)
if r[1].startswith('CREATE UNIQUE'):
index['ttype'] = 'unique'
if r[0].endswith('pkey'):
index['ttype'] = 'primary'
return indexes
def list_foreign_keys(self, table_name):
"""list foreign key from specified table"""
sql = self._grammar._list_foreign_keys(table_name)
result = self._conn.select(sql)
return [self.foreign_key_record(*r) for r in result]
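# Illustrative end-to-end sketch for handle_column (hypothetical record; not
# part of the original module): a column described by
#   Dumper.column_record(name='age', ttype='INTEGER', precision=None,
#                        nullable='YES', default='18')
# would be dumped as "table.integer('age').nullable().default(18)".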
|
from django.db import models
# Create your models here.
from django.urls import reverse
class Tournament(models.Model):
"""
Model representing a Tournament, a set of tournament rounds
"""
name = models.CharField(max_length=200, verbose_name='Name', help_text='Enter the name of the tournament (e.g. John Doe Memorial)')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.name
class TournamentRound(models.Model):
"""
Every tournament page in the view, name is on the tab
name, format and date_scheduled should be the uniqueness
"""
name = models.CharField(max_length=200, verbose_name='Name', help_text='Enter the name of this round of the tournament (Default is Round #)')
scheduled_date = models.DateField(verbose_name='Date Scheduled', null=True, blank=True, help_text='Date this round of the tournament was supposed to be played')
data = models.CharField(max_length=516, null=True, blank=True, help_text='Data such as username and password used to login to your clubs player data store (used by your plugin)')
format_plugin = models.ForeignKey('FormatPlugin', verbose_name='Format', on_delete=models.SET_NULL, null=True, blank=True, help_text='Select the scoring format for this round')
tournament = models.ForeignKey('Tournament', verbose_name='Tournament Played', on_delete=models.SET_NULL, null=True, blank=True, help_text='Select the tournament')
courses = models.ManyToManyField('Course', verbose_name='Courses', blank=True, help_text='Select the courses players are playing and set the default for the card')
course_tees = models.ManyToManyField('CourseTee', verbose_name='Course and tee', blank=True, help_text='Select the courses and tees players are playing')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.name+' - '+self.format_plugin.name+' - '+self.scheduled_date.strftime('%m/%d/%Y')
class Round(models.Model):
"""
Model representing a Round. This with scores creates a single page of the tournament view
    Each player has a round in a tournament; a little ambiguous, but it'll make sense some day
Each round has many scores
"""
handicap_index = models.DecimalField(max_digits=3, verbose_name='Handicap Index', decimal_places=1, help_text='Enter the players handicap index at time of tournament')
course_handicap = models.IntegerField(verbose_name='Course Handicap', help_text='Enter the course handicap at time of tournament')
total_out = models.IntegerField(verbose_name='OUT', null=True, blank=True, help_text='Enter the score of the front 9 holes')
total_out_style = models.CharField(max_length=200, verbose_name='Style for total out gross style', null=True, blank=True, help_text='Enter the background-color for the cell in gross view')
total_out_net = models.IntegerField(verbose_name='Front 9 Net', null=True, blank=True, help_text='Enter the net score for the front nine')
total_out_net_style = models.CharField(max_length=200, verbose_name='Style for total out net style', null=True, blank=True, help_text='Enter the background-color for the cell in net view')
total_in = models.IntegerField(verbose_name='IN', null=True, blank=True, help_text='Enter the score of the back 9 holes')
total_in_style = models.CharField(max_length=200, verbose_name='Style for total in gross style', null=True, blank=True, help_text='Enter the background-color for the cell in gross view')
total_in_net = models.IntegerField(verbose_name='Back 9 Net', null=True, blank=True, help_text='Enter the net score for the back 9')
total_in_net_style = models.CharField(max_length=200, verbose_name='Style for total in net style', null=True, blank=True, help_text='Enter the background-color for the cell in net view')
total = models.IntegerField(verbose_name='Total', null=True, blank=True, help_text='Enter the total score for the round')
total_style = models.CharField(max_length=200, verbose_name='Style for total gross style', null=True, blank=True, help_text='Enter the background-color for the cell in gross view')
    net = models.IntegerField(verbose_name='Net', null=True, blank=True, help_text='Enter the net score for the round')
net_style = models.CharField(max_length=200, verbose_name='Style for total net style', null=True, blank=True, help_text='Enter the background-color for the cell in net view')
player = models.ForeignKey('Player', verbose_name='Player Id')
tournament_round = models.ForeignKey('TournamentRound', verbose_name='Tournament Round', on_delete=models.SET_NULL, null=True, blank=True, help_text='map to tournament round')
scorecard = models.ForeignKey('Scorecard', verbose_name='Scorecard')
course_tee = models.ForeignKey('CourseTee', verbose_name='Course and Tee', null=True, blank=True, help_text='Course and Tee This Round was Played on')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.player.name+' - '+self.tournament_round.tournament.name+' - '+self.tournament_round.name+' - '+self.tournament_round.scheduled_date.strftime('%m/%d/%Y')
class Scorecard(models.Model):
"""
Model representing a Scorecard is a ForeignKey to rounds
"""
start_time = models.DateTimeField(verbose_name='Date Started', null=True, blank=True, help_text='Select the date this round was started')
finish_time = models.DateTimeField(verbose_name='Date Finished', null=True, blank=True, help_text='Select the date this round was finished')
external_scorer = models.CharField(max_length=200, verbose_name='External Scorer Name', null=True, blank=True, help_text='Enter the name of the scorer if it is not a player')
external_attest = models.CharField(max_length=200, verbose_name='External Attest Name', null=True, blank=True, help_text='Enter the name of the attest if it is not a player')
scorer = models.ForeignKey('Player', related_name='player_scorer', verbose_name='Scorer Player', null=True, blank=True, help_text='Enter the player that kept score')
attest = models.ForeignKey('Player', related_name='player_attest', verbose_name='Attest Player', null=True, blank=True, help_text='Enter the player that attests the score')
course = models.ForeignKey('Course', related_name='course_played', verbose_name='Course Played', null=True, blank=True, help_text='Enter the course for this scorecard')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
try:
roundStr = ''
rounds = Round.objects.filter(scorecard=self.id)
for r in rounds:
roundStr = roundStr+r.player.name+' '
        except Exception:
roundStr = ''
        return 'Date: '+self.start_time.strftime('%m/%d/%Y')+', Tee Time: '+self.start_time.strftime('%H:%M')+', '+roundStr
class Score(models.Model):
"""
Model representing a single raw score. These values will be altered in a tournament
when calibrated with a format, course, handicap, tee handicap
"""
score = models.IntegerField(verbose_name='Score', help_text='Enter the score for the hole')
score_style = models.CharField(max_length=200, verbose_name='Style for gross score', null=True, blank=True, help_text='Enter the background-color for the cell in gross view')
score_net = models.IntegerField(verbose_name='Score Net', null=True, blank=True, help_text='Enter the net score for the hole')
score_net_style = models.CharField(max_length=200, verbose_name='Style for net score', null=True, blank=True, help_text='Enter the background-color for the cell in net view')
hole_played = models.IntegerField(verbose_name='Hole Played', null=True, blank=True, help_text='Enter the hole number played (e.g. in shotgun start if this is hole 16, but the second hole played enter 2)')
    skin_net = models.IntegerField(verbose_name='Skin Net', default=0, help_text='Is this score evaluated as a net skin? This changes with each scorecard submitted (1:yes, 0:no)')
    skin = models.IntegerField(verbose_name='Skin', default=0, help_text='Is this score evaluated as a skin? This changes with each scorecard submitted (1:yes, 0:no)')
tee = models.ForeignKey('Tee', verbose_name='Hole and Tee Id')
round = models.ForeignKey('Round', null=True, blank=True, verbose_name='Round for this score')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
        return 'Date: '+self.round.scorecard.start_time.strftime('%m/%d/%Y')+', Tee Time: '+self.round.scorecard.start_time.strftime('%H:%M')+', Hole: '+self.tee.hole.course.name + ' #' + str(self.tee.hole.number) + ' "' + str(self.score) + '" - ' + self.round.player.name
class Course(models.Model):
"""
Model representing a course this is the sum of all course tees
"""
name = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the name of the Course (e.g. Callaway Gardens)')
priority = models.IntegerField(verbose_name='Priority', default=-1, help_text='Lowest number greater than 0 will be listed first in selecting format')
default = models.BooleanField(verbose_name='Default', default=False, help_text='Set a default for faster starts to putting scores in')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.name
class CourseTee(models.Model):
"""
Model representing a single Tee for a course
"""
name = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the name of the Course and Tee box (e.g. Callaway Gardens - White)')
priority = models.IntegerField(verbose_name='Priority', default=-1, help_text='Highest priority will be listed first in selecting format')
default = models.BooleanField(verbose_name='Default', default=False, help_text='Set a default for faster starts to putting scores in')
    slope = models.IntegerField(verbose_name='Slope', default=113, help_text='Enter the slope for this course and tee')
color = models.CharField(max_length=200, verbose_name='Tee Color', help_text='Enter the number associated with the tee color')
    course = models.ForeignKey('Course', verbose_name='Course Id')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.course.name+' - '+self.name
class Tee(models.Model):
"""
Model representing a single Tee for a single hole for a course
"""
yardage = models.IntegerField(verbose_name='Yardage', help_text='Enter the yardage for the tee')
par = models.PositiveSmallIntegerField(verbose_name='Par', help_text='Enter the par for this tee')
handicap = models.PositiveSmallIntegerField(verbose_name='Handicap', help_text='Enter the handicap for this tee')
hole = models.ForeignKey('Hole', verbose_name='Hole Id', null=True, on_delete=models.SET_NULL)
course_tee = models.ForeignKey('CourseTee', verbose_name='Course Tee Id')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.course_tee.course.name+' - '+self.course_tee.name+' #'+str(self.hole.number)
class Hole(models.Model):
"""
Model representing a single hole for a course
"""
name = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the name of the hole')
number = models.IntegerField(help_text='Enter the number of the hole')
course = models.ForeignKey('Course', on_delete=models.CASCADE, verbose_name='Course this hole belongs to')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.course.name+' #'+str(self.number)+': '+self.name
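# Illustrative object graph (hypothetical values; assumes a Django shell with
# migrations applied) showing how the course models above fit together:
#
#   course = Course.objects.create(name='Callaway Gardens', priority=1)
#   hole_1 = Hole.objects.create(course=course, number=1, name='Opening')
#   white = CourseTee.objects.create(course=course, name='White', slope=113, color='1')
#   Tee.objects.create(course_tee=white, hole=hole_1, yardage=380, par=4, handicap=7)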
class Club(models.Model):
"""
Model representing the club probably a configuration item because there can only be 1
"""
name = models.CharField(max_length=200, help_text='Enter the name of the club')
    logo = models.ImageField(max_length=200, null=True, blank=True, help_text='Club logo image')
default_tournament_name = models.CharField(max_length=200, null=True, blank=True, help_text='Enter default prefix for a tournament name')
web_site = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the web site for the club')
data = models.CharField(max_length=516, null=True, blank=True, help_text='Data such as username and password used to login to your clubs player data store (used by your plugin)')
players_last_updated = models.DateTimeField(null=True, blank=True, help_text='Enter the date the players handicaps were last updated')
player_plugin = models.ForeignKey('PlayerPlugin', null=True, blank=True, verbose_name='Player Plugin Id')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.name
class Player(models.Model):
"""
Model representing a Player
"""
club_member_number = models.IntegerField(verbose_name='Club Member Number', default=-1, help_text='Enter the players club member number (GHIN number)')
name = models.CharField(max_length=200, help_text='Enter the name of the player')
handicap_index = models.DecimalField(max_digits=3, verbose_name='Current Handicap Index', decimal_places=1, help_text='Enter the handicap index')
high_handicap_index = models.DecimalField(max_digits=3, verbose_name='High Handicap Index', decimal_places=1, help_text='Enter the high handicap index')
low_handicap_index = models.DecimalField(max_digits=3, verbose_name='Low Handicap Index', decimal_places=1, help_text='Enter the low handicap index')
last_updated = models.DateTimeField(verbose_name='Last Updated', null=True, blank=True, help_text='Last time the player plugin was used to get this player')
data = models.CharField(max_length=516, null=True, blank=True, help_text='Data such as address, phone number, age')
priority = models.IntegerField(verbose_name='Priority', default=-1, help_text='Highest priority will be listed first in selecting format')
club = models.ForeignKey('Club', null=True, blank=True, verbose_name='Club')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return str(self.club_member_number)+': '+self.name
class Activity(models.Model):
"""
Model representing recent activity from the app
"""
title = models.CharField(max_length=40, null=True, blank=True, help_text='Enter the title for this activity')
details = models.CharField(max_length=500, null=True, blank=True, help_text='Enter the details for this activity')
suspicious = models.BooleanField(verbose_name='Suspicious', default=False, help_text='Is this suspicious activity')
notes = models.CharField(max_length=500, null=True, blank=True, help_text='Enter the notes for this activity')
user = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the user for this activity')
date_time = models.DateTimeField(verbose_name='Date', null=True, blank=True, help_text='Enter the date for this activity')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.title
class RoundImportPlugin(models.Model):
"""
Model representing a Round Importer
The round importer is used to import files uploaded by a user to the database
"""
name = models.CharField(max_length=200, help_text='Enter the name of the format')
description = models.CharField(max_length=512, null=True, blank=True, help_text='Enter the description of the round importer')
version = models.IntegerField(verbose_name="Version", default=1, null=True, blank=True, help_text="Incrementing version number to keep names unique")
class_archive = models.FileField(upload_to="uploads/roundimportplugin", null=True, help_text="The file uploaded by the user")
class_archive_name = models.CharField(max_length=200, null=True, blank=True, help_text="The name of the file uploaded by the user")
class_module = models.FileField(upload_to="roundimportplugins", null=True, help_text='The class module file')
class_module_name = models.CharField(max_length=200, null=True, blank=True, help_text='The name of the class module provided by the user')
    class_name = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the name of the class within the module')
priority = models.IntegerField(verbose_name='Priority', default=-1, help_text='Highest priority will be listed first in selecting importer')
data = models.CharField(max_length=516, null=True, blank=True, help_text='Data such as username and password used to login to your clubs player data store (used by your plugin)')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
        return self.name+' v'+str(self.version)+' '+self.class_name
class PlayerPlugin(models.Model):
"""
Model representing the plugins that can communicate with external player stores
"""
name = models.CharField(verbose_name="Name", max_length=200, null=True, blank=True, help_text='Enter the name of the plugin')
description = models.CharField(max_length=512, null=True, blank=True, help_text='Enter the description of the format')
version = models.IntegerField(verbose_name="Version", default=1, null=True, blank=True, help_text="Incrementing version number to keep names unique")
class_archive = models.FileField(upload_to="uploads/playerplugin", null=True, help_text="The file uploaded by the user")
class_archive_name = models.CharField(max_length=200, null=True, blank=True, help_text="The name of the file uploaded by the user")
class_module = models.FileField(upload_to="playerplugins", null=True, help_text='The class module file')
class_module_name = models.CharField(max_length=200, null=True, blank=True, help_text='The name of the class module provided by the user')
class_name = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the name of the class within the module')
priority = models.IntegerField(verbose_name='Priority', default=-1, help_text='Highest priority will be listed first in selecting format')
data = models.CharField(max_length=516, null=True, blank=True, help_text='Data such as username and password used to login to your clubs player data store (used by your plugin)')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.name
#return self.name+' v'+str(self.version)+' '+self.class_name
class FormatPlugin(models.Model):
"""
Model representing a Tournament Format
The tournament format is used to calculate all scores and cell styles for gross and net
    Currently also used for payout, but that will probably become a separate plugin
"""
name = models.CharField(max_length=200, help_text='Enter the name of the format')
description = models.CharField(max_length=512, null=True, blank=True, help_text='Enter the description of the format')
version = models.IntegerField(verbose_name="Version", default=1, null=True, blank=True, help_text="Incrementing version number to keep names unique")
class_archive = models.FileField(upload_to="uploads/formatplugin", null=True, help_text="The file uploaded by the user")
class_archive_name = models.CharField(max_length=200, null=True, blank=True, help_text="The name of the file uploaded by the user")
class_module = models.FileField(upload_to="formatplugins", null=True, help_text='The class module file')
class_module_name = models.CharField(max_length=200, null=True, blank=True, help_text='The name of the class module provided by the user')
    class_name = models.CharField(max_length=200, null=True, blank=True, help_text='Enter the name of the class within the module')
priority = models.IntegerField(verbose_name='Priority', default=-1, help_text='Highest priority will be listed first in selecting format')
data = models.CharField(max_length=516, null=True, blank=True, help_text='Data such as username and password used to login to your clubs player data store (used by your plugin)')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
return self.name
#return self.name+' v'+str(self.version)+' '+self.class_name
class PayoutPlugin(models.Model):
"""
Model representing the plugins that will calculate payout for the overall tournament
Payout plugin gets a ton of data each time a scorecard is submitted
This plugin needs to return players that are paid (overall tournament and per round) for net scores, gross scores, skins, values for pins, number of drawings, others (magic holes, hole in one)
"""
name = models.CharField(max_length=200, help_text="Enter the name of the plugin")
version = models.IntegerField(verbose_name="Version", default=1, null=True, blank=True, help_text="Incrementing version number to keep names unique")
filename = models.CharField(verbose_name="Filename", max_length=200, null=True, blank=True, help_text='Name of the module (filename) containing the class of your plugin')
    class_name = models.CharField(max_length=200, help_text="Enter the name of the class within the module")
data = models.CharField(max_length=516, null=True, blank=True, help_text='Data such as username and password used to login to your clubs player data store (used by your plugin)')
def __str__(self):
"""
String for representing the Model object (in Admin site etc.)
"""
        return self.name+' v'+str(self.version)+' '+self.class_name |
import json
from django.conf import settings
from azure.storage.queue import QueueService
import base64
def send_tweet(tweet_message, in_reply_to, entity):
queue_name = settings.AZURE_QUEUE_NAME
queue_service = QueueService(
account_name=settings.TWITTERBOT_STORAGE_ACCOUNT_NAME,
account_key=settings.TWITTERBOT_STORAGE_ACCOUNT_KEY
)
queue_service.create_queue(queue_name)
queue_message = {
'id': entity['id'],
'tweet': {
'status': tweet_message,
'in_reply_to_status_id': in_reply_to
},
'percentiles': entity['percentiles']
}
queue_service.put_message(
queue_name,
base64.b64encode(json.dumps(queue_message).encode('utf-8')).decode('utf-8')
)
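# Illustrative usage (hypothetical values; assumes Django settings provide
# AZURE_QUEUE_NAME plus the TWITTERBOT_* storage credentials, and that the
# entity dict carries the 'id' and 'percentiles' keys read above):
#
#   entity = {'id': 42, 'percentiles': {'p50': 0.5, 'p90': 0.9}}
#   send_tweet('Hello from the bot!', in_reply_to=1234567890, entity=entity)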
|
"""
Game twenty-four points: practice kids' mental arithmetic ability
Rule:
two/four players show two/one cards simultaneously and
try to combine the four card values (1-10; J, Q, K count as one) with
3 arithmetic operations (only the four integer operators + - * /),
using each card exactly once, to reach 24.
Whoever gives a solution first wins all four cards back (declaring "no solution" is also a result).
Finally, the player with the most cards wins!
Example:
4 7 A 5 -> 4*7-5+1=24
Why is 24 chosen? 1*2*3*4=24, and 24 is highly composite (it has more divisors than any smaller number)!
This python script searches for solutions;
the total card count and the target result (24 in this game) are parameterized,
so it could be extended to 5 cards targeting 120, but f() needs to be rewritten!
Currently tested only with python 2.x; running 2to3 should make it work with python 3.x
Author: Qingfeng Xia
License: BSD 4 clause
2012-12-10
"""
from __future__ import print_function
from itertools import combinations, product,permutations
N=4 # count of cards
M=24
op=['+','-','*','/'] # since the calculation is done in float, there is no need to use //
d=[float(i) for i in range(1,10)]
def f(tp,top,M,expr=False):
"""this function will evaluate one digits and arithmetic organization,
Valid only for N=4
"""
#e1='(({0[0]}{1[0]}{0[1]}){1[1]}{0[2]}){1[2]}{0[3]}=={2}'.format(tp,top,M) #not necessary for permutation
e2='({0[0]}{1[0]}({0[1]}{1[1]}{0[2]})){1[2]}{0[3]}=={2}'.format(tp,top,M)
#print e2
if expr:
r=[]
#if eval(e1): r.append(e1)
if eval(e2): r.append(e2)
return r
else:
#return eval(e1) or eval(e2)
return eval(e2)
def test_expr(td,M=24):
""" print possible solution for td: tuple of four card point
int division is hard to test for a%b==0, so change to float division
"""
l=[]
dlist=[float(d) for d in td]
for tp in permutations(dlist): #each card is used once
for top in product(op,repeat=N-1): #using product
l+= f(tp,top,M,True)
if len(l)==0:
print(td,'this number tuple can not make',M)
return []
else:
return l
def test():
top=('-','-','*')
tp=('3','4','5','6')
print(test_expr(tp,24))
print(f(tp,top,24))
#print product(op,repeat=N-1)
print('End of test')
def findnosolutiontuple():
for td in combinations(d,N):
l=[]
for tp in permutations(td):
l.append( any([f(tp,top,M) for top in product(op,repeat=N-1) ] ) )
if not any(l):
            print('this number tuple can not make', M, td)
if __name__ == "__main__" :
test()
findnosolutiontuple()
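# Illustrative run (hand-checked against f() above): for the docstring deal
# 4 7 A 5, test_expr((4, 7, 1, 5)) returns a list containing
# '(1.0+(4.0*7.0))-5.0==24', i.e. 1+4*7-5 = 24.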
|
"""Module contains objects to access BASIC DNA assembly parts from within the
pacakge."""
import basicsynbio as bsb
from Bio.Seq import Seq
from .main import PartLinkerCollection
from ..cam import seqrecord_hexdigest
STANDARD_LINKERS = {
"L1": "CTCGTTACTTACGACACTCCGAGACAGTCAGAGGGTATTTATTGAACTAGTCC",
"L2": "CTCGATCGGTGTGAAAAGTCAGTATCCAGTCGTGTAGTTCTTATTACCTGTCC",
"L3": "CTCGATCACGGCACTACACTCGTTGCTTTATCGGTATTGTTATTACAGAGTCC",
"L4": "CTCGAGAAGTAGTGCCACAGACAGTATTGCTTACGAGTTGATTTATCCTGTCC",
"L5": "CTCGGTATTGTAAAGCACGAAACCTACGATAAGAGTGTCAGTTCTCCTTGTCC",
"L6": "CTCGAACTTTTACGGGTGCCGACTCACTATTACAGACTTACTACAATCTGTCC",
"LMP": "CTCGGGTAAGAACTCGCACTTCGTGGAAACACTATTATCTGGTGGGTCTCTGTCC",
"LMS": "CTCGGGAGACCTATCGGTAATAACAGTCCAATCTGGTGTAACTTCGGAATCGTCC",
"LF1": "CTCGGGCTCGGGCTCCGAAAACTTGTACTTCCAGGGATCGGGCTCCGGGTCC",
"LF2": "CTCGGGCTCGGGCTCCCTGGAAGTTCTGTTTCAAGGTCCATCGGGCTCCGGGTCC",
"LF3": "CTCGGGCTCGGGCTCCGGATCTGGTTCAGGTTCAGGATCGGGCTCCGGGTCC",
"LF4": "CTCGGGCTCGGGCTCCGGATCAGGATCTGGTTCAGGTTCAGGATCGGGCTCCGGGTCC",
"LF5": "CTCGGGCTCGGGCTCCGGATCAGGATCTGGTTCAGGTTCAGGATCAGGATCGGGCTCCGGGTCC",
"LF6": "CTCGGCCGAAGCGGCTGCTAAAGAAGCAGCTGCTAAAGAGGCGGCCGCCAAGGCAGGGTCC",
}
UTR_RBS_LINKERS = {
"UTR1-RBS1": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCACACAGGACTAGTCC",
"UTR1-RBS2": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAAAAGAGGGGAAATAGTCC",
"UTR1-RBS3": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAAAAGAGGAGAAATAGTCC",
"UTR1-RBS-A01": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCTGGGGAGGTAGTCC",
"UTR1-RBS-A02": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCCGGGGAGGTAGTCC",
"UTR1-RBS-A03": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCTGAGGAGGTAGTCC",
"UTR1-RBS-A04": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCCAGGGAGGTAGTCC",
"UTR1-RBS-A05": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCCCGGGAGGTAGTCC",
"UTR1-RBS-A06": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCCGAGGAGGTAGTCC",
"UTR1-RBS-A07": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCTCAGGAGGTAGTCC",
"UTR1-RBS-A08": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCCAAGGAGGTAGTCC",
"UTR1-RBS-A09": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCTAGGGAGGTAGTCC",
"UTR1-RBS-A10": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCCCAGGAGGTAGTCC",
"UTR1-RBS-A11": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCTCGGGAGGTAGTCC",
"UTR1-RBS-A12": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCTAAGGAGGTAGTCC",
"UTR2-RBS1": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCACACAGGACTAGTCC",
"UTR2-RBS2": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAAAAGAGGGGAAATAGTCC",
"UTR2-RBS3": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAAAAGAGGAGAAATAGTCC",
"UTR2-RBS-A01": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCTGGGGAGGTAGTCC",
"UTR2-RBS-A02": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCCGGGGAGGTAGTCC",
"UTR2-RBS-A03": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCTGAGGAGGTAGTCC",
"UTR2-RBS-A04": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCCAGGGAGGTAGTCC",
"UTR2-RBS-A05": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCCCGGGAGGTAGTCC",
"UTR2-RBS-A06": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCCGAGGAGGTAGTCC",
"UTR2-RBS-A07": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCTCAGGAGGTAGTCC",
"UTR2-RBS-A08": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCCAAGGAGGTAGTCC",
"UTR2-RBS-A09": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCTAGGGAGGTAGTCC",
"UTR2-RBS-A10": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCCCAGGAGGTAGTCC",
"UTR2-RBS-A11": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCTCGGGAGGTAGTCC",
"UTR2-RBS-A12": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCTAAGGAGGTAGTCC",
"UTR3-RBS1": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCACACAGGACTAGTCC",
"UTR3-RBS2": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAAAAGAGGGGAAATAGTCC",
"UTR3-RBS3": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAAAAGAGGAGAAATAGTCC",
"UTR3-RBS-A01": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCTGGGGAGGTAGTCC",
"UTR3-RBS-A02": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCCGGGGAGGTAGTCC",
"UTR3-RBS-A03": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCTGAGGAGGTAGTCC",
"UTR3-RBS-A04": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCCAGGGAGGTAGTCC",
"UTR3-RBS-A05": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCCCGGGAGGTAGTCC",
"UTR3-RBS-A06": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCCGAGGAGGTAGTCC",
"UTR3-RBS-A07": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCTCAGGAGGTAGTCC",
"UTR3-RBS-A08": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCCAAGGAGGTAGTCC",
"UTR3-RBS-A09": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCTAGGGAGGTAGTCC",
"UTR3-RBS-A10": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCCCAGGAGGTAGTCC",
"UTR3-RBS-A11": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCTCGGGAGGTAGTCC",
"UTR3-RBS-A12": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCTAAGGAGGTAGTCC",
"UTR1-RBS-AM12": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCYVRGGAGGTAGTCC",
"UTR1-RBS-AM24": "CTCGTTGAACACCGTCTCAGGTAAGTATCAGTTGTAAATCYVRGGRGGTAGTCC",
"UTR2-RBS-AM12": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCYVRGGAGGTAGTCC",
"UTR2-RBS-AM24": "CTCGTGTTACTATTGGCTGAGATAAGGGTAGCAGAAAATCYVRGGRGGTAGTCC",
"UTR3-RBS-AM12": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCYVRGGAGGTAGTCC",
"UTR3-RBS-AM24": "CTCGGTATCTCGTGGTCTGACGGTAAAATCTATTGTAATCYVRGGRGGTAGTCC",
}
def _make_linker(
linker_class,
str_seq,
name,
description="visit https://www.biolegio.com/products-services/basic/ for further information.",
):
"""Create Linker
Args:
linker_class: object to parse linker into, e.g. 'bsb.BasicLinker',
'bsb.BasicUTRRBSLinker'.
str_seq: Sequence of Linker.
name: Generic name for linker.
description (str, optional): Description of Linker, Defaults to "visit
https://www.biolegio.com/products-services/basic/ for further
information.".
Returns:
object of type linker_class specified in Args.
"""
seq = Seq("GG" + str_seq)
linker = linker_class(seq, id=name, name=name, description=description)
linker.id = seqrecord_hexdigest(linker)
return linker
BIOLEGIO_LINKERS = {
key: _make_linker(bsb.BasicLinker, value, key)
for key, value in STANDARD_LINKERS.items()
}
BIOLEGIO_LINKERS.update(
**{
key: _make_linker(bsb.BasicUTRRBSLinker, value, key)
for key, value in UTR_RBS_LINKERS.items()
}
)
BASIC_BIOLEGIO_LINKERS = {"v0.1": PartLinkerCollection(BIOLEGIO_LINKERS.items())}
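# Illustrative lookup (a sketch, assuming PartLinkerCollection supports
# dict-style access in line with its construction from items() above):
#
#   lmp_linker = BASIC_BIOLEGIO_LINKERS["v0.1"]["LMP"]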
|
"""
test_clientless_reverse_http.py
Copyright 2006 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import commands
import socket
import tempfile
import unittest
import w3af.core.data.kb.config as cf
from w3af.core.controllers.payload_transfer.clientless_reverse_http import ClientlessReverseHTTP
from w3af.core.controllers.extrusion_scanning.extrusionScanner import extrusionScanner
from w3af.core.controllers.misc.temp_dir import create_temp_dir
from w3af.core.controllers.misc.get_unused_port import get_unused_port
from w3af.plugins.tests.helper import onlyroot
class TestClientlessReverseHTTP(unittest.TestCase):
def test_upload_file_mock(self):
exec_method = commands.getoutput
os = 'linux'
create_temp_dir()
cf.cf.save('interface', 'lo')
cf.cf.save('local_ip_address', '127.0.0.1')
inbound_port = get_unused_port()
echo_linux = ClientlessReverseHTTP(exec_method, os, inbound_port)
self.assertTrue(echo_linux.can_transfer())
file_len = 8195
file_content = 'A' * file_len
echo_linux.estimate_transfer_time(file_len)
temp_file_inst = tempfile.NamedTemporaryFile()
temp_fname = temp_file_inst.name
upload_success = echo_linux.transfer(file_content, temp_fname)
self.assertTrue(upload_success)
@onlyroot
def test_upload_file_root(self):
exec_method = commands.getoutput
os = 'linux'
create_temp_dir()
cf.cf.save('interface', 'lo')
cf.cf.save('local_ip_address', '127.0.0.1')
es = extrusionScanner(exec_method)
inbound_port = es.get_inbound_port()
echo_linux = ClientlessReverseHTTP(exec_method, os, inbound_port)
self.assertTrue(echo_linux.can_transfer())
file_len = 8195
file_content = 'A' * file_len
echo_linux.estimate_transfer_time(file_len)
temp_file_inst = tempfile.NamedTemporaryFile()
temp_fname = temp_file_inst.name
upload_success = echo_linux.transfer(file_content, temp_fname)
self.assertTrue(upload_success) |