ext | sha | content
---|---|---|
py | b4142a981cdd23d22694f291a4cc391f6de6ebf5 | #! /usr/bin/env python
import os
import subprocess
from dynamo import dataformat
from dynamo.core.executable import inventory
def shell():
subprocess.check_call(
"""
# This first line is only here so that it doesn't need to be reauthorized
# every time there's an edit
dynamo-exec-auth -u dynamo -x {script} --title teardown > /dev/null 2>&1
su -c 'dynamo -t teardown -W {script}' dynamo > /dev/null 2>&1
""".format(
script=__file__.replace('.pyc', '.py')),
shell=True)
def main(inv):
for attr in ['partitions', 'sites', 'groups', 'datasets']:
for key, obj in getattr(inv, attr).items():
if key is not None:
inv.delete(obj)
if __name__ == '__main__':
if os.getuid():
main(inventory)
# We get here if we're running the script as root user (happens in docker all the time)
else:
shell()
|
py | b4142b4dfbd46cee23660f247a76d850559460a4 | """ Python character mapping codec test
This uses the test codec in testcodec.py and thus also tests the
encodings package lookup scheme.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright 2000 Guido van Rossum.
"""#"
import unittest
import codecs
# Register a search function which knows about our codec
def codec_search_function(encoding):
if encoding == 'testcodec':
from test import testcodec
return tuple(testcodec.getregentry())
return None
# test codec's name (see test/testcodec.py)
codecname = 'testcodec'
class CharmapCodecTest(unittest.TestCase):
def setUp(self):
codecs.register(codec_search_function)
self.addCleanup(codecs.unregister, codec_search_function)
def test_constructorx(self):
self.assertEqual(str(b'abc', codecname), 'abc')
self.assertEqual(str(b'xdef', codecname), 'abcdef')
self.assertEqual(str(b'defx', codecname), 'defabc')
self.assertEqual(str(b'dxf', codecname), 'dabcf')
self.assertEqual(str(b'dxfx', codecname), 'dabcfabc')
def test_encodex(self):
self.assertEqual('abc'.encode(codecname), b'abc')
self.assertEqual('xdef'.encode(codecname), b'abcdef')
self.assertEqual('defx'.encode(codecname), b'defabc')
self.assertEqual('dxf'.encode(codecname), b'dabcf')
self.assertEqual('dxfx'.encode(codecname), b'dabcfabc')
def test_constructory(self):
self.assertEqual(str(b'ydef', codecname), 'def')
self.assertEqual(str(b'defy', codecname), 'def')
self.assertEqual(str(b'dyf', codecname), 'df')
self.assertEqual(str(b'dyfy', codecname), 'df')
def test_maptoundefined(self):
self.assertRaises(UnicodeError, str, b'abc\001', codecname)
if __name__ == "__main__":
unittest.main()
|
py | b4142b609028e5e6c61100658764cb024524d806 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Main file for entry points"""
__author__ = "Justin Furuness"
__credits__ = ["Justin Furuness"]
__license__ = "BSD"
__maintainer__ = "Justin Furuness"
__email__ = "jfuruness@gmail.com"
__status__ = "Development"
from .work_login import WorkLogin
def main():
WorkLogin().login()
def configure():
WorkLogin().configure()
|
py | b4142c5c0e81f7202370827af8069ab05c4a2d3c | from output.models.ms_data.datatypes.facets.decimal.decimal_min_exclusive003_xsd.decimal_min_exclusive003 import (
FooType,
Test,
)
__all__ = [
"FooType",
"Test",
]
|
py | b4142c5e5d9722fde642fb0dcdef70e3dce75aff | # -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
import logging
import sys
import json
from flask import Flask, render_template
from flask_cors import CORS
from chaos_genius import commands
from chaos_genius.logger import configure_logger
from chaos_genius.views import (
data_source_view,
kpi_view,
public_view,
meta_view,
anomaly_data_view,
config_setting_view,
alert_view,
dashboard_view,
status_view,
digest_view,
rca_view,
)
from chaos_genius.extensions import (
bcrypt,
cache,
# csrf_protect,
db,
flask_static_digest,
login_manager,
migrate,
integration_connector,
celery
)
from chaos_genius.settings import AIRBYTE_ENABLED
def create_app(config_object="chaos_genius.settings"):
"""Create application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
:param config_object: The configuration object to use.
"""
app = Flask(__name__.split(".")[0])
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
register_shellcontext(app)
register_commands(app)
configure_logger(app)
CORS(app) # TODO: Remove the CORS in v1 release
return app
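# Usage sketch (added for illustration; entry-point names are assumptions, not
# from this module): with an app factory the app is created explicitly, e.g.
#     from chaos_genius.app import create_app
#     app = create_app()  # uses "chaos_genius.settings" by default
# or via FLASK_APP="chaos_genius.app:create_app()" flask run.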
def register_extensions(app):
"""Register Flask extensions."""
bcrypt.init_app(app)
cache.init_app(app)
db.init_app(app)
# csrf_protect.init_app(app)
# login_manager.init_app(app)
migrate.init_app(app, db)
flask_static_digest.init_app(app)
if AIRBYTE_ENABLED:
integration_connector.init_app(app)
celery.init_app(app)
return None
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public_view.blueprint, url_prefix='/')
app.register_blueprint(config_setting_view.blueprint, url_prefix='/api/config')
# TODO: Rename the api endpoint to data source
app.register_blueprint(data_source_view.blueprint, url_prefix='/api/connection')
app.register_blueprint(kpi_view.blueprint, url_prefix='/api/kpi')
app.register_blueprint(anomaly_data_view.blueprint, url_prefix='/api/anomaly-data')
app.register_blueprint(alert_view.blueprint, url_prefix='/api/alert')
app.register_blueprint(dashboard_view.blueprint, url_prefix='/api/dashboard')
app.register_blueprint(status_view.blueprint, url_prefix='/api/status')
app.register_blueprint(meta_view.blueprint, url_prefix='/api/meta')
app.register_blueprint(digest_view.blueprint, url_prefix='/api/digest')
app.register_blueprint(rca_view.blueprint, url_prefix='/api/rca')
return None
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(e):
"""Render error template."""
response = e.get_response()
response.data = json.dumps({
"code": e.code,
"name": e.name,
"description": e.description,
})
response.content_type = "application/json"
return response
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
return {"db": db} # , "User": user.models.User}
app.shell_context_processor(shell_context)
def register_commands(app):
"""Register Click commands."""
app.cli.add_command(commands.test)
app.cli.add_command(commands.lint)
app.cli.add_command(commands.integration_connector)
app.cli.add_command(commands.run_anomaly)
app.cli.add_command(commands.run_rca)
app.cli.add_command(commands.run_alert)
app.cli.add_command(commands.reinstall_db)
app.cli.add_command(commands.insert_demo_data)
app.cli.add_command(commands.run_anomaly_rca_scheduler)
app.cli.add_command(commands.run_digest)
|
py | b4142ed444adcebed3459cfa7c8b82550fad0d61 | import RPi.GPIO as GPIO
import time, random, math, threading, datetime, locale, os, sys, Adafruit_DHT, urllib, yaml, paramiko, tweepy, requests, alsaaudio
from gtts import gTTS
from gpiozero import CPUTemperature
from time import strftime
from time import sleep
from threading import Thread
locale.setlocale(locale.LC_ALL, 'en_GB.UTF-8')
### GPIO and VARIABLES for inputs/outputs on the Raspberry Pi ###
# Relay
relay = 17
# Hygro
hygro = 23
hygro_Power = 24
# LED diodes
blue_one_pin = 27
blue_two_pin = 22
blue_three_pin = 5
green_one_pin = 6
green_two_pin = 26
red_one_pin = 25
red_two_pin = 16
blue_on_off_pin = 18
# GPIO Set mode to BCM instead of Board
GPIO.setmode(GPIO.BCM)
# GPIO input output
GPIO.setup(blue_one_pin, GPIO.OUT)
GPIO.setup(blue_two_pin, GPIO.OUT)
GPIO.setup(blue_three_pin, GPIO.OUT)
GPIO.setup(green_one_pin, GPIO.OUT)
GPIO.setup(green_two_pin, GPIO.OUT)
GPIO.setup(red_one_pin, GPIO.OUT)
GPIO.setup(red_two_pin, GPIO.OUT)
GPIO.setup(blue_on_off_pin, GPIO.OUT)
# Led PWM - Pulse width modulation - for pulsating lights
blue_one = GPIO.PWM(blue_one_pin, 100)
blue_two = GPIO.PWM(blue_two_pin, 100)
blue_three = GPIO.PWM(blue_three_pin, 100)
green_one = GPIO.PWM(green_one_pin, 100)
green_two = GPIO.PWM(green_two_pin, 100)
red_one = GPIO.PWM(red_one_pin, 100)
red_two = GPIO.PWM(red_two_pin, 100)
blue_on_off = GPIO.PWM(blue_on_off_pin, 100)
# Start each diode at 0% duty cycle, which means off
blue_one.start(0)
blue_two.start(0)
blue_three.start(0)
green_one.start(0)
green_two.start(0)
red_one.start(0)
red_two.start(0)
blue_on_off.start(0)
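# Illustrative helper (an addition, not part of the original script): pulse one
# of the PWM channels above by sweeping its duty cycle 0 -> 100 -> 0. Assumes
# the 100 Hz PWM objects created above; sleep comes from the time import.
def pulse_led(led_pwm, step_delay=0.01):
    for duty in list(range(0, 101)) + list(range(100, -1, -1)):
        led_pwm.ChangeDutyCycle(duty)
        sleep(step_delay)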
# Hygro reader setup
GPIO.setup(hygro, GPIO.IN)
GPIO.setup(hygro_Power, GPIO.OUT)
# Relay setup
GPIO.setup(relay, GPIO.OUT)
# Variables for logging
cpu = CPUTemperature() |
py | b414301f86f8889277ac98c666069f216b1b24cb | import os
import ipdb
import argparse
from tqdm import tqdm
import numpy as np
import torch
import torch.optim as optim
from torch.optim.lr_scheduler import MultiStepLR
import torch.nn.functional as F
from torch.autograd import grad
from torch.autograd import Variable
# Local imports
import data_loaders
from csv_logger import CSVLogger
from resnet import ResNet18
from wide_resnet import WideResNet
from unet import UNet
def experiment():
parser = argparse.ArgumentParser(description='CNN Hyperparameter Fine-tuning')
parser.add_argument('--dataset', default='cifar10', choices=['cifar10', 'cifar100'],
help='Choose a dataset')
parser.add_argument('--model', default='resnet18', choices=['resnet18', 'wideresnet'],
help='Choose a model')
parser.add_argument('--num_finetune_epochs', type=int, default=200,
help='Number of fine-tuning epochs')
parser.add_argument('--lr', type=float, default=0.1,
help='Learning rate')
parser.add_argument('--optimizer', type=str, default='sgdm',
help='Choose an optimizer')
parser.add_argument('--batch_size', type=int, default=128,
help='Mini-batch size')
parser.add_argument('--data_augmentation', action='store_true', default=True,
help='Whether to use data augmentation')
parser.add_argument('--wdecay', type=float, default=5e-4,
help='Amount of weight decay')
parser.add_argument('--load_checkpoint', type=str,
help='Path to pre-trained checkpoint to load and finetune')
parser.add_argument('--save_dir', type=str, default='finetuned_checkpoints',
help='Save directory for the fine-tuned checkpoint')
args = parser.parse_args()
args.load_checkpoint = '/h/lorraine/PycharmProjects/CG_IFT_test/baseline_checkpoints/cifar10_resnet18_sgdm_lr0.1_wd0.0005_aug0.pt'
if args.dataset == 'cifar10':
num_classes = 10
train_loader, val_loader, test_loader = data_loaders.load_cifar10(args.batch_size, val_split=True,
augmentation=args.data_augmentation)
elif args.dataset == 'cifar100':
num_classes = 100
train_loader, val_loader, test_loader = data_loaders.load_cifar100(args.batch_size, val_split=True,
augmentation=args.data_augmentation)
if args.model == 'resnet18':
cnn = ResNet18(num_classes=num_classes)
elif args.model == 'wideresnet':
cnn = WideResNet(depth=28, num_classes=num_classes, widen_factor=10, dropRate=0.3)
if not os.path.exists(args.save_dir):
os.makedirs(args.save_dir)
test_id = '{}_{}_{}_lr{}_wd{}_aug{}'.format(args.dataset, args.model, args.optimizer, args.lr, args.wdecay,
int(args.data_augmentation))
filename = os.path.join(args.save_dir, test_id + '.csv')
csv_logger = CSVLogger(
fieldnames=['epoch', 'train_loss', 'train_acc', 'val_loss', 'val_acc', 'test_loss', 'test_acc'],
filename=filename)
checkpoint = torch.load(args.load_checkpoint)
init_epoch = checkpoint['epoch']
cnn.load_state_dict(checkpoint['model_state_dict'])
model = cnn.cuda()
model.train()
args.hyper_train = 'augment' # 'all_weight' # 'weight'
def init_hyper_train(model):
"""
:return:
"""
init_hyper = None
if args.hyper_train == 'weight':
init_hyper = np.sqrt(args.wdecay)
model.weight_decay = Variable(torch.FloatTensor([init_hyper]).cuda(), requires_grad=True)
model.weight_decay = model.weight_decay.cuda()
elif args.hyper_train == 'all_weight':
num_p = sum(p.numel() for p in model.parameters())
weights = np.ones(num_p) * np.sqrt(args.wdecay)
model.weight_decay = Variable(torch.FloatTensor(weights).cuda(), requires_grad=True)
model.weight_decay = model.weight_decay.cuda()
model = model.cuda()
return init_hyper
if args.hyper_train == 'augment': # Don't define this inside the prior function, else the scope is wrong
augment_net = UNet(in_channels=3,
n_classes=3,
depth=5,
wf=6,
padding=True,
batch_norm=False,
up_mode='upconv') # TODO(PV): Initialize UNet properly
augment_net = augment_net.cuda()
def get_hyper_train():
"""
:return:
"""
if args.hyper_train == 'weight' or args.hyper_train == 'all_weight':
return [model.weight_decay]
if args.hyper_train == 'augment':
return augment_net.parameters()
def get_hyper_train_flat():
return torch.cat([p.view(-1) for p in get_hyper_train()])
# TODO: Check this size
init_hyper_train(model)
if args.hyper_train == 'all_weight':
wdecay = 0.0
else:
wdecay = args.wdecay
optimizer = optim.SGD(model.parameters(), lr=args.lr * 0.2 * 0.2, momentum=0.9, nesterov=True,
weight_decay=wdecay) # args.wdecay)
# print(checkpoint['optimizer_state_dict'])
# optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
scheduler = MultiStepLR(optimizer, milestones=[60, 120], gamma=0.2) # [60, 120, 160]
hyper_optimizer = torch.optim.Adam(get_hyper_train(), lr=1e-3) # try 0.1 as lr
# Set random regularization hyperparameters
# data_augmentation_hparams = {} # Random values for hue, saturation, brightness, contrast, rotation, etc.
if args.dataset == 'cifar10':
num_classes = 10
train_loader, val_loader, test_loader = data_loaders.load_cifar10(args.batch_size, val_split=True,
augmentation=args.data_augmentation)
elif args.dataset == 'cifar100':
num_classes = 100
train_loader, val_loader, test_loader = data_loaders.load_cifar100(args.batch_size, val_split=True,
augmentation=args.data_augmentation)
def test(loader):
model.eval() # Change model to 'eval' mode (BN uses moving mean/var).
correct = 0.
total = 0.
losses = []
for images, labels in loader:
images = images.cuda()
labels = labels.cuda()
with torch.no_grad():
pred = model(images)
xentropy_loss = F.cross_entropy(pred, labels)
losses.append(xentropy_loss.item())
pred = torch.max(pred.data, 1)[1]
total += labels.size(0)
correct += (pred == labels).sum().item()
avg_loss = float(np.mean(losses))
acc = correct / total
model.train()
return avg_loss, acc
def prepare_data(x, y):
"""
:param x:
:param y:
:return:
"""
x, y = x.cuda(), y.cuda()
# x, y = Variable(x), Variable(y)
return x, y
def train_loss_func(x, y):
"""
:param x:
:param y:
:return:
"""
x, y = prepare_data(x, y)
reg_loss = 0.0
if args.hyper_train == 'weight':
pred = model(x)
xentropy_loss = F.cross_entropy(pred, y)
# print(f"weight_decay: {torch.exp(model.weight_decay).shape}")
for p in model.parameters():
# print(f"weight_decay: {torch.exp(model.weight_decay).shape}")
# print(f"shape: {p.shape}")
reg_loss = reg_loss + .5 * (model.weight_decay ** 2) * torch.sum(p ** 2)
# print(f"reg_loss: {reg_loss}")
elif args.hyper_train == 'all_weight':
pred = model(x)
xentropy_loss = F.cross_entropy(pred, y)
count = 0
for p in model.parameters():
reg_loss = reg_loss + .5 * torch.sum(
(model.weight_decay[count: count + p.numel()] ** 2) * torch.flatten(p ** 2))
count += p.numel()
elif args.hyper_train == 'augment':
augmented_x = augment_net(x)
pred = model(augmented_x)
xentropy_loss = F.cross_entropy(pred, y)
return xentropy_loss + reg_loss, pred
def val_loss_func(x, y):
"""
:param x:
:param y:
:return:
"""
x, y = prepare_data(x, y)
pred = model(x)
xentropy_loss = F.cross_entropy(pred, y)
return xentropy_loss
for epoch in range(init_epoch, init_epoch + args.num_finetune_epochs):
xentropy_loss_avg = 0.
total_val_loss = 0.
correct = 0.
total = 0.
progress_bar = tqdm(train_loader)
for i, (images, labels) in enumerate(progress_bar):
progress_bar.set_description('Finetune Epoch ' + str(epoch))
# TODO: Take a hyperparameter step here
optimizer.zero_grad(), hyper_optimizer.zero_grad()
val_loss, weight_norm, grad_norm = hyper_step(1, 1, get_hyper_train, get_hyper_train_flat,
model, val_loss_func,
val_loader, train_loss_func, train_loader,
hyper_optimizer)
# del val_loss
# print(f"hyper: {get_hyper_train()}")
images, labels = images.cuda(), labels.cuda()
# pred = model(images)
# xentropy_loss = F.cross_entropy(pred, labels)
xentropy_loss, pred = train_loss_func(images, labels)
optimizer.zero_grad(), hyper_optimizer.zero_grad()
xentropy_loss.backward()
optimizer.step()
xentropy_loss_avg += xentropy_loss.item()
# Calculate running average of accuracy
pred = torch.max(pred.data, 1)[1]
total += labels.size(0)
correct += (pred == labels.data).sum().item()
accuracy = correct / total
progress_bar.set_postfix(
train='%.5f' % (xentropy_loss_avg / (i + 1)),
val='%.4f' % (total_val_loss / (i + 1)),
acc='%.4f' % accuracy,
weight='%.2f' % weight_norm,
update='%.3f' % grad_norm)
val_loss, val_acc = test(val_loader)
test_loss, test_acc = test(test_loader)
tqdm.write('val loss: {:6.4f} | val acc: {:6.4f} | test loss: {:6.4f} | test_acc: {:6.4f}'.format(
val_loss, val_acc, test_loss, test_acc))
scheduler.step(epoch)
row = {'epoch': str(epoch),
'train_loss': str(xentropy_loss_avg / (i + 1)), 'train_acc': str(accuracy),
'val_loss': str(val_loss), 'val_acc': str(val_acc),
'test_loss': str(test_loss), 'test_acc': str(test_acc)}
csv_logger.writerow(row)
"""def hyper_step(train_batch_num, val_batch_num, get_hyper_train, unshaped_get_hyper_train, model, val_loss_func,
val_loader, train_loss_func, train_loader, hyper_optimizer):
'''
:param train_batch_num:
:param val_batch_num:
:param get_hyper_train:
:param unshaped_get_hyper_train:
:param model:
:param val_loss_func:
:param val_loader:
:param train_loss_func:
:param train_loader:
:param hyper_optimizer:
:return:
'''
from util import gather_flat_grad
train_batch_num -= 1
val_batch_num -= 1
'''import gc
print("Printing objects...")
for obj in gc.get_objects():
try:
if torch.is_tensor(obj) or (hasattr(obj, 'data') and torch.is_tensor(obj.data)):
print(type(obj), obj.size())
except:
pass
print("Done printing objects.")'''
# set up placeholder for the partial derivative in each batch
total_d_val_loss_d_lambda = torch.zeros(get_hyper_train().size(0)).cuda()
num_weights = sum(p.numel() for p in model.parameters())
d_val_loss_d_theta = torch.zeros(num_weights).cuda()
model.train()
for batch_idx, (x, y) in enumerate(val_loader):
model.zero_grad()
val_loss = val_loss_func(x, y)
# val_loss_grad = grad(val_loss, model.parameters())
d_val_loss_d_theta = d_val_loss_d_theta + gather_flat_grad(grad(val_loss, model.parameters()))
if batch_idx >= val_batch_num: break
d_val_loss_d_theta = d_val_loss_d_theta / (batch_idx + 1)
# pre_conditioner = d_val_loss_d_theta # TODO - where the preconditioner should be
# flat_pre_conditioner = pre_conditioner
model.train() # train()
for batch_idx, (x, y) in enumerate(train_loader):
train_loss, _ = train_loss_func(x, y)
# TODO (JON): Probably don't recompute - use create_graph and retain_graph?
model.zero_grad(), hyper_optimizer.zero_grad()
d_train_loss_d_theta = grad(train_loss, model.parameters(), create_graph=True)
# flat_d_train_loss_d_theta = gather_flat_grad(d_train_loss_d_theta)
flat_d_train_loss_d_theta = d_val_loss_d_theta.detach().reshape(1, -1) @ gather_flat_grad(
d_train_loss_d_theta).reshape(-1, 1)
model.zero_grad(), hyper_optimizer.zero_grad()
# flat_d_train_loss_d_theta.backward() #flat_pre_conditioner)
# if get_hyper_train().grad is not None:
total_d_val_loss_d_lambda = total_d_val_loss_d_lambda - gather_flat_grad(
grad(flat_d_train_loss_d_theta.reshape(1), unshaped_get_hyper_train()))
# get_hyper_train().grad
# del d_train_loss_d_theta, flat_d_train_loss_d_theta
if batch_idx >= train_batch_num: break
total_d_val_loss_d_lambda = total_d_val_loss_d_lambda / (batch_idx + 1)
direct_d_val_loss_d_lambda = torch.zeros(get_hyper_train().size(0)).cuda()
'''model.train()
for batch_idx, (x_val, y_val) in enumerate(val_loader):
model.zero_grad(), hyper_optimizer.zero_grad()
val_loss = val_loss_func(x_val, y_val)
val_loss_grad = grad(val_loss, get_hyper_train(), allow_unused=True)
if val_loss_grad is not None and val_loss_grad[0] is not None:
direct_d_val_loss_d_lambda = direct_d_val_loss_d_lambda + gather_flat_grad(val_loss_grad)
del val_loss_grad
else:
del val_loss_grad
break
if batch_idx >= val_batch_num: break
direct_d_val_loss_d_lambda = direct_d_val_loss_d_lambda / (batch_idx + 1)'''
target_grad = direct_d_val_loss_d_lambda + total_d_val_loss_d_lambda
current_index = 0
for p in unshaped_get_hyper_train():
p_num_params = np.prod(p.shape)
p.grad = target_grad[current_index:current_index + p_num_params].view(p.shape)
current_index += p_num_params
# del direct_d_val_loss_d_lambda, total_d_val_loss_d_lambda
weight_norm, grad_norm = get_hyper_train().norm(), target_grad.norm()
#print("weight={}, update={}".format(weight_norm, grad_norm))
hyper_optimizer.step()
model.zero_grad(), hyper_optimizer.zero_grad()
# print(torch.cuda.memory_allocated(), torch.cuda.memory_cached(), torch.cuda.memory_cached() - torch.cuda.memory_allocated())
# torch.cuda.empty_cache()
return None, None, val_loss.detach(), weight_norm.detach(), grad_norm.detach()"""
def hyper_step(train_batch_num, val_batch_num, get_hyper_train, get_hyper_train_flat, model, val_loss_func, val_loader, train_loss_func,
train_loader, hyper_optimizer):
"""
:param train_batch_num:
:param val_batch_num:
:return:
"""
from util import gather_flat_grad
train_batch_num -= 1
val_batch_num -= 1
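# The loops below sketch a first-order hypergradient: with theta the weights
# and lambda the hyperparameters,
#   d val_loss / d lambda ~= -(d val_loss / d theta)^T (d^2 train_loss / d theta d lambda)
# i.e. the implicit-function-theorem gradient with the inverse training
# Hessian approximated by the identity (inferred from the vector-Jacobian
# products computed below).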
# set up placeholder for the partial derivative in each batch
total_d_val_loss_d_lambda = torch.zeros(get_hyper_train_flat().size(0)).cuda()
num_weights = sum(p.numel() for p in model.parameters())
d_val_loss_d_theta = torch.zeros(num_weights).cuda()
model.train()
for batch_idx, (x, y) in enumerate(val_loader):
model.zero_grad()
val_loss = val_loss_func(x, y)
# val_loss_grad = grad(val_loss, model.parameters())
d_val_loss_d_theta = d_val_loss_d_theta + gather_flat_grad(grad(val_loss, model.parameters()))
if batch_idx >= val_batch_num: break
d_val_loss_d_theta = d_val_loss_d_theta / (batch_idx + 1)
model.train() # train()
for batch_idx, (x, y) in enumerate(train_loader):
train_loss, _ = train_loss_func(x, y)
# TODO (JON): Probably don't recompute - use create_graph and retain_graph?
model.zero_grad()
# hyper_optimizer.zero_grad()
d_train_loss_d_theta = grad(train_loss, model.parameters(), create_graph=True)
# flat_d_train_loss_d_theta = gather_flat_grad(d_train_loss_d_theta)
flat_d_train_loss_d_theta = d_val_loss_d_theta.detach().reshape(1, -1) @ gather_flat_grad(
d_train_loss_d_theta).reshape(-1, 1)
model.zero_grad()
# hyper_optimizer.zero_grad()
# flat_d_train_loss_d_theta.backward() #flat_pre_conditioner)
# if get_hyper_train().grad is not None:
#if gather_flat_grad(get_hyper_train()) is not None:
total_d_val_loss_d_lambda = total_d_val_loss_d_lambda - gather_flat_grad(
grad(flat_d_train_loss_d_theta.reshape(1), get_hyper_train()))
if batch_idx >= train_batch_num: break
total_d_val_loss_d_lambda = total_d_val_loss_d_lambda / (batch_idx + 1)
direct_d_val_loss_d_lambda = torch.zeros(get_hyper_train_flat().size(0)).cuda()
grad_to_assign = direct_d_val_loss_d_lambda + total_d_val_loss_d_lambda
current_index = 0
for p in get_hyper_train():
p_num_params = np.prod(p.shape)
p.grad = grad_to_assign[current_index:current_index + p_num_params].view(p.shape)
current_index += p_num_params
# get_hyper_train().grad = (direct_d_val_loss_d_lambda + total_d_val_loss_d_lambda)
weight_norm, grad_norm = get_hyper_train_flat().norm(), grad_to_assign.norm() # get_hyper_train().grad.norm()
print("weight={}, update={}".format(weight_norm, grad_norm))
# print("weight={}, update={}".format(get_hyper_train_flat().norm(), gather_flat_grad(get_hyper_train()).norm()))
hyper_optimizer.step()
model.zero_grad()
# hyper_optimizer.zero_grad()
# return get_hyper_train(), get_hyper_train().grad, val_loss
return val_loss, weight_norm, grad_norm
if __name__ == '__main__':
experiment()
|
py | b4143097ea6c6b08be8db96124da89873385c71d | """
Basic ideas from https://github.com/aymericdamien/TensorFlow-Examples/blob/master/examples/3_NeuralNetworks/neural_network_raw.py
- update using tf.data
- update session
"""
import tensorflow as tf
import numpy as np
from src.constants import NUM_EPOCHS, BATCH_SIZE, LEARNING_RATE
###############################################################################
#Load Data
try:
from tensorflow.keras.datasets import mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
except Exception:
print("download manually to ./data/ from {}".format(
"https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz"
))
with np.load("./data/mnist.npz") as f:
x_train, y_train = f['x_train'], f['y_train']
x_test, y_test = f['x_test'], f['y_test']
# Classic NumPy approach: reshape, and save the image dimensions
dim_img = x_train.shape[1:]
x_train = x_train.reshape(len(x_train), -1)
x_test = x_test.reshape(len(x_test), -1)
# Convert NumPy array to tensor manually to avoid accidental reassignment:
# x_train = tf.cast(x_train, dtype="float")
# x_test = tf.cast(x_test, dtype="float")
print("passed")
def oneHotEncode(array):
n = len(array)
dense_array = np.zeros((n, len(set(array))))
dense_array[np.arange(n), array] = 1
return dense_array
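# Worked example (added for clarity): oneHotEncode(np.array([0, 2, 1])) gives
#   [[1., 0., 0.],
#    [0., 0., 1.],
#    [0., 1., 0.]]
# Note this assumes the labels are 0..k-1 and that every class occurs in array.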
assert set(y_train) == set(
y_test), "Classes in train and test set are different. which is correct?"
classes = set(y_train) # 0-9 digits
y_train = oneHotEncode(y_train)
y_test = oneHotEncode(y_test)
###############################################################################
# parser function for input data
# Use `tf.parse_single_example()` to extract data from a `tf.Example`
# protocol buffer, and perform any additional per-record preprocessing.
def parser(record):
"""
Define a function to pass to map fct of tf.data.Dataset
example: https://www.tensorflow.org/guide/datasets#using_high-level_apis
"""
# keys_to_features = {
# "image_data": tf.FixedLenFeature((), tf.string, default_value=""),
# "date_time": tf.FixedLenFeature((), tf.int64, default_value=""),
# "label": tf.FixedLenFeature((), tf.int64,
# default_value=tf.zeros([], dtype=tf.int64)),
# }
keys_to_features = {
"image_data": tf.FixedLenFeature((), tf.string, default_value=""),
"label": tf.FixedLenFeature((), tf.int64,
default_value=tf.zeros([], dtype=tf.int64)),
}
parsed = tf.parse_single_example(record, keys_to_features)
# Perform additional preprocessing on the parsed data.
image = tf.image.decode_jpeg(parsed["image_data"])
image = tf.reshape(image, [299, 299, 1])
label = tf.cast(parsed["label"], tf.int32)
return {"image_data": image}, label
###############################################################################
# Build Model
# data specific parameters:
num_classes = y_train.shape[-1] # MNIST total classes (0-9 digits)
dim_input = x_train.shape[-1] # MNIST data input (img shape: 28*28)
# Number of units in hidden layer (Hyperparameter)
n_hidden_1 = 256 # 1st layer number of neurons
n_hidden_2 = 256 # 2nd layer number of neurons
# tf Graph input
X = tf.placeholder("float", shape=[None, dim_input])
Y = tf.placeholder("int32", shape=[None, num_classes])
# # Create model a statful model
class FNN(object):
def __init__(self):
self.w_1 = tf.Variable(tf.random_normal([dim_input, n_hidden_1]), name='W1')
self.b_1 = tf.Variable(tf.random_normal([n_hidden_1]), name='b1')
self.w_2 = tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2]), name="W2")
self.b_2 = tf.Variable(tf.random_normal([n_hidden_2]), name='b2')
self.w_out = tf.Variable(tf.random_normal([n_hidden_2, num_classes]), name="W_out")
self.b_out = tf.Variable(tf.random_normal([num_classes]), name='b_out')
#self.weights = [self.w_1, self.b_1, self.w_2, self.b_2, self.w_out, self.b_out]
def __call__(self, inputs, training=False):
hidden_1 = tf.nn.relu(tf.matmul(inputs, self.w_1) + self.b_1)
hidden_2 = tf.nn.relu(tf.matmul(hidden_1, self.w_2) + self.b_2)
logits = tf.matmul(hidden_2, self.w_out) + self.b_out
return logits
# class FNN(tf.keras.Model):
# def __init__(self):
# super(FNN, self).__init__()
# self.w_1 = tf.Variable(tf.Variable(tf.random_normal([dim_input, n_hidden_1])), name='W1')
# self.b_1 = tf.Variable(tf.random_normal([n_hidden_1]), name='b1')
# self.w_2 = tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2]), name="W2")
# self.b_2 = tf.Variable(tf.random_normal([n_hidden_2]), name='b2')
# self.w_out = tf.Variable(tf.random_normal([n_hidden_2, num_classes]), name="W_out")
# self.b_out = tf.Variable(tf.random_normal([num_classes]), name='b_out')
# #self.weights = [self.w_1, self.b_1, self.w_2, self.b_2, self.w_out, self.b_out]
# def call(self, inputs, training=False):
# hidden_1 = tf.nn.relu(tf.matmul(inputs, self.w_1) + self.b_1)
# hidden_2 = tf.nn.relu(tf.matmul(hidden_1, self.w_2) + self.b_2)
# logits = tf.matmul(hidden_2, self.w_out) + self.b_out
# return logits
# Construct model
logits = FNN()(X)
# Define loss and optimizer
# loss_op = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(
# logits=logits, labels=Y))
loss = tf.losses.softmax_cross_entropy(logits=logits, onehot_labels=Y)
optimizer = tf.train.AdamOptimizer(learning_rate=LEARNING_RATE).minimize(loss)
# train_op = optimizer.minimize(loss_op)
# Evaluate model
tp = tf.equal(tf.argmax(logits, axis=1), tf.argmax(Y, axis=1))
accuracy = tf.reduce_mean(tf.cast(tp, dtype="float"))
# Initialize the variables (i.e. assign their default value)
# init = tf.global_variables_initializer() # ToDo: deprecated
##############################################################################
# tf.data , Preprocessing
trainslices = tf.data.Dataset.from_tensor_slices((X, Y))
trainslices = trainslices.shuffle(buffer_size=3000,
seed=123456,
reshuffle_each_iteration=True)
# trainslices = trainslices.map(parser) # to add to tf.data pipeline
trainslices = trainslices.repeat(count=1)
trainslices = trainslices.batch(batch_size=BATCH_SIZE,
drop_remainder=True) # if False -> breaks assert in training loop
iterator = trainslices.make_initializable_iterator()
next_element = iterator.get_next()
# tf.data.experimental.make_batched_features_dataset(BATCH_SIZE, traindata, num_epochs=NUM_EPOCHS)
# #unified call possible (unclear how to to do with numpy arrays)
# iterator = traindata.make_initializable_iterator(
# batch_size=BATCH_SIZE,
# features=traindata,
# num_epochs=NUM_EPOCHS)
testslices = tf.data.Dataset.from_tensor_slices((X, Y))
# Sequencing batch to mini-batches
# https://github.com/tensorflow/tensorflow/blob/9230423668770036179a72414482d45ddde40a3b/tensorflow/contrib/training/python/training/sequence_queueing_state_saver.py#L353
N_train = x_train.shape[0]
n_batches = N_train / BATCH_SIZE
step = int(n_batches/ 60)
init = tf.global_variables_initializer()
# Start training
with tf.Session() as sess:
# Run the initializer
# sess.run(init)
sess.run(init)
print('Initalized graph')
for i in range(1, NUM_EPOCHS+1):
print("Epoch {}: ".format(i), end='')
sess.run(iterator.initializer, feed_dict={X: x_train,
Y: y_train})
batch = 0
while True:
try:
images, labels = sess.run(next_element)
assert images.shape == (
BATCH_SIZE, dim_input), "Something is wrong with Batch shape: {}".format(images.shape)
# Run optimization op (backprop)
sess.run(optimizer, feed_dict={X: images, Y: labels})
if batch % step == 0:
print('#', end='')
batch += 1
except tf.errors.OutOfRangeError:
# Calculate metrics for validation set / test set
_loss, _acc = sess.run([loss, accuracy], feed_dict={X: x_train,
Y: y_train})
print(", Training Loss= {:.2f}".format(_loss) +
", Training Accuracy= {:.3f}".format(_acc))
break
print("Validation Accuracy:",
sess.run(accuracy, feed_dict={X: x_test,
Y: y_test}))
print("Optimization Finished!")
|
py | b41430a40cd83e8001b8636b251aa6e09d7bfa86 | from Physical.BaseModule import BaseModule
from Communication.Message import Message, MessageType
from Communication.EventBus import EventBus
from gpiozero import DistanceSensor
class RangefinderModule(BaseModule):
def __init__(self, rangefinder: DistanceSensor, event_bus: EventBus):
self.rangefinder = rangefinder
self.event_bus = event_bus
def power_up(self):
print ("Rangefinder module powering up")
self.event_bus.register(self, MessageType.RangeCommand)
def power_down(self):
self.event_bus.unregister(self.get_name())
self.rangefinder.close()
def process(self, message: Message):
response = Message()
response.set_payload(1)  # stub value; the real reading is self.rangefinder.distance
response.set_type(MessageType.RangeResponse)
self.event_bus.post_message(response)
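# Wiring sketch (an assumption for illustration; the GPIO pin numbers are made
# up and EventBus construction may differ):
#   bus = EventBus()
#   module = RangefinderModule(DistanceSensor(echo=24, trigger=23), bus)
#   module.power_up()  # registers the module for MessageType.RangeCommand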
|
py | b41430bb6b4b1cfc58acc43e4575ef16f1a5cda1 | #!/usr/bin/env python
#
# Use the raw transactions API to spend CRCTs received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a circuitd or circuit-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the circuit data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/CIRCUIT/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "CIRCUIT")
return os.path.expanduser("~/.circuit")
def read_bitcoin_config(dbdir):
"""Read the circuit.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "circuit.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
"""Connect to a circuit JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 31353 if testnet else 31351
connect = "http://%s:%[email protected]:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the circuitd we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(circuitd):
info = circuitd.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
circuitd.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = circuitd.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(circuitd):
address_summary = dict()
address_to_account = dict()
for info in circuitd.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = circuitd.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = circuitd.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-circuit-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
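# Worked example (added for clarity): with needed=Decimal("5.0") and inputs
#   [{"txid": "aa", "vout": 0, "amount": Decimal("3.0")},
#    {"txid": "bb", "vout": 1, "amount": Decimal("4.0")}]
# select_coins returns both outpoints and change of Decimal("2.0").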
def create_tx(circuitd, fromaddresses, toaddress, amount, fee):
all_coins = list_available(circuitd)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to circuitd.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = circuitd.createrawtransaction(inputs, outputs)
signed_rawtx = circuitd.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(circuitd, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = circuitd.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(circuitd, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = circuitd.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(circuitd, txinfo)
total_out = compute_amount_out(txinfo)
fee = total_in - total_out  # the fee this transaction actually pays
if fee > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get CRCTs from")
parser.add_option("--to", dest="to", default=None,
help="address to get send CRCTs to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of circuit.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
circuitd = connect_JSON(config)
if options.amount is None:
address_summary = list_available(circuitd)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(circuitd) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(circuitd, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(circuitd, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = circuitd.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
|
py | b4143101ab6c02b50c5926ebdd2ec95ef05c2d82 | __copyright__ = "Copyright (c) Microsoft Corporation and Mila - Quebec AI Institute"
__license__ = "MIT"
from typing import Optional, Type
from torch.utils.data import DataLoader
from segar.configs.handler import get_env_config
from segar.factors import (
Charge,
Magnetism,
Mass,
StoredEnergy,
Density,
Position,
Shape,
Circle,
Mobile,
GaussianNoise,
RandomConvexHull,
UniformNoise,
Factor,
GaussianMixtureNoise,
Friction,
Size,
)
from segar.mdps import RGBObservation, StateObservation, Initialization
from segar.rules import Prior
from segar.sim.location_priors import (
RandomBottomLocation,
RandomTopLocation,
RandomMiddleLocation,
)
from segar.things import (
Charger,
Magnet,
Bumper,
Damper,
Object,
SandTile,
MagmaTile,
Hole,
FireTile,
Tile,
ThingFactory,
)
from segar.repl.static_datasets.iid_samples import create_iid_from_init
from segar.repl.static_datasets.iid_samples import IIDFromInit
from segar.tasks.puttputt import PuttPuttInitialization, GolfBall, GoalTile
import numpy as np
def create_initialization():
"""Creates a generic initialization that draws from the product of
marginals over all factors.
:return: Initialization object.
"""
config = dict(
numbers=[
(
ThingFactory(
[Charger, Magnet, Bumper, Damper, Object, SandTile, MagmaTile, Hole, FireTile]
),
0,
),
(GoalTile, 1),
(GolfBall, 1),
],
priors=[
Prior(Position, RandomMiddleLocation()),
Prior(Position, RandomBottomLocation(), entity_type=GolfBall),
Prior(Position, RandomTopLocation(), entity_type=GoalTile),
Prior(Shape, RandomConvexHull(0.3), entity_type=Tile),
Prior(Shape, Circle(0.3), entity_type=GoalTile),
Prior(Size, GaussianNoise(0.3, 0.01, clip=(0.1, 0.3)), entity_type=Object),
Prior(Size, GaussianNoise(1.0, 0.01, clip=(0.5, 1.5)), entity_type=Tile),
Prior(Mass, 1.0),
Prior(Mobile, True),
Prior(
Charge,
GaussianMixtureNoise(means=[-1.0, 1.0], stds=[0.1, 0.1]),
entity_type=GolfBall,
),
Prior(
Magnetism,
GaussianMixtureNoise(means=[-1.0, 1.0], stds=[0.1, 0.1]),
entity_type=GolfBall,
),
Prior(Density, GaussianNoise(1.0, 0.1, clip=(0.0, 2.0)), entity_type=GolfBall),
Prior(Mass, GaussianNoise(2.0, 0.5), entity_type=GolfBall),
Prior(StoredEnergy, GaussianNoise(0, 2.0), entity_type=GolfBall),
Prior(Friction, UniformNoise(0.2, 1.0), entity_type=SandTile),
],
)
initialization = PuttPuttInitialization(config=config)
return initialization
def make_data_loaders(
factors: list[Type[Factor]],
batch_size: int = 64,
n_workers: int = 8,
initialization: Optional[Initialization] = None,
) -> tuple[DataLoader, DataLoader, dict]:
"""Makes data loaders for initialization.
:param factors: Factor types to track as ground truth.
:param batch_size: Batch size for data loaders.
:param n_workers: Number of workers for data loaders.
:param initialization: Optional initialization object to generate ground
truth from.
:return:
"""
vis_config = get_env_config("visual", "linear_ae", "baseline")
input_observation = RGBObservation(config=vis_config, resolution=64)
target_observation = StateObservation("golfball", factors=factors)
initialization = initialization or create_initialization()
train_dataset = create_iid_from_init(
initialization, input_observation, target_observation, n_observations=1000
)
test_dataset = create_iid_from_init(
initialization, input_observation, target_observation, n_observations=1000
)
train_loader = DataLoader(
dataset=train_dataset,
batch_size=batch_size,
shuffle=True,
pin_memory=True,
drop_last=True,
num_workers=n_workers,
sampler=None,
)
test_loader = DataLoader(
dataset=test_dataset,
batch_size=batch_size,
shuffle=True,
pin_memory=True,
drop_last=True,
num_workers=n_workers,
sampler=None,
)
data_args = dict(input_size=input_observation.resolution)
return train_loader, test_loader, data_args
def make_numpy_data_loaders(
X_train: np.ndarray,
y_train: np.ndarray,
X_test: np.ndarray,
y_test: np.ndarray,
batch_size: int = 64,
n_workers: int = 8,
) -> tuple[DataLoader, DataLoader, dict]:
"""Makes data loaders for initialization.
:param X_train: NumPy array for train inputs.
:param y_train: NumPy array for train labels.
:param X_test: NumPy array for test inputs.
:param y_test: NumPy array for test labels.
:param batch_size: Batch size for data loaders.
:param n_workers: Number of workers for data loaders.
:return:
"""
train_dataset = IIDFromInit(X_train, y_train)
test_dataset = IIDFromInit(X_test, y_test)
train_loader = DataLoader(
dataset=train_dataset,
batch_size=batch_size,
shuffle=True,
pin_memory=True,
drop_last=True,
num_workers=n_workers,
sampler=None,
)
test_loader = DataLoader(
dataset=test_dataset,
batch_size=batch_size,
shuffle=True,
pin_memory=True,
drop_last=True,
num_workers=n_workers,
sampler=None,
)
return train_loader, test_loader, {}
|
py | b4143205a5331e60e8f9d16830316e455d3286af | import torch
import torch.nn as nn
from torch.nn import functional as F
from torchvision import transforms, datasets
import torch.utils.data
from tqdm import tqdm
import argparse
import os
import numpy as np
import utils
import data
import time
import models
import wandb
import warnings
import random
from collections import OrderedDict
from datasets import ISIC_few_shot, EuroSAT_few_shot, CropDisease_few_shot, Chest_few_shot
from datasets import miniImageNet_few_shot, tiered_ImageNet_few_shot, ImageNet_few_shot
import copy
import math
def pseudolabel_dataset(embedding, clf, dataset, transform, transform_test, params):
'''
pseudolabel the dataset with the teacher model (embedding, clf)
'''
# Change the transform of the target dataset to the deterministic transformation
dataset.d.transform = transform_test
dataset.d.target_transform = (lambda x: x)
embedding.eval()
clf.eval()
loader = torch.utils.data.DataLoader(dataset, batch_size=params.bsize,
shuffle=False, drop_last=False, num_workers=params.num_workers)
# do an inference on the full target dataset
probs_all = []
for X, _ in loader:
X = X.cuda()
with torch.no_grad():
feature = embedding(X)
logits = clf(feature)
probs = F.softmax(logits, dim=1)
probs += 1e-6
probs_all.append(probs)
probs_all = torch.cat(probs_all, dim=0).cpu()
# Update the target dataset with the pseudolabel
if hasattr(dataset.d, 'targets'):
dataset.d.targets = probs_all
samples = [(i[0], probs_all[ind_i])for ind_i, i in enumerate(dataset.d.samples)]
dataset.d.samples = samples
dataset.d.imgs = samples
elif hasattr(dataset.d, "labels"):
dataset.d.labels = probs_all
else:
raise ValueError("No Targets variable found!")
# Switch the dataset's augmentation back to the stochastic augmentation
dataset.d.transform = transform
return dataset
def main(args):
# Set the scenes
if not os.path.isdir(args.dir):
os.makedirs(args.dir)
logger = utils.create_logger(os.path.join(
args.dir, 'checkpoint.log'), __name__)
trainlog = utils.savelog(args.dir, 'train')
vallog = utils.savelog(args.dir, 'val')
wandb.init(project='cross_task_distillation',
group=__file__,
name=f'{__file__}_{args.dir}')
wandb.config.update(args)
for arg in vars(args):
logger.info(f"{arg}: {getattr(args, arg)}")
# seed the random number generator
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
torch.manual_seed(args.seed)
torch.cuda.manual_seed(args.seed)
np.random.seed(args.seed)
random.seed(args.seed)
###########################
# Create Models
###########################
if args.model == 'resnet10':
backbone = models.ResNet10()
feature_dim = backbone.final_feat_dim
elif args.model == 'resnet12':
backbone = models.Resnet12(width=1, dropout=0.1)
feature_dim = backbone.output_size
elif args.model == 'resnet18':
backbone = models.resnet18(remove_last_relu=False,
input_high_res=True)
feature_dim = 512
else:
raise ValueError('Invalid backbone model')
backbone_sd_init = copy.deepcopy(backbone.state_dict())
# load the teacher
# specified at args.teacher_path
if args.teacher_path is not None:
if args.teacher_path_version == 0:
state = torch.load(args.teacher_path)['state']
clf_state = OrderedDict()
state_keys = list(state.keys())
for _, key in enumerate(state_keys):
if "feature." in key:
# the saved model stores the feature extractor under 'feature'; load it
# into the backbone by renaming 'feature.trunk.xx' to 'trunk.xx'
newkey = key.replace("feature.", "")
state[newkey] = state.pop(key)
elif "classifier." in key:
newkey = key.replace("classifier.", "")
clf_state[newkey] = state.pop(key)
else:
state.pop(key)
sd = state
clf_sd = clf_state
elif args.teacher_path_version == 1:
temp = torch.load(args.teacher_path)
sd = temp['model']
clf_sd = temp['clf']
else:
raise ValueError("Invalid load path version!")
backbone.load_state_dict(sd)
backbone = nn.DataParallel(backbone).cuda()
num_classes = clf_sd['weight'].shape[0]
clf_teacher = nn.Linear(feature_dim, num_classes).cuda()
clf_teacher.load_state_dict(clf_sd)
# the student classifier head
clf = nn.Linear(feature_dim, num_classes).cuda()
# initialize the student classifier head with the teacher classifier head
if args.use_pretrained_clf:
print("Loading Pretrained Classifier")
clf.load_state_dict(clf_sd)
############################
###########################
# Create DataLoader
###########################
# create the base dataset
if args.base_dataset == 'miniImageNet':
base_transform = miniImageNet_few_shot.TransformLoader(args.image_size).get_composed_transform(aug=True)
base_transform_test = miniImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
base_dataset = datasets.ImageFolder(root=args.base_path, transform=base_transform)
if args.base_split is not None:
base_dataset = miniImageNet_few_shot.construct_subset(
base_dataset, args.base_split)
elif args.base_dataset == 'tiered_ImageNet':
if args.image_size != 84:
warnings.warn("Tiered ImageNet: The image size for is not 84x84")
base_transform = tiered_ImageNet_few_shot.TransformLoader(args.image_size).get_composed_transform(aug=False)
base_transform_test = tiered_ImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
base_dataset = datasets.ImageFolder(
root=args.base_path, transform=base_transform)
if args.base_split is not None:
base_dataset = tiered_ImageNet_few_shot.construct_subset(
base_dataset, args.base_split)
elif args.base_dataset == 'ImageNet':
if args.base_no_color_jitter:
base_transform = transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
])
else:
warnings.warn("Using ImageNet with Color Jitter")
base_transform = ImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=True)
base_transform_test = ImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
base_dataset = datasets.ImageFolder(
root=args.base_path, transform=base_transform)
if args.base_split is not None:
base_dataset = ImageNet_few_shot.construct_subset(base_dataset, args.base_split)
print("Size of Base dataset:", len(base_dataset))
else:
raise ValueError("Invalid base dataset!")
# create the target dataset
if args.target_dataset == 'ISIC':
transform = ISIC_few_shot.TransformLoader(args.image_size).get_composed_transform(aug=True)
transform_test = ISIC_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
dataset = ISIC_few_shot.SimpleDataset(transform, split=args.target_subset_split)
elif args.target_dataset == 'EuroSAT':
transform = EuroSAT_few_shot.TransformLoader(args.image_size).get_composed_transform(aug=True)
transform_test = EuroSAT_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
dataset = EuroSAT_few_shot.SimpleDataset(transform, split=args.target_subset_split)
elif args.target_dataset == 'CropDisease':
transform = CropDisease_few_shot.TransformLoader(args.image_size).get_composed_transform(aug=True)
transform_test = CropDisease_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
dataset = CropDisease_few_shot.SimpleDataset(
transform, split=args.target_subset_split)
elif args.target_dataset == 'ChestX':
transform = Chest_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=True)
transform_test = Chest_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
dataset = Chest_few_shot.SimpleDataset(transform, split=args.target_subset_split)
elif args.target_dataset == 'miniImageNet_test':
transform = miniImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=True)
transform_test = miniImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
dataset = miniImageNet_few_shot.SimpleDataset(
transform, split=args.target_subset_split)
elif args.target_dataset == 'tiered_ImageNet_test':
if args.image_size != 84:
warnings.warn("Tiered ImageNet: The image size for is not 84x84")
transform = tiered_ImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=True)
transform_test = tiered_ImageNet_few_shot.TransformLoader(
args.image_size).get_composed_transform(aug=False)
dataset = tiered_ImageNet_few_shot.SimpleDataset(
transform, split=args.target_subset_split)
else:
raise ValueError('Invalid dataset!')
# pseudolabel target dataset
dataset = pseudolabel_dataset(backbone, clf_teacher, dataset,
transform, transform_test, args)
print("Size of target dataset", len(dataset))
dataset_test = copy.deepcopy(dataset)
dataset.d.transform = transform
dataset_test.d.transform = transform_test
ind = torch.randperm(len(dataset))
# initialize the student's backbone with random weights
if args.backbone_random_init:
backbone.module.load_state_dict(backbone_sd_init)
# split the target dataset into train and val
# 10% of the unlabeled data is used for validation
train_ind = ind[:int(0.9*len(ind))]
val_ind = ind[int(0.9*len(ind)):]
trainset = torch.utils.data.Subset(dataset, train_ind)
valset = torch.utils.data.Subset(dataset_test, val_ind)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.bsize,
num_workers=args.num_workers,
shuffle=True, drop_last=True)
valloader = torch.utils.data.DataLoader(valset, batch_size=args.bsize,
num_workers=args.num_workers,
shuffle=False, drop_last=False)
# Generate trainset and valset for base dataset
base_ind = torch.randperm(len(base_dataset))
base_train_ind = base_ind[:int((1 - args.base_val_ratio)*len(base_ind))]
base_val_ind = base_ind[int((1 - args.base_val_ratio)*len(base_ind)):]
base_dataset_val = copy.deepcopy(base_dataset)
base_dataset_val.transform = base_transform_test
base_trainset = torch.utils.data.Subset(base_dataset, base_train_ind)
base_valset = torch.utils.data.Subset(base_dataset_val, base_val_ind)
print("Size of base validation set", len(base_valset))
base_trainloader = torch.utils.data.DataLoader(base_trainset, batch_size=args.bsize,
num_workers=args.num_workers,
shuffle=True, drop_last=True)
base_valloader = torch.utils.data.DataLoader(base_valset, batch_size=args.bsize * 2,
num_workers=args.num_workers,
shuffle=False, drop_last=False)
############################
###########################
# Create Optimizer
###########################
optimizer = torch.optim.SGD([
{'params': backbone.parameters()},
{'params': clf.parameters()},
],
lr=0.1, momentum=0.9,
weight_decay=args.wd,
nesterov=False)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
mode='min', factor=0.5,
patience=10, verbose=False,
cooldown=10,
threshold_mode='rel',
threshold=1e-4, min_lr=1e-5)
#######################################
starting_epoch = 0
# whether to resume from the latest checkpoint
if args.resume_latest:
import re
pattern = "checkpoint_(\d+).pkl"
candidate = []
for i in os.listdir(args.dir):
match = re.search(pattern, i)
if match:
candidate.append(int(match.group(1)))
# if nothing found, then start from scratch
if len(candidate) == 0:
print('No latest candidate found to resume!')
logger.info('No latest candidate found to resume!')
else:
latest = np.amax(candidate)
load_path = os.path.join(args.dir, f'checkpoint_{latest}.pkl')
if latest >= args.epochs:
print('The latest checkpoint found ({}) is after the number of epochs (={}) specified! Exiting!'.format(
load_path, args.epochs))
logger.info('The latest checkpoint found ({}) is after the number of epochs (={}) specified! Exiting!'.format(
load_path, args.epochs))
import sys
sys.exit(0)
else:
best_model_path = os.path.join(args.dir, 'checkpoint_best.pkl')
# first load the previous best model
best_epoch = load_checkpoint(backbone, clf,
optimizer, scheduler, best_model_path)
logger.info('Latest model epoch: {}'.format(latest))
logger.info(
'Validate the best model checkpointed at epoch: {}'.format(best_epoch))
# Validate to set the right loss
performance_val = validate(backbone, clf,
valloader, base_valloader,
best_epoch, args.epochs, logger, vallog, args, postfix='Validation')
loss_val = performance_val['Loss_test/avg']
error_val = 100 - performance_val['top1_test_per_class/avg']
best_error = error_val
best_loss = loss_val
sd_best = torch.load(os.path.join(
args.dir, 'checkpoint_best.pkl'))
if latest > best_epoch:
starting_epoch = load_checkpoint(
backbone, clf, optimizer, scheduler, load_path)
else:
starting_epoch = best_epoch
logger.info(
'Continue Training at epoch: {}'.format(starting_epoch))
###########################################
####### Learning rate test ################
###########################################
if starting_epoch == 0:
### Start by doing a learning rate test
lr_candidates = [1e-1, 5e-2, 3e-2, 1e-2, 5e-3, 3e-3, 1e-3]
step = 50
# number of training epochs to get at least 50 updates
warm_up_epoch = math.ceil(step / len(trainloader))
# keep track of the student model initialization
# Need to keep reloading when testing different learning rates
sd_current = copy.deepcopy(backbone.state_dict())
sd_head = copy.deepcopy(clf.state_dict())
vals = []
# Test the learning rate by training for one epoch
for current_lr in lr_candidates:
lr_log = utils.savelog(args.dir, f'lr_{current_lr}')
# reload the student model
backbone.load_state_dict(sd_current)
clf.load_state_dict(sd_head)
# create the optimizer
optimizer = torch.optim.SGD([
{'params': backbone.parameters()},
{'params': clf.parameters()},
],
lr=current_lr, momentum=0.9,
weight_decay=args.wd,
nesterov=False)
logger.info(f'*** Testing Learning Rate: {current_lr}')
# training for a bit
for i in range(warm_up_epoch):
perf = train(backbone, clf, optimizer,
trainloader, base_trainloader,
i, warm_up_epoch, logger, lr_log, args, turn_off_sync=True)
# compute the validation loss for picking learning rates
perf_val = validate(backbone, clf, valloader,
base_valloader,
1, 1, logger, vallog, args, postfix='Validation',
turn_off_sync=True)
vals.append(perf_val['Loss_test/avg'])
# pick the best learning rates
current_lr = lr_candidates[int(np.argmin(vals))]
# reload the models
backbone.load_state_dict(sd_current)
clf.load_state_dict(sd_head)
logger.info(f"** Learning with lr: {current_lr}")
optimizer = torch.optim.SGD([
{'params': backbone.parameters()},
{'params': clf.parameters()},
],
lr=current_lr, momentum=0.9,
weight_decay=args.wd,
nesterov=False)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
mode='min', factor=0.5,
patience=10, verbose=False,
cooldown=10,
threshold_mode='rel',
threshold=1e-4, min_lr=1e-5)
scheduler.step(math.inf)
best_loss = math.inf
best_epoch = 0
checkpoint(backbone, clf,
optimizer, scheduler, os.path.join(
args.dir, f'checkpoint_best.pkl'), 0)
############################
# save the initialization
checkpoint(backbone, clf,
optimizer, scheduler,
os.path.join(
args.dir, f'checkpoint_{starting_epoch}.pkl'), starting_epoch)
try:
for epoch in tqdm(range(starting_epoch, args.epochs)):
perf = train(backbone, clf, optimizer, trainloader,
base_trainloader,
epoch, args.epochs, logger, trainlog, args)
scheduler.step(perf['Loss/avg'])
# Always checkpoint after first epoch of training
if (epoch == starting_epoch) or ((epoch + 1) % args.save_freq == 0):
checkpoint(backbone, clf,
optimizer, scheduler,
os.path.join(
args.dir, f'checkpoint_{epoch + 1}.pkl'), epoch + 1)
if (epoch == starting_epoch) or ((epoch + 1) % args.eval_freq == 0):
performance_val = validate(backbone, clf, valloader,
base_valloader,
epoch+1, args.epochs, logger, vallog, args, postfix='Validation')
loss_val = performance_val['Loss_test/avg']
if best_loss > loss_val:
best_epoch = epoch + 1
checkpoint(backbone, clf,
optimizer, scheduler, os.path.join(
args.dir, f'checkpoint_best.pkl'), best_epoch)
logger.info(
f"*** Best model checkpointed at Epoch {best_epoch}")
best_loss = loss_val
if (epoch + 1) % args.save_freq != 0:
checkpoint(backbone, clf,
optimizer, scheduler, os.path.join(
args.dir, f'checkpoint_{epoch + 1}.pkl'), epoch + 1)
finally:
trainlog.save()
vallog.save()
return
def checkpoint(model, clf, optimizer, scheduler, save_path, epoch):
    '''
    Save the backbone, classifier, optimizer and scheduler states.
    epoch: the number of training epochs completed so far;
    training should resume from this epoch.
    '''
sd = {
'model': copy.deepcopy(model.module.state_dict()),
'clf': copy.deepcopy(clf.state_dict()),
'opt': copy.deepcopy(optimizer.state_dict()),
'scheduler': copy.deepcopy(scheduler.state_dict()),
'epoch': epoch
}
torch.save(sd, save_path)
return sd
def load_checkpoint(model, clf, optimizer, scheduler, load_path):
    '''
    Load the model and optimizer states from load_path.
    Return the epoch from which to continue training.
    '''
sd = torch.load(load_path)
model.module.load_state_dict(sd['model'])
clf.load_state_dict(sd['clf'])
optimizer.load_state_dict(sd['opt'])
scheduler.load_state_dict(sd['scheduler'])
return sd['epoch']
def train(model, clf,
optimizer, trainloader, base_trainloader, epoch,
num_epochs, logger, trainlog, args, turn_off_sync=False):
meters = utils.AverageMeterSet()
model.train()
clf.train()
kl_criterion = nn.KLDivLoss(reduction='batchmean')
nll_criterion = nn.NLLLoss(reduction='mean')
base_loader_iter = iter(base_trainloader)
end = time.time()
for i, (X1, y) in enumerate(trainloader):
meters.update('Data_time', time.time() - end)
current_lr = optimizer.param_groups[0]['lr']
meters.update('lr', current_lr, 1)
X1 = X1.cuda()
y = y.cuda()
# Get the data from the base dataset
        try:
            X_base, y_base = next(base_loader_iter)
        except StopIteration:
            base_loader_iter = iter(base_trainloader)
            X_base, y_base = next(base_loader_iter)
X_base = X_base.cuda()
y_base = y_base.cuda()
optimizer.zero_grad()
# cross entropy loss on the base dataset
features_base = model(X_base)
logits_base = clf(features_base)
log_probability_base = F.log_softmax(logits_base, dim=1)
loss_base = nll_criterion(log_probability_base, y_base)
f1 = model(X1)
# Pseudolabel loss on the target dataset
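        # (y holds the teacher's soft class probabilities, hence KL divergence on log-probs)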
logits_xtask_1 = clf(f1)
log_probability_1 = F.log_softmax(logits_xtask_1, dim=1)
loss_xtask = kl_criterion(log_probability_1, y)
loss = loss_base + loss_xtask
loss.backward()
optimizer.step()
meters.update('Loss', loss.item(), 1)
meters.update('KL_Loss_target', loss_xtask.item(), 1)
meters.update('CE_Loss_source', loss_base.item(), 1)
perf = utils.accuracy(logits_xtask_1.data,
y.argmax(dim=1).data, topk=(1, ))
meters.update('top1', perf['average'][0].item(), len(X1))
meters.update('top1_per_class', perf['per_class_average'][0].item(), 1)
perf_base = utils.accuracy(logits_base.data,
y_base.data, topk=(1, ))
meters.update('top1_base', perf_base['average'][0].item(), len(X_base))
meters.update('top1_base_per_class', perf_base['per_class_average'][0].item(), 1)
meters.update('Batch_time', time.time() - end)
end = time.time()
if (i + 1) % args.print_freq == 0:
values = meters.values()
averages = meters.averages()
sums = meters.sums()
logger_string = ('Training Epoch: [{epoch}/{epochs}] Step: [{step} / {steps}] '
'Batch Time: {meters[Batch_time]:.4f} '
'Data Time: {meters[Data_time]:.4f} Average Loss: {meters[Loss]:.4f} '
'Average KL Loss (Target): {meters[KL_Loss_target]:.4f} '
'Average CE Loss (Source): {meters[CE_Loss_source]: .4f} '
'Learning Rate: {meters[lr]:.4f} '
'Top1: {meters[top1]:.4f} '
'Top1_per_class: {meters[top1_per_class]:.4f} '
'Top1_base: {meters[top1_base]:.4f} '
'Top1_base_per_class: {meters[top1_base_per_class]:.4f} '
).format(
epoch=epoch, epochs=num_epochs, step=i+1, steps=len(trainloader), meters=meters)
logger.info(logger_string)
if (args.iteration_bp is not None) and (i+1) == args.iteration_bp:
break
logger_string = ('Training Epoch: [{epoch}/{epochs}] Step: [{step}] Batch Time: {meters[Batch_time]:.4f} '
'Data Time: {meters[Data_time]:.4f} Average Loss: {meters[Loss]:.4f} '
'Average KL Loss (Target): {meters[KL_Loss_target]:.4f} '
'Average CE Loss (Source): {meters[CE_Loss_source]: .4f} '
'Learning Rate: {meters[lr]:.4f} '
'Top1: {meters[top1]:.4f} '
'Top1_per_class: {meters[top1_per_class]:.4f} '
'Top1_base: {meters[top1_base]:.4f} '
'Top1_base_per_class: {meters[top1_base_per_class]:.4f} '
).format(
epoch=epoch+1, epochs=num_epochs, step=0, meters=meters)
logger.info(logger_string)
values = meters.values()
averages = meters.averages()
sums = meters.sums()
trainlog.record(epoch+1, {
**values,
**averages,
**sums
})
if not turn_off_sync:
wandb.log({'loss': averages['Loss/avg']}, step=epoch+1)
wandb.log(
{'ce_loss_source': averages['CE_Loss_source/avg']}, step=epoch+1)
wandb.log(
{'kl_loss_target': averages['KL_Loss_target/avg']}, step=epoch+1)
wandb.log({'top1': averages['top1/avg'],
'top1_per_class': averages['top1_per_class/avg'],
}, step=epoch+1)
wandb.log({'top1_base': averages['top1_base/avg'],
'top1_base_per_class': averages['top1_base_per_class/avg'],
}, step=epoch+1)
return averages
def validate(model, clf,
testloader, base_loader, epoch, num_epochs, logger,
testlog, args, postfix='Validation', turn_off_sync=False):
meters = utils.AverageMeterSet()
model.eval()
clf.eval()
criterion_xtask = nn.KLDivLoss(reduction='batchmean')
nll_criterion = nn.NLLLoss(reduction='mean')
logits_xtask_test_all = []
ys_all = []
end = time.time()
# Compute the loss for the target dataset
with torch.no_grad():
for _, (Xtest, y) in enumerate(testloader):
Xtest = Xtest.cuda()
y = y.cuda()
ftest = model(Xtest)
# get the logits for xtask
logits_xtask_test = clf(ftest)
logits_xtask_test_all.append(logits_xtask_test)
ys_all.append(y)
ys_all = torch.cat(ys_all, dim=0)
logits_xtask_test_all = torch.cat(logits_xtask_test_all, dim=0)
log_probability = F.log_softmax(logits_xtask_test_all, dim=1)
loss_xtask = criterion_xtask(log_probability, ys_all)
logits_base_all = []
ys_base_all = []
with torch.no_grad():
# Compute the loss on the source base dataset
for X_base, y_base in base_loader:
X_base = X_base.cuda()
y_base = y_base.cuda()
features = model(X_base)
logits_base = clf(features)
logits_base_all.append(logits_base)
ys_base_all.append(y_base)
ys_base_all = torch.cat(ys_base_all, dim=0)
logits_base_all = torch.cat(logits_base_all, dim=0)
log_probability_base = F.log_softmax(logits_base_all, dim=1)
loss_base = nll_criterion(log_probability_base, ys_base_all)
loss = loss_xtask + loss_base
meters.update('CE_Loss_source_test', loss_base.item(), 1)
meters.update('KL_Loss_target_test', loss_xtask.item(), 1)
meters.update('Loss_test', loss.item(), 1)
perf = utils.accuracy(logits_xtask_test_all.data,
ys_all.argmax(dim=1).data, topk=(1, ))
meters.update('top1_test', perf['average'][0].item(), 1)
meters.update('top1_test_per_class',
perf['per_class_average'][0].item(), 1)
perf_base = utils.accuracy(logits_base_all.data,
ys_base_all.data, topk=(1, ))
meters.update('top1_base_test', perf_base['average'][0].item(), 1)
meters.update('top1_base_test_per_class',
perf_base['per_class_average'][0].item(), 1)
meters.update('Batch_time', time.time() - end)
logger_string = ('{postfix} Epoch: [{epoch}/{epochs}] Batch Time: {meters[Batch_time]:.4f} '
'Average Test Loss: {meters[Loss_test]:.4f} '
'Average Test KL Loss (Target): {meters[KL_Loss_target_test]: .4f} '
'Average CE Loss (Source): {meters[CE_Loss_source_test]: .4f} '
'Top1_test: {meters[top1_test]:.4f} '
'Top1_test_per_class: {meters[top1_test_per_class]:.4f} '
'Top1_base_test: {meters[top1_base_test]:.4f} '
'Top1_base_test_per_class: {meters[top1_base_test_per_class]:.4f} ').format(
postfix=postfix, epoch=epoch, epochs=num_epochs, meters=meters)
logger.info(logger_string)
values = meters.values()
averages = meters.averages()
sums = meters.sums()
testlog.record(epoch, {
**values,
**averages,
**sums
})
if postfix != '':
postfix = '_' + postfix
if not turn_off_sync:
wandb.log({'loss' + postfix: averages['Loss_test/avg']}, step=epoch)
wandb.log(
{'kl_loss_target' + postfix: averages['KL_Loss_target_test/avg']}, step=epoch)
wandb.log(
{'ce_loss_source' + postfix: averages['CE_Loss_source_test/avg']}, step=epoch)
wandb.log({'top1' + postfix: averages['top1_test/avg'],
'top1_per_class' + postfix: averages['top1_test_per_class/avg'],
}, step=epoch)
wandb.log({'top1_base' + postfix: averages['top1_base_test/avg'],
'top1_base_per_class' + postfix: averages['top1_base_test_per_class/avg'],
}, step=epoch)
return averages
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='STARTUP without Self-supervision')
parser.add_argument('--dir', type=str, default='.',
help='directory to save the checkpoints')
parser.add_argument('--bsize', type=int, default=32,
help='batch_size for STARTUP')
parser.add_argument('--epochs', type=int, default=50,
help='Number of training epochs')
parser.add_argument('--save_freq', type=int, default=5,
help='Frequency (in epoch) to save')
parser.add_argument('--eval_freq', type=int, default=1,
help='Frequency (in epoch) to evaluate on the val set')
    parser.add_argument('--print_freq', type=int, default=5,
                        help='Frequency (in steps) to print training stats')
parser.add_argument('--load_path', type=str, default=None,
help='Path to the checkpoint to be loaded')
parser.add_argument('--seed', type=int, default=1,
help='Seed for randomness')
parser.add_argument('--wd', type=float, default=1e-4,
help='Weight decay for the model')
parser.add_argument('--resume_latest', action='store_true',
help='resume from the latest model in args.dir')
parser.add_argument('--num_workers', type=int, default=4,
help='Number of workers for dataloader')
parser.add_argument('--iteration_bp', type=int, help='which step to break in the training loop')
parser.add_argument('--model', type=str, default='resnet10',
help='Backbone model')
parser.add_argument('--teacher_path', type=str, required=True,
help='path to the teacher model')
parser.add_argument('--teacher_path_version', type=int, default=1,
help='how to load the teacher')
    parser.add_argument('--use_pretrained_clf', action='store_true',
                        help="whether to initialize the student's classifier with the teacher's classifier")
    parser.add_argument('--backbone_random_init', action='store_true',
                        help="use a randomly initialized backbone for the student")
parser.add_argument('--base_dataset', type=str, required=True, help='base_dataset to use')
parser.add_argument('--base_path', type=str, required=True, help='path to base dataset')
parser.add_argument('--base_split', type=str, help='split for the base dataset')
parser.add_argument('--base_no_color_jitter', action='store_true', help='remove color jitter for ImageNet')
parser.add_argument('--base_val_ratio', type=float, default=0.05, help='amount of base dataset set aside for validation')
parser.add_argument('--target_dataset', type=str, required=True,
help='the target domain dataset')
parser.add_argument('--target_subset_split', type=str,
help='path to the csv files that specifies the unlabeled split for the target dataset')
parser.add_argument('--image_size', type=int, default=224,
help='Resolution of the input image')
args = parser.parse_args()
main(args)
|
py | b414324a79af495c75075bcb214c45a181741e69 | s = input()
def f(x):
    # Decode a Borze (ternary Morse-like) code: "." -> 0, "-." -> 1, "--" -> 2.
    if x == "":
        return ""
    if x[0] == ".":
        return "0" + f(x[1:])
    elif x[:2] == "-.":
        return "1" + f(x[2:])
    elif x[:2] == "--":
        return "2" + f(x[2:])
print(f(s))
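# Example: ".-.--" decodes to "012".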
|
py | b41432a7dd3e474fc8cc4af8c170bb95184a3da5 | # (c) 2012-2018, Ansible by Red Hat
#
# This file is part of Ansible Galaxy
#
# Ansible Galaxy is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by
# the Apache Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Ansible Galaxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License
# along with Galaxy. If not, see <http://www.apache.org/licenses/>.
from .content import * # noqa
from .content_block import * # noqa
from .content_type import * # noqa
from .email import * # noqa
from .namespace import * # noqa
from .notification import * # noqa
from .provider import * # noqa
from .provider_source import * # noqa
from .repository_source import * # noqa
from .repository import * # noqa
from .provider_namespace import * # noqa
from .roles import * # noqa
from .users import * # noqa
from .serializers import * # noqa
from .token import * # noqa
from .survey import * # noqa
|
py | b41432b9ebe1e85758e465e919601cb2acf22edb | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiMarketingAdvertisingQueryModel(object):
def __init__(self):
self._ad_id = None
@property
def ad_id(self):
return self._ad_id
@ad_id.setter
def ad_id(self, value):
self._ad_id = value
def to_alipay_dict(self):
params = dict()
if self.ad_id:
if hasattr(self.ad_id, 'to_alipay_dict'):
params['ad_id'] = self.ad_id.to_alipay_dict()
else:
params['ad_id'] = self.ad_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = KoubeiMarketingAdvertisingQueryModel()
if 'ad_id' in d:
o.ad_id = d['ad_id']
return o
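# A minimal round-trip sketch (illustrative value only):
#   m = KoubeiMarketingAdvertisingQueryModel()
#   m.ad_id = '2088xxxx'
#   assert KoubeiMarketingAdvertisingQueryModel.from_alipay_dict(m.to_alipay_dict()).ad_id == m.ad_id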
|
py | b41433260195908bfee7b6d8d392851f2b22b07e | # Copyright 2020, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Import related codes.
That is import as expression, and star import.
"""
from nuitka.nodes.LocalsScopes import GlobalsDictHandle
from nuitka.PythonVersions import python_version
from .CodeHelpers import (
generateChildExpressionsCode,
generateExpressionCode,
withObjectCodeTemporaryAssignment,
)
from .ErrorCodes import getErrorExitBoolCode, getErrorExitCode
from .LineNumberCodes import emitLineNumberUpdateCode
from .ModuleCodes import getModuleAccessCode
def generateBuiltinImportCode(to_name, expression, emit, context):
# We know that 5 expressions are created, pylint: disable=W0632
module_name, globals_name, locals_name, import_list_name, level_name = generateChildExpressionsCode(
expression=expression, emit=emit, context=context
)
with withObjectCodeTemporaryAssignment(
to_name, "imported_value", expression, emit, context
) as value_name:
_getBuiltinImportCode(
to_name=value_name,
module_name=module_name,
globals_name=globals_name,
locals_name=locals_name,
import_list_name=import_list_name,
level_name=level_name,
needs_check=expression.mayRaiseException(BaseException),
emit=emit,
context=context,
)
# TODO: Maybe use this for other cases too, not just import.
def _getCountedArgumentsHelperCallCode(
helper_prefix, to_name, args, min_args, needs_check, emit, context
):
orig_args = args
args = list(args)
while args[-1] is None:
del args[-1]
if None in args:
emit(
"%s = %s_KW(%s);"
% (
to_name,
helper_prefix,
", ".join("NULL" if arg is None else str(arg) for arg in orig_args),
)
)
else:
# Check that no following arguments are not None.
assert len(args) >= min_args
emit(
"%s = %s%d(%s);"
% (to_name, helper_prefix, len(args), ", ".join(str(arg) for arg in args))
)
getErrorExitCode(
check_name=to_name,
release_names=args,
needs_check=needs_check,
emit=emit,
context=context,
)
context.addCleanupTempName(to_name)
def _getBuiltinImportCode(
to_name,
module_name,
globals_name,
locals_name,
import_list_name,
level_name,
needs_check,
emit,
context,
):
emitLineNumberUpdateCode(emit, context)
_getCountedArgumentsHelperCallCode(
helper_prefix="IMPORT_MODULE",
to_name=to_name,
args=(module_name, globals_name, locals_name, import_list_name, level_name),
min_args=1,
needs_check=needs_check,
emit=emit,
context=context,
)
def generateImportModuleHardCode(to_name, expression, emit, context):
module_name = expression.getModuleName()
needs_check = expression.mayRaiseException(BaseException)
emitLineNumberUpdateCode(emit, context)
with withObjectCodeTemporaryAssignment(
to_name, "imported_value", expression, emit, context
) as value_name:
emit("""%s = PyImport_ImportModule("%s");""" % (value_name, module_name))
getErrorExitCode(
check_name=value_name, needs_check=needs_check, emit=emit, context=context
)
def generateImportModuleNameHardCode(to_name, expression, emit, context):
module_name = expression.getModuleName()
import_name = expression.getImportName()
needs_check = expression.mayRaiseException(BaseException)
with withObjectCodeTemporaryAssignment(
to_name, "imported_value", expression, emit, context
) as value_name:
if module_name == "sys":
emit("""%s = PySys_GetObject((char *)"%s");""" % (value_name, import_name))
elif module_name in ("os", "__future__", "importlib._bootstrap"):
emitLineNumberUpdateCode(emit, context)
emit(
"""\
{
PyObject *hard_module = PyImport_ImportModule("%(module_name)s");
if (likely(hard_module != NULL)) {
%(to_name)s = PyObject_GetAttr(hard_module, %(import_name)s);
} else {
%(to_name)s = NULL;
}
}
"""
% {
"to_name": value_name,
"module_name": module_name,
"import_name": context.getConstantCode(import_name),
}
)
else:
assert False, module_name
getErrorExitCode(
check_name=value_name, needs_check=needs_check, emit=emit, context=context
)
def generateImportStarCode(statement, emit, context):
module_name = context.allocateTempName("star_imported")
generateExpressionCode(
to_name=module_name,
expression=statement.getSourceModule(),
emit=emit,
context=context,
)
old_source_ref = context.setCurrentSourceCodeReference(
statement.getSourceReference()
)
res_name = context.getBoolResName()
target_scope = statement.getTargetDictScope()
if type(target_scope) is GlobalsDictHandle:
emit(
"%s = IMPORT_MODULE_STAR(%s, true, %s);"
% (res_name, getModuleAccessCode(context=context), module_name)
)
else:
locals_declaration = context.addLocalsDictName(target_scope.getCodeName())
emit(
"""
%(res_name)s = IMPORT_MODULE_STAR(%(locals_dict)s, false, %(module_name)s);
"""
% {
"res_name": res_name,
"locals_dict": locals_declaration,
"module_name": module_name,
}
)
getErrorExitBoolCode(
condition="%s == false" % res_name,
release_name=module_name,
emit=emit,
context=context,
)
context.setCurrentSourceCodeReference(old_source_ref)
def generateImportNameCode(to_name, expression, emit, context):
from_arg_name = context.allocateTempName("import_name_from")
generateExpressionCode(
to_name=from_arg_name,
expression=expression.getModule(),
emit=emit,
context=context,
)
with withObjectCodeTemporaryAssignment(
to_name, "imported_value", expression, emit, context
) as value_name:
if python_version >= 350:
emit(
"""\
if (PyModule_Check(%(from_arg_name)s)) {
%(to_name)s = IMPORT_NAME_OR_MODULE(
%(from_arg_name)s,
(PyObject *)moduledict_%(module_identifier)s,
%(import_name)s,
%(import_level)s
);
} else {
%(to_name)s = IMPORT_NAME(%(from_arg_name)s, %(import_name)s);
}
"""
% {
"to_name": value_name,
"from_arg_name": from_arg_name,
"import_name": context.getConstantCode(
constant=expression.getImportName()
),
"import_level": context.getConstantCode(
constant=expression.getImportLevel()
),
"module_identifier": context.getModuleCodeName(),
}
)
else:
emit(
"%s = IMPORT_NAME(%s, %s);"
% (
value_name,
from_arg_name,
context.getConstantCode(constant=expression.getImportName()),
)
)
getErrorExitCode(
check_name=value_name,
release_name=from_arg_name,
needs_check=expression.mayRaiseException(BaseException),
emit=emit,
context=context,
)
context.addCleanupTempName(value_name)
|
py | b41434f61c6ad4e778a0916879f7a7b5233fd9f7 | """
This module converts requested URLs to callback view functions.
RegexURLResolver is the main class here. Its resolve() method takes a URL (as
a string) and returns a ResolverMatch object which provides access to all
attributes of the resolved URL match.
"""
import functools
import re
import threading
from importlib import import_module
from urllib.parse import quote
from django.conf import settings
from django.core.checks import Warning
from django.core.checks.urls import check_resolver
from django.core.exceptions import ImproperlyConfigured
from django.utils.datastructures import MultiValueDict
from django.utils.functional import cached_property
from django.utils.http import RFC3986_SUBDELIMS
from django.utils.regex_helper import normalize
from django.utils.translation import get_language
from .converters import get_converter
from .exceptions import NoReverseMatch, Resolver404
from .utils import get_callable
class ResolverMatch:
def __init__(self, func, args, kwargs, url_name=None, app_names=None, namespaces=None):
self.func = func
self.args = args
self.kwargs = kwargs
self.url_name = url_name
# If a URLRegexResolver doesn't have a namespace or app_name, it passes
# in an empty value.
self.app_names = [x for x in app_names if x] if app_names else []
self.app_name = ':'.join(self.app_names)
self.namespaces = [x for x in namespaces if x] if namespaces else []
self.namespace = ':'.join(self.namespaces)
if not hasattr(func, '__name__'):
# A class-based view
self._func_path = func.__class__.__module__ + '.' + func.__class__.__name__
else:
# A function-based view
self._func_path = func.__module__ + '.' + func.__name__
view_path = url_name or self._func_path
self.view_name = ':'.join(self.namespaces + [view_path])
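    # Supports tuple-style unpacking: view_func, args, kwargs = resolver_match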
def __getitem__(self, index):
return (self.func, self.args, self.kwargs)[index]
def __repr__(self):
return "ResolverMatch(func=%s, args=%s, kwargs=%s, url_name=%s, app_names=%s, namespaces=%s)" % (
self._func_path, self.args, self.kwargs, self.url_name,
self.app_names, self.namespaces,
)
@functools.lru_cache(maxsize=None)
def get_resolver(urlconf=None):
if urlconf is None:
from django.conf import settings
urlconf = settings.ROOT_URLCONF
return URLResolver(RegexPattern(r'^/'), urlconf)
@functools.lru_cache(maxsize=None)
def get_ns_resolver(ns_pattern, resolver):
# Build a namespaced resolver for the given parent URLconf pattern.
# This makes it possible to have captured parameters in the parent
# URLconf pattern.
ns_resolver = URLResolver(RegexPattern(ns_pattern), resolver.url_patterns)
return URLResolver(RegexPattern(r'^/'), [ns_resolver])
class LocaleRegexDescriptor:
def __init__(self, attr):
self.attr = attr
def __get__(self, instance, cls=None):
"""
Return a compiled regular expression based on the active language.
"""
if instance is None:
return self
# As a performance optimization, if the given regex string is a regular
# string (not a lazily-translated string proxy), compile it once and
# avoid per-language compilation.
pattern = getattr(instance, self.attr)
if isinstance(pattern, str):
instance.__dict__['regex'] = instance._compile(pattern)
return instance.__dict__['regex']
language_code = get_language()
if language_code not in instance._regex_dict:
instance._regex_dict[language_code] = instance._compile(str(pattern))
return instance._regex_dict[language_code]
class CheckURLMixin:
def describe(self):
"""
Format the URL pattern for display in warning messages.
"""
description = "'{}'".format(self)
if self.name:
description += " [name='{}']".format(self.name)
return description
def _check_pattern_startswith_slash(self):
"""
Check that the pattern does not begin with a forward slash.
"""
regex_pattern = self.regex.pattern
if not settings.APPEND_SLASH:
# Skip check as it can be useful to start a URL pattern with a slash
# when APPEND_SLASH=False.
return []
if regex_pattern.startswith(('/', '^/', '^\\/')) and not regex_pattern.endswith('/'):
warning = Warning(
"Your URL pattern {} has a route beginning with a '/'. Remove this "
"slash as it is unnecessary. If this pattern is targeted in an "
"include(), ensure the include() pattern has a trailing '/'.".format(
self.describe()
),
id="urls.W002",
)
return [warning]
else:
return []
class RegexPattern(CheckURLMixin):
regex = LocaleRegexDescriptor('_regex')
def __init__(self, regex, name=None, is_endpoint=False):
self._regex = regex
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = {}
def match(self, path):
match = self.regex.search(path)
if match:
# If there are any named groups, use those as kwargs, ignoring
# non-named groups. Otherwise, pass all non-named arguments as
# positional arguments.
kwargs = match.groupdict()
args = () if kwargs else match.groups()
return path[match.end():], args, kwargs
return None
def check(self):
warnings = []
warnings.extend(self._check_pattern_startswith_slash())
if not self._is_endpoint:
warnings.extend(self._check_include_trailing_dollar())
return warnings
def _check_include_trailing_dollar(self):
regex_pattern = self.regex.pattern
if regex_pattern.endswith('$') and not regex_pattern.endswith(r'\$'):
return [Warning(
"Your URL pattern {} uses include with a route ending with a '$'. "
"Remove the dollar from the route to avoid problems including "
"URLs.".format(self.describe()),
id='urls.W001',
)]
else:
return []
def _compile(self, regex):
"""Compile and return the given regular expression."""
try:
return re.compile(regex)
except re.error as e:
raise ImproperlyConfigured(
'"%s" is not a valid regular expression: %s' % (regex, e)
)
def __str__(self):
return self._regex
_PATH_PARAMETER_COMPONENT_RE = re.compile(
r'<(?:(?P<converter>[^>:]+):)?(?P<parameter>\w+)>'
)
def _route_to_regex(route, is_endpoint=False):
"""
Convert a path pattern into a regular expression. Return the regular
expression and a dictionary mapping the capture names to the converters.
For example, 'foo/<int:pk>' returns '^foo\\/(?P<pk>[0-9]+)'
and {'pk': <django.urls.converters.IntConverter>}.
"""
original_route = route
parts = ['^']
converters = {}
while True:
match = _PATH_PARAMETER_COMPONENT_RE.search(route)
if not match:
parts.append(re.escape(route))
break
parts.append(re.escape(route[:match.start()]))
route = route[match.end():]
parameter = match.group('parameter')
if not parameter.isidentifier():
raise ImproperlyConfigured(
"URL route '%s' uses parameter name %r which isn't a valid "
"Python identifier." % (original_route, parameter)
)
raw_converter = match.group('converter')
if raw_converter is None:
# If a converter isn't specified, the default is `str`.
raw_converter = 'str'
try:
converter = get_converter(raw_converter)
except KeyError as e:
raise ImproperlyConfigured(
"URL route '%s' uses invalid converter %s." % (original_route, e)
)
converters[parameter] = converter
parts.append('(?P<' + parameter + '>' + converter.regex + ')')
if is_endpoint:
parts.append('$')
return ''.join(parts), converters
class RoutePattern(CheckURLMixin):
regex = LocaleRegexDescriptor('_route')
def __init__(self, route, name=None, is_endpoint=False):
self._route = route
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = _route_to_regex(str(route), is_endpoint)[1]
def match(self, path):
match = self.regex.search(path)
if match:
# RoutePattern doesn't allow non-named groups so args are ignored.
kwargs = match.groupdict()
for key, value in kwargs.items():
converter = self.converters[key]
try:
kwargs[key] = converter.to_python(value)
except ValueError:
return None
return path[match.end():], (), kwargs
return None
def check(self):
return self._check_pattern_startswith_slash()
def _compile(self, route):
return re.compile(_route_to_regex(route, self._is_endpoint)[0])
def __str__(self):
return self._route
class LocalePrefixPattern:
def __init__(self, prefix_default_language=True):
self.prefix_default_language = prefix_default_language
self.converters = {}
@property
def regex(self):
# This is only used by reverse() and cached in _reverse_dict.
return re.compile(self.language_prefix)
@property
def language_prefix(self):
language_code = get_language() or settings.LANGUAGE_CODE
if language_code == settings.LANGUAGE_CODE and not self.prefix_default_language:
return ''
else:
return '%s/' % language_code
def match(self, path):
language_prefix = self.language_prefix
if path.startswith(language_prefix):
return path[len(language_prefix):], (), {}
return None
def check(self):
return []
def describe(self):
return "'{}'".format(self)
def __str__(self):
return self.language_prefix
class URLPattern:
def __init__(self, pattern, callback, default_args=None, name=None):
self.pattern = pattern
self.callback = callback # the view
self.default_args = default_args or {}
self.name = name
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.pattern.describe())
def check(self):
warnings = self._check_pattern_name()
warnings.extend(self.pattern.check())
return warnings
def _check_pattern_name(self):
"""
Check that the pattern name does not contain a colon.
"""
if self.pattern.name is not None and ":" in self.pattern.name:
warning = Warning(
"Your URL pattern {} has a name including a ':'. Remove the colon, to "
"avoid ambiguous namespace references.".format(self.pattern.describe()),
id="urls.W003",
)
return [warning]
else:
return []
def resolve(self, path):
match = self.pattern.match(path)
if match:
new_path, args, kwargs = match
# Pass any extra_kwargs as **kwargs.
kwargs.update(self.default_args)
return ResolverMatch(self.callback, args, kwargs, self.pattern.name)
@cached_property
def lookup_str(self):
"""
A string that identifies the view (e.g. 'path.to.view_function' or
'path.to.ClassBasedView').
"""
callback = self.callback
if isinstance(callback, functools.partial):
callback = callback.func
if not hasattr(callback, '__name__'):
return callback.__module__ + "." + callback.__class__.__name__
return callback.__module__ + "." + callback.__qualname__
class URLResolver:
def __init__(self, pattern, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
self.pattern = pattern
# urlconf_name is the dotted Python path to the module defining
# urlpatterns. It may also be an object with an urlpatterns attribute
# or urlpatterns itself.
self.urlconf_name = urlconf_name
self.callback = None
self.default_kwargs = default_kwargs or {}
self.namespace = namespace
self.app_name = app_name
self._reverse_dict = {}
self._namespace_dict = {}
self._app_dict = {}
# set of dotted paths to all functions and classes that are used in
# urlpatterns
self._callback_strs = set()
self._populated = False
self._local = threading.local()
def __repr__(self):
if isinstance(self.urlconf_name, list) and len(self.urlconf_name):
# Don't bother to output the whole list, it can be huge
urlconf_repr = '<%s list>' % self.urlconf_name[0].__class__.__name__
else:
urlconf_repr = repr(self.urlconf_name)
return '<%s %s (%s:%s) %s>' % (
self.__class__.__name__, urlconf_repr, self.app_name,
self.namespace, self.pattern.describe(),
)
def check(self):
warnings = []
for pattern in self.url_patterns:
warnings.extend(check_resolver(pattern))
if not warnings:
warnings = self.pattern.check()
return warnings
def _populate(self):
# Short-circuit if called recursively in this thread to prevent
# infinite recursion. Concurrent threads may call this at the same
# time and will need to continue, so set 'populating' on a
# thread-local variable.
if getattr(self._local, 'populating', False):
return
try:
self._local.populating = True
lookups = MultiValueDict()
namespaces = {}
apps = {}
language_code = get_language()
for url_pattern in reversed(self.url_patterns):
p_pattern = url_pattern.pattern.regex.pattern
if p_pattern.startswith('^'):
p_pattern = p_pattern[1:]
if isinstance(url_pattern, URLPattern):
self._callback_strs.add(url_pattern.lookup_str)
bits = normalize(url_pattern.pattern.regex.pattern)
lookups.appendlist(
url_pattern.callback,
(bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters)
)
if url_pattern.name is not None:
lookups.appendlist(
url_pattern.name,
(bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters)
)
else: # url_pattern is a URLResolver.
url_pattern._populate()
if url_pattern.app_name:
apps.setdefault(url_pattern.app_name, []).append(url_pattern.namespace)
namespaces[url_pattern.namespace] = (p_pattern, url_pattern)
else:
for name in url_pattern.reverse_dict:
for matches, pat, defaults, converters in url_pattern.reverse_dict.getlist(name):
new_matches = normalize(p_pattern + pat)
lookups.appendlist(
name,
(
new_matches,
p_pattern + pat,
dict(defaults, **url_pattern.default_kwargs),
dict(self.pattern.converters, **converters)
)
)
for namespace, (prefix, sub_pattern) in url_pattern.namespace_dict.items():
namespaces[namespace] = (p_pattern + prefix, sub_pattern)
for app_name, namespace_list in url_pattern.app_dict.items():
apps.setdefault(app_name, []).extend(namespace_list)
self._callback_strs.update(url_pattern._callback_strs)
self._namespace_dict[language_code] = namespaces
self._app_dict[language_code] = apps
self._reverse_dict[language_code] = lookups
self._populated = True
finally:
self._local.populating = False
@property
def reverse_dict(self):
language_code = get_language()
if language_code not in self._reverse_dict:
self._populate()
return self._reverse_dict[language_code]
@property
def namespace_dict(self):
language_code = get_language()
if language_code not in self._namespace_dict:
self._populate()
return self._namespace_dict[language_code]
@property
def app_dict(self):
language_code = get_language()
if language_code not in self._app_dict:
self._populate()
return self._app_dict[language_code]
def _is_callback(self, name):
if not self._populated:
self._populate()
return name in self._callback_strs
def resolve(self, path):
path = str(path) # path may be a reverse_lazy object
tried = []
match = self.pattern.match(path)
if match:
new_path, args, kwargs = match
for pattern in self.url_patterns:
try:
sub_match = pattern.resolve(new_path)
except Resolver404 as e:
sub_tried = e.args[0].get('tried')
if sub_tried is not None:
tried.extend([pattern] + t for t in sub_tried)
else:
tried.append([pattern])
else:
if sub_match:
# Merge captured arguments in match with submatch
sub_match_dict = dict(kwargs, **self.default_kwargs)
# Update the sub_match_dict with the kwargs from the sub_match.
sub_match_dict.update(sub_match.kwargs)
# If there are *any* named groups, ignore all non-named groups.
# Otherwise, pass all non-named arguments as positional arguments.
sub_match_args = sub_match.args
if not sub_match_dict:
sub_match_args = args + sub_match.args
return ResolverMatch(
sub_match.func,
sub_match_args,
sub_match_dict,
sub_match.url_name,
[self.app_name] + sub_match.app_names,
[self.namespace] + sub_match.namespaces,
)
tried.append([pattern])
raise Resolver404({'tried': tried, 'path': new_path})
raise Resolver404({'path': path})
@cached_property
def urlconf_module(self):
if isinstance(self.urlconf_name, str):
return import_module(self.urlconf_name)
else:
return self.urlconf_name
@cached_property
def url_patterns(self):
# urlconf_module might be a valid set of patterns, so we default to it
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
try:
iter(patterns)
except TypeError:
msg = (
"The included URLconf '{name}' does not appear to have any "
"patterns in it. If you see valid patterns in the file then "
"the issue is probably caused by a circular import."
)
raise ImproperlyConfigured(msg.format(name=self.urlconf_name))
return patterns
def resolve_error_handler(self, view_type):
callback = getattr(self.urlconf_module, 'handler%s' % view_type, None)
if not callback:
# No handler specified in file; use lazy import, since
# django.conf.urls imports this file.
from django.conf import urls
callback = getattr(urls, 'handler%s' % view_type)
return get_callable(callback), {}
def reverse(self, lookup_view, *args, **kwargs):
return self._reverse_with_prefix(lookup_view, '', *args, **kwargs)
def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
if args and kwargs:
raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
if not self._populated:
self._populate()
possibilities = self.reverse_dict.getlist(lookup_view)
for possibility, pattern, defaults, converters in possibilities:
for result, params in possibility:
if args:
if len(args) != len(params):
continue
candidate_subs = dict(zip(params, args))
else:
if set(kwargs).symmetric_difference(params).difference(defaults):
continue
matches = True
for k, v in defaults.items():
if kwargs.get(k, v) != v:
matches = False
break
if not matches:
continue
candidate_subs = kwargs
# Convert the candidate subs to text using Converter.to_url().
text_candidate_subs = {}
for k, v in candidate_subs.items():
if k in converters:
text_candidate_subs[k] = converters[k].to_url(v)
else:
text_candidate_subs[k] = str(v)
# WSGI provides decoded URLs, without %xx escapes, and the URL
# resolver operates on such URLs. First substitute arguments
# without quoting to build a decoded URL and look for a match.
# Then, if we have a match, redo the substitution with quoted
# arguments in order to return a properly encoded URL.
candidate_pat = _prefix.replace('%', '%%') + result
if re.search('^%s%s' % (re.escape(_prefix), pattern), candidate_pat % text_candidate_subs):
# safe characters from `pchar` definition of RFC 3986
url = quote(candidate_pat % text_candidate_subs, safe=RFC3986_SUBDELIMS + '/~:@')
# Don't allow construction of scheme relative urls.
if url.startswith('//'):
url = '/%%2F%s' % url[2:]
return url
# lookup_view can be URL name or callable, but callables are not
# friendly in error messages.
m = getattr(lookup_view, '__module__', None)
n = getattr(lookup_view, '__name__', None)
if m is not None and n is not None:
lookup_view_s = "%s.%s" % (m, n)
else:
lookup_view_s = lookup_view
patterns = [pattern for (_, pattern, _, _) in possibilities]
if patterns:
if args:
arg_msg = "arguments '%s'" % (args,)
elif kwargs:
arg_msg = "keyword arguments '%s'" % (kwargs,)
else:
arg_msg = "no arguments"
msg = (
"Reverse for '%s' with %s not found. %d pattern(s) tried: %s" %
(lookup_view_s, arg_msg, len(patterns), patterns)
)
else:
msg = (
"Reverse for '%(view)s' not found. '%(view)s' is not "
"a valid view function or pattern name." % {'view': lookup_view_s}
)
raise NoReverseMatch(msg)
|
py | b41435021d9785c3fec3eaa175be6f2f2905d69a | # Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for voiceover services."""
from __future__ import annotations
from core import feconf
from core.constants import constants
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import opportunity_services
from core.domain import question_services
from core.domain import rights_manager
from core.domain import story_domain
from core.domain import story_services
from core.domain import topic_domain
from core.domain import topic_services
from core.domain import user_services
from core.domain import voiceover_services
from core.platform import models
from core.tests import test_utils
(suggestion_models,) = models.Registry.import_models([models.NAMES.suggestion])
class VoiceoverApplicationServicesUnitTests(test_utils.GenericTestBase):
"""Provides testing of the voiceover services."""
APPLICANT_USERNAME = 'applicant'
APPLICANT_EMAIL = '[email protected]'
def setUp(self):
super(VoiceoverApplicationServicesUnitTests, self).setUp()
self.signup(self.CURRICULUM_ADMIN_EMAIL, self.CURRICULUM_ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.APPLICANT_EMAIL, self.APPLICANT_USERNAME)
self.admin_id = self.get_user_id_from_email(self.CURRICULUM_ADMIN_EMAIL)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.applicant_id = self.get_user_id_from_email(self.APPLICANT_EMAIL)
self.applicant = user_services.get_user_actions_info(self.applicant_id)
self.set_curriculum_admins([self.CURRICULUM_ADMIN_USERNAME])
self.admin = user_services.get_user_actions_info(self.admin_id)
self.TOPIC_ID = 'topic'
self.STORY_ID = 'story'
self.USER_ID = 'user'
self.SKILL_ID = 'skill'
self.QUESTION_ID = question_services.get_new_question_id()
explorations = [self.save_new_valid_exploration(
'%s' % i,
self.owner_id,
title='title %d' % i,
category='category%d' % i,
end_state_name='End State',
correctness_feedback_enabled=True
) for i in range(2)]
for exp in explorations:
self.publish_exploration(self.owner_id, exp.id)
topic = topic_domain.Topic.create_default_topic(
self.TOPIC_ID, 'topic', 'abbrev', 'description', 'fragm')
topic.thumbnail_filename = 'thumbnail.svg'
topic.thumbnail_bg_color = '#C6DCDA'
topic.subtopics = [
topic_domain.Subtopic(
1, 'Title', ['skill_id_1'], 'image.svg',
constants.ALLOWED_THUMBNAIL_BG_COLORS['subtopic'][0], 21131,
'dummy-subtopic-three')]
topic.next_subtopic_id = 2
topic_services.save_new_topic(self.owner_id, topic)
topic_services.publish_topic(self.TOPIC_ID, self.admin_id)
story = story_domain.Story.create_default_story(
self.STORY_ID, 'A story', 'Description', self.TOPIC_ID,
'a-story')
story_services.save_new_story(self.owner_id, story)
topic_services.add_canonical_story(
self.owner_id, self.TOPIC_ID, self.STORY_ID)
topic_services.publish_story(
self.TOPIC_ID, self.STORY_ID, self.admin_id)
story_services.update_story(
self.owner_id, self.STORY_ID, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': '0'
})], 'Changes.')
self.add_user_role(
self.CURRICULUM_ADMIN_USERNAME, feconf.ROLE_ID_VOICEOVER_ADMIN)
def test_voiceover_application_creation(self):
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(user_voiceover_applications, [])
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(user_voiceover_applications[0].target_id, '0')
def test_get_voiceover_application_from_model_with_invalid_type_raise_error(
self):
suggestion_models.GeneralVoiceoverApplicationModel(
id='application_id',
target_type='exploration',
target_id='0',
status='review',
author_id='author_id',
final_reviewer_id=None,
language_code='en',
filename='filename.mp3',
content='<p>content</p>',
rejection_message=None).put()
voiceover_application_model = (
suggestion_models.GeneralVoiceoverApplicationModel.get_by_id(
'application_id'))
voiceover_application_model.target_type = 'invalid_type'
voiceover_application_model.update_timestamps()
voiceover_application_model.put()
with self.assertRaisesRegex(
Exception,
'Invalid target type for voiceover application: invalid_type'):
voiceover_services.get_voiceover_application_by_id('application_id')
def test_newly_created_voiceover_application_have_in_review_status(self):
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(user_voiceover_applications, [])
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_IN_REVIEW)
def test_get_reviewable_voiceover_applications(self):
voiceover_applications = (
voiceover_services.get_reviewable_voiceover_applications(
self.admin_id))
self.assertEqual(voiceover_applications, [])
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
voiceover_applications = (
voiceover_services.get_reviewable_voiceover_applications(
self.admin_id))
self.assertEqual(len(voiceover_applications), 1)
self.assertEqual(
voiceover_applications[0].status,
suggestion_models.STATUS_IN_REVIEW)
def test_accept_application_assigns_role_to_entity(self):
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_IN_REVIEW)
voiceover_services.accept_voiceover_application(
user_voiceover_applications[0].voiceover_application_id,
self.admin_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id, status=suggestion_models.STATUS_ACCEPTED))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_ACCEPTED)
exploration_rights = rights_manager.get_exploration_rights('0')
can_voiceover = rights_manager.check_can_voiceover_activity(
self.applicant, exploration_rights)
self.assertTrue(can_voiceover)
def test_accept_application_removes_exploration_voiceover_opportunity(self):
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_IN_REVIEW)
opportunities, _, more = (
opportunity_services.get_voiceover_opportunities('en', None))
self.assertEqual(len(opportunities), 1)
voiceover_services.accept_voiceover_application(
user_voiceover_applications[0].voiceover_application_id,
self.admin_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id, status=suggestion_models.STATUS_ACCEPTED))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_ACCEPTED)
opportunities, _, more = (
opportunity_services.get_voiceover_opportunities('en', None))
self.assertEqual(len(opportunities), 0)
self.assertFalse(more)
    def test_accept_application_rejects_other_similar_applications(
            self):
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.owner_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_IN_REVIEW)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.owner_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_IN_REVIEW)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
voiceover_services.accept_voiceover_application(
user_voiceover_applications[0].voiceover_application_id,
self.admin_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id, status=suggestion_models.STATUS_ACCEPTED))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_ACCEPTED)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.owner_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_REJECTED)
self.assertEqual(
user_voiceover_applications[0].rejection_message,
'We have to reject your application as another application for the '
'same opportunity got accepted.')
def test_author_accepts_own_voiceover_application_raise_exception(self):
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
with self.assertRaisesRegex(
Exception, 'Applicants are not allowed to review their own '
'voiceover application.'):
voiceover_services.accept_voiceover_application(
user_voiceover_applications[0].voiceover_application_id,
self.applicant_id)
def test_reject_voiceover_application(self):
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_IN_REVIEW)
opportunities, _, _ = (
opportunity_services.get_voiceover_opportunities('en', None))
self.assertEqual(len(opportunities), 1)
voiceover_services.reject_voiceover_application(
user_voiceover_applications[0].voiceover_application_id,
self.admin_id, 'Rejection message')
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
self.assertEqual(len(user_voiceover_applications), 1)
self.assertEqual(
user_voiceover_applications[0].status,
suggestion_models.STATUS_REJECTED)
opportunities, _, _ = (
opportunity_services.get_voiceover_opportunities('en', None))
self.assertEqual(len(opportunities), 1)
def test_author_rejects_own_voiceover_application_raise_exception(self):
voiceover_services.create_new_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en', '',
'audio_file.mp3', self.applicant_id)
user_voiceover_applications = (
voiceover_services.get_user_submitted_voiceover_applications(
self.applicant_id))
with self.assertRaisesRegex(
Exception, 'Applicants are not allowed to review their own '
'voiceover application.'):
voiceover_services.reject_voiceover_application(
user_voiceover_applications[0].voiceover_application_id,
self.applicant_id, 'Testing rejection')
def test_get_text_to_create_voiceover_application(self):
exp_services.update_exploration(
self.owner_id, '0', [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': (
exp_domain.STATE_PROPERTY_CONTENT),
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>The new content to voiceover</p>'
}
})], 'Adds new content to init state')
content = voiceover_services.get_text_to_create_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'en')
self.assertEqual(content, '<p>The new content to voiceover</p>')
def test_get_text_to_create_voiceover_application_in_diff_language(self):
exp_services.update_exploration(
self.owner_id, '0', [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': (
exp_domain.STATE_PROPERTY_CONTENT),
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>The new content to voiceover</p>'
}
}), exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_ADD_WRITTEN_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>The new content to voiceover</p>',
'translation_html': '<p>Translation in Hindi</p>',
'data_format': 'html'
})], 'Adds new content to init state and its translation')
content = voiceover_services.get_text_to_create_voiceover_application(
feconf.ENTITY_TYPE_EXPLORATION, '0', 'hi')
self.assertEqual(content, '<p>Translation in Hindi</p>')
def test_get_text_to_create_voiceover_application_for_invalid_type(self):
with self.assertRaisesRegex(
Exception, 'Invalid target type: invalid_type'):
voiceover_services.get_text_to_create_voiceover_application(
'invalid_type', '0', 'hi')
|
py | b414352596eec9ed734853922723c28a92a238ed | from metacat.webapi import MetaCatClient
class MetaCatRucioPlugin(object):
def __init__(self, client):
self.Client = client
def get_metadata(self, scope, name, session=None):
        info = self.Client.get_file_info(name=f"{scope}:{name}")
return info.Metadata
def set_metadata(self, scope, name, key, value, recursive=False, session=None):
self.Client.update_file_meta(
{key:value},
names=[f"{scope}:{name}"]
)
def set_metadata_bulk(self, scope, name, meta, recursive=False, session=None):
self.Client.update_file_meta(
meta,
names=[f"{scope}:{name}"]
)
def delete_metadata(self, scope, name, key, session=None):
meta = self.get_metadata(scope, name)
try: del meta[key]
except KeyError: return
self.Client.update_file_meta(
meta,
names=[f"{scope}:{name}"],
mode="replace"
)
def list_dids(self, scope, filters, type='collection', ignore_case=False, limit=None,
offset=None, long=False, recursive=False, session=None):
where_items = []
for k, v in filters.items():
if isinstance(v, str):
where_items.append(f"{k} = '{v}'")
else:
where_items.append(f"{k} = {v}")
where_clause = " and ".join(where_items)
if type in ("collection", "dataset", "container"):
if where_clause: where_clause = " having "+where_clouse
query = f"datasets {scope}:'%' {where_clause}"
if recursive:
query += " with children recursively"
else:
if where_clause: where_clause = " where "+where_clouse
query = f"files from {scope}:'%'"
if recursive:
query += " with children recursively"
query += where_clouse
if limit is not None:
query += f" limit {limit}"
results = self.Client.run_query(query)
return [item["name"] for item in results]
def manages_key(self, key):
return True
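# Usage sketch (illustrative only; the MetaCat server URL and the scope/name
# values below are assumptions, not part of the plugin):
#   client = MetaCatClient("https://metacat.example.org:9443/api")
#   plugin = MetaCatRucioPlugin(client)
#   plugin.get_metadata("protodune", "raw_0001.root")        # -> metadata dict
#   plugin.set_metadata("protodune", "raw_0001.root", "run", 42)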
|
py | b4143586db9d1ae9728b6e908eafcd49940b0a8c | """
Task:CountDiv
Compute number of integers divisible by k in range [a..b].
Write a function:
def solution(A, B, K)
that, given three integers A, B and K, returns the number of integers within the range [A..B] that are divisible by K, i.e.:
{ i : A ≤ i ≤ B, i mod K = 0 }
For example, for A = 6, B = 11 and K = 2, your function should return 3, because there are three numbers divisible by 2 within the range [6..11], namely 6, 8 and 10.
Write an efficient algorithm for the following assumptions:
A and B are integers within the range [0..2,000,000,000];
K is an integer within the range [1..2,000,000,000];
A ≤ B.
Copyright 2009–2019 by Codility Limited. All Rights Reserved. Unauthorized copying, publication or disclosure prohibited.
You can check it out the result at https://app.codility.com/demo/results/training7FYMSS-MVT/ .
"""
# you can write to stdout for debugging purposes, e.g.
# print("this is a debug message")
def solution(A, B, K):
    # write your code in Python 3.6
    # First check whether A itself is divisible by K; if so, add 1 to count it.
    # Then integer-divide B and A by K and return the difference.
    # More detail at https://codesays.com/2014/solution-to-count-div-by-codility/#comment-1411 .
    X = 1 if (A % K) == 0 else 0
    return (B // K) - (A // K) + X
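# Worked check of the formula above, matching the example in the task
# statement: for A = 6, B = 11, K = 2 we get B // K = 5, A // K = 3, and
# A % K == 0 so X = 1, hence 5 - 3 + 1 = 3 (the multiples 6, 8 and 10).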
# testcase 1
A = 6
B = 11
K = 2
print(solution(A, B, K))
# testcase 2
A = 0
B = 0
K = 11
print(solution(A, B, K))
# testcase 3
A = 10
B = 10
K = 5
print(solution(A, B, K))
# testcase 4
A = 0
B = 14
K = 2
print(solution(A, B, K)) |
py | b414361155addcbc3d2fe16c7a60dd3a7db01889 | class StackOverFlow(Exception):
    """Raised when the fixed-size memory has no vacant slot left."""
    pass
class Memory():
def __init__(self, size):
self.size = size # size of the memory
self.memory = {} # memory reference variable dictionary
# array of vacant places (0 if vacant, 1 if full)
self.vac = [0 for i in range(size)]
self.used = 0
#check if there is a space in the memory
def _checkSpace(self):
if 0 in self.vac:
return 1
else:
return 0
    # return the remaining space in the memory
def remainingSpace(self):
return self.size - self.used
#Adds variable to memory with the variable as a key
def addToMemory(self, var):
"Takes the variable and assigns it to it's respected value"
if var in self.memory:
return
if self._checkSpace():
self.used += 1
#print(f"space{self._checkSpace()}")
for i in range(len(self.vac)):
if self.vac[i] == 0:
self.memory[var] = i
self.vac[i] = 1
break
else:
raise StackOverFlow("Memory Size Exceeded , try to free some space")
    # returns the slot index held by a specific variable in the memory
    def getReferenceOf(self, var):
        return self.memory[var]
    # Frees the locations held by the given variables in the memory
    def free(self, *args):
        for i in args:
            ref = self.getReferenceOf(i)
            del self.memory[i]
            self.vac[ref] = 0
            self.used -= 1
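# Minimal usage sketch (illustrative; kept as comments so importing the module
# stays side-effect free):
#   mem = Memory(2)
#   mem.addToMemory("x")       # "x" occupies slot 0
#   mem.addToMemory("y")       # "y" occupies slot 1
#   mem.getReferenceOf("x")    # -> 0
#   mem.free("x")
#   mem.remainingSpace()       # -> 1
#   mem.addToMemory("z")       # reuses the freed slot 0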
|
py | b41439248c8e5b18a1b27c722a26b9d6091e69bd | # -*- coding: utf-8 -*-
from matplotlib.pyplot import axis
from ....Functions.init_fig import init_fig
from ....definitions import config_dict
ROTOR_COLOR = config_dict["PLOT"]["COLOR_DICT"]["ROTOR_COLOR"]
STATOR_COLOR = config_dict["PLOT"]["COLOR_DICT"]["STATOR_COLOR"]
def plot(self, fig=None):
"""Plot the Slot in a matplotlib fig
Parameters
----------
self : Slot
A Slot object
fig :
if None, open a new fig and plot, else add to the current
one (Default value = None)
Returns
-------
None
"""
surf = self.get_surface()
# Display the result
(fig, axes, patch_leg, label_leg) = init_fig(fig)
axes.set_xlabel("(m)")
axes.set_ylabel("(m)")
axes.set_title("Slot")
# Add the slot to the fig
    if self.get_is_stator():
        patches = surf.get_patches(color=STATOR_COLOR)
    else:
        patches = surf.get_patches(color=ROTOR_COLOR)
for patch in patches:
axes.add_patch(patch)
# Axis Setup
axis("equal")
fig.show()
|
py | b41439fa2ddfce96fc15647b60296d317f77290b | try:
import numpy as np
except ImportError:
np = None
from tests.numpy.testcase import NumpyBaseTestCase
# from decimal import Decimal
class LowCardinalityTestCase(NumpyBaseTestCase):
required_server_version = (19, 3, 3)
stable_support_version = (19, 9, 2)
def cli_client_kwargs(self):
if self.server_version >= self.stable_support_version:
return {'allow_suspicious_low_cardinality_types': 1}
def test_uint8(self):
with self.create_table('a LowCardinality(UInt8)'):
data = [np.array(range(255))]
self.client.execute(
'INSERT INTO test (a) VALUES', data, columnar=True
)
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(
inserted,
'\n'.join(str(x) for x in data[0]) + '\n'
)
inserted = self.client.execute(query, columnar=True)
self.assertArraysEqual(inserted[0], data[0])
def test_int8(self):
with self.create_table('a LowCardinality(Int8)'):
data = [np.array([x - 127 for x in range(255)])]
self.client.execute(
'INSERT INTO test (a) VALUES', data, columnar=True
)
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(
inserted,
'\n'.join(str(x) for x in data[0]) + '\n'
)
inserted = self.client.execute(query, columnar=True)
self.assertArraysEqual(inserted[0], data[0])
# def test_nullable_int8(self):
# with self.create_table('a LowCardinality(Nullable(Int8))'):
# data = [(None, ), (-1, ), (0, ), (1, ), (None, )]
# self.client.execute('INSERT INTO test (a) VALUES', data[0])
#
# query = 'SELECT * FROM test'
# inserted = self.emit_cli(query)
# self.assertEqual(inserted, '\\N\n-1\n0\n1\n\\N\n')
#
# inserted = self.client.execute(query)
# self.assertEqual(inserted, data[0])
def test_date(self):
with self.create_table('a LowCardinality(Date)'):
data = [np.array(list(range(300)), dtype='datetime64[D]')]
self.client.execute(
'INSERT INTO test (a) VALUES', data, columnar=True
)
query = 'SELECT * FROM test'
inserted = self.client.execute(query, columnar=True)
self.assertArraysEqual(inserted[0], data[0])
def test_float(self):
with self.create_table('a LowCardinality(Float)'):
data = [np.array([float(x) for x in range(300)])]
self.client.execute(
'INSERT INTO test (a) VALUES', data, columnar=True
)
query = 'SELECT * FROM test'
inserted = self.client.execute(query, columnar=True)
self.assertArraysEqual(inserted[0], data[0])
# def test_decimal(self):
# with self.create_table('a LowCardinality(Float)'):
# data = [(Decimal(x),) for x in range(300)]
# self.client.execute('INSERT INTO test (a) VALUES', data[0])
#
# query = 'SELECT * FROM test'
# inserted = self.client.execute(query)
# self.assertEqual(inserted, data[0])
#
# def test_array(self):
# with self.create_table('a Array(LowCardinality(Int16))'):
# data = [([100, 500], )]
# self.client.execute('INSERT INTO test (a) VALUES', data[0])
#
# query = 'SELECT * FROM test'
# inserted = self.emit_cli(query)
# self.assertEqual(inserted, '[100,500]\n')
#
# inserted = self.client.execute(query)
# self.assertEqual(inserted, data[0])
#
# def test_empty_array(self):
# with self.create_table('a Array(LowCardinality(Int16))'):
# data = [([], )]
# self.client.execute('INSERT INTO test (a) VALUES', data[0])
#
# query = 'SELECT * FROM test'
# inserted = self.emit_cli(query)
# self.assertEqual(inserted, '[]\n')
#
# inserted = self.client.execute(query)
# self.assertEqual(inserted, data[0])
#
def test_string(self):
with self.create_table('a LowCardinality(String)'):
data = [
np.array(['test', 'low', 'cardinality', 'test', 'test', ''])
]
self.client.execute(
'INSERT INTO test (a) VALUES', data, columnar=True
)
query = 'SELECT * FROM test'
inserted = self.emit_cli(query)
self.assertEqual(
inserted,
'test\nlow\ncardinality\ntest\ntest\n\n'
)
inserted = self.client.execute(query, columnar=True)
self.assertArraysEqual(inserted[0], data[0])
# def test_fixed_string(self):
# with self.create_table('a LowCardinality(FixedString(12))'):
# data = [
# ('test', ), ('low', ), ('cardinality', ),
# ('test', ), ('test', ), ('', )
# ]
# self.client.execute('INSERT INTO test (a) VALUES', data[0])
#
# query = 'SELECT * FROM test'
# inserted = self.emit_cli(query)
# self.assertEqual(
# inserted,
# 'test\\0\\0\\0\\0\\0\\0\\0\\0\n'
# 'low\\0\\0\\0\\0\\0\\0\\0\\0\\0\n'
# 'cardinality\\0\n'
# 'test\\0\\0\\0\\0\\0\\0\\0\\0\n'
# 'test\\0\\0\\0\\0\\0\\0\\0\\0\n'
# '\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\n'
# )
#
# inserted = self.client.execute(query)
# self.assertEqual(inserted, data[0])
#
# def test_nullable_string(self):
# with self.create_table('a LowCardinality(Nullable(String))'):
# data = [
# ('test', ), ('', ), (None, )
# ]
# self.client.execute('INSERT INTO test (a) VALUES', data[0])
#
# query = 'SELECT * FROM test'
# inserted = self.emit_cli(query)
# self.assertEqual(
# inserted,
# 'test\n\n\\N\n'
# )
#
# inserted = self.client.execute(query)
# self.assertEqual(inserted, data[0])
|
py | b4143b9160b0b8d8d4f46250df68b2bf9b6c2f2e | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import pylab
import pywt
usage = """Usage:\n %s wavelet [refinement level]""" % os.path.basename(
sys.argv[0])
try:
    wavelet = pywt.Wavelet(sys.argv[1])
    try:
        level = int(sys.argv[2])
    except IndexError:
        level = 10
except ValueError:
    print("Unknown wavelet")
    raise SystemExit
except IndexError:
    print(usage)
    raise SystemExit
print(wavelet)
data = wavelet.wavefun(level)
funcs, x = data[:-1], data[-1]
n = (len(data) - 1) // 2
labels = [
"scaling function (phi)", "wavelet function (psi)",
"r. scaling function (phi)", "r. wavelet function (psi)"
]
colours = ("r", "g", "r", "g")
for i, (d, label, colour) in enumerate(zip(funcs, labels, colours)):
mi, ma = d.min(), d.max()
margin = (ma - mi) * 0.05
ax = pylab.subplot(n, 2, 1 + i)
pylab.plot(x, d, colour)
pylab.title(label)
pylab.ylim(mi - margin, ma + margin)
pylab.xlim(x[0], x[-1])
pylab.show()
|
py | b4143bc36f23389ccfa42b7dad53f82a8f162f1c | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import itertools
import warnings
from typing import Dict, List, Optional, Tuple, Union
import torch
import torch.nn.functional as F
from pytorch3d.ops import interpolate_face_attributes
from pytorch3d.structures.utils import list_to_packed, list_to_padded, padded_to_list
from torch.nn.functional import interpolate
# This file contains classes and helper functions for texturing.
# There are three types of textures: TexturesVertex, TexturesAtlas
# and TexturesUV which inherit from a base textures class TexturesBase.
#
# Each texture class has a method 'sample_textures' to sample a
# value given barycentric coordinates.
#
# All the textures accept either list or padded inputs. The values
# are stored as either per face values (TexturesAtlas, TexturesUV),
# or per face vertex features (TexturesVertex).
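# Illustrative construction (a sketch, not part of the library API; the shapes
# and the `faces_uvs`/`verts_uvs` tensors below are assumptions):
#   verts_features = torch.rand(2, 100, 3)   # 2 meshes, 100 verts, RGB features
#   tex = TexturesVertex(verts_features=verts_features)
#   maps = torch.rand(2, 256, 256, 3)        # one RGB texture map per mesh
#   tex_uv = TexturesUV(maps=maps, faces_uvs=faces_uvs, verts_uvs=verts_uvs)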
def _list_to_padded_wrapper(
x: List[torch.Tensor],
pad_size: Union[list, tuple, None] = None,
pad_value: float = 0.0,
) -> torch.Tensor:
r"""
This is a wrapper function for
pytorch3d.structures.utils.list_to_padded function which only accepts
3-dimensional inputs.
For this use case, the input x is of shape (F, 3, ...) where only F
is different for each element in the list
Transforms a list of N tensors each of shape (Mi, ...) into a single tensor
of shape (N, pad_size, ...), or (N, max(Mi), ...)
if pad_size is None.
Args:
x: list of Tensors
pad_size: int specifying the size of the first dimension
of the padded tensor
pad_value: float value to be used to fill the padded tensor
Returns:
x_padded: tensor consisting of padded input tensors
"""
N = len(x)
# pyre-fixme[16]: `Tensor` has no attribute `ndim`.
dims = x[0].ndim
reshape_dims = x[0].shape[1:]
D = torch.prod(torch.tensor(reshape_dims)).item()
x_reshaped = []
for y in x:
if y.ndim != dims and y.shape[1:] != reshape_dims:
msg = (
"list_to_padded requires tensors to have the same number of dimensions"
)
raise ValueError(msg)
x_reshaped.append(y.reshape(-1, D))
x_padded = list_to_padded(x_reshaped, pad_size=pad_size, pad_value=pad_value)
return x_padded.reshape((N, -1) + reshape_dims)
def _padded_to_list_wrapper(
x: torch.Tensor, split_size: Union[list, tuple, None] = None
) -> List[torch.Tensor]:
r"""
This is a wrapper function for pytorch3d.structures.utils.padded_to_list
which only accepts 3-dimensional inputs.
For this use case, the input x is of shape (N, F, ...) where F
is the number of faces which is different for each tensor in the batch.
This function transforms a padded tensor of shape (N, M, ...) into a
list of N tensors of shape (Mi, ...) where (Mi) is specified in
split_size(i), or of shape (M,) if split_size is None.
Args:
x: padded Tensor
split_size: list of ints defining the number of items for each tensor
in the output list.
Returns:
x_list: a list of tensors
"""
N, M = x.shape[:2]
reshape_dims = x.shape[2:]
D = torch.prod(torch.tensor(reshape_dims)).item()
x_reshaped = x.reshape(N, M, D)
x_list = padded_to_list(x_reshaped, split_size=split_size)
x_list = [xl.reshape((xl.shape[0],) + reshape_dims) for xl in x_list]
return x_list
def _pad_texture_maps(
images: Union[Tuple[torch.Tensor], List[torch.Tensor]]
) -> torch.Tensor:
"""
Pad all texture images so they have the same height and width.
Args:
images: list of N tensors of shape (H, W, 3)
Returns:
tex_maps: Tensor of shape (N, max_H, max_W, 3)
"""
tex_maps = []
max_H = 0
max_W = 0
for im in images:
h, w, _3 = im.shape
if h > max_H:
max_H = h
if w > max_W:
max_W = w
tex_maps.append(im)
max_shape = (max_H, max_W)
for i, image in enumerate(tex_maps):
if image.shape[:2] != max_shape:
image_BCHW = image.permute(2, 0, 1)[None]
new_image_BCHW = interpolate(
image_BCHW, size=max_shape, mode="bilinear", align_corners=False
)
tex_maps[i] = new_image_BCHW[0].permute(1, 2, 0)
tex_maps = torch.stack(tex_maps, dim=0) # (num_tex_maps, max_H, max_W, 3)
return tex_maps
# A base class for defining a batch of textures
# with helper methods.
# This is also useful to have so that inside `Meshes`
# we can allow the input textures to be any texture
# type which is an instance of the base class.
class TexturesBase(object):
def __init__(self):
self._N = 0
self.valid = None
def isempty(self):
if self._N is not None and self.valid is not None:
return self._N == 0 or self.valid.eq(False).all()
return False
def to(self, device):
for k in dir(self):
v = getattr(self, k)
if isinstance(v, (list, tuple)) and all(
torch.is_tensor(elem) for elem in v
):
v = [elem.to(device) for elem in v]
setattr(self, k, v)
if torch.is_tensor(v) and v.device != device:
setattr(self, k, v.to(device))
return self
def _extend(self, N: int, props: List[str]) -> Dict[str, Union[torch.Tensor, List]]:
"""
Create a dict with the specified properties
repeated N times per batch element.
Args:
N: number of new copies of each texture
in the batch.
props: a List of strings which refer to either
class attributes or class methods which
return tensors or lists.
Returns:
Dict with the same keys as props. The values are the
extended properties.
"""
if not isinstance(N, int):
raise ValueError("N must be an integer.")
if N <= 0:
raise ValueError("N must be > 0.")
new_props = {}
for p in props:
t = getattr(self, p)
if callable(t):
t = t() # class method
if isinstance(t, list):
if not all(isinstance(elem, (int, float)) for elem in t):
raise ValueError("Extend only supports lists of scalars")
t = [[ti] * N for ti in t]
new_props[p] = list(itertools.chain(*t))
elif torch.is_tensor(t):
new_props[p] = t.repeat_interleave(N, dim=0)
return new_props
def _getitem(self, index: Union[int, slice], props: List[str]):
"""
Helper function for __getitem__
"""
new_props = {}
if isinstance(index, (int, slice)):
for p in props:
t = getattr(self, p)
if callable(t):
t = t() # class method
new_props[p] = t[index]
elif isinstance(index, list):
index = torch.tensor(index)
if isinstance(index, torch.Tensor):
if index.dtype == torch.bool:
# pyre-fixme[16]: `Tensor` has no attribute `nonzero`.
index = index.nonzero()
index = index.squeeze(1) if index.numel() > 0 else index
index = index.tolist()
for p in props:
t = getattr(self, p)
if callable(t):
t = t() # class method
new_props[p] = [t[i] for i in index]
return new_props
def sample_textures(self):
"""
Different texture classes sample textures in different ways
e.g. for vertex textures, the values at each vertex
are interpolated across the face using the barycentric
coordinates.
Each texture class should implement a sample_textures
method to take the `fragments` from rasterization.
Using `fragments.pix_to_face` and `fragments.bary_coords`
this function should return the sampled texture values for
each pixel in the output image.
"""
raise NotImplementedError()
def clone(self):
"""
Each texture class should implement a method
to clone all necessary internal tensors.
"""
raise NotImplementedError()
def __getitem__(self, index):
"""
Each texture class should implement a method
        to get the texture properties for the
specified elements in the batch.
The TexturesBase._getitem(i) method
        can be used as a helper function to retrieve the
class attributes for item i. Then, a new
instance of the child class can be created with
the attributes.
"""
raise NotImplementedError()
def __repr__(self):
return "TexturesBase"
def Textures(
maps: Union[List, torch.Tensor, None] = None,
faces_uvs: Optional[torch.Tensor] = None,
verts_uvs: Optional[torch.Tensor] = None,
verts_rgb: Optional[torch.Tensor] = None,
) -> TexturesBase:
"""
Textures class has been DEPRECATED.
Preserving Textures as a function for backwards compatibility.
Args:
maps: texture map per mesh. This can either be a list of maps
[(H, W, 3)] or a padded tensor of shape (N, H, W, 3).
faces_uvs: (N, F, 3) tensor giving the index into verts_uvs for each
vertex in the face. Padding value is assumed to be -1.
verts_uvs: (N, V, 2) tensor giving the uv coordinate per vertex.
verts_rgb: (N, V, 3) tensor giving the rgb color per vertex. Padding
value is assumed to be -1.
Returns:
a Textures class which is an instance of TexturesBase e.g. TexturesUV,
        TexturesAtlas, TexturesVertex
"""
warnings.warn(
"""Textures class is deprecated,
use TexturesUV, TexturesAtlas, TexturesVertex instead.
Textures class will be removed in future releases.""",
PendingDeprecationWarning,
)
if all(x is not None for x in [faces_uvs, verts_uvs, maps]):
# pyre-fixme[6]: Expected `Union[List[torch.Tensor], torch.Tensor]` for 1st
# param but got `Union[None, List[typing.Any], torch.Tensor]`.
return TexturesUV(maps=maps, faces_uvs=faces_uvs, verts_uvs=verts_uvs)
elif verts_rgb is not None:
return TexturesVertex(verts_features=verts_rgb)
else:
raise ValueError(
"Textures either requires all three of (faces uvs, verts uvs, maps) or verts rgb"
)
class TexturesAtlas(TexturesBase):
def __init__(self, atlas: Union[torch.Tensor, List, None]):
"""
A texture representation where each face has a square texture map.
This is based on the implementation from SoftRasterizer [1].
Args:
atlas: (N, F, R, R, D) tensor giving the per face texture map.
The atlas can be created during obj loading with the
pytorch3d.io.load_obj function - in the input arguments
set `create_texture_atlas=True`. The atlas will be
returned in aux.texture_atlas.
The padded and list representations of the textures are stored
and the packed representations is computed on the fly and
not cached.
[1] Liu et al, 'Soft Rasterizer: A Differentiable Renderer for Image-based
3D Reasoning', ICCV 2019
"""
if isinstance(atlas, (list, tuple)):
correct_format = all(
(
torch.is_tensor(elem)
and elem.ndim == 4
and elem.shape[1] == elem.shape[2]
)
for elem in atlas
)
if not correct_format:
msg = "Expected atlas to be a list of tensors of shape (F, R, R, D)"
raise ValueError(msg)
self._atlas_list = atlas
self._atlas_padded = None
self.device = torch.device("cpu")
# These values may be overridden when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self._N = len(atlas)
self._num_faces_per_mesh = [len(a) for a in atlas]
if self._N > 0:
self.device = atlas[0].device
elif torch.is_tensor(atlas):
# pyre-fixme[16]: `Optional` has no attribute `ndim`.
if atlas.ndim != 5:
msg = "Expected atlas to be of shape (N, F, R, R, D); got %r"
raise ValueError(msg % repr(atlas.ndim))
self._atlas_padded = atlas
self._atlas_list = None
# pyre-fixme[16]: `Optional` has no attribute `device`.
self.device = atlas.device
# These values may be overridden when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
# pyre-fixme[6]: Expected `Sized` for 1st param but got
# `Optional[torch.Tensor]`.
self._N = len(atlas)
# pyre-fixme[16]: `Optional` has no attribute `shape`.
max_F = atlas.shape[1]
self._num_faces_per_mesh = [max_F] * self._N
else:
raise ValueError("Expected atlas to be a tensor or list")
# The num_faces_per_mesh, N and valid
# are reset inside the Meshes object when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self.valid = torch.ones((self._N,), dtype=torch.bool, device=self.device)
# This is a hack to allow the child classes to also have the same representation
# as the parent. In meshes.py we check that the input textures have the correct
# type. However due to circular imports issues, we can't import the texture
# classes into any files in pytorch3d.structures. Instead we check
# for repr(textures) == "TexturesBase".
def __repr__(self):
return super().__repr__()
def clone(self):
tex = self.__class__(atlas=self.atlas_padded().clone())
num_faces = (
self._num_faces_per_mesh.clone()
if torch.is_tensor(self._num_faces_per_mesh)
else self._num_faces_per_mesh
)
tex.valid = self.valid.clone()
tex._num_faces_per_mesh = num_faces
return tex
def __getitem__(self, index):
props = ["atlas_list", "_num_faces_per_mesh"]
new_props = self._getitem(index, props=props)
atlas = new_props["atlas_list"]
if isinstance(atlas, list):
# multiple batch elements
new_tex = self.__class__(atlas=atlas)
elif torch.is_tensor(atlas):
# single element
new_tex = self.__class__(atlas=[atlas])
else:
raise ValueError("Not all values are provided in the correct format")
new_tex._num_faces_per_mesh = new_props["_num_faces_per_mesh"]
return new_tex
def atlas_padded(self) -> torch.Tensor:
if self._atlas_padded is None:
if self.isempty():
self._atlas_padded = torch.zeros(
(self._N, 0, 0, 0, 3), dtype=torch.float32, device=self.device
)
else:
self._atlas_padded = _list_to_padded_wrapper(
self._atlas_list, pad_value=0.0
)
return self._atlas_padded
    def atlas_list(self) -> List[torch.Tensor]:
        if self._atlas_list is None:
            if self.isempty():
                self._atlas_list = [
                    torch.empty((0, 0, 0, 3), dtype=torch.float32, device=self.device)
                ] * self._N
            else:
                self._atlas_list = _padded_to_list_wrapper(
                    self._atlas_padded, split_size=self._num_faces_per_mesh
                )
        return self._atlas_list
def atlas_packed(self) -> torch.Tensor:
if self.isempty():
return torch.zeros(
(self._N, 0, 0, 3), dtype=torch.float32, device=self.device
)
atlas_list = self.atlas_list()
return list_to_packed(atlas_list)[0]
def extend(self, N: int) -> "TexturesAtlas":
new_props = self._extend(N, ["atlas_padded", "_num_faces_per_mesh"])
new_tex = TexturesAtlas(atlas=new_props["atlas_padded"])
new_tex._num_faces_per_mesh = new_props["_num_faces_per_mesh"]
return new_tex
def sample_textures(self, fragments, **kwargs) -> torch.Tensor:
"""
Args:
fragments:
The outputs of rasterization. From this we use
- pix_to_face: LongTensor of shape (N, H, W, K) specifying the indices
of the faces (in the packed representation) which
overlap each pixel in the image.
- barycentric_coords: FloatTensor of shape (N, H, W, K, 3) specifying
                  the barycentric coordinates of each pixel
relative to the faces (in the packed
representation) which overlap the pixel.
Returns:
texels: (N, H, W, K, 3)
"""
N, H, W, K = fragments.pix_to_face.shape
atlas_packed = self.atlas_packed()
R = atlas_packed.shape[1]
bary = fragments.bary_coords
pix_to_face = fragments.pix_to_face
bary_w01 = bary[..., :2]
# pyre-fixme[16]: `bool` has no attribute `__getitem__`.
mask = (pix_to_face < 0)[..., None]
bary_w01 = torch.where(mask, torch.zeros_like(bary_w01), bary_w01)
w_xy = (bary_w01 * R).to(torch.int64) # (N, H, W, K, 2)
below_diag = (
bary_w01.sum(dim=-1) * R - w_xy.float().sum(dim=-1)
) <= 1.0 # (N, H, W, K)
w_x, w_y = w_xy.unbind(-1)
w_x = torch.where(below_diag, w_x, (R - 1 - w_x))
w_y = torch.where(below_diag, w_y, (R - 1 - w_y))
texels = atlas_packed[pix_to_face, w_y, w_x]
texels = texels * (pix_to_face >= 0)[..., None].float()
return texels
def join_batch(self, textures: List["TexturesAtlas"]) -> "TexturesAtlas":
"""
Join the list of textures given by `textures` to
self to create a batch of textures. Return a new
TexturesAtlas object with the combined textures.
Args:
textures: List of TextureAtlas objects
Returns:
new_tex: TextureAtlas object with the combined
textures from self and the list `textures`.
"""
tex_types_same = all(isinstance(tex, TexturesAtlas) for tex in textures)
if not tex_types_same:
raise ValueError("All textures must be of type TexturesAtlas.")
atlas_list = []
atlas_list += self.atlas_list()
num_faces_per_mesh = self._num_faces_per_mesh
for tex in textures:
atlas_list += tex.atlas_list()
num_faces_per_mesh += tex._num_faces_per_mesh
new_tex = self.__class__(atlas=atlas_list)
new_tex._num_faces_per_mesh = num_faces_per_mesh
return new_tex
class TexturesUV(TexturesBase):
def __init__(
self,
maps: Union[torch.Tensor, List[torch.Tensor]],
faces_uvs: Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]],
verts_uvs: Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]],
):
"""
Textures are represented as a per mesh texture map and uv coordinates for each
vertex in each face. NOTE: this class only supports one texture map per mesh.
Args:
maps: texture map per mesh. This can either be a list of maps
[(H, W, 3)] or a padded tensor of shape (N, H, W, 3)
faces_uvs: (N, F, 3) tensor giving the index into verts_uvs for each face
verts_uvs: (N, V, 2) tensor giving the uv coordinates per vertex
Note: only the padded and list representation of the textures are stored
and the packed representations is computed on the fly and
not cached.
"""
super().__init__()
if isinstance(faces_uvs, (list, tuple)):
for fv in faces_uvs:
# pyre-fixme[16]: `Tensor` has no attribute `ndim`.
if fv.ndim != 2 or fv.shape[-1] != 3:
msg = "Expected faces_uvs to be of shape (F, 3); got %r"
raise ValueError(msg % repr(fv.shape))
self._faces_uvs_list = faces_uvs
self._faces_uvs_padded = None
self.device = torch.device("cpu")
# These values may be overridden when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self._N = len(faces_uvs)
self._num_faces_per_mesh = [len(fv) for fv in faces_uvs]
if self._N > 0:
self.device = faces_uvs[0].device
elif torch.is_tensor(faces_uvs):
if faces_uvs.ndim != 3 or faces_uvs.shape[-1] != 3:
msg = "Expected faces_uvs to be of shape (N, F, 3); got %r"
raise ValueError(msg % repr(faces_uvs.shape))
self._faces_uvs_padded = faces_uvs
self._faces_uvs_list = None
self.device = faces_uvs.device
# These values may be overridden when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self._N = len(faces_uvs)
max_F = faces_uvs.shape[1]
self._num_faces_per_mesh = [max_F] * self._N
else:
raise ValueError("Expected faces_uvs to be a tensor or list")
if isinstance(verts_uvs, (list, tuple)):
for fv in verts_uvs:
if fv.ndim != 2 or fv.shape[-1] != 2:
msg = "Expected verts_uvs to be of shape (V, 2); got %r"
raise ValueError(msg % repr(fv.shape))
self._verts_uvs_list = verts_uvs
self._verts_uvs_padded = None
if len(verts_uvs) != self._N:
raise ValueError(
"verts_uvs and faces_uvs must have the same batch dimension"
)
            if not all(v.device == self.device for v in verts_uvs):
                raise ValueError("verts_uvs and faces_uvs must be on the same device")
# These values may be overridden when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self._num_verts_per_mesh = [len(v) for v in verts_uvs]
elif torch.is_tensor(verts_uvs):
if (
verts_uvs.ndim != 3
or verts_uvs.shape[-1] != 2
or verts_uvs.shape[0] != self._N
):
msg = "Expected verts_uvs to be of shape (N, V, 2); got %r"
raise ValueError(msg % repr(verts_uvs.shape))
self._verts_uvs_padded = verts_uvs
self._verts_uvs_list = None
if verts_uvs.device != self.device:
raise ValueError("verts_uvs and faces_uvs must be on the same device")
# These values may be overridden when textures is
# passed into the Meshes constructor.
max_V = verts_uvs.shape[1]
self._num_verts_per_mesh = [max_V] * self._N
else:
raise ValueError("Expected verts_uvs to be a tensor or list")
if torch.is_tensor(maps):
# pyre-fixme[16]: `List` has no attribute `ndim`.
# pyre-fixme[16]: `List` has no attribute `shape`.
if maps.ndim != 4 or maps.shape[0] != self._N:
msg = "Expected maps to be of shape (N, H, W, 3); got %r"
raise ValueError(msg % repr(maps.shape))
self._maps_padded = maps
self._maps_list = None
elif isinstance(maps, (list, tuple)):
if len(maps) != self._N:
raise ValueError("Expected one texture map per mesh in the batch.")
self._maps_list = maps
if self._N > 0:
maps = _pad_texture_maps(maps)
else:
maps = torch.empty(
(self._N, 0, 0, 3), dtype=torch.float32, device=self.device
)
self._maps_padded = maps
else:
raise ValueError("Expected maps to be a tensor or list.")
if self._maps_padded.device != self.device:
raise ValueError("maps must be on the same device as verts/faces uvs.")
self.valid = torch.ones((self._N,), dtype=torch.bool, device=self.device)
def __repr__(self):
return super().__repr__()
def clone(self):
tex = self.__class__(
self.maps_padded().clone(),
self.faces_uvs_padded().clone(),
self.verts_uvs_padded().clone(),
)
num_faces = (
self._num_faces_per_mesh.clone()
if torch.is_tensor(self._num_faces_per_mesh)
else self._num_faces_per_mesh
)
tex._num_faces_per_mesh = num_faces
tex.valid = self.valid.clone()
return tex
def __getitem__(self, index):
props = ["verts_uvs_list", "faces_uvs_list", "maps_list", "_num_faces_per_mesh"]
new_props = self._getitem(index, props)
faces_uvs = new_props["faces_uvs_list"]
verts_uvs = new_props["verts_uvs_list"]
maps = new_props["maps_list"]
# if index has multiple values then faces/verts/maps may be a list of tensors
if all(isinstance(f, (list, tuple)) for f in [faces_uvs, verts_uvs, maps]):
new_tex = self.__class__(
faces_uvs=faces_uvs, verts_uvs=verts_uvs, maps=maps
)
elif all(torch.is_tensor(f) for f in [faces_uvs, verts_uvs, maps]):
new_tex = self.__class__(
faces_uvs=[faces_uvs], verts_uvs=[verts_uvs], maps=[maps]
)
else:
raise ValueError("Not all values are provided in the correct format")
new_tex._num_faces_per_mesh = new_props["_num_faces_per_mesh"]
return new_tex
def faces_uvs_padded(self) -> torch.Tensor:
if self._faces_uvs_padded is None:
if self.isempty():
self._faces_uvs_padded = torch.zeros(
(self._N, 0, 3), dtype=torch.float32, device=self.device
)
else:
self._faces_uvs_padded = list_to_padded(
self._faces_uvs_list, pad_value=0.0
)
return self._faces_uvs_padded
def faces_uvs_list(self) -> List[torch.Tensor]:
if self._faces_uvs_list is None:
if self.isempty():
self._faces_uvs_list = [
torch.empty((0, 3), dtype=torch.float32, device=self.device)
] * self._N
else:
self._faces_uvs_list = padded_to_list(
self._faces_uvs_padded, split_size=self._num_faces_per_mesh
)
return self._faces_uvs_list
def faces_uvs_packed(self) -> torch.Tensor:
if self.isempty():
return torch.zeros((self._N, 3), dtype=torch.float32, device=self.device)
faces_uvs_list = self.faces_uvs_list()
return list_to_packed(faces_uvs_list)[0]
def verts_uvs_padded(self) -> torch.Tensor:
if self._verts_uvs_padded is None:
if self.isempty():
self._verts_uvs_padded = torch.zeros(
(self._N, 0, 2), dtype=torch.float32, device=self.device
)
else:
self._verts_uvs_padded = list_to_padded(
self._verts_uvs_list, pad_value=0.0
)
return self._verts_uvs_padded
def verts_uvs_list(self) -> List[torch.Tensor]:
if self._verts_uvs_list is None:
if self.isempty():
self._verts_uvs_list = [
torch.empty((0, 2), dtype=torch.float32, device=self.device)
] * self._N
else:
self._verts_uvs_list = padded_to_list(
self._verts_uvs_padded, split_size=self._num_verts_per_mesh
)
return self._verts_uvs_list
def verts_uvs_packed(self) -> torch.Tensor:
if self.isempty():
return torch.zeros((self._N, 2), dtype=torch.float32, device=self.device)
verts_uvs_list = self.verts_uvs_list()
return list_to_packed(verts_uvs_list)[0]
# Currently only the padded maps are used.
def maps_padded(self) -> torch.Tensor:
return self._maps_padded
    def maps_list(self) -> List[torch.Tensor]:
        # maps_list is not used anywhere currently - maps
        # are padded to ensure the (H, W) of all maps is the
        # same across the batch and we don't store the
        # unpadded sizes of the maps. Therefore just return
        # the unbound padded tensor.
        return list(self._maps_padded.unbind(0))
def extend(self, N: int) -> "TexturesUV":
new_props = self._extend(
N,
[
"maps_padded",
"verts_uvs_padded",
"faces_uvs_padded",
"_num_faces_per_mesh",
"_num_verts_per_mesh",
],
)
new_tex = TexturesUV(
maps=new_props["maps_padded"],
faces_uvs=new_props["faces_uvs_padded"],
verts_uvs=new_props["verts_uvs_padded"],
)
new_tex._num_faces_per_mesh = new_props["_num_faces_per_mesh"]
new_tex._num_verts_per_mesh = new_props["_num_verts_per_mesh"]
return new_tex
def sample_textures(self, fragments, **kwargs) -> torch.Tensor:
"""
Interpolate a 2D texture map using uv vertex texture coordinates for each
face in the mesh. First interpolate the vertex uvs using barycentric coordinates
for each pixel in the rasterized output. Then interpolate the texture map
using the uv coordinate for each pixel.
Args:
fragments:
The outputs of rasterization. From this we use
- pix_to_face: LongTensor of shape (N, H, W, K) specifying the indices
of the faces (in the packed representation) which
overlap each pixel in the image.
- barycentric_coords: FloatTensor of shape (N, H, W, K, 3) specifying
                  the barycentric coordinates of each pixel
relative to the faces (in the packed
representation) which overlap the pixel.
Returns:
texels: tensor of shape (N, H, W, K, C) giving the interpolated
texture for each pixel in the rasterized image.
"""
verts_uvs = self.verts_uvs_packed()
faces_uvs = self.faces_uvs_packed()
faces_verts_uvs = verts_uvs[faces_uvs]
texture_maps = self.maps_padded()
# pixel_uvs: (N, H, W, K, 2)
pixel_uvs = interpolate_face_attributes(
fragments.pix_to_face, fragments.bary_coords, faces_verts_uvs
)
N, H_out, W_out, K = fragments.pix_to_face.shape
N, H_in, W_in, C = texture_maps.shape # 3 for RGB
# pixel_uvs: (N, H, W, K, 2) -> (N, K, H, W, 2) -> (NK, H, W, 2)
pixel_uvs = pixel_uvs.permute(0, 3, 1, 2, 4).reshape(N * K, H_out, W_out, 2)
# textures.map:
# (N, H, W, C) -> (N, C, H, W) -> (1, N, C, H, W)
# -> expand (K, N, C, H, W) -> reshape (N*K, C, H, W)
texture_maps = (
texture_maps.permute(0, 3, 1, 2)[None, ...]
.expand(K, -1, -1, -1, -1)
.transpose(0, 1)
.reshape(N * K, C, H_in, W_in)
)
# Textures: (N*K, C, H, W), pixel_uvs: (N*K, H, W, 2)
# Now need to format the pixel uvs and the texture map correctly!
# From pytorch docs, grid_sample takes `grid` and `input`:
# grid specifies the sampling pixel locations normalized by
# the input spatial dimensions It should have most
# values in the range of [-1, 1]. Values x = -1, y = -1
# is the left-top pixel of input, and values x = 1, y = 1 is the
# right-bottom pixel of input.
pixel_uvs = pixel_uvs * 2.0 - 1.0
texture_maps = torch.flip(texture_maps, [2]) # flip y axis of the texture map
if texture_maps.device != pixel_uvs.device:
texture_maps = texture_maps.to(pixel_uvs.device)
texels = F.grid_sample(texture_maps, pixel_uvs, align_corners=False)
texels = texels.reshape(N, K, C, H_out, W_out).permute(0, 3, 4, 1, 2)
return texels
def join_batch(self, textures: List["TexturesUV"]) -> "TexturesUV":
"""
Join the list of textures given by `textures` to
self to create a batch of textures. Return a new
TexturesUV object with the combined textures.
Args:
textures: List of TexturesUV objects
Returns:
new_tex: TexturesUV object with the combined
textures from self and the list `textures`.
"""
tex_types_same = all(isinstance(tex, TexturesUV) for tex in textures)
if not tex_types_same:
raise ValueError("All textures must be of type TexturesUV.")
verts_uvs_list = []
faces_uvs_list = []
maps_list = []
faces_uvs_list += self.faces_uvs_list()
verts_uvs_list += self.verts_uvs_list()
maps_list += list(self.maps_padded().unbind(0))
num_faces_per_mesh = self._num_faces_per_mesh
for tex in textures:
verts_uvs_list += tex.verts_uvs_list()
faces_uvs_list += tex.faces_uvs_list()
num_faces_per_mesh += tex._num_faces_per_mesh
tex_map_list = list(tex.maps_padded().unbind(0))
maps_list += tex_map_list
new_tex = self.__class__(
maps=maps_list, verts_uvs=verts_uvs_list, faces_uvs=faces_uvs_list
)
new_tex._num_faces_per_mesh = num_faces_per_mesh
return new_tex
class TexturesVertex(TexturesBase):
def __init__(
self,
verts_features: Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]],
):
"""
Batched texture representation where each vertex in a mesh
has a D dimensional feature vector.
Args:
verts_features: (N, V, D) tensor giving a feature vector with
                arbitrary dimensions for each vertex.
"""
if isinstance(verts_features, (tuple, list)):
correct_shape = all(
# pyre-fixme[16]: `Tensor` has no attribute `ndim`.
(torch.is_tensor(v) and v.ndim == 2)
for v in verts_features
)
if not correct_shape:
raise ValueError(
"Expected verts_features to be a list of tensors of shape (V, D)."
)
self._verts_features_list = verts_features
self._verts_features_padded = None
self.device = torch.device("cpu")
# These values may be overridden when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self._N = len(verts_features)
self._num_verts_per_mesh = [len(fv) for fv in verts_features]
if self._N > 0:
self.device = verts_features[0].device
elif torch.is_tensor(verts_features):
if verts_features.ndim != 3:
msg = "Expected verts_features to be of shape (N, V, D); got %r"
raise ValueError(msg % repr(verts_features.shape))
self._verts_features_padded = verts_features
self._verts_features_list = None
self.device = verts_features.device
# These values may be overridden when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self._N = len(verts_features)
            max_V = verts_features.shape[1]
            self._num_verts_per_mesh = [max_V] * self._N
else:
raise ValueError("verts_features must be a tensor or list of tensors")
# This is set inside the Meshes object when textures is
# passed into the Meshes constructor. For more details
# refer to the __init__ of Meshes.
self.valid = torch.ones((self._N,), dtype=torch.bool, device=self.device)
def __repr__(self):
return super().__repr__()
def clone(self):
tex = self.__class__(self.verts_features_padded().clone())
if self._verts_features_list is not None:
tex._verts_features_list = [f.clone() for f in self._verts_features_list]
        num_verts = (
            self._num_verts_per_mesh.clone()
            if torch.is_tensor(self._num_verts_per_mesh)
            else self._num_verts_per_mesh
        )
        tex._num_verts_per_mesh = num_verts
tex.valid = self.valid.clone()
return tex
def __getitem__(self, index):
props = ["verts_features_list", "_num_verts_per_mesh"]
new_props = self._getitem(index, props)
verts_features = new_props["verts_features_list"]
if isinstance(verts_features, list):
new_tex = self.__class__(verts_features=verts_features)
elif torch.is_tensor(verts_features):
new_tex = self.__class__(verts_features=[verts_features])
else:
raise ValueError("Not all values are provided in the correct format")
new_tex._num_verts_per_mesh = new_props["_num_verts_per_mesh"]
return new_tex
def verts_features_padded(self) -> torch.Tensor:
if self._verts_features_padded is None:
if self.isempty():
self._verts_features_padded = torch.zeros(
(self._N, 0, 3, 0), dtype=torch.float32, device=self.device
)
else:
self._verts_features_padded = list_to_padded(
self._verts_features_list, pad_value=0.0
)
return self._verts_features_padded
def verts_features_list(self) -> List[torch.Tensor]:
if self._verts_features_list is None:
if self.isempty():
self._verts_features_list = [
torch.empty((0, 3, 0), dtype=torch.float32, device=self.device)
] * self._N
else:
self._verts_features_list = padded_to_list(
self._verts_features_padded, split_size=self._num_verts_per_mesh
)
return self._verts_features_list
def verts_features_packed(self) -> torch.Tensor:
if self.isempty():
return torch.zeros((self._N, 3, 0), dtype=torch.float32, device=self.device)
verts_features_list = self.verts_features_list()
return list_to_packed(verts_features_list)[0]
def extend(self, N: int) -> "TexturesVertex":
new_props = self._extend(N, ["verts_features_padded", "_num_verts_per_mesh"])
new_tex = TexturesVertex(verts_features=new_props["verts_features_padded"])
new_tex._num_verts_per_mesh = new_props["_num_verts_per_mesh"]
return new_tex
def sample_textures(self, fragments, faces_packed=None) -> torch.Tensor:
"""
        Determine the color for each rasterized face. Interpolate the colors for
vertices which form the face using the barycentric coordinates.
Args:
fragments:
The outputs of rasterization. From this we use
- pix_to_face: LongTensor of shape (N, H, W, K) specifying the indices
of the faces (in the packed representation) which
overlap each pixel in the image.
- barycentric_coords: FloatTensor of shape (N, H, W, K, 3) specifying
                  the barycentric coordinates of each pixel
relative to the faces (in the packed
representation) which overlap the pixel.
Returns:
texels: An texture per pixel of shape (N, H, W, K, C).
There will be one C dimensional value for each element in
fragments.pix_to_face.
"""
verts_features_packed = self.verts_features_packed()
faces_verts_features = verts_features_packed[faces_packed]
texels = interpolate_face_attributes(
fragments.pix_to_face, fragments.bary_coords, faces_verts_features
)
return texels
def join_batch(self, textures: List["TexturesVertex"]) -> "TexturesVertex":
"""
Join the list of textures given by `textures` to
self to create a batch of textures. Return a new
TexturesVertex object with the combined textures.
Args:
textures: List of TexturesVertex objects
Returns:
new_tex: TexturesVertex object with the combined
textures from self and the list `textures`.
"""
tex_types_same = all(isinstance(tex, TexturesVertex) for tex in textures)
if not tex_types_same:
raise ValueError("All textures must be of type TexturesVertex.")
        verts_features_list = []
        verts_features_list += self.verts_features_list()
        num_verts_per_mesh = self._num_verts_per_mesh
        for tex in textures:
            verts_features_list += tex.verts_features_list()
            num_verts_per_mesh += tex._num_verts_per_mesh
        new_tex = self.__class__(verts_features=verts_features_list)
        new_tex._num_verts_per_mesh = num_verts_per_mesh
        return new_tex
|
py | b4143c7e237e241144608a9b6de17f4b9535d7ce | # -*- coding: utf-8 -*-
'''This sets up the package.
Stolen from http://python-packaging.readthedocs.io/en/latest/everything.html
and modified by me.
'''
__version__ = '0.1.0'
from setuptools import setup, find_packages
def readme():
"""Load the README file."""
with open('README.md') as f:
return f.read()
# let's be lazy and put requirements in one place
# what could possibly go wrong?
with open('requirements.txt') as infd:
INSTALL_REQUIRES = [x.strip('\n') for x in infd.readlines()]
# Running setup
setup(
name='damascus',
version=__version__,
description=('Massive halos in DECaLS traced by massive central galaxies'),
long_description=readme(),
long_description_content_type="text/markdown",
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Astronomy",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
keywords='astronomy, photometry',
url='https://github.com/dr-guangtou/damascus',
author='Song Huang',
author_email='[email protected]',
license='MIT',
packages=find_packages(),
install_requires=INSTALL_REQUIRES,
include_package_data=True,
zip_safe=False,
python_requires='>=3.6',
)
|
py | b4143db2ef79fc4328216b56a4b6cc8a57b24e8d | import shutil
import os
import re
import ast
import pydoc
import getpass
from copy import deepcopy, copy
from collections.abc import Mapping, Iterable
from pathlib import Path
from functools import reduce
import datetime
from jinja2 import Template
from ploomber.placeholders import util
from ploomber import repo
from ploomber.util import default
from ploomber.exceptions import BaseException
def expand_raw_dictionary_and_extract_tags(raw, mapping):
data = deepcopy(raw)
placeholders_all = []
for (d, current_key, current_val, _) in iterate_nested_dict(data):
d[current_key], placeholders = expand_if_needed(current_val, mapping)
placeholders_all.extend(placeholders)
return data, set(placeholders_all)
def expand_raw_dictionary(raw, mapping):
"""
Expands a dictionary where some values are {{tags}} using their values
in a mapping
"""
return expand_raw_dictionary_and_extract_tags(raw, mapping)[0]
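# Example (illustrative; assumes `mapping` resolves {{user}} to 'alice'):
#   expand_raw_dictionary({'path': '/data/{{user}}'}, mapping)
#   -> {'path': '/data/alice'}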
def expand_raw_dictionaries_and_extract_tags(raw, mapping):
"""
Expands a list of dictionaries
"""
expanded, tags = list(
zip(*[
expand_raw_dictionary_and_extract_tags(element, mapping)
for element in raw
]))
tags_unique = set(reduce(lambda x, y: x | y, tags))
return expanded, tags_unique
def expand_if_needed(raw_value, mapping):
value, placeholders = mapping._render(raw_value)
return cast_if_possible(value), placeholders
def cast_if_possible(value):
"""
Reference to env in specs must be strings, but we would like the rendered
value to still have the appropriate type
"""
if isinstance(value, str):
value_lower = value.lower()
if value_lower == 'false':
return False
elif value_lower == 'true':
return True
elif value_lower in {'none', 'null'}:
return None
try:
return ast.literal_eval(value)
except Exception:
pass
return value
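# Examples (illustrative):
#   cast_if_possible('true')    -> True
#   cast_if_possible('null')    -> None
#   cast_if_possible('[1, 2]')  -> [1, 2]
#   cast_if_possible('hello')   -> 'hello'  (literal_eval fails, value kept)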
class EnvironmentExpander:
"""
Convert values in the raw dictionary by expanding tags such as {{git}},
{{version}} or {{here}}. See `expand_raw_value` for more details
Parameters
----------
preprocessed : dict
Preprocessed env dictionary
path_to_here : str
Path to env.yaml, used to expand {{here}}
version_requires_import : bool, default=False
Whether determining package version requires import or not. If False,
the root ``__init__.py`` file in the module must have
a ``__version__ = 'LITERAL'`` variable. Literal is extracted.
"""
def __init__(self,
preprocessed,
path_to_here=None,
version_requires_import=False):
self._preprocessed = preprocessed
# {{here}} resolves to this value
self._path_to_here = (None if path_to_here is None else str(
Path(path_to_here).resolve()))
# we compute every placeholder's value so we only do it once
self._placeholders = {}
self._version_requires_import = version_requires_import
def expand_raw_dictionary(self, raw):
data = deepcopy(raw)
for (d, current_key, current_val,
parent_keys) in iterate_nested_dict(data):
d[current_key] = self.expand_raw_value(current_val, parent_keys)
return data
def expand_raw_value(self, raw_value, parents):
"""
Expand a string with placeholders
Parameters
----------
raw_value : str
The original value to expand
parents : list
The list of parents to get to this value in the dictionary
Notes
-----
If for a given raw_value, the first parent is 'path', expanded value
is casted to pathlib.Path object and .expanduser() is called,
furthermore, if raw_value ends with '/', a directory is created if
it does not currently exist
"""
placeholders = util.get_tags_in_str(raw_value)
if not placeholders:
value = raw_value
else:
if 'git' in placeholders:
if not shutil.which('git'):
raise BaseException('Found placeholder {{git}}, but '
'git is not installed. Please install '
'it and try again.')
if not repo.is_repo(
self._preprocessed.get('_module', self._path_to_here)):
raise BaseException(
'Found placeholder {{git}}, but could not '
'locate a git repository. Create a repository '
'or remove the {{git}} placeholder.')
# get all required placeholders
params = {k: self.load_placeholder(k) for k in placeholders}
value = Template(raw_value).render(**params)
if parents:
if parents[0] == 'path':
# value is a str (since it was loaded from a yaml file),
# if it has an explicit trailing slash, interpret it as
# a directory and create it, we have to do it at this point,
# because once we cast to Path, we lose the trailing slash
if value.endswith('/'):
self._try_create_dir(value)
return Path(value).expanduser()
else:
return cast_if_possible(value)
def _try_create_dir(self, value):
# make sure to expand user to avoid creating a "~" folder
path = Path(value).expanduser()
if not path.exists():
path.mkdir(parents=True)
def load_placeholder(self, key):
if key not in self._placeholders:
if hasattr(self, 'get_' + key):
try:
value = getattr(self, 'get_' + key)()
except Exception as e:
raise BaseException('An error happened while '
'expanding placeholder {{' + key +
'}}') from e
self._placeholders[key] = value
else:
raise RuntimeError('Unknown placeholder "{}"'.format(key))
return self._placeholders[key]
def _get_version_importing(self):
module_path = self._preprocessed.get('_module')
if not module_path:
raise KeyError('_module key is required to use version '
'placeholder')
# is this ok to do? /path/to/{module_name}
module_name = str(Path(module_path).name)
module = pydoc.locate(module_name)
if module is None:
raise ImportError(
                'Unable to import module with name "{}"'.format(module_name))
if hasattr(module, '__version__'):
return module.__version__
else:
raise RuntimeError('Module "{}" does not have a __version__ '
'attribute '.format(module))
def _get_version_without_importing(self):
if '_module' not in self._preprocessed:
raise KeyError('_module key is required to use version '
'placeholder')
content = (self._preprocessed['_module'] / '__init__.py').read_text()
version_re = re.compile(r'__version__\s+=\s+(.*)')
version = str(ast.literal_eval(version_re.search(content).group(1)))
return version
def get_version(self):
if self._version_requires_import:
return self._get_version_importing()
else:
return self._get_version_without_importing()
def get_user(self):
return getpass.getuser()
def get_cwd(self):
return str(Path(os.getcwd()).resolve())
def get_root(self):
root = default.try_to_find_root_recursively(
starting_dir=self._path_to_here)
if root is None:
raise ValueError('Failed to expand {{root}}, could not '
'find a setup.py in a parent folder')
return root
def get_here(self):
if self._path_to_here:
return self._path_to_here
else:
raise RuntimeError('here placeholder is only available '
'when env was initialized from a file or '
'when directly passing path to use')
def get_git(self):
module_path = self._preprocessed.get('_module')
if self._path_to_here:
module_path = self._path_to_here
if not module_path:
raise KeyError('_module key is required to use git placeholder')
return repo.git_location(module_path)
def get_git_hash(self):
module_path = self._preprocessed.get('_module')
if self._path_to_here:
module_path = self._path_to_here
if not module_path:
raise KeyError(
'_module key is required to use git_hash placeholder')
return repo.git_hash(module_path)
def get_now(self):
"""Returns current timestamp in ISO 8601 format
"""
return datetime.datetime.now().isoformat()
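# Usage sketch (illustrative; the keys and paths below are assumptions):
#   expander = EnvironmentExpander({'_module': Path('src/my_pkg')},
#                                  path_to_here='/project/env.yaml')
#   expander.expand_raw_dictionary({'path': {'data': '{{here}}/data/'}})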
def iterate_nested_dict(d):
"""
Iterate over all values (possibly nested) in a dictionary
Yields: dict holding the value, current key, current value, list of keys
to get to this value
"""
for k, v in d.items():
for i in _iterate(d, k, v, preffix=[k]):
yield i
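# Example (illustrative): for d = {'a': {'b': 1}}, iterate_nested_dict(d)
# yields a single tuple: ({'b': 1}, 'b', 1, ['a', 'b'])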
def _iterate(parent, key, value, preffix):
if isinstance(value, Mapping):
for k, v in value.items():
preffix_new = copy(preffix)
preffix_new.append(k)
for i in _iterate(value, k, v, preffix_new):
yield i
elif isinstance(value, Iterable) and not isinstance(value, str):
for idx, some_val in enumerate(value):
preffix_new = copy(preffix)
preffix_new.append(idx)
for i in _iterate(value, idx, some_val, preffix_new):
yield i
else:
yield parent, key, value, preffix
|
py | b4143ed9ae7261b38b92fc362f5be034ec345624 | # main.py
import os, discord, random, re
import emails
import gblvar
import pkg_resources
from discord.ext import commands
from discord.utils import get
client = discord.Client()
# MARK - When bot connects
@client.event
async def on_ready():
for guild in client.guilds:
if guild.id == gblvar.discord_guild_id:
break
print(f'🌐 {client.user} is connected to {guild.name} (id: {guild.id})')
# MARK - When someone joins the Guild
@client.event
async def on_member_join(member):
await member.create_dm()
await member.dm_channel.send(f'Hi {member.name}, welcome to the {gblvar.discord_guild_name} Discord server! To verify your identity, please enter your {gblvar.email_type_specifier} email address')
# MARK - When someone send a message
@client.event
async def on_message(message):
# Check if message was sent by the bot
if message.author == client.user:
return
# Check if the message was a DM
if message.channel.type != discord.ChannelType.private:
return
    # Parse the message for an email address (re.search returns None on no match)
    receiver_email = re.search(r'[\w\.-]+@[\w\.-]+\.\w+', message.content)
    no_regression_result = None  # sentinel: the "no match" result of re.search
if message.content == "--version":
v2 = pkg_resources.get_distribution('my_package_name').version
await message.channel.send("The current version of Discord Auth is {}".format(v2))
return
# The message received is a code
if message.content.isnumeric():
try:
f = open(f'.codes/{message.channel.id}.txt', "r")
data = f.readlines()
user_email = data[1].strip()
user_code = data[0].strip()
f.close()
            if not user_code:
                response = f'Please enter your {gblvar.email_type_specifier} email address first.'
                await message.channel.send(response)
            elif not user_email:
                response = f'Please enter your {gblvar.email_type_specifier} email address first.'
                await message.channel.send(response)
elif message.content == user_code:
new_guild = client.get_guild(int(gblvar.discord_guild_id))
member = new_guild.get_member(message.author.id)
                roles_to_add = []
                for role_id in gblvar.authorized_users[user_email]:
                    role = new_guild.get_role(int(role_id))
                    roles_to_add.append(role)
                    await member.add_roles(role, reason="Discord Auth Bot")
if roles_to_add == []:
role = new_guild.get_role(int(gblvar.discord_role_to_assign_id))
await member.add_roles(role, reason="Discord Auth Bot")
print(f'✅ The user {user_email} was added to the Discord')
response = f'Welcome to {gblvar.discord_guild_name}! You can now use the Discord Server.'
await message.channel.send(response)
else:
print(f'Invalid code given for {user_email}')
response = "The code given is invalid. Please try again."
await message.channel.send(response)
# File does not exist yet
except FileNotFoundError:
response = f'Please enter your {gblvar.email_type_specifier} email address first.'
await message.channel.send(response)
# No email was given
    elif receiver_email is None:
response = "Please enter a valid email address."
await message.channel.send(response)
# Email is in the list of valid emails
elif receiver_email.group(0) in list(gblvar.authorized_users.keys()):
emails.send_auth_code(receiver_email.group(0), message.channel.id)
response = f'An email was sent to {receiver_email.group(0)} with an authentication code. Please enter the code here.'
await message.channel.send(response)
# Email is not in the list of valid emails
else:
response = 'Sorry, that email is not in the list of allowed emails. Please contact the channel owner.'
await message.channel.send(response)
# MARK - Take care of exceptions by displaying them in the terminal & saving them to a log file
#@client.event
#async def on_error(event, *args, **kwargs):
# error_description = str(event)
# for arg in args:
# error_description += "\n"
# error_description += str(arg)
# for kwarg in kwargs:
# error_description += "\n"
# error_description += str(kwarg)
#
# with open('err.log', 'a') as f:
# if event == 'on_message':
# error_description += "\n \n"
# f.write(f'Unhandled message: {error_description}')
# else:
# raise
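# Hedged sketch (assumption, not from the original repo): the gblvar module
# referenced throughout this file is expected to expose at least the
# attributes used above, e.g.:
#
#   # gblvar.py
#   discord_bot_token = '...'
#   discord_guild_id = 123456789012345678
#   discord_guild_name = 'My Server'
#   email_type_specifier = 'university'
#   discord_role_to_assign_id = 123456789012345678
#   # email -> list of role ids granted on verification
#   authorized_users = {'student@example.edu': []}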
client.run(gblvar.discord_bot_token) |
py | b41440571903283ba212b3be9b481f0842883f3d | test = {
'name': 'question 1.17',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> np.allclose(my_cumsum([1, 4, 2, 5, 3]), [1, 5, 7, 12, 15])
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> np.allclose(my_cumsum([2.4, 5.2, 4.7, 9.3]), [2.4, 7.6, 12.3, 21.6])
True
""",
'hidden': False,
'locked': False
},
],
'scored': True,
'setup': 'import numpy as np',
'teardown': '',
'type': 'doctest'
}
]
}
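# Hedged reference sketch (assumption: the graded my_cumsum is defined in the
# student's notebook, not in this test spec). An implementation that would
# satisfy the doctests above:
#
#   import numpy as np
#
#   def my_cumsum(values):
#       # running total, e.g. [1, 4, 2] -> [1, 5, 7]
#       return np.cumsum(values)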
|
py | b414405a2b00ed8501ea28952b1c8a8e12669e89 | import fastapi
from fastapi import FastAPI, status
from starlette.requests import Request
from infrastructure.database.postgres.sqlhandler import SqlHandler
from interface.controllers.contest_controller import ContestController
from interface.controllers.problem_controller import ProblemController
from interface.controllers.submission_controller import SubmissionController
from interface.controllers.user_controller import UserController
from interface.controllers.language_controller import LanguageController
from interface.controllers.registration_controller import RegistrationController
from interface.controllers.notification_controller import NotificationController
from exceptions.waf import error_response, DuplicateKeyHTTPException
def set_route(api: FastAPI) -> None:
set_route_contest(api)
set_route_problem(api)
set_route_submission(api)
# set_route_user(api)
set_route_language(api)
set_router_registration(api)
set_route_notification(api)
def set_route_contest(api: FastAPI) -> None:
contest_controller = ContestController(SqlHandler())
api.add_api_route(
"/contests",
contest_controller.contests,
methods=["GET"],
status_code=200,
tags=["contests"],
)
api.add_api_route(
"/contests",
contest_controller.store,
status_code=201,
methods=["POST"],
tags=["contests"],
responses=error_response([DuplicateKeyHTTPException]),
)
api.add_api_route(
"/contests/{contest_id}",
contest_controller.contest,
status_code=200,
methods=["GET"],
tags=["contests"],
)
api.add_api_route(
"/contests/{contest_id}",
contest_controller.update,
status_code=200,
methods=["PUT"],
tags=["contests"],
)
api.add_api_route(
"/contests/{contest_id}",
contest_controller.delete,
status_code=200,
methods=["DELETE"],
tags=["contests"],
)
def set_route_problem(api: FastAPI) -> None:
problem_controller = ProblemController(SqlHandler(), fastapi)
api.add_api_route(
"/contests/{contest_id}/problems",
problem_controller.problems,
methods=["GET"],
status_code=200,
tags=["problems"],
)
api.add_api_route(
"/contests/{contest_id}/problems/{problem_id}",
problem_controller.problem,
methods=["GET"],
status_code=200,
tags=["problems"],
)
def set_route_user(api: FastAPI) -> None:
user_controller = UserController(SqlHandler())
api.add_api_route("/users/create", user_controller.create)
api.add_api_route("/users", user_controller.users)
def set_route_submission(api: FastAPI) -> None:
submission_controller = SubmissionController(SqlHandler(), fastapi)
api.add_api_route(
"/contests/{contest_id}/problems/{problem_id}/submit",
submission_controller.submit,
methods=["POST"],
status_code=201,
tags=["submissions"],
)
    # would be nice to also have a list of submissions for the contest tied to contest_id
api.add_api_route(
"/contests/{contest_id}/submissions/{problem_id}",
submission_controller.submissions,
methods=["GET"],
status_code=200,
tags=["submissions"],
)
api.add_api_route(
"/contests/{contest_id}/submissions/{problem_id}/{submit_id}",
submission_controller.submission,
methods=["GET"],
status_code=200,
tags=["submissions"],
)
def set_route_language(api: FastAPI) -> None:
language_controller = LanguageController(SqlHandler())
api.add_api_route(
"/languages",
language_controller.languages,
methods=["GET"],
status_code=200,
tags=["languages"],
)
api.add_api_route(
"/languages",
language_controller.create_language,
methods=["POST"],
status_code=201,
tags=["languages"],
responses=error_response([DuplicateKeyHTTPException]),
)
api.add_api_route(
"/languages",
language_controller.update,
methods=["PUT"],
status_code=200,
tags=["languages"],
)
api.add_api_route(
"/languages",
language_controller.delete,
methods=["DELETE"],
status_code=200,
tags=["languages"],
)
def set_router_registration(api: FastAPI) -> None:
registration_controller = RegistrationController(SqlHandler())
api.add_api_route(
"/registration",
registration_controller.registrations,
methods=["GET"],
tags=["registrations"],
)
api.add_api_route(
"/registration",
registration_controller.registration,
methods=["POST"],
tags=["registrations"],
)
api.add_api_route(
"/registration",
registration_controller.delete,
methods=["DELETE"],
tags=["registrations"],
)
def set_route_notification(api: FastAPI) -> None:
notification_controller = NotificationController(SqlHandler())
api.add_api_route(
"/notifications",
notification_controller.notifications,
methods=["GET"],
tags=["notifications"],
)
api.add_api_route(
"/notifications",
notification_controller.create,
methods=["POST"],
tags=["notifications"],
responses=error_response([DuplicateKeyHTTPException]),
)
api.add_api_route(
"/notifications",
notification_controller.update,
methods=["PUT"],
tags=["notifications"],
)
api.add_api_route(
"/notifications",
notification_controller.delete,
methods=["DELETE"],
tags=["notifications"],
)
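# Hedged wiring sketch (assumption: the actual app factory lives elsewhere in
# this repo). These registrations are expected to be applied as:
#
#   from fastapi import FastAPI
#   api = FastAPI()
#   set_route(api)  # registers contest, problem, submission, ... endpoints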
|
py | b41440605347a1b91b6a83ea87f59aef1b65d6b7 | import random
from librosa.filters import mel
import torch
from torch._C import dtype
import torch.nn as nn
import torchvision
import ffmpeg
import torchaudio
import torchvision.transforms as transforms
import cv2
import sys
import traceback
import os
import math
import numpy as np
import json
from torch.utils.data import Dataset, DataLoader
from logging import Logger
from torchvision.transforms.transforms import Lambda
try:
from datasets import MelSpectrogram, align_and_crop_face
except ImportError:
sys.path.extend(['..'])
from spectograms import MelSpectrogram
from face_utils import align_and_crop_face
def av_speech_collate_fn_trim(batch):
"NOTE: WILL NOT WORK WITH THE NEW CODE, HAVE NOT CHANGED IT."
lower_faces, speeches, melspecs, face_crop = zip(*batch)
N = len(lower_faces)
max_frames_in_batch = min([l.shape[0] for l in lower_faces])
max_samples_in_batch = min([s.shape[1] for s in speeches])
trimmed_lower_faces = list()
trimmed_speeches = list()
for lower_face, speech, melspec in zip(lower_faces, speeches, melspecs):
trimmed_lower_faces.append(lower_face[:max_frames_in_batch, :, :, :].unsqueeze(0))
trimmed_speeches.append(speech[:, :max_samples_in_batch].unsqueeze(0))
lower_faces_tensor = torch.cat(trimmed_lower_faces, dim=0)
speeches_tensor = torch.cat(trimmed_speeches, dim=0)
face_crop_tensor = torch.cat([f.unsqueeze(0) for f in face_crop], dim=0)
return (lower_faces_tensor, [max_frames_in_batch for _ in range(N)]),\
(speeches_tensor, [max_samples_in_batch for _ in range(N)]), face_crop_tensor
def av_speech_collate_fn_pad(batch):
lower_faces, speeches, melspecs, face_crop = zip(*batch)
max_frames_in_batch = max([l.shape[0] for l in lower_faces])
max_samples_in_batch = max([s.shape[1] for s in speeches])
max_melspec_samples_in_batch = max([m.shape[1] for m in melspecs])
padded_lower_faces = torch.zeros(len(lower_faces), max_frames_in_batch, *tuple(lower_faces[0].shape[1:]))
padded_speeches = torch.zeros(len(speeches), 1, max_samples_in_batch)
padded_melspecs = torch.zeros(len(melspecs), melspecs[0].shape[0], max_melspec_samples_in_batch)
mel_gate_padded = torch.zeros(len(melspecs), max_melspec_samples_in_batch)
video_lengths = list()
audio_lengths = list()
melspec_lengths = list()
for idx, (lower_face, speech, melspec) in enumerate(zip(lower_faces, speeches, melspecs)):
T = lower_face.shape[0]
video_lengths.append(T)
padded_lower_faces[idx, :T, :, :, :] = lower_face
S = speech.shape[-1]
audio_lengths.append(S)
padded_speeches[idx, :, :S] = speech
M = melspec.shape[-1]
melspec_lengths.append(M)
padded_melspecs[idx, :, :M] = melspec
mel_gate_padded[idx, M-1:] = 1.0
face_crop_tensor = torch.cat([f.unsqueeze(0) for f in face_crop], dim=0)
padded_lower_faces = padded_lower_faces.permute(0, 2, 1, 3, 4)
padded_speeches = padded_speeches.squeeze(1)
video_lengths = torch.tensor(video_lengths)
audio_lengths = torch.tensor(audio_lengths)
melspec_lengths = torch.tensor(melspec_lengths)
return (padded_lower_faces, video_lengths), (padded_speeches, audio_lengths), (padded_melspecs, melspec_lengths, mel_gate_padded), face_crop_tensor
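# Shape summary for av_speech_collate_fn_pad (derived from the padding logic above):
#   faces:     (B, C, T_max, H, W) with per-item lengths video_lengths (B,)
#   speech:    (B, S_max)          with per-item lengths audio_lengths (B,)
#   melspecs:  (B, n_mels, M_max)  with melspec_lengths (B,) and gate mask (B, M_max)
#   face_crop: (B, 2, 3, 160, 160) face-recognition crops, two per clip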
def x_round(x):
return math.floor(x * 4) / 4
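# illustrative: x_round(3.9) == 3.75 and x_round(1.3) == 1.25 - values are
# floored to the nearest quarter step below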
class AVSpeech(Dataset):
def __init__(self, rootpth, face_size=(96, 96), mode='train', demo=False, duration=1, face_augmentation=None, *args, **kwargs):
super(AVSpeech, self).__init__(*args, **kwargs)
assert mode in ('train', 'test')
self.rootpth = rootpth
self.linear_spectogram = MelSpectrogram()
self.face_recog_resize = transforms.Compose([
transforms.Resize((160, 160)),
transforms.Lambda(lambda im: (im.float() - 127.5) / 128.0),
])
self.face_size = face_size
self.face_resize = transforms.Compose([
transforms.Resize(face_size),
transforms.Lambda(lambda im: im.float() / 255.0),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
])
if face_augmentation is None:
self.face_augmentation = nn.Identity()
else:
self.face_augmentation = face_augmentation
self.mode = mode
self.demo = demo
self.data_path = os.path.join(self.rootpth, mode)
self.items = dict()
index = 0
for root, _, filenames in os.walk(self.data_path):
for filename in filenames:
if filename.endswith('.mp4'):
video_path = os.path.join(root, filename)
audio_path = os.path.join(root, filename.replace('.mp4', '.wav'))
frame_info_path = os.path.join(root, filename.replace('.mp4', '.json'))
if os.path.isfile(audio_path) and os.path.isfile(frame_info_path):
self.items[index] = [video_path, audio_path, frame_info_path]
index += 1
self.len = index
self.duration = duration
print(f'Size of {type(self).__name__}: {index}')
        # self.items maps consecutive integer keys to entries, so
        # random.shuffle can swap its values in place by index
        random.shuffle(self.items)
self.item_iter = iter(self.items)
self.current_item = None
self.current_item_attributes = dict()
def __len__(self):
return self.len
def reset_item(self):
self.current_item = None
return self['item']
def get_item(self):
try:
item_idx = next(self.item_iter)
except StopIteration:
random.shuffle(self.items)
self.item_iter = iter(self.items)
item_idx = next(self.item_iter)
worker_info = torch.utils.data.get_worker_info()
if worker_info: item_idx = (item_idx + worker_info.id) % len(self.items)
video_pth, audio_pth, frame_info_path = self.items[item_idx]
try:
video_info = ffmpeg.probe(video_pth)['format']
except:
return self.get_item()
self.current_item = self.items[item_idx]
self.current_item_attributes = {
'start_time': 0,
'end_time': x_round(float(video_info['duration']))
}
return self.current_item
def __getitem__(self, idx):
if self.current_item is None:
item = self.get_item()
else:
item = self.current_item
video_pth, audio_pth, frame_info_path = item
overlap = 0.2
start_time = max(self.current_item_attributes['start_time'] - overlap, 0)
end_time = self.current_item_attributes['end_time']
if start_time > end_time:
return self.reset_item()
duration = random.choice(np.arange(0.5, self.duration + overlap, overlap))
self.current_item_attributes['start_time'] += duration
try:
speech, sampling_rate = torchaudio.load(audio_pth, frame_offset=int(16000 * start_time),
num_frames=int(16000 * duration), normalize=True, format='wav')
except:
# traceback.print_exc()
return self.reset_item()
assert sampling_rate == 16000
if speech.shape[1] == 0:
return self.reset_item()
frames, _, _ = torchvision.io.read_video(video_pth, start_pts=start_time, end_pts=start_time + duration, pts_unit='sec')
frames = frames.permute(0, 3, 1, 2)
N = frames.shape[0]
absoulte_start_frame_in_video = int(start_time * 25)
with open(frame_info_path, 'r') as json_path:
frame_info = json.load(json_path)
faces = list()
for idx in range(N):
absolute_frame_idx = str(absoulte_start_frame_in_video + idx)
if absolute_frame_idx not in frame_info: continue
landmarks = frame_info[absolute_frame_idx]['landmarks']
            face_coords = np.array(frame_info[absolute_frame_idx]['face_coords'], dtype=int)  # np.int was removed in NumPy >= 1.24
face_coords[face_coords < 0] = 0
face = align_and_crop_face(frames[idx, :, :, :], face_coords, landmarks)
if face.shape[1] < 16 or face.shape[2] < 16: return self.reset_item()
faces.append(face)
if len(faces) == 0:
return self.reset_item()
faces = self.face_augmentation(faces)
face_indices = (torch.rand(2) * len(faces)).int()
face_crop = torch.cat([self.face_recog_resize(faces[f_id]).unsqueeze(0) for f_id in face_indices], dim=0)
lower_faces = list()
for face in faces:
C, H, W = face.shape
lower_face = face[:, H//2:, :]
lower_faces.append(self.face_resize(lower_face).unsqueeze(0))
lower_faces = torch.cat(lower_faces, dim=0)
try:
melspec = self.linear_spectogram(speech).squeeze(0)
except:
return self.reset_item()
return lower_faces, speech, melspec, face_crop
def main():
ds = AVSpeech('/media/ssd/christen-rnd/Experiments/Lip2Speech/Datasets/AVSpeech', mode='test')
dl = DataLoader(ds,
batch_size=8,
shuffle=True,
num_workers=8,
pin_memory=False,
drop_last=True,
collate_fn=av_speech_collate_fn_pad)
from IPython.display import Audio, display
for bdx, batch in enumerate(dl):
(video, video_lengths), (speeches, audio_lengths), (melspecs, melspec_lengths, mel_gates), faces = batch
frames = video
print('video.shape', video.shape)
print('faces.shape ', faces.shape)
print('frames[0][0].shape ', frames[0][0].shape)
# print('speech.shape ', speech.shape)
B, C, T, H, W = video.shape
for k in range(B):
for i in range(T):
image = frames[k, :, i, :, :].permute(1, 2, 0).numpy()
image = image * np.array([0.229, 0.224, 0.225]) + np.array([0.485, 0.456, 0.406])
face = faces[k, 0, :, :, :].permute(1, 2, 0).numpy()
face = ((face * 128.0) + 127.5).astype(dtype=np.uint8)
print(k, i, image.shape)
cv2.imshow('lip', image[:, :, :: -1])
cv2.imshow('face', face[:, :, :: -1])
if ord('q') == cv2.waitKey(1):
exit()
# sample_rate = 16000
# effects = [
# ["lowpass", "-1", "700"], # apply single-pole lowpass filter
# # ["speed", "0.8"], # reduce the speed
# # This only changes sample rate, so it is necessary to
# # add `rate` effect with original sample rate after this.
# # ["rate", f"{sample_rate}"],
# # ["reverb", "-w"], # Reverbration gives some dramatic feeling
# ]
# aug_speech, sample_rate2 = torchaudio.sox_effects.apply_effects_tensor(
# speech[0], sample_rate, effects)
# torchaudio.save('test.wav', speech[0], 16000)
# torchaudio.save('aug_speech.wav', aug_speech, 16000)
# plot_waveform(waveform, sample_rate)
# plot_specgram(waveform, sample_rate)
# play_audio(waveform, sample_rate)
# images = images.numpy()
# lb = lb.numpy()
# for image, label in zip(images, lb):
# label = ds.vis_label(label)
# print(torch.unique(label))
# print(img.shape, label.shape)
if __name__ == "__main__":
main()
|
py | b41442ed4af3064150af3f77d08abfbfb6d53fe3 | #!/usr/bin/env python3
# Called from crontab with a number between 0 and 3 as the argument
# 10 20 * * 1,2,3,4,5,6,0 python3 /home/pi/Kanala/puhallin.py 2 >/dev/null 2>&1
# 30 20 * * 1,2,3,4,5,6,0 python3 /home/pi/Kanala/puhallin.py 0 >/dev/null 2>&1
# Controls a relay via MQTT messages, in this case the relay driving the henhouse fan
# Jari Hiltunen 14.6.2020
# The argument is the relay control value, which can be:
# 0 = both relays off
# 1 = relay 1 on, relay 2 off
# 2 = both on
# 3 = relay 1 off, relay 2 on
import paho.mqtt.client as mqtt
import logging
import sys
from parametrit import MQTTSERVERI, MQTTSERVERIPORTTI, MQTTKAYTTAJA, MQTTSALARI
def virhe_loggeri(login_formaatti='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
login_nimi='', logitiedosto_infoille='/home/pi/Kanala/info/puhallin-info.log',
logitiedosto_virheille='/home/pi/Kanala/info/puhallin-virhe.log'):
logi = logging.getLogger(login_nimi)
login_formaatti = logging.Formatter(login_formaatti)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(login_formaatti)
logi.addHandler(stream_handler)
file_handler_info = logging.FileHandler(logitiedosto_infoille, mode='a')
file_handler_info.setFormatter(login_formaatti)
file_handler_info.setLevel(logging.INFO)
logi.addHandler(file_handler_info)
file_handler_error = logging.FileHandler(logitiedosto_virheille, mode='a')
file_handler_error.setFormatter(login_formaatti)
file_handler_error.setLevel(logging.ERROR)
logi.addHandler(file_handler_error)
logi.setLevel(logging.INFO)
return logi
loggeri = virhe_loggeri()
def yhdista_mqtt(client, userdata, flags, rc):
    if client.is_connected() is False:
        try:
            client.connect_async(MQTTSERVERI, MQTTSERVERIPORTTI, 60, bind_address="")  # connect to the MQTT broker
        except OSError as e:
            loggeri.error("MQTT server problem %s", e)
            raise Exception("MQTT server problem! %s" % e)
    print("Connected with status: " + str(rc))
    loggeri.info("Connected with status: " + str(rc))
    """ Subscribe to topics on the MQTT server. [0] in the control objects refers to the motion sensor. """
    # mqttvalot.subscribe("$SYS/#")
viesti = sys.argv[1]  # script argument 1
# executed on the Raspberry Pi
client = mqtt.Client("puhaltimen-ohjaus")
client.username_pw_set(MQTTKAYTTAJA, MQTTSALARI)
client.connect(MQTTSERVERI, MQTTSERVERIPORTTI, 60, bind_address="")  # connect to the MQTT broker
aihe = "kanala/sisa/puhallin"  # topic on which the status is published
# Send the value to the MQTT broker
if (int(viesti) >= 0) and (int(viesti) < 4):
    statustieto = viesti
    try:
        client.publish(aihe, payload=str(statustieto), qos=1, retain=True)
        print("Relay control %s sent" % statustieto)
        loggeri.info("Relay control %s sent" % statustieto)
    except OSError:
        print("Problem during transmission!")
        loggeri.error("Problem during transmission!")
        pass
else:
    print("A value between 0 and 3, please!")
    loggeri.error("A value between 0 and 3, please!")
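# Hedged sketch (assumption, not from the original repo): the parametrit
# module imported above is expected to define the MQTT connection settings:
#
#   # parametrit.py
#   MQTTSERVERI = '192.168.1.10'    # broker address
#   MQTTSERVERIPORTTI = 1883        # broker port
#   MQTTKAYTTAJA = 'username'
#   MQTTSALARI = 'password'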
|
py | b4144340721e9e025e94901bb975ef7c59a2f33d | """
File:
midifile.py
Contents and purpose:
Utilities used throughout JetCreator
Copyright (c) 2008 Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import struct
import copy
import array
# JET events
JET_EVENT_MARKER = 102
JET_MARKER_LOOP_END = 0
JET_EVENT_TRIGGER_CLIP = 103
# header definitions
SMF_HEADER_FMT = '>4slHHH'
SMF_RIFF_TAG = 'MThd'
SMF_TRACK_HEADER_FMT = '>4sl'
SMF_TRACK_RIFF_TAG = 'MTrk'
# defaults
DEFAULT_PPQN = 120
DEFAULT_BEATS_PER_MEASURE = 4
DEFAULT_TIME_FORMAT = '%03d:%02d:%03d'
# force note-offs to end of list
MAX_SEQ_NUM = 0x7fffffff
# MIDI messages
NOTE_OFF = 0x80
NOTE_ON = 0x90
POLY_KEY_PRESSURE = 0xa0
CONTROL_CHANGE = 0xb0
PROGRAM_CHANGE = 0xc0
CHANNEL_PRESSURE = 0xd0
PITCH_BEND = 0xe0
# System common messages
SYSEX = 0xf0
MIDI_TIME_CODE = 0xf1
SONG_POSITION_POINTER = 0xf2
SONG_SELECT = 0xf3
RESERVED_F4 = 0xf4
RESERVED_F5 = 0xf5
TUNE_REQUEST = 0xf6
END_SYSEX = 0xf7
# System real-time messages
TIMING_CLOCK = 0xf8
RESERVED_F9 = 0xf9
START = 0xfa
CONTINUE = 0xfb
STOP = 0xfc
RESERVED_FD = 0xfd
ACTIVE_SENSING = 0xfe
SYSTEM_RESET = 0xff
ONE_BYTE_MESSAGES = (
TUNE_REQUEST,
TIMING_CLOCK,
RESERVED_F9,
START,
CONTINUE,
STOP,
RESERVED_FD,
ACTIVE_SENSING,
SYSTEM_RESET)
THREE_BYTE_MESSAGES = (
NOTE_OFF,
NOTE_ON,
POLY_KEY_PRESSURE,
CONTROL_CHANGE,
PITCH_BEND)
MIDI_MESSAGES = (
NOTE_OFF,
NOTE_ON,
POLY_KEY_PRESSURE,
CONTROL_CHANGE,
CHANNEL_PRESSURE,
PITCH_BEND,
SYSEX)
# Meta-events
META_EVENT = 0xff
META_EVENT_SEQUENCE_NUMBER = 0x00
META_EVENT_TEXT_EVENT = 0x01
META_EVENT_COPYRIGHT_NOTICE = 0x02
META_EVENT_SEQUENCE_TRACK_NAME = 0x03
META_EVENT_INSTRUMENT_NAME = 0x04
META_EVENT_LYRIC = 0x05
META_EVENT_MARKER = 0x06
META_EVENT_CUE_POINT = 0x07
META_EVENT_MIDI_CHANNEL_PREFIX = 0x20
META_EVENT_END_OF_TRACK = 0x2f
META_EVENT_SET_TEMPO = 0x51
META_EVENT_SMPTE_OFFSET = 0x54
META_EVENT_TIME_SIGNATURE = 0x58
META_EVENT_KEY_SIGNATURE = 0x59
META_EVENT_SEQUENCER_SPECIFIC = 0x7f
# recurring error messages
MSG_NOT_SMF_FILE = 'Not an SMF file - aborting parse!'
MSG_INVALID_TRACK_HEADER = 'Track header is invalid'
MSG_TYPE_MISMATCH = 'msg_type does not match event type'
LARGE_TICK_WARNING = 1000
# default control values
CTRL_BANK_SELECT_MSB = 0
CTRL_MOD_WHEEL = 1
CTRL_RPN_DATA_MSB = 6
CTRL_VOLUME = 7
CTRL_PAN = 10
CTRL_EXPRESSION = 11
CTRL_BANK_SELECT_LSB = 32
CTRL_RPN_DATA_LSB = 38
CTRL_SUSTAIN = 64
CTRL_RPN_LSB = 100
CTRL_RPN_MSB = 101
CTRL_RESET_CONTROLLERS = 121
RPN_PITCH_BEND_SENSITIVITY = 0
RPN_FINE_TUNING = 1
RPN_COARSE_TUNING = 2
MONITOR_CONTROLLERS = (
CTRL_BANK_SELECT_MSB,
CTRL_MOD_WHEEL,
CTRL_RPN_DATA_MSB,
CTRL_VOLUME,
CTRL_PAN,
CTRL_EXPRESSION,
CTRL_BANK_SELECT_LSB,
CTRL_RPN_DATA_LSB,
CTRL_SUSTAIN,
CTRL_RPN_LSB,
CTRL_RPN_MSB)
MONITOR_RPNS = (
RPN_PITCH_BEND_SENSITIVITY,
RPN_FINE_TUNING,
RPN_COARSE_TUNING)
DEFAULT_CONTROLLER_VALUES = {
CTRL_BANK_SELECT_MSB : 121,
CTRL_MOD_WHEEL : 0,
CTRL_RPN_DATA_MSB : 0,
CTRL_VOLUME : 100,
CTRL_PAN : 64,
CTRL_EXPRESSION : 127,
CTRL_RPN_DATA_LSB : 0,
CTRL_BANK_SELECT_LSB : 0,
CTRL_SUSTAIN : 0,
CTRL_RPN_LSB : 0x7f,
CTRL_RPN_MSB : 0x7f}
DEFAULT_RPN_VALUES = {
RPN_PITCH_BEND_SENSITIVITY : 0x100,
RPN_FINE_TUNING : 0,
RPN_COARSE_TUNING : 1}
# initialize logger
midi_file_logger = logging.getLogger('MIDI_file')
midi_file_logger.setLevel(logging.NOTSET)
class trackGrid(object):
def __init__ (self, track, channel, name, empty):
self.track = track
self.channel = channel
self.name = name
self.empty = empty
def __str__ (self):
return "['%s', '%s', '%s']" % (self.track, self.channel, self.name)
#---------------------------------------------------------------
# MIDIFileException
#---------------------------------------------------------------
class MIDIFileException (Exception):
def __init__ (self, stream, msg):
stream.error_loc = stream.tell()
self.stream = stream
self.msg = msg
def __str__ (self):
return '[%d]: %s' % (self.stream.error_loc, self.msg)
#---------------------------------------------------------------
# TimeBase
#---------------------------------------------------------------
class TimeBase (object):
def __init__ (self, ppqn=DEFAULT_PPQN, beats_per_measure=DEFAULT_BEATS_PER_MEASURE):
self.ppqn = ppqn
self.beats_per_measure = beats_per_measure
def ConvertToTicks (self, measures, beats, ticks):
total_beats = beats + (measures * self.beats_per_measure)
total_ticks = ticks + (total_beats * self.ppqn)
return total_ticks
def ConvertTicksToMBT (self, ticks):
beats = ticks / self.ppqn
ticks -= beats * self.ppqn
measures = beats / self.beats_per_measure
beats -= measures * self.beats_per_measure
return (measures, beats, ticks)
def ConvertTicksToStr (self, ticks, format=DEFAULT_TIME_FORMAT):
measures, beats, ticks = self.ConvertTicksToMBT(ticks)
return format % (measures, beats, ticks)
def ConvertStrTimeToTuple(self, s):
try:
measures, beats, ticks = s.split(':',3)
return (int(measures), int(beats), int(ticks))
except:
return (0,0,0)
def ConvertStrTimeToTicks(self, s):
measures, beats, ticks = self.ConvertStrTimeToTuple(s)
return self.ConvertToTicks(measures, beats, ticks)
def MbtDifference(self, mbt1, mbt2):
t1 = self.ConvertToTicks(mbt1[0], mbt1[1], mbt1[2])
t2 = self.ConvertToTicks(mbt2[0], mbt2[1], mbt2[2])
return abs(t1-t2)
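# Hedged usage sketch (illustrative, not part of the original module): with
# the default 120 PPQN and 4 beats per measure, 2 measures + 1 beat + 30 ticks
# is (1 + 2*4) * 120 + 30 = 1110 ticks, and formatting round-trips it:
#
#   tb = TimeBase()
#   ticks = tb.ConvertToTicks(2, 1, 30)   # -> 1110
#   tb.ConvertTicksToStr(ticks)           # -> '002:01:030'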
#---------------------------------------------------------------
# Helper functions
#---------------------------------------------------------------
def ReadByte (stream):
try:
return ord(stream.read(1))
except TypeError:
stream.error_loc = stream.tell()
raise MIDIFileException(stream, 'Unexpected EOF')
def ReadBytes (stream, length):
bytes = []
for i in range(length):
bytes.append(ReadByte(stream))
return bytes
def ReadVarLenQty (stream):
value = 0
while 1:
byte = ReadByte(stream)
value = (value << 7) + (byte & 0x7f)
if byte & 0x80 == 0:
return value
def WriteByte (stream, value):
stream.write(chr(value))
def WriteBytes (stream, bytes):
for byte in bytes:
WriteByte(stream, byte)
def WriteVarLenQty (stream, value):
bytes = [value & 0x7f]
value = value >> 7
while value > 0:
bytes.append((value & 0x7f) | 0x80)
value = value >> 7
bytes.reverse()
WriteBytes(stream, bytes)
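# Illustrative encoding examples: MIDI variable-length quantities pack 7 bits
# per byte, with the high bit set on every byte except the last. So
# WriteVarLenQty emits 0x00 -> [0x00], 0x80 -> [0x81, 0x00], and
# 0x4000 -> [0x81, 0x80, 0x00]; ReadVarLenQty reverses the transformation.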
#---------------------------------------------------------------
# EventFilter
#---------------------------------------------------------------
class EventFilter (object):
pass
class EventTypeFilter (object):
def __init__ (self, events, exclude=True):
self.events = events
self.exclude = exclude
def Check (self, event):
if event.msg_type in self.events:
return not self.exclude
return self.exclude
class NoteFilter (EventFilter):
def __init__ (self, notes, exclude=True):
self.notes = notes
self.exclude = exclude
def Check (self, event):
if event.msg_type in (NOTE_ON, NOTE_OFF):
if event.note in self.notes:
return not self.exclude
return self.exclude
class ChannelFilter (EventFilter):
def __init__ (self, channel, exclude=True):
self.channel = channel
self.exclude = exclude
def Check (self, event):
if event.msg_type in (NOTE_ON, NOTE_OFF, POLY_KEY_PRESSURE, CONTROL_CHANGE, CHANNEL_PRESSURE, PITCH_BEND):
if event.channel in self.channel:
return not self.exclude
return self.exclude
#---------------------------------------------------------------
# MIDIEvent
#---------------------------------------------------------------
class MIDIEvent (object):
"""Factory for creating MIDI events from a stream."""
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
if msg_type == SYSEX:
return SysExEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif msg_type == END_SYSEX:
return SysExContEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif msg_type == META_EVENT:
return MetaEvent.ReadFromStream(stream, seq, ticks, msg_type)
else:
high_nibble = msg_type & 0xf0
if high_nibble == NOTE_OFF:
return NoteOffEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif high_nibble == NOTE_ON:
return NoteOnEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif high_nibble == POLY_KEY_PRESSURE:
return PolyKeyPressureEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif high_nibble == CONTROL_CHANGE:
return ControlChangeEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif high_nibble == PROGRAM_CHANGE:
return ProgramChangeEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif high_nibble == CHANNEL_PRESSURE:
return ChannelPressureEvent.ReadFromStream(stream, seq, ticks, msg_type)
elif high_nibble == PITCH_BEND:
return PitchBendEvent.ReadFromStream(stream, seq, ticks, msg_type)
else:
stream.Warning('Ignoring unexpected message type 0x%02x' % msg_type)
def WriteTicks (self, stream, track):
WriteVarLenQty(stream, self.ticks - track.ticks)
track.ticks = self.ticks
def WriteRunningStatus (self, stream, track, filters, msg, data1, data2=None):
if not self.CheckFilters(filters):
return
self.WriteTicks(stream, track)
status = msg + self.channel
if track.running_status != status:
WriteByte(stream, status)
track.running_status = status
WriteByte(stream, data1)
if data2 is not None:
WriteByte(stream, data2)
def CheckFilters (self, filters):
if filters is None or not len(filters):
return True
# never filter meta-events
if (self.msg_type == META_EVENT) and (self.meta_type == META_EVENT_END_OF_TRACK):
return True
# check all filters
for f in filters:
if not f.Check(self):
return False
return True
def TimeEventStr (self, timebase):
return '[%s]: %s' % (timebase.ConvertTicksToStr(self.ticks), self.__str__())
#---------------------------------------------------------------
# NoteOffEvent
#---------------------------------------------------------------
class NoteOffEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, note, velocity):
self.name = 'NoteOff'
self.msg_type = NOTE_OFF
self.seq = seq
self.ticks = ticks
self.channel = channel
self.note = note
self.velocity = velocity
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
ticks = ticks
channel = msg_type & 0x0f
note = ReadByte(stream)
velocity = ReadByte(stream)
if msg_type & 0xf0 != NOTE_OFF:
stream.seek(-2,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
return NoteOffEvent(ticks, seq, channel, note, velocity)
def WriteToStream (self, stream, track, filters=None):
        # special case: a zero-velocity note-off can be written as a note-on
        # with velocity 0, which lets it share running status with note-ons
        if self.velocity > 0:
            self.WriteRunningStatus(stream, track, filters, NOTE_OFF, self.note, self.velocity)
        elif track.running_status == (NOTE_OFF + self.channel):
            self.WriteRunningStatus(stream, track, filters, NOTE_OFF, self.note, self.velocity)
        else:
            self.WriteRunningStatus(stream, track, filters, NOTE_ON, self.note, 0)
def __str__ (self):
return '%s: ch=%d n=%d v=%d' % (self.name, self.channel, self.note, self.velocity)
#---------------------------------------------------------------
# NoteOnEvent
#---------------------------------------------------------------
class NoteOnEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, note, velocity, note_length, note_off_velocity):
self.name = 'NoteOn'
self.msg_type = NOTE_ON
self.ticks = ticks
self.seq = seq
self.channel = channel
self.note = note
self.velocity = velocity
self.note_length = note_length
self.note_off_velocity = note_off_velocity
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
channel = msg_type & 0x0f
note = ReadByte(stream)
velocity = ReadByte(stream)
if msg_type & 0xf0 != NOTE_ON:
stream.seek(-2,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
if velocity == 0:
return NoteOffEvent(ticks, seq, channel, note, velocity)
return NoteOnEvent(ticks, seq, channel, note, velocity, None, None)
def WriteToStream (self, stream, track, filters=None):
self.WriteRunningStatus(stream, track, filters, NOTE_ON, self.note, self.velocity)
def __str__ (self):
if self.note_length is not None:
return '%s: ch=%d n=%d v=%d l=%d' % (self.name, self.channel, self.note, self.velocity, self.note_length)
else:
return '%s: ch=%d n=%d v=%d' % (self.name, self.channel, self.note, self.velocity)
#---------------------------------------------------------------
# PolyKeyPressureEvent
#---------------------------------------------------------------
class PolyKeyPressureEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, note, value):
self.name = 'PolyKeyPressure'
self.msg_type = POLY_KEY_PRESSURE
self.ticks = ticks
self.seq = seq
self.channel = channel
self.note = note
self.value = value
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
channel = msg_type & 0x0f
note = ReadByte(stream)
value = ReadByte(stream)
if msg_type & 0xf0 != POLY_KEY_PRESSURE:
stream.seek(-2,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
return PolyKeyPressureEvent(ticks, seq, channel, note, value)
def WriteToStream (self, stream, track, filters=None):
self.WriteRunningStatus(stream, track, filters, POLY_KEY_PRESSURE, self.note, self.value)
def __str__ (self):
return '%s: ch=%d n=%d v=%d' % (self.name, self.channel, self.note, self.value)
#---------------------------------------------------------------
# ControlChangeEvent
#---------------------------------------------------------------
class ControlChangeEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, controller, value):
self.name = 'ControlChange'
self.msg_type = CONTROL_CHANGE
self.ticks = ticks
self.seq = seq
self.channel = channel
self.controller = controller
self.value = value
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
channel = msg_type & 0x0f
controller = ReadByte(stream)
value = ReadByte(stream)
if msg_type & 0xf0 != CONTROL_CHANGE:
stream.seek(-2,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
if controller >= 120:
return ChannelModeEvent(ticks, seq, channel, controller, value)
return ControlChangeEvent(ticks, seq, channel, controller, value)
def WriteToStream (self, stream, track, filters=None):
self.WriteRunningStatus(stream, track, filters, CONTROL_CHANGE, self.controller, self.value)
def __str__ (self):
return '%s: ch=%d c=%d v=%d' % (self.name, self.channel, self.controller, self.value)
#---------------------------------------------------------------
# ChannelModeEvent
#---------------------------------------------------------------
class ChannelModeEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, controller, value):
self.name = 'ChannelMode'
self.msg_type = CONTROL_CHANGE
self.ticks = ticks
self.seq = seq
self.channel = channel
self.controller = controller
self.value = value
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
channel = msg_type & 0x0f
controller = ReadByte(stream)
value = ReadByte(stream)
if msg_type & 0xf0 != CONTROL_CHANGE:
stream.seek(-2,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
if controller < 120:
return ControlChangeEvent(ticks, seq, channel, controller, value)
        return ChannelModeEvent(ticks, seq, channel, controller, value)
def WriteToStream (self, stream, track, filters=None):
self.WriteRunningStatus(stream, track, filters, CONTROL_CHANGE, self.controller, self.value)
def __str__ (self):
return '%s: ch=%d c=%d v=%d' % (self.name, self.channel, self.controller, self.value)
#---------------------------------------------------------------
# ProgramChangeEvent
#---------------------------------------------------------------
class ProgramChangeEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, program):
self.name = 'ProgramChange'
self.msg_type = PROGRAM_CHANGE
self.ticks = ticks
self.seq = seq
self.channel = channel
self.program = program
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
channel = msg_type & 0x0f
program = ReadByte(stream)
if msg_type & 0xf0 != PROGRAM_CHANGE:
stream.seek(-1,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
return ProgramChangeEvent(ticks, seq, channel, program)
def WriteToStream (self, stream, track, filters=None):
self.WriteRunningStatus(stream, track, filters, PROGRAM_CHANGE, self.program)
def __str__ (self):
return '%s: ch=%d p=%d' % (self.name, self.channel, self.program)
#---------------------------------------------------------------
# ChannelPressureEvent
#---------------------------------------------------------------
class ChannelPressureEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, value):
self.name = 'ChannelPressure'
self.msg_type = CHANNEL_PRESSURE
self.ticks = ticks
self.seq = seq
self.channel = channel
self.value = value
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
channel = msg_type & 0x0f
value = ReadByte(stream)
if msg_type & 0xf0 != CHANNEL_PRESSURE:
stream.seek(-1,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
return ChannelPressureEvent(ticks, seq, channel, value)
def WriteToStream (self, stream, track, filters=None):
self.WriteRunningStatus(stream, track, filters, CHANNEL_PRESSURE, self.value)
def __str__ (self):
return '%s: ch=%d v=%d' % (self.name, self.channel, self.value)
#---------------------------------------------------------------
# PitchBendEvent
#---------------------------------------------------------------
class PitchBendEvent (MIDIEvent):
def __init__ (self, ticks, seq, channel, value):
self.name = 'PitchBend'
self.msg_type = PITCH_BEND
self.ticks = ticks
self.seq = seq
self.channel = channel
self.value = value
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
channel = msg_type & 0x0f
value = (ReadByte(stream) << 7) + ReadByte(stream) - 0x2000
if msg_type & 0xf0 != PITCH_BEND:
stream.seek(-2,1)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
return PitchBendEvent(ticks, seq, channel, value)
def WriteToStream (self, stream, track, filters=None):
value = self.value + 0x2000
if value < 0:
value = 0
if value > 0x3fff:
value = 0x3fff
self.WriteRunningStatus(stream, track, filters, PITCH_BEND, value >> 7, value & 0x7f)
def __str__ (self):
return '%s: ch=%d v=%d' % (self.name, self.channel, self.value)
#---------------------------------------------------------------
# SysExEvent
#---------------------------------------------------------------
class SysExEvent (MIDIEvent):
def __init__ (self, ticks, seq, msg):
self.name = 'SysEx'
self.msg_type = SYSEX
self.ticks = ticks
self.seq = seq
self.length = len(msg)
self.msg = msg
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
pos = stream.tell()
length = ReadVarLenQty(stream)
msg = ReadBytes(stream, length)
if msg_type != SYSEX:
stream.seek(pos,0)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
return SysExEvent(ticks, seq, msg)
def WriteToStream (self, stream, track, filters=None):
if not self.CheckFilters(filters):
return
self.WriteTicks(stream, track)
WriteByte(stream, SYSEX)
WriteVarLenQty(stream, self.length)
WriteBytes(stream, self.msg)
track.running_status = None
def __str__ (self):
fmt_str = '%s: f0' + ' %02x'*self.length
return fmt_str % ((self.name,) + tuple(self.msg))
#---------------------------------------------------------------
# SysExContEvent
#---------------------------------------------------------------
class SysExContEvent (MIDIEvent):
def __init__ (self, ticks, seq, msg):
self.name = 'SysEx+'
self.msg_type = END_SYSEX
self.ticks = ticks
self.seq = seq
self.length = len(msg)
self.msg = msg
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
pos = stream.tell()
length = ReadVarLenQty(stream)
msg = ReadBytes(stream, length)
if msg_type != END_SYSEX:
stream.seek(pos,0)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
return SysExContEvent(ticks, seq, msg)
def WriteToStream (self, stream, track, filters=None):
if not self.CheckFilters(filters):
return
self.WriteTicks(stream, track)
WriteByte(stream, END_SYSEX)
WriteVarLenQty(stream, self.length)
WriteBytes(stream, self.msg)
track.running_status = None
def __str__ (self):
fmt_str = '%s:' + ' %02x'*self.length
return fmt_str % ((self.name,) + tuple(self.msg))
#---------------------------------------------------------------
# MetaEvent
#---------------------------------------------------------------
class MetaEvent (MIDIEvent):
def __init__ (self, ticks, seq, meta_type, msg):
self.name = 'MetaEvent'
self.msg_type = META_EVENT
self.ticks = ticks
self.seq = seq
self.meta_type = meta_type
self.length = len(msg)
self.msg = msg
@staticmethod
def ReadFromStream (stream, seq, ticks, msg_type):
pos = stream.tell()
meta_type = ReadByte(stream)
length = ReadVarLenQty(stream)
msg = ReadBytes(stream, length)
if msg_type != META_EVENT:
stream.seek(pos,0)
raise MIDIFileException(stream, MSG_TYPE_MISMATCH)
obj = MetaEvent(ticks, seq, meta_type, msg)
return obj
def WriteToStream (self, stream, track, filters=None):
if not self.CheckFilters(filters):
return
self.WriteTicks(stream, track)
WriteByte(stream, META_EVENT)
WriteByte(stream, self.meta_type)
WriteVarLenQty(stream, self.length)
WriteBytes(stream, self.msg)
track.running_status = None
def __str__ (self):
fmt_str = '%s: %02x' + ' %02x'*self.length
return fmt_str % ((self.name, self.meta_type) + tuple(self.msg))
#---------------------------------------------------------------
# MIDIControllers
#---------------------------------------------------------------
class MIDIControllers (object):
def __init__ (self):
self.controllers = []
self.rpns = []
for channel in range(16):
self.controllers.append({})
self.controllers[channel] = copy.deepcopy(DEFAULT_CONTROLLER_VALUES)
self.rpns.append({})
self.rpns[channel] = copy.deepcopy(DEFAULT_RPN_VALUES)
self.pitchbend = [0] * 16
self.program = [-1] * 16
self.pressure = [0] * 16
def __str__ (self):
output = []
for channel in range(16):
output.append('channel=%d' % channel)
output.append(' program=%d' % self.program[channel])
output.append(' pressure=%d' % self.pressure[channel])
output.append(' controllers')
for controller in self.controllers[channel].keys():
output.append(' %03d: %03d' % (controller, self.controllers[channel][controller]))
output.append(' rpns')
for rpn in self.rpns[channel].keys():
                output.append('    %05d: %05d' % (rpn, self.rpns[channel][rpn]))
return '\n'.join(output)
def Event (self, event):
"""Process an event and save any changes in controller values"""
# process control changes
if event.msg_type == CONTROL_CHANGE:
self.ControlChange(event)
elif event.msg_type == CHANNEL_PRESSURE:
self.PressureChange(event)
elif event.msg_type == PROGRAM_CHANGE:
self.ProgramChange(event)
elif event.msg_type == PITCH_BEND:
self.PitchBendChange(event)
def PitchBendChange (self, event):
"""Monitor pitch bend change."""
self.pitchbend[event.channel] = event.value
def ProgramChange (self, event):
"""Monitor program change."""
self.program[event.channel] = event.program
def ControlChange (self, event):
"""Monitor control change."""
controller = event.controller
if controller in MONITOR_CONTROLLERS:
channel = event.channel
self.controllers[channel][controller] = event.value
if (controller == CTRL_RPN_DATA_MSB) or (controller == CTRL_RPN_DATA_LSB):
rpn = (self.controllers[channel][CTRL_RPN_MSB] << 7) + self.controllers[channel][CTRL_RPN_LSB]
if rpn in MONITOR_RPNS:
value = (self.controllers[channel][CTRL_RPN_DATA_MSB] << 7) + self.controllers[channel][CTRL_RPN_DATA_LSB]
self.rpns[channel][rpn] = value
# reset controllers
elif event.controller == CTRL_RESET_CONTROLLERS:
            self.ResetControllers(event.channel)
def PressureChange (self, event):
"""Monitor pressure change."""
self.pressure[event.channel] = event.value
def ResetControllers (self, channel):
"""Reset controllers to default."""
self.controllers[channel] = DEFAULT_CONTROLLER_VALUES
self.rpns[channel] = DEFAULT_RPN_VALUES
self.pressure[channel] = 0
def GenerateEventList (self, ticks, ref_values=None):
"""Generate an event list based on controller differences."""
events = EventList()
# if no reference values, based on default values
if ref_values is None:
ref_values = MIDIControllers()
# iterate through 16 MIDI channels
for channel in range(16):
# generate RPN changes
for rpn in self.rpns[channel].keys():
value = self.rpns[channel][rpn]
if value != ref_values.rpns[channel][rpn]:
events.append(ControlChangeEvent(ticks, -1, channel, CTRL_RPN_MSB, rpn >> 7))
events.append(ControlChangeEvent(ticks, -1, channel, CTRL_RPN_LSB, rpn & 0x7f))
events.append(ControlChangeEvent(ticks, -1, channel, CTRL_RPN_DATA_MSB, value >> 7))
events.append(ControlChangeEvent(ticks, -1, channel, CTRL_RPN_DATA_LSB, value & 0x7f))
# generate controller changes
for controller in self.controllers[channel].keys():
if self.controllers[channel][controller] != ref_values.controllers[channel][controller]:
events.append(ControlChangeEvent(ticks, -1, channel, controller, self.controllers[channel][controller]))
# generate pressure changes
if self.pressure[channel] != ref_values.pressure[channel]:
events.append(ChannelPressureEvent(ticks, -1, channel, self.pressure[channel]))
# generate program changes
if self.program[channel] != ref_values.program[channel]:
if self.program[channel] in range(128):
events.append(ProgramChangeEvent(ticks, -1, channel, self.program[channel]))
# generate pitch bend changes
if self.pitchbend[channel] != ref_values.pitchbend[channel]:
if self.pitchbend[channel] in range(-8192,8191):
events.append(PitchBendEvent(ticks, -1, channel, self.pitchbend[channel]))
return events
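# Hedged usage sketch (illustrative; cut_seq and cut_ticks are hypothetical
# names): chase controller state up to a cut point, then materialize only the
# values that differ from the defaults as a patch of events at the cut. This
# mirrors what MIDITrack.Trim does below when chase_controllers is enabled.
#
#   values = track.events.ChaseControllers(cut_seq)
#   patch = values.GenerateEventList(cut_ticks)
#   track.events.InsertEvents(patch, cut_seq)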
#---------------------------------------------------------------
# EventList
#---------------------------------------------------------------
class EventList (list):
def __init__ (self):
list.__init__(self)
def FixNoteLengths (self):
midi_file_logger.debug('Fix note lengths')
# search for note-on's in event list
for index in range(len(self)):
event = self[index]
if event.msg_type == NOTE_ON:
note_off_ticks = event.ticks + event.note_length
                # check for a note-on occurring before the end of the current note
for i in range(index + 1, len(self)):
event_to_check = self[i]
if event_to_check.ticks >= note_off_ticks:
break
# adjust note length
if (event_to_check.msg_type == NOTE_ON) and (event_to_check.note == event.note):
midi_file_logger.debug('Adjusting note length @ %d' % event.ticks)
event.note_length = event_to_check.ticks - event.ticks
break
def ChaseControllers (self, end_seq, start_seq = 0, values = None):
midi_file_logger.debug('ChaseControllers from %d to %d' % (start_seq, end_seq))
# initialize controller values
if values is None:
values = MIDIControllers()
# chase controllers in track
for i in range(start_seq, min(end_seq, len(self))):
values.Event(self[i])
# return new values
return values
def SelectEvents (self, start, end):
midi_file_logger.debug('SelectEvents: %d to %d' % (start, end))
selected = EventList()
for event in self:
if event.ticks >= start:
if event.ticks >= end:
break
midi_file_logger.debug('SelectEvent: %s' % event.__str__())
selected.append(event)
return selected
def MergeEvents (self, events):
# copy events and sort them by ticks/sequence#
self.extend(events)
self.SortEvents()
def InsertEvents (self, events, seq):
self[seq:seq] = events
self.RenumberSeq()
def DeleteEvents (self, start_index, end_index, move_meta_events=None):
# default parameters
if start_index is None:
start_index = 0
if end_index is None:
end_index = len(self)
#print("\n")
#for evt in self[start_index:end_index]:
# print("%d %s" % (evt.ticks, evt))
# delete events
delete_count = 0
move_count = 0
for event in self[start_index:end_index]:
#Bth; Added this so we always get clip end events; clips that ended on last measure wouldn't end on repeat
if (event.msg_type == CONTROL_CHANGE) and \
(event.controller == JET_EVENT_TRIGGER_CLIP) and \
((event.value & 0x40) != 0x40):
pass
else:
if (move_meta_events is None) or (event.msg_type != META_EVENT):
self.remove(event)
delete_count += 1
# move meta-events
else:
event.ticks = move_meta_events
move_count += 1
midi_file_logger.debug('DeleteEvents: deleted %d events in range(%s:%s)' % (delete_count, start_index, end_index))
midi_file_logger.debug('DeleteEvents: moved %d events in range(%s:%s)' % (move_count, start_index, end_index))
def SeekEvent (self, pos):
for i in range(len(self)):
if self[i].ticks >= pos:
return i
return None
def RenumberSeq (self):
seq = 0
for event in self:
event.seq = seq
seq += 1
def SortEvents (self):
self.sort(self.EventSorter)
self.RenumberSeq()
@staticmethod
def EventSorter (x, y):
if x.ticks == y.ticks:
return cmp(x.seq, y.seq)
else:
return cmp(x.ticks, y.ticks)
def DumpEvents (self, output, timebase):
if output is not None:
for event in self:
output.write('%s\n' % event.TimeEventStr(timebase))
else:
for event in self:
midi_file_logger.debug(event.TimeEventStr(timebase))
#---------------------------------------------------------------
# MIDITrack
#---------------------------------------------------------------
class MIDITrack (object):
"""The MIDITrack class implements methods for reading, parsing,
modifying, and writing tracks in Standard MIDI Files (SMF).
"""
def __init__ (self):
self.length = 0
self.events = EventList()
self.end_of_track = None
self.channel = None
self.name = None
def ReadFromStream (self, stream, offset, file_size):
self.stream = stream
ticks = 0
seq = 0
running_status = None
tick_warning_level = stream.timebase.ppqn * LARGE_TICK_WARNING
# read the track header - verify it's an SMF track
stream.seek(offset)
bytes = stream.read(struct.calcsize(SMF_TRACK_HEADER_FMT))
riff_tag, track_len = struct.unpack(SMF_TRACK_HEADER_FMT, bytes)
midi_file_logger.debug('SMF track header\n Tag: %s\n TrackLen: %d' % (riff_tag, track_len))
if (riff_tag != SMF_TRACK_RIFF_TAG):
raise MIDIFileException(stream, MSG_INVALID_TRACK_HEADER)
self.start = stream.tell()
# check for valid track length
if (self.start + track_len) > file_size:
stream.Warning('Ignoring illegal track length - %d exceeds length of file' % track_len)
track_len = None
# read the entire track
note_on_list = []
while 1:
# save current position
pos = stream.tell()
# check for end of track
if track_len is not None:
if (pos - self.start) >= track_len:
break
# are we past end of track?
if self.end_of_track:
stream.Warning('Ignoring data encountered beyond end-of-track meta-event')
                break
# read delta timestamp
delta = ReadVarLenQty(stream)
if ticks > tick_warning_level:
stream.Warning('Tick value is excessive - possibly corrupt data?')
ticks += delta
# get the event type and process it
msg_type = ReadByte(stream)
# if data byte, check for running status
if msg_type & 0x80 == 0:
# use running status
msg_type = running_status
# back up so event can process data
stream.seek(-1,1)
# if no running status, we have a problem
if not running_status:
stream.Warning('Ignoring data byte received with no running status')
# create event type from stream
event = MIDIEvent.ReadFromStream(stream, seq, ticks, msg_type)
if self.channel == None:
try:
self.channel = event.channel
except AttributeError:
pass
# track note-ons
if event.msg_type == NOTE_ON:
"""
Experimental code to clean up overlapping notes
Clean up now occurs during write process
for note_on in note_on_list:
if (event.channel == note_on.channel) and (event.note == note_on.note):
stream.Warning('Duplicate note-on\'s encountered without intervening note-off')
stream.Warning(' [%s]: %s' % (stream.timebase.ConvertTicksToStr(event.ticks), event.__str__()))
note_on.note_length = event.ticks - note_on.ticks - 1
if note_on.note_length <= 0:
stream.Warning('Eliminating duplicate note-on')
event.ticks = note_on.ticks
self.events.remove(note_on)
"""
note_on_list.append(event)
# process note-offs
if event.msg_type == NOTE_OFF:
for note_on in note_on_list[:]:
if (event.channel == note_on.channel) and (event.note == note_on.note):
note_on.note_length = event.ticks - note_on.ticks
note_on.note_off_velocity = event.velocity
note_on_list.remove(note_on)
break
#else:
# stream.Warning('Note-off encountered without corresponding note-on')
# stream.Warning(' [%s]: %s' % (stream.timebase.ConvertTicksToStr(event.ticks), event.__str__()))
# check for end of track
elif event.msg_type == META_EVENT and event.meta_type == META_EVENT_END_OF_TRACK:
self.end_of_track = event.ticks
# BTH; get track name
elif event.msg_type == META_EVENT and event.meta_type == META_EVENT_SEQUENCE_TRACK_NAME:
self.name = array.array('B', event.msg).tostring()
# append event to event list
else:
self.events.append(event)
seq += 1
# save position for port-mortem
stream.last_good_event = pos
            # update running status
if msg_type < 0xf0:
running_status = msg_type
elif (msg_type < 0xf8) or (msg_type == 0xff):
running_status = None
# check for stuck notes
#if len(note_on_list):
# stream.Warning('Note-ons encountered without corresponding note-offs')
# check for missing end-of-track meta-event
if self.end_of_track is None:
self.last_tick = self.events[-1].ticks
stream.Warning('End of track encountered with no end-of-track meta-event')
# if track length was bad, correct it
if track_len is None:
track_len = stream.tell() - offset - 8
return track_len
def Write (self, stream, filters=None):
# save current file position so we can write header
header_loc = stream.tell()
stream.seek(header_loc + struct.calcsize(SMF_TRACK_HEADER_FMT))
# save a copy of the event list so we can restore it
save_events = copy.copy(self.events)
# create note-off events
index = 0
while 1:
if index >= len(self.events):
break
# if note-on event, create a note-off event
event = self.events[index]
index += 1
if event.msg_type == NOTE_ON:
note_off = NoteOffEvent(event.ticks + event.note_length, index, event.channel, event.note, event.note_off_velocity)
# insert note-off in list
for i in range(index, len(self.events)):
if self.events[i].ticks >= note_off.ticks:
self.events.insert(i, note_off)
break
else:
self.events.append(note_off)
# renumber list
self.events.RenumberSeq()
# write the events
self.running_status = None
self.ticks = 0
for event in self.events:
# write event
event.WriteToStream(stream, self, filters)
# restore original list (without note-off events)
self.events = save_events
# write the end-of-track meta-event
MetaEvent(self.end_of_track, 0, META_EVENT_END_OF_TRACK,[]).WriteToStream(stream, self, None)
# write track header
end_of_track = stream.tell()
track_len = end_of_track - header_loc - struct.calcsize(SMF_TRACK_HEADER_FMT)
stream.seek(header_loc)
bytes = struct.pack(SMF_TRACK_HEADER_FMT, SMF_TRACK_RIFF_TAG, track_len)
stream.write(bytes)
stream.seek(end_of_track)
def Trim (self, start, end, slide=True, chase_controllers=True, delete_meta_events=False, quantize=0):
controllers = None
if quantize:
# quantize events just before start
for event in self.events.SelectEvents(start - quantize, start):
midi_file_logger.debug('Trim: Moving event %s to %d' % (event.__str__(), start))
event.ticks = start
# quantize events just before end
for event in self.events.SelectEvents(end - quantize, end):
midi_file_logger.debug('Trim: Moving event %s to %d' % (event.__str__(), end))
event.ticks = end
# trim start
if start:
# find first event inside trim
start_event = self.events.SeekEvent(start)
if start_event is not None:
# chase controllers to cut point
if chase_controllers:
controllers = self.events.ChaseControllers(self.events[start_event].seq)
controller_events = controllers.GenerateEventList(0)
midi_file_logger.debug('Trim: insert new controller events at %d:' % start)
controller_events.DumpEvents(None, self.stream.timebase)
self.events.InsertEvents(controller_events, start_event)
# delete events
midi_file_logger.debug('Trim: deleting events up to event %d' % start_event)
if delete_meta_events:
self.events.DeleteEvents(None, start_event, None)
else:
self.events.DeleteEvents(None, start_event, start)
# delete everything except metadata
else:
self.events.DeleteEvents(None, None, start)
# trim end
end_event = self.events.SeekEvent(end)
if end_event is not None:
midi_file_logger.debug('Trim: trimming section starting at event %d' % end_event)
self.events.DeleteEvents(end_event, None)
# trim any notes that extend past the end
for event in self.events:
if event.msg_type == NOTE_ON:
if (event.ticks + event.note_length) > end:
midi_file_logger.debug('Trim: trimming note that extends past end %s' % event.TimeEventStr(self.stream.timebase))
event.note_length = end - event.ticks
if event.note_length <= 0:
                        raise Exception('Error in note length - note should have been deleted')
midi_file_logger.debug('Trim: initial end-of-track: %d' % self.end_of_track)
self.end_of_track = min(self.end_of_track, end)
# slide events to start of track to fill hole
if slide and start:
midi_file_logger.debug('Trim: sliding events: %d' % start)
for event in self.events:
if event.ticks > start:
event.ticks -= start
else:
event.ticks = 0
self.end_of_track = max(0, self.end_of_track - start)
midi_file_logger.debug('Trim: new end-of-track: %d' % self.end_of_track)
self.events.RenumberSeq()
self.events.FixNoteLengths()
def DumpEvents (self, output):
self.events.DumpEvents(output, self.stream.timebase)
if output is not None:
output.write('[%s]: end-of-track\n' % self.stream.timebase.ConvertTicksToStr(self.end_of_track))
else:
midi_file_logger.debug('[%s]: end-of-track' % self.stream.timebase.ConvertTicksToStr(self.end_of_track))
#---------------------------------------------------------------
# MIDIFile
#---------------------------------------------------------------
class MIDIFile (file):
"""The MIDIFile class implements methods for reading, parsing,
modifying, and writing Standard MIDI Files (SMF).
"""
def __init__ (self, name, mode):
file.__init__(self, name, mode)
self.timebase = TimeBase()
def ReadFromStream (self, start_offset=0, file_size=None):
"""Parse the MIDI file creating a list of properties, tracks,
and events based on the contents of the file.
"""
# determine file size - without using os.stat
        if file_size is None:
            self.start_offset = start_offset
            self.seek(0, 2)
            file_size = self.tell() - self.start_offset
            self.seek(start_offset, 0)
# for error recovery
self.last_good_event = None
self.error_loc = None
# read the file header - verify it's an SMF file
bytes = self.read(struct.calcsize(SMF_HEADER_FMT))
riff_tag, self.hdr_len, self.format, self.num_tracks, self.timebase.ppqn = struct.unpack(SMF_HEADER_FMT, bytes)
midi_file_logger.debug('SMF header\n Tag: %s\n HeaderLen: %d\n Format: %d\n NumTracks: %d\n PPQN: %d\n' % \
(riff_tag, self.hdr_len, self.format, self.num_tracks, self.timebase.ppqn))
# sanity check on header
if (riff_tag != SMF_RIFF_TAG) or (self.format not in range(2)):
raise MIDIFileException(self, MSG_NOT_SMF_FILE)
# check for odd header size
if self.hdr_len + 8 != struct.calcsize(SMF_HEADER_FMT):
self.Warning('SMF file has unusual header size: %d bytes' % self.hdr_len)
# read each of the tracks
offset = start_offset + self.hdr_len + 8
self.tracks = []
self.end_of_file = 0
for i in range(self.num_tracks):
#print("Track: %d" % i)
# parse the track
track = MIDITrack()
length = track.ReadFromStream(self, offset, file_size)
track.trackNum = i
self.tracks.append(track)
# calculate offset to next track
offset += length + 8
# determine time of last event
self.end_of_file = max(self.end_of_file, track.end_of_track)
# if start_offset is zero, the final offset should match the file length
if (offset - start_offset) != file_size:
self.Warning('SMF file size is incorrect - should be %d, was %d' % (file_size, offset))
def Save (self, offset=0, filters=None):
"""Save this file back to disk with modifications."""
        if ('w' not in self.mode) and ('+' not in self.mode):
raise MIDIFileException(self, 'Cannot write to file in read-only mode')
self.Write(self, offset, filters)
def SaveAs (self, filename, offset=0, filters=None):
"""Save MIDI data to new file."""
output_file = MIDIFile(filename, 'wb')
self.Write(output_file, offset, filters)
output_file.close()
def Write (self, output_file, offset=0, filters=None):
"""This function does the actual work of writing the file."""
# write the file header
output_file.seek(offset)
bytes = struct.pack(SMF_HEADER_FMT, SMF_RIFF_TAG, struct.calcsize(SMF_HEADER_FMT) - 8, self.format, self.num_tracks, self.timebase.ppqn)
output_file.write(bytes)
# write out the tracks
for track in self.tracks:
track.Write(output_file, filters)
# flush the data to disk
output_file.flush()
def ConvertToType0 (self):
"""Convert a file to type 0."""
if self.format == 0:
midi_file_logger.warning('File is already type 0 - ignoring request to convert')
return
# convert to type 0
for track in self.tracks[1:]:
self.tracks[0].MergeEvents(track.events)
self.tracks = self.tracks[:1]
self.num_tracks = 1
self.format = 0
def DeleteEmptyTracks (self):
"""Delete any tracks that do not contain MIDI messages"""
track_num = 0
for track in self.tracks[:]:
            for event in track.events:
                if event.msg_type in MIDI_MESSAGES:
                    break
else:
midi_file_logger.debug('Deleting track %d' % track_num)
self.tracks.remove(track)
track_num += 1
def ConvertToTicks (self, measures, beats, ticks):
return self.timebase.ConvertToTicks(measures, beats, ticks)
def Trim (self, start, end, quantize=0, chase_controllers=True):
track_num = 0
for track in self.tracks:
midi_file_logger.debug('Trimming track %d' % track_num)
track.Trim(start, end, quantize=quantize, chase_controllers=chase_controllers)
track_num += 1
def DumpTracks (self, output=None):
track_num = 0
for track in self.tracks:
if output is None:
midi_file_logger.debug('*** Track %d ***' % track_num)
else:
output.write('*** Track %d ***' % track_num)
track.DumpEvents(output)
track_num += 1
def Warning (self, msg):
midi_file_logger.warning('[%d]: %s' % (self.tell(), msg))
def Error (self, msg):
midi_file_logger.error('[%d]: %s' % (self.tell(), msg))
def DumpError (self):
if self.last_good_event:
midi_file_logger.error('Dumping from last good event:')
pos = self.last_good_event - 16
length = self.error_loc - pos + 16
elif self.error_loc:
midi_file_logger.error('Dumping from 16 bytes prior to error:')
pos = self.error_loc
length = 32
else:
midi_file_logger.error('No dump information available')
return
self.seek(pos, 0)
for i in range(length):
if i % 16 == 0:
if i:
midi_file_logger.error(' '.join(debug_out))
debug_out = ['%08x:' % (pos + i)]
byte = self.read(1)
if len(byte) == 0:
                break
debug_out.append('%02x' % ord(byte))
if i % 16 > 0:
midi_file_logger.error(' '.join(debug_out))
def GetMidiInfo(midiFile):
"""Bth; Get MIDI info"""
class midiData(object):
def __init__ (self):
self.err = 1
self.endMbt = "0:0:0"
self.totalTicks = 0
self.maxTracks = 0
self.maxMeasures = 0
self.maxBeats = 0
self.maxTicks = 0
self.totalTicks = 0
self.timebase = None
self.ppqn = 0
self.beats_per_measure = 0
self.trackList = []
md = midiData()
try:
m = MIDIFile(midiFile, 'rb')
m.ReadFromStream()
for track in m.tracks:
if track.channel is not None:
empty = False
trk = track.channel + 1
else:
empty = True
trk = ''
md.trackList.append(trackGrid(track.trackNum, trk, track.name, empty))
md.endMbt = m.timebase.ConvertTicksToMBT(m.end_of_file)
md.endMbtStr = "%d:%d:%d" % (md.endMbt[0], md.endMbt[1], md.endMbt[2])
md.maxMeasures = md.endMbt[0]
md.maxBeats = 4
md.maxTicks = m.timebase.ppqn
md.maxTracks = m.num_tracks
md.totalTicks = m.end_of_file
md.timebase = m.timebase
md.ppqn = m.timebase.ppqn
md.beats_per_measure = m.timebase.beats_per_measure
#add above if more added
md.err = 0
m.close()
    except:
        raise
return md
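
# Example (sketch): reading basic info from a MIDI file with GetMidiInfo.
# 'song.mid' is an illustrative filename, not part of this module.
#
#   info = GetMidiInfo('song.mid')
#   if not info.err:
#       print 'tracks=%d end=%s ppqn=%d' % (info.maxTracks, info.endMbtStr, info.ppqn)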
#---------------------------------------------------------------
# main
#---------------------------------------------------------------
if __name__ == '__main__':
sys = __import__('sys')
os = __import__('os')
# initialize root logger
root_logger = logging.getLogger('')
root_logger.setLevel(logging.NOTSET)
# initialize console handler
console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter('%(message)s'))
console_handler.setLevel(logging.DEBUG)
root_logger.addHandler(console_handler)
files = []
dirs = []
last_arg = None
sysex_filter = False
drum_filter = False
convert = False
# process args
for arg in sys.argv[1:]:
# previous argument implies this argument
if last_arg is not None:
if last_arg == '-DIR':
dirs.append(arg)
last_arg = None
# check for switch
elif arg[0] == '-':
if arg == '-DIR':
last_arg = arg
elif arg == '-SYSEX':
sysex_filter = True
elif arg == '-DRUMS':
drum_filter = True
elif arg == '-CONVERT':
convert = True
else:
midi_file_logger.error('Bad option %s' % arg)
# must be a filename
else:
files.append(arg)
# setup filters
filters = []
if sysex_filter:
filters.append(EventTypeFilter((SYSEX,)))
if drum_filter:
filters.append(ChannelFilter((9,),False))
# process dirs
for d in dirs:
for root, dir_list, file_list in os.walk(d):
for f in file_list:
if f.endswith('.mid'):
files.append(os.path.join(root, f))
# process files
bad_files = []
for f in files:
midi_file_logger.info('Processing file %s' % f)
midiFile = MIDIFile(f, 'rb')
try:
midiFile.ReadFromStream()
#midiFile.DumpTracks()
#print('[%s]: end-of-track\n' % midiFile.timebase.ConvertTicksToStr(midiFile.end_of_file))
# convert to type 0
if convert and (midiFile.format == 1):
                midiFile.ConvertToType0()
converted = True
else:
converted = False
# write processed file
if converted or len(filters):
                midiFile.SaveAs(f[:-4] + '-mod.mid', filters=filters)
except MIDIFileException, X:
bad_files.append(f)
midi_file_logger.error('Error in file %s' % f)
midi_file_logger.error(X)
midiFile.DumpError()
midiFile.close()
# dump problem files
if len(bad_files):
midi_file_logger.info('The following file(s) had errors:')
for f in bad_files:
midi_file_logger.info(f)
else:
midi_file_logger.info('All files read successfully')
|
py | b41443a6ed2aad6b9e4f0c86f8c39ded1140d56c | # ---------------------------------------------------------------------
# Ericsson.MINI_LINK.get_arp
# ---------------------------------------------------------------------
# Copyright (C) 2007-2017 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetarp import IGetARP
class Script(BaseScript):
name = "Ericsson.MINI_LINK.get_arp"
interface = IGetARP
rx_line = re.compile(
r"^\s*(?P<ip>[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)\s+ether\s+"
r"(?P<mac>\S+)\s+\S+\s+(?P<interface>\S+)\s*$",
re.MULTILINE,
)
def execute(self, interface=None):
r = []
for match in self.rx_line.finditer(self.cli_clean("show arp")):
iface = match.group("interface")
if (interface is not None) and (interface != iface):
continue
r += [match.groupdict()]
return r
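
# Example (sketch) of a "show arp" output line the pattern above matches; the
# exact MINI-LINK format is an assumption inferred from the regex:
#   10.0.0.1    ether   00:11:22:33:44:55   C   eth0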
|
py | b41443fb2e7a6b623e2b4753023b9efda43ee353 | import os
import re
from oarepo_model_builder.entrypoints import load_model, create_builder_from_entrypoints
from tests.mock_filesystem import MockFilesystem
import yaml
def test_mapping():
schema = load_model(
"test.yaml",
"test",
model_content={"oarepo:use": "invenio", "settings": {"supported-langs": {
'cs': {
'text': {
'analyzer': 'czech',
},
'sort': {
'type': 'icu_collation_keyword'
},
'keyword': {
'test': 'test'
}
},
'en': {
'text': {
'analyzer': 'en'
},
'sort': {
'type': 'icu_collation_keyword'
}
}
}},
"model": {"properties": {"a": {"type": "multilingual"}}}},
isort=False,
black=False,
)
filesystem = MockFilesystem()
builder = create_builder_from_entrypoints(filesystem=filesystem)
builder.build(schema, "")
data = builder.filesystem.open(os.path.join("test", "records", "mappings", "v7", "test", "test-1.0.0.json")).read()
assert re.sub(r"\s", "", data) == re.sub(
r"\s",
"",
"""
{"mappings":
{"properties":
{"a":
{"type":"object","properties":{"lang":{"type":"keyword","ignore_above":50},"value":{"type":"text"}}},
"a_cs":{"type":"text","analyzer":"czech","sort":{"type":"icu_collation_keyword","index":false,"language":"cs"},"fields":{"keyword":{"test": "test","type":"keyword","ignore_above":50}}},
"a_en":{"type":"text","analyzer":"en","sort":{"type":"icu_collation_keyword","index":false,"language":"en"},"fields":{"keyword":{"type":"keyword","ignore_above":50}}},
"id":{"type":"keyword","ignore_above":50},
"created":{"type":"date"},"updated":{"type":"date"},
"$schema":{"type":"keyword","ignore_above":50}}}}
""",
)
def test_generated_schema():
schema = load_model(
"test.yaml",
"test",
model_content={"oarepo:use": "invenio", "settings": {"supported-langs": {
'cs': {
'text': {
'analyzer': 'czech',
},
'sort': {
'type': 'icu_collation_keyword'
},
'keyword': {
'test': 'test'
}
},
'en': {
'text': {
'analyzer': 'czech'
},
'sort': {
'type': 'icu_collation_keyword'
}
}
}},
"model": {"properties": {"a": {"type": "multilingual"}}}},
isort=False,
black=False,
)
filesystem = MockFilesystem()
builder = create_builder_from_entrypoints(filesystem=filesystem)
builder.build(schema, "")
data = builder.filesystem.open(os.path.join("test", "services", "schema.py")).read()
assert re.sub(r"\s", "", data) == re.sub(
r"\s",
"",
"""
import marshmallow as ma
import marshmallow.fields as ma_fields
import marshmallow.validate as ma_valid
from test.services.multilingual_schema import MultilingualSchema
from invenio_records_resources.services.records.schema import BaseRecordSchema as InvenioBaseRecordSchema
class TestSchema(ma.Schema, ):
\"""TestSchema schema.\"""
a = ma_fields.List(ma_fields.Nested(lambda: MultilingualSchema()))
id = ma_fields.String()
created = ma_fields.Date()
updated = ma_fields.Date()
_schema = ma_fields.String(data_key='$schema')
""",
)
def test_sample_data():
schema = load_model(
"test.yaml",
"test",
model_content={"oarepo:use": "invenio", "settings": {"supported-langs": {
'cs': {
'text': {
'analyzer': 'czech',
},
'sort': {
'type': 'icu_collation_keyword'
},
'keyword': {
'test': 'test'
}
},
'en': {
'text': {
'analyzer': 'czech'
},
'sort': {
'type': 'icu_collation_keyword'
}
}
}},
"model": {"properties": {"a": {"type": "multilingual"}}},
"oarepo:sample": {"count": 1}},
isort=False,
black=False,
)
filesystem = MockFilesystem()
builder = create_builder_from_entrypoints(filesystem=filesystem)
builder.build(schema, "")
data = yaml.full_load(builder.filesystem.open(os.path.join("scripts", "sample_data.yaml")).read())
assert isinstance(data['a'], list)
assert len(data['a']) == 2
assert set(x['lang'] for x in data['a']) == {'cs', 'en'}
def test_search_options():
schema = load_model(
"test.yaml",
"test",
model_content={"oarepo:use": "invenio", "settings": {"supported-langs": {
'cs': {
'text': {
'analyzer': 'czech',
},
'sort': {
'type': 'icu_collation_keyword'
},
'keyword': {
'test': 'test'
}
},
'en': {
'text': {
'analyzer': 'czech'
},
'sort': {
'type': 'icu_collation_keyword'
}
}
}},
"model": {"properties": {"a": {"type": "multilingual", "oarepo:sortable":{}}}}},
isort=False,
black=False,
)
filesystem = MockFilesystem()
builder = create_builder_from_entrypoints(filesystem=filesystem)
builder.build(schema, "")
data = builder.filesystem.open(os.path.join("test", "services", "search.py")).read()
print(data)
assert re.sub(r"\s", "", data) == re.sub(
r"\s",
"",
"""
from invenio_records_resources.services import SearchOptions as InvenioSearchOptions
from . import facets
def _(x):
\"""Identity function for string extraction.\"""
return x
class TestSearchOptions(InvenioSearchOptions):
\"""TestRecord search options.\"""
facets = {
'a': facets.a,
'_id': facets._id,
'created': facets.created,
'updated': facets.updated,
'_schema': facets._schema,
}
sort_options = {
"bestmatch": dict(
title=_('Best match'),
fields=['_score'], # ES defaults to desc on `_score` field
),
"newest": dict(
title=_('Newest'),
fields=['-created'],
),
"oldest": dict(
title=_('Oldest'),
fields=['created'],
),
'a': {'fields': ['a']},'a_cs': {'fields': ['a_cs']},
'a_en': {'fields': ['a_en']},
}
""",
) |
py | b414459eb89cc02ff5045ef5e73e0ca37763b592 | """
Support for Wink switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.wink/
"""
import logging
from homeassistant.components.wink import WinkToggleDevice
from homeassistant.const import CONF_ACCESS_TOKEN
REQUIREMENTS = ['python-wink==0.7.4']
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Wink platform."""
import pywink
if discovery_info is None:
token = config.get(CONF_ACCESS_TOKEN)
if token is None:
logging.getLogger(__name__).error(
"Missing wink access_token. "
"Get one at https://winkbearertoken.appspot.com/")
return
pywink.set_bearer_token(token)
add_devices(WinkToggleDevice(switch) for switch in pywink.get_switches())
add_devices(WinkToggleDevice(switch) for switch in
pywink.get_powerstrip_outlets())
add_devices(WinkToggleDevice(switch) for switch in pywink.get_sirens())
|
py | b41446d8e5fdcf2074c3cd7ea0ed5918133261df | """Loads preprocessed MNIST digits from the keras dataset."""
import tensorflow as tf
from .registry import register
from .loader_utils import load_from_keras
@register("digits")
def load_digits(num_valid=10000, label_smoothing=0):
"""
Returns preprocessed train, validation, and test sets for MNIST digits.
"""
return load_from_keras(
tf.keras.datasets.mnist, num_valid, label_smoothing=label_smoothing
)
|
py | b41447bb0ddce6f772392f97ae79e9aa9e4ee6c4 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This is the updated colour science code to reflect the
# 2020 SciLifeLab visual ID.
# Colourblind friendly colour sets - not typically used in reporting
COLOURS = {
1: ["#4477AA"],
2: ["#4477AA", "#CC6677"],
3: ["#4477AA", "#DDCC77", "#CC6677"],
4: ["#4477AA", "#117733", "#DDCC77", "#CC6677"],
5: ["#332288", "#88CCEE", "#117733", "#DDCC77", "#CC6677"],
6: ["#332288", "#88CCEE", "#117733", "#DDCC77", "#CC6677", "#AA4499"],
7: ["#332288", "#88CCEE", "#44AA99", "#117733", "#DDCC77", "#CC6677", "#AA4499"],
8: [
"#332288",
"#88CCEE",
"#44AA99",
"#117733",
"#999933",
"#DDCC77",
"#CC6677",
"#AA4499",
],
9: [
"#332288",
"#88CCEE",
"#44AA99",
"#117733",
"#999933",
"#DDCC77",
"#CC6677",
"#882255",
"#AA4499",
],
10: [
"#332288",
"#88CCEE",
"#44AA99",
"#117733",
"#999933",
"#DDCC77",
"#661100",
"#CC6677",
"#882255",
"#AA4499",
],
11: [
"#332288",
"#6699CC",
"#88CCEE",
"#44AA99",
"#117733",
"#999933",
"#DDCC77",
"#661100",
"#CC6677",
"#882255",
"#AA4499",
],
12: [
"#332288",
"#6699CC",
"#88CCEE",
"#44AA99",
"#117733",
"#999933",
"#DDCC77",
"#661100",
"#CC6677",
"#AA4466",
"#882255",
"#AA4499",
],
"12-mix": [
"#DDCC77",
"#117733",
"#6699CC",
"#661100",
"#882255",
"#332288",
"#AA4466",
"#88CCEE",
"#44AA99",
"#999933",
"#CC6677",
"#AA4499",
],
}
SCILIFE_COLOURS = [
"#A7C947", # 100% green
"#E9F2D1", # 25% green
"#D3E4A3", # 50% green
"#BDD775", # 75% green
"#045C64", # 100% teal
"#C0D6D8", # 25% teal
"#82AEB2", # 50% teal
"#43858B", # 75% teal
"#4C979F", # 100% aqua
"#D2E5E7", # 25% aqua
"#A6CBCF", # 50% aqua
"#79B1B7", # 75% aqua
"#491F53", # 100% grape
"#D2C7D4", # 25% grape
"#A48FA9", # 50% grape
"#77577E", # 75% grape
"#E5E5E5", # light grey
"#A6A6A6", # medium grey
"#3F3F3F", # dark grey
]
SCILIFE_COLOURS_NOGREY = [
"#A7C947", # 100% green
"#E9F2D1", # 25% green
"#D3E4A3", # 50% green
"#BDD775", # 75% green
"#045C64", # 100% teal
"#C0D6D8", # 25% teal
"#82AEB2", # 50% teal
"#43858B", # 75% teal
"#4C979F", # 100% aqua
"#D2E5E7", # 25% aqua
"#A6CBCF", # 50% aqua
"#79B1B7", # 75% aqua
"#491F53", # 100% grape
"#D2C7D4", # 25% grape
"#A48FA9", # 50% grape
"#77577E", # 75% grape
]
SCILIFE_COLOURS_GREYS = [
"#E5E5E5", # light grey
"#A6A6A6", # medium grey
"#3F3F3F", # dark grey
]
# below is with the users unabbreviated (under this is abbreviated, used for pies)
FACILITY_USER_AFFILIATION_COLOUR_OFFICIAL_UNABB = {
"Chalmers University of Technology": "#006C5C", # https://www.chalmers.se/SiteCollectionDocuments/om%20chalmers%20dokument/Grafisk%20profil/Chalmers_visuella_identitet_1.0_2018.pdf
"Karolinska Institutet": "#79084A", # https://ki.se/medarbetare/farger-i-kis-grafiska-profil
"KTH Royal Institute of Technology": "#1954A6", # https://intra.kth.se/administration/kommunikation/grafiskprofil/profilfarger-for-print-1.845077
"Linköping University": "#00B9E7", # https://insidan.liu.se/kommunikationsstod/grafiskprofil/valkommen/1.750068/Liu_grafisk_manual_12english_selections.pdf
"Lund University": "#9C6114", # https://www.staff.lu.se/sites/staff.lu.se/files/profile-colours-eng-a4.png
"Stockholm University": "#002F5F", # https://www.su.se/medarbetare/kommunikation/grafisk-manual/f%C3%A4rger-1.362110
"Swedish University of Agricultural Sciences": "#154734", # https://internt.slu.se/globalassets/mw/stod-serv/kommmarkn.for/kommunikator/img/colour-palette-eng.pdf
"Umeå University": "#2A4765", # https://www.aurora.umu.se/stod-och-service/kommunikation/grafisk-profil/
"University of Gothenburg": "#004B89", # https://medarbetarportalen.gu.se/Kommunikation/visuell-identitet/grundprofil/farger/
"Uppsala University": "#990000", # https://mp.uu.se/documents/432512/911394/Grafiska+riktlinjerokt2018.pdf/b4c90d05-2cc7-d59e-b0af-c357fb33c84b
"Örebro University": "#D4021D", # NOT official - red taken from logo at https://eitrawmaterials.eu/orebro-university/
"Naturhistoriska Riksmuséet": "#408EBF", # NOT official I pulled it from the logo at http://www.nrm.se/
"Healthcare": "#FF99DD", # pink
"Industry": "#9FA1A3", # grey
"International University": "#91D88C", # light green
"Other international organization": "#FFFF99", # yellow
"Other Swedish organization": "#B15928", # burnt orange
"Other Swedish University": "#FF7C5B", # red orange
}
# abbreviated to match names given in pies (IAB 2021)
FACILITY_USER_AFFILIATION_COLOUR_OFFICIAL_ABB = {
"Chalmers": "#006C5C", # https://www.chalmers.se/SiteCollectionDocuments/om%20chalmers%20dokument/Grafisk%20profil/Chalmers_visuella_identitet_1.0_2018.pdf
"KI": "#79084A", # https://ki.se/medarbetare/farger-i-kis-grafiska-profil
"KTH": "#1954BA", # https://intra.kth.se/administration/kommunikation/grafiskprofil/profilfarger-for-print-1.845077
"LiU": "#00B9E7", # https://insidan.liu.se/kommunikationsstod/grafiskprofil/valkommen/1.750068/Liu_grafisk_manual_12english_selections.pdf
"LU": "#9C6114", # https://www.medarbetarwebben.lu.se/sites/medarbetarwebben.lu.se/files/grafiskmanual-2.0-2018.pdf
"SU": "#002F5F", # https://www.su.se/staff/organisation-governance/governing-documents-rules-and-regulations/communication-collaboration/rules-for-the-visual-identity-at-stockholm-university-1.17458
"SLU": "#154734", # https://internt.slu.se/en/support-services/administrative-support/communication/brand-guidelines/visual-identity/slu-colours/palette/
"UmU": "#2A4765", # https://www.aurora.umu.se/en/service-and-support/advice-and-guidelines/communication/visual-identity/
"GU": "#004B89", # https://medarbetarportalen.gu.se/Kommunikation/visuell-identitet/grundprofil/farger/
"UU": "#990000", # https://mp.uu.se/documents/432512/911394/Grafiska+riktlinjerokt2018.pdf/b4c90d05-2cc7-d59e-b0af-c357fb33c84b
"ÖU": "#D4021D", # NOT official - red taken from logo at https://eitrawmaterials.eu/orebro-university/
"NRM": "#408EBF", # NOT official I pulled it from the logo at http://www.nrm.se/
"Healthcare": "#FF99DD", # pink
"Industry": "#9FA1A3", # grey
"Int Univ": "#91D88C", # light green
"Other Int Org": "#FFFF99", # yellow
"Other Swe Org": "#B15928", # burnt orange
"Other Swe Univ": "#FF7C5B", # red orange
}
# Author of colour gradient stuff: Ben Southgate https://bsou.io/posts/color-gradients-with-python
def hex_to_RGB(hex):
""" "#FFFFFF" -> [255,255,255]"""
# Pass 16 to the integer function for change of base
return [int(hex[i : i + 2], 16) for i in range(1, 6, 2)]
def RGB_to_hex(RGB):
"""[255,255,255] -> "#FFFFFF" """
# Components need to be integers for hex to make sense
RGB = [int(x) for x in RGB]
return "#" + "".join(
["0{0:x}".format(v) if v < 16 else "{0:x}".format(v) for v in RGB]
)
def color_dict(gradient):
"""Takes in a list of RGB sub-lists and returns dictionary of
colors in RGB and hex form for use in a graphing function
defined later on"""
return {
"hex": [RGB_to_hex(RGB) for RGB in gradient],
"r": [RGB[0] for RGB in gradient],
"g": [RGB[1] for RGB in gradient],
"b": [RGB[2] for RGB in gradient],
}
def linear_gradient(start_hex, finish_hex="#FFFFFF", n=10):
"""returns a gradient list of (n) colors between
two hex colors. start_hex and finish_hex
should be the full six-digit color string,
    including the number sign ("#FFFFFF")"""
# Starting and ending colors in RGB form
s = hex_to_RGB(start_hex)
f = hex_to_RGB(finish_hex)
# Initilize a list of the output colors with the starting color
RGB_list = [s]
# Calcuate a color at each evenly spaced value of t from 1 to n
for t in range(1, n):
# Interpolate RGB vector for color at the current value of t
curr_vector = [
int(s[j] + (float(t) / (n - 1)) * (f[j] - s[j])) for j in range(3)
]
# Add it to our list of output colors
RGB_list.append(curr_vector)
return color_dict(RGB_list)
def rand_hex_color(num=1):
"""Generate random hex colors, default is one,
returning a string. If num is greater than
1, an array of strings is returned."""
colors = [RGB_to_hex([x * 255 for x in np.random.rand(3)]) for i in range(num)]
if num == 1:
return colors[0]
else:
return colors
def polylinear_gradient(colors, n):
"""returns a list of colors forming linear gradients between
all sequential pairs of colors. "n" specifies the total
number of desired output colors"""
# The number of colors per individual linear gradient
n_out = int(float(n) / (len(colors) - 1))
# returns dictionary defined by color_dict()
gradient_dict = linear_gradient(colors[0], colors[1], n_out)
if len(colors) > 1:
for col in range(1, len(colors) - 1):
next = linear_gradient(colors[col], colors[col + 1], n_out)
for k in ("hex", "r", "g", "b"):
# Exclude first point to avoid duplicates
gradient_dict[k] += next[k][1:]
return gradient_dict
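

# Example (sketch): a 5-step gradient between the SciLifeLab green and teal.
# This demo block is illustrative and not part of the original palette module.
if __name__ == "__main__":
    demo = linear_gradient(SCILIFE_COLOURS[0], SCILIFE_COLOURS[4], n=5)
    print(demo["hex"])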
|
py | b414482c1070737a1a2784339c6cbc71b5cd1345 | # Copyright (c) 2020 VMware Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
import logging
from azure.mgmt.storage import StorageManagementClient
from azure.identity import ClientSecretCredential
from azure.mgmt.storage.models import (
NetworkRuleSet,
StorageAccountUpdateParameters,
)
logging.basicConfig(level=logging.INFO)
class EnableTrustedMicrosoftServices(object):
def parse(self, payload):
"""Parse payload received from Remediation Service.
:param payload: JSON string containing parameters received from the remediation service.
:type payload: str.
:returns: Dictionary of parsed parameters
:rtype: dict
:raises: KeyError, JSONDecodeError
"""
remediation_entry = json.loads(payload)
object_id = remediation_entry["notificationInfo"]["FindingInfo"]["ObjectId"]
region = remediation_entry["notificationInfo"]["FindingInfo"]["Region"]
object_chain = remediation_entry["notificationInfo"]["FindingInfo"][
"ObjectChain"
]
object_chain_dict = json.loads(object_chain)
subscription_id = object_chain_dict["cloudAccountId"]
properties = object_chain_dict["properties"]
resource_group_name = ""
for property in properties:
if property["name"] == "ResourceGroup" and property["type"] == "string":
resource_group_name = property["stringV"]
break
logging.info("parsed params")
logging.info(f" resource_group_name: {resource_group_name}")
logging.info(f" account_name: {object_id}")
logging.info(f" subscription_id: {subscription_id}")
logging.info(f" region: {region}")
return {
"resource_group_name": resource_group_name,
"account_name": object_id,
"subscription_id": subscription_id,
"region": region,
}
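
    # Example (sketch) of the payload shape parse() expects; the values below
    # are illustrative placeholders, not a real finding:
    #
    # {"notificationInfo": {"FindingInfo": {
    #     "ObjectId": "examplestorageaccount",
    #     "Region": "eastus",
    #     "ObjectChain": "{\"cloudAccountId\": \"<subscription-id>\",
    #                      \"properties\": [{\"name\": \"ResourceGroup\",
    #                                        \"type\": \"string\",
    #                                        \"stringV\": \"example-rg\"}]}"}}}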
def remediate(self, client, resource_group_name, account_name):
"""Enable Trusted Microsoft Services for Storage Account access
:param client: Instance of the Azure StorageManagementClient.
:param resource_group_name: The name of the resource group to which the storage account belongs.
:param account_name: The name of the storage account.
:param client: str.
:type resource_group_name: str.
:type account_name: str.
:returns: Integer signaling success or failure.
:rtype: int
:raises: msrestazure.azure_exceptions.CloudError
"""
try:
storage_account = client.storage_accounts.get_properties(
resource_group_name=resource_group_name, account_name=account_name,
)
if storage_account.network_rule_set.bypass is None:
bypass = "AzureServices"
else:
bypass = storage_account.network_rule_set.bypass + ", AzureServices"
logging.info(" executing client.blob_containers.update")
logging.info(f" resource_group_name={resource_group_name}")
logging.info(f" account_name={account_name}")
# Enabling Trusted Microsoft Services for Storage Account access
client.storage_accounts.update(
resource_group_name=resource_group_name,
account_name=account_name,
parameters=StorageAccountUpdateParameters(
network_rule_set=NetworkRuleSet(
bypass=bypass, default_action="Deny"
)
),
)
except Exception as e:
logging.error(f"{str(e)}")
raise
return 0
def run(self, args):
"""Run the remediation job.
:param args: List of arguments provided to the job.
:type args: list.
:returns: int
"""
params = self.parse(args[1])
credentials = ClientSecretCredential(
client_id=os.environ.get("AZURE_CLIENT_ID"),
client_secret=os.environ.get("AZURE_CLIENT_SECRET"),
tenant_id=os.environ.get("AZURE_TENANT_ID"),
)
client = StorageManagementClient(credentials, params["subscription_id"])
return self.remediate(
client, params["resource_group_name"], params["account_name"],
)
if __name__ == "__main__":
sys.exit(EnableTrustedMicrosoftServices().run(sys.argv))
|
py | b414497f876aa9a38c2dcf3ca4d30be910ea59a8 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2021, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import time
from typing import Tuple
# External imports
from flaky import flaky
# Bokeh imports
from bokeh._testing.plugins.project import BokehServerPage, SinglePlotPage
from bokeh._testing.util.compare import cds_data_almost_equal
from bokeh._testing.util.selenium import RECORD
from bokeh.application.handlers.function import ModifyDoc
from bokeh.layouts import column
from bokeh.models import (
Circle,
ColumnDataSource,
CustomAction,
CustomJS,
Div,
MultiLine,
Plot,
PolyDrawTool,
Range1d,
)
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
pytest_plugins = (
"bokeh._testing.plugins.project",
)
def _make_plot(num_objects=0, drag=True, vertices=False):
source = ColumnDataSource(dict(xs=[[1, 2]], ys=[[1, 1]]))
plot = Plot(height=400, width=400, x_range=Range1d(0, 3), y_range=Range1d(0, 3), min_border=0)
renderer = plot.add_glyph(source, MultiLine(xs='xs', ys='ys'))
tool = PolyDrawTool(num_objects=num_objects, drag=drag, renderers=[renderer])
if vertices:
psource = ColumnDataSource(dict(x=[], y=[]))
prenderer = plot.add_glyph(psource, Circle(x='x', y='y', size=10))
tool.vertex_renderer = prenderer
plot.add_tools(tool)
plot.toolbar.active_multi = tool
code = RECORD("xs", "source.data.xs", final=False) + RECORD("ys", "source.data.ys")
plot.add_tools(CustomAction(callback=CustomJS(args=dict(source=source), code=code)))
plot.toolbar_sticky = False
return plot
def _make_server_plot(expected) -> Tuple[ModifyDoc, Plot]:
plot = Plot(height=400, width=400, x_range=Range1d(0, 3), y_range=Range1d(0, 3), min_border=0)
def modify_doc(doc):
source = ColumnDataSource(dict(xs=[[1, 2]], ys=[[1, 1]]))
renderer = plot.add_glyph(source, MultiLine(xs='xs', ys='ys'))
tool = PolyDrawTool(renderers=[renderer])
plot.add_tools(tool)
plot.toolbar.active_multi = tool
div = Div(text='False')
def cb(attr, old, new):
if cds_data_almost_equal(new, expected):
div.text = 'True'
source.on_change('data', cb)
code = RECORD("matches", "div.text")
plot.add_tools(CustomAction(callback=CustomJS(args=dict(div=div), code=code)))
doc.add_root(column(plot, div))
return modify_doc, plot
@pytest.mark.selenium
class Test_PolyDrawTool:
def test_selected_by_default(self, single_plot_page: SinglePlotPage) -> None:
plot = _make_plot()
page = single_plot_page(plot)
button = page.get_toolbar_button('poly-draw')
assert 'active' in button.get_attribute('class')
assert page.has_no_console_errors()
def test_can_be_deselected_and_selected(self, single_plot_page: SinglePlotPage) -> None:
plot = _make_plot()
page = single_plot_page(plot)
# Check is active
button = page.get_toolbar_button('poly-draw')
assert 'active' in button.get_attribute('class')
# Click and check is not active
button = page.get_toolbar_button('poly-draw')
button.click()
assert 'active' not in button.get_attribute('class')
# Click again and check is active
button = page.get_toolbar_button('poly-draw')
button.click()
assert 'active' in button.get_attribute('class')
assert page.has_no_console_errors()
def test_double_click_triggers_draw(self, single_plot_page: SinglePlotPage) -> None:
plot = _make_plot()
page = single_plot_page(plot)
# ensure double clicking adds a poly
page.double_click_canvas_at_position(plot, 200, 200)
page.double_click_canvas_at_position(plot, 300, 300)
time.sleep(0.5)
page.click_custom_action()
expected = {"xs": [[1, 2], [1.6216216216216217, 2.4324324324324325]],
"ys": [[1, 1], [1.5, 0.75]]}
assert cds_data_almost_equal(page.results, expected)
assert page.has_no_console_errors()
def test_click_snaps_to_vertex(self, single_plot_page: SinglePlotPage) -> None:
plot = _make_plot(vertices=True)
page = single_plot_page(plot)
# ensure double clicking adds a poly
page.double_click_canvas_at_position(plot, 200, 200)
page.click_canvas_at_position(plot, 300, 300)
time.sleep(0.5)
page.double_click_canvas_at_position(plot, 201, 201)
time.sleep(0.5)
page.click_custom_action()
expected = {"xs": [[1, 2], [1.6216216216216217, 2.4324324324324325, 1.6216216216216217]],
"ys": [[1, 1], [1.5, 0.75, 1.5]]}
assert cds_data_almost_equal(page.results, expected)
assert page.has_no_console_errors()
def test_drag_moves_multi_line(self, single_plot_page: SinglePlotPage) -> None:
plot = _make_plot()
page = single_plot_page(plot)
# ensure clicking adds a point
page.double_click_canvas_at_position(plot, 200, 200)
page.double_click_canvas_at_position(plot, 300, 300)
time.sleep(0.4) # hammerJS click timeout
page.drag_canvas_at_position(plot, 200, 200, 70, 50)
page.click_custom_action()
expected = {"xs": [[1, 2], [2.1891891891891895, 3]],
"ys": [[1, 1], [1.125, 0.375]]}
assert cds_data_almost_equal(page.results, expected)
assert page.has_no_console_errors()
def test_drag_does_not_move_multi_line(self, single_plot_page: SinglePlotPage) -> None:
plot = _make_plot(drag=False)
page = single_plot_page(plot)
# ensure clicking adds a point
page.double_click_canvas_at_position(plot, 200, 200)
page.double_click_canvas_at_position(plot, 300, 300)
time.sleep(0.4) # hammerJS click timeout
page.drag_canvas_at_position(plot, 200, 200, 70, 53)
page.click_custom_action()
expected = {"xs": [[1, 2], [1.6216216216216217, 2.4324324324324325]],
"ys": [[1, 1], [1.5, 0.75]] }
assert cds_data_almost_equal(page.results, expected)
assert page.has_no_console_errors()
def test_num_object_limits_multi_lines(self, single_plot_page: SinglePlotPage) -> None:
plot = _make_plot(num_objects=1)
page = single_plot_page(plot)
# ensure clicking adds a point
page.double_click_canvas_at_position(plot, 200, 200)
page.double_click_canvas_at_position(plot, 300, 300)
time.sleep(0.4) # hammerJS click timeout
page.drag_canvas_at_position(plot, 200, 200, 70, 50)
page.click_custom_action()
expected = {"xs": [[2.1891891891891895, 3]],
"ys": [[1.125, 0.375]]}
assert cds_data_almost_equal(page.results, expected)
assert page.has_no_console_errors()
@flaky(max_runs=10)
def test_poly_draw_syncs_to_server(self, bokeh_server_page: BokehServerPage) -> None:
expected = {"xs": [[1, 2], [1.6216216216216217, 2.4324324324324325]],
"ys": [[1, 1], [1.5, 0.75]]}
modify_doc, plot = _make_server_plot(expected)
page = bokeh_server_page(modify_doc)
# ensure double clicking adds a poly
page.double_click_canvas_at_position(plot, 200, 200)
page.double_click_canvas_at_position(plot, 300, 300)
time.sleep(0.5)
page.click_custom_action()
assert page.results == {"matches": "True"}
# TODO (bev) Fix up after GH CI switch
@pytest.mark.skip
@flaky(max_runs=10)
def test_poly_drag_syncs_to_server(self, bokeh_server_page: BokehServerPage) -> None:
expected = {"xs": [[1, 2], [2.1891891891891895, 3]],
"ys": [[1, 1], [1.125, 0.375]]}
modify_doc, plot = _make_server_plot(expected)
page = bokeh_server_page(modify_doc)
# ensure dragging move multi_line
page.double_click_canvas_at_position(plot, 200, 200)
page.double_click_canvas_at_position(plot, 300, 300)
time.sleep(0.4) # hammerJS click timeout
page.drag_canvas_at_position(plot, 200, 200, 70, 50)
page.click_custom_action()
assert page.results == {"matches": "True"}
@flaky(max_runs=10)
def test_poly_delete_syncs_to_server(self, bokeh_server_page: BokehServerPage) -> None:
expected = {"xs": [[1, 2]],
"ys": [[1, 1]]}
modify_doc, plot = _make_server_plot(expected)
page = bokeh_server_page(modify_doc)
page.double_click_canvas_at_position(plot, 200, 200)
page.double_click_canvas_at_position(plot, 300, 300)
time.sleep(0.4) # hammerJS click timeout
page.click_canvas_at_position(plot, 200, 200)
time.sleep(0.4) # hammerJS click timeout
page.send_keys("\ue003") # Backspace
time.sleep(0.4) # hammerJS click timeout
page.click_custom_action()
assert page.results == {"matches": "True"}
|
py | b41449e502e16eafd268b96752cb830363adbc36 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.texttospeech_v1.types import cloud_tts
from .base import TextToSpeechTransport, DEFAULT_CLIENT_INFO
from .grpc import TextToSpeechGrpcTransport
class TextToSpeechGrpcAsyncIOTransport(TextToSpeechTransport):
"""gRPC AsyncIO backend transport for TextToSpeech.
Service that implements Google Cloud Text-to-Speech API.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "texttospeech.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "texttospeech.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def list_voices(
self,
) -> Callable[
[cloud_tts.ListVoicesRequest], Awaitable[cloud_tts.ListVoicesResponse]
]:
r"""Return a callable for the list voices method over gRPC.
Returns a list of Voice supported for synthesis.
Returns:
Callable[[~.ListVoicesRequest],
Awaitable[~.ListVoicesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_voices" not in self._stubs:
self._stubs["list_voices"] = self.grpc_channel.unary_unary(
"/google.cloud.texttospeech.v1.TextToSpeech/ListVoices",
request_serializer=cloud_tts.ListVoicesRequest.serialize,
response_deserializer=cloud_tts.ListVoicesResponse.deserialize,
)
return self._stubs["list_voices"]
@property
def synthesize_speech(
self,
) -> Callable[
[cloud_tts.SynthesizeSpeechRequest],
Awaitable[cloud_tts.SynthesizeSpeechResponse],
]:
r"""Return a callable for the synthesize speech method over gRPC.
Synthesizes speech synchronously: receive results
after all text input has been processed.
Returns:
Callable[[~.SynthesizeSpeechRequest],
Awaitable[~.SynthesizeSpeechResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "synthesize_speech" not in self._stubs:
self._stubs["synthesize_speech"] = self.grpc_channel.unary_unary(
"/google.cloud.texttospeech.v1.TextToSpeech/SynthesizeSpeech",
request_serializer=cloud_tts.SynthesizeSpeechRequest.serialize,
response_deserializer=cloud_tts.SynthesizeSpeechResponse.deserialize,
)
return self._stubs["synthesize_speech"]
def close(self):
return self.grpc_channel.close()
__all__ = ("TextToSpeechGrpcAsyncIOTransport",)
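
# Example (sketch): constructing the transport directly. In typical use the
# generated TextToSpeechAsyncClient builds this for you; Application Default
# Credentials are assumed, and the wiring below is illustrative only.
#
#   transport = TextToSpeechGrpcAsyncIOTransport()
#   channel = transport.grpc_channel  # shared aio.Channel for both RPCs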
|
py | b4144a4362b10c681e9b263d6f09bb3d69693217 | # -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/SampledData
Release: DSTU2
Version: 1.0.2
Revision: 7202
"""
from pydantic import Field
from . import fhirtypes
from .element import Element
class SampledData(Element):
"""A series of measurements taken by a device.
A series of measurements taken by a device, with upper and lower limits.
There may be more than one dimension in the data.
"""
resource_type = Field("SampledData", const=True)
data: fhirtypes.String = Field(
...,
alias="data",
title="Type `String` (represented as `dict` in JSON)",
description='Decimal values with spaces, or "E" | "U" | "L"',
)
dimensions: fhirtypes.PositiveInt = Field(
...,
alias="dimensions",
title="Type `PositiveInt` (represented as `dict` in JSON)",
description="Number of sample points at each time point",
)
factor: fhirtypes.Decimal = Field(
None,
alias="factor",
title="Type `Decimal` (represented as `dict` in JSON)",
description="Multiply data by this before adding to origin",
)
lowerLimit: fhirtypes.Decimal = Field(
None,
alias="lowerLimit",
title="Type `Decimal` (represented as `dict` in JSON)",
description="Lower limit of detection",
)
origin: fhirtypes.QuantityType = Field(
...,
alias="origin",
title="Type `Quantity` (represented as `dict` in JSON)",
description="Zero value and units",
)
period: fhirtypes.Decimal = Field(
...,
alias="period",
title="Type `Decimal` (represented as `dict` in JSON)",
description="Number of milliseconds between samples",
)
upperLimit: fhirtypes.Decimal = Field(
None,
alias="upperLimit",
title="Type `Decimal` (represented as `dict` in JSON)",
description="Upper limit of detection",
)
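

# Example (sketch): a minimal SampledData element. The values are illustrative;
# pydantic coerces the plain dict into the Quantity model required by `origin`.
if __name__ == "__main__":
    sample = SampledData(
        origin={"value": 0.0, "unit": "mV"},
        period=10.0,
        dimensions=1,
        data="0 5 10 15",
    )
    print(sample.json())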
|
py | b4144acd159385c805dbc24792ba51e019b57e47 | import base64, requests, sys
print '-----------------------------------------------'
print '- PureCloud Python Client Credentials Example -'
print '-----------------------------------------------'
clientId = '7de3af06-c0b3-4f9b-af45-72f4a1403797'
clientSecret = '1duphi_YtswNjN2GXOg_APY-KKTmnYXvfNj7N8GUhnM'
# Base64 encode the client ID and client secret
authorization = base64.b64encode(clientId + ':' + clientSecret)
# Prepare for POST /oauth/token request
requestHeaders = {
'Authorization': 'Basic ' + authorization,
'Content-Type': 'application/x-www-form-urlencoded'
}
requestBody = {
'grant_type': 'client_credentials'
}
# Get token
response = requests.post('https://login.mypurecloud.com/oauth/token', data=requestBody, headers=requestHeaders)
# Check response
if response.status_code == 200:
print 'Got token'
else:
print 'Failure: ' + str(response.status_code) + ' - ' + response.reason
sys.exit(response.status_code)
# Get JSON response body
responseJson = response.json()
# Prepare for GET /api/v2/authorization/roles request
requestHeaders = {
'Authorization': responseJson['token_type'] + ' ' + responseJson['access_token']
}
# Get roles
response = requests.get('https://api.mypurecloud.com/api/v2/authorization/roles', headers=requestHeaders)
# Check response
if response.status_code == 200:
print 'Got roles'
else:
print 'Failure: ' + str(response.status_code) + ' - ' + response.reason
sys.exit(response.status_code)
# Print roles
print '\nRoles:'
for entity in response.json()['entities']:
print ' ' + entity['name']
print '\nDone' |
py | b4144bde9d107221486d76552163a2e187e4f73c | def j(abc):
print('-'*30)
print(abc)
print('-'*30)
j(str(input('João wants cheese')))
'''def s(a, b):
s = a + b
print(s)
s(5, 8)
s(100, -50)
s(-5, - 8)'''
def c(*num):
#print(num)
for v in num:
print(f'{v} ', end='')
    #print('END')
    t = len(num)
    print(f'Received {num}, {t} values in total.')
c(2, 1, 3)
c(8, 0)
c(7, 9, 4, 4, 8, 0)
'''def d(l):
p = 0
while p < len(l):
l[p] *=2
p +=1
v = [6, 3, 9, 1, 0, 2]
print(v)
d(v)
print(v)'''
'''def soma(*v):
s = 0
for n in v:
s += n
    print(f'The sum of the values {v} is {s}')
soma(5, 2)
soma(2, 9, 4)
soma(0, -2, 4)''' |
py | b4144bef6686091b9cd843a3f22d87e5a3cfbf4d | try:
import urllib.parse
def escape_url(url): return urllib.parse.quote(url)
except ImportError:
import urllib
def escape_url(url): return urllib.quote(url)
import random
from kivy.lang import Builder
from kivy.uix.recycleview import RecycleView
from kivy.uix.recycleview.views import RecycleDataViewBehavior
from kivy.uix.label import Label
from kivy.uix.image import AsyncImage
from kivy.properties import BooleanProperty, NumericProperty, StringProperty, ListProperty, ObjectProperty
from kivy.uix.recycleboxlayout import RecycleBoxLayout
from kivy.uix.behaviors import ButtonBehavior
from kivy.uix.recycleview.layout import LayoutSelectionBehavior
from kivy.uix.boxlayout import BoxLayout
from .NavigableBehavior import NavigableBehavior
from .Settings import settings
Builder.load_string('''
<PlaylistContentItem>:
id: item
size_hint: 1, 0.1
padding: (16, 8)
spacing: 5
PlaylistContentImage:
size_hint: None, 1
pos_hint: {{'left': 0}}
allow_stretch: True
source: item.thumburi
BoxLayout:
id: labels
size_hint: .9, 1
pos_hint: {{'right': 0}}
padding: (8, 4)
orientation: 'vertical'
Label:
text: item.title
color: (1, 1, 1, 1)
font_name: '{0.font}'
font_size: {0.font_size} * 1.5
text_size: labels.width, None
size: self.texture_size
halign: 'left'
shorten: True
Label:
text: item.type + item.duration_formatted + ' ' + ', '.join(('"' + tag + '"' for tag in item.tags))
font_name: '{0.font}'
font_size: {0.font_size}
text_size: labels.width, None
size: self.texture_size
halign: 'left'
shorten: True
<PlaylistContentPane>:
viewclass: 'PlaylistContentItem'
PlaylistContentLayout:
default_size: None, dp(56)
default_size_hint: 1, None
size_hint_y: None
height: self.minimum_height
orientation: 'vertical'
'''.format(settings))
class PlaylistContentImage(AsyncImage):
# Workaround for an AsyncImage bug
def _on_source_load(self, *args, **kwargs):
if self._coreimage:
super(PlaylistContentImage, self)._on_source_load(*args, **kwargs)
class PlaylistContentLayout(RecycleBoxLayout): pass
class PlaylistContentItem(NavigableBehavior, ButtonBehavior, RecycleDataViewBehavior, BoxLayout):
title = StringProperty()
thumburi = StringProperty()
type = StringProperty()
duration = NumericProperty()
duration_formatted = StringProperty()
tags = ListProperty()
uri = StringProperty()
mediaplayer = ObjectProperty()
index = NumericProperty()
def refresh_view_attrs(self, rv, index, data):
val = super(PlaylistContentItem, self).refresh_view_attrs(rv, index, data)
self.mediaplayer.map_playlistcontent[self.index] = self
return val
def on_release(self):
self.mediaplayer.play_media(self.index)
class PlaylistContentPane(RecycleView):
def __init__(self, mediaplayer, **kwargs):
super(PlaylistContentPane, self).__init__(**kwargs)
self.mediaplayer = mediaplayer
self.data = []
def get_index_from_id(self, _id):
for index, item in enumerate(self.data):
if item['_id'] == _id: return index
return -1
def data_sort(self):
if self.mediaplayer.shuffle:
random.shuffle(self.data)
else:
if self.mediaplayer.current_playlist == 'special_all_media':
self.data = sorted(self.data, key = lambda m: m['title'].lower())
else:
playlist = self.mediaplayer.meteor.find_one('mediaplaylists', selector = {'_id': self.mediaplayer.current_playlist})
contents = playlist.get('contents')
self.data = sorted(self.data, key = lambda m: contents.index(m['_id']))
for i, d in enumerate(self.data): d['index'] = i
def add_data_item(self, new_data):
new_data['uri'] = 'http://{}/media/static/{}'.format(self.mediaplayer.server, escape_url(new_data.get('location')))
if new_data.get('thumbnail'):
new_data['thumburi'] = 'http://{}/media/static/{}'.format(self.mediaplayer.server, escape_url(new_data.get('thumbnail')))
else: new_data['thumburi'] = ''
if new_data.get('type') in ('audio', 'video'):
dur = new_data['duration']
m, s = divmod(dur, 60)
h, m = divmod(m, 60)
new_data['duration_formatted'] = ' - %d:%02d:%02d' % (h, m, s)
else:
new_data['duration'] = 0
new_data['duration_formatted'] = ''
new_data['mediaplayer'] = self.mediaplayer
self.data.append(new_data)
def update_from_playlist(self):
self.data = []
if self.mediaplayer.current_playlist == 'special_all_media':
for item in self.mediaplayer.meteor.find('media'): self.add_data_item(dict(item))
else:
playlist = self.mediaplayer.meteor.find_one('mediaplaylists', selector = {'_id': self.mediaplayer.current_playlist})
contents = playlist.get('contents')
for _id in contents:
item = self.mediaplayer.meteor.find_one('media', selector = {'_id': _id})
self.add_data_item(dict(item))
self.data_sort()
self.refresh_from_data()
def added(self, _id, fields):
if _id == self.mediaplayer.current_playlist or self.mediaplayer.current_playlist == 'special_all_media':
new_data = fields
new_data['_id'] = _id
self.add_data_item(new_data)
self.data_sort()
self.refresh_from_data()
def changed(self, _id, fields):
index = self.get_index_from_id(_id)
if not index == -1:
self.data[index].update(fields)
self.data_sort()
self.refresh_from_data()
def removed(self, _id):
index = self.get_index_from_id(_id)
if not index == -1:
self.data.remove(self.data[index])
self.refresh_from_data()
|
py | b4144ce4f1bed14542247a9b7bcfd0186f93fd6c | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""Python2.6+ compatible logging setup module to be used as point-of-entry to a
program."""
import os
import optparse
import logging
import logging.config
import example_package.example_module
# We imported example_module before setting logging configuration.
# This can cause issues, see the module for explanation.
def run():
load_logging_conf('logging.conf')
# All loggers MUST be started AFTER this point, including for imported modules!
# Start the logger for this module.
log = logging.getLogger(__name__)
opts, args = parse_cli_args()
set_debug_verbosity(opts.verbose)
log.debug('test debug message')
log.info('test info message')
log.warn('test warn message')
log.error('test error message')
log.critical('test critical message')
example_package.example_module.do_stuff()
def load_logging_conf(log_cfg_filename):
"""Load logging configuration at '<src_dir>/../logs/<filename>' (os agnostic)."""
src_dir = os.path.dirname(os.path.realpath(__file__))
cfg_file_path = os.sep.join((src_dir, '..', 'logs', log_cfg_filename))
logging.config.fileConfig(cfg_file_path)
def parse_cli_args():
"""Parse command line args. Additional options can be added."""
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', dest="verbose",
action='count', default=0,
help='increase debug logging level')
return parser.parse_args()
def set_debug_verbosity(verbosity_counter):
"""Deactivates the debug handler if verbosity_counter is 0, else sets
the logging level appropriately."""
debug_handler = logging.root.handlers[1]
if verbosity_counter == 0:
logging.root.removeHandler(debug_handler)
elif verbosity_counter == 1:
debug_handler.level = logging.INFO
elif verbosity_counter == 2:
debug_handler.level = logging.DEBUG
else:
debug_handler.level = logging.NOTSET
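# Editor's sketch, not part of the original module: a hypothetical
# 'logging.conf' matching the assumptions above -- the root logger owns two
# handlers, and the second one (index 1, manipulated in set_debug_verbosity)
# is the debug/console handler:
#
#   [loggers]
#   keys=root
#   [handlers]
#   keys=fileHandler,consoleHandler
#   [formatters]
#   keys=simple
#   [logger_root]
#   level=NOTSET
#   handlers=fileHandler,consoleHandler
#   [handler_fileHandler]
#   class=FileHandler
#   level=INFO
#   formatter=simple
#   args=('../logs/example.log',)
#   [handler_consoleHandler]
#   class=StreamHandler
#   level=WARNING
#   formatter=simple
#   args=(sys.stderr,)
#   [formatter_simple]
#   format=%(asctime)s %(name)s %(levelname)s %(message)s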
if __name__ == '__main__':
run()
|
py | b4144d07ac6f743b6b96199722e43854316f8a59 | import datetime
import os
import tempfile
import flask_restful
import requests
from flask import Blueprint, request
from flask import abort
from flask import flash
from flask import redirect
from flask import render_template
from flask import session
from flask_restful_swagger import swagger
from werkzeug.utils import secure_filename
from SpiderManager.app import db, api, agent, app
from SpiderManager.app.spider.model import JobInstance, Project, JobExecution, SpiderInstance, JobRunType
api_spider_bp = Blueprint('spider', __name__)
'''
========= api =========
'''
class ProjectCtrl(flask_restful.Resource):
@swagger.operation(
summary='list projects',
parameters=[])
def get(self):
return [project.to_dict() for project in Project.query.all()]
@swagger.operation(
summary='add project',
parameters=[{
"name": "project_name",
"description": "project name",
"required": True,
"paramType": "form",
"dataType": 'string'
}])
def post(self):
project_name = request.form['project_name']
project = Project()
project.project_name = project_name
db.session.add(project)
db.session.commit()
return project.to_dict()
class SpiderCtrl(flask_restful.Resource):
@swagger.operation(
summary='list spiders',
parameters=[{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
}])
def get(self, project_id):
project = Project.find_project_by_id(project_id)
return [spider_instance.to_dict() for spider_instance in
SpiderInstance.query.filter_by(project_id=project_id).all()]
class SpiderDetailCtrl(flask_restful.Resource):
@swagger.operation(
summary='spider detail',
parameters=[{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
}, {
"name": "spider_id",
"description": "spider instance id",
"required": True,
"paramType": "path",
"dataType": 'int'
}])
def get(self, project_id, spider_id):
spider_instance = SpiderInstance.query.filter_by(project_id=project_id, id=spider_id).first()
return spider_instance.to_dict() if spider_instance else abort(404)
@swagger.operation(
summary='run spider',
parameters=[{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
}, {
"name": "spider_id",
"description": "spider instance id",
"required": True,
"paramType": "path",
"dataType": 'int'
}, {
"name": "spider_arguments",
"description": "spider arguments",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "priority",
"description": "LOW: -1, NORMAL: 0, HIGH: 1, HIGHEST: 2",
"required": False,
"paramType": "form",
"dataType": 'int'
}, {
"name": "tags",
"description": "spider tags",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "desc",
"description": "spider desc",
"required": False,
"paramType": "form",
"dataType": 'string'
}])
def put(self, project_id, spider_id):
spider_instance = SpiderInstance.query.filter_by(project_id=project_id, id=spider_id).first()
if not spider_instance: abort(404)
job_instance = JobInstance()
job_instance.spider_name = spider_instance.spider_name
job_instance.project_id = project_id
job_instance.spider_arguments = request.form.get('spider_arguments')
job_instance.desc = request.form.get('desc')
job_instance.tags = request.form.get('tags')
job_instance.run_type = JobRunType.ONETIME
job_instance.priority = request.form.get('priority', 0)
job_instance.enabled = -1
db.session.add(job_instance)
db.session.commit()
agent.start_spider(job_instance)
return True
JOB_INSTANCE_FIELDS = [column.name for column in JobInstance.__table__.columns]
JOB_INSTANCE_FIELDS.remove('id')
JOB_INSTANCE_FIELDS.remove('date_created')
JOB_INSTANCE_FIELDS.remove('date_modified')
class JobCtrl(flask_restful.Resource):
@swagger.operation(
summary='list job instance',
parameters=[{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
}])
def get(self, project_id):
return [job_instance.to_dict() for job_instance in
JobInstance.query.filter_by(run_type="periodic", project_id=project_id).all()]
@swagger.operation(
summary='add job instance',
notes="json keys: <br>" + "<br>".join(JOB_INSTANCE_FIELDS),
parameters=[{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
}, {
"name": "spider_name",
"description": "spider_name",
"required": True,
"paramType": "form",
"dataType": 'string'
}, {
"name": "spider_arguments",
"description": "spider_arguments, split by ','",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "desc",
"description": "desc",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "tags",
"description": "tags , split by ','",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "run_type",
"description": "onetime/periodic",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "priority",
"description": "LOW: -1, NORMAL: 0, HIGH: 1, HIGHEST: 2",
"required": False,
"paramType": "form",
"dataType": 'int'
}, {
"name": "cron_minutes",
"description": "@see http://apscheduler.readthedocs.io/en/latest/modules/triggers/cron.html",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_hour",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_day_of_month",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_day_of_week",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_month",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}])
def post(self, project_id):
post_data = request.form
if post_data:
job_instance = JobInstance()
job_instance.spider_name = post_data['spider_name']
job_instance.project_id = project_id
job_instance.spider_arguments = post_data.get('spider_arguments')
job_instance.desc = post_data.get('desc')
job_instance.tags = post_data.get('tags')
job_instance.run_type = post_data['run_type']
job_instance.priority = post_data.get('priority', 0)
if job_instance.run_type == "periodic":
job_instance.cron_minutes = post_data.get('cron_minutes') or '0'
job_instance.cron_hour = post_data.get('cron_hour') or '*'
job_instance.cron_day_of_month = post_data.get('cron_day_of_month') or '*'
job_instance.cron_day_of_week = post_data.get('cron_day_of_week') or '*'
job_instance.cron_month = post_data.get('cron_month') or '*'
db.session.add(job_instance)
db.session.commit()
return True
class JobDetailCtrl(flask_restful.Resource):
@swagger.operation(
summary='update job instance',
notes="json keys: <br>" + "<br>".join(JOB_INSTANCE_FIELDS),
parameters=[{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
}, {
"name": "job_id",
"description": "job instance id",
"required": True,
"paramType": "path",
"dataType": 'int'
}, {
"name": "spider_name",
"description": "spider_name",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "spider_arguments",
"description": "spider_arguments, split by ','",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "desc",
"description": "desc",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "tags",
"description": "tags , split by ','",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "run_type",
"description": "onetime/periodic",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "priority",
"description": "LOW: -1, NORMAL: 0, HIGH: 1, HIGHEST: 2",
"required": False,
"paramType": "form",
"dataType": 'int'
}, {
"name": "cron_minutes",
"description": "@see http://apscheduler.readthedocs.io/en/latest/modules/triggers/cron.html",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_hour",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_day_of_month",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_day_of_week",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "cron_month",
"description": "",
"required": False,
"paramType": "form",
"dataType": 'string'
}, {
"name": "enabled",
"description": "-1 / 0, default: 0",
"required": False,
"paramType": "form",
"dataType": 'int'
}, {
"name": "status",
"description": "if set to 'run' will run the job",
"required": False,
"paramType": "form",
"dataType": 'int'
}
])
def put(self, project_id, job_id):
post_data = request.form
if post_data:
job_instance = JobInstance.query.filter_by(project_id=project_id, id=job_id).first()
if not job_instance: abort(404)
job_instance.spider_arguments = post_data.get('spider_arguments') or job_instance.spider_arguments
job_instance.priority = post_data.get('priority') or job_instance.priority
job_instance.enabled = post_data.get('enabled', 0)
job_instance.cron_minutes = post_data.get('cron_minutes') or job_instance.cron_minutes
job_instance.cron_hour = post_data.get('cron_hour') or job_instance.cron_hour
job_instance.cron_day_of_month = post_data.get('cron_day_of_month') or job_instance.cron_day_of_month
job_instance.cron_day_of_week = post_data.get('cron_day_of_week') or job_instance.cron_day_of_week
job_instance.cron_month = post_data.get('cron_month') or job_instance.cron_month
job_instance.desc = post_data.get('desc', 0) or job_instance.desc
job_instance.tags = post_data.get('tags', 0) or job_instance.tags
db.session.commit()
if post_data.get('status') == 'run':
agent.start_spider(job_instance)
return True
class JobExecutionCtrl(flask_restful.Resource):
@swagger.operation(
summary='list job execution status',
parameters=[{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
}])
def get(self, project_id):
return JobExecution.list_jobs(project_id)
class JobExecutionDetailCtrl(flask_restful.Resource):
@swagger.operation(
summary='stop job',
notes='',
parameters=[
{
"name": "project_id",
"description": "project id",
"required": True,
"paramType": "path",
"dataType": 'int'
},
{
"name": "job_exec_id",
"description": "job_execution_id",
"required": True,
"paramType": "path",
"dataType": 'string'
}
])
def put(self, project_id, job_exec_id):
job_execution = JobExecution.query.filter_by(project_id=project_id, id=job_exec_id).first()
if job_execution:
agent.cancel_spider(job_execution)
return True
api.add_resource(ProjectCtrl, "/api/projects")
api.add_resource(SpiderCtrl, "/api/projects/<project_id>/spiders")
api.add_resource(SpiderDetailCtrl, "/api/projects/<project_id>/spiders/<spider_id>")
api.add_resource(JobCtrl, "/api/projects/<project_id>/jobs")
api.add_resource(JobDetailCtrl, "/api/projects/<project_id>/jobs/<job_id>")
api.add_resource(JobExecutionCtrl, "/api/projects/<project_id>/jobexecs")
api.add_resource(JobExecutionDetailCtrl, "/api/projects/<project_id>/jobexecs/<job_exec_id>")
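# Editor's sketch (not in the original source): the resources registered above
# can be exercised with curl; host and port are hypothetical and depend on how
# the Flask app is served:
#
#   curl http://localhost:5000/api/projects
#   curl -X POST -d 'project_name=demo' http://localhost:5000/api/projects
#   curl -X PUT -d 'priority=1' http://localhost:5000/api/projects/1/spiders/1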
'''
========= Router =========
'''
@app.before_request
def intercept_no_project():
if request.path.find('/project//') > -1:
flash("create project first")
return redirect("/project/manage", code=302)
@app.context_processor
def inject_common():
return dict(now=datetime.datetime.now(),
servers=agent.servers)
@app.context_processor
def inject_project():
project_context = {}
project_context['project_list'] = Project.query.all()
if project_context['project_list'] and (not session.get('project_id')):
project = Project.query.first()
session['project_id'] = project.id
if session.get('project_id'):
project_context['project'] = Project.find_project_by_id(session['project_id'])
project_context['spider_list'] = [spider_instance.to_dict() for spider_instance in
SpiderInstance.query.filter_by(project_id=session['project_id']).all()]
else:
project_context['project'] = {}
return project_context
@app.context_processor
def utility_processor():
def timedelta(end_time, start_time):
        '''
        Return a human-readable time delta between two times.
        :param end_time: end time, as datetime or '%Y-%m-%d %H:%M:%S' string
        :param start_time: start time, as datetime or '%Y-%m-%d %H:%M:%S' string
        :return: formatted duration such as '5 m' or '1 h 5 m'
        '''
if not end_time or not start_time:
return ''
if type(end_time) == str:
end_time = datetime.datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S')
if type(start_time) == str:
start_time = datetime.datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')
total_seconds = (end_time - start_time).total_seconds()
return readable_time(total_seconds)
def readable_time(total_seconds):
if not total_seconds:
return '-'
if total_seconds < 60:
return '%s s' % total_seconds
if total_seconds < 3600:
return '%s m' % int(total_seconds / 60)
return '%s h %s m' % (int(total_seconds / 3600), int((total_seconds % 3600) / 60))
return dict(timedelta=timedelta, readable_time=readable_time)
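# Editor's note (not in the original source): sample outputs of the helpers
# registered above:
#   readable_time(42)   -> '42 s'
#   readable_time(300)  -> '5 m'
#   readable_time(3900) -> '1 h 5 m'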
@app.route("/")
def index():
project = Project.query.first()
if project:
return redirect("/project/%s/job/dashboard" % project.id, code=302)
return redirect("/project/manage", code=302)
@app.route("/project/<project_id>")
def project_index(project_id):
session['project_id'] = project_id
return redirect("/project/%s/job/dashboard" % project_id, code=302)
@app.route("/project/create", methods=['post'])
def project_create():
project_name = request.form['project_name']
project = Project()
project.project_name = project_name
db.session.add(project)
db.session.commit()
return redirect("/project/%s/spider/deploy" % project.id, code=302)
@app.route("/project/<project_id>/delete")
def project_delete(project_id):
project = Project.find_project_by_id(project_id)
agent.delete_project(project)
db.session.delete(project)
db.session.commit()
return redirect("/project/manage", code=302)
@app.route("/project/manage")
def project_manage():
return render_template("project_manage.html")
@app.route("/project/<project_id>/job/dashboard")
def job_dashboard(project_id):
return render_template("job_dashboard.html", job_status=JobExecution.list_jobs(project_id))
@app.route("/project/<project_id>/job/periodic")
def job_periodic(project_id):
project = Project.find_project_by_id(project_id)
job_instance_list = [job_instance.to_dict() for job_instance in
JobInstance.query.filter_by(run_type="periodic", project_id=project_id).all()]
return render_template("job_periodic.html",
job_instance_list=job_instance_list)
@app.route("/project/<project_id>/job/add", methods=['post'])
def job_add(project_id):
project = Project.find_project_by_id(project_id)
job_instance = JobInstance()
job_instance.spider_name = request.form['spider_name']
job_instance.project_id = project_id
job_instance.spider_arguments = request.form['spider_arguments']
job_instance.priority = request.form.get('priority', 0)
job_instance.run_type = request.form['run_type']
    # daemon chosen manually (anything other than 'auto')
if request.form['daemon'] != 'auto':
spider_args = []
if request.form['spider_arguments']:
spider_args = request.form['spider_arguments'].split(",")
spider_args.append("daemon={}".format(request.form['daemon']))
job_instance.spider_arguments = ','.join(spider_args)
if job_instance.run_type == JobRunType.ONETIME:
job_instance.enabled = -1
db.session.add(job_instance)
db.session.commit()
agent.start_spider(job_instance)
if job_instance.run_type == JobRunType.PERIODIC:
job_instance.cron_minutes = request.form.get('cron_minutes') or '0'
job_instance.cron_hour = request.form.get('cron_hour') or '*'
job_instance.cron_day_of_month = request.form.get('cron_day_of_month') or '*'
job_instance.cron_day_of_week = request.form.get('cron_day_of_week') or '*'
job_instance.cron_month = request.form.get('cron_month') or '*'
# set cron exp manually
if request.form.get('cron_exp'):
job_instance.cron_minutes, job_instance.cron_hour, job_instance.cron_day_of_month, job_instance.cron_day_of_week, job_instance.cron_month = \
request.form['cron_exp'].split(' ')
db.session.add(job_instance)
db.session.commit()
return redirect(request.referrer, code=302)
@app.route("/project/<project_id>/jobexecs/<job_exec_id>/stop")
def job_stop(project_id, job_exec_id):
job_execution = JobExecution.query.filter_by(project_id=project_id, id=job_exec_id).first()
agent.cancel_spider(job_execution)
return redirect(request.referrer, code=302)
@app.route("/project/<project_id>/jobexecs/<job_exec_id>/log")
def job_log(project_id, job_exec_id):
job_execution = JobExecution.query.filter_by(project_id=project_id, id=job_exec_id).first()
res = requests.get(agent.log_url(job_execution))
res.encoding = 'utf8'
raw = res.text
return render_template("job_log.html", log_lines=raw.split('\n'))
@app.route("/project/<project_id>/job/<job_instance_id>/run")
def job_run(project_id, job_instance_id):
job_instance = JobInstance.query.filter_by(project_id=project_id, id=job_instance_id).first()
agent.start_spider(job_instance)
return redirect(request.referrer, code=302)
@app.route("/project/<project_id>/job/<job_instance_id>/remove")
def job_remove(project_id, job_instance_id):
job_instance = JobInstance.query.filter_by(project_id=project_id, id=job_instance_id).first()
db.session.delete(job_instance)
db.session.commit()
return redirect(request.referrer, code=302)
@app.route("/project/<project_id>/job/<job_instance_id>/switch")
def job_switch(project_id, job_instance_id):
job_instance = JobInstance.query.filter_by(project_id=project_id, id=job_instance_id).first()
job_instance.enabled = -1 if job_instance.enabled == 0 else 0
db.session.commit()
return redirect(request.referrer, code=302)
@app.route("/project/<project_id>/spider/dashboard")
def spider_dashboard(project_id):
spider_instance_list = SpiderInstance.list_spiders(project_id)
return render_template("spider_dashboard.html",
spider_instance_list=spider_instance_list)
@app.route("/project/<project_id>/spider/deploy")
def spider_deploy(project_id):
project = Project.find_project_by_id(project_id)
return render_template("spider_deploy.html")
@app.route("/project/<project_id>/spider/upload", methods=['post'])
def spider_egg_upload(project_id):
project = Project.find_project_by_id(project_id)
if 'file' not in request.files:
flash('No file part')
return redirect(request.referrer)
file = request.files['file']
    # if the user does not select a file, the browser may also
    # submit an empty part without a filename
if file.filename == '':
flash('No selected file')
return redirect(request.referrer)
if file:
filename = secure_filename(file.filename)
dst = os.path.join(tempfile.gettempdir(), filename)
file.save(dst)
agent.deploy(project, dst)
flash('deploy success!')
return redirect(request.referrer)
@app.route("/project/<project_id>/project/stats")
def project_stats(project_id):
project = Project.find_project_by_id(project_id)
run_stats = JobExecution.list_run_stats_by_hours(project_id)
return render_template("project_stats.html", run_stats=run_stats)
@app.route("/project/<project_id>/server/stats")
def service_stats(project_id):
project = Project.find_project_by_id(project_id)
run_stats = JobExecution.list_run_stats_by_hours(project_id)
return render_template("server_stats.html", run_stats=run_stats)
|
py | b4144d5ab1ba655370eb6e7d7f87059a1d0f87e6 | import logging
import copy
import torch
import numpy as np
from lib.pysixd.pose_error import re, te
from core.utils.pose_utils import quat2mat_torch
from core.utils.rot_reps import rot6d_to_mat_batch
from core.utils import lie_algebra, quaternion_lf
from .net_factory import NECKS, HEADS
logger = logging.getLogger(__name__)
def get_xyz_doublemask_region_out_dim(cfg):
net_cfg = cfg.MODEL.POSE_NET
g_head_cfg = net_cfg.GEO_HEAD
loss_cfg = net_cfg.LOSS_CFG
xyz_loss_type = loss_cfg.XYZ_LOSS_TYPE
if xyz_loss_type in ["MSE", "L1", "L2", "SmoothL1"]:
xyz_out_dim = 3
elif xyz_loss_type in ["CE_coor", "CE"]:
xyz_out_dim = 3 * (g_head_cfg.XYZ_BIN + 1)
else:
raise NotImplementedError(f"unknown xyz loss type: {xyz_loss_type}")
mask_loss_type = loss_cfg.MASK_LOSS_TYPE
if mask_loss_type in ["L1", "BCE", "RW_BCE", "dice"]:
mask_out_dim = 2
elif mask_loss_type in ["CE"]:
mask_out_dim = 4
else:
raise NotImplementedError(f"unknown mask loss type: {mask_loss_type}")
region_out_dim = g_head_cfg.NUM_REGIONS + 1
# at least 2 regions (with bg, at least 3 regions)
assert region_out_dim > 2, region_out_dim
return xyz_out_dim, mask_out_dim, region_out_dim
def get_xyz_mask_region_out_dim(cfg):
net_cfg = cfg.MODEL.POSE_NET
g_head_cfg = net_cfg.GEO_HEAD
loss_cfg = net_cfg.LOSS_CFG
xyz_loss_type = loss_cfg.XYZ_LOSS_TYPE
if xyz_loss_type in ["MSE", "L1", "L2", "SmoothL1"]:
xyz_out_dim = 3
elif xyz_loss_type in ["CE_coor", "CE"]:
xyz_out_dim = 3 * (g_head_cfg.XYZ_BIN + 1)
else:
raise NotImplementedError(f"unknown xyz loss type: {xyz_loss_type}")
mask_loss_type = loss_cfg.MASK_LOSS_TYPE
if mask_loss_type in ["L1", "BCE", "RW_BCE", "dice"]:
mask_out_dim = 1
elif mask_loss_type in ["CE"]:
mask_out_dim = 2
else:
raise NotImplementedError(f"unknown mask loss type: {mask_loss_type}")
region_out_dim = g_head_cfg.NUM_REGIONS + 1
# at least 2 regions (with bg, at least 3 regions)
assert region_out_dim > 2, region_out_dim
return xyz_out_dim, mask_out_dim, region_out_dim
def get_xyz_mask_out_dim(cfg):
net_cfg = cfg.MODEL.POSE_NET
g_head_cfg = net_cfg.GEO_HEAD
loss_cfg = net_cfg.LOSS_CFG
xyz_loss_type = loss_cfg.XYZ_LOSS_TYPE
mask_loss_type = loss_cfg.MASK_LOSS_TYPE
if xyz_loss_type in ["MSE", "L1", "L2", "SmoothL1"]:
r_out_dim = 3
elif xyz_loss_type in ["CE_coor", "CE"]:
r_out_dim = 3 * (g_head_cfg.XYZ_BIN + 1)
else:
raise NotImplementedError(f"unknown xyz loss type: {xyz_loss_type}")
if mask_loss_type in ["L1", "BCE", "RW_BCE", "dice"]:
mask_out_dim = 1
elif mask_loss_type in ["CE"]:
mask_out_dim = 2
else:
raise NotImplementedError(f"unknown mask loss type: {mask_loss_type}")
return r_out_dim, mask_out_dim
def get_neck(cfg):
net_cfg = cfg.MODEL.POSE_NET
neck_cfg = net_cfg.NECK
params_lr_list = []
if neck_cfg.ENABLED:
neck_init_cfg = copy.deepcopy(neck_cfg.INIT_CFG)
neck_type = neck_init_cfg.pop("type")
neck = NECKS[neck_type](**neck_init_cfg)
if neck_cfg.FREEZE:
for param in neck.parameters():
with torch.no_grad():
param.requires_grad = False
else:
params_lr_list.append(
{
"params": filter(lambda p: p.requires_grad, neck.parameters()),
"lr": float(cfg.SOLVER.BASE_LR) * neck_cfg.LR_MULT,
}
)
else:
neck = None
return neck, params_lr_list
def get_geo_head(cfg):
net_cfg = cfg.MODEL.POSE_NET
geo_head_cfg = net_cfg.GEO_HEAD
params_lr_list = []
geo_head_init_cfg = copy.deepcopy(geo_head_cfg.INIT_CFG)
geo_head_type = geo_head_init_cfg.pop("type")
xyz_num_classes = net_cfg.NUM_CLASSES if geo_head_cfg.XYZ_CLASS_AWARE else 1
mask_num_classes = net_cfg.NUM_CLASSES if geo_head_cfg.MASK_CLASS_AWARE else 1
if geo_head_cfg.NUM_REGIONS <= 1:
xyz_dim, mask_dim = get_xyz_mask_out_dim(cfg)
geo_head_init_cfg.update(
xyz_num_classes=xyz_num_classes,
mask_num_classes=mask_num_classes,
xyz_out_dim=xyz_dim,
mask_out_dim=mask_dim,
)
elif "DoubleMask" in geo_head_type:
xyz_dim, mask_dim, region_dim = get_xyz_doublemask_region_out_dim(cfg)
region_num_classes = net_cfg.NUM_CLASSES if geo_head_cfg.REGION_CLASS_AWARE else 1
geo_head_init_cfg.update(
xyz_num_classes=xyz_num_classes,
mask_num_classes=mask_num_classes,
region_num_classes=region_num_classes,
xyz_out_dim=xyz_dim,
mask_out_dim=mask_dim,
region_out_dim=region_dim,
)
else:
xyz_dim, mask_dim, region_dim = get_xyz_mask_region_out_dim(cfg)
region_num_classes = net_cfg.NUM_CLASSES if geo_head_cfg.REGION_CLASS_AWARE else 1
geo_head_init_cfg.update(
xyz_num_classes=xyz_num_classes,
mask_num_classes=mask_num_classes,
region_num_classes=region_num_classes,
xyz_out_dim=xyz_dim,
mask_out_dim=mask_dim,
region_out_dim=region_dim,
)
geo_head = HEADS[geo_head_type](**geo_head_init_cfg)
if geo_head_cfg.FREEZE:
for param in geo_head.parameters():
with torch.no_grad():
param.requires_grad = False
else:
params_lr_list.append(
{
"params": filter(lambda p: p.requires_grad, geo_head.parameters()),
"lr": float(cfg.SOLVER.BASE_LR) * geo_head_cfg.LR_MULT,
}
)
return geo_head, params_lr_list
def get_pnp_net(cfg):
net_cfg = cfg.MODEL.POSE_NET
g_head_cfg = net_cfg.GEO_HEAD
pnp_net_cfg = net_cfg.PNP_NET
loss_cfg = net_cfg.LOSS_CFG
xyz_dim, mask_dim, region_dim = get_xyz_mask_region_out_dim(cfg)
if loss_cfg.XYZ_LOSS_TYPE in ["CE_coor", "CE"]:
pnp_net_in_channel = xyz_dim - 3 # for bin xyz, no bg channel
else:
pnp_net_in_channel = xyz_dim
if pnp_net_cfg.WITH_2D_COORD:
pnp_net_in_channel += 2
if pnp_net_cfg.REGION_ATTENTION:
pnp_net_in_channel += g_head_cfg.NUM_REGIONS
if pnp_net_cfg.MASK_ATTENTION in ["concat"]: # do not add dim for none/mul
pnp_net_in_channel += 1
if pnp_net_cfg.ROT_TYPE in ["allo_quat", "ego_quat"]:
rot_dim = 4
elif pnp_net_cfg.ROT_TYPE in [
"allo_log_quat",
"ego_log_quat",
"allo_lie_vec",
"ego_lie_vec",
]:
rot_dim = 3
elif pnp_net_cfg.ROT_TYPE in ["allo_rot6d", "ego_rot6d"]:
rot_dim = 6
else:
raise ValueError(f"Unknown ROT_TYPE: {pnp_net_cfg.ROT_TYPE}")
pnp_net_init_cfg = copy.deepcopy(pnp_net_cfg.INIT_CFG)
pnp_head_type = pnp_net_init_cfg.pop("type")
if pnp_head_type in ["ConvPnPNet", "ConvPnPNetCls"]:
pnp_net_init_cfg.update(
nIn=pnp_net_in_channel,
rot_dim=rot_dim,
num_regions=g_head_cfg.NUM_REGIONS,
mask_attention_type=pnp_net_cfg.MASK_ATTENTION,
)
elif pnp_head_type == "PointPnPNet":
pnp_net_init_cfg.update(
nIn=pnp_net_in_channel,
rot_dim=rot_dim,
num_regions=g_head_cfg.NUM_REGIONS,
)
elif pnp_head_type == "SimplePointPnPNet":
pnp_net_init_cfg.update(
nIn=pnp_net_in_channel,
rot_dim=rot_dim,
mask_attention_type=pnp_net_cfg.MASK_ATTENTION,
# num_regions=g_head_cfg.NUM_REGIONS,
)
else:
raise ValueError(f"Unknown pnp head type: {pnp_head_type}")
pnp_net = HEADS[pnp_head_type](**pnp_net_init_cfg)
params_lr_list = []
if pnp_net_cfg.FREEZE:
for param in pnp_net.parameters():
with torch.no_grad():
param.requires_grad = False
else:
if pnp_net_cfg.TRAIN_R_ONLY:
logger.info("Train fc_r only...")
for name, param in pnp_net.named_parameters():
if "fc_r" not in name:
with torch.no_grad():
param.requires_grad = False
params_lr_list.append(
{
"params": filter(lambda p: p.requires_grad, pnp_net.parameters()),
"lr": float(cfg.SOLVER.BASE_LR) * pnp_net_cfg.LR_MULT,
}
)
return pnp_net, params_lr_list
def get_pnp_net_no_region(cfg):
net_cfg = cfg.MODEL.POSE_NET
g_head_cfg = net_cfg.GEO_HEAD
pnp_net_cfg = net_cfg.PNP_NET
loss_cfg = net_cfg.LOSS_CFG
xyz_dim, mask_dim = get_xyz_mask_out_dim(cfg)
if loss_cfg.XYZ_LOSS_TYPE in ["CE_coor", "CE"]:
pnp_net_in_channel = xyz_dim - 3 # for bin xyz, no bg channel
else:
pnp_net_in_channel = xyz_dim
if pnp_net_cfg.WITH_2D_COORD:
pnp_net_in_channel += 2
if pnp_net_cfg.MASK_ATTENTION in ["concat"]: # do not add dim for none/mul
pnp_net_in_channel += 1
if pnp_net_cfg.ROT_TYPE in ["allo_quat", "ego_quat"]:
rot_dim = 4
elif pnp_net_cfg.ROT_TYPE in [
"allo_log_quat",
"ego_log_quat",
"allo_lie_vec",
"ego_lie_vec",
]:
rot_dim = 3
elif pnp_net_cfg.ROT_TYPE in ["allo_rot6d", "ego_rot6d"]:
rot_dim = 6
else:
raise ValueError(f"Unknown ROT_TYPE: {pnp_net_cfg.ROT_TYPE}")
pnp_net_init_cfg = copy.deepcopy(pnp_net_cfg.INIT_CFG)
pnp_head_type = pnp_net_init_cfg.pop("type")
if pnp_head_type == "ConvPnPNetNoRegion":
pnp_net_init_cfg.update(
nIn=pnp_net_in_channel,
rot_dim=rot_dim,
mask_attention_type=pnp_net_cfg.MASK_ATTENTION,
)
elif pnp_head_type == "PointPnPNetNoRegion":
pnp_net_init_cfg.update(nIn=pnp_net_in_channel, rot_dim=rot_dim)
elif pnp_head_type == "SimplePointPnPNetNoRegion":
pnp_net_init_cfg.update(
nIn=pnp_net_in_channel,
rot_dim=rot_dim,
mask_attention_type=pnp_net_cfg.MASK_ATTENTION,
)
else:
raise ValueError(f"Unknown pnp head type: {pnp_head_type}")
pnp_net = HEADS[pnp_head_type](**pnp_net_init_cfg)
params_lr_list = []
if pnp_net_cfg.FREEZE:
for param in pnp_net.parameters():
with torch.no_grad():
param.requires_grad = False
else:
params_lr_list.append(
{
"params": filter(lambda p: p.requires_grad, pnp_net.parameters()),
"lr": float(cfg.SOLVER.BASE_LR) * pnp_net_cfg.LR_MULT,
}
)
return pnp_net, params_lr_list
def get_rot_mat(rot, rot_type):
if rot_type in ["ego_quat", "allo_quat"]:
rot_m = quat2mat_torch(rot)
elif rot_type in ["ego_log_quat", "allo_log_quat"]:
# from latentfusion (lf)
rot_m = quat2mat_torch(quaternion_lf.qexp(rot))
elif rot_type in ["ego_lie_vec", "allo_lie_vec"]:
rot_m = lie_algebra.lie_vec_to_rot(rot)
elif rot_type in ["ego_rot6d", "allo_rot6d"]:
rot_m = rot6d_to_mat_batch(rot)
else:
raise ValueError(f"Wrong pred_rot type: {rot_type}")
return rot_m
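def _demo_get_rot_mat():
    # Editor's sketch, not part of the original module: the expected call
    # pattern for get_rot_mat with the 6d parameterization. It assumes
    # rot6d_to_mat_batch accepts a (B, 6) tensor and returns (B, 3, 3)
    # rotation matrices; the batch size of 4 is arbitrary.
    rot6d = torch.randn(4, 6)
    return get_rot_mat(rot6d, "ego_rot6d")  # expected shape: (4, 3, 3)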
def get_mask_prob(pred_mask, mask_loss_type):
# (b,c,h,w)
# output: (b, 1, h, w)
bs, c, h, w = pred_mask.shape
if mask_loss_type == "L1":
assert c == 1, c
mask_max = torch.max(pred_mask.view(bs, -1), dim=-1)[0].view(bs, 1, 1, 1)
mask_min = torch.min(pred_mask.view(bs, -1), dim=-1)[0].view(bs, 1, 1, 1)
# [0, 1]
mask_prob = (pred_mask - mask_min) / (mask_max - mask_min) # + 1e-5)
elif mask_loss_type in ["BCE", "RW_BCE", "dice"]:
assert c == 1, c
mask_prob = torch.sigmoid(pred_mask)
elif mask_loss_type == "CE":
        mask_prob = torch.softmax(pred_mask, dim=1)[:, 1:2, :, :]  # softmax preserves shape; keep fg channel
else:
raise NotImplementedError(f"Unknown mask loss type: {mask_loss_type}")
return mask_prob
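def _demo_get_mask_prob():
    # Editor's sketch, not part of the original module: single-channel mask
    # logits converted to probabilities under the "BCE" convention, which
    # simply applies a sigmoid; shapes are (batch, channel, h, w).
    pred_mask = torch.randn(2, 1, 64, 64)
    return get_mask_prob(pred_mask, "BCE")  # shape (2, 1, 64, 64), values in (0, 1)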
def compute_mean_re_te(pred_transes, pred_rots, gt_transes, gt_rots):
pred_transes = pred_transes.detach().cpu().numpy()
pred_rots = pred_rots.detach().cpu().numpy()
gt_transes = gt_transes.detach().cpu().numpy()
gt_rots = gt_rots.detach().cpu().numpy()
bs = pred_rots.shape[0]
R_errs = np.zeros((bs,), dtype=np.float32)
T_errs = np.zeros((bs,), dtype=np.float32)
for i in range(bs):
R_errs[i] = re(pred_rots[i], gt_rots[i])
T_errs[i] = te(pred_transes[i], gt_transes[i])
return R_errs.mean(), T_errs.mean()
|
py | b4144e41130ec5033e1a0a59d37d29ed31aea670 | """
4
0000
101
111000
1
Case #1: 0000
Case #2: (1)0(1)
Case #3: (111)000
Case #4: (1)
"""
import re
def nestingDepth(s):
l = re.findall(r"(0+|1+|2+|3+|4+|5+|6+|7+|8+|9+)", s)
out = ""
open_p = 0
for i in range(len(l)):
n = int(l[i][0])
if open_p < n:
new_p = n - open_p
out += "(" * new_p + l[i]
open_p += new_p
else:
out += l[i]
if i + 1 < len(l):
n = int(l[i + 1][0])
if n < int(l[i][0]):
close_p = open_p - n
out += ")" * close_p
open_p -= close_p
else:
out += ")" * open_p
return out
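# Quick sanity checks (editor's addition, mirroring the sample cases in the
# docstring at the top of this file):
#   nestingDepth("0000")   -> "0000"
#   nestingDepth("101")    -> "(1)0(1)"
#   nestingDepth("111000") -> "(111)000"
#   nestingDepth("1")      -> "(1)"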
t = int(input()) # read a line with a single integer
for i in range(1, t + 1):
s = input()
print("Case #{}: {}".format(i, nestingDepth(s)))
|
py | b4144ea1da3ddd40de76e12b717be05814a92a3f | import logging
import os
import subprocess
import sys
import warnings
from pytest import raises, mark
from testfixtures import LogCapture
from twisted.internet import defer
from twisted.trial import unittest
import scrapy
from scrapy.crawler import Crawler, CrawlerRunner, CrawlerProcess
from scrapy.settings import Settings, default_settings
from scrapy.spiderloader import SpiderLoader
from scrapy.utils.log import configure_logging, get_scrapy_root_handler
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
from scrapy.extensions.throttle import AutoThrottle
from scrapy.extensions import telnet
from scrapy.utils.test import get_testenv
class BaseCrawlerTest(unittest.TestCase):
def assertOptionIsDefault(self, settings, key):
self.assertIsInstance(settings, Settings)
self.assertEqual(settings[key], getattr(default_settings, key))
class CrawlerTestCase(BaseCrawlerTest):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_populate_spidercls_settings(self):
spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
project_settings = {'TEST1': 'project', 'TEST3': 'project'}
class CustomSettingsSpider(DefaultSpider):
custom_settings = spider_settings
settings = Settings()
settings.setdict(project_settings, priority='project')
crawler = Crawler(CustomSettingsSpider, settings)
self.assertEqual(crawler.settings.get('TEST1'), 'spider')
self.assertEqual(crawler.settings.get('TEST2'), 'spider')
self.assertEqual(crawler.settings.get('TEST3'), 'project')
self.assertFalse(settings.frozen)
self.assertTrue(crawler.settings.frozen)
def test_crawler_accepts_dict(self):
crawler = Crawler(DefaultSpider, {'foo': 'bar'})
self.assertEqual(crawler.settings['foo'], 'bar')
self.assertOptionIsDefault(crawler.settings, 'RETRY_ENABLED')
def test_crawler_accepts_None(self):
crawler = Crawler(DefaultSpider)
self.assertOptionIsDefault(crawler.settings, 'RETRY_ENABLED')
def test_crawler_rejects_spider_objects(self):
with raises(ValueError):
Crawler(DefaultSpider())
class SpiderSettingsTestCase(unittest.TestCase):
def test_spider_custom_settings(self):
class MySpider(scrapy.Spider):
name = 'spider'
custom_settings = {
'AUTOTHROTTLE_ENABLED': True
}
crawler = Crawler(MySpider, {})
enabled_exts = [e.__class__ for e in crawler.extensions.middlewares]
self.assertIn(AutoThrottle, enabled_exts)
class CrawlerLoggingTestCase(unittest.TestCase):
def test_no_root_handler_installed(self):
handler = get_scrapy_root_handler()
if handler is not None:
logging.root.removeHandler(handler)
class MySpider(scrapy.Spider):
name = 'spider'
crawler = Crawler(MySpider, {})
assert get_scrapy_root_handler() is None
def test_spider_custom_settings_log_level(self):
log_file = self.mktemp()
class MySpider(scrapy.Spider):
name = 'spider'
custom_settings = {
'LOG_LEVEL': 'INFO',
'LOG_FILE': log_file,
# disable telnet if not available to avoid an extra warning
'TELNETCONSOLE_ENABLED': telnet.TWISTED_CONCH_AVAILABLE,
}
configure_logging()
self.assertEqual(get_scrapy_root_handler().level, logging.DEBUG)
crawler = Crawler(MySpider, {})
self.assertEqual(get_scrapy_root_handler().level, logging.INFO)
info_count = crawler.stats.get_value('log_count/INFO')
logging.debug('debug message')
logging.info('info message')
logging.warning('warning message')
logging.error('error message')
with open(log_file, 'rb') as fo:
logged = fo.read().decode('utf8')
self.assertNotIn('debug message', logged)
self.assertIn('info message', logged)
self.assertIn('warning message', logged)
self.assertIn('error message', logged)
self.assertEqual(crawler.stats.get_value('log_count/ERROR'), 1)
self.assertEqual(crawler.stats.get_value('log_count/WARNING'), 1)
self.assertEqual(
crawler.stats.get_value('log_count/INFO') - info_count, 1)
self.assertEqual(crawler.stats.get_value('log_count/DEBUG', 0), 0)
class SpiderLoaderWithWrongInterface:
def unneeded_method(self):
pass
class CustomSpiderLoader(SpiderLoader):
pass
class CrawlerRunnerTestCase(BaseCrawlerTest):
def test_spider_manager_verify_interface(self):
settings = Settings({
'SPIDER_LOADER_CLASS': 'tests.test_crawler.SpiderLoaderWithWrongInterface'
})
with warnings.catch_warnings(record=True) as w:
self.assertRaises(AttributeError, CrawlerRunner, settings)
self.assertEqual(len(w), 1)
self.assertIn("SPIDER_LOADER_CLASS", str(w[0].message))
self.assertIn("scrapy.interfaces.ISpiderLoader", str(w[0].message))
def test_crawler_runner_accepts_dict(self):
runner = CrawlerRunner({'foo': 'bar'})
self.assertEqual(runner.settings['foo'], 'bar')
self.assertOptionIsDefault(runner.settings, 'RETRY_ENABLED')
def test_crawler_runner_accepts_None(self):
runner = CrawlerRunner()
self.assertOptionIsDefault(runner.settings, 'RETRY_ENABLED')
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
runner = CrawlerRunner(Settings())
spiders = runner.spiders
self.assertEqual(len(w), 1)
self.assertIn("CrawlerRunner.spiders", str(w[0].message))
self.assertIn("CrawlerRunner.spider_loader", str(w[0].message))
sl_cls = load_object(runner.settings['SPIDER_LOADER_CLASS'])
self.assertIsInstance(spiders, sl_cls)
class CrawlerProcessTest(BaseCrawlerTest):
def test_crawler_process_accepts_dict(self):
runner = CrawlerProcess({'foo': 'bar'})
self.assertEqual(runner.settings['foo'], 'bar')
self.assertOptionIsDefault(runner.settings, 'RETRY_ENABLED')
def test_crawler_process_accepts_None(self):
runner = CrawlerProcess()
self.assertOptionIsDefault(runner.settings, 'RETRY_ENABLED')
class ExceptionSpider(scrapy.Spider):
name = 'exception'
@classmethod
def from_crawler(cls, crawler, *args, **kwargs):
raise ValueError('Exception in from_crawler method')
class NoRequestsSpider(scrapy.Spider):
name = 'no_request'
def start_requests(self):
return []
@mark.usefixtures('reactor_pytest')
class CrawlerRunnerHasSpider(unittest.TestCase):
@defer.inlineCallbacks
def test_crawler_runner_bootstrap_successful(self):
runner = CrawlerRunner()
yield runner.crawl(NoRequestsSpider)
self.assertEqual(runner.bootstrap_failed, False)
@defer.inlineCallbacks
def test_crawler_runner_bootstrap_successful_for_several(self):
runner = CrawlerRunner()
yield runner.crawl(NoRequestsSpider)
yield runner.crawl(NoRequestsSpider)
self.assertEqual(runner.bootstrap_failed, False)
@defer.inlineCallbacks
def test_crawler_runner_bootstrap_failed(self):
runner = CrawlerRunner()
try:
yield runner.crawl(ExceptionSpider)
except ValueError:
pass
else:
self.fail('Exception should be raised from spider')
self.assertEqual(runner.bootstrap_failed, True)
@defer.inlineCallbacks
def test_crawler_runner_bootstrap_failed_for_several(self):
runner = CrawlerRunner()
try:
yield runner.crawl(ExceptionSpider)
except ValueError:
pass
else:
self.fail('Exception should be raised from spider')
yield runner.crawl(NoRequestsSpider)
self.assertEqual(runner.bootstrap_failed, True)
def test_crawler_runner_asyncio_enabled_true(self):
if self.reactor_pytest == 'asyncio':
runner = CrawlerRunner(settings={
"TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
})
else:
msg = r"The installed reactor \(.*?\) does not match the requested one \(.*?\)"
with self.assertRaisesRegex(Exception, msg):
runner = CrawlerRunner(settings={
"TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
})
@defer.inlineCallbacks
def test_crawler_process_asyncio_enabled_true(self):
with LogCapture(level=logging.DEBUG) as log:
if self.reactor_pytest == 'asyncio':
runner = CrawlerProcess(settings={
"TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
})
yield runner.crawl(NoRequestsSpider)
self.assertIn("Using reactor: twisted.internet.asyncioreactor.AsyncioSelectorReactor", str(log))
else:
msg = r"The installed reactor \(.*?\) does not match the requested one \(.*?\)"
with self.assertRaisesRegex(Exception, msg):
runner = CrawlerProcess(settings={
"TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
})
@defer.inlineCallbacks
def test_crawler_process_asyncio_enabled_false(self):
runner = CrawlerProcess(settings={"TWISTED_REACTOR": None})
with LogCapture(level=logging.DEBUG) as log:
yield runner.crawl(NoRequestsSpider)
self.assertNotIn("Using reactor: twisted.internet.asyncioreactor.AsyncioSelectorReactor", str(log))
class ScriptRunnerMixin:
def run_script(self, script_name):
script_path = os.path.join(self.script_dir, script_name)
args = (sys.executable, script_path)
p = subprocess.Popen(args, env=get_testenv(),
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return stderr.decode('utf-8')
class CrawlerProcessSubprocess(ScriptRunnerMixin, unittest.TestCase):
script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'CrawlerProcess')
def test_simple(self):
log = self.run_script('simple.py')
self.assertIn('Spider closed (finished)', log)
self.assertNotIn("Using reactor: twisted.internet.asyncioreactor.AsyncioSelectorReactor", log)
def test_asyncio_enabled_no_reactor(self):
log = self.run_script('asyncio_enabled_no_reactor.py')
self.assertIn('Spider closed (finished)', log)
self.assertIn("Using reactor: twisted.internet.asyncioreactor.AsyncioSelectorReactor", log)
def test_asyncio_enabled_reactor(self):
log = self.run_script('asyncio_enabled_reactor.py')
self.assertIn('Spider closed (finished)', log)
self.assertIn("Using reactor: twisted.internet.asyncioreactor.AsyncioSelectorReactor", log)
def test_ipv6_default_name_resolver(self):
log = self.run_script('default_name_resolver.py')
self.assertIn('Spider closed (finished)', log)
self.assertIn("twisted.internet.error.DNSLookupError: DNS lookup failed: no results for hostname lookup: ::1.", log)
self.assertIn("'downloader/exception_type_count/twisted.internet.error.DNSLookupError': 1,", log)
def test_ipv6_alternative_name_resolver(self):
log = self.run_script('alternative_name_resolver.py')
self.assertIn('Spider closed (finished)', log)
self.assertTrue(any([
"twisted.internet.error.ConnectionRefusedError" in log,
"twisted.internet.error.ConnectError" in log,
]))
self.assertTrue(any([
"'downloader/exception_type_count/twisted.internet.error.ConnectionRefusedError': 1," in log,
"'downloader/exception_type_count/twisted.internet.error.ConnectError': 1," in log,
]))
def test_reactor_select(self):
log = self.run_script("twisted_reactor_select.py")
self.assertIn("Spider closed (finished)", log)
self.assertIn("Using reactor: twisted.internet.selectreactor.SelectReactor", log)
def test_reactor_poll(self):
log = self.run_script("twisted_reactor_poll.py")
self.assertIn("Spider closed (finished)", log)
self.assertIn("Using reactor: twisted.internet.pollreactor.PollReactor", log)
def test_reactor_asyncio(self):
log = self.run_script("twisted_reactor_asyncio.py")
self.assertIn("Spider closed (finished)", log)
self.assertIn("Using reactor: twisted.internet.asyncioreactor.AsyncioSelectorReactor", log)
class CrawlerRunnerSubprocess(ScriptRunnerMixin, unittest.TestCase):
script_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'CrawlerRunner')
def test_response_ip_address(self):
log = self.run_script("ip_address.py")
self.assertIn("INFO: Spider closed (finished)", log)
self.assertIn("INFO: Host: not.a.real.domain", log)
self.assertIn("INFO: Type: <class 'ipaddress.IPv4Address'>", log)
self.assertIn("INFO: IP address: 127.0.0.1", log)
|
py | b4144eae2d7fafea844168adcd5645174cefec55 | #!/usr/bin/python3
import os
import itertools
import operator
class DictionaryMaker:
def __init__(self):
self.aditionalData = []
self.simpleCollection = ["admin", "adm", "adm", "2015",
"2019","2018", "2016", "2017", "2014", ".", "-", "_", "@"]
self.convinationLevel = 2
self.domainName = False
self.fullName = False
self.address = False
self.importantDate = False
self.identification = False
def welcome(self):
print("Welcome human, Please answer the following questions...")
self.getInputs()
self.processInput()
self.generateDictionary()
def getInputs(self):
print("Usage: it is possible to enter an empty response...")
convination = input("convination level: (2 recomended)")
self.convinationLevel = 2
if convination != "" :
self.convinationLevel = int(convination)
self.makeQuestion("domain name?", "domainName")
self.makeQuestion("address?", "address")
self.makeQuestion("full name?", "fullName")
self.makeQuestion(
"birthdate or important date?(dd-mm-yyyy)", "importantDate")
self.makeQuestion(
"identifier or identification number?", "identification")
self.makeQuestion("aditional data?", "aditionalData")
def processNumbers(self, inStr):
numbers = [str(s) for s in inStr.split() if s.isdigit()]
response = []
for number in numbers:
for i in range(1, len(number)):
res = itertools.product(number, repeat=i)
for convination in res:
response.append(''.join(convination))
return response
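    # Editor's note (not in the original source): for a token such as "123",
    # itertools.product emits every digit string of length 1..len(token)-1
    # drawn from the token's digits, e.g. "1", "2", "3", "11", "12", ..., "33".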
def processStr(self, inStr):
response = [str(s) for s in inStr.split() if not s.isdigit()]
response.append(inStr)
response.append("".join(inStr.split()))
return response
def processName(self, inStr):
response = self.processStr(inStr)
words = [str(s) for s in inStr.split() if not s.isdigit()]
for word in words:
response.append(word[0])
response.append(word[0].title())
res = itertools.product(response, repeat=2)
for convination in res:
response.append(''.join(convination))
return self.cleanList(response)
def processDomain(self, inStr):
response = []
response.append(inStr)
response.extend(inStr.split("."))
return response
def processAddress(self, inStr):
response = []
response.append(inStr)
response.extend(inStr.split())
response.append(''.join(inStr.split()))
response.extend(self.processNumbers(inStr))
response.extend(self.processStr(inStr))
return response
def processDate(self, inStr):
response = []
if "/" in inStr:
response.extend(inStr.split("/"))
if "-" in inStr:
response.extend(inStr.split("-"))
if len(response) == 3 and len(response[2]) == 4:
response.append(response[2][:2])
response.append(response[2][2:])
tmpResponse = []
if len(response) == 0:
return response
res = itertools.combinations(response, 3)
for convination in res:
response.append(''.join(convination))
# response.append('-'.join(convination))
# response.append('/'.join(convination))
print("date convinations: " + str(len(response)) + ".")
return self.cleanList(response)
def processIdentification(self, inStr):
numbers = [str(s) for s in inStr.split() if s.isdigit()]
response = []
for number in numbers:
for i in range(1, len(number)):
                response.append(number[:i])  # leading substrings of the number
return self.cleanList(response)
def makeQuestion(self, questionStr, storeStr):
nextQuestion = True
if not getattr(self, storeStr):
setattr(self, storeStr, [])
storeSelf = getattr(self, storeStr)
while nextQuestion:
tempAnswer = input(questionStr + " (empty = next question): ")
if tempAnswer != "":
storeSelf.append(tempAnswer)
else:
nextQuestion = False
setattr(self, storeStr, storeSelf)
def processInput(self):
print("Starting processing...")
if len(self.domainName) > 0:
print("processing domain name...")
for domain in self.domainName:
self.simpleCollection.extend(self.processDomain(domain))
if len(self.fullName) > 0:
print("processing full name...")
for fullName in self.fullName:
self.simpleCollection.extend(self.processName(fullName))
if len(self.address) > 0:
print("processing address...")
for address in self.address:
self.simpleCollection.extend(self.processAddress(address))
if len(self.aditionalData) > 0:
print("processing additional data...")
for data in self.aditionalData:
self.simpleCollection.extend(self.processStr(data))
if len(self.importantDate) > 0:
print("processing dates...")
for date in self.importantDate:
self.simpleCollection.extend(self.processDate(date))
if len(self.identification) > 0:
print("processing identification...")
for identification in self.identification:
self.simpleCollection.extend(
self.processIdentification(identification))
tempTitles = []
for text in self.simpleCollection:
if not str(text).title() in tempTitles:
tempTitles.append(str(text).title())
self.simpleCollection.extend(tempTitles)
self.greenPrint("Done")
def cleanList(self, list):
return sorted(set(list))
def greenPrint(self, text):
print('\033[92m' + text + " " + u'\u2713' + '\033[0m')
def generateDictionary(self):
print("making words convinations...")
print(str(len(self.simpleCollection)) + " words.")
lines = []
for i in range(1, self.convinationLevel + 1):
print("starting level: " + str(i) + ".")
res = itertools.product(self.cleanList(
self.simpleCollection), repeat=i)
for j in res:
posiblePass = ''.join(j)
lines.append(posiblePass)
self.greenPrint("leven " + str(i) + ": done")
print("cleaning List... " + str(len(lines)))
lines = self.cleanList(lines)
self.greenPrint("clen list done lines: " + str(len(lines)) + ".")
print("writing " + str(len(lines)) + " lines in file...")
self.makeFile(lines)
self.greenPrint("write file done")
def makeFile(self, lines):
with open('pass.txt', 'a') as passFile:
for line in lines:
passFile.write(line + '\n')
dictionaryMaker = DictionaryMaker()
dictionaryMaker.welcome()
|
py | b4144f1823c66c81eef6df7178457860b039179e | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2014 SoftLayer Technologies, Inc.
# Copyright 2015 Mirantis, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import errno
try:
from eventlet import sleep
except ImportError:
from time import sleep
from eventlet.green import socket
import functools
import os
import re
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import netutils
from oslo_utils import strutils
import six
from six.moves import urllib
from webob import exc
from glance.common import exception
from glance.common import timeutils
from glance.i18n import _, _LE
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# Whitelist of v1 API headers of form x-image-meta-xxx
IMAGE_META_HEADERS = ['x-image-meta-location', 'x-image-meta-size',
'x-image-meta-is_public', 'x-image-meta-disk_format',
'x-image-meta-container_format', 'x-image-meta-name',
'x-image-meta-status', 'x-image-meta-copy_from',
'x-image-meta-uri', 'x-image-meta-checksum',
'x-image-meta-created_at', 'x-image-meta-updated_at',
'x-image-meta-deleted_at', 'x-image-meta-min_ram',
'x-image-meta-min_disk', 'x-image-meta-owner',
'x-image-meta-store', 'x-image-meta-id',
'x-image-meta-protected', 'x-image-meta-deleted',
'x-image-meta-virtual_size']
GLANCE_TEST_SOCKET_FD_STR = 'GLANCE_TEST_SOCKET_FD'
def chunkreadable(iter, chunk_size=65536):
"""
Wrap a readable iterator with a reader yielding chunks of
a preferred size, otherwise leave iterator unchanged.
:param iter: an iter which may also be readable
:param chunk_size: maximum size of chunk
"""
return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter
def chunkiter(fp, chunk_size=65536):
"""
Return an iterator to a file-like obj which yields fixed size chunks
:param fp: a file-like object
:param chunk_size: maximum size of chunk
"""
while True:
chunk = fp.read(chunk_size)
if chunk:
yield chunk
else:
break
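def _demo_chunkiter():
    # Editor's sketch, not part of the original module: stream a file-like
    # object in 8 KiB chunks; the in-memory payload is a stand-in for a real
    # image file.
    import io
    fp = io.BytesIO(b'x' * 20000)
    return [len(chunk) for chunk in chunkiter(fp, chunk_size=8192)]  # [8192, 8192, 3616]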
def cooperative_iter(iter):
"""
Return an iterator which schedules after each
iteration. This can prevent eventlet thread starvation.
:param iter: an iterator to wrap
"""
try:
for chunk in iter:
sleep(0)
yield chunk
except Exception as err:
with excutils.save_and_reraise_exception():
msg = _LE("Error: cooperative_iter exception %s") % err
LOG.error(msg)
def cooperative_read(fd):
"""
Wrap a file descriptor's read with a partial function which schedules
after each read. This can prevent eventlet thread starvation.
:param fd: a file descriptor to wrap
"""
def readfn(*args):
result = fd.read(*args)
sleep(0)
return result
return readfn
MAX_COOP_READER_BUFFER_SIZE = 134217728 # 128M seems like a sane buffer limit
CONF.import_group('import_filtering_opts',
'glance.async_.flows._internal_plugins')
def validate_import_uri(uri):
"""Validate requested uri for Image Import web-download.
:param uri: target uri to be validated
"""
if not uri:
return False
parsed_uri = urllib.parse.urlparse(uri)
scheme = parsed_uri.scheme
host = parsed_uri.hostname
port = parsed_uri.port
wl_schemes = CONF.import_filtering_opts.allowed_schemes
bl_schemes = CONF.import_filtering_opts.disallowed_schemes
wl_hosts = CONF.import_filtering_opts.allowed_hosts
bl_hosts = CONF.import_filtering_opts.disallowed_hosts
wl_ports = CONF.import_filtering_opts.allowed_ports
bl_ports = CONF.import_filtering_opts.disallowed_ports
    # NOTE(jokke): If both allowed and disallowed lists are configured, only
    # the allowed list is honored; log this so operators are aware.
    if wl_schemes and bl_schemes:
        bl_schemes = []
        LOG.debug("Both allowed and disallowed schemes have been configured. "
                  "Will only process the allowed list.")
    if wl_hosts and bl_hosts:
        bl_hosts = []
        LOG.debug("Both allowed and disallowed hosts have been configured. "
                  "Will only process the allowed list.")
    if wl_ports and bl_ports:
        bl_ports = []
        LOG.debug("Both allowed and disallowed ports have been configured. "
                  "Will only process the allowed list.")
if not scheme or ((wl_schemes and scheme not in wl_schemes) or
parsed_uri.scheme in bl_schemes):
return False
if not host or ((wl_hosts and host not in wl_hosts) or
host in bl_hosts):
return False
if port and ((wl_ports and port not in wl_ports) or
port in bl_ports):
return False
return True
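# Editor's note (not in the original module): with all import_filtering_opts
# lists left empty, the checks above reduce to "a scheme and a host must be
# present", for example:
#   validate_import_uri('http://example.com/image.qcow2')  -> True
#   validate_import_uri('example.com/image.qcow2')         -> False (no scheme)
#   validate_import_uri('')                                -> False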
class CooperativeReader(object):
"""
An eventlet thread friendly class for reading in image data.
When accessing data either through the iterator or the read method
we perform a sleep to allow a co-operative yield. When there is more than
one image being uploaded/downloaded this prevents eventlet thread
starvation, ie allows all threads to be scheduled periodically rather than
having the same thread be continuously active.
"""
def __init__(self, fd):
"""
:param fd: Underlying image file object
"""
self.fd = fd
self.iterator = None
# NOTE(markwash): if the underlying supports read(), overwrite the
# default iterator-based implementation with cooperative_read which
# is more straightforward
if hasattr(fd, 'read'):
self.read = cooperative_read(fd)
else:
self.iterator = None
self.buffer = b''
self.position = 0
def read(self, length=None):
"""Return the requested amount of bytes, fetching the next chunk of
the underlying iterator when needed.
This is replaced with cooperative_read in __init__ if the underlying
fd already supports read().
"""
if length is None:
if len(self.buffer) - self.position > 0:
# if no length specified but some data exists in buffer,
# return that data and clear the buffer
result = self.buffer[self.position:]
self.buffer = b''
self.position = 0
return bytes(result)
else:
# otherwise read the next chunk from the underlying iterator
# and return it as a whole. Reset the buffer, as subsequent
# calls may specify the length
try:
if self.iterator is None:
self.iterator = self.__iter__()
return next(self.iterator)
except StopIteration:
return b''
finally:
self.buffer = b''
self.position = 0
else:
result = bytearray()
while len(result) < length:
if self.position < len(self.buffer):
to_read = length - len(result)
chunk = self.buffer[self.position:self.position + to_read]
result.extend(chunk)
# This check is here to prevent potential OOM issues if
# this code is called with unreasonably high values of read
# size. Currently it is only called from the HTTP clients
# of Glance backend stores, which use httplib for data
# streaming, which has readsize hardcoded to 8K, so this
                # check should never fire. Regardless, it is still worth
                # making the check, as the code may be reused elsewhere.
if len(result) >= MAX_COOP_READER_BUFFER_SIZE:
raise exception.LimitExceeded()
self.position += len(chunk)
else:
try:
if self.iterator is None:
self.iterator = self.__iter__()
self.buffer = next(self.iterator)
self.position = 0
except StopIteration:
self.buffer = b''
self.position = 0
return bytes(result)
return bytes(result)
def __iter__(self):
return cooperative_iter(self.fd.__iter__())
class LimitingReader(object):
"""
Reader designed to fail when reading image data past the configured
allowable amount.
"""
def __init__(self, data, limit,
exception_class=exception.ImageSizeLimitExceeded):
"""
:param data: Underlying image data object
:param limit: maximum number of bytes the reader should allow
:param exception_class: Type of exception to be raised
"""
self.data = data
self.limit = limit
self.bytes_read = 0
self.exception_class = exception_class
def __iter__(self):
for chunk in self.data:
self.bytes_read += len(chunk)
if self.bytes_read > self.limit:
raise self.exception_class()
else:
yield chunk
def read(self, i):
result = self.data.read(i)
self.bytes_read += len(result)
if self.bytes_read > self.limit:
raise self.exception_class()
return result
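def _demo_limiting_reader():
    # Editor's sketch, not part of the original module: reading past the
    # configured limit raises the configured exception class.
    import io
    reader = LimitingReader(io.BytesIO(b'x' * 10), limit=4)
    try:
        reader.read(10)
    except exception.ImageSizeLimitExceeded:
        return True
    return False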
def image_meta_to_http_headers(image_meta):
"""
Returns a set of image metadata into a dict
of HTTP headers that can be fed to either a Webob
Request object or an httplib.HTTP(S)Connection object
:param image_meta: Mapping of image metadata
"""
headers = {}
for k, v in image_meta.items():
if v is not None:
if k == 'properties':
for pk, pv in v.items():
if pv is not None:
headers["x-image-meta-property-%s"
% pk.lower()] = six.text_type(pv)
else:
headers["x-image-meta-%s" % k.lower()] = six.text_type(v)
return headers
def get_image_meta_from_headers(response):
"""
Processes HTTP headers from a supplied response that
match the x-image-meta and x-image-meta-property and
returns a mapping of image metadata and properties
:param response: Response to process
"""
result = {}
properties = {}
if hasattr(response, 'getheaders'): # httplib.HTTPResponse
headers = response.getheaders()
else: # webob.Response
headers = response.headers.items()
for key, value in headers:
key = str(key.lower())
if key.startswith('x-image-meta-property-'):
field_name = key[len('x-image-meta-property-'):].replace('-', '_')
properties[field_name] = value or None
elif key.startswith('x-image-meta-'):
field_name = key[len('x-image-meta-'):].replace('-', '_')
if 'x-image-meta-' + field_name not in IMAGE_META_HEADERS:
msg = _("Bad header: %(header_name)s") % {'header_name': key}
raise exc.HTTPBadRequest(msg, content_type="text/plain")
result[field_name] = value or None
result['properties'] = properties
for key, nullable in [('size', False), ('min_disk', False),
('min_ram', False), ('virtual_size', True)]:
if key in result:
try:
result[key] = int(result[key])
except ValueError:
if nullable and result[key] == str(None):
result[key] = None
else:
extra = (_("Cannot convert image %(key)s '%(value)s' "
"to an integer.")
% {'key': key, 'value': result[key]})
raise exception.InvalidParameterValue(value=result[key],
param=key,
extra_msg=extra)
if result[key] is not None and result[key] < 0:
extra = _('Cannot be a negative value.')
raise exception.InvalidParameterValue(value=result[key],
param=key,
extra_msg=extra)
for key in ('is_public', 'deleted', 'protected'):
if key in result:
result[key] = strutils.bool_from_string(result[key])
return result
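# Illustrative sketch (not part of the original module): given a webob-style
# response whose headers.items() yields
#   [('x-image-meta-size', '10'), ('x-image-meta-property-os', 'linux')]
# this function returns (assuming 'x-image-meta-size' is listed in
# IMAGE_META_HEADERS):
#   {'size': 10, 'properties': {'os': 'linux'}}
# Note that 'size' is coerced to int by the post-processing loop above.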
def create_mashup_dict(image_meta):
"""
Returns a dictionary-like mashup of the image core properties
and the image custom properties from given image metadata.
:param image_meta: metadata of image with core and custom properties
"""
d = {}
for key, value in six.iteritems(image_meta):
if isinstance(value, dict):
for subkey, subvalue in six.iteritems(
create_mashup_dict(value)):
if subkey not in image_meta:
d[subkey] = subvalue
else:
d[key] = value
return d
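# Illustrative sketch (not part of the original module): nested dict values
# such as 'properties' are flattened when their keys do not collide with the
# core keys.
#
#   >>> create_mashup_dict({'name': 'fedora', 'properties': {'os': 'linux'}})
#   {'name': 'fedora', 'os': 'linux'}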
def safe_mkdirs(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def mutating(func):
"""Decorator to enforce read-only logic"""
@functools.wraps(func)
def wrapped(self, req, *args, **kwargs):
if req.context.read_only:
msg = "Read-only access"
LOG.debug(msg)
raise exc.HTTPForbidden(msg, request=req,
content_type="text/plain")
return func(self, req, *args, **kwargs)
return wrapped
def setup_remote_pydev_debug(host, port):
error_msg = _LE('Error setting up the debug environment. Verify that the'
' option pydev_worker_debug_host is pointing to a valid '
'hostname or IP on which a pydev server is listening on'
' the port indicated by pydev_worker_debug_port.')
try:
try:
from pydev import pydevd
except ImportError:
import pydevd
pydevd.settrace(host,
port=port,
stdoutToServer=True,
stderrToServer=True)
return True
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(error_msg)
def get_test_suite_socket():
global GLANCE_TEST_SOCKET_FD_STR
if GLANCE_TEST_SOCKET_FD_STR in os.environ:
fd = int(os.environ[GLANCE_TEST_SOCKET_FD_STR])
sock = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
if six.PY2:
sock = socket.SocketType(_sock=sock)
sock.listen(CONF.backlog)
del os.environ[GLANCE_TEST_SOCKET_FD_STR]
os.close(fd)
return sock
return None
def is_valid_hostname(hostname):
"""Verify whether a hostname (not an FQDN) is valid."""
return re.match('^[a-zA-Z0-9-]+$', hostname) is not None
def is_valid_fqdn(fqdn):
"""Verify whether a host is a valid FQDN."""
return re.match(r'^[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$', fqdn) is not None
def parse_valid_host_port(host_port):
"""
Given a "host:port" string, attempts to parse it as intelligently as
possible to determine if it is valid. This includes IPv6 [host]:port form,
IPv4 ip:port form, and hostname:port or fqdn:port form.
Invalid inputs will raise a ValueError, while valid inputs will return
a (host, port) tuple where the port will always be of type int.
"""
try:
try:
host, port = netutils.parse_host_port(host_port)
except Exception:
raise ValueError(_('Host and port "%s" is not valid.') % host_port)
if not netutils.is_valid_port(port):
raise ValueError(_('Port "%s" is not valid.') % port)
# First check for valid IPv6 and IPv4 addresses, then a generic
# hostname. Failing those, if the host includes a period, then this
# should pass a very generic FQDN check. The FQDN check for letters at
# the tail end will weed out any hilariously absurd IPv4 addresses.
if not (netutils.is_valid_ipv6(host) or netutils.is_valid_ipv4(host) or
is_valid_hostname(host) or is_valid_fqdn(host)):
raise ValueError(_('Host "%s" is not valid.') % host)
except Exception as ex:
raise ValueError(_('%s '
'Please specify a host:port pair, where host is an '
'IPv4 address, IPv6 address, hostname, or FQDN. If '
'using an IPv6 address, enclose it in brackets '
'separately from the port (i.e., '
'"[fe80::a:b:c]:9876").') % ex)
return (host, int(port))
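# Illustrative sketch (not part of the original module; the hostname is
# hypothetical):
#
#   >>> parse_valid_host_port('[fe80::a:b:c]:9876')
#   ('fe80::a:b:c', 9876)
#   >>> parse_valid_host_port('glance.example.com:9292')
#   ('glance.example.com', 9292)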
try:
REGEX_4BYTE_UNICODE = re.compile(u'[\U00010000-\U0010ffff]')
except re.error:
# UCS-2 build case
REGEX_4BYTE_UNICODE = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]')
def no_4byte_params(f):
"""
    Checks that no 4-byte unicode characters appear
    in dict keys/values or in string parameters
"""
def wrapper(*args, **kwargs):
def _is_match(some_str):
return (isinstance(some_str, six.text_type) and
REGEX_4BYTE_UNICODE.findall(some_str) != [])
def _check_dict(data_dict):
# a dict of dicts has to be checked recursively
for key, value in six.iteritems(data_dict):
if isinstance(value, dict):
_check_dict(value)
else:
if _is_match(key):
msg = _("Property names can't contain 4 byte unicode.")
raise exception.Invalid(msg)
if _is_match(value):
msg = (_("%s can't contain 4 byte unicode characters.")
% key.title())
raise exception.Invalid(msg)
for data_dict in [arg for arg in args if isinstance(arg, dict)]:
_check_dict(data_dict)
# now check args for str values
for arg in args:
if _is_match(arg):
msg = _("Param values can't contain 4 byte unicode.")
raise exception.Invalid(msg)
# check kwargs as well, as params are passed as kwargs via
# registry calls
_check_dict(kwargs)
return f(*args, **kwargs)
return wrapper
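# Illustrative sketch (not part of the original module): guarding a
# hypothetical registry call with the decorator.
#
#   @no_4byte_params
#   def add_property(name, value):
#       ...
#
#   add_property('note', u'\U0001F600')   # raises exception.Invalid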
def stash_conf_values():
"""
Make a copy of some of the current global CONF's settings.
Allows determining if any of these values have changed
when the config is reloaded.
"""
conf = {
'bind_host': CONF.bind_host,
'bind_port': CONF.bind_port,
        'tcp_keepidle': CONF.tcp_keepidle,
'backlog': CONF.backlog,
'key_file': CONF.key_file,
'cert_file': CONF.cert_file
}
return conf
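# Illustrative sketch (not part of the original module): detecting a changed
# bind_port across a config reload (assumes oslo.config's
# reload_config_files() and a hypothetical restart handler).
#
#   old = stash_conf_values()
#   CONF.reload_config_files()
#   if old['bind_port'] != CONF.bind_port:
#       ...   # e.g. re-create the listening socket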
def split_filter_op(expression):
"""Split operator from threshold in an expression.
Designed for use on a comparative-filtering query field.
When no operator is found, default to an equality comparison.
:param expression: the expression to parse
:returns: a tuple (operator, threshold) parsed from expression
"""
left, sep, right = expression.partition(':')
if sep:
# If the expression is a date of the format ISO 8601 like
# CCYY-MM-DDThh:mm:ss+hh:mm and has no operator, it should
# not be partitioned, and a default operator of eq should be
# assumed.
try:
timeutils.parse_isotime(expression)
op = 'eq'
threshold = expression
except ValueError:
op = left
threshold = right
else:
op = 'eq' # default operator
threshold = left
# NOTE stevelle decoding escaped values may be needed later
return op, threshold
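# Illustrative sketch (not part of the original module):
#
#   >>> split_filter_op('gte:100')
#   ('gte', '100')
#   >>> split_filter_op('2015-08-27T09:49:58Z')   # ISO 8601, no operator
#   ('eq', '2015-08-27T09:49:58Z')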
def validate_quotes(value):
"""Validate filter values
    Validates that opening/closing quotes in the expression are balanced.
"""
open_quotes = True
for i in range(len(value)):
if value[i] == '"':
if i and value[i - 1] == '\\':
continue
if open_quotes:
if i and value[i - 1] != ',':
msg = _("Invalid filter value %s. There is no comma "
"before opening quotation mark.") % value
raise exception.InvalidParameterValue(message=msg)
else:
if i + 1 != len(value) and value[i + 1] != ",":
msg = _("Invalid filter value %s. There is no comma "
"after closing quotation mark.") % value
raise exception.InvalidParameterValue(message=msg)
open_quotes = not open_quotes
if not open_quotes:
msg = _("Invalid filter value %s. The quote is not closed.") % value
raise exception.InvalidParameterValue(message=msg)
def split_filter_value_for_quotes(value):
"""Split filter values
    Split values by commas and quotes for 'in' operator, according to api-wg guidelines.
"""
validate_quotes(value)
tmp = re.compile(r'''
"( # if found a double-quote
[^\"\\]* # take characters either non-quotes or backslashes
(?:\\. # take backslashes and character after it
[^\"\\]*)* # take characters either non-quotes or backslashes
) # before double-quote
",? # a double-quote with comma maybe
| ([^,]+),? # if not found double-quote take any non-comma
# characters with comma maybe
| , # if we have only comma take empty string
''', re.VERBOSE)
return [val[0] or val[1] for val in re.findall(tmp, value)]
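# Illustrative sketch (not part of the original module):
#
#   >>> split_filter_value_for_quotes('"fedora,x86",centos')
#   ['fedora,x86', 'centos']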
def evaluate_filter_op(value, operator, threshold):
"""Evaluate a comparison operator.
Designed for use on a comparative-filtering query field.
:param value: evaluated against the operator, as left side of expression
:param operator: any supported filter operation
:param threshold: to compare value against, as right side of expression
:raises InvalidFilterOperatorValue: if an unknown operator is provided
:returns: boolean result of applied comparison
"""
if operator == 'gt':
return value > threshold
elif operator == 'gte':
return value >= threshold
elif operator == 'lt':
return value < threshold
elif operator == 'lte':
return value <= threshold
elif operator == 'neq':
return value != threshold
elif operator == 'eq':
return value == threshold
msg = _("Unable to filter on a unknown operator.")
raise exception.InvalidFilterOperatorValue(msg)
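# Illustrative sketch (not part of the original module): combining the helpers
# above to apply a comparative filter.
#
#   >>> op, threshold = split_filter_op('gte:512')
#   >>> evaluate_filter_op(1024, op, int(threshold))
#   True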
|
py | b4144f84821da93162ca60fb0fea656fd384b792 | import datetime
import os
import logging
import re
import shutil
# logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
logging.basicConfig(level=logging.DEBUG)
print(f'Hello {datetime.datetime.now()}')
def delete_file(file_name):
if os.path.exists(f'./images/{file_name}'):
os.remove(f'./images/{file_name}')
else:
print(f'The file with name {file_name} does not exist')
def extractDate(file_name):
# ex: 'IMG_20191021_172027.jpg' => '20191021'
# ex: '20191021_172027.jpg' => '20191021'
    # match = re.search(r'[_|-]?(.*?)[_|-]', file_name)
    # NOTE: '[2019|2020]' was a character class, not an alternation, so it
    # could match strings like '|1234567'. Match an 8-digit date instead.
    match = re.search(r'(?:2019|2020)[0-9]{4}', file_name)
if match:
return datetime.datetime.strptime(match.group(0), '%Y%m%d')
else:
return '<Error>'
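# Illustrative sketch (not part of the original script):
#
#   >>> extractDate('IMG_20191021_172027.jpg')
#   datetime.datetime(2019, 10, 21, 0, 0)
#   >>> extractDate('holiday.jpg')
#   '<Error>'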
def scanFolder(basepath, prefix='.'):
logger = logging.getLogger('scanFolder')
res = []
with os.scandir(basepath) as entries:
for entry in entries:
if entry.name.startswith('.'):
continue
if entry.is_dir():
count = len([iq for iq in os.scandir(entry.path)])
# logger.debug(f'{prefix} Folder: {entry.name}, count: {count}')
res += scanFolder(entry.path, prefix + '..')
else:
res += [entry]
info = entry.stat()
# logger.debug(f'{prefix} File: {entry.name}, Date: {extractDate(entry.name)}')
# logger.debug(f'{prefix}.. Info: {info}')
return res
def resetFile(file):
sourcePath = './SourcePhotosTest'
os.rename(file.path, f'{sourcePath}/{file.name}')
def saveFile(file):
date = extractDate(file.name)
basepath = './TargetPhotosTest'
targetPath = basepath
# Prepare the target path
if (type(date) == datetime.datetime):
targetPath += f'/{date.year}/{date.month}.{date.year}/{date.day}.{date.month}.{date.year}'
else:
targetPath += '/others'
# Create directoy if not exist
if not os.path.exists(targetPath):
os.makedirs(targetPath)
os.rename(file.path, f'{targetPath}/{file.name}')
def saveTargetFiles(files):
logger = logging.getLogger('saveTargetFiles')
logging.info('starting saving target ...')
for file in files:
saveFile(file)
def resetTargetFiles():
basepath = './TargetPhotosTest'
files = scanFolder(basepath)
for file in files:
resetFile(file)
def print_files(files):  # renamed from 'print' to avoid shadowing the builtin
for file in files:
date = extractDate(file.name)
if (type(date) == datetime.datetime):
date_str = f'{date.year}/{date.month}/{date.day}'
else:
date_str = '<Error>'
logging.debug(f'{file.name}, {date_str}')
def main():
logger = logging.getLogger('main')
logging.info('started main ...')
# List all subdirectories using scandir()
basepath = './SourcePhotosTest'
files = scanFolder(basepath)
logging.debug(f'Files Count: {len(files)}')
    print_files(files)
saveTargetFiles(files)
# resetTargetFiles()
if __name__ == '__main__':
main()
|
py | b414500d1894a82944419f999d1648b0327db72c | # -*- coding: latin-1 -*-
"""
fragments - define text fragments in the document
"""
from domain.norm_document.model import Chapter, Section, Norm, Verifier
S1101 = Section(
identifier="11.01",
title="Beveiligde gebieden",
text="Doelstelling: Onbevoegde fysieke toegang tot, schade aan en interferentie met informatie en "
"informatieverwerkende faciliteiten van de organisatie voorkomen.",
fragments=[
Norm(
identifier="11.01.01",
title="Fysieke beveiligingszone",
text="Beveiligingszones behoren te worden gedefinieerd en gebruikt om gebieden te beschermen die "
"gevoelige of essentiële informatie en informatieverwerkende faciliteiten bevatten.",
bbn=1,
fragments=[
Verifier(
identifier="11.01.01/01",
title="",
text="Er wordt voor het inrichten van beveiligde zones gebruik gemaakt van "
"de volgende voorschriften:"
"<ol style='list-style-type: lower-alpha;'>"
"<li>het Kader Rijkstoegangsbeleid (2010);</li>"
"<li>het Normenkader Beveiliging Rijkskantoren (NkBR 2015);</li>"
"<li>het Beveiligingsvoorschrift Rijk (BVR 2013).<li>"
"</ol>",
bbn=1,
),
],
),
Norm(
identifier="11.01.02",
title="Fysieke toegangsbeveiliging",
text="Beveiligde gebieden behoren te worden beschermd door passende toegangsbeveiliging om ervoor "
"te zorgen dat alleen bevoegd personeel toegang krijgt.",
bbn=1,
fragments=[
Verifier(
identifier="11.01.02/01",
title="",
text="In geval van concrete beveiligingsrisico's worden waarschuwingen, conform onderlinge "
"afspraken, verzonden aan de relevante collega's binnen het beveiligingsdomein van "
"het Rijk.",
bbn=2,
),
],
),
Norm(
identifier="11.01.03",
title="Kantoren, ruimten en faciliteiten beveiligen",
text="Voor kantoren, ruimten en faciliteiten behoort fysieke beveiliging te worden "
"ontworpen en toegepast.",
bbn=1,
fragments=[
Verifier(
identifier="11.01.03/01",
title="",
text="Sleutelbeheer is ingericht op basis van een sleutelplan "
"(NkBR 5.4).",
bbn=1,
),
],
),
Norm(
identifier="11.01.04",
title="Beschermen tegen bedreigingen van buitenaf",
text="Tegen natuurrampen, kwaadwillige aanvallen of ongelukken behoort fysieke bescherming te worden "
"ontworpen en toegepast.",
bbn=1,
fragments=[
Verifier(
identifier="11.01.04/01",
title="",
text="De organisatie heeft geïnventariseerd welke papieren archieven en apparatuur "
"bedrijfskritisch zijn. Tegen bedreigingen van buitenaf zijn beveiligingsmaatregelen "
"genomen op basis van een expliciete risicoafweging.",
bbn=1,
),
Verifier(
identifier="11.01.04/02",
title="",
text="Bij huisvesting van IT-apparatuur wordt rekening gehouden met de kans op gevolgen van "
"rampen veroorzaakt door de natuur en menselijk handelen.",
bbn=1,
),
],
),
Norm(
identifier="11.01.05",
title="Werken in beveiligde gebieden",
text="Voor het werken in beveiligde gebieden behoren procedures te worden ontwikkeld en toegepast.",
bbn=2,
fragments=[
Verifier(
identifier="11.01.05/01",
title="",
text="- conform norm -",
bbn=2,
),
],
),
Norm(
identifier="11.01.06",
title="Laad- en loslocatie",
text="Toegangspunten zoals laad- en loslocaties en andere punten waar onbevoegde personen het terrein "
"kunnen betreden, behoren te worden beheerst, en zo mogelijk te worden afgeschermd van "
"informatieverwerkende faciliteiten om onbevoegde toegang te vermijden.",
bbn=1,
fragments=[
Verifier(
identifier="11.01.06/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
],
)
S1102 = Section(
identifier="11.02",
title="Apparatuur",
text="Doelstelling: Verlies, schade, diefstal of compromittering van bedrijfsmiddelen en onderbreking van de "
"bedrijfsvoering van de organisatie voorkomen.",
fragments=[
Norm(
identifier="11.02.01",
title="Plaatsing en bescherming van apparatuur",
text="Apparatuur behoort zo te worden geplaatst en beschermd dat risico's van bedreigingen en gevaren "
"van buitenaf, alsook de kans op onbevoegde toegang worden verkleind.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.01/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
Norm(
identifier="11.02.02",
title="Nutsvoorzieningen",
text="Apparatuur behoort te worden beschermd tegen stroomuitval en andere verstoringen die worden "
"veroorzaakt door ontregelingen in nutsvoorzieningen.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.02/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
Norm(
identifier="11.02.03",
title="Beveiliging van bekabeling",
text="Voedings- en telecommunicatiekabels voor het versturen van gegevens of die informatiediensten "
"ondersteunen, behoren te worden beschermd tegen interceptie, verstoring of schade.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.03/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
Norm(
identifier="11.02.04",
title="Onderhoud van apparatuur",
text="Apparatuur behoort correct te worden onderhouden om de continue beschikbaarheid en integriteit "
"ervan te waarborgen.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.04/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
Norm(
identifier="11.02.05",
title="Verwijdering van bedrijfsmiddelen",
text="Apparatuur, informatie en software behoren niet van de locatie te worden meegenomen zonder "
"voorafgaande goedkeuring.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.05/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
Norm(
identifier="11.02.06",
title="Beveiliging van apparatuur en bedrijfsmiddelen buiten het terrein",
text="Bedrijfsmiddelen die zich buiten het terrein bevinden, behoren te worden beveiligd, waarbij "
"rekening behoort te worden gehouden met de verschillende risico's van werken buiten het terrein "
"van de organisatie.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.06/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
Norm(
identifier="11.02.07",
title="Veilig verwijderen of hergebruiken van apparatuur",
text="Alle onderdelen van de apparatuur die opslagmedia bevatten, behoren te worden geverifieerd om te "
"waarborgen dat gevoelige gegevens en in licentie gegeven software voorafgaand aan verwijdering of "
"hergebruik zijn verwijderd of betrouwbaar veilig zijn overschreven.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.07/01",
title="",
text="- conform norm - (zie 08.03.02)",
bbn=1,
),
],
),
Norm(
identifier="11.02.08",
title="Onbeheerde gebruikersapparatuur",
text="Gebruikers moeten ervoor zorgen dat onbeheerde apparatuur voldoende beschermd is.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.08/01",
title="",
text="- conform norm -",
bbn=1,
),
],
),
Norm(
identifier="11.02.09",
title="'Clear desk'- en 'clear screen'-beleid",
text="Er behoort een 'clear desk'-beleid voor papieren documenten en verwijderbare opslagmedia en een "
"'clear screen'-beleid voor informatieverwerkende faciliteiten te worden ingesteld.",
bbn=1,
fragments=[
Verifier(
identifier="11.02.09/01",
title="",
text="Een onbeheerde werkplek in een ongecontroleerde omgeving is altijd vergrendeld.",
bbn=2,
),
Verifier(
identifier="11.02.09/02",
title="",
text="Informatie wordt automatisch ontoegankelijk gemaakt met bijvoorbeeld een screensaver na een "
"inactiviteit van maximaal 15 minuten.",
bbn=2,
),
Verifier(
identifier="11.02.09/03",
title="",
text="Sessies op remote desktops worden op het remote platform vergrendeld na 15 minuten. Het "
"overnemen van sessies op remote desktops op een ander client apparaat is alleen mogelijk "
"via dezelfde beveiligde loginprocedure als waarmee de sessie is gecreëerd.",
bbn=2,
),
Verifier(
identifier="11.02.09/04",
title="",
text="Bij het gebruik van een chipcardtoken voor toegang tot systemen wordt bij het verwijderen "
"van de token de toegangsbeveiligingslock automatisch geactiveerd.",
bbn=2,
),
],
),
],
)
CH11 = Chapter(
identifier="11",
title="Fysieke beveiliging en beveiliging van de omgeving",
fragments=[
S1101,
S1102,
]
)
|
py | b41450c9b9891b3b05f1ae7e321188e9f25266b1 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-04-19 13:41
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('organisation', '0004_auto_20160419_1322'),
]
operations = [
migrations.AddField(
model_name='company',
name='service_areas',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=5), default=[], help_text='Postal codes of the areas in which this company provides service', size=None),
preserve_default=False,
),
]
|
py | b41450f55ac21e54ba68edae82fee10d6f2dcfbf | # pylint: skip-file
# tell pylint to skip this file
from setuptools import setup, find_packages
def long_description():
with open("README.md", "r") as fh:
long_desc = fh.read()
return long_desc
def requirements():
with open("requirements.txt", "r") as fh:
reqs = [line.strip() for line in fh]
return reqs
setup(
name='little-finger',
    version='0.0.14',  # version number
    description='tool pkg.',  # short package description
    author='yromeMfOtuO',  # author
    author_email='[email protected]',  # author's email
    url='',  # project URL; usually the git repo address, but any reachable URL works
    packages=find_packages(),  # list of Python packages to include; find_packages() collects them automatically
    long_description=long_description(),  # project description string, defined above
    long_description_content_type="text/markdown",  # format of the README description; Markdown here, HTML is likely supported too
    license="GPLv3",  # open-source license
    # Look these classifiers up on the official site, or copy these directly.
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: OS Independent"],
    python_requires='>=3.6',  # Python version constraint
    # constraints on other dependencies
install_requires=requirements()
)
|
py | b414511eaf269cb7ae14783b401358c43d3dd895 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.13.5
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import kubernetes_asyncio.client
from kubernetes_asyncio.client.models.v1_pod_dns_config_option import V1PodDNSConfigOption # noqa: E501
from kubernetes_asyncio.client.rest import ApiException
class TestV1PodDNSConfigOption(unittest.TestCase):
"""V1PodDNSConfigOption unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1PodDNSConfigOption(self):
"""Test V1PodDNSConfigOption"""
# FIXME: construct object with mandatory attributes with example values
# model = kubernetes_asyncio.client.models.v1_pod_dns_config_option.V1PodDNSConfigOption() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
py | b41452b88398121c9bd7c19517d6ec0fafe72d09 | #!/usr/bin/env python3
# Copyright (c) 2015-2018 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.mininode import *
from test_framework.test_framework import StipendiumTestFramework
from test_framework.util import *
from time import *
'''
llmq-signing.py
Checks LLMQ signing sessions
'''
class LLMQSigningTest(StipendiumTestFramework):
def __init__(self):
super().__init__(6, 5, [], fast_dip3_enforcement=True)
def run_test(self):
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
self.mine_quorum()
id = "0000000000000000000000000000000000000000000000000000000000000001"
msgHash = "0000000000000000000000000000000000000000000000000000000000000002"
msgHashConflict = "0000000000000000000000000000000000000000000000000000000000000003"
def check_sigs(hasrecsigs, isconflicting1, isconflicting2):
for mn in self.mninfo:
if mn.node.quorum("hasrecsig", 100, id, msgHash) != hasrecsigs:
return False
if mn.node.quorum("isconflicting", 100, id, msgHash) != isconflicting1:
return False
if mn.node.quorum("isconflicting", 100, id, msgHashConflict) != isconflicting2:
return False
return True
def wait_for_sigs(hasrecsigs, isconflicting1, isconflicting2, timeout):
t = time()
while time() - t < timeout:
if check_sigs(hasrecsigs, isconflicting1, isconflicting2):
return
sleep(0.1)
raise AssertionError("wait_for_sigs timed out")
def assert_sigs_nochange(hasrecsigs, isconflicting1, isconflicting2, timeout):
t = time()
while time() - t < timeout:
assert(check_sigs(hasrecsigs, isconflicting1, isconflicting2))
sleep(0.1)
# Initial state
wait_for_sigs(False, False, False, 1)
# Sign 2 shares, should not result in recovered sig
for i in range(2):
self.mninfo[i].node.quorum("sign", 100, id, msgHash)
assert_sigs_nochange(False, False, False, 3)
# Sign one more share, should result in recovered sig and conflict for msgHashConflict
self.mninfo[2].node.quorum("sign", 100, id, msgHash)
wait_for_sigs(True, False, True, 15)
# Mine one more quorum, so that we have 2 active ones, nothing should change
self.mine_quorum()
assert_sigs_nochange(True, False, True, 3)
        # Mine 2 more quorums, so that the one used for the recovered sig becomes inactive; nothing should change
self.mine_quorum()
self.mine_quorum()
assert_sigs_nochange(True, False, True, 3)
# fast forward 6.5 days, recovered sig should still be valid
set_mocktime(get_mocktime() + int(60 * 60 * 24 * 6.5))
set_node_times(self.nodes, get_mocktime())
# Cleanup starts every 5 seconds
wait_for_sigs(True, False, True, 15)
# fast forward 1 day, recovered sig should not be valid anymore
set_mocktime(get_mocktime() + int(60 * 60 * 24 * 1))
set_node_times(self.nodes, get_mocktime())
# Cleanup starts every 5 seconds
wait_for_sigs(False, False, False, 15)
for i in range(2):
self.mninfo[i].node.quorum("sign", 100, id, msgHashConflict)
for i in range(2, 5):
self.mninfo[i].node.quorum("sign", 100, id, msgHash)
wait_for_sigs(True, False, True, 15)
if __name__ == '__main__':
LLMQSigningTest().main()
|
py | b4145357915bf2467620fdade7a7353698462742 | from django.db import models
from django.template.defaultfilters import truncatechars
from tinymce.models import HTMLField
from django.utils import timezone
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericRelation
MEDIA_ENTITY_MODELS = models.Q(
app_label='information', model='image') | models.Q(
app_label='information', model='audio') | models.Q(
app_label='information', model='video')
class MediaEntity(models.Model):
gallery = models.ForeignKey(
'Gallery',
null=True,
blank=True,
related_name='entities',
on_delete=models.CASCADE)
limit = models.Q(app_label='information', model='image')
content_type = models.ForeignKey(
ContentType,
on_delete=models.CASCADE,
limit_choices_to=MEDIA_ENTITY_MODELS)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class MediaBaseModel(models.Model):
phaidra_id = models.CharField(
max_length=32, help_text="For example: 'o:906180'")
caption_de = HTMLField(null=True, blank=True)
caption_en = HTMLField(null=True, blank=True)
class Meta:
abstract = True
def __str__(self):
return str(self.id)
@property
def short_caption_de(self):
return truncatechars(self.caption_de, 100)
@property
def short_caption_en(self):
return truncatechars(self.caption_en, 100)
class Image(MediaBaseModel):
media_entity = GenericRelation(MediaEntity)
class Audio(MediaBaseModel):
thumbnail = models.ImageField(upload_to='audio_thumbnails/')
media_entity = GenericRelation(MediaEntity)
class Video(MediaBaseModel):
stream_id = models.CharField(
max_length=128, help_text="For example: 'c47c9323f8628c10527cdd9748173e5acc4d8c9c'")
thumbnail = models.ImageField(upload_to='video_thumbnails/')
media_entity = GenericRelation(MediaEntity)
class Gallery(models.Model):
def __str__(self):
return str(self.id)
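# Illustrative sketch (not part of the original module): attaching an Image to
# a Gallery slot through the generic MediaEntity; passing content_object fills
# content_type and object_id automatically.
#
#   img = Image.objects.create(phaidra_id='o:906180')
#   gallery = Gallery.objects.create()
#   MediaEntity.objects.create(gallery=gallery, content_object=img)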
class Information(models.Model):
updated = models.DateTimeField(null=True, blank=True)
created = models.DateTimeField(null=True, blank=True)
title_de = models.TextField(null=True, blank=True)
title_en = models.TextField(null=True, blank=True)
content_de = HTMLField(null=True, blank=True)
content_en = HTMLField(null=True, blank=True)
media_images = models.ManyToManyField(Image, blank=True)
media_audios = models.ManyToManyField(Audio, blank=True)
media_videos = models.ManyToManyField(Video, blank=True)
media_galleries = models.ManyToManyField(Gallery, blank=True)
def __str__(self):
return ('{} - {}').format(self.id, self.title_de)
@property
def short_content_de(self):
return truncatechars(self.content_de, 100)
@property
def short_content_en(self):
return truncatechars(self.content_en, 100)
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if not self.id:
self.created = timezone.now()
self.updated = timezone.now()
return super(Information, self).save(*args, **kwargs) |
py | b41453c2e31d61e6043c1f291c96751f4e4be674 | #-----------------------------------------------------------------------------
# Copyright (c) 2005-2021, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
"""
Various classes and functions to provide some backwards-compatibility
with previous versions of Python onward.
"""
import os
import platform
import site
import subprocess
import sys
import errno
import importlib.machinery
from PyInstaller.exceptions import ExecCommandFailed
# Copied from https://docs.python.org/3/library/platform.html#cross-platform.
is_64bits = sys.maxsize > 2**32
# Distinguish specific code for various Python versions.
# Variables 'is_pyXY' mean that Python X.Y and up is supported.
# Keep even unsupported versions here to keep 3rd-party hooks working.
is_py35 = sys.version_info >= (3, 5)
is_py36 = sys.version_info >= (3, 6)
is_py37 = sys.version_info >= (3, 7)
is_py38 = sys.version_info >= (3, 8)
is_py39 = sys.version_info >= (3, 9)
is_win = sys.platform.startswith('win')
is_win_10 = is_win and (platform.win32_ver()[0] == '10')
is_cygwin = sys.platform == 'cygwin'
is_darwin = sys.platform == 'darwin' # Mac OS X
# Unix platforms
is_linux = sys.platform.startswith('linux')
is_solar = sys.platform.startswith('sun') # Solaris
is_aix = sys.platform.startswith('aix')
is_freebsd = sys.platform.startswith('freebsd')
is_openbsd = sys.platform.startswith('openbsd')
is_hpux = sys.platform.startswith('hp-ux')
# Some code parts are similar to several unix platforms
# (e.g. Linux, Solaris, AIX)
# Mac OS X is not considered as unix since there are many
# platform specific details for Mac in PyInstaller.
is_unix = is_linux or is_solar or is_aix or is_freebsd or is_hpux or is_openbsd
# The file name of the dynamic Python library differs across platforms.
# TODO: When removing support for is_py37, the "m" variants can be
# removed, see
# <https://docs.python.org/3/whatsnew/3.8.html#build-and-c-api-changes>
_pyver = sys.version_info[:2]
if is_win or is_cygwin:
PYDYLIB_NAMES = {'python%d%d.dll' % _pyver,
'libpython%d%d.dll' % _pyver,
'libpython%d%dm.dll' % _pyver,
'libpython%d.%d.dll' % _pyver,
'libpython%d.%dm.dll' % _pyver} # For MSYS2 environment
elif is_darwin:
# libpython%d.%dm.dylib for Conda virtual environment installations
PYDYLIB_NAMES = {'Python', '.Python',
'Python%d' % _pyver[0],
'libpython%d.%d.dylib' % _pyver,
'libpython%d.%dm.dylib' % _pyver}
elif is_aix:
# Shared libs on AIX may be archives with shared object members,
# hence the ".a" suffix. However, starting with python 2.7.11
# libpython?.?.so and Python3 libpython?.?m.so files are produced.
PYDYLIB_NAMES = {'libpython%d.%d.a' % _pyver,
'libpython%d.%dm.a' % _pyver,
'libpython%d.%d.so' % _pyver,
'libpython%d.%dm.so' % _pyver}
elif is_freebsd:
PYDYLIB_NAMES = {'libpython%d.%d.so.1' % _pyver,
'libpython%d.%dm.so.1' % _pyver,
'libpython%d.%d.so.1.0' % _pyver,
'libpython%d.%dm.so.1.0' % _pyver}
elif is_openbsd:
PYDYLIB_NAMES = {'libpython%d.%d.so.0.0' % _pyver,
'libpython%d.%dm.so.0.0' % _pyver}
elif is_hpux:
PYDYLIB_NAMES = {'libpython%d.%d.so' % _pyver}
elif is_unix:
# Other *nix platforms.
# Python 2 .so library on Linux is: libpython2.7.so.1.0
# Python 3 .so library on Linux is: libpython3.2mu.so.1.0, libpython3.3m.so.1.0
PYDYLIB_NAMES = {'libpython%d.%d.so.1.0' % _pyver,
'libpython%d.%dm.so.1.0' % _pyver,
'libpython%d.%dmu.so.1.0' % _pyver,
'libpython%d.%dm.so' % _pyver,
'libpython%d.%d.so' % _pyver}
else:
raise SystemExit('Your platform is not yet supported. '
'Please define constant PYDYLIB_NAMES for your platform.')
# Function with which to open files.
open_file = open
text_read_mode = 'r'
# In Python 3 built-in function raw_input() was renamed to just 'input()'.
stdin_input = input
# Safe repr that always outputs ascii
safe_repr = ascii
# String types to replace `isinstance(foo, str)`
# Use `isinstance(foo, string_types)` instead.
string_types = str
# Correct extension ending: 'c' or 'o'
if __debug__:
PYCO = 'c'
else:
PYCO = 'o'
# Options for python interpreter when invoked in a subprocess.
if __debug__:
# Python started *without* -O
_PYOPTS = ''
else:
_PYOPTS = '-O'
# In a virtual environment created by virtualenv (github.com/pypa/virtualenv)
# there exists sys.real_prefix with the path to the base Python
# installation from which the virtual environment was created.
# This is true regardless of
# the version of Python used to execute the virtualenv command.
#
# In a virtual environment created by the venv module available in
# the Python standard lib, there exists sys.base_prefix with the path to
# the base implementation. This does not exist in
# a virtual environment created by virtualenv.
#
# The following code creates compat.is_venv and compat.is_virtualenv
# that are True when running in a virtual environment, and also
# compat.base_prefix with the path to the
# base Python installation.
base_prefix = os.path.abspath(
getattr(sys, 'real_prefix', getattr(sys, 'base_prefix', sys.prefix))
)
# Ensure `base_prefix` is not containing any relative parts.
is_venv = is_virtualenv = base_prefix != os.path.abspath(sys.prefix)
# Conda environments sometimes have different paths or apply patches to
# packages that can affect how a hook or package should access resources.
# Method for determining conda taken from:
# https://stackoverflow.com/questions/47610844#47610844
is_conda = os.path.isdir(os.path.join(base_prefix, 'conda-meta'))
# Similar to ``is_conda`` but is ``False`` using another ``venv``-like manager
# on top. In this case, no packages encountered will be conda packages meaning
# that the default non-conda behaviour is generally desired from PyInstaller.
is_pure_conda = os.path.isdir(os.path.join(sys.prefix, 'conda-meta'))
# Full path to python interpreter.
python_executable = getattr(sys, '_base_executable', sys.executable)
# Is this Python from Microsoft App Store (Windows only)?
# Python from Microsoft App Store has executable pointing at empty shims.
is_ms_app_store = is_win and os.path.getsize(python_executable) == 0
if is_ms_app_store:
# Locate the actual executable inside base_prefix.
python_executable = os.path.join(
base_prefix, os.path.basename(python_executable))
if not os.path.exists(python_executable):
raise SystemExit('PyInstaller cannot locate real python executable '
'belonging to Python from Microsoft App Store!')
# In Python 3.4 module 'imp' is deprecated and there is another way how
# to obtain magic value.
import importlib.util
BYTECODE_MAGIC = importlib.util.MAGIC_NUMBER
# List of suffixes for Python C extension modules.
from importlib.machinery import EXTENSION_SUFFIXES, all_suffixes
ALL_SUFFIXES = all_suffixes()
# In Python 3 'Tkinter' has been made lowercase - 'tkinter'.
# TODO: remove once all references are gone from both pyinstaller and
# pyinstaller-hooks-contrib!
modname_tkinter = 'tkinter'
# On Windows we require pywin32-ctypes
# -> all pyinstaller modules should use win32api from PyInstaller.compat to
# ensure that it can work on MSYS2 (which requires pywin32-ctypes)
if is_win:
try:
from win32ctypes.pywin32 import pywintypes # noqa: F401
from win32ctypes.pywin32 import win32api
except ImportError:
# This environment variable is set by setup.py
# - It's not an error for pywin32 to not be installed at that point
if not os.environ.get('PYINSTALLER_NO_PYWIN32_FAILURE'):
raise SystemExit('PyInstaller cannot check for assembly dependencies.\n'
'Please install pywin32-ctypes.\n\n'
'pip install pywin32-ctypes\n')
# macOS's platform.architecture() can be buggy, so we do this manually here.
# Based off the python documentation:
# https://docs.python.org/3/library/platform.html#platform.architecture
architecture = '64bit' if sys.maxsize > 2**32 and is_darwin else \
'32bit' if is_darwin else platform.architecture()[0]
# Cygwin needs special handling, because platform.system() contains
# identifiers such as MSYS_NT-10.0-19042 and CYGWIN_NT-10.0-19042 that
# do not fit PyInstaller's OS naming scheme. Explicitly set `system` to
# 'Cygwin'.
if is_cygwin:
system = 'Cygwin'
else:
system = platform.system()
# Machine suffix for bootloader.
# PyInstaller is reported to work on ARM architecture, so for that
# case we need an extra identifying specifier on the bootloader
# name string, like: Linux-32bit-arm, over normal Linux-32bit
machine = 'arm' if platform.machine().startswith('arm') else \
'aarch' if platform.machine().startswith('aarch') else \
'sw_64' if platform.machine().startswith('sw_64') else None
# Set and get environment variables does not handle unicode strings correctly
# on Windows.
# Acting on os.environ instead of using getenv()/setenv()/unsetenv(),
# as suggested in <http://docs.python.org/library/os.html#os.environ>:
# "Calling putenv() directly does not change os.environ, so it's
# better to modify os.environ." (Same for unsetenv.)
def getenv(name, default=None):
"""
Returns unicode string containing value of environment variable 'name'.
"""
return os.environ.get(name, default)
def setenv(name, value):
"""
Accepts unicode string and set it as environment variable 'name' containing
value 'value'.
"""
os.environ[name] = value
def unsetenv(name):
"""
Delete the environment variable 'name'.
"""
# Some platforms (e.g. AIX) do not support `os.unsetenv()` and
# thus `del os.environ[name]` has no effect onto the real
# environment. For this case we set the value to the empty string.
os.environ[name] = ""
del os.environ[name]
# Exec commands in subprocesses.
def exec_command(*cmdargs, **kwargs):
"""
Run the command specified by the passed positional arguments, optionally
configured by the passed keyword arguments.
.. DANGER::
**Ignore this function's return value** -- unless this command's standard
output contains _only_ pathnames, in which case this function returns the
correct filesystem-encoded string expected by PyInstaller. In all other
cases, this function's return value is _not_ safely usable. Consider
calling the general-purpose `exec_command_stdout()` function instead.
For backward compatibility, this function's return value non-portably
depends on the current Python version and passed keyword arguments:
* Under Python 2.7, this value is an **encoded `str` string** rather than
a decoded `unicode` string. This value _cannot_ be safely used for any
purpose (e.g., string manipulation or parsing), except to be passed
directly to another non-Python command.
* Under Python 3.x, this value is a **decoded `str` string**. However,
even this value is _not_ necessarily safely usable:
* If the `encoding` parameter is passed, this value is guaranteed to be
safely usable.
* Else, this value _cannot_ be safely used for any purpose (e.g.,
string manipulation or parsing), except to be passed directly to
another non-Python command. Why? Because this value has been decoded
with the encoding specified by `sys.getfilesystemencoding()`, the
encoding used by `os.fsencode()` and `os.fsdecode()` to convert from
platform-agnostic to platform-specific pathnames. This is _not_
necessarily the encoding with which this command's standard output
was encoded. Cue edge-case decoding exceptions.
Parameters
----------
cmdargs :
Variadic list whose:
1. Mandatory first element is the absolute path, relative path,
or basename in the current `${PATH}` of the command to run.
1. Optional remaining elements are arguments to pass to this command.
encoding : str, optional
Optional keyword argument specifying the encoding with which to decode
this command's standard output under Python 3. As this function's return
value should be ignored, this argument should _never_ be passed.
__raise_ENOENT__ : boolean, optional
Optional keyword argument to simply raise the exception if the
executing the command fails since to the command is not found. This is
useful to checking id a command exists.
All remaining keyword arguments are passed as is to the `subprocess.Popen()`
constructor.
Returns
----------
str
Ignore this value. See discussion above.
"""
encoding = kwargs.pop('encoding', None)
raise_ENOENT = kwargs.pop('__raise_ENOENT__', None)
try:
proc = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, **kwargs)
out = proc.communicate(timeout=60)[0]
except OSError as e:
if raise_ENOENT and e.errno == errno.ENOENT:
raise
print('--' * 20, file=sys.stderr)
print("Error running '%s':" % " ".join(cmdargs), file=sys.stderr)
print(e, file=sys.stderr)
print('--' * 20, file=sys.stderr)
raise ExecCommandFailed("Error: Executing command failed!") from e
except subprocess.TimeoutExpired:
proc.kill()
raise
# stdout/stderr are returned as a byte array NOT as string.
# Thus we need to convert that to proper encoding.
try:
if encoding:
out = out.decode(encoding)
else:
# If no encoding is given, assume we're reading filenames from
# stdout only because it's the common case.
out = os.fsdecode(out)
except UnicodeDecodeError as e:
# The sub-process used a different encoding,
# provide more information to ease debugging.
print('--' * 20, file=sys.stderr)
print(str(e), file=sys.stderr)
print('These are the bytes around the offending byte:',
file=sys.stderr)
print('--' * 20, file=sys.stderr)
raise
return out
def exec_command_rc(*cmdargs, **kwargs):
"""
Return the exit code of the command specified by the passed positional
arguments, optionally configured by the passed keyword arguments.
Parameters
----------
cmdargs : list
Variadic list whose:
1. Mandatory first element is the absolute path, relative path,
or basename in the current `${PATH}` of the command to run.
1. Optional remaining elements are arguments to pass to this command.
All keyword arguments are passed as is to the `subprocess.call()` function.
Returns
----------
int
This command's exit code as an unsigned byte in the range `[0, 255]`,
where 0 signifies success and all other values failure.
"""
# 'encoding' keyword is not supported for 'subprocess.call'.
# Remove it thus from kwargs.
if 'encoding' in kwargs:
kwargs.pop('encoding')
return subprocess.call(cmdargs, **kwargs)
def exec_command_stdout(*command_args, **kwargs):
"""
Capture and return the standard output of the command specified by the
passed positional arguments, optionally configured by the passed keyword
arguments.
Unlike the legacy `exec_command()` and `exec_command_all()` functions, this
modern function is explicitly designed for cross-platform portability. The
return value may be safely used for any purpose, including string
manipulation and parsing.
.. NOTE::
If this command's standard output contains _only_ pathnames, this
function does _not_ return the correct filesystem-encoded string expected
by PyInstaller. If this is the case, consider calling the
filesystem-specific `exec_command()` function instead.
Parameters
----------
cmdargs : list
Variadic list whose:
1. Mandatory first element is the absolute path, relative path,
or basename in the current `${PATH}` of the command to run.
1. Optional remaining elements are arguments to pass to this command.
encoding : str, optional
Optional name of the encoding with which to decode this command's
standard output (e.g., `utf8`), passed as a keyword argument. If
unpassed , this output will be decoded in a portable manner specific to
to the current platform, shell environment, and system settings with
Python's built-in `universal_newlines` functionality.
All remaining keyword arguments are passed as is to the
`subprocess.check_output()` function.
Returns
----------
str
Unicode string of this command's standard output decoded according to
the "encoding" keyword argument.
"""
# Value of the passed "encoding" parameter, defaulting to None.
encoding = kwargs.pop('encoding', None)
# If no encoding was specified, the current locale is defaulted to. Else, an
# encoding was specified. To ensure this encoding is respected, the
# "universal_newlines" option is disabled if also passed. Nice, eh?
kwargs['universal_newlines'] = encoding is None
# Standard output captured from this command as a decoded Unicode string if
# "universal_newlines" is enabled or an encoded byte array otherwise.
stdout = subprocess.check_output(command_args, **kwargs)
# Return a Unicode string, decoded from this encoded byte array if needed.
return stdout if encoding is None else stdout.decode(encoding)
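# Illustrative sketch (not part of the original module); assumes a 'python'
# executable is on ${PATH}:
#
#   >>> exec_command_stdout('python', '-c', 'print("hi")')
#   'hi\n'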
def exec_command_all(*cmdargs, **kwargs):
"""
Run the command specified by the passed positional arguments, optionally
configured by the passed keyword arguments.
.. DANGER::
**Ignore this function's return value.** If this command's standard
output consists solely of pathnames, consider calling `exec_command()`;
else, consider calling `exec_command_stdout()`.
Parameters
----------
cmdargs : list
Variadic list whose:
1. Mandatory first element is the absolute path, relative path,
or basename in the current `${PATH}` of the command to run.
1. Optional remaining elements are arguments to pass to this command.
encoding : str, optional
Optional keyword argument specifying the encoding with which to decode
this command's standard output. As this function's return
value should be ignored, this argument should _never_ be passed.
All remaining keyword arguments are passed as is to the `subprocess.Popen()`
constructor.
Returns
----------
(int, str, str)
Ignore this 3-element tuple `(exit_code, stdout, stderr)`. See the
`exec_command()` function for discussion.
"""
encoding = kwargs.pop('encoding', None)
proc = subprocess.Popen(cmdargs, bufsize=-1, # Default OS buffer size.
stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
# Waits for subprocess to complete.
try:
out, err = proc.communicate(timeout=60)
except subprocess.TimeoutExpired:
proc.kill()
raise
# stdout/stderr are returned as a byte array NOT as string.
# Thus we need to convert that to proper encoding.
try:
if encoding:
out = out.decode(encoding)
err = err.decode(encoding)
else:
# If no encoding is given, assume we're reading filenames from
# stdout only because it's the common case.
out = os.fsdecode(out)
err = os.fsdecode(err)
except UnicodeDecodeError as e:
# The sub-process used a different encoding,
# provide more information to ease debugging.
print('--' * 20, file=sys.stderr)
print(str(e), file=sys.stderr)
print('These are the bytes around the offending byte:',
file=sys.stderr)
print('--' * 20, file=sys.stderr)
raise
return proc.returncode, out, err
def __wrap_python(args, kwargs):
cmdargs = [sys.executable]
    # Mac OS X supports universal binaries (binaries for multiple architectures).
# We need to ensure that subprocess binaries are running for the same
# architecture as python executable.
# It is necessary to run binaries with 'arch' command.
if is_darwin:
if architecture == '64bit':
if machine == 'arm':
py_prefix = ['arch', '-arm64'] # Apple M1
else:
py_prefix = ['arch', '-x86_64'] # Intel
elif architecture == '32bit':
py_prefix = ['arch', '-i386']
else:
py_prefix = []
# Since OS X 10.11 the environment variable DYLD_LIBRARY_PATH is no
# more inherited by child processes, so we proactively propagate
# the current value using the `-e` option of the `arch` command.
if 'DYLD_LIBRARY_PATH' in os.environ:
path = os.environ['DYLD_LIBRARY_PATH']
py_prefix += ['-e', 'DYLD_LIBRARY_PATH=%s' % path]
cmdargs = py_prefix + cmdargs
if _PYOPTS:
cmdargs.append(_PYOPTS)
cmdargs.extend(args)
env = kwargs.get('env')
if env is None:
env = dict(**os.environ)
# Ensure python 3 subprocess writes 'str' as utf-8
env['PYTHONIOENCODING'] = 'UTF-8'
# ... and ensure we read output as utf-8
kwargs['encoding'] = 'UTF-8'
return cmdargs, kwargs
def exec_python(*args, **kwargs):
"""
Wrap running python script in a subprocess.
Return stdout of the invoked command.
"""
cmdargs, kwargs = __wrap_python(args, kwargs)
return exec_command(*cmdargs, **kwargs)
def exec_python_rc(*args, **kwargs):
"""
Wrap running python script in a subprocess.
Return exit code of the invoked command.
"""
cmdargs, kwargs = __wrap_python(args, kwargs)
return exec_command_rc(*cmdargs, **kwargs)
## Path handling.
def expand_path(path):
"""
Replace initial tilde '~' in path with user's home directory and also
expand environment variables (${VARNAME} - Unix, %VARNAME% - Windows).
"""
return os.path.expandvars(os.path.expanduser(path))
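# Illustrative sketch (not part of the original module); on a POSIX system
# with HOME=/home/bob and a hypothetical PYI_DIST=dist in the environment:
#
#   >>> expand_path('~/build/${PYI_DIST}')
#   '/home/bob/build/dist'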
# Site-packages functions - use native function if available.
def getsitepackages(prefixes=None):
"""Returns a list containing all global site-packages directories.
For each directory present in ``prefixes`` (or the global ``PREFIXES``),
this function will find its `site-packages` subdirectory depending on the
system environment, and will return a list of full paths.
"""
# This implementation was copied from the ``site`` module, python 3.7.3.
sitepackages = []
seen = set()
if prefixes is None:
prefixes = [sys.prefix, sys.exec_prefix]
for prefix in prefixes:
if not prefix or prefix in seen:
continue
seen.add(prefix)
if os.sep == '/':
sitepackages.append(
os.path.join(
prefix, "lib", "python%d.%d" % sys.version_info[:2],
"site-packages"
)
)
else:
sitepackages.append(prefix)
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
return sitepackages
# Backported for virtualenv.
# Module 'site' in virtualenv might not have this attribute.
getsitepackages = getattr(site, 'getsitepackages', getsitepackages)
# Wrapper to load a module from a Python source file.
# This function loads import hooks when processing them.
def importlib_load_source(name, pathname):
# Import module from a file.
mod_loader = importlib.machinery.SourceFileLoader(name, pathname)
return mod_loader.load_module()
# Patterns of module names that should be bundled into the base_library.zip.
PY3_BASE_MODULES = {
# Python 3.x
# These modules are direct or indirect dependencies of encodings.* modules.
# encodings modules must be recursively included to set the I/O encoding during
# python startup.
'_bootlocale',
'_collections_abc',
'_weakrefset',
'abc',
'codecs',
'collections',
'copyreg',
'encodings',
'enum',
'functools',
'io',
'heapq',
'keyword',
'linecache',
'locale',
'operator',
're',
'reprlib',
'sre_compile',
'sre_constants',
'sre_parse',
'traceback', # for startup errors
'types',
'weakref',
'warnings',
}
# Object types of Pure Python modules in modulegraph dependency graph.
# Pure Python modules have code object (attribute co_code).
PURE_PYTHON_MODULE_TYPES = {
'SourceModule',
'CompiledModule',
'Package',
'NamespacePackage',
# Deprecated.
# TODO Could these module types be removed?
'FlatPackage',
'ArchiveModule',
}
# Object types of special Python modules (built-in, run-time, namespace package)
# in modulegraph dependency graph that do not have code object.
SPECIAL_MODULE_TYPES = {
'AliasNode',
'BuiltinModule',
'RuntimeModule',
'RuntimePackage',
# PyInstaller handles scripts differently and not as standard Python modules.
'Script',
}
# Object types of Binary Python modules (extensions, etc) in modulegraph
# dependency graph.
BINARY_MODULE_TYPES = {
'Extension',
'ExtensionPackage',
}
# Object types of valid Python modules in modulegraph dependency graph.
VALID_MODULE_TYPES = PURE_PYTHON_MODULE_TYPES | SPECIAL_MODULE_TYPES | BINARY_MODULE_TYPES
# Object types of bad/missing/invalid Python modules in modulegraph
# dependency graph.
# TODO Should be 'Invalid' module types also in the 'MISSING' set?
BAD_MODULE_TYPES = {
'BadModule',
'ExcludedModule',
'InvalidSourceModule',
'InvalidCompiledModule',
'MissingModule',
# Runtime modules and packages are technically valid rather than bad, but
# exist only in-memory rather than on-disk (typically due to
# pre_safe_import_module() hooks) and hence cannot be physically frozen.
# For simplicity, these nodes are categorized as bad rather than valid.
'RuntimeModule',
'RuntimePackage',
}
ALL_MODULE_TYPES = VALID_MODULE_TYPES | BAD_MODULE_TYPES
# TODO Review this mapping to TOC, remove useless entries.
# Dict to map ModuleGraph node types to TOC typecodes
MODULE_TYPES_TO_TOC_DICT = {
# Pure modules.
'AliasNode': 'PYMODULE',
'Script': 'PYSOURCE',
'SourceModule': 'PYMODULE',
'CompiledModule': 'PYMODULE',
'Package': 'PYMODULE',
'FlatPackage': 'PYMODULE',
'ArchiveModule': 'PYMODULE',
# Binary modules.
'Extension': 'EXTENSION',
'ExtensionPackage': 'EXTENSION',
# Special valid modules.
'BuiltinModule': 'BUILTIN',
'NamespacePackage': 'PYMODULE',
# Bad modules.
'BadModule': 'bad',
'ExcludedModule': 'excluded',
'InvalidSourceModule': 'invalid',
'InvalidCompiledModule': 'invalid',
'MissingModule': 'missing',
'RuntimeModule': 'runtime',
'RuntimePackage': 'runtime',
# Other.
'does not occur': 'BINARY',
}
def check_requirements():
"""
Verify that all requirements to run PyInstaller are met.
Fail hard if any requirement is not met.
"""
# Fail hard if Python does not have minimum required version
if sys.version_info < (3, 6):
        raise EnvironmentError('PyInstaller requires Python 3.6 or newer.')
|
py | b41453ecd0dff86b09da462690cdf6cfe4353c91 | import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
# Parameters and DataLoaders
input_size = 5
output_size = 2
batch_size = 30
data_size = 100
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class RandomDataset(Dataset):
def __init__(self, size, length):
self.len = length
self.data = torch.randn(length, size)
def __getitem__(self, index):
return self.data[index]
def __len__(self):
return self.len
rand_loader = DataLoader(dataset=RandomDataset(input_size, data_size),
batch_size=batch_size, shuffle=True)
class Model(nn.Module):
# Our model
def __init__(self, input_size, output_size):
super(Model, self).__init__()
self.fc = nn.Linear(input_size, output_size)
def forward(self, input):
output = self.fc(input)
print("\tIn Model: input size", input.size(),
"output size", output.size())
return output
model = Model(input_size, output_size)
if torch.cuda.device_count() > 1:
print("Let's use", torch.cuda.device_count(), "GPUs!")
# dim = 0 [30, xxx] -> [10, ...], [10, ...], [10, ...] on 3 GPUs
model = nn.DataParallel(model)
model.to(device)
for data in rand_loader:
input = data.to(device)
output = model(input)
print("Outside: input size", input.size(),
"output_size", output.size())
|
py | b414556f9a67d035463f15ce25171efe51aaff72 | from pathlib import Path
def load_tests(loader, tests, pattern):
here = Path(__file__)
names = [f'tp2.{p.stem}' for p in here.parent.glob('*.py') if p != here]
tests.addTests(loader.loadTestsFromNames(names))
return tests
|
py | b41456225def48948bc1246ad3c153400087fb27 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bilibili/metadata/locale/locale.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='bilibili/metadata/locale/locale.proto',
package='bilibili.metadata.locale',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n%bilibili/metadata/locale/locale.proto\x12\x18\x62ilibili.metadata.locale\"=\n\tLocaleIds\x12\x10\n\x08language\x18\x01 \x01(\t\x12\x0e\n\x06script\x18\x02 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\"\x9a\x01\n\x06Locale\x12\x35\n\x08\x63_locale\x18\x01 \x01(\x0b\x32#.bilibili.metadata.locale.LocaleIds\x12\x35\n\x08s_locale\x18\x02 \x01(\x0b\x32#.bilibili.metadata.locale.LocaleIds\x12\x10\n\x08sim_code\x18\x03 \x01(\t\x12\x10\n\x08timezone\x18\x04 \x01(\tb\x06proto3'
)
_LOCALEIDS = _descriptor.Descriptor(
name='LocaleIds',
full_name='bilibili.metadata.locale.LocaleIds',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='language', full_name='bilibili.metadata.locale.LocaleIds.language', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='script', full_name='bilibili.metadata.locale.LocaleIds.script', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region', full_name='bilibili.metadata.locale.LocaleIds.region', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=67,
serialized_end=128,
)
_LOCALE = _descriptor.Descriptor(
name='Locale',
full_name='bilibili.metadata.locale.Locale',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='c_locale', full_name='bilibili.metadata.locale.Locale.c_locale', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='s_locale', full_name='bilibili.metadata.locale.Locale.s_locale', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sim_code', full_name='bilibili.metadata.locale.Locale.sim_code', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timezone', full_name='bilibili.metadata.locale.Locale.timezone', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=131,
serialized_end=285,
)
_LOCALE.fields_by_name['c_locale'].message_type = _LOCALEIDS
_LOCALE.fields_by_name['s_locale'].message_type = _LOCALEIDS
DESCRIPTOR.message_types_by_name['LocaleIds'] = _LOCALEIDS
DESCRIPTOR.message_types_by_name['Locale'] = _LOCALE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
LocaleIds = _reflection.GeneratedProtocolMessageType('LocaleIds', (_message.Message,), {
'DESCRIPTOR' : _LOCALEIDS,
'__module__' : 'bilibili.metadata.locale.locale_pb2'
# @@protoc_insertion_point(class_scope:bilibili.metadata.locale.LocaleIds)
})
_sym_db.RegisterMessage(LocaleIds)
Locale = _reflection.GeneratedProtocolMessageType('Locale', (_message.Message,), {
'DESCRIPTOR' : _LOCALE,
'__module__' : 'bilibili.metadata.locale.locale_pb2'
# @@protoc_insertion_point(class_scope:bilibili.metadata.locale.Locale)
})
_sym_db.RegisterMessage(Locale)
# @@protoc_insertion_point(module_scope)
|
py | b414566d53f98f3371b3a4415b958fa9f28de971 | import os
import time
import threading
def foo():
os.system("start crawl.bat")
print(time.ctime())
threading.Timer(3600, foo).start()
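# foo() re-arms a fresh one-shot Timer on every run, so crawl.bat is launched
# roughly once per hour until the process is killed.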
foo()
|
py | b41457f28e219c3748bcd42e814b1794b62f9466 | # Copyright (c) 2020, eQualit.ie inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from datetime import datetime, timedelta
from logging import Logger
from unittest import mock
from pyspark.sql import functions as F
from baskerville.models.request_set_cache import RequestSetSparkCache
from tests.unit.baskerville_tests.helpers.spark_testing_base import \
SQLTestCaseLatestSpark
class TestRequestSetSparkCache(SQLTestCaseLatestSpark):
def setUp(self):
super().setUp()
self.test_cache_config = {
'db_url': 'db_url',
'db_driver': 'db_driver',
'user': 'user',
'password': 'password',
}
self.test_table_name = 'test_table_name'
self.test_columns_to_keep = ['1', '2']
self.test_groupby_fields = ['a', 'b']
def test_instance(self):
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep
)
self.assertTrue(hasattr(rsc, 'cache'))
self.assertTrue(hasattr(rsc, 'cache_config'))
self.assertTrue(hasattr(rsc, 'table_name'))
self.assertTrue(hasattr(rsc, 'columns_to_keep'))
self.assertTrue(hasattr(rsc, 'expire_if_longer_than'))
self.assertTrue(hasattr(rsc, 'logger'))
self.assertTrue(hasattr(rsc, 'session_getter'))
self.assertTrue(hasattr(rsc, 'group_by_fields'))
self.assertTrue(rsc.cache is None)
self.assertTrue(isinstance(rsc.cache_config, dict))
self.assertTrue(isinstance(rsc.table_name, str))
self.assertTrue(isinstance(rsc.columns_to_keep, list))
self.assertTrue(isinstance(rsc.expire_if_longer_than, int))
self.assertTrue(isinstance(rsc.logger, Logger))
self.assertTrue(callable(rsc.session_getter))
self.assertTrue(isinstance(rsc.group_by_fields, tuple))
def test__get_load_q(self):
expected_q = f'''(SELECT *
from {self.test_table_name}
where id in (select max(id)
from {self.test_table_name}
group by {', '.join(self.test_groupby_fields)} )
) as {self.test_table_name}'''
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
q = rsc._get_load_q()
self.assertEqual(q, expected_q)
def test_load(self):
update_date = datetime.utcnow()
hosts = ('a', 'b')
extra_filters = {}
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._load = mock.MagicMock()
persist = rsc._load.return_value.persist
persist.return_value = {}
rsc.write = mock.MagicMock()
rsc.load(update_date, hosts, extra_filters)
rsc._load.assert_called_once_with(
update_date=update_date,
hosts=hosts,
extra_filters={})
persist.assert_not_called()
# persist.assert_called_once()
# self.assertTrue(isinstance(returned_rsc.cache, dict))
# self.assertTrue(isinstance(rsc.cache, dict))
@mock.patch('baskerville.models.request_set_cache.F.broadcast')
def test__load(self, mock_broadcast):
update_date = datetime.utcnow()
hosts = ('a', 'b')
extra_filters = (F.col('a') > 0)
expected_where = str((
((F.col("updated_at") >= F.lit(update_date)) |
(F.col("created_at") >= F.lit(update_date)))
& extra_filters
)._jc)
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc.session_getter = mock.MagicMock()
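# Stub each step of the spark builder chain:
# read.format(...).options(...).load().where(...).select(...).join(...)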
format = rsc.session_getter.return_value.read.format
options = format.return_value.options
load = options.return_value.load
where = load.return_value.where
select = where.return_value.select
join = select.return_value.join
mock_broadcast.return_value = hosts
rsc.load(update_date, hosts, extra_filters)
options.assert_called_once_with(
url=self.test_cache_config['db_url'],
driver=self.test_cache_config['db_driver'],
dbtable=rsc._get_load_q(),
user=self.test_cache_config['user'],
password=self.test_cache_config['password'],
fetchsize=1000,
max_connections=200,
)
select.assert_called_once_with(*self.test_columns_to_keep)
load.assert_called_once()
self.assertEqual(
str(where.call_args_list[0][0][0]._jc),
expected_where
)
join.assert_called_once_with(hosts, ['target'], 'leftsemi')
# cache.assert_called_once()
def test_append(self):
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
rsc._RequestSetSparkCache__cache.columns = []
union = rsc._RequestSetSparkCache__cache.union
union.return_value = '42'
df = mock.MagicMock()
df.select.return_value = 'hello world'
returned_rsc = rsc.append(df)
self.assertEqual(returned_rsc.cache, '42')
self.assertEqual(rsc.cache, '42')
df.select.assert_called_once_with(*self.test_columns_to_keep)
union.assert_called_once_with('hello world')
def test_refresh(self):
update_date = datetime.utcnow()
hosts = ['a', 'b']
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._load = mock.MagicMock()
rsc.append = mock.MagicMock()
rsc._load.return_value = 42
returned_rsc = rsc.refresh(update_date, hosts)
self.assertTrue(isinstance(returned_rsc, RequestSetSparkCache))
rsc._load.assert_called_once_with(
extra_filters=None, update_date=update_date, hosts=hosts)
rsc.append.assert_called_once_with(42)
rsc.append.return_value.deduplicate.assert_called_once()
def test_deduplicate(self):
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
drop_duplicates = rsc._RequestSetSparkCache__cache.dropDuplicates
cache = drop_duplicates.return_value.cache
cache.return_value = rsc._RequestSetSparkCache__cache
rsc.deduplicate()
drop_duplicates.assert_called_once()
def test_alias(self):
test_alias = 'test_alias'
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
alias = rsc._RequestSetSparkCache__cache.alias
rsc.alias(test_alias)
alias.assert_called_once_with(test_alias)
def test_show(self):
n = 10
t = True
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
show = rsc._RequestSetSparkCache__cache.show
rsc.show()
show.assert_called_once_with(20, False)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
show = rsc._RequestSetSparkCache__cache.show
rsc.show(n, t)
show.assert_called_once_with(n, t)
def test_select(self):
test_what = 42
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
select = rsc._RequestSetSparkCache__cache.select
rsc.select(test_what)
select.assert_called_once_with(test_what)
def test_count(self):
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
count = rsc._RequestSetSparkCache__cache.count
rsc.count()
count.assert_called_once()
def test_clean(self):
now = datetime.utcnow()
test_expire_longer_than = 60
update_date = now - timedelta(seconds=test_expire_longer_than)
filter_ = str((
(F.col("updated_at") >= F.lit(update_date)) |
(F.col("created_at") >= F.lit(update_date))
)._jc)
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
select = rsc._RequestSetSparkCache__cache.select
where = select.return_value.where
rsc.clean(now, test_expire_longer_than)
select.assert_called_once_with('*')
# spark columns cannot be compared by equality
self.assertEqual(
str(where.call_args_list[0][0][0]._jc),
filter_
)
def test_empty(self):
rsc = RequestSetSparkCache(
self.test_cache_config,
self.test_table_name,
self.test_columns_to_keep,
group_by_fields=self.test_groupby_fields
)
rsc._RequestSetSparkCache__cache = mock.MagicMock()
rsc.empty()
self.assertTrue(rsc.cache is None)
|
py | b41458961262e80f706395b3833749305dbf2027 | # Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import ssl
import time
from typing import List, Optional
from dateutil import parser
from elasticsearch import Elasticsearch
from elasticsearch.connection import create_ssl_context
from metadata.config.common import ConfigModel
from metadata.generated.schema.entity.data.chart import Chart
from metadata.generated.schema.entity.data.dashboard import Dashboard
from metadata.generated.schema.entity.data.database import Database
from metadata.generated.schema.entity.data.dbtmodel import DbtModel
from metadata.generated.schema.entity.data.pipeline import Pipeline, Task
from metadata.generated.schema.entity.data.table import Column, Table
from metadata.generated.schema.entity.data.topic import Topic
from metadata.generated.schema.entity.services.dashboardService import DashboardService
from metadata.generated.schema.entity.services.databaseService import DatabaseService
from metadata.generated.schema.entity.services.messagingService import MessagingService
from metadata.generated.schema.entity.services.pipelineService import PipelineService
from metadata.generated.schema.type import entityReference
from metadata.ingestion.api.common import Record, WorkflowContext
from metadata.ingestion.api.sink import Sink, SinkStatus
from metadata.ingestion.models.table_metadata import (
ChangeDescription,
DashboardESDocument,
DbtModelESDocument,
PipelineESDocument,
TableESDocument,
TopicESDocument,
)
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.ingestion.ometa.openmetadata_rest import MetadataServerConfig
from metadata.ingestion.sink.elasticsearch_constants import (
DASHBOARD_ELASTICSEARCH_INDEX_MAPPING,
DBT_ELASTICSEARCH_INDEX_MAPPING,
PIPELINE_ELASTICSEARCH_INDEX_MAPPING,
TABLE_ELASTICSEARCH_INDEX_MAPPING,
TOPIC_ELASTICSEARCH_INDEX_MAPPING,
)
logger = logging.getLogger(__name__)
class ElasticSearchConfig(ConfigModel):
es_host: str
es_port: int = 9200
es_username: Optional[str] = None
es_password: Optional[str] = None
index_tables: Optional[bool] = True
index_topics: Optional[bool] = True
index_dashboards: Optional[bool] = True
index_pipelines: Optional[bool] = True
index_dbt_models: Optional[bool] = True
table_index_name: str = "table_search_index"
topic_index_name: str = "topic_search_index"
dashboard_index_name: str = "dashboard_search_index"
pipeline_index_name: str = "pipeline_search_index"
dbt_index_name: str = "dbt_model_search_index"
scheme: str = "http"
use_ssl: bool = False
verify_certs: bool = False
timeout: int = 30
ca_certs: Optional[str] = None
class ElasticsearchSink(Sink):
""" """
DEFAULT_ELASTICSEARCH_INDEX_MAPPING = TABLE_ELASTICSEARCH_INDEX_MAPPING
@classmethod
def create(
cls, config_dict: dict, metadata_config_dict: dict, ctx: WorkflowContext
):
config = ElasticSearchConfig.parse_obj(config_dict)
metadata_config = MetadataServerConfig.parse_obj(metadata_config_dict)
return cls(ctx, config, metadata_config)
def __init__(
self,
ctx: WorkflowContext,
config: ElasticSearchConfig,
metadata_config: MetadataServerConfig,
) -> None:
self.config = config
self.metadata_config = metadata_config
self.ctx = ctx
self.status = SinkStatus()
self.metadata = OpenMetadata(self.metadata_config)
self.elasticsearch_doc_type = "_doc"
http_auth = None
if self.config.es_username:
http_auth = (self.config.es_username, self.config.es_password)
ssl_context = None
if self.config.scheme == "https" and not self.config.verify_certs:
ssl_context = create_ssl_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
self.elasticsearch_client = Elasticsearch(
[
{"host": self.config.es_host, "port": self.config.es_port},
],
http_auth=http_auth,
scheme=self.config.scheme,
use_ssl=self.config.use_ssl,
verify_certs=self.config.verify_certs,
ssl_context=ssl_context,
ca_certs=self.config.ca_certs,
)
if self.config.index_tables:
self._check_or_create_index(
self.config.table_index_name, TABLE_ELASTICSEARCH_INDEX_MAPPING
)
if self.config.index_topics:
self._check_or_create_index(
self.config.topic_index_name, TOPIC_ELASTICSEARCH_INDEX_MAPPING
)
if self.config.index_dashboards:
self._check_or_create_index(
self.config.dashboard_index_name, DASHBOARD_ELASTICSEARCH_INDEX_MAPPING
)
if self.config.index_pipelines:
self._check_or_create_index(
self.config.pipeline_index_name, PIPELINE_ELASTICSEARCH_INDEX_MAPPING
)
if self.config.index_dbt_models:
self._check_or_create_index(
self.config.dbt_index_name, DBT_ELASTICSEARCH_INDEX_MAPPING
)
def _check_or_create_index(self, index_name: str, es_mapping: str):
"""
Ensure that ``index_name`` exists with the given mapping: update the mapping
if the index already exists without one, otherwise create the index.
"""
if self.elasticsearch_client.indices.exists(index_name):
mapping = self.elasticsearch_client.indices.get_mapping()
if not mapping[index_name]["mappings"]:
logger.debug(
f"There are no mappings for index {index_name}. Updating the mapping"
)
es_mapping_dict = json.loads(es_mapping)
es_mapping_update_dict = {
"properties": es_mapping_dict["mappings"]["properties"]
}
self.elasticsearch_client.indices.put_mapping(
index=index_name,
body=json.dumps(es_mapping_update_dict),
request_timeout=self.config.timeout,
)
else:
logger.warning(
"Received index not found error from Elasticsearch. "
+ "The index doesn't exist for a newly created ES. It's OK on first run."
)
# create new index with mapping
self.elasticsearch_client.indices.create(
index=index_name, body=es_mapping, request_timeout=self.config.timeout
)
def write_record(self, record: Record) -> None:
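# Dispatch on the concrete entity type; each entity type is indexed into
# its own Elasticsearch index.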
if isinstance(record, Table):
table_doc = self._create_table_es_doc(record)
self.elasticsearch_client.index(
index=self.config.table_index_name,
id=str(table_doc.table_id),
body=table_doc.json(),
request_timeout=self.config.timeout,
)
if isinstance(record, Topic):
topic_doc = self._create_topic_es_doc(record)
self.elasticsearch_client.index(
index=self.config.topic_index_name,
id=str(topic_doc.topic_id),
body=topic_doc.json(),
request_timeout=self.config.timeout,
)
if isinstance(record, Dashboard):
dashboard_doc = self._create_dashboard_es_doc(record)
self.elasticsearch_client.index(
index=self.config.dashboard_index_name,
id=str(dashboard_doc.dashboard_id),
body=dashboard_doc.json(),
request_timeout=self.config.timeout,
)
if isinstance(record, Pipeline):
pipeline_doc = self._create_pipeline_es_doc(record)
self.elasticsearch_client.index(
index=self.config.pipeline_index_name,
id=str(pipeline_doc.pipeline_id),
body=pipeline_doc.json(),
request_timeout=self.config.timeout,
)
if isinstance(record, DbtModel):
dbt_model_doc = self._create_dbt_model_es_doc(record)
self.elasticsearch_client.index(
index=self.config.dbt_index_name,
id=str(dbt_model_doc.dbt_model_id),
body=dbt_model_doc.json(),
request_timeout=self.config.timeout,
)
if hasattr(record.name, "__root__"):
self.status.records_written(record.name.__root__)
else:
self.status.records_written(record.name)
def _create_table_es_doc(self, table: Table):
fqdn = table.fullyQualifiedName
database = table.database.name
table_name = table.name
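# Completion-suggester inputs: the bare table name is weighted higher than
# the FQDN so exact-name matches rank first.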
suggest = [
{"input": [fqdn], "weight": 5},
{"input": [table_name], "weight": 10},
]
column_names = []
column_descriptions = []
tags = set()
timestamp = time.time()
tier = None
for table_tag in table.tags:
if "Tier" in table_tag.tagFQN:
tier = table_tag.tagFQN
else:
tags.add(table_tag.tagFQN)
self._parse_columns(
table.columns, None, column_names, column_descriptions, tags
)
database_entity = self.metadata.get_by_id(
entity=Database, entity_id=str(table.database.id.__root__)
)
service_entity = self.metadata.get_by_id(
entity=DatabaseService, entity_id=str(database_entity.service.id.__root__)
)
table_owner = str(table.owner.id.__root__) if table.owner is not None else ""
table_followers = []
if table.followers:
for follower in table.followers.__root__:
table_followers.append(str(follower.id.__root__))
table_type = None
if hasattr(table.tableType, "name"):
table_type = table.tableType.name
change_descriptions = self._get_change_descriptions(Table, table.id.__root__)
table_doc = TableESDocument(
table_id=str(table.id.__root__),
database=str(database_entity.name.__root__),
service=service_entity.name,
service_type=service_entity.serviceType.name,
service_category="databaseService",
name=table.name.__root__,
suggest=suggest,
description=table.description,
table_type=table_type,
last_updated_timestamp=timestamp,
column_names=column_names,
column_descriptions=column_descriptions,
monthly_stats=table.usageSummary.monthlyStats.count,
monthly_percentile_rank=table.usageSummary.monthlyStats.percentileRank,
weekly_stats=table.usageSummary.weeklyStats.count,
weekly_percentile_rank=table.usageSummary.weeklyStats.percentileRank,
daily_stats=table.usageSummary.dailyStats.count,
daily_percentile_rank=table.usageSummary.dailyStats.percentileRank,
tier=tier,
tags=list(tags),
fqdn=fqdn,
owner=table_owner,
followers=table_followers,
change_descriptions=change_descriptions,
)
return table_doc
def _create_topic_es_doc(self, topic: Topic):
fqdn = topic.fullyQualifiedName
topic_name = topic.name
suggest = [
{"input": [fqdn], "weight": 5},
{"input": [topic_name], "weight": 10},
]
tags = set()
timestamp = time.time()
service_entity = self.metadata.get_by_id(
entity=MessagingService, entity_id=str(topic.service.id.__root__)
)
topic_owner = str(topic.owner.id.__root__) if topic.owner is not None else ""
topic_followers = []
if topic.followers:
for follower in topic.followers.__root__:
topic_followers.append(str(follower.id.__root__))
tier = None
for topic_tag in topic.tags:
if "Tier" in topic_tag.tagFQN:
tier = topic_tag.tagFQN
else:
tags.add(topic_tag.tagFQN)
change_descriptions = self._get_change_descriptions(Topic, topic.id.__root__)
topic_doc = TopicESDocument(
topic_id=str(topic.id.__root__),
service=service_entity.name,
service_type=service_entity.serviceType.name,
service_category="messagingService",
name=topic.name.__root__,
suggest=suggest,
description=topic.description,
last_updated_timestamp=timestamp,
tier=tier,
tags=list(tags),
fqdn=fqdn,
owner=topic_owner,
followers=topic_followers,
change_descriptions=change_descriptions,
)
print(topic_doc.json())
return topic_doc
def _create_dashboard_es_doc(self, dashboard: Dashboard):
fqdn = dashboard.fullyQualifiedName
dashboard_name = dashboard.name
suggest = [{"input": [dashboard.displayName], "weight": 10}]
tags = set()
timestamp = time.time()
service_entity = self.metadata.get_by_id(
entity=DashboardService, entity_id=str(dashboard.service.id.__root__)
)
dashboard_owner = (
str(dashboard.owner.id.__root__) if dashboard.owner is not None else ""
)
dashboard_followers = []
if dashboard.followers:
for follower in dashboard.followers.__root__:
dashboard_followers.append(str(follower.id.__root__))
tier = None
for dashboard_tag in dashboard.tags:
if "Tier" in dashboard_tag.tagFQN:
tier = dashboard_tag.tagFQN
else:
tags.add(dashboard_tag.tagFQN)
charts: List[Chart] = self._get_charts(dashboard.charts)
chart_names = []
chart_descriptions = []
for chart in charts:
chart_names.append(chart.displayName)
if chart.description is not None:
chart_descriptions.append(chart.description)
if len(chart.tags) > 0:
for col_tag in chart.tags:
tags.add(col_tag.tagFQN)
change_descriptions = self._get_change_descriptions(
Dashboard, dashboard.id.__root__
)
dashboard_doc = DashboardESDocument(
dashboard_id=str(dashboard.id.__root__),
service=service_entity.name,
service_type=service_entity.serviceType.name,
service_category="dashboardService",
name=dashboard.displayName,
chart_names=chart_names,
chart_descriptions=chart_descriptions,
suggest=suggest,
description=dashboard.description,
last_updated_timestamp=timestamp,
tier=tier,
tags=list(tags),
fqdn=fqdn,
owner=dashboard_owner,
followers=dashboard_followers,
monthly_stats=dashboard.usageSummary.monthlyStats.count,
monthly_percentile_rank=dashboard.usageSummary.monthlyStats.percentileRank,
weekly_stats=dashboard.usageSummary.weeklyStats.count,
weekly_percentile_rank=dashboard.usageSummary.weeklyStats.percentileRank,
daily_stats=dashboard.usageSummary.dailyStats.count,
daily_percentile_rank=dashboard.usageSummary.dailyStats.percentileRank,
change_descriptions=change_descriptions,
)
return dashboard_doc
def _create_pipeline_es_doc(self, pipeline: Pipeline):
fqdn = pipeline.fullyQualifiedName
suggest = [{"input": [pipeline.displayName], "weight": 10}]
tags = set()
timestamp = time.time()
service_entity = self.metadata.get_by_id(
entity=PipelineService, entity_id=str(pipeline.service.id.__root__)
)
pipeline_owner = (
str(pipeline.owner.id.__root__) if pipeline.owner is not None else ""
)
pipeline_followers = []
if pipeline.followers:
for follower in pipeline.followers.__root__:
pipeline_followers.append(str(follower.id.__root__))
tier = None
for pipeline_tag in pipeline.tags:
if "Tier" in pipeline_tag.tagFQN:
tier = pipeline_tag.tagFQN
else:
tags.add(pipeline_tag.tagFQN)
tasks: List[Task] = pipeline.tasks
task_names = []
task_descriptions = []
for task in tasks:
task_names.append(task.displayName)
if task.description is not None:
task_descriptions.append(task.description)
if task.tags is not None and len(task.tags) > 0:
for col_tag in task.tags:
tags.add(col_tag.tagFQN)
change_descriptions = self._get_change_descriptions(
Pipeline, pipeline.id.__root__
)
pipeline_doc = PipelineESDocument(
pipeline_id=str(pipeline.id.__root__),
service=service_entity.name,
service_type=service_entity.serviceType.name,
service_category="pipelineService",
name=pipeline.displayName,
task_names=task_names,
task_descriptions=task_descriptions,
suggest=suggest,
description=pipeline.description,
last_updated_timestamp=timestamp,
tier=tier,
tags=list(tags),
fqdn=fqdn,
owner=pipeline_owner,
followers=pipeline_followers,
change_descriptions=change_descriptions,
)
return pipeline_doc
def _create_dbt_model_es_doc(self, dbt_model: DbtModel):
fqdn = dbt_model.fullyQualifiedName
database = dbt_model.database.name
dbt_model_name = dbt_model.name
suggest = [
{"input": [fqdn], "weight": 5},
{"input": [dbt_model_name], "weight": 10},
]
column_names = []
column_descriptions = []
tags = set()
timestamp = time.time()
tier = None
for dbt_model_tag in dbt_model.tags:
if "Tier" in dbt_model_tag.tagFQN:
tier = dbt_model_tag.tagFQN
else:
tags.add(dbt_model_tag.tagFQN)
self._parse_columns(
dbt_model.columns, None, column_names, column_descriptions, tags
)
database_entity = self.metadata.get_by_id(
entity=Database, entity_id=str(dbt_model.database.id.__root__)
)
service_entity = self.metadata.get_by_id(
entity=DatabaseService, entity_id=str(database_entity.service.id.__root__)
)
dbt_model_owner = (
str(dbt_model.owner.id.__root__) if dbt_model.owner is not None else ""
)
dbt_model_followers = []
if dbt_model.followers:
for follower in dbt_model.followers.__root__:
dbt_model_followers.append(str(follower.id.__root__))
dbt_node_type = None
if hasattr(dbt_model.dbtNodeType, "name"):
dbt_node_type = dbt_model.dbtNodeType.name
change_descriptions = self._get_change_descriptions(
DbtModel, dbt_model.id.__root__
)
dbt_model_doc = DbtModelESDocument(
dbt_model_id=str(dbt_model.id.__root__),
database=str(database_entity.name.__root__),
service=service_entity.name,
service_type=service_entity.serviceType.name,
service_category="databaseService",
name=dbt_model.name.__root__,
suggest=suggest,
description=dbt_model.description,
dbt_model_type=dbt_node_type,
last_updated_timestamp=timestamp,
column_names=column_names,
column_descriptions=column_descriptions,
tier=tier,
tags=list(tags),
fqdn=fqdn,
schema_description=None,
owner=dbt_model_owner,
followers=dbt_model_followers,
change_descriptions=change_descriptions,
)
return dbt_model_doc
def _get_charts(self, chart_refs: Optional[List[entityReference.EntityReference]]):
charts = []
if chart_refs:
for chart_ref in chart_refs:
chart = self.metadata.get_by_id(
entity=Chart, entity_id=str(chart_ref.id.__root__), fields=["tags"]
)
charts.append(chart)
return charts
def _parse_columns(
self,
columns: List[Column],
parent_column,
column_names,
column_descriptions,
tags,
):
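# Recursively flatten nested columns into dotted names (parent.child),
# collecting descriptions and tags along the way.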
for column in columns:
col_name = (
parent_column + "." + column.name.__root__
if parent_column is not None
else column.name.__root__
)
column_names.append(col_name)
if column.description is not None:
column_descriptions.append(column.description)
if len(column.tags) > 0:
for col_tag in column.tags:
tags.add(col_tag.tagFQN)
if column.children is not None:
self._parse_columns(
column.children,
column.name.__root__,
column_names,
column_descriptions,
tags,
)
def _get_change_descriptions(self, entity_type, entity_id):
try:
entity_versions = self.metadata.list_versions(entity_id, entity_type)
change_descriptions = []
for version in entity_versions.versions:
version_json = json.loads(version)
updatedAt = parser.parse(version_json["updatedAt"])
change_description = ChangeDescription(
updatedBy=version_json["updatedBy"], updatedAt=updatedAt.timestamp()
)
if "changeDescription" in version_json:
change_description.fieldsAdded = version_json["changeDescription"][
"fieldsAdded"
]
change_description.fieldsDeleted = version_json[
"changeDescription"
]["fieldsDeleted"]
change_description.fieldsUpdated = version_json[
"changeDescription"
]["fieldsUpdated"]
change_descriptions.append(change_description)
return change_descriptions
except Exception as err:
logger.error(repr(err))
def get_status(self):
return self.status
def close(self):
self.elasticsearch_client.close()
|
py | b414599512ff8e1f4795e8d914724d4157d29a8b | sensorDataTypeToString = {0: 'NO DATA',
1: 'RESERVED',
2: 'RESERVED',
3: 'RESERVED',
4: 'RESERVED',
5: 'Gyro Z',
6: 'Front-left wheel ticks',
7: 'Front-right wheel ticks',
8: 'Rear-left wheel ticks',
9: 'Rear-right wheel ticks',
10: 'Speed tick',
11: 'Speed',
12: 'Gyro temp',
13: 'Gyro Y',
14: 'Gyro X',
15: 'RESERVED',
16: 'Accel X',
17: 'Accel Y',
18: 'Accel Z'
}
def parseEsfStatus(statusInfo):
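"""Split UBX-ESF-STATUS packets into an epoch list, the per-epoch fusion
mode, and per-sensor status series (calibration, time status, faults)."""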
epoch = []
fusionMode = [packet[0]['FusionMode'] for packet in statusInfo]
sensorInfo = {}
for packet in statusInfo:
curEpoch = packet[0]['ITOW']/1e3
epoch.append(curEpoch)
for sensorStatus in packet[1:]:
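# Bit layout (assumed from the u-blox UBX-ESF-STATUS spec): SensStatus1
# bits 0-5 = sensor data type; SensStatus2 bits 0-1 = calibration status,
# bits 2-3 = time status.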
sensorDataType = sensorStatus['SensStatus1'] & 0x3f
sensorDataTypeString = sensorDataTypeToString[sensorDataType]
calibStatus = sensorStatus['SensStatus2'] & 0x3
timeStatus = sensorStatus['SensStatus2'] & 0xc
faults = sensorStatus['Faults']
if sensorDataTypeString not in sensorInfo:
sensorInfo[sensorDataTypeString] = {'epoch': [], 'calibStatus': [], 'timeStatus': [], 'faults': []}
sensorInfo[sensorDataTypeString]['epoch'].append(curEpoch)
sensorInfo[sensorDataTypeString]['calibStatus'].append(calibStatus)
sensorInfo[sensorDataTypeString]['timeStatus'].append(timeStatus)
sensorInfo[sensorDataTypeString]['faults'].append(faults)
return epoch, fusionMode, sensorInfo
if __name__=='__main__':
import numpy as np
import matplotlib.pyplot as plt
import pickle
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('input')
args = parser.parse_args()
f = open(args.input, 'rb')
data = pickle.load(f)
esfStatusInfo = data['ESF-STATUS']
epoch, fusionMode, sensorInfo = parseEsfStatus(esfStatusInfo)
sensorFaults = {}
for key, value in sensorInfo.items():
faults = np.array(value['faults'])
sensorFaults[key] = {}
sensorFaults[key]['None'] = np.sum(faults == 0)
sensorFaults[key]['badMeas'] = np.sum((faults & 0x1) == 0x1)
sensorFaults[key]['badTTag'] = np.sum((faults & 0x2) == 0x2)
sensorFaults[key]['missingMeas'] = np.sum((faults & 0x4) == 0x4)
sensorFaults[key]['noisyMeas'] = np.sum((faults & 0x8) == 0x8)
print('\n*** FAULTS ***')
for key in sorted(sensorFaults.keys()):
print(key)
numMessages = len(sensorInfo[key]['faults'])
for faultName in sorted(sensorFaults[key].keys()):
numFaults = sensorFaults[key][faultName]
percentage = float(numFaults)/numMessages*100.
if faultName != 'None' and numFaults == 0:
continue
print(' {} - {}/{} ({:.3f}%)'.format(faultName, numFaults, numMessages, percentage))
fig, ax = plt.subplots(len(sensorInfo)+1, sharex=True)
ax[0].plot(epoch, fusionMode, color='r')
ax[0].set_ylim([0, 3.5])
ax[0].set_ylabel('Fusion Mode')
ax[0].grid(True)
ax[0].set_title('Calibration Status')
for i, key in enumerate(sorted(sensorInfo.keys()), 1):
info = sensorInfo[key]
ax[i].plot(info['epoch'], info['calibStatus'], color='g')
ax[i].set_ylim([-0.5, 3.5])
ax[i].set_ylabel(key)
ax[i].grid(True)
fig, ax = plt.subplots(len(sensorInfo), sharex=True)
ax[0].set_title('Faults')
for i, key in enumerate(sorted(sensorInfo.keys())):
info = sensorInfo[key]
ax[i].plot(info['epoch'], info['faults'], color='b')
ax[i].set_ylim([-0.5, 10])
ax[i].set_ylabel(key)
ax[i].grid(True)
plt.show()
|
py | b4145b236b142c609dc3aa4637ddb00a6b64b426 | """
Create an observable property. Observable properties allow users to set multiple callback functions to be called
before a value is changed with "before_change", after a value is changed with "change", before a property is deleted
with "before_delete", and after a property is deleted with "delete".
Example:
.. code-block:: python
class Point:
def __init__(self, x=0, y=0):
self._x = x
self._y = y
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, x):
self._x = x
@x.deleter
def x(self):
del self._x
@signaler_property
def y(self):
return self._y
@y.setter
def y(self, y):
self._y = y
@y.deleter
def y(self):
del self._y
@x.on("before_change")
@y.on("before_change")
def moving(self, *args):
print("Moving")
@x.on("change")
@y.on("change")
def moved(self, *args):
print("Moved point", repr(self))
@x.on("before_delete")
@y.on("before_delete")
def deleting(self)
print("Deleting value")
@x.on("before_delete")
@y.on("before_delete")
def deleted(self)
print("Deleted value")
def __repr__(self):
return self.__class__.__name__ + "(%d, %d)" % (self.x, self.y)
p = Point() # instance of a class
p.x = 1
# Moving
# Moved point Point(1, 0)
p.y = 2
# Moving
# Moved point Point(1, 2)
del p.x
# Deleting value
# Deleted value
Point.x.on(p, "change", lambda value: print("Changing p's x value to ", repr(value))) # Instance specific callback
p.x = 2
# Moving
# Moved point Point(2, 2)
# Changing p's x value to 2
Point.x.off(p, "change") # Remove all callbacks from change
"""
from .interface import get_signal, on_signal, off_signal, fire_signal, block_signals, \
copy_signals, copy_signals_as_bound, SignalerInstance, SignalerDescriptorInstance
__all__ = ["signaler_property", "SignalerPropertyInstance"]
class SignalerPropertyInstance(SignalerDescriptorInstance):
"""Replaces a property with this class that uses callback functions for before and after a value changes.
Signals (Callbacks):
* 'before_delete' - function should take no arguments
* 'delete' - function should take no arguments
* 'before_change' - function should take a single value argument
* 'change' - function should take a single value argument
"""
def __init__(self, fget=None, fset=None, fdel=None, doc=None, check_change=True):
"""Initialize like a property
Args:
fget (function/method)[None]: Getter method for the property
fset (function/method)[None]: Setter method for the property
fdel (function/method)[None]: Deleter method for the property
doc (str)[None]: Documentation for the property
check_change (bool)[True]: If True before the setter is called check if the value is different (uses getter)
"""
super(SignalerPropertyInstance, self).__init__()
# Variables
self.check_change = check_change
try:
self.fget = fget
except (AttributeError, TypeError): # property fget is a readonly attribute
pass
try:
self.fset = fset
except (AttributeError, TypeError): # property fset is a readonly attribute
pass
try:
self.fdel = fdel
except (AttributeError, TypeError): # property fdel is a readonly attribute
pass
if doc is None and fget is not None:
doc = fget.__doc__
self.__doc__ = doc
try:
self.__name__ = self.fget.__name__
except AttributeError:
pass
self.event_signals["before_delete"] = []
self.event_signals["delete"] = []
self.event_signals["before_change"] = []
self.event_signals["change"] = []
# ===== Property methods =====
def get_value(self):
"""Return the property value with the getter function."""
if self.fget is None:
raise AttributeError("unreadable attribute")
return self.fget()
def set_value(self, value):
"""Set the property value with the setter function."""
if self.fset is None:
raise AttributeError("can't set attribute")
# Check if the new value is different from the current value
if self.check_change and self.fget:
val = self.get_value()
if val == value:
return
# Set the value
self.fire("before_change", value)
ret = self.fset(value)
# Get the new value from the getter if possible
new_val = value
if self.fget:
new_val = self.get_value()
self.fire("change", new_val)
return ret # None usually
def del_value(self):
"""Delete the property value with the deleter function."""
if self.fdel is None:
raise AttributeError("can't delete attribute")
self.fire("before_delete")
ret = self.fdel()
self.fire("delete")
return ret # None usually
def __call__(self, value):
"""Set the value like a function. This makes the SignalerPropertyInstance very similar to the signaler.
The bind function depends on this functionality.
"""
return self.set_value(value)
def create_signaler_instance(self, instance=None):
"""Create and return a signaler instance."""
pass
class signaler_property(property, SignalerPropertyInstance): # , property
"""Property that is observable through callback functions.
Add a callback to function to be called when a before a property changes or after a property changes. Callbacks
can be added for 'before_change', 'change', 'before_delete', and 'delete'.
Signals (Callbacks):
* 'before_delete' - function should take no arguments
* 'delete' - function should take no arguments
* 'before_change' - function should take a single value argument
* 'change' - function should take a single value argument
Example:
.. code-block:: python
class MyClass:
# Just like a normal property
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, value):
self._x = value
@x.deleter
def x(self):
del self._x
# Connect callback methods to observe what happens to the property
@x.on("before_change")
def about_to_change_x(self, *args):
print("x is about to change")
@x.on("change")
def x_changed(self, value):
print("x changed")
@x.on("before_delete")
def x_about_to_be_deleted(self):
print("x is going to go away now")
@x.on("delete")
def x_deleted(self)
print("x has been removed")
m = MyClass()
m.x = 1
def print_value(value):
print("x changed to", value)
MyClass.x.on("change", print_value)
m.x = 2
print(m.x)
"""
def __init__(self, fget=None, fset=None, fdel=None, doc=None, check_change=True):
"""Initialize like a property
Args:
fget (function/method)[None]: Getter method for the property
fset (function/method)[None]: Setter method for the property
fdel (function/method)[None]: Deleter method for the property
doc (str)[None]: Documentation for the property
check_change (bool)[True]: If True before the setter is called check if the value is different (uses getter)
"""
SignalerPropertyInstance.__init__(self, fget=fget, fset=fset, fdel=fdel, doc=doc, check_change=check_change)
super(signaler_property, self).__init__(fget=fget, fset=fset, fdel=fdel, doc=doc)
# self.event_signals = {"before_delete": [], "delete": [], "before_change": [], "change": []}
self.check_change = check_change
# end Constructor
def create_signaler_instance(self, instance=None):
"""Create and return a signaler instance."""
fget = None
fset = None
fdel = None
doc = self.__doc__
# Bind the get, set, and del methods with the given instance before creating the observable property
if self.fget:
fget = self.fget.__get__(instance, instance.__class__)
if self.fset:
fset = self.fset.__get__(instance, instance.__class__)
if self.fdel:
fdel = self.fdel.__get__(instance, instance.__class__)
# Create the new signaler for the instance with bound methods.
sig = SignalerPropertyInstance(fget=fget, fset=fset, fdel=fdel, doc=doc, check_change=self.check_change)
# Map all of the connected callbacks as bound methods to the instance
copy_signals_as_bound(self, sig, instance)
return sig # return an event handler object for the instance
# ========== class decorator ==========
def __set__(self, instance, obj):
"""Class decorator that is called for `obj.x = 1`."""
if instance is None:
return self
sig = self.get_signaler_instance(instance)
return sig.set_value(obj)
# end __set__
def __get__(self, instance=None, owner=None):
"""Class decorator that is called for `print(obj.x)`."""
if instance is None:
return self
sig = self.get_signaler_instance(instance)
return sig.get_value()
# end __get__
def __delete__(self, instance):
"""Class decorator that is called for `del obj.x`."""
if instance is None:
return self
sig = self.get_signaler_instance(instance)
return sig.del_value()
# ===== Decorators =====
def getter(self, fget):
"""Decorator to add a getter method. Works just like @property.getter."""
obj = super(signaler_property, self).getter(fget)
obj.check_change = self.check_change
copy_signals(self, obj)
try:
obj.__name__ = obj.fget.__name__
except AttributeError:
pass
return obj
def setter(self, fset):
"""Decorator to add a setter method. Works just like @property.setter."""
obj = super(signaler_property, self).setter(fset)
obj.check_change = self.check_change
copy_signals(self, obj)
return obj
def deleter(self, fdel):
"""Decorator to add a deleter method. Works just like @property.deleter."""
obj = super(signaler_property, self).deleter(fdel)
obj.check_change = self.check_change
copy_signals(self, obj)
return obj
# ========== Connect Callback functions ==========
def get_signal(self, instance, signal_type=None):
"""Return a list of callback methods.
Options:
If user gives 'signal_type' and (optional) 'func' arguments.
.. code-block:: python
class MyClass:
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, value):
self._x = value
@x.on("before_change")
def about_to_change_x(*args):
print("x is about to change")
print(MyClass.x.get_signal("before_change"))
If user gives 'instance', 'signal_type', and (optional) 'func' arguments.
.. code-block:: python
class MyClass:
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, value):
self._x = value
@x.on("before_change")
def about_to_change_x(*args):
print("x is about to change")
obj = MyClass()
print(MyClass.x.get_signal(obj, "before_change"))
Args:
instance (object): Object to connect the signal with.
signal_type (str): Signal name to direct which signal to use
Args Alternative:
signal_type (str): Signal name to direct which signal to use
"""
if signal_type is None:
signal_type = instance
instance = None
if not isinstance(signal_type, str):
raise TypeError("Invalid 'signal type' given.")
if instance is None:
instance = self
return get_signal(instance, signal_type)
def on(self, instance, signal_type=None, func=None):
"""Connect callback methods.
Options:
If user gives 'signal_type' and (optional) 'func' arguments.
.. code-block:: python
class MyClass:
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, value):
self._x = value
@x.on("before_change")
def about_to_change_x(*args):
print("x is about to change")
If user gives 'instance', 'signal_type', and (optional) 'func' arguments.
.. code-block:: python
class MyClass:
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, value):
self._x = value
obj = MyClass()
MyClass.x.on(obj, "before_change", lambda *args: print("x is about to change"))
Args:
instance (object): Object to connect the signal with.
signal_type (str): Signal name to direct which signal to use
func (callable): Callback function
Args Alternative:
signal_type (str): Signal name to direct which signal to use
func (callable): Callback function
Returns:
func (callable): The callable function that was given or a decorator to decorate a function.
"""
if isinstance(instance, str) and (signal_type is None or callable(signal_type)):
# Class property called as a decorator
instance, signal_type, func = None, instance, signal_type
sig = self.get_signaler_instance(instance)
if func is None:
def decorator(func):
sig.on(signal_type, func)
return func
return decorator
elif sig is self:
return super(signaler_property, self).on(signal_type, func)
else:
return sig.on(signal_type, func)
def off(self, instance, signal_type=None, func=None):
"""Disconnect from a signal.
Options:
If user gives 'signal_type' and (optional) 'func' arguments.
.. code-block:: python
class MyClass:
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, value):
self._x = value
@x.on("before_change")
def notify_x_about_to_change(value):
print("x is about to change to ", value)
x.off("before_change", notify_x_about_to_change) # Disconnect the callback method
If user gives 'instance', 'signal_type', and (optional) 'func' arguments.
.. code-block:: python
class MyClass:
@signaler_property
def x(self):
return self._x
@x.setter
def x(self, value):
self._x = value
@x.on("before_change")
def notify_x_about_to_change(value):
print("x is about to change to ", value)
obj = MyClass()
MyClass.x.off(obj, "before_change", obj.notify_x_about_to_change)
Args:
signal_type (str): Signal name to direct which signal to use
func (callable)[None]: Callback function
Args Alternative:
instance (object): Object to connect the signal with.
signal_type (str): Signal name to direct which signal to use
func (callable)[None]: Callback function
Returns:
existed (bool): True if the given function was attached to the signal. Also True if the given func argument
was None and there was at least 1 function attached to the signal.
"""
if isinstance(instance, str) and (signal_type is None or callable(signal_type)):
# Class property called as a decorator
instance, signal_type, func = None, instance, signal_type
sig = self.get_signaler_instance(instance)
if sig is self:
return super(signaler_property, self).off(signal_type, func)
else:
return sig.off(signal_type, func)
def fire(self, *args, **kwargs):
"""Trigger the callback functions connected to the signal.
Args:
instance (object): Object to connect the signal with.
signal_type (str): Signal name to direct which signal to use
*args, **kwargs: Callback function arguments
Args Alternative:
signal_type (str): Signal name to direct which signal to use
*args, **kwargs: Callback function arguments
"""
length = len(args)
if length == 0:
raise ValueError("Invalid number of arguments given! Give either 'instance', 'signal_type', and '*args' "
"or just a 'signal_type' and '*args'.")
first_arg = args[0]
if isinstance(first_arg, str):
# Signal type given as first argument
signal_type = first_arg
args = args[1:]
return super(signaler_property, self).fire(signal_type, *args, **kwargs)
else:
# Instance given as first argument
instance = self.get_signaler_instance(first_arg)
signal_type = args[1]
args = args[2:]
return instance.fire(signal_type, *args, **kwargs)
def block(self, instance, signal_type=None, block=True):
"""Block the callback functions connected to the signal or signals.
Args:
instance (object): Object the signal is associated with.
signal_type (str)[None]: Signal name to direct which signal to use or None for all signals
block (bool)[True]: Block or unblock the signals
Args Alternative:
signal_type (str)[None]: Signal name to direct which signal to use or None for all signals
block (bool)[True]: Block or unblock the signals
"""
if isinstance(instance, str) and (signal_type is None or isinstance(signal_type, bool)):
# Class property called as a decorator
instance, signal_type, block = None, instance, signal_type
sig = self.get_signaler_instance(instance)
if sig is self:
return super(signaler_property, self).block(signal_type, block)
else:
return sig.block(signal_type, block)
|
py | b4145bc0e69060b9fa14d30f80d72fde649604b2 | import logging
import os
import sys
from typing import Iterator, NamedTuple, List, Dict, Text, AbstractSet, Optional, Set
import gin
import pypeln as pl
from absl import app
from absl import flags
from pypeln.process import IterableQueue
from pypeln.process.api.filter import FilterFn
from pypeln.process.api.map import MapFn
from pysc2 import run_configs
from pysc2.env.environment import StepType
from pysc2.env.sc2_env import Race
from tqdm import tqdm
from sc2_imitation_learning.common.replay_processor import ReplayProcessor, get_replay_info
from sc2_imitation_learning.common.utils import retry
from sc2_imitation_learning.dataset.dataset import ActionTimeStep, store_episode_to_hdf5, get_dataset_specs
from sc2_imitation_learning.dataset.sc2_dataset import SC2REPLAY_RACES
from sc2_imitation_learning.environment.environment import ActionSpace, ObservationSpace
from sc2_imitation_learning.environment.sc2_environment import SC2ActionSpace, SC2ObservationSpace, SC2InterfaceConfig, \
SC2Maps
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)
flags.DEFINE_string('replays_path', default='./data/replays/4.7.1/',
help='Path to the directory where the replays are stored.')
flags.DEFINE_string('dataset_path', default='./data/datasets/1v1/v1',
help='Path to the directory where the dataset will be stored.')
flags.DEFINE_integer('num_workers', os.cpu_count(), help='Number of parallel workers.')
flags.DEFINE_multi_string('gin_file', ['./configs/1v1/build_dataset.gin'], help='List of paths to Gin config files.')
flags.DEFINE_multi_string('gin_param', None, help='List of Gin parameter bindings.')
FLAGS = flags.FLAGS
class ReplayMeta(NamedTuple):
observed_player_id: int
replay_info: Dict
replay_path: Text
class Replay(NamedTuple):
time_steps: List[ActionTimeStep]
replay_meta: ReplayMeta
def is_not_none(x): return x is not None
def find_replays(replay_path: Text) -> Iterator[str]:
for entry in os.scandir(os.path.abspath(replay_path)):
if entry.name.endswith('.SC2Replay'):
yield entry.path
def load_replay_meta(replay_path: Text) -> List[ReplayMeta]:
replay_info = get_replay_info(replay_path)
return [ReplayMeta(player['PlayerID'], replay_info, replay_path) for player in replay_info['Players']]
@gin.register
class FilterReplay(FilterFn):
def __init__(self,
min_duration: float = 0.,
min_mmr: int = 0,
min_apm: int = 0,
observed_player_races: AbstractSet[Race] = frozenset((Race.protoss, Race.terran, Race.zerg)),
opponent_player_races: AbstractSet[Race] = frozenset((Race.protoss, Race.terran, Race.zerg)),
wins_only: bool = False,
map_names: Optional[Set[str]] = None) -> None:
super().__init__()
self.min_duration = min_duration
self.min_mmr = min_mmr
self.min_apm = min_apm
self.observed_player_races = observed_player_races
self.opponent_player_races = opponent_player_races
self.wins_only = wins_only
self.map_names = map_names
def __call__(self, replay_meta: ReplayMeta, **kwargs) -> bool:
if not FLAGS.is_parsed():
FLAGS(sys.argv)
observed_player_info = next(
filter(lambda p: p['PlayerID'] == replay_meta.observed_player_id, replay_meta.replay_info['Players']))
if len(replay_meta.replay_info['Players']) > 1:
opponent_player_info = next(
filter(lambda p: p['PlayerID'] != replay_meta.observed_player_id, replay_meta.replay_info['Players']))
else:
opponent_player_info = None
sc2_maps = SC2Maps(run_configs.get().data_dir)
return (replay_meta.replay_info['Duration'] >= self.min_duration
and observed_player_info.get('MMR', 0) >= self.min_mmr
and observed_player_info['APM'] >= self.min_apm
and SC2REPLAY_RACES[observed_player_info['AssignedRace']] in self.observed_player_races
and (opponent_player_info is None or
SC2REPLAY_RACES[opponent_player_info['AssignedRace']] in self.opponent_player_races)
and (not self.wins_only or observed_player_info['Result'] == 'Win')
and (self.map_names is None or sc2_maps.normalize_map_name(replay_meta.replay_info['Title']) in self.map_names))
@gin.register
class ProcessReplay(MapFn):
def __init__(self,
interface_config: SC2InterfaceConfig = gin.REQUIRED,
action_space: SC2ActionSpace = gin.REQUIRED,
observation_space: SC2ObservationSpace = gin.REQUIRED,
sc2_version: str = gin.REQUIRED) -> None:
super().__init__()
self.interface_config = interface_config
self.action_space = action_space
self.observation_space = observation_space
self.sc2_version = sc2_version
@retry(max_tries=3)
def __call__(self, replay_meta: ReplayMeta, **kwargs) -> Replay:
if not FLAGS.is_parsed():
FLAGS(sys.argv)
def _valid_or_fallback_action(o: dict, a: Dict):
if o['scalar_features']['available_actions'][a['action_type']] == 0:
return self.action_space.no_op() # action_type not available
elif 'build_queue_length' in o['scalar_features'] and \
o['scalar_features']['build_queue_length'] <= a['build_queue_id']:
return self.action_space.no_op() # build_queue_id not available
elif 'multi_select_length' in o['scalar_features'] and \
o['scalar_features']['multi_select_length'] <= a['select_unit_id']:
return self.action_space.no_op() # select_unit_id not available
elif 'cargo_length' in o['scalar_features'] and \
o['scalar_features']['cargo_length'] <= a['unload_id']:
return self.action_space.no_op() # unload_id not available
else:
return a
with ReplayProcessor(
replay_path=replay_meta.replay_path,
interface_config=self.interface_config,
observation_space=self.observation_space,
action_space=self.action_space,
observed_player_id=replay_meta.observed_player_id,
version=self.sc2_version) as replay_processor:
sampled_replay: List[ActionTimeStep] = []
reward = 0.
for curr_ts, curr_act in replay_processor.iterator():
action = _valid_or_fallback_action(curr_ts.observation, curr_act)
reward += curr_ts.reward
if ( # add timestep to replay if:
len(sampled_replay) == 0 # a) it is the first timestep of an episode,
or curr_ts.step_type == StepType.LAST # b) it is the last timestep of an episode,
or action['action_type'] != 0 # c) an action other than noop is executed or
or sampled_replay[-1].action['step_mul'] == self.interface_config.max_step_mul - 1 # d) max_step_mul is reached
):
sampled_replay.append(ActionTimeStep(observation=curr_ts.observation, action=action, reward=reward,
done=len(sampled_replay) == 0))
reward = 0.
else: # if timestep is skipped, increment step_mul of most recent action
sampled_replay[-1].action['step_mul'] += 1
return Replay(time_steps=sampled_replay, replay_meta=replay_meta)
@gin.register
class StoreReplay(MapFn):
def __init__(self,
dataset_path: str,
action_space: ActionSpace = gin.REQUIRED,
observation_space: ObservationSpace = gin.REQUIRED) -> None:
super().__init__()
self.dataset_path = dataset_path
self.action_space = action_space
self.observation_space = observation_space
def __call__(self, replay: Replay, **kwargs) -> str:
replay_name = os.path.splitext(os.path.basename(replay.replay_meta.replay_path))[0]
replay_name = f"{replay_name}_{replay.replay_meta.observed_player_id}"
specs = get_dataset_specs(self.action_space, self.observation_space)
file_name = store_episode_to_hdf5(
path=self.dataset_path,
name=replay_name,
episode=replay.time_steps,
episode_info={
'observed_player_id': replay.replay_meta.observed_player_id,
'replay_path': replay.replay_meta.replay_path,
'replay_info': replay.replay_meta.replay_info
},
specs=specs)
return file_name
def patch_iterable_queue():
""" Patches __getstate__ and __setstate__ of IterableQueues such that namespace and exception_queue attributes get
exported/restored. See PR: https://github.com/cgarciae/pypeln/pull/74 """
orig_getstate = IterableQueue.__getstate__
orig_setstate = IterableQueue.__setstate__
def new_getstate(self):
return orig_getstate(self) + (self.namespace, self.exception_queue)
def new_setstate(self, state):
orig_setstate(self, state[:-2])
self.namespace, self.exception_queue = state[-2:]
IterableQueue.__getstate__ = new_getstate
IterableQueue.__setstate__ = new_setstate
logger.info("Pickle patch for IterableQueue applied.")
patch_iterable_queue()
def main(_):
gin.parse_config_files_and_bindings(FLAGS.gin_file, FLAGS.gin_param)
os.makedirs(FLAGS.dataset_path, exist_ok=True)
assert len(os.listdir(FLAGS.dataset_path)) == 0, f'dataset_path directory ({FLAGS.dataset_path}) must be empty.'
gin_config_str = gin.config_str(max_line_length=120)
print("Loaded configuration:")
print(gin_config_str)
with open(os.path.join(FLAGS.dataset_path, 'config.gin'), mode='w') as f:
f.write(gin_config_str)
filter_replay = gin.get_configurable(FilterReplay)()
process_replay = gin.get_configurable(ProcessReplay)()
store_replay = gin.get_configurable(StoreReplay)(dataset_path=FLAGS.dataset_path)
dataset_files = []
for dataset_file in tqdm(
find_replays(FLAGS.replays_path)
| pl.process.flat_map(load_replay_meta, workers=FLAGS.num_workers, maxsize=0)
| pl.process.filter(filter_replay, workers=1, maxsize=0)
| pl.process.map(process_replay, workers=FLAGS.num_workers, maxsize=FLAGS.num_workers)
| pl.process.filter(is_not_none, workers=1, maxsize=0)
| pl.process.map(store_replay, workers=FLAGS.num_workers, maxsize=0)
):
dataset_files.append(dataset_file)
if __name__ == '__main__':
app.run(main)
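# Illustrative invocation sketch (not from the original source; the script
# name and paths are placeholders, the flag names come from the FLAGS
# references above):
#
#   python generate_dataset.py \
#       --replays_path=/data/replays \
#       --dataset_path=/data/dataset \
#       --num_workers=8 \
#       --gin_file=configs/dataset.gin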
|
py | b4145bca441d1488321ccbc81e87527ec4f8b2c8 | from django.db import IntegrityError
from rest_framework import exceptions
from rest_framework import serializers as ser
from osf.models import AbstractNode, Node, Collection, Guid, Registration, CollectionProvider
from osf.exceptions import ValidationError
from api.base.serializers import LinksField, RelationshipField, LinkedNodesRelationshipSerializer, LinkedRegistrationsRelationshipSerializer
from api.base.serializers import JSONAPISerializer, IDField, TypeField, VersionedDateTimeField
from api.base.exceptions import InvalidModelValueError, RelationshipPostMakesNoChanges
from api.base.utils import absolute_reverse, get_user_auth
from api.nodes.serializers import NodeLinksSerializer
from api.taxonomies.serializers import TaxonomizableSerializerMixin
from framework.exceptions import PermissionsError
from osf.utils.permissions import WRITE
from website.exceptions import NodeStateError
class CollectionProviderRelationshipField(RelationshipField):
def get_object(self, provider_id):
return CollectionProvider.load(provider_id)
def to_internal_value(self, data):
provider = self.get_object(data)
return {'provider': provider}
class GuidRelationshipField(RelationshipField):
def get_object(self, _id):
return Guid.load(_id)
def to_internal_value(self, data):
guid = self.get_object(data)
return {'guid': guid}
class CollectionSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'title',
'date_created',
'date_modified',
])
id = IDField(source='_id', read_only=True)
type = TypeField()
title = ser.CharField(required=True)
date_created = VersionedDateTimeField(source='created', read_only=True)
date_modified = VersionedDateTimeField(source='modified', read_only=True)
bookmarks = ser.BooleanField(read_only=False, default=False, source='is_bookmark_collection')
is_promoted = ser.BooleanField(read_only=True, default=False)
is_public = ser.BooleanField(read_only=False, default=False)
status_choices = ser.ListField(
child=ser.CharField(max_length=31),
default=list()
)
collected_type_choices = ser.ListField(
child=ser.CharField(max_length=31),
default=list()
)
links = LinksField({})
provider = CollectionProviderRelationshipField(
related_view='providers:collection-providers:collection-provider-detail',
related_view_kwargs={'provider_id': '<provider._id>'},
read_only=True
)
node_links = RelationshipField(
related_view='collections:node-pointers',
related_view_kwargs={'collection_id': '<_id>'},
related_meta={'count': 'get_node_links_count'}
)
# TODO: Add a self link to this when it's available
linked_nodes = RelationshipField(
related_view='collections:linked-nodes',
related_view_kwargs={'collection_id': '<_id>'},
related_meta={'count': 'get_node_links_count'},
self_view='collections:collection-node-pointer-relationship',
self_view_kwargs={'collection_id': '<_id>'}
)
linked_registrations = RelationshipField(
related_view='collections:linked-registrations',
related_view_kwargs={'collection_id': '<_id>'},
related_meta={'count': 'get_registration_links_count'},
self_view='collections:collection-registration-pointer-relationship',
self_view_kwargs={'collection_id': '<_id>'}
)
class Meta:
type_ = 'collections'
def get_absolute_url(self, obj):
return absolute_reverse('collections:collection-detail', kwargs={
'collection_id': obj._id,
'version': self.context['request'].parser_context['kwargs']['version']
})
def get_node_links_count(self, obj):
auth = get_user_auth(self.context['request'])
return Node.objects.filter(guids__in=obj.guid_links.all(), is_deleted=False).can_view(user=auth.user, private_link=auth.private_link).count()
def get_registration_links_count(self, obj):
auth = get_user_auth(self.context['request'])
return Registration.objects.filter(guids__in=obj.guid_links.all(), is_deleted=False).can_view(user=auth.user, private_link=auth.private_link).count()
def create(self, validated_data):
node = Collection(**validated_data)
node.category = ''
try:
node.save()
except ValidationError as e:
raise InvalidModelValueError(detail=e.messages[0])
except IntegrityError:
raise ser.ValidationError('Each user cannot have more than one Bookmark collection.')
return node
def update(self, collection, validated_data):
"""Update instance with the validated data.
"""
assert isinstance(collection, Collection), 'collection must be a Collection'
if validated_data:
            for key, value in validated_data.items():
if key == 'title' and collection.is_bookmark_collection:
raise InvalidModelValueError('Bookmark collections cannot be renamed.')
setattr(collection, key, value)
try:
collection.save()
except ValidationError as e:
raise InvalidModelValueError(detail=e.messages[0])
return collection
class CollectionDetailSerializer(CollectionSerializer):
"""
Overrides CollectionSerializer to make id required.
"""
id = IDField(source='_id', required=True)
class CollectedMetaSerializer(TaxonomizableSerializerMixin, JSONAPISerializer):
class Meta:
type_ = 'collected-metadata'
filterable_fields = frozenset([
'id',
'collected_type',
'date_created',
'date_modified',
'subjects',
'status',
])
id = IDField(source='guid._id', read_only=True)
type = TypeField()
creator = RelationshipField(
related_view='users:user-detail',
related_view_kwargs={'user_id': '<creator._id>'},
)
collection = RelationshipField(
related_view='collections:collection-detail',
related_view_kwargs={'collection_id': '<collection._id>'},
)
guid = RelationshipField(
related_view='guids:guid-detail',
related_view_kwargs={'guids': '<guid._id>'},
always_embed=True,
)
collected_type = ser.CharField(required=False)
status = ser.CharField(required=False)
def get_absolute_url(self, obj):
return absolute_reverse(
'collected-metadata:collected-metadata-detail',
kwargs={
'collection_id': obj.collection._id,
'cgm_id': obj.guid._id,
'version': self.context['request'].parser_context['kwargs']['version']
}
)
def update(self, obj, validated_data):
if validated_data and 'subjects' in validated_data:
auth = get_user_auth(self.context['request'])
subjects = validated_data.pop('subjects', None)
try:
obj.set_subjects(subjects, auth)
except PermissionsError as e:
raise exceptions.PermissionDenied(detail=e.message)
except (ValueError, NodeStateError) as e:
raise exceptions.ValidationError(detail=e.message)
if 'status' in validated_data:
obj.status = validated_data.pop('status')
if 'collected_type' in validated_data:
obj.collected_type = validated_data.pop('collected_type')
obj.save()
return obj
class CollectedMetaCreateSerializer(CollectedMetaSerializer):
# Makes guid writeable only on create
guid = GuidRelationshipField(
related_view='guids:guid-detail',
related_view_kwargs={'guids': '<guid._id>'},
always_embed=True,
read_only=False,
required=True,
)
def create(self, validated_data):
subjects = validated_data.pop('subjects', None)
collection = validated_data.pop('collection', None)
creator = validated_data.pop('creator', None)
guid = validated_data.pop('guid')
if not collection:
raise exceptions.ValidationError('"collection" must be specified.')
if not creator:
raise exceptions.ValidationError('"creator" must be specified.')
if not (creator.has_perm('write_collection', collection) or (hasattr(guid.referent, 'has_permission') and guid.referent.has_permission(creator, WRITE))):
raise exceptions.PermissionDenied('Must have write permission on either collection or collected object to collect.')
try:
obj = collection.collect_object(guid.referent, creator, **validated_data)
except ValidationError as e:
raise InvalidModelValueError(e.message)
if subjects:
auth = get_user_auth(self.context['request'])
try:
obj.set_subjects(subjects, auth)
except PermissionsError as e:
raise exceptions.PermissionDenied(detail=e.message)
except (ValueError, NodeStateError) as e:
raise exceptions.ValidationError(detail=e.message)
return obj
class CollectionNodeLinkSerializer(NodeLinksSerializer):
target_node = RelationshipField(
related_view='nodes:node-detail',
related_view_kwargs={'node_id': '<guid.referent._id>'},
always_embed=True
)
def get_absolute_url(self, obj):
return absolute_reverse(
'collections:node-pointer-detail',
kwargs={
'collection_id': self.context['request'].parser_context['kwargs']['collection_id'],
'node_link_id': obj.guid._id,
'version': self.context['request'].parser_context['kwargs']['version']
}
)
# Override NodeLinksSerializer
def create(self, validated_data):
request = self.context['request']
user = request.user
collection = self.context['view'].get_collection()
target_node_id = validated_data['_id']
pointer_node = AbstractNode.load(target_node_id)
if not pointer_node:
raise InvalidModelValueError(
source={'pointer': '/data/relationships/node_links/data/id'},
detail='Target Node \'{}\' not found.'.format(target_node_id)
)
try:
pointer = collection.collect_object(pointer_node, user)
except ValidationError:
raise InvalidModelValueError(
source={'pointer': '/data/relationships/node_links/data/id'},
detail='Target Node \'{}\' already pointed to by \'{}\'.'.format(target_node_id, collection._id)
)
return pointer
class CollectedAbstractNodeRelationshipSerializer(object):
_abstract_node_subclass = None
def make_instance_obj(self, obj):
# Convenience method to format instance based on view's get_object
return {'data':
list(self._abstract_node_subclass.objects.filter(
guids__in=obj.guid_links.all(), is_deleted=False
)),
'self': obj}
def update(self, instance, validated_data):
collection = instance['self']
auth = get_user_auth(self.context['request'])
add, remove = self.get_pointers_to_add_remove(pointers=instance['data'], new_pointers=validated_data['data'])
for pointer in remove:
collection.remove_object(pointer)
for node in add:
collection.collect_object(node, auth.user)
return self.make_instance_obj(collection)
def create(self, validated_data):
instance = self.context['view'].get_object()
auth = get_user_auth(self.context['request'])
collection = instance['self']
add, remove = self.get_pointers_to_add_remove(pointers=instance['data'], new_pointers=validated_data['data'])
if not len(add):
raise RelationshipPostMakesNoChanges
for node in add:
try:
collection.collect_object(node, auth.user)
except ValidationError as e:
raise InvalidModelValueError(
source={'pointer': '/data/relationships/node_links/data/id'},
detail='Target Node {} generated error: {}.'.format(node._id, e.message)
)
return self.make_instance_obj(collection)
class CollectedNodeRelationshipSerializer(CollectedAbstractNodeRelationshipSerializer, LinkedNodesRelationshipSerializer):
_abstract_node_subclass = Node
class CollectedRegistrationsRelationshipSerializer(CollectedAbstractNodeRelationshipSerializer, LinkedRegistrationsRelationshipSerializer):
_abstract_node_subclass = Registration
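# Illustrative usage sketch (not from the original source; follows standard
# DRF conventions, and `request` is a placeholder for the incoming request):
#
#   serializer = CollectionSerializer(data=request.data,
#                                     context={'request': request})
#   serializer.is_valid(raise_exception=True)
#   collection = serializer.save(creator=request.user)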
|
py | b4145c7c2a6681c89306910274f783120328cfbf | import paddle
import paddle.nn as nn
import paddle.vision.transforms as T
from ppim.models.common import load_model
transforms = T.Compose(
[
T.Resize(256),
T.CenterCrop(224),
T.ToTensor(),
T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
]
)
urls = {
"hardnet_39_ds": r"https://bj.bcebos.com/v1/ai-studio-online/f0b243912f6045bebfe89c65500c4a16534276e45f3544c592713e6e5524ebd2?responseContentDisposition=attachment%3B%20filename%3Dhardnet_39_ds.pdparams",
"hardnet_68_ds": r"https://bj.bcebos.com/v1/ai-studio-online/a8939896a12243db942263747687cabcad4aae89890345199f1ecfa4fadd6b27?responseContentDisposition=attachment%3B%20filename%3Dhardnet_68_ds.pdparams",
"hardnet_68": r"https://bj.bcebos.com/v1/ai-studio-online/c82332d24182481db918a848e2ec6d3a6167bd0a96cb4dc1876ce00e224bcb24?responseContentDisposition=attachment%3B%20filename%3Dhardnet_68.pdparams",
"hardnet_85": r"https://bj.bcebos.com/v1/ai-studio-online/e6f9e798149343968bf80a7ca5e8a7b2e447339202fe451c80878da91895f794?responseContentDisposition=attachment%3B%20filename%3Dhardnet_85.pdparams",
}
def ConvLayer(in_channels, out_channels, kernel_size=3, stride=1, bias_attr=False):
layer = nn.Sequential(
(
"conv",
nn.Conv2D(
in_channels,
out_channels,
kernel_size=kernel_size,
stride=stride,
padding=kernel_size // 2,
groups=1,
bias_attr=bias_attr,
),
),
("norm", nn.BatchNorm2D(out_channels)),
("relu", nn.ReLU6()),
)
return layer
def DWConvLayer(in_channels, out_channels, kernel_size=3, stride=1, bias_attr=False):
layer = nn.Sequential(
(
"dwconv",
nn.Conv2D(
in_channels,
out_channels,
kernel_size=kernel_size,
stride=stride,
padding=1,
groups=out_channels,
bias_attr=bias_attr,
),
),
("norm", nn.BatchNorm2D(out_channels)),
)
return layer
def CombConvLayer(in_channels, out_channels, kernel_size=1, stride=1):
layer = nn.Sequential(
("layer1", ConvLayer(in_channels, out_channels, kernel_size=kernel_size)),
("layer2", DWConvLayer(out_channels, out_channels, stride=stride)),
)
return layer
class HarDBlock(nn.Layer):
def __init__(
self,
in_channels,
growth_rate,
grmul,
n_layers,
keepBase=False,
residual_out=False,
dwconv=False,
):
super().__init__()
self.keepBase = keepBase
self.links = []
layers_ = []
self.out_channels = 0 # if upsample else in_channels
for i in range(n_layers):
outch, inch, link = self.get_link(i + 1, in_channels, growth_rate, grmul)
self.links.append(link)
use_relu = residual_out
if dwconv:
layers_.append(CombConvLayer(inch, outch))
else:
layers_.append(ConvLayer(inch, outch))
if (i % 2 == 0) or (i == n_layers - 1):
self.out_channels += outch
# print("Blk out =",self.out_channels)
self.layers = nn.LayerList(layers_)
def get_link(self, layer, base_ch, growth_rate, grmul):
if layer == 0:
return base_ch, 0, []
out_channels = growth_rate
link = []
for i in range(10):
dv = 2 ** i
if layer % dv == 0:
k = layer - dv
link.append(k)
if i > 0:
out_channels *= grmul
out_channels = int(int(out_channels + 1) / 2) * 2
in_channels = 0
for i in link:
ch, _, _ = self.get_link(i, base_ch, growth_rate, grmul)
in_channels += ch
return out_channels, in_channels, link
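    # Illustrative trace (not from the original source): with growth_rate=16
    # and grmul=1.7, get_link(4, base_ch, 16, 1.7) walks dv = 1, 2, 4 and
    # collects link = [3, 2, 0], i.e. layer 4 connects back to layers 3, 2 and
    # the block input -- the sparse "harmonic" wiring HarDNet is named after.
    # Its out_channels grow as 16 * 1.7 * 1.7 = 46.24, then round down to an
    # even integer: int(int(46.24 + 1) / 2) * 2 = 46.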
def forward(self, x):
layers_ = [x]
for layer in range(len(self.layers)):
link = self.links[layer]
tin = []
for i in link:
tin.append(layers_[i])
if len(tin) > 1:
x = paddle.concat(tin, 1)
else:
x = tin[0]
out = self.layers[layer](x)
layers_.append(out)
t = len(layers_)
out_ = []
for i in range(t):
if (i == 0 and self.keepBase) or (i == t - 1) or (i % 2 == 1):
out_.append(layers_[i])
out = paddle.concat(out_, 1)
return out
class HarDNet(nn.Layer):
def __init__(
self,
depth_wise=False,
arch=85,
pretrained=True,
weight_path="",
class_dim=1000,
with_pool=True,
):
super().__init__()
first_ch = [32, 64]
second_kernel = 3
max_pool = True
grmul = 1.7
drop_rate = 0.1
# HarDNet68
ch_list = [128, 256, 320, 640, 1024]
gr = [14, 16, 20, 40, 160]
n_layers = [8, 16, 16, 16, 4]
downSamp = [1, 0, 1, 1, 0]
if arch == 85:
# HarDNet85
first_ch = [48, 96]
ch_list = [192, 256, 320, 480, 720, 1280]
gr = [24, 24, 28, 36, 48, 256]
n_layers = [8, 16, 16, 16, 16, 4]
downSamp = [1, 0, 1, 0, 1, 0]
drop_rate = 0.2
elif arch == 39:
# HarDNet39
first_ch = [24, 48]
ch_list = [96, 320, 640, 1024]
grmul = 1.6
gr = [16, 20, 64, 160]
n_layers = [4, 16, 8, 4]
downSamp = [1, 1, 1, 0]
if depth_wise:
second_kernel = 1
max_pool = False
drop_rate = 0.05
blks = len(n_layers)
self.base = nn.LayerList([])
# First Layer: Standard Conv3x3, Stride=2
self.base.append(
ConvLayer(
in_channels=3,
out_channels=first_ch[0],
kernel_size=3,
stride=2,
bias_attr=False,
)
)
# Second Layer
self.base.append(ConvLayer(first_ch[0], first_ch[1], kernel_size=second_kernel))
# Maxpooling or DWConv3x3 downsampling
if max_pool:
self.base.append(nn.MaxPool2D(kernel_size=3, stride=2, padding=1))
else:
self.base.append(DWConvLayer(first_ch[1], first_ch[1], stride=2))
# Build all HarDNet blocks
ch = first_ch[1]
for i in range(blks):
blk = HarDBlock(ch, gr[i], grmul, n_layers[i], dwconv=depth_wise)
ch = blk.out_channels
self.base.append(blk)
if i == blks - 1 and arch == 85:
self.base.append(nn.Dropout(0.1))
self.base.append(ConvLayer(ch, ch_list[i], kernel_size=1))
ch = ch_list[i]
if downSamp[i] == 1:
if max_pool:
self.base.append(nn.MaxPool2D(kernel_size=2, stride=2))
else:
self.base.append(DWConvLayer(ch, ch, stride=2))
ch = ch_list[blks - 1]
layers = []
if with_pool:
layers.append(nn.AdaptiveAvgPool2D((1, 1)))
if class_dim > 0:
layers.append(nn.Flatten())
layers.append(nn.Dropout(drop_rate))
layers.append(nn.Linear(ch, class_dim))
self.base.append(nn.Sequential(*layers))
def forward(self, x):
for layer in self.base:
x = layer(x)
return x
def hardnet_39_ds(pretrained=False, return_transforms=False, **kwargs):
model = HarDNet(arch=39, depth_wise=True, **kwargs)
if pretrained:
model = load_model(model, urls["hardnet_39_ds"])
if return_transforms:
return model, transforms
else:
return model
def hardnet_68(pretrained=False, return_transforms=False, **kwargs):
model = HarDNet(arch=68, **kwargs)
if pretrained:
model = load_model(model, urls["hardnet_68"])
if return_transforms:
return model, transforms
else:
return model
def hardnet_68_ds(pretrained=False, return_transforms=False, **kwargs):
model = HarDNet(arch=68, depth_wise=True, **kwargs)
if pretrained:
model = load_model(model, urls["hardnet_68_ds"])
if return_transforms:
return model, transforms
else:
return model
def hardnet_85(pretrained=False, return_transforms=False, **kwargs):
model = HarDNet(arch=85, **kwargs)
if pretrained:
model = load_model(model, urls["hardnet_85"])
if return_transforms:
return model, transforms
else:
return model
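# Illustrative usage sketch (not from the original source; assumes the ppim
# package and the pretrained weights above are reachable):
#
#   import paddle
#   model, val_transforms = hardnet_68(pretrained=True, return_transforms=True)
#   model.eval()
#   x = paddle.randn([1, 3, 224, 224])
#   logits = model(x)  # shape [1, 1000] with the default class_dim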
|
py | b4145d037fdeddffb3700ae5d6ded38126e5326d | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
#
# !/usr/bin/env python
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
from torch.nn import CrossEntropyLoss
import warnings
from transformers.modeling_gpt2 import GPT2PreTrainedModel, Block
from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging
from transformers.modeling_outputs import (
BaseModelOutputWithPastAndCrossAttentions,
CausalLMOutputWithPastAndCrossAttentions,
)
logger = logging.get_logger(__name__)
GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP = {
"gpt2": "https://huggingface.co/gpt2/resolve/main/config.json",
"gpt2-medium": "https://huggingface.co/gpt2-medium/resolve/main/config.json",
"gpt2-large": "https://huggingface.co/gpt2-large/resolve/main/config.json",
"gpt2-xl": "https://huggingface.co/gpt2-xl/resolve/main/config.json",
"distilgpt2": "https://huggingface.co/distilgpt2/resolve/main/config.json",
}
class GPT2Config(PretrainedConfig):
"""
This is the configuration class to store the configuration of a :class:`~transformers.GPT2Model` or a
:class:`~transformers.TFGPT2Model`. It is used to instantiate a GPT-2 model according to the specified arguments,
defining the model architecture. Instantiating a configuration with the defaults will yield a similar configuration
to that of the GPT-2 `small <https://huggingface.co/gpt2>`__ architecture.
Configuration objects inherit from :class:`~transformers.PretrainedConfig` and can be used to control the model
outputs. Read the documentation from :class:`~transformers.PretrainedConfig` for more information.
Args:
vocab_size (:obj:`int`, `optional`, defaults to 50257):
Vocabulary size of the GPT-2 model. Defines the number of different tokens that can be represented by the
:obj:`inputs_ids` passed when calling :class:`~transformers.GPT2Model` or
:class:`~transformers.TFGPT2Model`.
n_positions (:obj:`int`, `optional`, defaults to 1024):
The maximum sequence length that this model might ever be used with. Typically set this to something large
just in case (e.g., 512 or 1024 or 2048).
n_ctx (:obj:`int`, `optional`, defaults to 1024):
Dimensionality of the causal mask (usually same as n_positions).
n_embd (:obj:`int`, `optional`, defaults to 768):
Dimensionality of the embeddings and hidden states.
n_layer (:obj:`int`, `optional`, defaults to 12):
Number of hidden layers in the Transformer encoder.
n_head (:obj:`int`, `optional`, defaults to 12):
Number of attention heads for each attention layer in the Transformer encoder.
n_inner (:obj:`int`, `optional`, defaults to None):
Dimensionality of the inner feed-forward layers. :obj:`None` will set it to 4 times n_embd
activation_function (:obj:`str`, `optional`, defaults to :obj:`"gelu"`):
Activation function, to be selected in the list :obj:`["relu", "silu", "gelu", "tanh", "gelu_new"]`.
resid_pdrop (:obj:`float`, `optional`, defaults to 0.1):
The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
embd_pdrop (:obj:`int`, `optional`, defaults to 0.1):
The dropout ratio for the embeddings.
attn_pdrop (:obj:`float`, `optional`, defaults to 0.1):
The dropout ratio for the attention.
layer_norm_epsilon (:obj:`float`, `optional`, defaults to 1e-5):
The epsilon to use in the layer normalization layers
initializer_range (:obj:`float`, `optional`, defaults to 0.02):
The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
summary_type (:obj:`string`, `optional`, defaults to :obj:`"cls_index"`):
Argument used when doing sequence summary, used in the models :class:`~transformers.GPT2DoubleHeadsModel`
and :class:`~transformers.TFGPT2DoubleHeadsModel`.
Has to be one of the following options:
- :obj:`"last"`: Take the last token hidden state (like XLNet).
- :obj:`"first"`: Take the first token hidden state (like BERT).
- :obj:`"mean"`: Take the mean of all tokens hidden states.
- :obj:`"cls_index"`: Supply a Tensor of classification token position (like GPT/GPT-2).
- :obj:`"attn"`: Not implemented now, use multi-head attention.
summary_use_proj (:obj:`bool`, `optional`, defaults to :obj:`True`):
Argument used when doing sequence summary, used in the models :class:`~transformers.GPT2DoubleHeadsModel`
and :class:`~transformers.TFGPT2DoubleHeadsModel`.
Whether or not to add a projection after the vector extraction.
summary_activation (:obj:`str`, `optional`):
Argument used when doing sequence summary. Used in for the multiple choice head in
:class:`~transformers.GPT2DoubleHeadsModel`.
Pass :obj:`"tanh"` for a tanh activation to the output, any other value will result in no activation.
summary_proj_to_labels (:obj:`bool`, `optional`, defaults to :obj:`True`):
Argument used when doing sequence summary, used in the models :class:`~transformers.GPT2DoubleHeadsModel`
and :class:`~transformers.TFGPT2DoubleHeadsModel`.
Whether the projection outputs should have :obj:`config.num_labels` or :obj:`config.hidden_size` classes.
summary_first_dropout (:obj:`float`, `optional`, defaults to 0.1):
Argument used when doing sequence summary, used in the models :class:`~transformers.GPT2DoubleHeadsModel`
and :class:`~transformers.TFGPT2DoubleHeadsModel`.
The dropout ratio to be used after the projection and activation.
gradient_checkpointing (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not to use gradient checkpointing to save memory at the expense of slower backward pass.
Example::
>>> from transformers import GPT2Model, GPT2Config
>>> # Initializing a GPT2 configuration
>>> configuration = GPT2Config()
>>> # Initializing a model from the configuration
>>> model = GPT2Model(configuration)
>>> # Accessing the model configuration
>>> configuration = model.config
"""
model_type = "gpt2"
def __init__(
self,
vocab_size=50257,
n_positions=1024,
n_sentences=128,
n_finals=32,
n_pos=1024,
n_beats=1024,
n_ctx=1024,
n_embd=768,
n_layer=12,
n_head=12,
n_inner=None,
activation_function="gelu_new",
resid_pdrop=0.1,
embd_pdrop=0.1,
attn_pdrop=0.1,
layer_norm_epsilon=1e-5,
initializer_range=0.02,
summary_type="cls_index",
summary_use_proj=True,
summary_activation=None,
summary_proj_to_labels=True,
summary_first_dropout=0.1,
bos_token_id=50256,
eos_token_id=50256,
gradient_checkpointing=False,
**kwargs
):
super().__init__(bos_token_id=bos_token_id, eos_token_id=eos_token_id, **kwargs)
self.vocab_size = vocab_size
self.n_ctx = n_ctx
self.n_positions = n_positions
self.n_sentences = n_sentences
self.n_finals = n_finals
self.n_pos = n_pos
self.n_beats = n_beats
self.n_embd = n_embd
self.n_layer = n_layer
self.n_head = n_head
self.n_inner = n_inner
self.activation_function = activation_function
self.resid_pdrop = resid_pdrop
self.embd_pdrop = embd_pdrop
self.attn_pdrop = attn_pdrop
self.layer_norm_epsilon = layer_norm_epsilon
self.initializer_range = initializer_range
self.summary_type = summary_type
self.summary_use_proj = summary_use_proj
self.summary_activation = summary_activation
self.summary_first_dropout = summary_first_dropout
self.summary_proj_to_labels = summary_proj_to_labels
self.gradient_checkpointing = gradient_checkpointing
self.bos_token_id = bos_token_id
self.eos_token_id = eos_token_id
@property
def max_position_embeddings(self):
return self.n_positions
@property
def hidden_size(self):
return self.n_embd
@property
def num_attention_heads(self):
return self.n_head
@property
def num_hidden_layers(self):
return self.n_layer
class GPT2Model(GPT2PreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.wte = nn.Embedding(config.vocab_size, config.n_embd)
self.wpe = nn.Embedding(config.n_positions, config.n_embd)
self.wse = nn.Embedding(config.n_sentences, config.n_embd)
self.wfe = nn.Embedding(config.n_finals, config.n_embd)
self.wre = nn.Embedding(config.n_pos, config.n_embd)
self.wbe = nn.Embedding(config.n_beats, config.n_embd)
self.drop = nn.Dropout(config.embd_pdrop)
self.h = nn.ModuleList([Block(config.n_ctx, config, scale=True) for _ in range(config.n_layer)])
self.ln_f = nn.LayerNorm(config.n_embd, eps=config.layer_norm_epsilon)
self.init_weights()
def get_input_embeddings(self):
return self.wte
def set_input_embeddings(self, new_embeddings):
self.wte = new_embeddings
def _prune_heads(self, heads_to_prune):
"""
Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer}
"""
for layer, heads in heads_to_prune.items():
self.h[layer].attn.prune_heads(heads)
def forward(
self,
input_ids=None,
past_key_values=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
sentence_ids=None, # added
final_ids=None, # added
pos_ids=None, # added inner sentence relative position
beat_ids=None,
head_mask=None,
inputs_embeds=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
**kwargs,
):
if "past" in kwargs:
warnings.warn(
"The `past` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("past")
assert kwargs == {}, f"Unexpected keyword arguments: {list(kwargs.keys())}."
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
batch_size = input_ids.shape[0]
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
batch_size = inputs_embeds.shape[0]
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
if token_type_ids is not None:
token_type_ids = token_type_ids.view(-1, input_shape[-1])
if position_ids is not None:
position_ids = position_ids.view(-1, input_shape[-1])
if sentence_ids is not None:
sentence_ids = sentence_ids.view(-1, input_shape[-1])
if final_ids is not None:
final_ids = final_ids.view(-1, input_shape[-1])
if pos_ids is not None:
pos_ids = pos_ids.view(-1, input_shape[-1])
if beat_ids is not None:
beat_ids = beat_ids.view(-1, input_shape[-1])
if past_key_values is None:
past_length = 0
past_key_values = [None] * len(self.h)
else:
past_length = past_key_values[0][0].size(-2)
        device = input_ids.device if input_ids is not None else inputs_embeds.device
        if position_ids is None:
            position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
            position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])
# Attention mask.
if attention_mask is not None:
assert batch_size > 0, "batch_size has to be defined and > 0"
attention_mask = attention_mask.view(batch_size, -1)
# We create a 3D attention mask from a 2D tensor mask.
# Sizes are [batch_size, 1, 1, to_seq_length]
# So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length]
# this attention mask is more simple than the triangular masking of causal attention
# used in OpenAI GPT, we just need to prepare the broadcast dimension here.
attention_mask = attention_mask[:, None, None, :]
# Since attention_mask is 1.0 for positions we want to attend and 0.0 for
# masked positions, this operation will create a tensor which is 0.0 for
# positions we want to attend and -10000.0 for masked positions.
# Since we are adding it to the raw scores before the softmax, this is
# effectively the same as removing these entirely.
attention_mask = attention_mask.to(dtype=self.dtype) # fp16 compatibility
attention_mask = (1.0 - attention_mask) * -10000.0
        # If a 2D or 3D attention mask is provided for the cross-attention
        # we need to make it broadcastable to [batch_size, num_heads, seq_length, seq_length]
if self.config.add_cross_attention and encoder_hidden_states is not None:
encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
if encoder_attention_mask is None:
encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
encoder_attention_mask = self.invert_attention_mask(encoder_attention_mask)
else:
encoder_attention_mask = None
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x n_heads x N x N
# head_mask has shape n_layer x batch x n_heads x N x N
head_mask = self.get_head_mask(head_mask, self.config.n_layer)
if inputs_embeds is None:
inputs_embeds = self.wte(input_ids)
hidden_states = inputs_embeds
if position_ids is not None:
position_embeds = self.wpe(position_ids)
hidden_states += position_embeds
if sentence_ids is not None:
sentence_embeds = self.wse(sentence_ids)
hidden_states += sentence_embeds
if final_ids is not None:
final_embeds = self.wfe(final_ids)
hidden_states += final_embeds
if pos_ids is not None:
pos_embeds = self.wre(pos_ids)
hidden_states += pos_embeds
if beat_ids is not None:
beat_embeds = self.wbe(beat_ids)
hidden_states += beat_embeds
if token_type_ids is not None:
token_type_embeds = self.wte(token_type_ids)
hidden_states = hidden_states + token_type_embeds
hidden_states = self.drop(hidden_states)
output_shape = input_shape + (hidden_states.size(-1),)
presents = () if use_cache else None
all_self_attentions = () if output_attentions else None
all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
all_hidden_states = () if output_hidden_states else None
for i, (block, layer_past) in enumerate(zip(self.h, past_key_values)):
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states.view(*output_shape),)
if getattr(self.config, "gradient_checkpointing", False):
def create_custom_forward(module):
def custom_forward(*inputs):
# checkpointing only works with tuple returns, not with lists
return tuple(output for output in module(*inputs, use_cache, output_attentions))
return custom_forward
outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(block),
hidden_states,
layer_past,
attention_mask,
head_mask[i],
encoder_hidden_states,
encoder_attention_mask,
)
else:
outputs = block(
hidden_states,
layer_past=layer_past,
attention_mask=attention_mask,
head_mask=head_mask[i],
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
use_cache=use_cache,
output_attentions=output_attentions,
)
hidden_states, present = outputs[:2]
if use_cache is True:
presents = presents + (present,)
if output_attentions:
all_self_attentions = all_self_attentions + (outputs[2],)
if self.config.add_cross_attention:
all_cross_attentions = all_cross_attentions + (outputs[3],)
hidden_states = self.ln_f(hidden_states)
hidden_states = hidden_states.view(*output_shape)
# Add last hidden state
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if not return_dict:
return tuple(v for v in [hidden_states, presents, all_hidden_states, all_self_attentions] if v is not None)
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=hidden_states,
past_key_values=presents,
hidden_states=all_hidden_states,
attentions=all_self_attentions,
cross_attentions=all_cross_attentions,
)
class GPT2LMHeadModel(GPT2PreTrainedModel):
authorized_missing_keys = [r"h\.\d+\.attn\.masked_bias", r"lm_head\.weight"]
def __init__(self, config):
super().__init__(config)
self.transformer = GPT2Model(config)
self.lm_head = nn.Linear(config.n_embd, config.vocab_size, bias=False)
self.init_weights()
def get_output_embeddings(self):
return self.lm_head
def prepare_inputs_for_generation(self, input_ids, past=None, **kwargs):
        # only last token for input_ids if past is defined in kwargs
if past:
input_ids = input_ids[:, -1].unsqueeze(-1)
attention_mask = kwargs.get("attention_mask", None)
position_ids = kwargs.get("position_ids", None)
if attention_mask is not None and position_ids is None:
# create position_ids on the fly for batch generation
position_ids = attention_mask.long().cumsum(-1) - 1
position_ids.masked_fill_(attention_mask == 0, 1)
if past:
position_ids = position_ids[:, -1].unsqueeze(-1)
else:
position_ids = None
return {
"input_ids": input_ids,
"past_key_values": past,
"use_cache": kwargs.get("use_cache"),
"position_ids": position_ids,
"attention_mask": attention_mask,
}
def forward(
self,
input_ids=None,
past_key_values=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
sentence_ids=None,
final_ids=None,
pos_ids=None,
beat_ids=None,
head_mask=None,
inputs_embeds=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
labels=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
**kwargs,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
``labels = input_ids`` Indices are selected in ``[-100, 0, ..., config.vocab_size]`` All labels set to
``-100`` are ignored (masked), the loss is only computed for labels in ``[0, ..., config.vocab_size]``
"""
if "past" in kwargs:
warnings.warn(
"The `past` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("past")
assert kwargs == {}, f"Unexpected keyword arguments: {list(kwargs.keys())}."
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
transformer_outputs = self.transformer(
input_ids,
past_key_values=past_key_values,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
sentence_ids=sentence_ids,
final_ids=final_ids,
pos_ids=pos_ids,
beat_ids=beat_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
hidden_states = transformer_outputs[0]
lm_logits = self.lm_head(hidden_states)
loss = None
if labels is not None:
# Shift so that tokens < n predict n
shift_logits = lm_logits[..., :-1, :].contiguous()
shift_labels = labels[..., 1:].contiguous()
# Flatten the tokens
loss_fct = CrossEntropyLoss()
loss = loss_fct(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))
if not return_dict:
output = (lm_logits,) + transformer_outputs[1:]
return ((loss,) + output) if loss is not None else output
return CausalLMOutputWithPastAndCrossAttentions(
loss=loss,
logits=lm_logits,
past_key_values=transformer_outputs.past_key_values,
hidden_states=transformer_outputs.hidden_states,
attentions=transformer_outputs.attentions,
cross_attentions=transformer_outputs.cross_attentions,
)
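# Illustrative usage sketch (not from the original source; shapes and id
# values are assumptions within the config ranges above):
#
#   config = GPT2Config(n_layer=6, n_head=8, n_embd=512)
#   model = GPT2LMHeadModel(config)
#   batch, seq_len = 2, 16
#   input_ids = torch.randint(0, config.vocab_size, (batch, seq_len))
#   sentence_ids = torch.zeros(batch, seq_len, dtype=torch.long)  # < n_sentences
#   final_ids = torch.zeros(batch, seq_len, dtype=torch.long)     # < n_finals
#   pos_ids = torch.arange(seq_len).repeat(batch, 1)              # < n_pos
#   beat_ids = torch.zeros(batch, seq_len, dtype=torch.long)      # < n_beats
#   out = model(input_ids=input_ids, sentence_ids=sentence_ids,
#               final_ids=final_ids, pos_ids=pos_ids, beat_ids=beat_ids,
#               labels=input_ids, return_dict=True)
#   out.loss.backward()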
|
py | b4145de58199a5b16a65ab4928dcb42059a40e1d | ''' A simple FuncDesigner stochastic optimization example '''
from FuncDesigner import *
from openopt import GLP
A = distribution.gauss(4, 0.5) # gauss distribution with mean = 4, std = 0.5
# this is same to
#from scipy import stats
#_a = distribution.continuous(ppf=stats.norm(4, 5).ppf)
# along with "gauss" you can use "norm" (for scipy.stats compatibility, yet I dislike it due to ambiguity with linalg.norm)
# or "normal"
B = distribution.exponential(3, 0.7) # location = 3, scale = 0.7
# for compatibility with scipy.stats you can use "expon" as well
C = distribution.uniform(-1.5, 1.5) # uniform distribution from -1.5 to 1.5
a, b, c = oovars('a b c')
x, y, z = oovars('x y z', lb=-1, ub=1)
f = sin(b) + cos(b) + arcsin(b/100) + arccos(b/100) + arctan(b) +\
(1.5+x + 3*y*z)*cosh(b/ (20+x+y)) + sinh(b/30) + tanh(b) + arctanh(b/100) + arccosh(200+b) +\
arcsinh(3+b) + (x+y+0.4)*exp(b/ (15+x+z)) + sqrt(b+100) + abs(b-2) + log(b+50) + log10(b+100) +\
log2(b+100) + tan(c/50) + x + 2**(a/4 + x + y + z)
objective = 0.15 * mean(f+2*x) + x*cos(y+2*z) + z * var(b) * std(c) + y * P(a - z + b*sin(c) > 5)
constraints = [
P(a**2 - z + b*c < 4.7) < 0.03, # by default constraint tolerance is 10^-6
(P(c/b + z > sin(x)) > 0.02)(tol = 1e-10), # use tol 10^-10 instead; especially useful for equality constraints
mean(b+y) <= 3.5
]
startPoint = {x: 0, y: 0, z: 0, a: A, b: B, c: C}
''' This is a multiextremum problem (due to sin, cos etc),
thus we have to use a global nonlinear solver capable of handling nonlinear constraints
(BTW having probability functions P() makes it even discontinuous when discrete distribution(s) are involved)
'''
p = GLP(objective, startPoint, constraints = constraints)
solver = 'de' # named after "differential evolution", check http://openopt.org/GLP for other available global solvers
r = p.maximize(solver, maxTime = 150, maxDistributionSize=100, iprint = 50)
'''
------------------------- OpenOpt 0.45 -------------------------
solver: de problem: unnamed type: GLP
iter objFunVal log10(MaxResidual/ConTol)
0 6.008e+00 8.40
50 7.436e+00 -100.00
93 7.517e+00 -100.00
istop: 11 (Non-Success Number > maxNonSuccess = 15)
Solver: Time Elapsed = 31.58 CPU Time Elapsed = 30.07
objFunValue: 7.516546 (feasible, max(residuals/requiredTolerances) = 0)
'''
print(r(x, y, z)) # [0.99771171590186, -0.15952854483416395, 0.8584877921129496]
# let's check constraint values
# below we could use c(r.xf) but c(r) is less-to-type and looks better
print(P(a**2 - z + b*c < 4.7)(r)) # should be less than 0.03
print(P(c/b + z > sin(x))(r)) # should be greater than 0.02
print(mean(b+y)(r)) # should be less than 3.5
#0.0200595929361
#0.029969536767
#[ 3.49947095]
# we could plot cdf (and pdf for continuous) for any stochastic function wrt the optimal parameters point, e.g.
f(r).cdf.plot()
# or, for example, (f + sin(x) + 2*f*cos(y+f) + z * P(f<x))(r).cdf.plot()
|
py | b4145df246308b4ba5532cacdfe9435472b53dcc | # Copyright 2021 Katteli Inc.
# TestFlows.com Open-Source Software Testing Framework (http://testflows.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testflows.settings as settings
from testflows._core.flags import Flags, SKIP, REPEATED
from testflows._core.testtype import TestType
from testflows._core.message import Message
from testflows._core.name import split, parentname
from testflows._core.cli.colors import color
from testflows._core.transform.log.report.totals import Counts, color_result
indent = " " * 2
class UnstableCounts(Counts):
def __str__(self):
icon = "\u25D4"
fail_rate = (self.fail + self.error + self.null)/self.units * 100
if fail_rate in (0, 100):
return ""
fail_rate = color(f"{fail_rate:.2f}%", "cyan", attrs=["bold"])
s = f"{color(icon, 'cyan', attrs=['bold'])} [ {fail_rate} ] {self.name} ("
r = []
if self.ok > 0:
r.append(color_result("OK", f"{self.ok} ok"))
if self.fail > 0:
r.append(color_result("Fail", f"{self.fail} failed"))
if self.skip > 0:
r.append(color_result("Skip", f"{self.skip} skipped"))
if self.error > 0:
r.append(color_result("Error", f"{self.error} errored"))
if self.null > 0:
r.append(color_result("Null", f"{self.null} null"))
if self.xok > 0:
r.append(color_result("XOK", f"{self.xok} xok"))
if self.xfail > 0:
r.append(color_result("XFail", f"{self.xfail} xfail"))
if self.xerror > 0:
r.append(color_result("XError", f"{self.xerror} xerror"))
if self.xnull > 0:
r.append(color_result("XNull", f"{self.xnull} xnull"))
s += color(", ", "white", attrs=["bold"]).join(r)
s += color(")\n", "white", attrs=["bold"])
return s
def add_result(msg, results):
flags = Flags(msg["test_flags"])
if flags & SKIP and settings.show_skipped is False:
return
if getattr(TestType, msg["test_type"]) == TestType.Iteration:
result = msg["result_type"]
parent_id, test_id = split(msg["test_id"])
if results.get(parent_id) is None:
results[parent_id] = []
results[parent_id].append((msg, result))
processors = {
Message.RESULT.name: (add_result,),
}
def generate(results, divider):
"""Generate report.
"""
if not results:
return
unstable = ""
for entry in results.values():
name = parentname(entry[0][0]["test_name"])
counts = UnstableCounts(name, *([0] * 10))
for iteration in entry:
msg, result = iteration
counts.units += 1
result_name = result.lower()
setattr(counts, result_name, getattr(counts, result_name) + 1)
_counts = str(counts)
if _counts:
_counts += "\n"
unstable += _counts
if unstable:
unstable = color(f"{divider}Unstable\n\n", "white", attrs=["bold"]) + unstable.rstrip()
report = f"{unstable}"
return report or None
def transform(stop, divider="\n"):
"""Generate unstable report.
:param stop: stop event
:param divider: report divider, default: `\n`
"""
line = None
results = {}
while True:
if line is not None:
processor = processors.get(line["message_keyword"], None)
if processor:
processor[0](line, results, *processor[1:])
line = None
if stop.is_set():
line = generate(results, divider)
line = yield line
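# Illustrative wiring sketch (not from the original source; `parsed_messages`
# is a placeholder for an iterable of decoded log-line dicts):
#
#   import threading
#   stop = threading.Event()
#   gen = transform(stop)
#   gen.send(None)                    # prime the coroutine
#   for message in parsed_messages:   # dicts carrying "message_keyword" etc.
#       gen.send(message)
#   stop.set()
#   report = gen.send(None)           # the unstable report, or None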
|
py | b4145e1233d7146c800109cbcac8703af9ec9039 | """ CISCO_IETF_BFD_MIB
This document contains the Management information base for
Bidirectional Forwarding Detection(BFD) Protocol as defined
in draft\-ietf\-bfd\-base\-06.txt.
BFD is a protocol intended to detect faults in the
bidirectional path between two forwarding engines, including
interfaces, data link(s), and to the extent possible the forwarding
engines themselves, with potentially very low latency. It operates
independently of media, data protocols, and routing protocols.
This MIB module is based on the Internet Draft
draft\-ietf\-bfd\-mib\-03.txt and draft\-ietf\-bfd\-mib\-04.txt
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class CiscoBfdDiag(Enum):
"""
CiscoBfdDiag (Enum Class)
A common BFD diagnostic code.
.. data:: noDiagnostic = 0
.. data:: controlDetectionTimeExpired = 1
.. data:: echoFunctionFailed = 2
.. data:: neighborSignaledSessionDown = 3
.. data:: forwardingPlaneReset = 4
.. data:: pathDown = 5
.. data:: concatenatedPathDown = 6
.. data:: administrativelyDown = 7
.. data:: reverseConcatenatedPathDown = 8
"""
noDiagnostic = Enum.YLeaf(0, "noDiagnostic")
controlDetectionTimeExpired = Enum.YLeaf(1, "controlDetectionTimeExpired")
echoFunctionFailed = Enum.YLeaf(2, "echoFunctionFailed")
neighborSignaledSessionDown = Enum.YLeaf(3, "neighborSignaledSessionDown")
forwardingPlaneReset = Enum.YLeaf(4, "forwardingPlaneReset")
pathDown = Enum.YLeaf(5, "pathDown")
concatenatedPathDown = Enum.YLeaf(6, "concatenatedPathDown")
administrativelyDown = Enum.YLeaf(7, "administrativelyDown")
reverseConcatenatedPathDown = Enum.YLeaf(8, "reverseConcatenatedPathDown")
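# Illustrative read sketch (not from the original source; address and
# credentials are placeholders, and the standard ydk services are assumed):
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#   provider = NetconfServiceProvider(address="10.0.0.1",
#                                     username="admin", password="admin")
#   bfd_state = CRUDService().read(provider, CISCOIETFBFDMIB())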
class CISCOIETFBFDMIB(Entity):
"""
.. attribute:: ciscobfdscalarobjects
**type**\: :py:class:`CiscoBfdScalarObjects <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdScalarObjects>`
**config**\: False
.. attribute:: ciscobfdsesstable
The BFD Session Table describes the BFD sessions
**type**\: :py:class:`CiscoBfdSessTable <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable>`
**config**\: False
.. attribute:: ciscobfdsessmaptable
The BFD Session Mapping Table maps the complex indexing of the BFD sessions to the flat CiscoBfdSessIndexTC used in the ciscoBfdSessTable
**type**\: :py:class:`CiscoBfdSessMapTable <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessMapTable>`
**config**\: False
.. attribute:: ciscobfdsessdiscmaptable
The BFD Session Discriminator Mapping Table maps a local discriminator value to associated BFD sessions' CiscoBfdSessIndexTC used in the ciscoBfdSessTable
**type**\: :py:class:`CiscoBfdSessDiscMapTable <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable>`
**config**\: False
.. attribute:: ciscobfdsessipmaptable
    The BFD Session IP Mapping Table maps given ciscoBfdSessInterface, ciscoBfdSessAddrType, and ciscoBfdSessAddr to an associated BFD session's CiscoBfdSessIndexTC used in the ciscoBfdSessTable. This table SHOULD contain those BFD sessions that are of IP type\: singleHop(1) and multiHop(2)
**type**\: :py:class:`CiscoBfdSessIpMapTable <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessIpMapTable>`
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB, self).__init__()
self._top_entity = None
self.yang_name = "CISCO-IETF-BFD-MIB"
self.yang_parent_name = "CISCO-IETF-BFD-MIB"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ciscoBfdScalarObjects", ("ciscobfdscalarobjects", CISCOIETFBFDMIB.CiscoBfdScalarObjects)), ("ciscoBfdSessTable", ("ciscobfdsesstable", CISCOIETFBFDMIB.CiscoBfdSessTable)), ("ciscoBfdSessMapTable", ("ciscobfdsessmaptable", CISCOIETFBFDMIB.CiscoBfdSessMapTable)), ("ciscoBfdSessDiscMapTable", ("ciscobfdsessdiscmaptable", CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable)), ("ciscoBfdSessIpMapTable", ("ciscobfdsessipmaptable", CISCOIETFBFDMIB.CiscoBfdSessIpMapTable))])
self._leafs = OrderedDict()
self.ciscobfdscalarobjects = CISCOIETFBFDMIB.CiscoBfdScalarObjects()
self.ciscobfdscalarobjects.parent = self
self._children_name_map["ciscobfdscalarobjects"] = "ciscoBfdScalarObjects"
self.ciscobfdsesstable = CISCOIETFBFDMIB.CiscoBfdSessTable()
self.ciscobfdsesstable.parent = self
self._children_name_map["ciscobfdsesstable"] = "ciscoBfdSessTable"
self.ciscobfdsessmaptable = CISCOIETFBFDMIB.CiscoBfdSessMapTable()
self.ciscobfdsessmaptable.parent = self
self._children_name_map["ciscobfdsessmaptable"] = "ciscoBfdSessMapTable"
self.ciscobfdsessdiscmaptable = CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable()
self.ciscobfdsessdiscmaptable.parent = self
self._children_name_map["ciscobfdsessdiscmaptable"] = "ciscoBfdSessDiscMapTable"
self.ciscobfdsessipmaptable = CISCOIETFBFDMIB.CiscoBfdSessIpMapTable()
self.ciscobfdsessipmaptable.parent = self
self._children_name_map["ciscobfdsessipmaptable"] = "ciscoBfdSessIpMapTable"
self._segment_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB, [], name, value)
class CiscoBfdScalarObjects(Entity):
"""
.. attribute:: ciscobfdadminstatus
The global administrative status of BFD in this router. The value 'enabled' denotes that the BFD Process is active on at least one interface; 'disabled' disables it on all interfaces
**type**\: :py:class:`CiscoBfdAdminStatus <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdScalarObjects.CiscoBfdAdminStatus>`
**config**\: False
.. attribute:: ciscobfdversionnumber
The current version number of the BFD protocol
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessnotificationsenable
If this object is set to true(1), then it enables the emission of ciscoBfdSessUp and ciscoBfdSessDown notifications; otherwise these notifications are not emitted
**type**\: bool
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdScalarObjects, self).__init__()
self.yang_name = "ciscoBfdScalarObjects"
self.yang_parent_name = "CISCO-IETF-BFD-MIB"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ciscobfdadminstatus', (YLeaf(YType.enumeration, 'ciscoBfdAdminStatus'), [('ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB', 'CISCOIETFBFDMIB', 'CiscoBfdScalarObjects.CiscoBfdAdminStatus')])),
('ciscobfdversionnumber', (YLeaf(YType.uint32, 'ciscoBfdVersionNumber'), ['int'])),
('ciscobfdsessnotificationsenable', (YLeaf(YType.boolean, 'ciscoBfdSessNotificationsEnable'), ['bool'])),
])
self.ciscobfdadminstatus = None
self.ciscobfdversionnumber = None
self.ciscobfdsessnotificationsenable = None
self._segment_path = lambda: "ciscoBfdScalarObjects"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdScalarObjects, ['ciscobfdadminstatus', 'ciscobfdversionnumber', 'ciscobfdsessnotificationsenable'], name, value)
class CiscoBfdAdminStatus(Enum):
"""
CiscoBfdAdminStatus (Enum Class)
The global administrative status of BFD in this router.
The value 'enabled' denotes that the BFD Process is
active on at least one interface; 'disabled' disables
it on all interfaces.
.. data:: enabled = 1
.. data:: disabled = 2
"""
enabled = Enum.YLeaf(1, "enabled")
disabled = Enum.YLeaf(2, "disabled")
class CiscoBfdSessTable(Entity):
"""
The BFD Session Table describes the BFD sessions.
.. attribute:: ciscobfdsessentry
The BFD Session Entry describes BFD session
**type**\: list of :py:class:`CiscoBfdSessEntry <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry>`
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessTable, self).__init__()
self.yang_name = "ciscoBfdSessTable"
self.yang_parent_name = "CISCO-IETF-BFD-MIB"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ciscoBfdSessEntry", ("ciscobfdsessentry", CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry))])
self._leafs = OrderedDict()
self.ciscobfdsessentry = YList(self)
self._segment_path = lambda: "ciscoBfdSessTable"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessTable, [], name, value)
class CiscoBfdSessEntry(Entity):
"""
The BFD Session Entry describes BFD session.
.. attribute:: ciscobfdsessindex (key)
This object contains an index used to represent a unique BFD session on this device
**type**\: int
**range:** 1..4294967295
**config**\: False
.. attribute:: ciscobfdsessapplicationid
This object contains an index used to indicate a local application which owns or maintains this BFD session. For instance, the MPLS VPN process may maintain a subset of the total number of BFD sessions. This application ID provides a convenient way to segregate sessions by the applications which maintain them
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessdiscriminator
This object specifies the local discriminator for this BFD session, used to uniquely identify it
**type**\: int
**range:** 1..4294967295
**config**\: False
.. attribute:: ciscobfdsessremotediscr
This object specifies the session discriminator chosen by the remote system for this BFD session
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessudpport
The destination UDP Port for BFD. The default value is the well\-known value for this port. BFD State failing(5) is only applicable if this BFD session is running version 0
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: ciscobfdsessstate
The perceived state of the BFD session
**type**\: :py:class:`CiscoBfdSessState <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessState>`
**config**\: False
.. attribute:: ciscobfdsessremoteheardflag
This object specifies status of BFD packet reception from the remote system. Specifically, it is set to true(1) if the local system is actively receiving BFD packets from the remote system, and is set to false(0) if the local system has not received BFD packets recently (within the detection time) or if the local system is attempting to tear down the BFD session
**type**\: bool
**config**\: False
.. attribute:: ciscobfdsessdiag
A diagnostic code specifying the local system's reason for the last transition of the session from up(1) to some other state
**type**\: :py:class:`CiscoBfdDiag <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CiscoBfdDiag>`
**config**\: False
.. attribute:: ciscobfdsessopermode
This object specifies current operating mode that BFD session is operating in
**type**\: :py:class:`CiscoBfdSessOperMode <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessOperMode>`
**config**\: False
.. attribute:: ciscobfdsessdemandmodedesiredflag
This object indicates that the local system's desire to use Demand mode. Specifically, it is set to true(1) if the local system wishes to use Demand mode or false(0) if not
**type**\: bool
**config**\: False
.. attribute:: ciscobfdsessechofuncmodedesiredflag
This object indicates that the local system's desire to use Echo mode. Specifically, it is set to true(1) if the local system wishes to use Echo mode or false(0) if not
**type**\: bool
**config**\: False
.. attribute:: ciscobfdsesscontrolplanindepflag
This object indicates that the local system's ability to continue to function through a disruption of the control plane. Specifically, it is set to true(1) if the local system BFD implementation is independent of the control plane. Otherwise, the value is set to false(0)
**type**\: bool
**config**\: False
.. attribute:: ciscobfdsessaddrtype
This object specifies IP address type of the neighboring IP address which is being monitored with this BFD session. Only values unknown(0), ipv4(1) or ipv6(2) have to be supported. A value of unknown(0) is allowed only when the outgoing interface is of type point\-to\-point, or when the BFD session is not associated with a specific interface. If any other unsupported values are attempted in a set operation, the agent MUST return an inconsistentValue error
**type**\: :py:class:`InetAddressType <ydk.models.cisco_ios_xe.INET_ADDRESS_MIB.InetAddressType>`
**config**\: False
.. attribute:: ciscobfdsessaddr
This object specifies the neighboring IP address which is being monitored with this BFD session. It can also be used to enable BFD on a specific interface. The value is set to zero when the BFD session is not associated with a specific interface
**type**\: str
**length:** 0..255
**config**\: False
.. attribute:: ciscobfdsessdesiredmintxinterval
This object specifies the minimum interval, in microseconds, that the local system would like to use when transmitting BFD Control packets
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessreqminrxinterval
This object specifies the minimum interval, in microseconds, between received BFD Control packets the local system is capable of supporting
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessreqminechorxinterval
This object specifies the minimum interval, in microseconds, between received BFD Echo packets that this system is capable of supporting
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessdetectmult
This object specifies the Detect time multiplier
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessstortype
This variable indicates the storage type for this object. Conceptual rows having the value 'permanent' need not allow write\-access to any columnar objects in the row
**type**\: :py:class:`StorageType <ydk.models.cisco_ios_xe.SNMPv2_TC.StorageType>`
**config**\: False
.. attribute:: ciscobfdsessrowstatus
This variable is used to create, modify, and/or delete a row in this table. When a row in this table has a row in the active(1) state, no objects in this row can be modified except the ciscoBfdSessRowStatus and ciscoBfdSessStorageType
**type**\: :py:class:`RowStatus <ydk.models.cisco_ios_xe.SNMPv2_TC.RowStatus>`
**config**\: False
.. attribute:: ciscobfdsessauthpresflag
This object indicates the local system's desire to use Authentication. Specifically, it is set to true(1) if the local system wishes the session to be authenticated or false(0) if not
**type**\: bool
**config**\: False
.. attribute:: ciscobfdsessauthenticationtype
The Authentication Type used for this BFD session. This field is valid only when the Authentication Present bit is set
**type**\: :py:class:`CiscoBfdSessAuthenticationType <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessAuthenticationType>`
**config**\: False
.. attribute:: ciscobfdsessversionnumber
The version number of the BFD protocol that this session is running in
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsesstype
The type of this BFD session
**type**\: :py:class:`CiscoBfdSessType <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessType>`
**config**\: False
.. attribute:: ciscobfdsessinterface
This object contains an interface index used to indicate the interface which this BFD session is running on
**type**\: int
**range:** 1..2147483647
**config**\: False
.. attribute:: ciscobfdsessperfpktin
The total number of BFD messages received for this BFD session
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessperfpktout
The total number of BFD messages sent for this BFD session
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessuptime
The value of sysUpTime on the most recent occasion at which the session came up. If no such up event exists this object contains a zero value
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessperflastsessdowntime
The value of sysUpTime on the most recent occasion at which communication was last lost with the neighbor. If no such down event exists this object contains a zero value
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessperflastcommlostdiag
The BFD diag code for the last time communication was lost with the neighbor. If no such down event exists this object contains a zero value
**type**\: :py:class:`CiscoBfdDiag <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CiscoBfdDiag>`
**config**\: False
.. attribute:: ciscobfdsessperfsessupcount
The number of times this session has gone into the Up state since the router last rebooted
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessperfdisctime
The value of sysUpTime on the most recent occasion at which any one or more of the session counters suffered a discontinuity. The relevant counters are the specific instances associated with this BFD session of any Counter32 object contained in the ciscoBfdSessPerfTable. If no such discontinuities have occurred since the last re\-initialization of the local management subsystem, then this object contains a zero value
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: ciscobfdsessperfpktinhc
This value represents the total number of BFD messages received for this BFD session. It MUST be equal to the least significant 32 bits of ciscoBfdSessPerfPktIn if ciscoBfdSessPerfPktInHC is supported according to the rules spelled out in RFC2863
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: ciscobfdsessperfpktouthc
This value represents the total number of BFD messages transmitted for this BFD session. It MUST be equal to the least significant 32 bits of ciscoBfdSessPerfPktOut if ciscoBfdSessPerfPktOutHC is supported according to the rules spelled out in RFC2863
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry, self).__init__()
self.yang_name = "ciscoBfdSessEntry"
self.yang_parent_name = "ciscoBfdSessTable"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['ciscobfdsessindex']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ciscobfdsessindex', (YLeaf(YType.uint32, 'ciscoBfdSessIndex'), ['int'])),
('ciscobfdsessapplicationid', (YLeaf(YType.uint32, 'ciscoBfdSessApplicationId'), ['int'])),
('ciscobfdsessdiscriminator', (YLeaf(YType.uint32, 'ciscoBfdSessDiscriminator'), ['int'])),
('ciscobfdsessremotediscr', (YLeaf(YType.uint32, 'ciscoBfdSessRemoteDiscr'), ['int'])),
('ciscobfdsessudpport', (YLeaf(YType.uint16, 'ciscoBfdSessUdpPort'), ['int'])),
('ciscobfdsessstate', (YLeaf(YType.enumeration, 'ciscoBfdSessState'), [('ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB', 'CISCOIETFBFDMIB', 'CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessState')])),
('ciscobfdsessremoteheardflag', (YLeaf(YType.boolean, 'ciscoBfdSessRemoteHeardFlag'), ['bool'])),
('ciscobfdsessdiag', (YLeaf(YType.enumeration, 'ciscoBfdSessDiag'), [('ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB', 'CiscoBfdDiag', '')])),
('ciscobfdsessopermode', (YLeaf(YType.enumeration, 'ciscoBfdSessOperMode'), [('ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB', 'CISCOIETFBFDMIB', 'CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessOperMode')])),
('ciscobfdsessdemandmodedesiredflag', (YLeaf(YType.boolean, 'ciscoBfdSessDemandModeDesiredFlag'), ['bool'])),
('ciscobfdsessechofuncmodedesiredflag', (YLeaf(YType.boolean, 'ciscoBfdSessEchoFuncModeDesiredFlag'), ['bool'])),
('ciscobfdsesscontrolplanindepflag', (YLeaf(YType.boolean, 'ciscoBfdSessControlPlanIndepFlag'), ['bool'])),
('ciscobfdsessaddrtype', (YLeaf(YType.enumeration, 'ciscoBfdSessAddrType'), [('ydk.models.cisco_ios_xe.INET_ADDRESS_MIB', 'InetAddressType', '')])),
('ciscobfdsessaddr', (YLeaf(YType.str, 'ciscoBfdSessAddr'), ['str'])),
('ciscobfdsessdesiredmintxinterval', (YLeaf(YType.uint32, 'ciscoBfdSessDesiredMinTxInterval'), ['int'])),
('ciscobfdsessreqminrxinterval', (YLeaf(YType.uint32, 'ciscoBfdSessReqMinRxInterval'), ['int'])),
('ciscobfdsessreqminechorxinterval', (YLeaf(YType.uint32, 'ciscoBfdSessReqMinEchoRxInterval'), ['int'])),
('ciscobfdsessdetectmult', (YLeaf(YType.uint32, 'ciscoBfdSessDetectMult'), ['int'])),
('ciscobfdsessstortype', (YLeaf(YType.enumeration, 'ciscoBfdSessStorType'), [('ydk.models.cisco_ios_xe.SNMPv2_TC', 'StorageType', '')])),
('ciscobfdsessrowstatus', (YLeaf(YType.enumeration, 'ciscoBfdSessRowStatus'), [('ydk.models.cisco_ios_xe.SNMPv2_TC', 'RowStatus', '')])),
('ciscobfdsessauthpresflag', (YLeaf(YType.boolean, 'ciscoBfdSessAuthPresFlag'), ['bool'])),
('ciscobfdsessauthenticationtype', (YLeaf(YType.enumeration, 'ciscoBfdSessAuthenticationType'), [('ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB', 'CISCOIETFBFDMIB', 'CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessAuthenticationType')])),
('ciscobfdsessversionnumber', (YLeaf(YType.uint32, 'ciscoBfdSessVersionNumber'), ['int'])),
('ciscobfdsesstype', (YLeaf(YType.enumeration, 'ciscoBfdSessType'), [('ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB', 'CISCOIETFBFDMIB', 'CiscoBfdSessTable.CiscoBfdSessEntry.CiscoBfdSessType')])),
('ciscobfdsessinterface', (YLeaf(YType.int32, 'ciscoBfdSessInterface'), ['int'])),
('ciscobfdsessperfpktin', (YLeaf(YType.uint32, 'ciscoBfdSessPerfPktIn'), ['int'])),
('ciscobfdsessperfpktout', (YLeaf(YType.uint32, 'ciscoBfdSessPerfPktOut'), ['int'])),
('ciscobfdsessuptime', (YLeaf(YType.uint32, 'ciscoBfdSessUpTime'), ['int'])),
('ciscobfdsessperflastsessdowntime', (YLeaf(YType.uint32, 'ciscoBfdSessPerfLastSessDownTime'), ['int'])),
('ciscobfdsessperflastcommlostdiag', (YLeaf(YType.enumeration, 'ciscoBfdSessPerfLastCommLostDiag'), [('ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB', 'CiscoBfdDiag', '')])),
('ciscobfdsessperfsessupcount', (YLeaf(YType.uint32, 'ciscoBfdSessPerfSessUpCount'), ['int'])),
('ciscobfdsessperfdisctime', (YLeaf(YType.uint32, 'ciscoBfdSessPerfDiscTime'), ['int'])),
('ciscobfdsessperfpktinhc', (YLeaf(YType.uint64, 'ciscoBfdSessPerfPktInHC'), ['int'])),
('ciscobfdsessperfpktouthc', (YLeaf(YType.uint64, 'ciscoBfdSessPerfPktOutHC'), ['int'])),
])
self.ciscobfdsessindex = None
self.ciscobfdsessapplicationid = None
self.ciscobfdsessdiscriminator = None
self.ciscobfdsessremotediscr = None
self.ciscobfdsessudpport = None
self.ciscobfdsessstate = None
self.ciscobfdsessremoteheardflag = None
self.ciscobfdsessdiag = None
self.ciscobfdsessopermode = None
self.ciscobfdsessdemandmodedesiredflag = None
self.ciscobfdsessechofuncmodedesiredflag = None
self.ciscobfdsesscontrolplanindepflag = None
self.ciscobfdsessaddrtype = None
self.ciscobfdsessaddr = None
self.ciscobfdsessdesiredmintxinterval = None
self.ciscobfdsessreqminrxinterval = None
self.ciscobfdsessreqminechorxinterval = None
self.ciscobfdsessdetectmult = None
self.ciscobfdsessstortype = None
self.ciscobfdsessrowstatus = None
self.ciscobfdsessauthpresflag = None
self.ciscobfdsessauthenticationtype = None
self.ciscobfdsessversionnumber = None
self.ciscobfdsesstype = None
self.ciscobfdsessinterface = None
self.ciscobfdsessperfpktin = None
self.ciscobfdsessperfpktout = None
self.ciscobfdsessuptime = None
self.ciscobfdsessperflastsessdowntime = None
self.ciscobfdsessperflastcommlostdiag = None
self.ciscobfdsessperfsessupcount = None
self.ciscobfdsessperfdisctime = None
self.ciscobfdsessperfpktinhc = None
self.ciscobfdsessperfpktouthc = None
self._segment_path = lambda: "ciscoBfdSessEntry" + "[ciscoBfdSessIndex='" + str(self.ciscobfdsessindex) + "']"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/ciscoBfdSessTable/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry, ['ciscobfdsessindex', 'ciscobfdsessapplicationid', 'ciscobfdsessdiscriminator', 'ciscobfdsessremotediscr', 'ciscobfdsessudpport', 'ciscobfdsessstate', 'ciscobfdsessremoteheardflag', 'ciscobfdsessdiag', 'ciscobfdsessopermode', 'ciscobfdsessdemandmodedesiredflag', 'ciscobfdsessechofuncmodedesiredflag', 'ciscobfdsesscontrolplanindepflag', 'ciscobfdsessaddrtype', 'ciscobfdsessaddr', 'ciscobfdsessdesiredmintxinterval', 'ciscobfdsessreqminrxinterval', 'ciscobfdsessreqminechorxinterval', 'ciscobfdsessdetectmult', 'ciscobfdsessstortype', 'ciscobfdsessrowstatus', 'ciscobfdsessauthpresflag', 'ciscobfdsessauthenticationtype', 'ciscobfdsessversionnumber', 'ciscobfdsesstype', 'ciscobfdsessinterface', 'ciscobfdsessperfpktin', 'ciscobfdsessperfpktout', 'ciscobfdsessuptime', 'ciscobfdsessperflastsessdowntime', 'ciscobfdsessperflastcommlostdiag', 'ciscobfdsessperfsessupcount', 'ciscobfdsessperfdisctime', 'ciscobfdsessperfpktinhc', 'ciscobfdsessperfpktouthc'], name, value)
class CiscoBfdSessAuthenticationType(Enum):
"""
CiscoBfdSessAuthenticationType (Enum Class)
The Authentication Type used for this BFD session. This
field is valid only when the Authentication Present bit is set
.. data:: simplePassword = 1
.. data:: keyedMD5 = 2
.. data:: meticulousKeyedMD5 = 3
.. data:: keyedSHA1 = 4
.. data:: meticulousKeyedSHA1 = 5
"""
simplePassword = Enum.YLeaf(1, "simplePassword")
keyedMD5 = Enum.YLeaf(2, "keyedMD5")
meticulousKeyedMD5 = Enum.YLeaf(3, "meticulousKeyedMD5")
keyedSHA1 = Enum.YLeaf(4, "keyedSHA1")
meticulousKeyedSHA1 = Enum.YLeaf(5, "meticulousKeyedSHA1")
class CiscoBfdSessOperMode(Enum):
"""
CiscoBfdSessOperMode (Enum Class)
This object specifies the current operating mode that the BFD
session is operating in.
.. data:: asyncModeWEchoFun = 1
.. data:: asynchModeWOEchoFun = 2
.. data:: demandModeWEchoFunction = 3
.. data:: demandModeWOEchoFunction = 4
"""
asyncModeWEchoFun = Enum.YLeaf(1, "asyncModeWEchoFun")
asynchModeWOEchoFun = Enum.YLeaf(2, "asynchModeWOEchoFun")
demandModeWEchoFunction = Enum.YLeaf(3, "demandModeWEchoFunction")
demandModeWOEchoFunction = Enum.YLeaf(4, "demandModeWOEchoFunction")
class CiscoBfdSessState(Enum):
"""
CiscoBfdSessState (Enum Class)
The perceived state of the BFD session.
.. data:: adminDown = 1
.. data:: down = 2
.. data:: init = 3
.. data:: up = 4
.. data:: failing = 5
"""
adminDown = Enum.YLeaf(1, "adminDown")
down = Enum.YLeaf(2, "down")
init = Enum.YLeaf(3, "init")
up = Enum.YLeaf(4, "up")
failing = Enum.YLeaf(5, "failing")
class CiscoBfdSessType(Enum):
"""
CiscoBfdSessType (Enum Class)
The type of this BFD session.
.. data:: singleHop = 1
.. data:: multiHop = 2
"""
singleHop = Enum.YLeaf(1, "singleHop")
multiHop = Enum.YLeaf(2, "multiHop")
class CiscoBfdSessMapTable(Entity):
"""
The BFD Session Mapping Table maps the complex
indexing of the BFD sessions to the flat
CiscoBfdSessIndexTC used in the ciscoBfdSessTable.
.. attribute:: ciscobfdsessmapentry
The BFD Session Entry describes BFD session that is mapped to this index
**type**\: list of :py:class:`CiscoBfdSessMapEntry <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessMapTable.CiscoBfdSessMapEntry>`
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessMapTable, self).__init__()
self.yang_name = "ciscoBfdSessMapTable"
self.yang_parent_name = "CISCO-IETF-BFD-MIB"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ciscoBfdSessMapEntry", ("ciscobfdsessmapentry", CISCOIETFBFDMIB.CiscoBfdSessMapTable.CiscoBfdSessMapEntry))])
self._leafs = OrderedDict()
self.ciscobfdsessmapentry = YList(self)
self._segment_path = lambda: "ciscoBfdSessMapTable"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessMapTable, [], name, value)
class CiscoBfdSessMapEntry(Entity):
"""
The BFD Session Entry describes BFD session
that is mapped to this index.
.. attribute:: ciscobfdsessapplicationid (key)
**type**\: int
**range:** 0..4294967295
**refers to**\: :py:class:`ciscobfdsessapplicationid <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry>`
**config**\: False
.. attribute:: ciscobfdsessdiscriminator (key)
**type**\: int
**range:** 1..4294967295
**refers to**\: :py:class:`ciscobfdsessdiscriminator <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry>`
**config**\: False
.. attribute:: ciscobfdsessaddrtype (key)
**type**\: :py:class:`InetAddressType <ydk.models.cisco_ios_xe.INET_ADDRESS_MIB.InetAddressType>`
**config**\: False
.. attribute:: ciscobfdsessaddr (key)
**type**\: str
**length:** 0..255
**refers to**\: :py:class:`ciscobfdsessaddr <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry>`
**config**\: False
.. attribute:: ciscobfdsessmapbfdindex
This object indicates the CiscoBfdSessIndexTC referred to by the indices of this row. In essence, a mapping is provided between these indices and the ciscoBfdSessTable
**type**\: int
**range:** 1..4294967295
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessMapTable.CiscoBfdSessMapEntry, self).__init__()
self.yang_name = "ciscoBfdSessMapEntry"
self.yang_parent_name = "ciscoBfdSessMapTable"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['ciscobfdsessapplicationid','ciscobfdsessdiscriminator','ciscobfdsessaddrtype','ciscobfdsessaddr']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ciscobfdsessapplicationid', (YLeaf(YType.str, 'ciscoBfdSessApplicationId'), ['int'])),
('ciscobfdsessdiscriminator', (YLeaf(YType.str, 'ciscoBfdSessDiscriminator'), ['int'])),
('ciscobfdsessaddrtype', (YLeaf(YType.enumeration, 'ciscoBfdSessAddrType'), [('ydk.models.cisco_ios_xe.INET_ADDRESS_MIB', 'InetAddressType', '')])),
('ciscobfdsessaddr', (YLeaf(YType.str, 'ciscoBfdSessAddr'), ['str'])),
('ciscobfdsessmapbfdindex', (YLeaf(YType.uint32, 'ciscoBfdSessMapBfdIndex'), ['int'])),
])
self.ciscobfdsessapplicationid = None
self.ciscobfdsessdiscriminator = None
self.ciscobfdsessaddrtype = None
self.ciscobfdsessaddr = None
self.ciscobfdsessmapbfdindex = None
self._segment_path = lambda: "ciscoBfdSessMapEntry" + "[ciscoBfdSessApplicationId='" + str(self.ciscobfdsessapplicationid) + "']" + "[ciscoBfdSessDiscriminator='" + str(self.ciscobfdsessdiscriminator) + "']" + "[ciscoBfdSessAddrType='" + str(self.ciscobfdsessaddrtype) + "']" + "[ciscoBfdSessAddr='" + str(self.ciscobfdsessaddr) + "']"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/ciscoBfdSessMapTable/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessMapTable.CiscoBfdSessMapEntry, ['ciscobfdsessapplicationid', 'ciscobfdsessdiscriminator', 'ciscobfdsessaddrtype', 'ciscobfdsessaddr', 'ciscobfdsessmapbfdindex'], name, value)
class CiscoBfdSessDiscMapTable(Entity):
"""
The BFD Session Discriminator Mapping Table maps a
local discriminator value to associated BFD sessions'
CiscoBfdSessIndexTC used in the ciscoBfdSessTable.
.. attribute:: ciscobfdsessdiscmapentry
Each row contains a mapping between a local discriminator value to an entry in ciscoBfdSessTable
**type**\: list of :py:class:`CiscoBfdSessDiscMapEntry <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable.CiscoBfdSessDiscMapEntry>`
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable, self).__init__()
self.yang_name = "ciscoBfdSessDiscMapTable"
self.yang_parent_name = "CISCO-IETF-BFD-MIB"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ciscoBfdSessDiscMapEntry", ("ciscobfdsessdiscmapentry", CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable.CiscoBfdSessDiscMapEntry))])
self._leafs = OrderedDict()
self.ciscobfdsessdiscmapentry = YList(self)
self._segment_path = lambda: "ciscoBfdSessDiscMapTable"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable, [], name, value)
class CiscoBfdSessDiscMapEntry(Entity):
"""
Each row contains a mapping between a local discriminator
value to an entry in ciscoBfdSessTable.
.. attribute:: ciscobfdsessdiscriminator (key)
**type**\: int
**range:** 1..4294967295
**refers to**\: :py:class:`ciscobfdsessdiscriminator <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry>`
**config**\: False
.. attribute:: ciscobfdsessdiscmapindex
This object indicates the CiscoBfdSessIndexTC referred to by the index of this row. In essence, a mapping is provided between this index and the ciscoBfdSessTable
**type**\: int
**range:** 1..4294967295
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable.CiscoBfdSessDiscMapEntry, self).__init__()
self.yang_name = "ciscoBfdSessDiscMapEntry"
self.yang_parent_name = "ciscoBfdSessDiscMapTable"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['ciscobfdsessdiscriminator']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ciscobfdsessdiscriminator', (YLeaf(YType.str, 'ciscoBfdSessDiscriminator'), ['int'])),
('ciscobfdsessdiscmapindex', (YLeaf(YType.uint32, 'ciscoBfdSessDiscMapIndex'), ['int'])),
])
self.ciscobfdsessdiscriminator = None
self.ciscobfdsessdiscmapindex = None
self._segment_path = lambda: "ciscoBfdSessDiscMapEntry" + "[ciscoBfdSessDiscriminator='" + str(self.ciscobfdsessdiscriminator) + "']"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/ciscoBfdSessDiscMapTable/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessDiscMapTable.CiscoBfdSessDiscMapEntry, ['ciscobfdsessdiscriminator', 'ciscobfdsessdiscmapindex'], name, value)
class CiscoBfdSessIpMapTable(Entity):
"""
The BFD Session IP Mapping Table maps given
ciscoBfdSessInterface, ciscoBfdSessAddrType, and
ciscoBfdSessAddr to an associated BFD sessions'
CiscoBfdSessIndexTC used in the ciscoBfdSessTable.
This table SHOULD contain those BFD sessions that are
of IP type\: singleHop(1) and multiHop(2).
.. attribute:: ciscobfdsessipmapentry
Each row contains a mapping between ciscoBfdSessInterface, ciscoBfdSessAddrType and ciscoBfdSessAddr values to an entry in ciscoBfdSessTable
**type**\: list of :py:class:`CiscoBfdSessIpMapEntry <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessIpMapTable.CiscoBfdSessIpMapEntry>`
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessIpMapTable, self).__init__()
self.yang_name = "ciscoBfdSessIpMapTable"
self.yang_parent_name = "CISCO-IETF-BFD-MIB"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ciscoBfdSessIpMapEntry", ("ciscobfdsessipmapentry", CISCOIETFBFDMIB.CiscoBfdSessIpMapTable.CiscoBfdSessIpMapEntry))])
self._leafs = OrderedDict()
self.ciscobfdsessipmapentry = YList(self)
self._segment_path = lambda: "ciscoBfdSessIpMapTable"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessIpMapTable, [], name, value)
class CiscoBfdSessIpMapEntry(Entity):
"""
Each row contains a mapping between ciscoBfdSessInterface,
ciscoBfdSessAddrType and ciscoBfdSessAddr values to an
entry in ciscoBfdSessTable.
.. attribute:: ciscobfdsessinterface (key)
**type**\: int
**range:** 1..2147483647
**refers to**\: :py:class:`ciscobfdsessinterface <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry>`
**config**\: False
.. attribute:: ciscobfdsessaddrtype (key)
**type**\: :py:class:`InetAddressType <ydk.models.cisco_ios_xe.INET_ADDRESS_MIB.InetAddressType>`
**config**\: False
.. attribute:: ciscobfdsessaddr (key)
**type**\: str
**length:** 0..255
**refers to**\: :py:class:`ciscobfdsessaddr <ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB.CISCOIETFBFDMIB.CiscoBfdSessTable.CiscoBfdSessEntry>`
**config**\: False
.. attribute:: ciscobfdsessipmapindex
This object indicates the CiscoBfdSessIndexTC referred to by the indices of this row. In essence, a mapping is provided between these indices and an entry in ciscoBfdSessTable
**type**\: int
**range:** 1..4294967295
**config**\: False
"""
_prefix = 'CISCO-IETF-BFD-MIB'
_revision = '2011-04-16'
def __init__(self):
super(CISCOIETFBFDMIB.CiscoBfdSessIpMapTable.CiscoBfdSessIpMapEntry, self).__init__()
self.yang_name = "ciscoBfdSessIpMapEntry"
self.yang_parent_name = "ciscoBfdSessIpMapTable"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['ciscobfdsessinterface','ciscobfdsessaddrtype','ciscobfdsessaddr']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ciscobfdsessinterface', (YLeaf(YType.str, 'ciscoBfdSessInterface'), ['int'])),
('ciscobfdsessaddrtype', (YLeaf(YType.enumeration, 'ciscoBfdSessAddrType'), [('ydk.models.cisco_ios_xe.INET_ADDRESS_MIB', 'InetAddressType', '')])),
('ciscobfdsessaddr', (YLeaf(YType.str, 'ciscoBfdSessAddr'), ['str'])),
('ciscobfdsessipmapindex', (YLeaf(YType.uint32, 'ciscoBfdSessIpMapIndex'), ['int'])),
])
self.ciscobfdsessinterface = None
self.ciscobfdsessaddrtype = None
self.ciscobfdsessaddr = None
self.ciscobfdsessipmapindex = None
self._segment_path = lambda: "ciscoBfdSessIpMapEntry" + "[ciscoBfdSessInterface='" + str(self.ciscobfdsessinterface) + "']" + "[ciscoBfdSessAddrType='" + str(self.ciscobfdsessaddrtype) + "']" + "[ciscoBfdSessAddr='" + str(self.ciscobfdsessaddr) + "']"
self._absolute_path = lambda: "CISCO-IETF-BFD-MIB:CISCO-IETF-BFD-MIB/ciscoBfdSessIpMapTable/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(CISCOIETFBFDMIB.CiscoBfdSessIpMapTable.CiscoBfdSessIpMapEntry, ['ciscobfdsessinterface', 'ciscobfdsessaddrtype', 'ciscobfdsessaddr', 'ciscobfdsessipmapindex'], name, value)
def clone_ptr(self):
self._top_entity = CISCOIETFBFDMIB()
return self._top_entity
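# --- Usage sketch (illustrative, not part of the generated bindings) ---
# A minimal read of the BFD session table over NETCONF via ydk-py's CRUD
# service; the device address and credentials below are placeholders:
#
#     from ydk.services import CRUDService
#     from ydk.providers import NetconfServiceProvider
#     from ydk.models.cisco_ios_xe.CISCO_IETF_BFD_MIB import CISCOIETFBFDMIB
#
#     provider = NetconfServiceProvider(address='device.example.com',
#                                       username='admin', password='admin')
#     mib = CRUDService().read(provider, CISCOIETFBFDMIB())
#     for entry in mib.ciscobfdsesstable.ciscobfdsessentry:
#         print(entry.ciscobfdsessindex, entry.ciscobfdsessstate)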
|
bzl | b4145e31ab07dd6ccff80e6546f13c9c40e9d2ce | # E.g. ApplicationCommunicationMessages -> application-communication-messages
def pascal_to_kebab(input):
kebab_cased = ""
for idx in range(len(input)):
letter = input[idx]
if idx == 0:
kebab_cased += letter.lower()
elif letter.isupper():
kebab_cased += "-" + letter.lower()
else:
kebab_cased += letter.lower()
return kebab_cased
def file_to_kebab(filepath):
filename = filepath.split("/")[-1].split(".")[0]
return pascal_to_kebab(filename)
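# Examples (illustrative):
#   pascal_to_kebab("ApplicationCommunicationMessages")
#       -> "application-communication-messages"
#   file_to_kebab("proto/ApplicationCommunicationMessages.proto")
#       -> "application-communication-messages"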
|
py | b4145ecd65263ca91a82678b3652f6bdc488dbfc | import argparse
import os
import shutil
import time
import random
import numpy as np
import math
import sys
sys.path.append('../../')
from utils import *
from pytorch_classification.utils import Bar, AverageMeter
from NeuralNet import NeuralNet
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.distributions import Categorical
from torchvision import datasets, transforms
from torch.autograd import Variable
from .NaiveNNet import NaiveNNet as nnnet
from .VNet import VNet as vnet
args = dotdict({
'lr': 0.001,
'dropout': 0.3,
'epochs': 10,
'batch_size': 64,
'cuda': torch.cuda.is_available(),
'num_channels': 512,
})
class NNetWrapper(NeuralNet):
def __init__(self, game):
self.nnet = nnnet(game, args)
self.board_x, self.board_y = game.getBoardSize()
self.action_size = game.getActionSize()
if args.cuda:
self.nnet.cuda()
def train(self, examples):
optimizer = optim.Adam(self.nnet.parameters())
# each example is a (board, action, delta) triple; delta is the
# return/advantage signal that weights the policy-gradient loss below
boards, actions, deltas = list(zip(*examples))
for i in range(len(boards)):
board = torch.FloatTensor(boards[i].astype(np.float64))
board = Variable(board)
delta = torch.FloatTensor(np.asarray(deltas[i]).astype(np.float64))
delta = Variable(delta)
out_pi, out_v = self.nnet(board)
l_pi = self.loss_pi(delta, actions[i], out_pi)
l_v = self.loss_v(delta, out_v)
total_loss = l_pi + l_v
# compute gradient and do SGD step
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
def predict(self, board):
"""
board: np array with board
"""
# timing
start = time.time()
# preparing input
board = torch.FloatTensor(board.astype(np.float64))
if args.cuda: board = board.contiguous().cuda()
board = Variable(board, volatile=True)
board = board.view(1, self.board_x, self.board_y)
self.nnet.eval()
pi,v = self.nnet(board)
#print('PREDICTION TIME TAKEN : {0:03f}'.format(time.time()-start))
return pi.data.cpu().numpy()[0], v.data.cpu().numpy()[0]
def loss_pi(self, reward, action, output):
output = output[:, action].view(1, -1)
log_prob = output.log()
loss = -log_prob * reward
return loss
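# loss_pi is the REINFORCE policy-gradient term, -log pi(a|s) * delta,
# where the delta passed in from train() acts as the reward/advantage
# signal; loss_v below is the squared error between delta and the value
# head, so the summed objective in train() resembles an actor-critic update.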
def loss_v(self, delta, output):
return (delta - output.view(-1))**2
def save_checkpoint(self, folder='checkpoint', filename='checkpoint.pth.tar'):
filepath = os.path.join(folder, filename)
if not os.path.exists(folder):
print("Checkpoint Directory does not exist! Making directory {}".format(folder))
os.mkdir(folder)
else:
print("Checkpoint Directory exists! ")
torch.save({
'state_dict' : self.nnet.state_dict(),
}, filepath)
def load_checkpoint(self, folder='checkpoint', filename='checkpoint.pth.tar'):
# https://github.com/pytorch/examples/blob/master/imagenet/main.py#L98
filepath = os.path.join(folder, filename)
if not os.path.exists(filepath):
raise("No model in path {}".format(filepath))
checkpoint = torch.load(filepath)
self.nnet.load_state_dict(checkpoint['state_dict'])
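# Usage sketch (illustrative; assumes a `game` object exposing
# getBoardSize() and getActionSize(), in the style of alpha-zero-general
# frameworks):
#
#     nnet = NNetWrapper(game)
#     pi, v = nnet.predict(board)   # board: np array of shape (board_x, board_y)
#     nnet.save_checkpoint(folder='checkpoint', filename='best.pth.tar')
#     nnet.load_checkpoint(folder='checkpoint', filename='best.pth.tar')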
|
py | b4145ed3ed826e2007e90529571a96be063c8083 | import functools
import typing as tp
import haiku as hk
import numpy as np
from elegy import utils
from elegy.module import Module, LOCAL, LocalContext, add_summary, get_module
from elegy import module
def sequential(*layers: tp.Callable[..., tp.Any]) -> tp.Callable[..., tp.Any]:
"""
Connects all layers. *args and **kwargs are passed to the first layer.
```python
def call(self, x):
mlp = elegy.nn.sequential(
elegy.nn.Linear(64),
jax.nn.relu,
elegy.nn.Linear(32),
jax.nn.relu,
elegy.nn.Linear(10),
jax.nn.softmax,
)
y = mlp(x)
...
```
!!! Note
`sequential` is not a `Module`, that is, it won't create a scope over the layers it runs;
in contrast to `Sequential`, the layers are eagerly instantiated outside of `sequential`
and just passed to it to automate the execution.
Arguments:
layers: Modules or functions passed as `*args`
Returns:
A callable that waits for the inputs and applies the layers sequentially.
"""
def call(inputs, *args, **kwargs):
out = inputs
for i, layer in enumerate(layers):
if i == 0:
out = layer(out, *args, **kwargs)
else:
out = layer(out)
if not isinstance(layer, Module):
name = (
layer.__name__
if hasattr(layer, "__name__")
else layer.__class__.__name__
)
add_summary(name, out)
return out
return call
class Sequential(Module):
"""
Sequentially calls the given list of layers.
Note that `Sequential` is limited in the range of possible architectures
it can handle. This is a deliberate design decision; `Sequential` is only
meant to be used for the simple case of fusing together modules/ops where
the input of a particular module/op is the output of the previous one.
Another restriction is that it is not possible to have extra arguments in the
`call` method that are passed to the constituents of the module - for
example, if there is a `BatchNorm` module in `Sequential` and the user
wishes to switch the `training` flag. If this is the desired use case,
the recommended solution is to subclass `Module` and implement
`call`:
```python
class CustomModule(elegy.Module):
def call(self, x, training):
x = elegy.nn.Conv2D(32, 4, 2)(x)
x = elegy.nn.BatchNorm(True, True, 0.9)(x, training)
x = jax.nn.relu(x)
return x
```
"""
def __init__(
self, layers: tp.Callable[[], tp.Iterable[tp.Callable[..., tp.Any]]], **kwargs
):
self.layers = tuple(layers())
# set signature of call to the signature of the first layer
# by creating a wrapper function.
current_call = self.call
@utils.wraps(self.layers[0])
def call(*args, **kwargs):
return current_call(*args, **kwargs)
self.call = call
super().__init__(**kwargs)
def call(self, *args, **kwargs):
"""Connects all layers. *args and **kwargs are passed to the first layer."""
return sequential(*self.layers)(*args, **kwargs)
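# Usage sketch (illustrative): the constructor takes a zero-argument callable
# returning the layers, so layer instantiation is deferred until construction:
#
#     mlp = Sequential(
#         lambda: [
#             elegy.nn.Linear(64),
#             jax.nn.relu,
#             elegy.nn.Linear(10),
#         ]
#     )
#     y = mlp(x)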
|
py | b4145ef113af4a0be08260dfee685d9d54bdfc3e | # Shows the top artists for a user
import pprint
import sys
import spotipy
import spotipy.util as util
import simplejson as json
if len(sys.argv) > 1:
username = sys.argv[1]
else:
print("Usage: %s username" % (sys.argv[0],))
sys.exit()
scope = 'user-top-read'
token = util.prompt_for_user_token(username, scope)
if token:
sp = spotipy.Spotify(auth=token)
sp.trace = False
ranges = ['short_term', 'medium_term', 'long_term']
for time_range in ranges:
print("range:", time_range)
results = sp.current_user_top_artists(time_range=time_range, limit=50)
for i, item in enumerate(results['items']):
print(i, item['name'])
print()
else:
print("Can't get token for", username)
|
py | b4145efd0e9c8bf9a65446116dcacdfd657ba22c | # coding=utf-8
#
# @lc app=leetcode id=15 lang=python
#
# [15] 3Sum
#
# https://leetcode.com/problems/3sum/description/
#
# algorithms
# Medium (23.44%)
# Total Accepted: 495.1K
# Total Submissions: 2.1M
# Testcase Example: '[-1,0,1,2,-1,-4]'
#
# Given an array nums of n integers, are there elements a, b, c in nums such
# that a + b + c = 0? Find all unique triplets in the array which gives the sum
# of zero.
#
# Note:
#
# The solution set must not contain duplicate triplets.
#
# Example:
#
#
# Given array nums = [-1, 0, 1, 2, -1, -4],
#
# A solution set is:
# [
# [-1, 0, 1],
# [-1, -1, 2]
# ]
#
#
#
import itertools
import time
def api_deco(func):
def wrapper(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
print time.time() - start
return result
return wrapper
class Solution(object):
# @api_deco
def threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# two pointers turns out to be even faster than the hash-table approach
if len(nums) < 3:
return []
nums.sort()
result = []
for ix, x in enumerate(nums[:-2]):
if ix and nums[ix-1] == x:
continue
fast = len(nums)-1
low = ix+1
while low < fast:
if x + nums[low] + nums[fast] == 0:
result.append([x, nums[low], nums[fast]])
low += 1
fast -= 1
while low < fast and nums[low] == nums[low-1]:
low += 1
while low < fast and nums[fast] == nums[fast+1]:
fast -= 1
elif x + nums[low] + nums[fast] < 0:
low += 1
else:
fast -= 1
return result
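# Worked example for threeSum (illustrative):
#   nums = [-1, 0, 1, 2, -1, -4] sorts to [-4, -1, -1, 0, 1, 2]
#   x = -4: no pair in the tail sums to 4 (max is 1 + 2 = 3)
#   x = -1 (ix=1): the low/fast scan finds (-1, -1, 2) and (-1, 0, 1)
#   ix = 2 is skipped because nums[1] == nums[2] == -1 (duplicate anchor)
#   x = 0: 0 + 1 + 2 > 0, so fast moves left and the window closes
#   -> [[-1, -1, 2], [-1, 0, 1]]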
@api_deco
def oldthreeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# a single hash table does all the work
# triplets inside each result stay sorted
# the results themselves need not be ordered
# O(n^2)
# still time limit
from collections import Counter
counter = Counter(nums)
result = []
for first in counter:
counter[first] -= 1
for second in counter:
if counter[second] == 0 or second < first:
continue
counter[second] -= 1
target = -(first+second)
if not counter.get(target) or target < second:
counter[second] += 1
continue
result.append([first, second, target])
counter[second] += 1
counter[first] += 1
return result
def ______threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# hash map + two sum
# copy from algorithm.yuanbin.me
# still time limit
if len(nums) < 3:
return []
result = []
# sorting first prevents duplicate triplets in the result
# otherwise we would get [[-1, 0, 1], [-1, 2, -1], [0, 1, -1]]
# after sorting, identical triplets compare equal; a list is not as fast as a set,
# but with sorted triplets a list is still enough to guarantee uniqueness
nums.sort()
for key, num in enumerate(nums):
data = {}
for v in nums[key+1:]:
if -num-v in data:
if [num, -num-v, v] not in result:
result.append([num, -num-v, v])
else:
data[v] = data.get(v, 0) + 1
return result
def _____threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# duplicate numbers are allowed in the input
# time complexity O(n^3)
if len(nums) < 3:
return []
result = []
for ix, x in enumerate(nums):
for iy, y in enumerate(nums[ix+1:]):
for iz, z in enumerate(nums[ix+iy+2:]):
if x + y + z == 0:
result.append([x, y, z])
return result
def _______threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# reduce the problem to 2sum
# Counter is not hashable, so `visited` must be a list; the linear
# membership scan causes the time limit,
# and the results come out in an unexpected order:
# [[-1, 1, 0], [-1, -1, 2]]
# sorting fixes the order,
# but it still exceeds the time limit
from collections import Counter
visited = []
result = []
for num in nums:
new_nums = nums[:]
new_nums.remove(num)
for r in self.two_sum(new_nums, -num):
r.insert(0, num)
if Counter(r) not in visited:
result.append(sorted(r))
visited.append(Counter(r))
return sorted(result)
def two_sum(self, nums, num):
data = {}
result = []
for n in nums:
if num - n in data:
result.append([n, num - n])
data[n] = data.get(n, 0) + 1
return result
def ____threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# both the order of the triplets and the order of their elements matter
# look for two numbers whose sum equals the negative of the third
# list is not hashable, so duplicates slip through
# and the output is not in the expected order
#
res = []
for i in range(len(nums)):
items = nums[::]
items.pop(i)
for item in itertools.combinations(items, 2):
if sum(item) == -nums[i]:
re = [nums[i]]
re.extend(item)
res.append(re)
return res
def __threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# an answer triplet can contain duplicate elements,
# so we shouldn't use a set
res = []
ses = []
for i in self.generate(nums):
if len(i) == 3 and sum(i) == 0 and set(i) not in ses:
ses.append(set(i))
res.append(i)
return res
def generate(self, nums):
if not nums:
return []
left = nums[:1]
right = self.generate(nums[1:])
res = []
for i in right:
if len(i) < 3:
j = i[::]
i.append(left[0])
res.append(i)
res.append(j)
else:
res.append(i)
return [left] + res
def ___threeSum(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
"""
# results are not in the expected order
# answer [[-1,0,1],[-1,2,-1]]
# expect [[-1,-1,2],[-1,0,1]]
# Still Time Limit
ses = []
res = []
for i in itertools.combinations(nums, 3):
if sum(i) == 0 and set(i) not in ses:
ses.append(set(i))
res.append(sorted(i))
return sorted(res)
# if __name__ == "__main__":
# s = Solution()
# print s.threeSum([-1, 0, 1, 2, -1, -4],)
# print s.threeSum([-5,-1,10,-15,10,-11,-8,-14,5,3,9,3,-11,-4,0,5,5,1,14,2,-13,0,-10,-12,-2,4,-9,-7,14,-2,3,-6,13,-10,-14,8,-14,-15,1,7,4,-5,-13,8,-1,-6,-10,-11,10,11,6,13,-4,11,-14,1,1,14,9,-8,-2,-11,1,-12,-14,-6,3,10,-6,-11,-6,5,-9,-4,-10,5,5,-5,1,1,13,-8,-1,-14,-11,-8,2,-3,-9,-12,4,4,14,12,-1,8,-9,-13,3,0,13,12,-9,12,-7,-12,2,5,-1,-11])
# s.threeSum([82597, -9243, 62390, 83030, ...])  # very large stress-test array, elided
# s.oldthreeSum([82597,-9243,62390,83030,-97960,-26521,-61011,83390,-38677,12333,75987,46091,83794,19355,-71037,-6242,-28801,324,1202,-90885,-2989,-95597,-34333,35528,5680,89093,-90606,50360,-29393,-27012,53313,65213,99818,-82405,-41661,-3333,-51952,72135,-1523,26377,74685,96992,92263,15929,5467,-99555,-43348,-41689,-60383,-3990,32165,65265,-72973,-58372,12741,-48568,-46596,72419,-1859,34153,62937,81310,-61823,-96770,-54944,8845,-91184,24208,-29078,31495,65258,14198,85395,70506,-40908,56740,-12228,-40072,32429,93001,68445,-73927,25731,-91859,-24150,10093,-60271,-81683,-18126,51055,48189,-6468,25057,81194,-58628,74042,66158,-14452,-49851,-43667,11092,39189,-17025,-79173,13606,83172,92647,-59741,19343,-26644,-57607,82908,-20655,1637,80060,98994,39331,-31274,-61523,91225,-72953,13211,-75116,-98421,-41571,-69074,99587,39345,42151,-2460,98236,15690,-52507,-95803,-48935,-46492,-45606,-79254,-99851,52533,73486,39948,-7240,71815,-585,-96252,90990,-93815,93340,-71848,58733,-14859,-83082,-75794,-82082,-24871,-15206,91207,-56469,-93618,67131,-8682,75719,87429,-98757,-7535,-24890,-94160,85003,33928,75538,97456,-66424,-60074,-8527,-28697,-22308,2246,-70134,-82319,-10184,87081,-34949,-28645,-47352,-83966,-60418,-15293,-53067,-25921,55172,75064,95859,48049,34311,-86931,-38586,33686,-36714,96922,76713,-22165,-80585,-34503,-44516,39217,-28457,47227,-94036,43457,24626,-87359,26898,-70819,30528,-32397,-69486,84912,-1187,-98986,-32958,4280,-79129,-65604,9344,58964,50584,71128,-55480,24986,15086,-62360,-42977,-49482,-77256,-36895,-74818,20,3063,-49426,28152,-97329,6086,86035,-88743,35241,44249,19927,-10660,89404,24179,-26621,-6511,57745,-28750,96340,-97160,-97822,-49979,52307,79462,94273,-24808,77104,9255,-83057,77655,21361,55956,-9096,48599,-40490,-55107,2689,29608,20497,66834,-34678,23553,-81400,-66630,-96321,-34499,-12957,-20564,25610,-4322,-58462,20801,53700,71527,24669,-54534,57879,-3221,33636,3900,97832,-27688,-98715,5992,24520,-55401,-57613,-69926,57377,-77610,20123,52174,860,60429,-91994,-62403,-6218,-90610,-37263,-15052,62069,-96465,44254,89892,-3406,19121,-41842,-87783,-64125,-56120,73904,-22797,-58118,-4866,5356,75318,46119,21276,-19246,-9241,-97425,57333,-15802,93149,25689,-5532,95716,39209,-87672,-29470,-16324,-15331,27632,-39454,56530,-16000,29853,46475,78242,-46602,83192,-73440,-15816,50964,-36601,89758,38375,-40007,-36675,-94030,67576,46811,-64919,45595,76530,40398,35845,41791,67697,-30439,-82944,63115,33447,-36046,-50122,-34789,43003,-78947,-38763,-89210,32756,-20389,-31358,-90526,-81607,88741,86643,98422,47389,-75189,13091,95993,-15501,94260,-25584,-1483,-67261,-70753,25160,89614,-90620,-48542,83889,-12388,-9642,-37043,-67663,28794,-8801,13621,12241,55379,84290,21692,-95906,-85617,-17341,-63767,80183,-4942,-51478,30997,-13658,8838,17452,-82869,-39897,68449,31964,98158,-49489,62283,-62209,-92792,-59342,55146,-38533,20496,62667,62593,36095,-12470,5453,-50451,74716,-17902,3302,-16760,-71642,-34819,96459,-72860,21638,47342,-69897,-40180,44466,76496,84659,13848,-91600,-90887,-63742,-2156,-84981,-99280,94326,-33854,92029,-50811,98711,-36459,-75555,79110,-88164,-97397,-84217,97457,64387,30513,-53190,-83215,252,2344,-27177,-92945,-89010,82662,-11670,86069,53417,42702,97082,3695,-14530,-46334,17910,77999,28009,-12374,15498,-46941,97088,-35030,95040,92095,-59469,-24761,46491,67357,-66658,37446,-65130,-50416,99197,30925,27308,54122,-44719,12582,-99525,-38446,-69050,-22352,94757,-56062,33684,-40199,-46399,96842,-50881,-22380,-65021,40582,53623,-76034,77018,-97074,-84838,-22953,-74205,79715,-33920,-35794,-
91369,73421,-82492,63680,-14915,-33295,37145,76852,-69442,60125,-74166,74308,-1900,-30195,-16267,-60781,-27760,5852,38917,25742,-3765,49097,-63541,98612,-92865,-30248,9612,-8798,53262,95781,-42278,-36529,7252,-27394,-5021,59178,80934,-48480,-75131,-54439,-19145,-48140,98457,-6601,-51616,-89730,78028,32083,-48904,16822,-81153,-8832,48720,-80728,-45133,-86647,-4259,-40453,2590,28613,50523,-4105,-27790,-74579,-17223,63721,33489,-47921,97628,-97691,-14782,-65644,18008,-93651,-71266,80990,-76732,-47104,35368,28632,59818,-86269,-89753,34557,-92230,-5933,-3487,-73557,-13174,-43981,-43630,-55171,30254,-83710,-99583,-13500,71787,5017,-25117,-78586,86941,-3251,-23867,-36315,75973,86272,-45575,77462,-98836,-10859,70168,-32971,-38739,-12761,93410,14014,-30706,-77356,-85965,-62316,63918,-59914,-64088,1591,-10957,38004,15129,-83602,-51791,34381,-89382,-26056,8942,5465,71458,-73805,-87445,-19921,-80784,69150,-34168,28301,-68955,18041,6059,82342,9947,39795,44047,-57313,48569,81936,-2863,-80932,32976,-86454,-84207,33033,32867,9104,-16580,-25727,80157,-70169,53741,86522,84651,68480,84018,61932,7332,-61322,-69663,76370,41206,12326,-34689,17016,82975,-23386,39417,72793,44774,-96259,3213,79952,29265,-61492,-49337,14162,65886,3342,-41622,-62659,-90402,-24751,88511,54739,-21383,-40161,-96610,-24944,-602,-76842,-21856,69964,43994,-15121,-85530,12718,13170,-13547,69222,62417,-75305,-81446,-38786,-52075,-23110,97681,-82800,-53178,11474,35857,94197,-58148,-23689,32506,92154,-64536,-73930,-77138,97446,-83459,70963,22452,68472,-3728,-25059,-49405,95129,-6167,12808,99918,30113,-12641,-26665,86362,-33505,50661,26714,33701,89012,-91540,40517,-12716,-57185,-87230,29914,-59560,13200,-72723,58272,23913,-45586,-96593,-26265,-2141,31087,81399,92511,-34049,20577,2803,26003,8940,42117,40887,-82715,38269,40969,-50022,72088,21291,-67280,-16523,90535,18669,94342,-39568,-88080,-99486,-20716,23108,-28037,63342,36863,-29420,-44016,75135,73415,16059,-4899,86893,43136,-7041,33483,-67612,25327,40830,6184,61805,4247,81119,-22854,-26104,-63466,63093,-63685,60369,51023,51644,-16350,74438,-83514,99083,10079,-58451,-79621,48471,67131,-86940,99093,11855,-22272,-67683,-44371,9541,18123,37766,-70922,80385,-57513,-76021,-47890,36154,72935,84387,-92681,-88303,-7810,59902,-90,-64704,-28396,-66403,8860,13343,33882,85680,7228,28160,-14003,54369,-58893,92606,-63492,-10101,64714,58486,29948,-44679,-22763,10151,-56695,4031,-18242,-36232,86168,-14263,9883,47124,47271,92761,-24958,-73263,-79661,-69147,-18874,29546,-92588,-85771,26451,-86650,-43306,-59094,-47492,-34821,-91763,-47670,33537,22843,67417,-759,92159,63075,94065,-26988,55276,65903,30414,-67129,-99508,-83092,-91493,-50426,14349,-83216,-76090,32742,-5306,-93310,-60750,-60620,-45484,-21108,-58341,-28048,-52803,69735,78906,81649,32565,-86804,-83202,-65688,-1760,89707,93322,-72750,84134,71900,-37720,19450,-78018,22001,-23604,26276,-21498,65892,-72117,-89834,-23867,55817,-77963,42518,93123,-83916,63260,-2243,-97108,85442,-36775,17984,-58810,99664,-19082,93075,-69329,87061,79713,16296,70996,13483,-74582,49900,-27669,-40562,1209,-20572,34660,83193,75579,7344,64925,88361,60969,3114,44611,-27445,53049,-16085,-92851,-53306,13859,-33532,86622,-75666,-18159,-98256,51875,-42251,-27977,-18080,23772,38160,41779,9147,94175,99905,-85755,62535,-88412,-52038,-68171,93255,-44684,-11242,-104,31796,62346,-54931,-55790,-70032,46221,56541,-91947,90592,93503,4071,20646,4856,-63598,15396,-50708,32138,-85164,38528,-89959,53852,57915,-42421,-88916,-75072,67030,-29066,49542,-71591,61708,-53985,-43051,28483,46991,-83216,80991
,-46254,-48716,39356,-8270,-47763,-34410,874,-1186,-7049,28846,11276,21960,-13304,-11433,-4913,55754,79616,70423,-27523,64803,49277,14906,-97401,-92390,91075,70736,21971,-3303,55333,-93996,76538,54603,-75899,98801,46887,35041,48302,-52318,55439,24574,14079,-24889,83440,14961,34312,-89260,-22293,-81271,-2586,-71059,-10640,-93095,-5453,-70041,66543,74012,-11662,-52477,-37597,-70919,92971,-17452,-67306,-80418,7225,-89296,24296,86547,37154,-10696,74436,-63959,58860,33590,-88925,-97814,-83664,85484,-8385,-50879,57729,-74728,-87852,-15524,-91120,22062,28134,80917,32026,49707,-54252,-44319,-35139,13777,44660,85274,25043,58781,-89035,-76274,6364,-63625,72855,43242,-35033,12820,-27460,77372,-47578,-61162,-70758,-1343,-4159,64935,56024,-2151,43770,19758,-30186,-86040,24666,-62332,-67542,73180,-25821,-27826,-45504,-36858,-12041,20017,-24066,-56625,-52097,-47239,-90694,8959,7712,-14258,-5860,55349,61808,-4423,-93703,64681,-98641,-25222,46999,-83831,-54714,19997,-68477,66073,51801,-66491,52061,-52866,79907,-39736,-68331,68937,91464,98892,910,93501,31295,-85873,27036,-57340,50412,21,-2445,29471,71317,82093,-94823,-54458,-97410,39560,-7628,66452,39701,54029,37906,46773,58296,60370,-61090,85501,-86874,71443,-72702,-72047,14848,34102,77975,-66294,-36576,31349,52493,-70833,-80287,94435,39745,-98291,84524,-18942,10236,93448,50846,94023,-6939,47999,14740,30165,81048,84935,-19177,-13594,32289,62628,-90612,-542,-66627,64255,71199,-83841,-82943,-73885,8623,-67214,-9474,-35249,62254,-14087,-90969,21515,-83303,94377,-91619,19956,-98810,96727,-91939,29119,-85473,-82153,-69008,44850,74299,-76459,-86464,8315,-49912,-28665,59052,-69708,76024,-92738,50098,18683,-91438,18096,-19335,35659,91826,15779,-73070,67873,-12458,-71440,-46721,54856,97212,-81875,35805,36952,68498,81627,-34231,81712,27100,-9741,-82612,18766,-36392,2759,41728,69743,26825,48355,-17790,17165,56558,3295,-24375,55669,-16109,24079,73414,48990,-11931,-78214,90745,19878,35673,-15317,-89086,94675,-92513,88410,-93248,-19475,-74041,-19165,32329,-26266,-46828,-18747,45328,8990,-78219,-25874,-74801,-44956,-54577,-29756,-99822,-35731,-18348,-68915,-83518,-53451,95471,-2954,-13706,-8763,-21642,-37210,16814,-60070,-42743,27697,-36333,-42362,11576,85742,-82536,68767,-56103,-63012,71396,-78464,-68101,-15917,-11113,-3596,77626,-60191,-30585,-73584,6214,-84303,18403,23618,-15619,-89755,-59515,-59103,-74308,-63725,-29364,-52376,-96130,70894,-12609,50845,-2314,42264,-70825,64481,55752,4460,-68603,-88701,4713,-50441,-51333,-77907,97412,-66616,-49430,60489,-85262,-97621,-18980,44727,-69321,-57730,66287,-92566,-64427,-14270,11515,-92612,-87645,61557,24197,-81923,-39831,-10301,-23640,-76219,-68025,92761,-76493,68554,-77734,-95620,-11753,-51700,98234,-68544,-61838,29467,46603,-18221,-35441,74537,40327,-58293,75755,-57301,-7532,-94163,18179,-14388,-22258,-46417,-48285,18242,-77551,82620,250,-20060,-79568,-77259,82052,-98897,-75464,48773,-79040,-11293,45941,-67876,-69204,-46477,-46107,792,60546,-34573,-12879,-94562,20356,-48004,-62429,96242,40594,2099,99494,25724,-39394,-2388,-18563,-56510,-83570,-29214,3015,74454,74197,76678,-46597,60630,-76093,37578,-82045,-24077,62082,-87787,-74936,58687,12200,-98952,70155,-77370,21710,-84625,-60556,-84128,925,65474,-15741,-94619,88377,89334,44749,22002,-45750,-93081,-14600,-83447,46691,85040,-66447,-80085,56308,44310,24979,-29694,57991,4675,-71273,-44508,13615,-54710,23552,-78253,-34637,50497,68706,81543,-88408,-21405,6001,-33834,-21570,-46692,-25344,20310,71258,-97680,11721,59977,59247,-48949,98955,-50276,-80844,-27935,-76102,55858,-33492,
40680,66691,-33188,8284,64893,-7528,6019,-85523,8434,-64366,-56663,26862,30008,-7611,-12179,-70076,21426,-11261,-36864,-61937,-59677,929,-21052,3848,-20888,-16065,98995,-32293,-86121,-54564,77831,68602,74977,31658,40699,29755,98424,80358,-69337,26339,13213,-46016,-18331,64713,-46883,-58451,-70024,-92393,-4088,70628,-51185,71164,-75791,-1636,-29102,-16929,-87650,-84589,-24229,-42137,-15653,94825,13042,88499,-47100,-90358,-7180,29754,-65727,-42659,-85560,-9037,-52459,20997,-47425,17318,21122,20472,-23037,65216,-63625,-7877,-91907,24100,-72516,22903,-85247,-8938,73878,54953,87480,-31466,-99524,35369,-78376,89984,-15982,94045,-7269,23319,-80456,-37653,-76756,2909,81936,54958,-12393,60560,-84664,-82413,66941,-26573,-97532,64460,18593,-85789,-38820,-92575,-43663,-89435,83272,-50585,13616,-71541,-53156,727,-27644,16538,34049,57745,34348,35009,16634,-18791,23271,-63844,95817,21781,16590,59669,15966,-6864,48050,-36143,97427,-59390,96931,78939,-1958,50777,43338,-51149,39235,-27054,-43492,67457,-83616,37179,10390,85818,2391,73635,87579,-49127,-81264,-79023,-81590,53554,-74972,-83940,-13726,-39095,29174,78072,76104,47778,25797,-29515,-6493,-92793,22481,-36197,-65560,42342,15750,97556,99634,-56048,-35688,13501,63969,-74291,50911,39225,93702,-3490,-59461,-30105,-46761,-80113,92906,-68487,50742,36152,-90240,-83631,24597,-50566,-15477,18470,77038,40223,-80364,-98676,70957,-63647,99537,13041,31679,86631,37633,-16866,13686,-71565,21652,-46053,-80578,-61382,68487,-6417,4656,20811,67013,-30868,-11219,46,74944,14627,56965,42275,-52480,52162,-84883,-52579,-90331,92792,42184,-73422,-58440,65308,-25069,5475,-57996,59557,-17561,2826,-56939,14996,-94855,-53707,99159,43645,-67719,-1331,21412,41704,31612,32622,1919,-69333,-69828,22422,-78842,57896,-17363,27979,-76897,35008,46482,-75289,65799,20057,7170,41326,-76069,90840,-81253,-50749,3649,-42315,45238,-33924,62101,96906,58884,-7617,-28689,-66578,62458,50876,-57553,6739,41014,-64040,-34916,37940,13048,-97478,-11318,-89440,-31933,-40357,-59737,-76718,-14104,-31774,28001,4103,41702,-25120,-31654,63085,-3642,84870,-83896,-76422,-61520,12900,88678,85547,33132,-88627,52820,63915,-27472,78867,-51439,33005,-23447,-3271,-39308,39726,-74260,-31874,-36893,93656,910,-98362,60450,-88048,99308,13947,83996,-90415,-35117,70858,-55332,-31721,97528,82982,-86218,6822,25227,36946,97077,-4257,-41526,56795,89870,75860,-70802,21779,14184,-16511,-89156,-31422,71470,69600,-78498,74079,-19410,40311,28501,26397,-67574,-32518,68510,38615,19355,-6088,-97159,-29255,-92523,3023,-42536,-88681,64255,41206,44119,52208,39522,-52108,91276,-70514,83436,63289,-79741,9623,99559,12642,85950,83735,-21156,-67208,98088,-7341,-27763,-30048,-44099,-14866,-45504,-91704,19369,13700,10481,-49344,-85686,33994,19672,36028,60842,66564,-24919,33950,-93616,-47430,-35391,-28279,56806,74690,39284,-96683,-7642,-75232,37657,-14531,-86870,-9274,-26173,98640,88652,64257,46457,37814,-19370,9337,-22556,-41525,39105,-28719,51611,-93252,98044,-90996,21710,-47605,-64259,-32727,53611,-31918,-3555,33316,-66472,21274,-37731,-2919,15016,48779,-88868,1897,41728,46344,-89667,37848,68092,-44011,85354,-43776,38739,-31423,-66330,65167,-22016,59405,34328,-60042,87660,-67698,-59174,-1408,-46809,-43485,-88807,-60489,13974,22319,55836,-62995,-37375,-4185,32687,-36551,-75237,58280,26942,-73756,71756,78775,-40573,14367,-71622,-77338,24112,23414,-7679,-51721,87492,85066,-21612,57045,10673,-96836,52461,-62218,-9310,65862,-22748,89906,-96987,-98698,26956,-43428,46141,47456,28095,55952,67323,-36455,-60202,-43302,-82932,42020,77036,10142,60406,70331
,63836,58850,-66752,52109,21395,-10238,-98647,-41962,27778,69060,98535,-28680,-52263,-56679,66103,-42426,27203,80021,10153,58678,36398,63112,34911,20515,62082,-15659,-40785,27054,43767,-20289,65838,-6954,-60228,-72226,52236,-35464,25209,-15462,-79617,-41668,-84083,62404,-69062,18913,46545,20757,13805,24717,-18461,-47009,-25779,68834,64824,34473,39576,31570,14861,-15114,-41233,95509,68232,67846,84902,-83060,17642,-18422,73688,77671,-26930,64484,-99637,73875,6428,21034,-73471,19664,-68031,15922,-27028,48137,54955,-82793,-41144,-10218,-24921,-28299,-2288,68518,-54452,15686,-41814,66165,-72207,-61986,80020,50544,-99500,16244,78998,40989,14525,-56061,-24692,-94790,21111,37296,-90794,72100,70550,-31757,17708,-74290,61910,78039,-78629,-25033,73172,-91953,10052,64502,99585,-1741,90324,-73723,68942,28149,30218,24422,16659,10710,-62594,94249,96588,46192,34251,73500,-65995,-81168,41412,-98724,-63710,-54696,-52407,19746,45869,27821,-94866,-76705,-13417,-61995,-71560,43450,67384,-8838,-80293,-28937,23330,-89694,-40586,46918,80429,-5475,78013,25309,-34162,37236,-77577,86744,26281,-29033,-91813,35347,13033,-13631,-24459,3325,-71078,-75359,81311,19700,47678,-74680,-84113,45192,35502,37675,19553,76522,-51098,-18211,89717,4508,-82946,27749,85995,89912,-53678,-64727,-14778,32075,-63412,-40524,86440,-2707,-36821,63850,-30883,67294,-99468,-23708,34932,34386,98899,29239,-23385,5897,54882,98660,49098,70275,17718,88533,52161,63340,50061,-89457,19491,-99156,24873,-17008,64610,-55543,50495,17056,-10400,-56678,-29073,-42960,-76418,98562,-88104,-96255,10159,-90724,54011,12052,45871,-90933,-69420,67039,37202,78051,-52197,-40278,-58425,65414,-23394,-1415,6912,-53447,7352,17307,-78147,63727,98905,55412,-57658,-32884,-44878,22755,39730,3638,35111,39777,74193,38736,-11829,-61188,-92757,55946,-71232,-63032,-83947,39147,-96684,-99233,25131,-32197,24406,-55428,-61941,25874,-69453,64483,-19644,-68441,12783,87338,-48676,66451,-447,-61590,50932,-11270,29035,65698,-63544,10029,80499,-9461,86368,91365,-81810,-71914,-52056,-13782,44240,-30093,-2437,24007,67581,-17365,-69164,-8420,-69289,-29370,48010,90439,13141,69243,50668,39328,61731,78266,-81313,17921,-38196,55261,9948,-24970,75712,-72106,28696,7461,31621,61047,51476,56512,11839,-96916,-82739,28924,-99927,58449,37280,69357,11219,-32119,-62050,-48745,-83486,-52376,42668,82659,68882,38773,46269,-96005,97630,25009,-2951,-67811,99801,81587,-79793,-18547,-83086,69512,33127,-92145,-88497,47703,59527,1909,88785,-88882,69188,-46131,-5589,-15086,36255,-53238,-33009,82664,53901,35939,-42946,-25571,33298,69291,53199,74746,-40127,-39050,91033,51717,-98048,87240,36172,65453,-94425,-63694,-30027,59004,88660,3649,-20267,-52565,-67321,34037,4320,91515,-56753,60115,27134,68617,-61395,-26503,-98929,-8849,-63318,10709,-16151,61905,-95785,5262,23670,-25277,90206,-19391,45735,37208,-31992,-92450,18516,-90452,-58870,-58602,93383,14333,17994,82411,-54126,-32576,35440,-60526,-78764,-25069,-9022,-394,92186,-38057,55328,-61569,67780,77169,19546,-92664,-94948,44484,-13439,83529,27518,-48333,72998,38342,-90553,-98578,-76906,81515,-16464,78439,92529,35225,-39968,-10130,-7845,-32245,-74955,-74996,67731,-13897,-82493,33407,93619,59560,-24404,-57553,19486,-45341,34098,-24978,-33612,79058,71847,76713,-95422,6421,-96075,-59130,-28976,-16922,-62203,69970,68331,21874,40551,89650,51908,58181,66480,-68177,34323,-3046,-49656,-59758,43564,-10960,-30796,15473,-20216,46085,-85355,41515,-30669,-87498,57711,56067,63199,-83805,62042,91213,-14606,4394,-562,74913,10406,96810,-61595,32564,31640,-9732,42058,98052,-7908,-72330,1
558,-80301,34878,32900,3939,-8824,88316,20937,21566,-3218,-66080,-31620,86859,54289,90476,-42889,-15016,-18838,75456,30159,-67101,42328,-92703,85850,-5475,23470,-80806,68206,17764,88235,46421,-41578,74005,-81142,80545,20868,-1560,64017,83784,68863,-97516,-13016,-72223,79630,-55692,82255,88467,28007,-34686,-69049,-41677,88535,-8217,68060,-51280,28971,49088,49235,26905,-81117,-44888,40623,74337,-24662,97476,79542,-72082,-35093,98175,-61761,-68169,59697,-62542,-72965,59883,-64026,-37656,-92392,-12113,-73495,98258,68379,-21545,64607,-70957,-92254,-97460,-63436,-8853,-19357,-51965,-76582,12687,-49712,45413,-60043,33496,31539,-57347,41837,67280,-68813,52088,-13155,-86430,-15239,-45030,96041,18749,-23992,46048,35243,-79450,85425,-58524,88781,-39454,53073,-48864,-82289,39086,82540,-11555,25014,-5431,-39585,-89526,2705,31953,-81611,36985,-56022,68684,-27101,11422,64655,-26965,-63081,-13840,-91003,-78147,-8966,41488,1988,99021,-61575,-47060,65260,-23844,-21781,-91865,-19607,44808,2890,63692,-88663,-58272,15970,-65195,-45416,-48444,-78226,-65332,-24568,42833,-1806,-71595,80002,-52250,30952,48452,-90106,31015,-22073,62339,63318,78391,28699,77900,-4026,-76870,-45943,33665,9174,-84360,-22684,-16832,-67949,-38077,-38987,-32847,51443,-53580,-13505,9344,-92337,26585,70458,-52764,-67471,-68411,-1119,-2072,-93476,67981,40887,-89304,-12235,41488,1454,5355,-34855,-72080,24514,-58305,3340,34331,8731,77451,-64983,-57876,82874,62481,-32754,-39902,22451,-79095,-23904,78409,-7418,77916])
|
py | b4145f03b90f1378174cddb092b38602c5f387cf | from __future__ import print_function, division
import os
import pickle
import sys
from copy import deepcopy
from typing import List, Tuple
import numpy as np
import torch
import torch.utils.data
from matplotlib import pyplot as plt
from visualDet3D.data.kitti.kittidata import KittiData, KittiObj, KittiCalib
from visualDet3D.data.pipeline import build_augmentator
from visualDet3D.utils.utils import alpha2theta_3d, theta2alpha_3d, draw_3D_box
from visualDet3D.networks.utils import BBox3dProjector
from visualDet3D.networks.utils.registry import DATASET_DICT
ros_py_path = '/opt/ros/kinetic/lib/python2.7/dist-packages'
if sys.version_info > (3, 0) and ros_py_path in sys.path:
#Python 3, compatible with a naive ros environment
sys.path.remove(ros_py_path)
import cv2
sys.path.append(ros_py_path)
else:
#Python 2
import cv2
@DATASET_DICT.register_module
class KittiStereoDataset(torch.utils.data.Dataset):
"""Some Information about KittiDataset"""
def __init__(self, cfg, split='training'):
super(KittiStereoDataset, self).__init__()
preprocessed_path = cfg.path.preprocessed_path
obj_types = cfg.obj_types
is_train = (split == 'training')
imdb_file_path = os.path.join(preprocessed_path, split, 'imdb.pkl')
self.imdb = pickle.load(open(imdb_file_path, 'rb')) # list of kittiData
self.output_dict = {
"calib": True,
"image": True,
"image_3":True,
"label": False,
"velodyne": False
}
if is_train:
self.transform = build_augmentator(cfg.data.train_augmentation)
else:
self.transform = build_augmentator(cfg.data.test_augmentation)
self.projector = BBox3dProjector()
self.is_train = is_train
self.obj_types = obj_types
self.preprocessed_path = preprocessed_path
def _reproject(self, P2:np.ndarray, transformed_label:List[KittiObj]) -> Tuple[List[KittiObj], np.ndarray]:
bbox3d_state = np.zeros([len(transformed_label), 7]) #[camera_x, camera_y, z, w, h, l, alpha]
if len(transformed_label) > 0:
#for obj in transformed_label:
# obj.alpha = theta2alpha_3d(obj.ry, obj.x, obj.z, P2)
bbox3d_origin = torch.tensor([[obj.x, obj.y - 0.5 * obj.h, obj.z, obj.w, obj.h, obj.l, obj.alpha] for obj in transformed_label], dtype=torch.float32)
            try:
                abs_corner, homo_corner, _ = self.projector.forward(bbox3d_origin, bbox3d_origin.new(P2))
            except Exception:
                # Log the offending inputs before re-raising; abs_corner and
                # homo_corner would otherwise be undefined below.
                print('\n', bbox3d_origin.shape, len(transformed_label), transformed_label, bbox3d_origin)
                raise
for i, obj in enumerate(transformed_label):
extended_center = np.array([obj.x, obj.y - 0.5 * obj.h, obj.z, 1])[:, np.newaxis] #[4, 1]
extended_bottom = np.array([obj.x, obj.y, obj.z, 1])[:, np.newaxis] #[4, 1]
image_center = (P2 @ extended_center)[:, 0] #[3]
image_center[0:2] /= image_center[2]
image_bottom = (P2 @ extended_bottom)[:, 0] #[3]
image_bottom[0:2] /= image_bottom[2]
bbox3d_state[i] = np.concatenate([image_center,
[obj.w, obj.h, obj.l, obj.alpha]]) #[7]
max_xy, _= homo_corner[:, :, 0:2].max(dim = 1) # [N,2]
min_xy, _= homo_corner[:, :, 0:2].min(dim = 1) # [N,2]
result = torch.cat([min_xy, max_xy], dim=-1) #[:, 4]
bbox2d = result.cpu().numpy()
for i in range(len(transformed_label)):
transformed_label[i].bbox_l = bbox2d[i, 0]
transformed_label[i].bbox_t = bbox2d[i, 1]
transformed_label[i].bbox_r = bbox2d[i, 2]
transformed_label[i].bbox_b = bbox2d[i, 3]
return transformed_label, bbox3d_state
def __getitem__(self, index):
kitti_data = self.imdb[index]
# The calib and label has been preloaded to minimize the time in each indexing
kitti_data.output_dict = self.output_dict
calib, left_image, right_image, _, _ = kitti_data.read_data()
calib.image_shape = left_image.shape
label = []
for obj in kitti_data.label:
if obj.type in self.obj_types:
label.append(obj)
transformed_left_image, transformed_right_image, P2, P3, transformed_label = self.transform(
left_image, right_image, deepcopy(calib.P2),deepcopy(calib.P3), deepcopy(label)
)
bbox3d_state = np.zeros([len(transformed_label), 7]) #[camera_x, camera_y, z, w, h, l, alpha]
if len(transformed_label) > 0:
transformed_label, bbox3d_state = self._reproject(P2, transformed_label)
if self.is_train:
            if abs(P2[0, 3]) < abs(P3[0, 3]):  # not mirrored or swapped; disparity should be based on point clouds projected through P2
disparity = cv2.imread(os.path.join(self.preprocessed_path, 'training', 'disp', "P2%06d.png" % index), -1)
            else:  # mirrored and swapped; disparity should be based on point clouds projected through P3, and also mirrored
disparity = cv2.imread(os.path.join(self.preprocessed_path, 'training', 'disp', "P3%06d.png" % index), -1)
disparity = disparity[:, ::-1]
disparity = disparity / 16.0
else:
disparity = None
bbox2d = np.array([[obj.bbox_l, obj.bbox_t, obj.bbox_r, obj.bbox_b] for obj in transformed_label])
output_dict = {'calib': [P2, P3],
'image': [transformed_left_image, transformed_right_image],
'label': [obj.type for obj in transformed_label],
'bbox2d': bbox2d, #[N, 4] [x1, y1, x2, y2]
'bbox3d': bbox3d_state,
'original_shape': calib.image_shape,
'disparity': disparity,
'original_P':calib.P2.copy()}
return output_dict
def __len__(self):
return len(self.imdb)
@staticmethod
def collate_fn(batch):
left_images = np.array([item["image"][0] for item in batch])#[batch, H, W, 3]
left_images = left_images.transpose([0, 3, 1, 2])
right_images = np.array([item["image"][1] for item in batch])#[batch, H, W, 3]
right_images = right_images.transpose([0, 3, 1, 2])
P2 = [item['calib'][0] for item in batch]
P3 = [item['calib'][1] for item in batch]
label = [item['label'] for item in batch]
bbox2ds = [item['bbox2d'] for item in batch]
bbox3ds = [item['bbox3d'] for item in batch]
disparities = [item['disparity'] for item in batch]
if disparities[0] is None:
return torch.from_numpy(left_images).float(), torch.from_numpy(right_images).float(), torch.tensor(P2).float(), torch.tensor(P3).float(), label, bbox2ds, bbox3ds
else:
return torch.from_numpy(left_images).float(), torch.from_numpy(right_images).float(), torch.tensor(P2).float(), torch.tensor(P3).float(), label, bbox2ds, bbox3ds, torch.tensor(disparities).float()
@DATASET_DICT.register_module
class KittiStereoTestDataset(KittiStereoDataset):
    def __init__(self, cfg, split='test'):
        super(KittiStereoTestDataset, self).__init__(cfg, split)
        preprocessed_path = cfg.path.preprocessed_path
        imdb_file_path = os.path.join(preprocessed_path, 'test', 'imdb.pkl')
        self.imdb = pickle.load(open(imdb_file_path, 'rb'))  # list of KittiData
self.output_dict = {
"calib": True,
"image": True,
"image_3":True,
"label": False,
"velodyne": False
}
def __getitem__(self, index):
kitti_data = self.imdb[index]
# The calib and label has been preloaded to minimize the time in each indexing
kitti_data.output_dict = self.output_dict
calib, left_image, right_image, _, _ = kitti_data.read_data()
calib.image_shape = left_image.shape
transformed_left_image, transformed_right_image, P2, P3 = self.transform(
left_image, right_image, deepcopy(calib.P2),deepcopy(calib.P3)
)
output_dict = {'calib': [P2, P3],
'image': [transformed_left_image, transformed_right_image],
'original_shape': calib.image_shape,
'original_P':calib.P2.copy()}
return output_dict
@staticmethod
def collate_fn(batch):
left_images = np.array([item["image"][0] for item in batch])#[batch, H, W, 3]
left_images = left_images.transpose([0, 3, 1, 2])
right_images = np.array([item["image"][1] for item in batch])#[batch, H, W, 3]
right_images = right_images.transpose([0, 3, 1, 2])
P2 = [item['calib'][0] for item in batch]
P3 = [item['calib'][1] for item in batch]
return torch.from_numpy(left_images).float(), torch.from_numpy(right_images).float(), P2, P3
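# Minimal usage sketch (assumes `cfg` is the project's loaded config object
# with path.preprocessed_path, obj_types and data.*_augmentation populated):
# from torch.utils.data import DataLoader
# dataset = KittiStereoDataset(cfg, split='training')
# loader = DataLoader(dataset, batch_size=4, shuffle=True,
#                     collate_fn=KittiStereoDataset.collate_fn)
# # training batches carry a trailing disparity tensor (8 items in total)
# left, right, P2, P3, labels, bbox2ds, bbox3ds, disparities = next(iter(loader))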
|
py | b4145fc84d57dd48e3a99e3f97d6a29415359f97 | import requests
from bs4 import BeautifulSoup
from time import sleep
from multiprocessing import Pool
def get_listing(url):
    headers = {
        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'}
    links = []
    r = requests.get(url, headers=headers, timeout=10)
    if r.status_code == 200:
        soup = BeautifulSoup(r.text, 'lxml')
        listing_section = soup.select('#offers_table table > tbody > tr > td > h3 > a')
        links = [link['href'].strip() for link in listing_section]
    return links
# parse a single item to get information
def parse(url):
headers = {
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'}
r = requests.get(url, headers=headers, timeout=10)
sleep(2)
info = []
    title_text = '-'
    location_text = '-'
    price_text = '-'
    images = '-'
    description_text = '-'
if r.status_code == 200:
print('Processing..' + url)
html = r.text
soup = BeautifulSoup(html, 'lxml')
title = soup.find('h1')
if title is not None:
title_text = title.text.strip()
location = soup.find('strong', {'class': 'c2b small'})
if location is not None:
location_text = location.text.strip()
price = soup.select('div > .xxxx-large')
if price is not None:
price_text = price[0].text.strip('Rs').replace(',', '')
images = soup.select('#bigGallery > li > a')
img = [image['href'].strip() for image in images]
images = '^'.join(img)
description = soup.select('#textContent > p')
if description is not None:
description_text = description[0].text.strip()
info.append(url)
info.append(title_text)
info.append(location_text)
info.append(price_text)
info.append(images)
return ','.join(info)
cars_links = get_listing('https://www.olx.com.pk/cars/')
with Pool(10) as p:
records = p.map(parse, cars_links)
if len(records) > 0:
with open('data_parallel.csv', 'a+') as f:
f.write('\n'.join(records))
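# Note: ','.join() corrupts rows whenever a scraped title or description
# itself contains a comma. A safer variant (sketch; assumes parse() is
# changed to return the `info` list rather than a joined string):
# import csv
# with open('data_parallel.csv', 'a+', newline='') as f:
#     csv.writer(f).writerows(records)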
|
py | b41460f84cb3d91cec816f8d451ba3f09a16e8aa | from fastapi import FastAPI
from mangum import Mangum
from MyApp.api.routers import router
__all__ = ["api"]
api = FastAPI(
title="FastAPI-Lambda-Docker",
description="An example API deployed to AWS lambda in a docker container",
)
api.include_router(router=router)
handler = Mangum(app=api)
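# Local development entrypoint (a sketch; assumes uvicorn is installed —
# it is not required by the Lambda deployment itself):
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(api, host="0.0.0.0", port=8000)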
|
py | b4146107faf0de811cd9e3233fffbfb21e3a18c8 | from data.tools import *
class SyncDB(BaseDB):
def __init__(self, d):
super().__init__(d, 'sync')
def find_by_uid(self, uid: int):
return find_one(self.col, {'uid': uid})
def update(self, uid: int, data: dict):
now = self.find_by_uid(uid)
struct = {'uid': uid, 'data': data}
if now is None:
auto_time_insert(self.col, struct)
else:
auto_time_update(self.col, {'uid': uid}, struct)
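# Usage sketch (assumes `d` is the database handle BaseDB expects and the
# auto_time_* helpers come from data.tools as imported above):
# sync_db = SyncDB(d)
# sync_db.update(42, {'progress': 7})   # first call inserts
# sync_db.update(42, {'progress': 8})   # later calls update in place
# record = sync_db.find_by_uid(42)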
|
py | b414615d80c75fcfd2eae434611e9c95c5f6f14b | from django.contrib import admin
from .models import Character, CharacterIcon
# Register your models here.
admin.site.register(Character)
admin.site.register(CharacterIcon)
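# Equivalent decorator form (sketch of the stock Django alternative, useful
# if per-model options such as list_display are added later):
# @admin.register(Character)
# class CharacterAdmin(admin.ModelAdmin):
#     list_display = ('id',)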
|
py | b41461e50bbcf7073acb313ac5791f726337c4a1 |
import json
import os
import subprocess
import time
import math
from pySmartDL import SmartDL
import asyncio
from hachoir.metadata import extractMetadata
from hachoir.parser import createParser
from telethon.tl.types import DocumentAttributeVideo
from userbot import LOGS, CMD_HELP, TEMP_DOWNLOAD_DIRECTORY
from userbot.events import register
async def progress(current, total, event, start, type_of_ps, file_name=None):
"""Generic progress_callback for uploads and downloads."""
now = time.time()
diff = now - start
if round(diff % 10.00) == 0 or current == total:
percentage = current * 100 / total
speed = current / diff
elapsed_time = round(diff) * 1000
time_to_completion = round((total - current) / speed) * 1000
estimated_total_time = elapsed_time + time_to_completion
progress_str = "[{0}{1}] {2}%\n".format(
''.join(["█" for i in range(math.floor(percentage / 10))]),
''.join(["░" for i in range(10 - math.floor(percentage / 10))]),
round(percentage, 2))
tmp = progress_str + \
"{0} of {1}\nETA: {2}".format(
humanbytes(current),
humanbytes(total),
time_formatter(estimated_total_time)
)
if file_name:
await event.edit("{}\nFile Name: `{}`\n{}".format(
type_of_ps, file_name, tmp))
else:
await event.edit("{}\n{}".format(type_of_ps, tmp))
def humanbytes(size):
"""Input size in bytes,
outputs in a human readable format"""
# https://stackoverflow.com/a/49361727/4723940
if not size:
return ""
# 2 ** 10 = 1024
power = 2**10
raised_to_pow = 0
dict_power_n = {0: "", 1: "Ki", 2: "Mi", 3: "Gi", 4: "Ti"}
while size > power:
size /= power
raised_to_pow += 1
return str(round(size, 2)) + " " + dict_power_n[raised_to_pow] + "B"
def time_formatter(milliseconds: int) -> str:
"""Inputs time in milliseconds, to get beautified time,
as string"""
seconds, milliseconds = divmod(int(milliseconds), 1000)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
tmp = ((str(days) + " day(s), ") if days else "") + \
((str(hours) + " hour(s), ") if hours else "") + \
((str(minutes) + " minute(s), ") if minutes else "") + \
((str(seconds) + " second(s), ") if seconds else "") + \
((str(milliseconds) + " millisecond(s), ") if milliseconds else "")
return tmp[:-2]
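# Worked examples (values follow directly from the two helpers above):
# humanbytes(2048)          -> '2.0 KiB'
# humanbytes(5 * 2**20)     -> '5.0 MiB'
# time_formatter(3723000)   -> '1 hour(s), 2 minute(s), 3 second(s)'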
@register(pattern=r"^\!download(?: |$)(.*)", outgoing=True)
async def download(target_file):
""" For .download command, download files to the userbot's server. """
await target_file.edit("Processing ...")
input_str = target_file.pattern_match.group(1)
if not os.path.isdir(TEMP_DOWNLOAD_DIRECTORY):
os.makedirs(TEMP_DOWNLOAD_DIRECTORY)
if "|" in input_str:
url, file_name = input_str.split("|")
url = url.strip()
# https://stackoverflow.com/a/761825/4723940
file_name = file_name.strip()
head, tail = os.path.split(file_name)
if head:
if not os.path.isdir(os.path.join(TEMP_DOWNLOAD_DIRECTORY, head)):
os.makedirs(os.path.join(TEMP_DOWNLOAD_DIRECTORY, head))
file_name = os.path.join(head, tail)
        downloaded_file_name = os.path.join(TEMP_DOWNLOAD_DIRECTORY, file_name)
downloader = SmartDL(url, downloaded_file_name, progress_bar=False)
downloader.start(blocking=False)
c_time = time.time()
display_message = None
while not downloader.isFinished():
status = downloader.get_status().capitalize()
total_length = downloader.filesize if downloader.filesize else None
downloaded = downloader.get_dl_size()
            now = time.time()
            diff = now - c_time
            percentage = downloader.get_progress() * 100
progress_str = "[{0}{1}] {2}%".format(
''.join(["█" for i in range(math.floor(percentage / 10))]),
''.join(["░"
for i in range(10 - math.floor(percentage / 10))]),
round(percentage, 2))
estimated_total_time = downloader.get_eta(human=True)
try:
current_message = f"{status}..\
\nURL: {url}\
\nFile Name: {file_name}\
\n{progress_str}\
\n{humanbytes(downloaded)} of {humanbytes(total_length)}\
\nETA: {estimated_total_time}"
if round(diff %
10.00) == 0 and current_message != display_message:
await target_file.edit(current_message)
display_message = current_message
except Exception as e:
LOGS.info(str(e))
if downloader.isSuccessful():
await target_file.edit("Downloaded to `{}` successfully !!".format(
downloaded_file_name))
else:
await target_file.edit("Incorrect URL\n{}".format(url))
elif target_file.reply_to_msg_id:
try:
c_time = time.time()
downloaded_file_name = await target_file.client.download_media(
await target_file.get_reply_message(),
TEMP_DOWNLOAD_DIRECTORY,
progress_callback=lambda d, t: asyncio.get_event_loop(
).create_task(
progress(d, t, target_file, c_time, "Downloading...")))
except Exception as e: # pylint:disable=C0103,W0703
await target_file.edit(str(e))
else:
await target_file.edit("Downloaded to `{}` successfully !!".format(
downloaded_file_name))
else:
await target_file.edit(
"Reply to a message to download to my local server.")
@register(pattern=r"^\!uploadir (.*)", outgoing=True)
async def uploadir(udir_event):
""" For .uploadir command, allows you to upload everything from a folder in the server"""
input_str = udir_event.pattern_match.group(1)
if os.path.exists(input_str):
await udir_event.edit("Processing ...")
lst_of_files = []
for r, d, f in os.walk(input_str):
for file in f:
lst_of_files.append(os.path.join(r, file))
for file in d:
lst_of_files.append(os.path.join(r, file))
LOGS.info(lst_of_files)
uploaded = 0
await udir_event.edit(
"Found {} files. Uploading will start soon. Please wait!".format(
len(lst_of_files)))
for single_file in lst_of_files:
if os.path.exists(single_file):
# https://stackoverflow.com/a/678242/4723940
caption_rts = os.path.basename(single_file)
c_time = time.time()
if not caption_rts.lower().endswith(".mp4"):
await udir_event.client.send_file(
udir_event.chat_id,
single_file,
caption=caption_rts,
force_document=False,
allow_cache=False,
reply_to=udir_event.message.id,
progress_callback=lambda d, t: asyncio.get_event_loop(
).create_task(
progress(d, t, udir_event, c_time, "Uploading...",
single_file)))
else:
thumb_image = os.path.join(input_str, "thumb.jpg")
c_time = time.time()
metadata = extractMetadata(createParser(single_file))
duration = 0
width = 0
height = 0
if metadata.has("duration"):
duration = metadata.get("duration").seconds
if metadata.has("width"):
width = metadata.get("width")
if metadata.has("height"):
height = metadata.get("height")
await udir_event.client.send_file(
udir_event.chat_id,
single_file,
caption=caption_rts,
thumb=thumb_image,
force_document=False,
allow_cache=False,
reply_to=udir_event.message.id,
attributes=[
DocumentAttributeVideo(
duration=duration,
w=width,
h=height,
round_message=False,
supports_streaming=True,
)
],
progress_callback=lambda d, t: asyncio.get_event_loop(
).create_task(
progress(d, t, udir_event, c_time, "Uploading...",
single_file)))
os.remove(single_file)
uploaded = uploaded + 1
await udir_event.edit(
"Uploaded {} files successfully !!".format(uploaded))
else:
await udir_event.edit("404: Directory Not Found")
@register(pattern=r"^\!upload (.*)", outgoing=True)
async def upload(u_event):
""" For .upload command, allows you to upload a file from the userbot's server """
await u_event.edit("Processing ...")
input_str = u_event.pattern_match.group(1)
if input_str in ("userbot.session", "config.env", "userbot.modules", "*/modules/*" ):
await u_event.edit("`That's a dangerous operation! Not Permitted!`")
return
if os.path.exists(input_str):
c_time = time.time()
await u_event.client.send_file(
u_event.chat_id,
input_str,
force_document=True,
allow_cache=False,
reply_to=u_event.message.id,
progress_callback=lambda d, t: asyncio.get_event_loop(
).create_task(
progress(d, t, u_event, c_time, "Uploading...", input_str)))
await u_event.edit("Uploaded successfully !!")
else:
await u_event.edit("404: File Not Found")
def get_video_thumb(file, output=None, width=90):
""" Get video thumbnail """
metadata = extractMetadata(createParser(file))
popen = subprocess.Popen(
[
"ffmpeg",
"-i",
file,
"-ss",
str(
int((0, metadata.get("duration").seconds
)[metadata.has("duration")] / 2)),
"-filter:v",
"scale={}:-1".format(width),
"-vframes",
"1",
output,
],
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
    )
    # Wait for ffmpeg to exit; returncode stays None until then.
    popen.communicate()
    if not popen.returncode and os.path.lexists(file):
        return output
    return None
def extract_w_h(file):
""" Get width and height of media """
command_to_run = [
"ffprobe",
"-v",
"quiet",
"-print_format",
"json",
"-show_format",
"-show_streams",
file,
]
# https://stackoverflow.com/a/11236144/4723940
try:
t_response = subprocess.check_output(command_to_run,
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as exc:
LOGS.warning(exc)
else:
x_reponse = t_response.decode("UTF-8")
response_json = json.loads(x_reponse)
width = int(response_json["streams"][0]["width"])
height = int(response_json["streams"][0]["height"])
return width, height
@register(pattern=r"^\!uploadas(stream|vn|all) (.*)", outgoing=True)
async def uploadas(uas_event):
""" For .uploadas command, allows you to specify some arguments for upload. """
await uas_event.edit("Processing ...")
type_of_upload = uas_event.pattern_match.group(1)
supports_streaming = False
round_message = False
spam_big_messages = False
if type_of_upload == "stream":
supports_streaming = True
if type_of_upload == "vn":
round_message = True
if type_of_upload == "all":
spam_big_messages = True
input_str = uas_event.pattern_match.group(2)
thumb = None
file_name = None
if "|" in input_str:
file_name, thumb = input_str.split("|")
file_name = file_name.strip()
thumb = thumb.strip()
else:
file_name = input_str
thumb_path = "a_random_f_file_name" + ".jpg"
thumb = get_video_thumb(file_name, output=thumb_path)
if os.path.exists(file_name):
metadata = extractMetadata(createParser(file_name))
duration = 0
width = 0
height = 0
if metadata.has("duration"):
duration = metadata.get("duration").seconds
if metadata.has("width"):
width = metadata.get("width")
if metadata.has("height"):
height = metadata.get("height")
try:
if supports_streaming:
c_time = time.time()
await uas_event.client.send_file(
uas_event.chat_id,
file_name,
thumb=thumb,
caption=input_str,
force_document=False,
allow_cache=False,
reply_to=uas_event.message.id,
attributes=[
DocumentAttributeVideo(
duration=duration,
w=width,
h=height,
round_message=False,
supports_streaming=True,
)
],
progress_callback=lambda d, t: asyncio.get_event_loop(
).create_task(
progress(d, t, uas_event, c_time, "Uploading...",
file_name)))
elif round_message:
c_time = time.time()
await uas_event.client.send_file(
uas_event.chat_id,
file_name,
thumb=thumb,
allow_cache=False,
reply_to=uas_event.message.id,
video_note=True,
attributes=[
DocumentAttributeVideo(
duration=0,
w=1,
h=1,
round_message=True,
supports_streaming=True,
)
],
progress_callback=lambda d, t: asyncio.get_event_loop(
).create_task(
progress(d, t, uas_event, c_time, "Uploading...",
file_name)))
elif spam_big_messages:
await uas_event.edit("TBD: Not (yet) Implemented")
return
os.remove(thumb)
await uas_event.edit("Uploaded successfully !!")
except FileNotFoundError as err:
await uas_event.edit(str(err))
else:
await uas_event.edit("404: File Not Found")
CMD_HELP.update({
"upload-download":
"!download <link|filename> or reply to media\
\nUsage: Downloads file to the server.\
\n\n!upload <path in server>\
\nUsage: Uploads a locally stored file to the chat.\
\n\n!uploadas(stream|vn|all) \
\nUsage: allows you to specify some arguments for upload\
\n\n!uploadir(stream|vn|all) \
\nUsage: upload everything from a folder in the server\
"
})
|
py | b414628a6018f4dc03ff9ba482953197df4daca6 | """
Copyright (c) 2020 VMware, Inc.
This product is licensed to you under the Apache License, Version 2.0 (the "License").
You may not use this product except in compliance with the License.
This product may include a number of subcomponents with separate copyright notices
and license terms. Your use of these subcomponents is subject to the terms and
conditions of the subcomponent's license, as noted in the LICENSE file.
Modifications for phpIPAM by John Bowdre ([email protected])
"""
import requests
from vra_ipam_utils.ipam import IPAM
import logging
from datetime import datetime
import ipaddress
"""
Example payload
"inputs": {
"resourceInfo": {
"id": "11f912e71454a075574a728848458",
"name": "external-ipam-it-mcm-323412",
"description": "test",
"type": "VM",
"owner": "[email protected]",
"orgId": "ce811934-ea1a-4f53-b6ec-465e6ca7d126",
"properties": {
"osType": "WINDOWS",
"vcUuid": "ff257ed9-070b-45eb-b2e7-d63926d5bdd7",
"__moref": "VirtualMachine:vm-288560",
"memoryGB": "4",
"datacenter": "Datacenter:datacenter-2",
"provisionGB": "1",
"__dcSelfLink": "/resources/groups/b28c7b8de065f07558b1612fce028",
"softwareName": "Microsoft Windows XP Professional (32-bit)",
"__computeType": "VirtualMachine",
"__hasSnapshot": "false",
"__placementLink": "/resources/compute/9bdc98681fb8b27557252188607b8",
"__computeHostLink": "/resources/compute/9bdc98681fb8b27557252188607b8"
}
},
"ipAllocations": [
{
"id": "111bb2f0-02fd-4983-94d2-8ac11768150f",
"ipRangeIds": [
"network/ZG5zLm5ldHdvcmskMTAuMjMuMTE3LjAvMjQvMA:10.23.117.0/24/default"
],
"nicIndex": "0",
"isPrimary": "true",
"size": "1",
"properties": {
"__moref": "DistributedVirtualPortgroup:dvportgroup-307087",
"__dvsUuid": "0c 8c 0b 50 46 b6 1c f2-e8 63 f4 24 24 d7 24 6c",
"__dcSelfLink": "/resources/groups/abe46b8cfa663a7558b28a6ffe088",
"__computeType": "DistributedVirtualPortgroup",
"__portgroupKey": "dvportgroup-307087"
}
}
],
"endpoint": {
"id": "f097759d8736675585c4c5d272cd",
"endpointProperties": {
"hostName": "sampleipam.sof-mbu.eng.vmware.com",
"projectId": "111bb2f0-02fd-4983-94d2-8ac11768150f",
"providerId": "d8a5e3f2-d839-4365-af5b-f48de588fdc1",
"certificate": "-----BEGIN CERTIFICATE-----\nMIID0jCCArqgAwIBAgIQQaJF55UCb58f9KgQLD/QgTANBgkqhkiG9w0BAQUFADCB\niTELMAkGA1UEBhMCVVMxEzARBgNVBAgTCkNhbGlmb3JuaWExEjAQBgNVBAcTCVN1\nbm55dmFsZTERMA8GA1UEChMISW5mb2Jsb3gxFDASBgNVBAsTC0VuZ2luZWVyaW5n\nMSgwJgYDVQQDEx9pbmZvYmxveC5zb2YtbWJ1LmVuZy52bXdhcmUuY29tMB4XDTE5\nMDEyOTEzMDExMloXDTIwMDEyOTEzMDExMlowgYkxCzAJBgNVBAYTAlVTMRMwEQYD\nVQQIEwpDYWxpZm9ybmlhMRIwEAYDVQQHEwlTdW5ueXZhbGUxETAPBgNVBAoTCElu\nZm9ibG94MRQwEgYDVQQLEwtFbmdpbmVlcmluZzEoMCYGA1UEAxMfaW5mb2Jsb3gu\nc29mLW1idS5lbmcudm13YXJlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC\nAQoCggEBAMMLNTqbAri6rt/H8iC4UgRdN0qj+wk0R2blmD9h1BiZJTeQk1r9i2rz\nzUOZHvE8Bld8m8xJ+nysWHaoFFGTX8bOd/p20oJBGbCLqXtoLMMBGAlP7nzWGBXH\nBYUS7kMv/CG+PSX0uuB0pRbhwOFq8Y69m4HRnn2X0WJGuu+v0FmRK/1m/kCacHga\nMBKaIgbwN72rW1t/MK0ijogmLR1ASY4FlMn7OBHIEUzO+dWFBh+gPDjoBECTTH8W\n5AK9TnYdxwAtJRYWmnVqtLoT3bImtSfI4YLUtpr9r13Kv5FkYVbXov1KBrQPbYyp\n72uT2ZgDJT4YUuWyKpMppgw1VcG3MosCAwEAAaM0MDIwMAYDVR0RBCkwJ4cEChda\nCoIfaW5mb2Jsb3guc29mLW1idS5lbmcudm13YXJlLmNvbTANBgkqhkiG9w0BAQUF\nAAOCAQEAXFPIh00VI55Sdfx+czbBb4rJz3c1xgN7pbV46K0nGI8S6ufAQPgLvZJ6\ng2T/mpo0FTuWCz1IE9PC28276vwv+xJZQwQyoUq4lhT6At84NWN+ZdLEe+aBAq+Y\nxUcIWzcKv8WdnlS5DRQxnw6pQCBdisnaFoEIzngQV8oYeIemW4Hcmb//yeykbZKJ\n0GTtK5Pud+kCkYmMHpmhH21q+3aRIcdzOYIoXhdzmIKG0Och97HthqpvRfOeWQ/A\nPDbxqQ2R/3D0gt9jWPCG7c0lB8Ynl24jLBB0RhY6mBrYpFbtXBQSEciUDRJVB2zL\nV8nJiMdhj+Q+ZmtSwhNRvi2qvWAUJQ==\n-----END CERTIFICATE-----\n"
},
"authCredentialsLink": "/core/auth/credentials/13c9cbade08950755898c4b89c4a0"
}
}
"""
def handler(context, inputs):
ipam = IPAM(context, inputs)
IPAM.do_allocate_ip = do_allocate_ip
return ipam.allocate_ip()
def auth_session(uri, auth, cert):
auth_uri = f'{uri}/user/'
req = requests.post(auth_uri, auth=auth, verify=cert)
if req.status_code != 200:
raise requests.exceptions.RequestException('Authentication Failure!')
token = {"token": req.json()['data']['token']}
return token
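# Usage sketch (hypothetical values; mirrors how do_allocate_ip below passes
# the returned dict as request headers on every phpIPAM call):
# token = auth_session('https://phpipam.example.com/api/vra', ('user', 'pass'), True)
# sections = requests.get('https://phpipam.example.com/api/vra/sections/',
#                         headers=token, verify=True).json()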
def do_allocate_ip(self, auth_credentials, cert):
# Build variables
username = auth_credentials["privateKeyId"]
password = auth_credentials["privateKey"]
hostname = self.inputs["endpoint"]["endpointProperties"]["hostName"]
apiAppId = self.inputs["endpoint"]["endpointProperties"]["apiAppId"]
uri = f'https://{hostname}/api/{apiAppId}/'
auth = (username, password)
# Auth to API
token = auth_session(uri, auth, cert)
bundle = {
'uri': uri,
'token': token,
'cert': cert
}
allocation_result = []
try:
resource = self.inputs["resourceInfo"]
for allocation in self.inputs["ipAllocations"]:
allocation_result.append(allocate(resource, allocation, self.context, self.inputs["endpoint"], bundle))
except Exception as e:
try:
rollback(allocation_result, bundle)
except Exception as rollback_e:
logging.error(f"Error during rollback of allocation result {str(allocation_result)}")
logging.error(rollback_e)
raise e
assert len(allocation_result) > 0
return {
"ipAllocations": allocation_result
}
def allocate(resource, allocation, context, endpoint, bundle):
last_error = None
for range_id in allocation["ipRangeIds"]:
logging.info(f"Allocating from range {range_id}")
try:
return allocate_in_range(range_id, resource, allocation, context, endpoint, bundle)
except Exception as e:
last_error = e
logging.error(f"Failed to allocate from range {range_id}: {str(e)}")
logging.error("No more ranges. Raising last error")
raise last_error
def allocate_in_range(range_id, resource, allocation, context, endpoint, bundle):
    if int(allocation['size']) == 1:
vmName = resource['name']
uri = bundle['uri']
token = bundle['token']
cert = bundle['cert']
# Attempt to grab 'owner' to work around bug in vRA 8.6 (fixed in 8.6.1)
try:
owner_string = f" for {resource['owner']} "
except:
owner_string = " "
payload = {
'hostname': vmName,
'description': f'Reserved by vRA{owner_string}at {datetime.now()}'
}
allocate_uri = f'{uri}/addresses/first_free/{str(range_id)}/'
allocate_req = requests.post(allocate_uri, data=payload, headers=token, verify=cert)
allocate_req = allocate_req.json()
if allocate_req['success']:
version = ipaddress.ip_address(allocate_req['data']).version
result = {
"ipAllocationId": allocation['id'],
"ipRangeId": range_id,
"ipVersion": "IPv" + str(version),
"ipAddresses": [allocate_req['data']]
}
logging.info(f"Successfully reserved {str(result['ipAddresses'])} for {vmName}.")
else:
raise Exception("Unable to allocate IP!")
return result
    else:
        # TODO: implement allocation of a continuous block of IPs
        raise Exception("Not implemented")
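# A possible shape for the size > 1 branch above (sketch only: it reuses the
# same phpIPAM first_free endpoint once per address, so the results are not
# guaranteed to be contiguous, and the helper name is hypothetical):
# def allocate_block(range_id, size, payload, uri, token, cert):
#     addresses = []
#     for _ in range(int(size)):
#         r = requests.post(f'{uri}/addresses/first_free/{range_id}/',
#                           data=payload, headers=token, verify=cert).json()
#         if not r['success']:
#             raise Exception('Unable to allocate IP block!')
#         addresses.append(r['data'])
#     return addresses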
## Rollback any previously allocated addresses in case this allocation request contains multiple ones and failed in the middle
def rollback(allocation_result, bundle):
uri = bundle['uri']
token = bundle['token']
cert = bundle['cert']
for allocation in reversed(allocation_result):
logging.info(f"Rolling back allocation {str(allocation)}")
ipAddresses = allocation.get("ipAddresses", None)
for ipAddress in ipAddresses:
rollback_uri = f'{uri}/addresses/{allocation.get("id")}/'
requests.delete(rollback_uri, headers=token, verify=cert)
return
|
py | b41462bf2c58355d9d20014a331fb66580404251 | #!/usr/bin/env python3
import logging
import random
from copy import copy
from os import listdir
from os.path import isfile
from os.path import join
import bonobo
import mondrian
import requests as req
from bonobo.config import use
from bonobo.config import use_context_processor
from bonobo.config import use_raw_input
from bonobo.constants import NOT_MODIFIED
from cachecontrol import CacheControl
from cachecontrol.caches.file_cache import FileCache
from cachecontrol.heuristics import ExpiresAfter
import util
# One line setup (excepthook=True tells mondrian to handle uncaught exceptions)
mondrian.setup(excepthook=True)
# Use logging, as usual.
logger = logging.getLogger('mtg')
logger.setLevel(logging.INFO)
CACHE_TIME = 14 + (random.randint(0, 14)) # nosec
logger.warning('Caching for %d days' % CACHE_TIME)
CACHE = FileCache('.web_cache')
requests = CacheControl(
req.Session(), cache=CACHE, heuristic=ExpiresAfter(days=CACHE_TIME)
)
SALE = False
CUTOFF = 4
PRICE_MODIFIER = 1.15
MIN_PRICE = 0.25
IN_USE_CARDS = {}
QUALITY = ''
MTG_STUDIO = True
DECKBOX = True
ECHO_MTG = False
def _used_cards(foo, bar):
yield IN_USE_CARDS
@use_context_processor(_used_cards)
def in_use_cards(_used_cards, count, name, section, edition, *rest):
# Scratchpad, we don't care about
if section == 'scratchpad':
return
if edition not in _used_cards:
_used_cards[edition] = {}
if name not in _used_cards[edition]:
_used_cards[edition][name] = 0
_used_cards[edition][name] += int(count)
# pprint.pprint(IN_USE_CARDS)
return
def get_decks(**options):
"""
This function builds the graph that needs to be executed.
:return: bonobo.Graph
"""
graph = bonobo.Graph()
csv_in = bonobo.noop
graph.add_chain(csv_in, in_use_cards, _input=None)
for deck in listdir('decks'):
deck_path = join('decks', deck)
if deck == '.gitignore':
continue
if isfile(deck_path):
graph.add_chain(bonobo.CsvReader(deck_path), _output=csv_in)
return graph
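# Typical bonobo entrypoint (sketch; any real __main__ handling lives further
# down this module). get_graph() expects the cached `requests` session to be
# registered as the 'http' service consumed by metadata():
# if __name__ == '__main__':
#     bonobo.run(get_decks())
#     bonobo.run(get_graph(), services={'http': requests})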
def get_graph(**options):
"""
This function builds the graph that needs to be executed.
:return: bonobo.Graph
"""
graph = bonobo.Graph()
split = bonobo.noop
graph.add_chain(
bonobo.CsvWriter('DeckedBuilder.csv'),
# bonobo.Limit(10),
metadata,
# bonobo.UnpackItems(0),
split,
_input=None,
_name='main',
)
graph.add_chain(
bonobo.CsvReader('main-en.csv'),
bonobo.Format(Language='English'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('main-de.csv'),
bonobo.Format(Language='German'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('main-ru.csv'),
bonobo.Format(Language='Russian'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('main-it.csv'),
bonobo.Format(Language='Italian'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('main-jp.csv'),
bonobo.Format(Language='Japanese'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('main-fr.csv'),
bonobo.Format(Language='French'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('main-kr.csv'),
bonobo.Format(Language='Korean'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('main-cs.csv'),
bonobo.Format(Language='Chinese'),
_output='main',
)
graph.add_chain(
bonobo.CsvReader('Deckbox-extras.csv'),
bonobo.Format(Language='English'),
_output='main',
)
if ECHO_MTG:
# Reg Qty,Foil Qty,Name,Set,Acquired,Language
echomtg = {'Acquired For': '0.004', 'Language': 'en'}
graph.add_chain(
# echomtg specific fiddling
remove_metadata,
bonobo.UnpackItems(0),
# bonobo.PrettyPrinter(),
bonobo.Rename(Name='Card'),
bonobo.Format(**echomtg),
bonobo.CsvWriter('EchoMTG.csv'),
_input=split,
)
# MTG Studio
if MTG_STUDIO:
graph.add_chain(
mtg_studio,
remove_metadata,
bonobo.UnpackItems(0),
# bonobo.Format(Edition='{Set}'),
bonobo.Rename(Edition='Set'),
# bonobo.Rename(Name='Card'),
# bonobo.Rename(Qty='Reg Qty'),
# bonobo.Rename(Foil='Foil Qty'),
# bonobo.PrettyPrinter(),
bonobo.CsvWriter('MTG-Studio.csv'),
_input=split,
)
# graph.add_chain(
# tradeable,
# bonobo.UnpackItems(0),
# #bonobo.PrettyPrinter(),
# #bonobo.Limit(3000),
# bonobo.CsvWriter("DeckedBuilder-tradelist.csv"),
# bonobo.OrderFields([
# 'Card',
# 'Set',
# 'Foil',
# 'Quantity',
# ]),
# bonobo.CsvWriter("CardKingdom-buylist.csv"),
# bonobo.OrderFields([
# 'Quantity',
# 'Card',
# 'Set',
# ]),
# bonobo.CsvWriter(
# "mtgprice-buylist.csv",
# delimiter="\t",
# ),
# _input=split,
# )
#
if DECKBOX:
csv_out = bonobo.CsvWriter('Deckbox-inventory.csv')
graph.add_chain(
# # metadata,
# #bonobo.UnpackItems(0),
deckbox,
bonobo.UnpackItems(0),
csv_out,
_input=split,
)
graph.add_chain(
bonobo.CsvReader('Deckbox-specials.csv'), _output=csv_out
)
return graph
def remove_metadata(card):
if 'scryfall' in card:
out_card = copy(card)
out_card.pop('scryfall')
yield out_card
else:
yield NOT_MODIFIED
@use('http')
@use_raw_input
def metadata(card, *, http):
mvid = int(card.get('Mvid') or 0)
name = card.get('Card')
note = card.get('Notes')
scryfall = None
# Cards with a note assume a Scryfall UUID
if note:
try:
response = requests.get(
'https://api.scryfall.com/cards/%s' % note
).json()
if response.get('object') == 'card':
scryfall = response
else:
logger.warning(
                    '[mvid:%s] Invalid scryfall response %r'
% (mvid, response.get('details'))
)
except Exception as e:
logger.warning(
f'[scryfall] Looking up {name!r} failed: Exception was {e!r}'
)
# Decked Builder bug mvids are very high
if mvid > 0 and mvid < 1200000 and not scryfall:
try:
response = requests.get(
'https://api.scryfall.com/cards/multiverse/%s' % mvid
).json()
if response.get('object') == 'card':
scryfall = response
else:
logger.warning(
                    '[mvid:%s] Invalid scryfall response %r'
% (mvid, response.get('details'))
)
except Exception as e:
logger.warning(
f'[scryfall] Looking up {name!r} failed: Exception was {e!r}'
)
# mvid == 0 => promo cards of some sort
if mvid > 0 and not scryfall:
set_name = card.get('Set')
logger.debug(f'[mvid:{mvid}] falling back {name} [{set_name}]')
set = list(
filter(
lambda x: x['name'] == set_name,
requests.get('https://api.scryfall.com/sets')
.json()
.get('data'),
)
)
cards = []
if len(set) == 1:
set_code = set[0]['code']
logger.debug('Set code is %s' % set_code)
params = {'q': f'set:{set_code} name:"{name}"'}
cards = (
requests.get(
'https://api.scryfall.com/cards/search', params=params
)
.json()
.get('data', [])
)
if len(cards) == 1:
scryfall = cards[0]
if len(scryfall['multiverse_ids']) == 1:
diff = int(mvid) - scryfall['multiverse_ids'][0]
logger.debug('Diff is %s' % diff)
mvid = scryfall['multiverse_ids'][0]
if scryfall and scryfall['name'] and scryfall['name'] != name:
layout = scryfall['layout']
if layout == 'normal':
logger.debug(
'Name mismatch %s vs %s for layout %s'
% (name, scryfall['name'], layout)
)
name = scryfall['name']
if scryfall:
if scryfall['reserved']:
logger.debug(
'Reserved card: %s [%s]: %.2f$'
% (
scryfall['name'],
scryfall['set_name'],
float(scryfall['prices']['usd']),
)
)
elif float(scryfall['prices']['usd'] or 0) > 1:
value = float(scryfall['prices']['usd'] or 0) * int(
card.get('Total Qty')
)
logger.debug(
'%s [%s] : %d x %.2f$ == %.2f$'
% (
scryfall['name'],
scryfall['set_name'],
int(card.get('Total Qty')),
float(scryfall['prices']['usd']),
value,
)
)
yield {**card._asdict(), 'Card': name, 'Mvid': mvid, 'scryfall': scryfall}
@use_raw_input
def a_lot(row):
qty = int(row.get('Total Qty'))
if qty > 16:
return NOT_MODIFIED
def is_standard(card):
scryfall = card.get('scryfall')
if scryfall:
legality = scryfall.get('legalities', None)
if legality:
standard = legality.get('standard', None)
if standard == 'legal':
return True
return False
@use_raw_input
def more_than_set(row):
qty = int(row.get('Reg Qty'))
if qty > CUTOFF:
yield {**row._asdict(), 'Reg Qty': qty - CUTOFF}
# Deckbox CSV columns: Count, Tradelist Count, Name, Edition, Card Number,
# Condition, Language, Foil, Signed, Artist Proof, Altered Art, Misprint,
# Promo, Textless, My Price
@use_context_processor(_used_cards)
def deckbox(_used_cards, row):
# pprint.pprint(_used_cards)
edition = row.get('Set')
name = row.get('Card')
# XXX: Check here
standard = is_standard(row)
trace = False
if name == 'XXX':
print(f'Name: {name}, Edition: {edition}, Standard: {standard}')
trace = True
if trace:
import pprint
pprint.pprint(row)
qty = int(row.get('Reg Qty'))
foil_qty = int(row.get('Foil Qty'))
trade_qty = 0
trade_foil_qty = 0
rarity = row.get('Rarity')
price_str = row.get('Single Price') or '0'
price = float(price_str)
foil_price_str = row.get('Single Foil Price') or '0'
foil_price = float(foil_price_str)
scryfall = row.get('scryfall')
# mtgio = row.get('mtgio')
if scryfall and 'prices' in scryfall:
if scryfall['prices']['usd']:
price = float(scryfall['prices']['usd'])
if scryfall['prices']['usd_foil']:
foil_price = float(scryfall['prices']['usd_foil'])
total_value = (price * qty) + (foil_qty * foil_price)
if total_value > 5:
logger.debug(
'Prices from Scryfall for %s [%s] are %s/%s Total:%2.2f'
% (name, edition, price, foil_price, total_value)
)
foil_cutoff = 0
if rarity == 'Rare' or rarity == 'Mythic Rare':
qty_cutoff = 4
else:
qty_cutoff = CUTOFF
if standard:
if qty_cutoff < 4:
qty_cutoff = 4
if rarity != 'Rare' and rarity != 'Mythic Rare':
foil_cutoff = 0
# Do not care about basic lands at all
if scryfall and scryfall['type_line'].startswith('Basic Land'):
qty_cutoff = 0
foil_cutoff = 0
# Promos can go as well
if scryfall and scryfall['promo']:
qty_cutoff = 0
foil_cutoff = 0
# Are we using this card in our built decks ?
if edition in _used_cards:
if name in _used_cards[edition]:
deck_qty = _used_cards[edition][name]
# if deck_qty > qty_cutoff:
# qty_cutoff = deck_qty
qty_cutoff += deck_qty
if qty > qty_cutoff:
trade_qty = qty - qty_cutoff
if foil_qty > foil_cutoff:
trade_foil_qty = foil_qty - foil_cutoff
if scryfall:
if 'set_name' not in scryfall:
logger.error('Missing set_name from scryfall %r' % scryfall)
scryfall_set_name = scryfall['set_name']
scryfall_set = scryfall['set']
# Fix Conspiracy
if scryfall_set == 'cns':
edition = scryfall_set_name
if scryfall_set_name is not None:
if edition != scryfall_set_name:
mvid = row.get('Mvid')
logger.debug(
f'[mvid:{mvid}] Set {edition} vs {scryfall_set_name}'
)
# edition = scryfall_set_name
if scryfall:
if scryfall['layout'] != 'normal':
if scryfall['name'] != name:
if scryfall['card_faces'][0]['name'] != name:
logger.warning(
"Card name isn't of the first face %s vs %s [%s]"
% (name, scryfall['name'], scryfall['layout'])
)
name = scryfall['card_faces'][0]['name']
if edition == 'Time Spiral ""Timeshifted""':
edition = 'Time Spiral "Timeshifted"'
if edition == 'Magic: The Gathering-Commander':
edition = 'Commander'
if edition == 'Commander 2013 Edition':
edition = 'Commander 2013'
if edition == 'Planechase 2012 Edition':
edition = 'Planechase 2012'
if edition == 'Commander Anthology 2018':
edition = 'Commander Anthology Volume II'
if edition == 'M19 Gift Pack':
edition = 'M19 Gift Pack Promos'
edition = util.edition_to_deckbox(edition)
collector_number = 0
if scryfall:
collector_number = scryfall['collector_number']
# Don't sell yet
if not SALE:
price = 0
foil_price = 0
# Non-english cards can all go
if row.get('Language') != 'English':
trade_foil_qty = foil_qty
trade_qty = qty
if foil_qty > 0:
yield {
'Count': foil_qty,
'Tradelist Count': trade_foil_qty,
'Name': name,
'Edition': edition,
'Card Number': collector_number,
'Condition': QUALITY,
'Language': row.get('Language'),
'Foil': 'foil',
'Signed': '',
'Artist Proof': '',
'Altered Art': '',
'Misprint': '',
'Promo': '',
'Textless': '',
'My Price': format(foil_price * PRICE_MODIFIER, '.2f'),
}
if qty > 0:
# Don't price below MIN_PRICE
price = price * PRICE_MODIFIER
if price < MIN_PRICE and SALE:
price = MIN_PRICE
yield {
'Count': qty,
'Tradelist Count': trade_qty,
'Name': name,
'Edition': edition,
'Card Number': collector_number,
'Condition': QUALITY,
'Language': row.get('Language'),
'Foil': '',
'Signed': '',
'Artist Proof': '',
'Altered Art': '',
'Misprint': '',
'Promo': '',
'Textless': '',
'My Price': format(price, '.2f'),
}
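# For reference, a row produced above might look like the following
# (illustrative values, not taken from a real inventory):
# 4,2,Lightning Bolt,Magic 2010,146,Near Mint,English,,,,,,,,0.25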
def mtg_studio(card):
name = card.get('Card')
scryfall = card.get('scryfall')
output = copy(card)
if scryfall and scryfall['name'] and scryfall['name'] != name:
output['Card'] = scryfall['name']
# Skip Basic lands
if scryfall and scryfall['type_line'].startswith('Basic Land'):
return
yield output
@use_raw_input
def tradeable(row):
qty = int(row.get('Reg Qty'))
foil_qty = int(row.get('Foil Qty'))
rarity = row.get('Rarity')
foil_cutoff = 1
if rarity == 'Rare' or rarity == 'Mythic Rare':
qty_cutoff = 1
else:
qty_cutoff = CUTOFF
if qty > qty_cutoff:
qty -= qty_cutoff
else:
qty = 0
if foil_qty > foil_cutoff:
foil_qty -= foil_cutoff
else:
foil_qty = 0
price_str = row.get('Single Price') or '0'
price = float(price_str)
if foil_qty > 0:
yield {
**row._asdict(),
'Reg Qty': 0,
'Foil Qty': foil_qty,
'Quantity': foil_qty,
'Foil': 1,
}
if qty > 0 and price > 0:
yield {
**row._asdict(),
'Reg Qty': qty,
'Foil Qty': 0,
'Quantity': qty,
'Foil': 0,
}
@use_raw_input
def foils(row):
foil = int(row.get('Foil Qty'))
if foil > 0:
return NOT_MODIFIED
@use_raw_input
def not_foils(row):
foil = int(row.get('Foil Qty'))
if foil <= 0:
return NOT_MODIFIED
@use_raw_input
def rares(row):
rarity = row.get('Rarity')
if rarity == 'Rare' or rarity == 'Mythic Rare':
return NOT_MODIFIED
@use_raw_input
def not_rares(row):
rarity = row.get('Rarity')
if rarity != 'Rare' and rarity != 'Mythic Rare':
return NOT_MODIFIED
def get_services(**options):
return {'http': requests}
# The __main__ block actually execute the graph.
if __name__ == '__main__':
parser = bonobo.get_argument_parser()
with bonobo.parse_args(parser) as options:
bonobo.run(get_decks(**options), services=get_services(**options))
bonobo.run(
get_graph(**options),
services=get_services(**options),
strategy='threadpool',
)
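# Invoked as a plain script (e.g. `python collection.py`; the file name is an
# assumption) -- bonobo's argument parser adds its own CLI flags on top.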
|
py | b41463ae7871fb37f9614dee4b2b122a0ef7499d | # The original repo: https://github.com/Reagan1311/DABNet
import torch
import torch.nn as nn
from mmcv.cnn import build_conv_layer, build_norm_layer, constant_init, normal_init
from mmcv.utils.parrots_wrapper import _BatchNorm
from mmcv.runner import load_checkpoint
from mmseg.utils import get_root_logger
from ..builder import BACKBONES
class Conv(nn.Module):
def __init__(self, in_channels, num_channels, kernel, stride, padding,
dilation=(1, 1), groups=1, bn_act=False, bias=False,
conv_cfg=None, norm_cfg=dict(type='BN')):
super().__init__()
self.conv = build_conv_layer(
conv_cfg,
in_channels,
num_channels,
kernel_size=kernel,
stride=stride,
padding=padding,
dilation=dilation,
groups=groups,
bias=bias
)
self.bn_act = bn_act
if self.bn_act:
self.bn_prelu = BNPReLU(num_channels, norm_cfg)
def forward(self, input):
output = self.conv(input)
if self.bn_act:
output = self.bn_prelu(output)
return output
class BNPReLU(nn.Module):
def __init__(self, num_channels, norm_cfg=dict(type='BN')):
super().__init__()
self.bn = build_norm_layer(norm_cfg, num_channels)[1]
self.act = nn.PReLU(num_channels)
def forward(self, x):
y = self.bn(x)
y = self.act(y)
return y
class DABModule(nn.Module):
def __init__(self, in_channels, d=1, kernel=3, dilated_kernel=3, conv_cfg=None, norm_cfg=dict(type='BN')):
super().__init__()
self.bn_relu_1 = BNPReLU(in_channels, norm_cfg)
self.conv3x3 = Conv(
in_channels,
in_channels // 2,
kernel=kernel,
stride=1,
padding=1,
bn_act=True,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg
)
self.dconv3x1 = Conv(
in_channels // 2,
in_channels // 2,
kernel=(dilated_kernel, 1),
stride=1,
padding=(1, 0),
groups=in_channels // 2,
bn_act=True,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg
)
self.dconv1x3 = Conv(
in_channels // 2,
in_channels // 2,
kernel=(1, dilated_kernel),
stride=1,
padding=(0, 1),
groups=in_channels // 2,
bn_act=True,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg)
self.ddconv3x1 = Conv(
in_channels // 2,
in_channels // 2,
kernel=(dilated_kernel, 1),
stride=1,
padding=(1 * d, 0),
dilation=(d, 1),
groups=in_channels // 2,
bn_act=True,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg)
self.ddconv1x3 = Conv(
in_channels // 2,
in_channels // 2,
kernel=(1, dilated_kernel),
stride=1,
padding=(0, 1 * d),
dilation=(1, d),
groups=in_channels // 2,
bn_act=True,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg)
self.bn_relu_2 = BNPReLU(in_channels // 2, norm_cfg)
self.conv1x1 = Conv(
in_channels // 2,
in_channels,
kernel=1,
stride=1,
padding=0,
bn_act=False,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg)
def forward(self, input):
output = self.bn_relu_1(input)
output = self.conv3x3(output)
br1 = self.dconv3x1(output)
br1 = self.dconv1x3(br1)
br2 = self.ddconv3x1(output)
br2 = self.ddconv1x3(br2)
output = br1 + br2
output = self.bn_relu_2(output)
output = self.conv1x1(output)
return output + input
class DownSamplingBlock(nn.Module):
def __init__(self, in_channels, num_channels, conv_cfg=None, norm_cfg=dict(type='BN')):
super().__init__()
self.in_channels = in_channels
self.num_channels = num_channels
if self.in_channels < self.num_channels:
num_conv = num_channels - in_channels
else:
num_conv = num_channels
self.conv3x3 = Conv(
in_channels,
num_conv,
kernel=3,
stride=2,
padding=1,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg
)
self.max_pool = nn.MaxPool2d(2, stride=2)
self.bn_prelu = BNPReLU(num_channels, norm_cfg)
def forward(self, input):
output = self.conv3x3(input)
if self.in_channels < self.num_channels:
max_pool = self.max_pool(input)
output = torch.cat([output, max_pool], 1)
output = self.bn_prelu(output)
return output
class InputInjection(nn.Module):
def __init__(self, ratio):
super().__init__()
self.pool = nn.ModuleList()
for i in range(0, ratio):
self.pool.append(nn.AvgPool2d(3, stride=2, padding=1))
def forward(self, input):
for pool in self.pool:
input = pool(input)
return input
@BACKBONES.register_module()
class DABNet(nn.Module):
def __init__(self,
extra,
in_channels=3,
conv_cfg=None,
norm_cfg=dict(type='BN'),
norm_eval=False):
super().__init__()
self.conv_cfg = conv_cfg
self.norm_cfg = norm_cfg
self.norm_eval = norm_eval
self.extra = extra
self.block_1 = self.extra['block_1']
self.block_2 = self.extra['block_2']
self.init_conv = nn.Sequential(
Conv(in_channels, 32, 3, 2, padding=1, bn_act=True, conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg),
Conv(32, 32, 3, 1, padding=1, bn_act=True, conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg),
Conv(32, 32, 3, 1, padding=1, bn_act=True, conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg),
)
self.down_1 = InputInjection(1) # down-sample the image once
self.down_2 = InputInjection(2) # down-sample the image twice
self.down_3 = InputInjection(3) # down-sample the image three times
self.bn_prelu_1 = BNPReLU(32 + 3, norm_cfg)
# DAB Block 1
self.downsample_1 = DownSamplingBlock(32 + 3, 64, conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg)
self.DAB_Block_1 = nn.Sequential()
for i in range(0, self.block_1):
dab_module = DABModule(64, d=2, conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg)
self.DAB_Block_1.add_module("DAB_Module_1_" + str(i), dab_module)
self.bn_prelu_2 = BNPReLU(128 + 3, norm_cfg)
# DAB Block 2
dilation_block_2 = [4, 4, 8, 8, 16, 16]
self.downsample_2 = DownSamplingBlock(128 + 3, 128, conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg)
self.DAB_Block_2 = nn.Sequential()
for i in range(0, self.block_2):
dab_module = DABModule(128, d=dilation_block_2[i], conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg)
self.DAB_Block_2.add_module("DAB_Module_2_" + str(i), dab_module)
self.bn_prelu_3 = BNPReLU(256 + 3, norm_cfg)
def init_weights(self, pretrained=None):
"""Initialize the weights in backbone.
Args:
pretrained (str, optional): Path to pre-trained weights.
Defaults to None.
"""
if isinstance(pretrained, str):
logger = get_root_logger()
load_checkpoint(self, pretrained, strict=False, logger=logger)
elif pretrained is None:
for m in self.modules():
if isinstance(m, nn.Conv2d):
normal_init(m, std=0.001)
elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
constant_init(m, 1)
else:
raise TypeError('pretrained must be a str or None')
def forward(self, input):
output0 = self.init_conv(input)
down_1 = self.down_1(input)
down_2 = self.down_2(input)
down_3 = self.down_3(input)
output0_cat = self.bn_prelu_1(torch.cat([output0, down_1], 1))
# DAB Block 1
output1_0 = self.downsample_1(output0_cat)
output1 = self.DAB_Block_1(output1_0)
output1_cat = self.bn_prelu_2(torch.cat([output1, output1_0, down_2], 1))
# DAB Block 2
output2_0 = self.downsample_2(output1_cat)
output2 = self.DAB_Block_2(output2_0)
output2_cat = self.bn_prelu_3(torch.cat([output2, output2_0, down_3], 1))
y_list = [output0_cat, output1_cat, output2_cat]
return y_list
def train(self, mode=True):
"""Convert the model into training mode."""
super().train(mode)
if mode and self.norm_eval:
for m in self.modules():
if isinstance(m, _BatchNorm):
m.eval()
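# Minimal usage sketch (illustrative; the block counts are assumptions that
# stay within the six entries of dilation_block_2, not a verified config):
#
# backbone = DABNet(extra=dict(block_1=3, block_2=6))
# backbone.init_weights()
# feats = backbone(torch.randn(1, 3, 512, 512))  # list of three feature maps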
|
py | b41464ed35e6045efc6b7a1ed204e2370e58a3ec | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Dirk Chang and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class CloudEmployeeInvitation(Document):
def on_submit(self):
data = {
"doctype": "Cloud Employee",
"company": self.company,
"user": self.user
}
frappe.get_doc(data).insert(ignore_permissions=True)
rg = frappe.get_value("Cloud Company Group", {"company": self.company, "group_name": "root"})
if rg:
group_doc = frappe.get_doc("Cloud Company Group", rg)
group_doc.append_users("user", self.user)
group_doc.save(ignore_permissions=True)
def get_permission_query_conditions(user):
if 'Cloud Manager' in frappe.get_roles(user):
return ""
from cloud.cloud.doctype.cloud_company.cloud_company import list_admin_companies
if 'Cloud Admin' in frappe.get_roles(user):
ent_list = list_admin_companies(user)
return """(`tabCloud Employee Invitation`.company in ({user_ents}))""".format(
user_ents='"' + '", "'.join(ent_list) + '"')
else:
return '''(`tabCloud Employee Invitation`.user = '{user}')'''.format(user=user)
def has_permission(doc, ptype, user):
if 'Cloud Manager' in frappe.get_roles(user):
return True
if frappe.get_value('Cloud Company', {'admin': user, 'name': doc.company}):
return True
if doc.user == user:
if ptype in ['create', 'cancel']:
return False
else:
return True
return False |
py | b41465c0f34b890304bdbb80b012f54edfbc9919 | '''Trains a Hierarchical Attention Model on the IMDB sentiment classification task.
Modified from keras' examples/imdb_lstm.py.
'''
from __future__ import print_function
import numpy as np
from model import createHierarchicalAttentionModel
np.random.seed(1337) # for reproducibility
from keras.preprocessing import sequence
from keras.datasets import imdb
max_features = 20000
maxlen = 80 # cut texts after this number of words (among top max_features most common words)
batch_size = 32
print('Loading data...')
(X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features)
print(len(X_train), 'train sequences')
print(len(X_test), 'test sequences')
print('Pad sequences (samples x time)')
X_train = sequence.pad_sequences(X_train, maxlen=maxlen)
X_test = sequence.pad_sequences(X_test, maxlen=maxlen)
# add one extra dimension for the sentence axis (1 sentence per doc!)
X_train = np.expand_dims(X_train, axis=1)
X_test = np.expand_dims(X_test, axis=1)
print('X_train shape:', X_train.shape)
print('X_test shape:', X_test.shape)
print('Build model...')
model, modelAttEval = createHierarchicalAttentionModel(maxlen, embeddingSize = 200, vocabSize = max_features)
print('Train...')
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=10,
validation_data=(X_test, y_test))
score, acc = model.evaluate(X_test, y_test,
batch_size=batch_size)
print('Test score:', score)
print('Test accuracy:', acc)
|
py | b41466d6fb6f778668764f6a51cd3e6385944885 | #!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Spectralcoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "Spectralcoin-Qt.app/Contents/Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
|
py | b4146774f109183cc6b88e761812d43349b9dd4e | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###########################################################
# WARNING: Generated code! #
# ************************** #
# Manual changes may get lost if file is generated again. #
# Only code inside the [MANUAL] tags will be kept. #
###########################################################
from flexbe_core import Behavior, Autonomy, OperatableStateMachine, ConcurrencyContainer, PriorityContainer, Logger
from meka_flexbe_states.WaitForDoldButton import WaitForDoldButton
from meka_flexbe_states.HandoverAdaptionInit import HandoverAdaptionInit
from meka_flexbe_states.HandoverAdaptionExec import HandoverAdaptionExec
# Additional imports can be added inside the following tags
# [MANUAL_IMPORT]
# [/MANUAL_IMPORT]
'''
Created on Mon Mar 04 2019
@author: me
'''
class adaption_testSM(Behavior):
'''
test for behavior adaption
'''
def __init__(self):
super(adaption_testSM, self).__init__()
self.name = 'adaption_test'
# parameters of this behavior
# references to used behaviors
# Additional initialization code can be added inside the following tags
# [MANUAL_INIT]
# [/MANUAL_INIT]
# Behavior comments:
def create(self):
# x:30 y:365, x:130 y:365
_state_machine = OperatableStateMachine(outcomes=['finished', 'failed'])
# Additional creation code can be added inside the following tags
# [MANUAL_CREATE]
# [/MANUAL_CREATE]
with _state_machine:
# x:334 y:56
OperatableStateMachine.add('adapt_init',
HandoverAdaptionInit(topic='/hace/people', x_min=0, x_max=0.5, y_min=-0.75, y_max=0.05, z_min=-0.25, z_max=0.55),
transitions={'right_hand_in_ws': 'adapt_button_wait', 'left_hand_in_ws': 'failed', 'error': 'failed'},
autonomy={'right_hand_in_ws': Autonomy.Off, 'left_hand_in_ws': Autonomy.Off, 'error': Autonomy.Off})
# x:633 y:148
OperatableStateMachine.add('adapt_exec',
HandoverAdaptionExec(command='trigger', topic='/do_adaption', reality_damp=0.5, fixed_orientation=False, terminate=True, dynamic_orientation=True),
transitions={'stopped': 'failed', 'succeeded': 'adapt_reset', 'error': 'failed'},
autonomy={'stopped': Autonomy.Off, 'succeeded': Autonomy.Off, 'error': Autonomy.Off})
# x:387 y:307
OperatableStateMachine.add('adapt_reset',
HandoverAdaptionExec(command='trigger', topic='/do_adaption', reality_damp=0.5, fixed_orientation=False, terminate=True, dynamic_orientation=True),
transitions={'stopped': 'failed', 'succeeded': 'finished', 'error': 'failed'},
autonomy={'stopped': Autonomy.Off, 'succeeded': Autonomy.Off, 'error': Autonomy.Off})
# x:568 y:47
OperatableStateMachine.add('adapt_button_wait',
WaitForDoldButton(dold_button_topic='/dold_driver/state'),
transitions={'done': 'adapt_exec', 'failure': 'failed'},
autonomy={'done': Autonomy.Off, 'failure': Autonomy.Off})
return _state_machine
# Private functions can be added inside the following tags
# [MANUAL_FUNC]
# [/MANUAL_FUNC]
|
py | b4146777993581437a339c30a148e078e7bdeeb5 | import os
def sip_alg(s: str):
os.startfile("program\sip-alg-detector.exe") |
py | b41469ae6d8a1ccdd014ae8fa60579820a9b3956 | import hashlib
import requests
import sys
import json
import time
def proof_of_work(block):
"""
Simple Proof of Work Algorithm
Stringify the block and look for a proof.
Loop through possibilities, checking each one against `valid_proof`
in an effort to find a number that is a valid proof
:return: A valid proof for the provided block
"""
block_string = json.dumps(block, sort_keys=True)
proof = 0
while not valid_proof(block_string, proof):
proof += 1
return proof
def valid_proof(block_string, proof):
"""
Validates the Proof: Does hash(block_string, proof) contain 6
leading zeroes? Return true if the proof is valid
:param block_string: <string> The stringified block to use to
check in combination with `proof`
:param proof: <int?> The value that when combined with the
stringified previous block results in a hash that has the
correct number of leading zeroes.
:return: True if the resulting hash is a valid proof, False otherwise
"""
guess = f"{block_string}{proof}".encode()
guess_hash = hashlib.sha256(guess).hexdigest()
return guess_hash[:6] == "000000"
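# Quick sanity check (illustrative; six leading zeroes can take a long time
# to find, so trim the difficulty in valid_proof while experimenting):
#
# block = {"index": 1, "proof": 0}
# proof = proof_of_work(block)
# assert valid_proof(json.dumps(block, sort_keys=True), proof)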
if __name__ == '__main__':
# What is the server address? e.g. `python3 miner.py https://server.com/api/`
if len(sys.argv) > 1:
node = sys.argv[1]
else:
node = "http://localhost:5000"
# Load ID
f = open("my_id.txt", "r")
id = f.read()
print("ID is", id)
f.close()
coins_mined = 0
# Run forever until interrupted
while True:
r = requests.get(url=node + "/last_block")
# Handle non-json response
try:
data = r.json()
except ValueError:
print("Error: Non-json response")
print("Response returned:")
print(r)
break
# TODO: Get the block from `data` and use it to look for a new proof
print("Starting proof of work")
start_time = time.perf_counter()
new_proof = proof_of_work(data)
end_time = time.perf_counter()
print(f"Proof of work finished in {end_time - start_time:0.2f} seconds")
# When found, POST it to the server {"proof": new_proof, "id": id}
post_data = {"proof": new_proof, "id": id}
r = requests.post(url=node + "/mine", json=post_data)
data = r.json()
# TODO: If the server responds with a 'message' 'New Block Forged'
# add 1 to the number of coins mined and print it. Otherwise,
# print the message from the server.
if data['message'] == "New Block Forged":
coins_mined += 1
print(f"Coins mined: {coins_mined}")
else:
print(data['message'])
|
py | b41469e65645f78d636eb9c5abe42eeebaba5652 | """Import category trees from a file."""
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from slugify import slugify
from categories.models import Category
from categories.settings import SLUG_TRANSLITERATOR
class Command(BaseCommand):
"""Import category trees from a file."""
help = (
"Imports category tree(s) from a file. Sub categories must be indented by the same multiple of spaces or tabs."
)
args = "file_path [file_path ...]"
def get_indent(self, string):
"""
Look through the string and count the spaces.
"""
indent_amt = 0
if string[0] == "\t":
return "\t"
for char in string:
if char == " ":
indent_amt += 1
else:
return " " * indent_amt
@transaction.atomic
def make_category(self, string, parent=None, order=1):
"""
Make and save a category object from a string.
"""
cat = Category(
name=string.strip(),
slug=slugify(SLUG_TRANSLITERATOR(string.strip()))[:49],
# parent=parent,
order=order,
)
cat._tree_manager.insert_node(cat, parent, "last-child", True)
cat.save()
if parent:
parent.rght = cat.rght + 1
parent.save()
return cat
def parse_lines(self, lines):
"""
Do the work of parsing each line.
"""
indent = ""
level = 0
if lines[0][0] in [" ", "\t"]:
raise CommandError("The first line in the file cannot start with a space or tab.")
# This keeps track of the current parents at a given level
current_parents = {0: None}
for line in lines:
if len(line) == 0:
continue
if line[0] in [" ", "\t"]:
if indent == "":
indent = self.get_indent(line)
elif line[0] not in indent:
raise CommandError("You can't mix spaces and tabs for indents")
level = line.count(indent)
current_parents[level] = self.make_category(line, parent=current_parents[level - 1])
else:
# We are back to a zero level, so reset the whole thing
current_parents = {0: self.make_category(line)}
current_parents[0]._tree_manager.rebuild()
def handle(self, *file_paths, **options):
"""
Handle the basic import.
"""
import os
for file_path in file_paths:
if not os.path.isfile(file_path):
print("File %s not found." % file_path)
continue
with open(file_path, "r") as f:
data = f.readlines()
self.parse_lines(data)
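# Example input file (illustrative) -- each level is indented by the same
# multiple of spaces, or by tabs (mixing the two raises CommandError):
#
# Music
#     Rock
#     Jazz
# Books
#     Fiction
#         Sci-Fi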
|
py | b4146a41b2399a89782a00b838cb1d5996411b2f | """Nox sessions."""
import tempfile
from typing import Any
import nox
from nox.sessions import Session
nox.options.sessions = "lint", "mypy", "pytype", "safety", "tests"
locations = "src", "tests", "noxfile.py", "docs/conf.py"
package = "python3_template"
def install_with_constraints(session: Session, *args: str, **kwargs: Any) -> None:
"""Install packages constrained by Poetry's lock file."""
with tempfile.NamedTemporaryFile() as requirements:
session.run(
"poetry",
"export",
"--dev",
"--format=requirements.txt",
f"--output={requirements.name}",
external=True,
)
session.install(f"--constraint={requirements.name}", *args, **kwargs)
@nox.session(python="3.8")
def black(session: Session) -> None:
"""Run black code formatter."""
args = session.posargs or locations
install_with_constraints(session, "black")
session.run("black", *args)
@nox.session(python=["3.8"])
def lint(session: Session) -> None:
"""Lint using flake8."""
args = session.posargs or locations
install_with_constraints(
session,
"flake8",
"flake8-annotations",
"flake8-bandit",
"flake8-black",
"flake8-bugbear",
"flake8-docstrings",
"flake8-import-order",
"darglint",
)
session.run("flake8", *args)
@nox.session(python="3.8")
def safety(session: Session) -> None:
"""Scan dependencies for insecure packages."""
with tempfile.NamedTemporaryFile() as requirements:
session.run(
"poetry",
"export",
"--dev",
"--format=requirements.txt",
"--without-hashes",
f"--output={requirements.name}",
external=True,
)
install_with_constraints(session, "safety")
session.run("safety", "check", f"--file={requirements.name}", "--full-report")
@nox.session(python=["3.8"])
def mypy(session: Session) -> None:
"""Type-check using mypy."""
args = session.posargs or locations
install_with_constraints(session, "mypy")
session.run("mypy", *args)
@nox.session(python="3.7")
def pytype(session: Session) -> None:
"""Type-check using pytype."""
args = session.posargs or ["--disable=import-error", *locations]
install_with_constraints(session, "pytype")
session.run("pytype", *args)
@nox.session(python=["3.8"])
def tests(session: Session) -> None:
"""Run the test suite."""
args = session.posargs or ["--cov", "-m", "not e2e"]
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(
session, "coverage[toml]", "pytest", "pytest-cov", "pytest-mock"
)
session.run("pytest", *args)
@nox.session(python=["3.8"])
def typeguard(session: Session) -> None:
"""Runtime type checking using Typeguard."""
args = session.posargs or ["-m", "not e2e"]
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(session, "pytest", "pytest-mock", "typeguard")
session.run("pytest", f"--typeguard-packages={package}", *args)
@nox.session(python=["3.8"])
def xdoctest(session: Session) -> None:
"""Run examples with xdoctest."""
args = session.posargs or ["all"]
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(session, "xdoctest")
session.run("python", "-m", "xdoctest", package, *args)
@nox.session(python="3.8")
def docs(session: Session) -> None:
"""Build the documentation."""
session.run("poetry", "install", "--no-dev", external=True)
install_with_constraints(session, "sphinx", "sphinx-autodoc-typehints")
session.run("sphinx-build", "docs", "docs/_build")
@nox.session(python="3.8")
def coverage(session: Session) -> None:
"""Upload coverage data."""
install_with_constraints(session, "coverage[toml]", "codecov")
session.run("coverage", "xml", "--fail-under=0")
session.run("codecov", *session.posargs)
|
py | b4146adfcae62e84c03ec9dcd1100f552fdb33ba | from abc import ABC, abstractmethod
from typing import List
from pydantic import EmailStr
from contaxy.schema import File, Project, ProjectCreation, User, UserRegistration
from contaxy.utils.file_utils import FileStreamWrapper
class SeedOperations(ABC):
@abstractmethod
def create_user(
self,
user_input: UserRegistration = UserRegistration(
username="Foo", email=EmailStr("[email protected]"), password="Foobar"
),
) -> User:
pass
@abstractmethod
def create_users(self, amount: int) -> List[User]:
pass
@abstractmethod
def create_project(
self,
project_input: ProjectCreation = ProjectCreation(
id="my-test-project", display_name="My Test Project!"
),
) -> Project:
pass
@abstractmethod
def create_projects(self, amount: int) -> List[Project]:
pass
@abstractmethod
def create_file(
self,
project_id: str,
file_key: str = "my-test-file",
max_number_chars: int = 200,
) -> File:
pass
@abstractmethod
def create_files(
self,
project_id: str,
number_of_files: int,
prefix: str = "my-test-file",
max_number_chars: int = 200,
) -> List[File]:
pass
@abstractmethod
def create_file_stream(
self,
max_number_chars: int = 200,
) -> FileStreamWrapper:
pass
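# A concrete implementation would typically delegate to the platform's real
# managers; sketch only -- the constructor arguments below are assumptions,
# not part of the contaxy API:
#
# class SeedManager(SeedOperations):
#     def __init__(self, auth_manager, project_manager, file_manager):
#         self._auth = auth_manager
#         self._projects = project_manager
#         self._files = file_manager
#
#     def create_user(self, user_input) -> User:
#         return self._auth.create_user(user_input)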
|
py | b4146b8aac17f9ba690c870dc1550d4c6909fdc0 | from talon import app, Module, Context, actions, ui, imgui, settings, app, registry
ctx = Context()
mod = Module()
ctx.matches = r"""
tag: user.generic_windows_shell
"""
@ctx.action_class("user")
class Actions:
# implements the function from generic_terminal.talon for unix shells
def terminal_list_directories():
"""Lists directories"""
actions.insert("ls")
actions.key("enter")
def terminal_list_all_directories():
"""Lists all directories including hidden"""
actions.insert("ls -force")
actions.key("enter")
def terminal_change_directory(path: str):
"""Lists change directory"""
actions.insert("cd {}".format(path))
if path:
actions.key("enter")
def terminal_change_directory_root():
"""Root of current drive"""
actions.insert("cd /")
actions.key("enter")
def terminal_clear_screen():
"""Clear screen"""
actions.insert("clear")
actions.key("enter")
def terminal_run_last():
"""Repeats the last command"""
actions.key("up enter")
def terminal_rerun_search(command: str):
"""Searches through the previously executed commands"""
actions.key("ctrl-r")
actions.insert(command)
def terminal_kill_all():
"""kills the running command"""
actions.key("ctrl-c")
actions.insert("y")
actions.key("enter")
|
py | b4146c7dd5d31362db3ef6997edec32f51f4bc21 | """Define a dynamical system for a 2D quadrotor"""
from typing import Tuple, List, Optional
import torch
import numpy as np
from .control_affine_system import ControlAffineSystem
from .utils import grav, Scenario
class Quad2D(ControlAffineSystem):
"""
Represents a planar quadrotor.
The system has state
x = [px, pz, theta, vx, vz, theta_dot]
representing the position, orientation, and velocities of the quadrotor, and it
has control inputs
u = [u_right, u_left]
representing the thrust at the right and left rotor.
The system is parameterized by
m: mass
I: rotational inertia
r: the distance from the center of mass to the rotors (assumed to be symmetric)
"""
# Number of states and controls
N_DIMS = 6
N_CONTROLS = 2
# State indices
PX = 0
PZ = 1
THETA = 2
VX = 3
VZ = 4
THETA_DOT = 5
# Control indices
U_RIGHT = 0
U_LEFT = 1
def __init__(
self,
nominal_params: Scenario,
dt: float = 0.01,
controller_dt: Optional[float] = None,
):
"""
Initialize the quadrotor.
args:
nominal_params: a dictionary giving the parameter values for the system.
Requires keys ["m", "I", "r"]
dt: the timestep to use for the simulation
controller_dt: the timestep for the LQR discretization. Defaults to dt
raises:
ValueError if nominal_params are not valid for this system
"""
super().__init__(nominal_params, dt, controller_dt)
def validate_params(self, params: Scenario) -> bool:
"""Check if a given set of parameters is valid
args:
params: a dictionary giving the parameter values for the system.
Requires keys ["m", "I", "r"]
returns:
True if parameters are valid, False otherwise
"""
valid = True
# Make sure all needed parameters were provided
valid = valid and "m" in params
valid = valid and "I" in params
valid = valid and "r" in params
# Make sure all parameters are physically valid
valid = valid and params["m"] > 0
valid = valid and params["I"] > 0
valid = valid and params["r"] > 0
return valid
@property
def n_dims(self) -> int:
return Quad2D.N_DIMS
@property
def angle_dims(self) -> List[int]:
return [Quad2D.THETA]
@property
def n_controls(self) -> int:
return Quad2D.N_CONTROLS
@property
def state_limits(self) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Return a tuple (upper, lower) describing the expected range of states for this
system
"""
# define upper and lower limits based around the nominal equilibrium input
upper_limit = torch.ones(self.n_dims)
upper_limit[Quad2D.PX] = 2.0
upper_limit[Quad2D.PZ] = 2.0
upper_limit[Quad2D.THETA] = np.pi
upper_limit[Quad2D.VX] = 2.0
upper_limit[Quad2D.VZ] = 2.0
upper_limit[Quad2D.THETA_DOT] = 2.0 * np.pi
lower_limit = -1.0 * upper_limit
return (upper_limit, lower_limit)
@property
def control_limits(self) -> Tuple[torch.Tensor, torch.Tensor]:
"""
Return a tuple (upper, lower) describing the range of allowable control
limits for this system
"""
# define upper and lower limits based around the nominal equilibrium input
upper_limit = self.nominal_params["m"] * grav / 2.0 + torch.tensor([4.0, 4.0])
lower_limit = self.nominal_params["m"] * grav / 2.0 - torch.tensor([4.0, 4.0])
return (upper_limit, lower_limit)
def safe_mask(self, x):
"""Return the mask of x indicating safe regions for the obstacle task
args:
x: a tensor of points in the state space
"""
safe_mask = torch.ones_like(x[:, 0], dtype=torch.bool)
# We have a floor that we need to avoid
safe_z = -0.1
floor_mask = x[:, 1] >= safe_z
safe_mask.logical_and_(floor_mask)
# We also have a block obstacle to the left at ground level
obs1_min_x, obs1_max_x = (-1.1, -0.4)
obs1_min_z, obs1_max_z = (-0.5, 0.6)
obs1_mask_x = torch.logical_or(x[:, 0] <= obs1_min_x, x[:, 0] >= obs1_max_x)
obs1_mask_z = torch.logical_or(x[:, 1] <= obs1_min_z, x[:, 1] >= obs1_max_z)
obs1_mask = torch.logical_or(obs1_mask_x, obs1_mask_z)
safe_mask.logical_and_(obs1_mask)
# We also have a block obstacle to the right in the air
obs2_min_x, obs2_max_x = (-0.1, 1.1)
obs2_min_z, obs2_max_z = (0.7, 1.5)
obs2_mask_x = torch.logical_or(x[:, 0] <= obs2_min_x, x[:, 0] >= obs2_max_x)
obs2_mask_z = torch.logical_or(x[:, 1] <= obs2_min_z, x[:, 1] >= obs2_max_z)
obs2_mask = torch.logical_or(obs2_mask_x, obs2_mask_z)
safe_mask.logical_and_(obs2_mask)
# Also constrain to be within a norm bound
norm_mask = x.norm(dim=-1) <= 4.5
safe_mask.logical_and_(norm_mask)
return safe_mask
def unsafe_mask(self, x):
"""Return the mask of x indicating unsafe regions for the obstacle task
args:
x: a tensor of points in the state space
"""
unsafe_mask = torch.zeros_like(x[:, 0], dtype=torch.bool)
# We have a floor that we need to avoid
unsafe_z = -0.3
floor_mask = x[:, 1] <= unsafe_z
unsafe_mask.logical_or_(floor_mask)
# We also have a block obstacle to the left at ground level
obs1_min_x, obs1_max_x = (-1.0, -0.5)
obs1_min_z, obs1_max_z = (-0.4, 0.5)
obs1_mask_x = torch.logical_and(x[:, 0] >= obs1_min_x, x[:, 0] <= obs1_max_x)
obs1_mask_z = torch.logical_and(x[:, 1] >= obs1_min_z, x[:, 1] <= obs1_max_z)
obs1_mask = torch.logical_and(obs1_mask_x, obs1_mask_z)
unsafe_mask.logical_or_(obs1_mask)
# We also have a block obstacle to the right in the air
obs2_min_x, obs2_max_x = (0.0, 1.0)
obs2_min_z, obs2_max_z = (0.8, 1.4)
obs2_mask_x = torch.logical_and(x[:, 0] >= obs2_min_x, x[:, 0] <= obs2_max_x)
obs2_mask_z = torch.logical_and(x[:, 1] >= obs2_min_z, x[:, 1] <= obs2_max_z)
obs2_mask = torch.logical_and(obs2_mask_x, obs2_mask_z)
unsafe_mask.logical_or_(obs2_mask)
# Also constrain with a norm bound
norm_mask = x.norm(dim=-1) >= 7.0
unsafe_mask.logical_or_(norm_mask)
return unsafe_mask
def goal_mask(self, x):
"""Return the mask of x indicating points in the goal set (within 0.2 m of the
goal).
args:
x: a tensor of points in the state space
"""
goal_mask = torch.ones_like(x[:, 0], dtype=torch.bool)
# Define the goal region as being near the goal
near_goal_xz = x[:, : Quad2D.PZ + 1].norm(dim=-1) <= 0.3
goal_mask.logical_and_(near_goal_xz)
near_goal_theta = x[:, Quad2D.THETA].abs() <= 1.0
goal_mask.logical_and_(near_goal_theta)
near_goal_xz_velocity = x[:, Quad2D.VX : Quad2D.VZ + 1].norm(dim=-1) <= 1.0
goal_mask.logical_and_(near_goal_xz_velocity)
near_goal_theta_velocity = x[:, Quad2D.THETA_DOT].abs() <= 1.0
goal_mask.logical_and_(near_goal_theta_velocity)
# The goal set has to be a subset of the safe set
goal_mask.logical_and_(self.safe_mask(x))
return goal_mask
def _f(self, x: torch.Tensor, params: Scenario):
"""
Return the control-independent part of the control-affine dynamics.
args:
x: bs x self.n_dims tensor of state
params: a dictionary giving the parameter values for the system. If None,
default to the nominal parameters used at initialization
returns:
f: bs x self.n_dims x 1 tensor
"""
# Extract batch size and set up a tensor for holding the result
batch_size = x.shape[0]
f = torch.zeros((batch_size, self.n_dims, 1))
f = f.type_as(x)
# The derivatives of px, pz, and theta are just the velocities
f[:, Quad2D.PX, 0] = x[:, Quad2D.VX]
f[:, Quad2D.PZ, 0] = x[:, Quad2D.VZ]
f[:, Quad2D.THETA, 0] = x[:, Quad2D.THETA_DOT]
# Acceleration in x has no control-independent part
f[:, 3, 0] = 0.0
# Acceleration in z is affected by the relentless pull of gravity
f[:, 4, 0] = -grav
# Acceleration in theta has no control-independent part
f[:, 5, 0] = 0.0
return f
def _g(self, x: torch.Tensor, params: Scenario):
"""
Return the control-dependent part of the control-affine dynamics.
args:
x: bs x self.n_dims tensor of state
params: a dictionary giving the parameter values for the system. If None,
default to the nominal parameters used at initialization
returns:
g: bs x self.n_dims x self.n_controls tensor
"""
# Extract batch size and set up a tensor for holding the result
batch_size = x.shape[0]
g = torch.zeros((batch_size, self.n_dims, self.n_controls))
g = g.type_as(x)
# Extract the needed parameters
m, inertia, r = params["m"], params["I"], params["r"]
# and state variables
theta = x[:, Quad2D.THETA]
# Effect on x acceleration
g[:, Quad2D.VX, Quad2D.U_RIGHT] = -torch.sin(theta) / m
g[:, Quad2D.VX, Quad2D.U_LEFT] = -torch.sin(theta) / m
# Effect on z acceleration
g[:, Quad2D.VZ, Quad2D.U_RIGHT] = torch.cos(theta) / m
g[:, Quad2D.VZ, Quad2D.U_LEFT] = torch.cos(theta) / m
# Effect on heading from rotors
g[:, Quad2D.THETA_DOT, Quad2D.U_RIGHT] = r / inertia
g[:, Quad2D.THETA_DOT, Quad2D.U_LEFT] = -r / inertia
return g
@property
def u_eq(self):
u_eq = (
torch.zeros(
(
1,
self.n_controls,
)
)
+ self.nominal_params["m"] * grav / 2.0
)
return u_eq
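# Illustrative usage (the parameter values below are assumptions, not tuned
# constants):
#
# nominal = {"m": 1.0, "I": 0.01, "r": 0.25}
# quad = Quad2D(nominal_params=nominal)
# x = torch.zeros(1, Quad2D.N_DIMS)
# xdot = quad._f(x, nominal) + quad._g(x, nominal) @ quad.u_eq.unsqueeze(-1)
# # at hover with u = u_eq, xdot is (approximately) zero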
|