id (stringlengths 1–8) | text (stringlengths 6–1.05M) | dataset_id (stringclasses: 1 value) |
---|---|---|
248552
|
<gh_stars>1-10
'''
Description: common modules for GAN models
Version: 1.0
Author: searobbersanduck
Date: 2021-03-29 16:37:26
LastEditors: searobbersanduck
LastEditTime: 2021-03-31 09:36:52
License : (C)Copyright 2020-2021, MIT
'''
# ref: https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/models/networks.py
import os
import sys
import torch
import torch.nn as nn
from torch.nn import init
import functools
from torch.optim import lr_scheduler
###############################################################################
# Helper Functions
###############################################################################
class Identity(nn.Module):
def forward(self, x):
return x
def get_norm_layer(norm_type='instance', in_discriminator=False):
"""Return a normalization layer
Parameters:
        norm_type (str) -- the name of the normalization layer: batch | instance | none
        in_discriminator (bool) -- when True, BatchNorm layers do not track running statistics
For BatchNorm, we use learnable affine parameters and track running statistics (mean/stddev).
For InstanceNorm, we do not use learnable affine parameters. We do not track running statistics.
"""
if norm_type == 'batch':
        if in_discriminator:
norm_layer = functools.partial(nn.BatchNorm3d, affine=True, track_running_stats=False)
else:
norm_layer = functools.partial(nn.BatchNorm3d, affine=True, track_running_stats=True)
elif norm_type == 'instance':
norm_layer = functools.partial(nn.InstanceNorm3d, affine=False, track_running_stats=False)
elif norm_type == 'none':
def norm_layer(x): return Identity()
else:
raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
return norm_layer
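# Usage sketch (not part of the original module): build a norm-layer factory and
# apply it to a dummy 5D volume; the shapes below are illustrative only.
# norm_layer = get_norm_layer('instance')
# norm = norm_layer(16)                      # nn.InstanceNorm3d(16, affine=False, ...)
# y = norm(torch.randn(1, 16, 8, 32, 32))    # (N, C, D, H, W)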
def get_scheduler(optimizer, opt):
"""Return a learning rate scheduler
Parameters:
optimizer -- the optimizer of the network
opt (option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions.
opt.lr_policy is the name of learning rate policy: linear | step | plateau | cosine
For 'linear', we keep the same learning rate for the first <opt.n_epochs> epochs
and linearly decay the rate to zero over the next <opt.n_epochs_decay> epochs.
For other schedulers (step, plateau, and cosine), we use the default PyTorch schedulers.
See https://pytorch.org/docs/stable/optim.html for more details.
"""
if opt.lr_policy == 'linear':
def lambda_rule(epoch):
lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.n_epochs) / float(opt.n_epochs_decay + 1)
return lr_l
scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)
elif opt.lr_policy == 'step':
scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1)
elif opt.lr_policy == 'plateau':
scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5)
elif opt.lr_policy == 'cosine':
scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.n_epochs, eta_min=0)
else:
        raise NotImplementedError('learning rate policy [%s] is not implemented' % opt.lr_policy)
return scheduler
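# Typical driver loop (a sketch; `train_one_epoch` is a hypothetical helper, not
# defined in this module): step the scheduler once per epoch after optimizing.
# scheduler = get_scheduler(optimizer, opt)
# for epoch in range(opt.n_epochs + opt.n_epochs_decay):
#     train_one_epoch(...)
#     scheduler.step()
#     print('learning rate = %.7f' % optimizer.param_groups[0]['lr'])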
def init_weights(net, init_type='normal', init_gain=0.02):
"""Initialize network weights.
Parameters:
net (network) -- network to be initialized
init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal
init_gain (float) -- scaling factor for normal, xavier and orthogonal.
We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might
work better for some applications. Feel free to try yourself.
"""
def init_func(m): # define the initialization function
classname = m.__class__.__name__
if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
if init_type == 'normal':
init.normal_(m.weight.data, 0.0, init_gain)
elif init_type == 'xavier':
init.xavier_normal_(m.weight.data, gain=init_gain)
elif init_type == 'kaiming':
init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
elif init_type == 'orthogonal':
init.orthogonal_(m.weight.data, gain=init_gain)
else:
raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
if hasattr(m, 'bias') and m.bias is not None:
init.constant_(m.bias.data, 0.0)
elif classname.find('BatchNorm3d') != -1: # BatchNorm Layer's weight is not a matrix; only normal distribution applies.
init.normal_(m.weight.data, 1.0, init_gain)
init.constant_(m.bias.data, 0.0)
print('initialize network with %s' % init_type)
net.apply(init_func) # apply the initialization function <init_func>
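# Minimal example (illustrative, not from the original file): initialize a small
# 3D conv stack with Xavier weights.
# toy = nn.Sequential(nn.Conv3d(1, 8, 3, padding=1), nn.BatchNorm3d(8), nn.ReLU(True))
# init_weights(toy, init_type='xavier', init_gain=0.02)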
def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[], opt=None):
"""Initialize a network: 1. register CPU/GPU device (with multi-GPU support); 2. initialize the network weights
Parameters:
net (network) -- the network to be initialized
init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal
        init_gain (float) -- scaling factor for normal, xavier and orthogonal.
gpu_ids (int list) -- which GPUs the network runs on: e.g., 0,1,2
Return an initialized network.
"""
if opt is not None and 'distributed' in opt and opt.distributed is True:
        # TODO: synchronize weight initialization across ranks
import tempfile
import torch.distributed as dist
print('====> init distributed net!')
if not opt.continue_train:
checkpoint_path = os.path.join(tempfile.gettempdir(), "initial_weights.pt")
if opt.rank == 0:
init_weights(net, init_type, init_gain=init_gain)
torch.save(net.state_dict(), checkpoint_path)
dist.barrier()
net.load_state_dict(torch.load(checkpoint_path))
if opt.gpu is not None:
torch.cuda.set_device(opt.gpu)
net.cuda(opt.gpu)
net = torch.nn.parallel.DistributedDataParallel(net, device_ids=[opt.gpu], output_device=opt.gpu)
return net
if len(gpu_ids) > 0:
assert(torch.cuda.is_available())
net.to(gpu_ids[0])
net = torch.nn.DataParallel(net, gpu_ids) # multi-GPUs
init_weights(net, init_type, init_gain=init_gain)
return net
def define_G(input_nc, output_nc, ngf, netG, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[], opt=None):
"""Create a generator
Parameters:
input_nc (int) -- the number of channels in input images
output_nc (int) -- the number of channels in output images
ngf (int) -- the number of filters in the last conv layer
netG (str) -- the architecture's name: resnet_9blocks | resnet_6blocks | unet_256 | unet_128
norm (str) -- the name of normalization layers used in the network: batch | instance | none
use_dropout (bool) -- if use dropout layers.
init_type (str) -- the name of our initialization method.
init_gain (float) -- scaling factor for normal, xavier and orthogonal.
gpu_ids (int list) -- which GPUs the network runs on: e.g., 0,1,2
Returns a generator
Our current implementation provides two types of generators:
U-Net: [unet_128] (for 128x128 input images) and [unet_256] (for 256x256 input images)
The original U-Net paper: https://arxiv.org/abs/1505.04597
Resnet-based generator: [resnet_6blocks] (with 6 Resnet blocks) and [resnet_9blocks] (with 9 Resnet blocks)
Resnet-based generator consists of several Resnet blocks between a few downsampling/upsampling operations.
We adapt Torch code from <NAME>'s neural style transfer project (https://github.com/jcjohnson/fast-neural-style).
    The generator has been initialized by <init_net>. It uses ReLU for non-linearity.
"""
net = None
norm_layer = get_norm_layer(norm_type=norm)
if netG == 'resnet_9blocks':
net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=9)
elif netG == 'resnet_6blocks':
net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=6)
elif netG == 'unet_128':
net = UnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
elif netG == 'unet_256':
net = UnetGenerator(input_nc, output_nc, 8, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
else:
raise NotImplementedError('Generator model name [%s] is not recognized' % netG)
return init_net(net, init_type, init_gain, gpu_ids, opt)
# return ResnetGenerator(1,1, 32, n_blocks=6)
def define_D(input_nc, ndf, netD, n_layers_D=3, norm='batch', init_type='normal', init_gain=0.02, gpu_ids=[], opt=None):
"""Create a discriminator
Parameters:
input_nc (int) -- the number of channels in input images
ndf (int) -- the number of filters in the first conv layer
netD (str) -- the architecture's name: basic | n_layers | pixel
n_layers_D (int) -- the number of conv layers in the discriminator; effective when netD=='n_layers'
norm (str) -- the type of normalization layers used in the network.
init_type (str) -- the name of the initialization method.
init_gain (float) -- scaling factor for normal, xavier and orthogonal.
gpu_ids (int list) -- which GPUs the network runs on: e.g., 0,1,2
Returns a discriminator
Our current implementation provides three types of discriminators:
[basic]: 'PatchGAN' classifier described in the original pix2pix paper.
It can classify whether 70×70 overlapping patches are real or fake.
Such a patch-level discriminator architecture has fewer parameters
than a full-image discriminator and can work on arbitrarily-sized images
in a fully convolutional fashion.
[n_layers]: With this mode, you can specify the number of conv layers in the discriminator
with the parameter <n_layers_D> (default=3 as used in [basic] (PatchGAN).)
[pixel]: 1x1 PixelGAN discriminator can classify whether a pixel is real or not.
It encourages greater color diversity but has no effect on spatial statistics.
    The discriminator has been initialized by <init_net>. It uses Leaky ReLU for non-linearity.
"""
net = None
# if opt is not None and 'distributed' in opt and opt.distributed is True:
# norm_layer = torch.nn.InstanceNorm3d
# else:
    norm_layer = get_norm_layer(norm_type=norm, in_discriminator=True)
if netD == 'basic': # default PatchGAN classifier
net = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer)
elif netD == 'n_layers': # more options
net = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer)
elif netD == 'pixel': # classify if each pixel is real or fake
net = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer)
else:
raise NotImplementedError('Discriminator model name [%s] is not recognized' % netD)
return init_net(net, init_type, init_gain, gpu_ids, opt)
# return PixelDiscriminator(2,8)
##############################################################################
# Classes
##############################################################################
class GANLoss(nn.Module):
"""Define different GAN objectives.
The GANLoss class abstracts away the need to create the target label tensor
that has the same size as the input.
"""
def __init__(self, gan_mode, target_real_label=1.0, target_fake_label=0.0):
""" Initialize the GANLoss class.
Parameters:
gan_mode (str) - - the type of GAN objective. It currently supports vanilla, lsgan, and wgangp.
target_real_label (bool) - - label for a real image
target_fake_label (bool) - - label of a fake image
Note: Do not use sigmoid as the last layer of Discriminator.
LSGAN needs no sigmoid. vanilla GANs will handle it with BCEWithLogitsLoss.
"""
super(GANLoss, self).__init__()
self.register_buffer('real_label', torch.tensor(target_real_label))
self.register_buffer('fake_label', torch.tensor(target_fake_label))
self.gan_mode = gan_mode
if gan_mode == 'lsgan':
self.loss = nn.MSELoss()
elif gan_mode == 'vanilla':
self.loss = nn.BCEWithLogitsLoss()
elif gan_mode in ['wgangp']:
self.loss = None
else:
raise NotImplementedError('gan mode %s not implemented' % gan_mode)
def get_target_tensor(self, prediction, target_is_real):
"""Create label tensors with the same size as the input.
Parameters:
            prediction (tensor) - - typically the prediction from a discriminator
target_is_real (bool) - - if the ground truth label is for real images or fake images
Returns:
A label tensor filled with ground truth label, and with the size of the input
"""
if target_is_real:
target_tensor = self.real_label
else:
target_tensor = self.fake_label
return target_tensor.expand_as(prediction)
def __call__(self, prediction, target_is_real):
"""Calculate loss given Discriminator's output and grount truth labels.
Parameters:
prediction (tensor) - - tpyically the prediction output from a discriminator
target_is_real (bool) - - if the ground truth label is for real images or fake images
Returns:
the calculated loss.
"""
if self.gan_mode in ['lsgan', 'vanilla']:
target_tensor = self.get_target_tensor(prediction, target_is_real)
loss = self.loss(prediction, target_tensor)
elif self.gan_mode == 'wgangp':
if target_is_real:
loss = -prediction.mean()
else:
loss = prediction.mean()
return loss
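# Sketch of the usual objectives built on this class; `netD`, `real` and `fake`
# are placeholders that do not exist in this module.
# criterion = GANLoss('lsgan')
# loss_D = 0.5 * (criterion(netD(real), True) + criterion(netD(fake.detach()), False))
# loss_G = criterion(netD(fake), True)  # the generator tries to fool the discriminator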
class ResnetBlock(nn.Module):
"""Define a Resnet block"""
def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias):
"""Initialize the Resnet block
A resnet block is a conv block with skip connections
We construct a conv block with build_conv_block function,
and implement skip connections in <forward> function.
Original Resnet paper: https://arxiv.org/pdf/1512.03385.pdf
"""
super(ResnetBlock, self).__init__()
self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias)
def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias):
"""Construct a convolutional block.
Parameters:
dim (int) -- the number of channels in the conv layer.
padding_type (str) -- the name of padding layer: reflect | replicate | zero
norm_layer -- normalization layer
use_dropout (bool) -- if use dropout layers.
use_bias (bool) -- if the conv layer uses bias or not
Returns a conv block (with a conv layer, a normalization layer, and a non-linearity layer (ReLU))
"""
        conv_block = []
        p = 1  # zero padding; `padding_type` is accepted for API compatibility but unused in this 3D variant
conv_block += [nn.Conv3d(dim, dim//2, kernel_size=1, padding=0, bias=use_bias), norm_layer(dim//2), nn.ReLU(True)]
conv_block += [nn.Conv3d(dim//2, dim//2, kernel_size=3, padding=p, bias=use_bias), norm_layer(dim//2), nn.ReLU(True)]
conv_block += [nn.Conv3d(dim//2, dim, kernel_size=1, padding=0, bias=use_bias), norm_layer(dim), nn.ReLU(True)]
return nn.Sequential(*conv_block)
def forward(self, x):
"""Forward function (with skip connections)"""
out = x + self.conv_block(x) # add skip connections
return out
class ResnetGenerator(nn.Module):
"""Resnet-based generator that consists of Resnet blocks between a few downsampling/upsampling operations.
We adapt Torch code and idea from <NAME>'s neural style transfer project(https://github.com/jcjohnson/fast-neural-style)
"""
def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm3d, use_dropout=False, n_blocks=6, padding_type='zero', use_deconv=True):
"""Construct a Resnet-based generator
Parameters:
input_nc (int) -- the number of channels in input images
output_nc (int) -- the number of channels in output images
ngf (int) -- the number of filters in the last conv layer
norm_layer -- normalization layer
use_dropout (bool) -- if use dropout layers
n_blocks (int) -- the number of ResNet blocks
padding_type (str) -- the name of padding layer in conv layers: reflect | replicate | zero
"""
assert(n_blocks >= 0)
super(ResnetGenerator, self).__init__()
if type(norm_layer) == functools.partial:
use_bias = norm_layer.func == nn.InstanceNorm3d
else:
use_bias = norm_layer == nn.InstanceNorm3d
model = [nn.Conv3d(input_nc, ngf, kernel_size=7, padding=3, bias=use_bias),
norm_layer(ngf),
nn.ReLU(True)]
n_downsampling = 3
for i in range(n_downsampling): # add downsampling layers
mult = 2 ** i
model += [nn.Conv3d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1, bias=use_bias),
norm_layer(ngf * mult * 2),
nn.ReLU(True)]
mult = 2 ** n_downsampling
for i in range(n_blocks): # add ResNet blocks
model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer, use_dropout=use_dropout, use_bias=use_bias)]
for i in range(n_downsampling): # add upsampling layers
mult = 2 ** (n_downsampling - i)
if use_deconv:
model += [nn.ConvTranspose3d(ngf * mult, int(ngf * mult / 2),
kernel_size=3, stride=2,
padding=1, output_padding=1,
bias=use_bias),
norm_layer(int(ngf * mult / 2)),
nn.ReLU(True)]
else:
model += [nn.Upsample(scale_factor=2, mode='trilinear'),
nn.Conv3d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=1, padding=1, bias=use_bias),
norm_layer(int(ngf * mult / 2)),nn.ReLU(True)]
model += [nn.Conv3d(ngf, output_nc, kernel_size=7, padding=3)]
# model += [nn.Tanh()]
self.model = nn.Sequential(*model)
def forward(self, input):
"""Standard forward"""
return self.model(input)
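# Shape check (assumes spatial dims divisible by 2**n_downsampling == 8):
# G = ResnetGenerator(1, 1, ngf=32, n_blocks=6)
# out = G(torch.randn(1, 1, 32, 64, 64))  # -> torch.Size([1, 1, 32, 64, 64])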
class PixelDiscriminator(nn.Module):
"""Defines a 1x1 PatchGAN discriminator (pixelGAN)"""
def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm3d):
"""Construct a 1x1 PatchGAN discriminator
Parameters:
input_nc (int) -- the number of channels in input images
ndf (int) -- the number of filters in the last conv layer
norm_layer -- normalization layer
"""
super(PixelDiscriminator, self).__init__()
if type(norm_layer) == functools.partial: # no need to use bias as BatchNorm3d has affine parameters
use_bias = norm_layer.func == nn.InstanceNorm3d
else:
use_bias = norm_layer == nn.InstanceNorm3d
self.net = [
nn.Conv3d(input_nc, ndf, kernel_size=1, stride=1, padding=0),
nn.LeakyReLU(0.2, True),
nn.Conv3d(ndf, ndf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias),
norm_layer(ndf * 2),
nn.LeakyReLU(0.2, True),
nn.Conv3d(ndf * 2, 1, kernel_size=1, stride=1, padding=0, bias=use_bias)]
self.net = nn.Sequential(*self.net)
def forward(self, input):
"""Standard forward."""
return self.net(input)
class NLayerDiscriminator(nn.Module):
"""Defines a PatchGAN discriminator"""
    def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm3d):
"""Construct a PatchGAN discriminator
Parameters:
input_nc (int) -- the number of channels in input images
ndf (int) -- the number of filters in the last conv layer
n_layers (int) -- the number of conv layers in the discriminator
norm_layer -- normalization layer
"""
super(NLayerDiscriminator, self).__init__()
        if type(norm_layer) == functools.partial:  # no need to use bias as BatchNorm3d has affine parameters
            use_bias = norm_layer.func == nn.InstanceNorm3d
        else:
            use_bias = norm_layer == nn.InstanceNorm3d
kw = 4
padw = 1
        sequence = [nn.Conv3d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), nn.LeakyReLU(0.2, True)]
nf_mult = 1
nf_mult_prev = 1
for n in range(1, n_layers): # gradually increase the number of filters
nf_mult_prev = nf_mult
nf_mult = min(2 ** n, 8)
sequence += [
                nn.Conv3d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=2, padding=padw, bias=use_bias),
norm_layer(ndf * nf_mult),
nn.LeakyReLU(0.2, True)
]
nf_mult_prev = nf_mult
nf_mult = min(2 ** n_layers, 8)
sequence += [
            nn.Conv3d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=1, padding=padw, bias=use_bias),
norm_layer(ndf * nf_mult),
nn.LeakyReLU(0.2, True)
]
        sequence += [nn.Conv3d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)]  # output 1 channel prediction map
self.model = nn.Sequential(*sequence)
def forward(self, input):
"""Standard forward."""
return self.model(input)
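# Patch-level output sketch (sizes illustrative): the discriminator returns a grid
# of real/fake logits, one per receptive-field patch, not a single scalar.
# D = NLayerDiscriminator(1, ndf=64, n_layers=3)
# scores = D(torch.randn(1, 1, 32, 64, 64))  # 5D logit map over overlapping patches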
|
StarcoderdataPython
|
3322610
|
<filename>web/deploy/wtdeploy/wtdeploy/modules/fab_nginx.py
#!/usr/bin/python
# -*- encoding: utf-8 -*-
#
# author: <NAME>
from fabric.api import sudo
from fabric.api import cd
from fabric.api import run
from fabric.api import env
from fabric.contrib.files import upload_template
from fabric.contrib.files import exists
def install(conf_folder):
sudo("apt-get -y install nginx")
def copy_conf_files(conf_folder, deploy_folder, is_mobile):
with cd(deploy_folder):
run('mkdir -p nginx')
if is_mobile:
#nginx stuff
upload_template('%s/nginx/host_conf_mobile' % conf_folder, 'nginx', context=env)
sudo('cp nginx/host_conf_mobile /etc/nginx/sites-available/%(host)s' % env)
#apache stuff
if exists('%s/apache/apache' % conf_folder):
upload_template('%s/apache/apache_mobile' % conf_folder, 'apache2', context=env)
sudo('cp apache2/apache_mobile /etc/apache2/sites-available/%(host)s' % env)
#symbolic link to main app media
if exists('%(deploy_folder)s/app/media' % env):
sudo('rm -rvf %(deploy_folder)s/app/media' % env)
sudo('ln -fs %(main_app_deploy_folder)s/app/media %(deploy_folder)s/app/media' % env)
else:
#nginx stuff
upload_template('%s/nginx/host_conf' % conf_folder, 'nginx', context=env)
upload_template('%s/nginx/nginx.conf' % conf_folder, 'nginx', context=env)
upload_template('%s/nginx/proxy.conf' % conf_folder, 'nginx', context=env)
sudo('cp nginx/host_conf /etc/nginx/sites-available/%(host)s' % env)
sudo('cp nginx/nginx.conf /etc/nginx/nginx.conf')
sudo('cp nginx/proxy.conf /etc/nginx/proxy.conf')
#apache stuff
if exists('%s/apache/apache' % conf_folder):
upload_template('%s/apache/apache' % conf_folder, 'apache2', context=env)
sudo('cp apache2/apache /etc/apache2/sites-available/%(host)s' % env)
sudo('chmod a+r /etc/apache2/sites-available/%(host)s' % env)
if not exists('/etc/nginx/sites-enabled/%(host)s' % env):
sudo('ln -fs /etc/nginx/sites-available/%(host)s /etc/nginx/sites-enabled/%(host)s' % env)
sudo('chmod a+r /etc/nginx/sites-enabled/%(host)s' % env)
if exists('%s/apache/apache' % conf_folder) and not exists('/etc/apache2/sites-enabled/%(host)s' % env):
sudo('ln -fs /etc/apache2/sites-available/%(host)s /etc/apache2/sites-enabled/%(host)s' % env)
sudo('chmod a+r /etc/apache2/sites-enabled/%(host)s' % env)
def start():
sudo("/etc/init.d/nginx start")
def stop():
sudo("/etc/init.d/nginx stop")
def restart():
sudo("/etc/init.d/nginx restart")
|
StarcoderdataPython
|
1808001
|
<filename>Init_Guide/text-search/file-search.py
f_opn = open("file.txt", "r")
for line in f_opn:
if "hello" in line:
        print(line)
f_opn.close()
|
StarcoderdataPython
|
5191018
|
from django.db import models
class LastRun(models.Model):
"""
Table: Lastrun
Comment: Table that stores all the background runner last run.
"""
component = models.CharField(max_length=30)
last_run = models.DateTimeField(auto_now=True, blank=True)
def __str__(self):
return self.component
class BackupHistory(models.Model):
"""
Table: backup_history
Comment: Table that stores all the backup information ...
"""
backup_date = models.DateTimeField(null=False)
backup_status = models.CharField(max_length=20, null=False)
backup_file = models.CharField(max_length=200, null=False)
backup_size = models.IntegerField(default=0)
def __str__(self):
return str(self.backup_date) + "(" + str(self.backup_status) + ")"
def backup_size_in_kb(self):
return int(self.backup_size) / 1024
|
StarcoderdataPython
|
6687927
|
<reponame>RKatana/inventory-app-django
# Generated by Django 3.2.8 on 2021-10-12 14:09
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('authentication', '0002_profile'),
]
operations = [
migrations.RenameField(
model_name='profile',
old_name='profilePic',
new_name='profile_pic',
),
]
|
StarcoderdataPython
|
368046
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sqlite3 as lite
import sys
con = lite.connect('test.db')
bookname = input("Book Name: ")
scifi = input("Is this a Scifi book? y/n :")
with con:
cur = con.cursor()
cur.execute ("DROP TABLE IF EXISTS Book_classifier;")
cur.execute("CREATE TABLE Book_classifier(Id INT, bookname TEXT, scifi TEXT, )")
cur.execute("INSERT INTO Book_classifier VALUES(1,'Audi',52642)")
cur.execute("INSERT INTO Book_classifier VALUES(2,'Mercedes',57127)")
cur.execute("INSERT INTO Book_classifier VALUES(3,'Skoda',9000)")
cur.execute("INSERT INTO Book_classifier VALUES(4,'Volvo',29000)")
cur.execute("INSERT INTO Book_classifier VALUES(5,'Bentley',350000)")
cur.execute("INSERT INTO Book_classifier VALUES(6,'Citroen',21000)")
cur.execute("INSERT INTO Book_classifier VALUES(7,'Hummer',41400)")
cur.execute("INSERT INTO Book_classifier VALUES(8,'Volkswagen',21600)")
cur.execute('SELECT * FROM Book_classifier')
allrows = cur.fetchall()
cur.execute("DROP TABLE Book_classifier")
print(allrows)
print (scifi)
|
StarcoderdataPython
|
6705531
|
<gh_stars>0
# Generated by Django 3.0.4 on 2020-05-07 14:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0002_auto_20200426_1713'),
]
operations = [
migrations.CreateModel(
name='Rating',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rates', models.DecimalField(decimal_places=2, max_digits=50)),
('updated', models.DateTimeField(auto_now=True)),
('active', models.BooleanField(default=True)),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='core.Item')),
],
),
]
|
StarcoderdataPython
|
5163425
|
# coding:utf-8
import pymysql
con = pymysql.connect(
host='127.0.0.1',
user='root',
password='<PASSWORD>',
port=3306,
database='android_app'
)
print(con)
cur = con.cursor()
print(cur)
cur.execute("select * from user")
print(cur.fetchone())
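# Not in the original snippet: release the cursor and connection once done.
cur.close()
con.close()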
|
StarcoderdataPython
|
3493520
|
"""Console module.
Collection of console helpers.
"""
import sys
import beeprint
DEBUG = False
def pp(var):
"""Pretty print.
Wrapper around beeprint
Args:
var: some variable
"""
beeprint.pp(var)
def dd(var):
"""Pretty print and die.
Args:
var: some variable
"""
pp(var)
    sys.exit()
def p(var, end=None):
"""Print.
Wrapper around print.
Args:
var: some variable
end: end of line character (default: {None})
"""
if end is not None:
print(var, end=end)
else:
print(var)
sys.stdout.flush()
def dcr(message: str):
"""Print crawler debug info.
Args:
message: debug message
"""
if DEBUG:
p('debug [crawler]: ' + message)
def dp(message: str):
"""Print photographer debug info.
Args:
message: debug message
"""
if DEBUG:
p('debug [photographer]: ' + message)
def dca(message: str):
"""Print camera debug info.
Args:
message: debug message
"""
if DEBUG:
p('debug [camera]: ' + message)
def df(message: str):
"""Print filesystem debug info.
Args:
message: debug message
"""
if DEBUG:
p('debug [filesystem]: ' + message)
def eol():
"""Print end of line character."""
p('')
|
StarcoderdataPython
|
3569559
|
<reponame>ihmeuw/vivarium
from .hdf import EntityKey
from .artifact import Artifact, ArtifactException
from .manager import ArtifactManager, ArtifactInterface, parse_artifact_path_config, filter_data, validate_filter_term
|
StarcoderdataPython
|
1710118
|
import sys
import pytest
from konfetti import Konfig
from konfetti.exceptions import ForbiddenOverrideError
pytestmark = [pytest.mark.usefixtures("settings")]
skip_if_py2 = pytest.mark.skipif(sys.version_info[0] == 2, reason="Async syntax is not supported on Python 2.")
def test_override_function(testdir):
"""`override` decorator allows users to set custom config values per test function."""
testdir.makepyfile(
"""
from settings import config
import pytest
@pytest.fixture
def example():
return "test"
@config.override(INTEGER=123)
def test_override_function():
assert config.INTEGER == 123
@config.override(INTEGER=123)
def test_override_function_with_fixture(example):
assert config.INTEGER == 123
assert example == "test"
@config.override(INTEGER=123)
@pytest.mark.parametrize("x", [1, 2])
def test_override_function_with_parametrize(example, x):
assert config.INTEGER == 123
assert example == "test"
assert isinstance(x, int)
@pytest.mark.parametrize("x", [1, 2])
@config.override(INTEGER=123)
def test_override_function_with_parametrize_first(example, x):
assert config.INTEGER == 123
assert example == "test"
assert isinstance(x, int)
def test_disable():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest("-s")
result.assert_outcomes(passed=7)
def test_override_vault_secret(testdir):
"""Vault vault should be overridden correctly."""
testdir.makepyfile(
"""
from settings import config
@config.override(SECRET="not secret")
def test_override_function():
assert config.SECRET == "not secret"
def test_disable():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest()
result.assert_outcomes(passed=2)
def test_override_method(testdir):
"""`override` decorator also works for class methods."""
testdir.makepyfile(
"""
from settings import config
import pytest
@pytest.fixture
def example():
return "test"
class TestOverride:
@config.override(INTEGER=123)
def test_override(self):
assert config.INTEGER == 123
@config.override(INTEGER=123)
def test_override_with_fixture(self, example):
assert config.INTEGER == 123
assert example == "test"
def test_disable_on_method(self):
assert config.INTEGER == 1
def test_disable_on_function():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest()
result.assert_outcomes(passed=4)
def test_override_class(testdir):
"""`override` decorator also works for classes."""
testdir.makepyfile(
"""
from settings import config
import pytest
@pytest.fixture
def example():
return "test"
@config.override(INTEGER=123)
class TestOverride:
def test_override(self):
assert config.INTEGER == 123
def test_override_with_fixture(self, example):
assert config.INTEGER == 123
assert example == "test"
@config.override(INTEGER=456)
def test_another_override(self, example):
assert config.INTEGER == 456
assert example == "test"
def test_disable_on_function():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest()
result.assert_outcomes(passed=4)
def test_override_class_with_setup(testdir):
"""`override` decorator also works for classes that have custom `setup_class` and `teardown_class` methods."""
testdir.makepyfile(
"""
from settings import config
@config.override(INTEGER=123)
class TestOverride:
@classmethod
def setup_class(cls):
cls.attr = 42
def test_override(self):
assert self.attr == 42
assert config.INTEGER == 123
def test_another_override(self):
assert self.attr == 42
assert config.INTEGER == 123
@classmethod
def teardown_class(cls):
print("TearDown call")
def test_disable_on_function():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest("-s")
result.assert_outcomes(passed=3)
result.stdout.fnmatch_lines(["*TearDown call*"])
def test_override_unittest_class(testdir):
"""`override` decorator also works for unittest-style classes."""
testdir.makepyfile(
"""
import unittest
from settings import config
@config.override(INTEGER=123)
class TestOverride(unittest.TestCase):
def test_override(self):
assert config.INTEGER == 123
def test_another_override(self):
assert config.INTEGER == 123
def test_disable_on_function():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest()
result.assert_outcomes(passed=3)
def test_override_unittest_class_custom_setup(testdir):
"""If unittest-style class has custom `setUp` and `tearDown` then `override` should work as well."""
testdir.makepyfile(
"""
import unittest
from settings import config
@config.override(INTEGER=123)
class TestOverride(unittest.TestCase):
def setUp(self):
self.func = 1
@classmethod
def setUpClass(cls):
cls.cls = 2
def test_override(self):
assert self.func == 1
assert self.cls == 2
assert config.INTEGER == 123
def test_another_override(self):
assert self.func == 1
assert self.cls == 2
assert config.INTEGER == 123
def tearDown(self):
print("TearDown call")
@classmethod
def tearDownClass(cls):
print("TearDownClass call")
def test_disable_on_function():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest("-s")
result.assert_outcomes(passed=3)
result.stdout.fnmatch_lines(["*TearDownClass call*"])
result.stdout.fnmatch_lines(["*TearDown call*"])
def test_override_custom_setup_error(testdir):
"""When an error occurs in a custom setup method config should be unconfigured."""
testdir.makepyfile(
"""
from settings import config
@config.override(INTEGER=123)
class TestOverride:
@classmethod
def setup_class(cls):
1 / 0
def test_override(self):
print("NOT EXECUTED")
@classmethod
def teardown_class(cls):
1 / 0
def test_disabled():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest("-s")
result.assert_outcomes(passed=1, error=1)
assert "NOT EXECUTED" not in result.stdout._log_text
@skip_if_py2
def test_async_test(testdir):
"""`override` decorator works for async tests."""
testdir.makepyfile(
"""
import pytest
from settings import config
pytestmark = pytest.mark.asyncio
@config.override(INTEGER=123)
async def test_override_per_test():
assert config.INTEGER == 123
async def test_disable():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest()
result.assert_outcomes(passed=2)
def test_override_unknown_type(config):
"""`override` can't decorate arbitrary types."""
with pytest.raises(TypeError, match="Don't know how to use `override` for `int`"):
config.override(INTEGER=123)(123)
def test_override_unknown_option():
"""If an option passed to `override` doesn't exist in the config module an error should be risen.
Active only with `strict_override` config option.
"""
config = Konfig(strict_override=True)
with pytest.raises(
ForbiddenOverrideError,
match="Can't override `NOT_EXIST` config option, because it is not defined in the config module",
):
with config.override(NOT_EXIST=123):
pass
def test_strict_override_valid():
config = Konfig(strict_override=True)
with config.override(INTEGER=123):
assert config.INTEGER == 123
def test_override_context_manager(config):
"""It is possible to use it as a context manager."""
with config.override(INTEGER=123):
assert config.INTEGER == 123
assert config.INTEGER == 1
def test_override_context_manager_nested(testdir):
"""Multiple levels of overriding are nested."""
testdir.makepyfile(
"""
from settings import config
def test_context_manager():
with config.override(INTEGER=123):
with config.override(KEY="overridden"):
assert config.INTEGER == 123
assert config.KEY == "overridden"
assert config.KEY == "value"
assert config.INTEGER == 123
assert config.INTEGER == 1
assert config.KEY == "value"
@config.override(KEY="foo")
def test_context_manager_with_decorator():
assert config.KEY == "foo"
with config.override(INTEGER=123):
with config.override(KEY="overridden"):
assert config.INTEGER == 123
assert config.KEY == "overridden"
assert config.KEY == "foo"
assert config.INTEGER == 123
assert config.INTEGER == 1
assert config.KEY == "foo"
def test_disable():
assert config.INTEGER == 1
"""
)
result = testdir.runpytest()
result.assert_outcomes(passed=3)
def test_no_setup_on_override(mocked_import_config_module):
"""If overridden option is accessed, then config is not loaded."""
config = Konfig(strict_override=False)
with config.override(EXAMPLE="awesome"):
assert config.EXAMPLE == "awesome"
mocked_import_config_module.assert_not_called()
def test_setup_on_override(mocked_import_config_module):
"""If non-overridden option is accessed, then config should be loaded."""
config = Konfig()
with config.override(SOMETHING="awesome"):
assert config.EXAMPLE == "test"
    # Py2.7, Py3.5: replace with `assert_called` once 2.7/3.5 support is dropped.
assert mocked_import_config_module.called is True
assert mocked_import_config_module.call_count >= 1
|
StarcoderdataPython
|
5012432
|
"""
Used for analysis of public companies filing with the
United States SEC.
"""
from dataclasses import dataclass
from typing import Iterable, Tuple, Union
import pandas as pd
from ..dataframes import Form10K, Form10Q, HistoricalStockPrices
from ..utils import NestedDepthError, nested_depth, ticker_or_cik_parser
from .connections import DatabaseConnection
@dataclass
class Fundamentals(DatabaseConnection):
"""
Fundamentals API
Args:
database (str, optional): database to connect to. Defaults to 'xbrl'.
"""
database: str = "xbrl"
def ten_k(self, ticker_or_cik:Union[str,int], years:Union[int,Iterable[int]]) -> Form10K:
"""
get company 10-Ks for selected years
Args:
ticker_or_cik (Union[str,int]): company's ticker or SEC issued Central Index Key (CIK)
years (Union[int,Iterable[int]]): a year or iterable of years of interest
Returns:
Form10K: a representation of the company's 10Ks
"""
cik = ticker_or_cik_parser(ticker_or_cik)
if isinstance(years, int):
year_where_clause = f"fy={years}"
elif len(years) == 1:
year_where_clause = f"fy={years[0]}"
else:
year_where_clause = f"fy IN {tuple(years)}"
command = ("SELECT a.fy, a.fye, b.tag, b.value, b.uom, b.ddate "
"FROM num b JOIN sub a ON a.adsh=b.adsh "
f"WHERE cik={cik} AND form='10-K' AND {year_where_clause}")
return Form10K(pd.read_sql_query(command, self.engine))
def ten_q(
self,
ticker_or_cik:Union[str,int],
periods:Union[int,Iterable[int],Tuple[Union[str,int],Union[str,int]],
Iterable[Tuple[Union[str,int],Union[str,int]]]]
) -> Form10Q:
"""
get company 10-Qs for selected periods
Args:
ticker_or_cik (Union[str,int]): company's ticker or SEC issued Central Index Key (CIK)
periods (Union[int,Iterable[int],Tuple[Union[str,int],Union[str,int]],\
Iterable[Tuple[Union[str,int],Union[str,int]]]]): the year of interest, will get all
quarters; years of interest, will get all quarters; (year, quarter) pair;
(year, quarter) pairs of interest
Raises:
NestedDepthError: inputted periods is not one of the correct formats
Returns:
Form10Q: a representation of the company's 10Qs
Example:
Querying 10-Q reports for Apple Inc.::
df = instance.ten_q("AAPL", 2019)
df = instance.ten_q("AAPL", [2018, 2019])
df = instance.ten_q("AAPL", (2019, "q1"))
df = instance.ten_q("AAPL", [(2019, "q1"), (2019, "q2")]
"""
cik = ticker_or_cik_parser(ticker_or_cik)
depth = nested_depth(periods)
if depth == 0:
periods = [(periods, quarter) for quarter in ["q1", "q2", "q3", "q4"]]
elif depth == 1:
if isinstance(periods, tuple):
periods = [periods]
else:
periods = [(year,quarter) for year in periods for quarter in ["q1","q2","q3","q4"]]
elif depth > 2:
raise NestedDepthError(input_depth=depth, correct_depth=[0, 1, 2])
period_where_clause = "(" + \
"".join(
f"(a.fy={year} AND a.fp='{quarter}') OR " for year,quarter in periods)[:-4] + \
")"
command = ("SELECT a.fy, a.fp, b.tag, b.value, b.uom, b.ddate "
"FROM num b JOIN sub a ON a.adsh=b.adsh "
f"WHERE cik={cik} AND form='10-Q' AND {period_where_clause}")
return Form10Q(pd.read_sql_query(command, self.engine))
@dataclass
class Stocks(DatabaseConnection):
"""
Stocks API
Args:
database (str, optional): database to connect to. Defaults to 'stocks'.
"""
database: str = "stocks"
def price(
self,
ticker_or_cik:Union[str,int],
period_start:str,
period_end:str=None
) -> pd.DataFrame:
"""
queries the price of a given equity over a period
Args:
ticker_or_cik (Union[str,int]): company or fund's ticker or SEC
issued Central Index Key (CIK)
period_start (str): the starting day for the period,
in year-month-day format (e.g. 2020-06-30).
period_end (str, optional): the ending day for the period,
in year-month-day format (e.g. 2020-06-30). Defaults to None,
indicating a single day period_start.
Returns:
pd.DataFrame: a representation of the company's historical stock price
"""
cik = ticker_or_cik_parser(ticker_or_cik)
if period_end is None:
period_end = period_start
command = (f"SELECT * from historical_eod WHERE cik={cik} "
f"AND date BETWEEN '{period_start}' AND '{period_end}'")
return HistoricalStockPrices(pd.read_sql_query(command, self.engine))
def sector(self, ticker_or_cik:Union[str,int]) -> str:
"""
queries the sector of a given entity
Args:
ticker_or_cik (Union[str,int]): company or fund's ticker or SEC
issued Central Index Key (CIK)
Returns:
str: the sector
"""
cik = ticker_or_cik_parser(ticker_or_cik)
command = (f"SELECT sector from sector WHERE cik={cik}")
return pd.read_sql_query(command, self.engine).squeeze()
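# Usage sketch under assumptions: DatabaseConnection wires up `self.engine` from the
# `database` field, and the tickers and dates below are purely illustrative.
# fundamentals = Fundamentals()               # connects to the 'xbrl' database
# aapl_10k = fundamentals.ten_k("AAPL", years=[2018, 2019])
# stocks = Stocks()                           # connects to the 'stocks' database
# aapl_px = stocks.price("AAPL", "2020-01-02", "2020-06-30")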
|
StarcoderdataPython
|
9757324
|
import numpy as np
import pandas as pd
import statsmodels.api as sm
# Gaussian
algodao = pd.DataFrame({'percent' : [15, 20, 25, 30, 35,
15, 20, 25, 30, 35,
15, 20, 25, 30, 35,
15, 20, 25, 30, 35,
15, 20, 25, 30, 35 ],
'resist' : [7, 12, 14, 19, 7,
7, 17, 18, 25, 10,
15, 12, 18, 22, 11,
11, 18, 19, 19, 15,
9, 18, 19, 23, 11]})
# >>> algodao.head()
# percent resist
# 0 15 7
# 1 20 12
# 2 25 14
# 3 30 19
# 4 35 7
line_fit = sm.OLS(algodao['resist'], sm.add_constant(algodao['percent'], prepend=True)).fit()
print(line_fit.summary())
# >>> print(line_fit.summary())
# OLS Regression Results
# ==============================================================================
# Dep. Variable: resist R-squared: 0.053
# Model: OLS Adj. R-squared: 0.012
# Method: Least Squares F-statistic: 1.282
# Date: Mon, 25 Oct 2021 Prob (F-statistic): 0.269
# Time: 18:47:47 Log-Likelihood: -75.269
# No. Observations: 25 AIC: 154.5
# Df Residuals: 23 BIC: 157.0
# Df Model: 1
# Covariance Type: nonrobust
# ==============================================================================
# coef std err t P>|t| [0.025 0.975]
# ------------------------------------------------------------------------------
# const 10.9400 3.764 2.907 0.008 3.154 18.726
# percent 0.1640 0.145 1.132 0.269 -0.136 0.464
# ==============================================================================
# Omnibus: 2.066 Durbin-Watson: 1.932
# Prob(Omnibus): 0.356 Jarque-Bera (JB): 1.176
# Skew: -0.166 Prob(JB): 0.556
# Kurtosis: 1.991 Cond. No. 95.6
# ==============================================================================
#
# Notes:
# [1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
gauss = sm.GLM(algodao['resist'], sm.add_constant(algodao['percent'], prepend=True), family=sm.families.Gaussian())
gauss_results = gauss.fit()
print(gauss_results.summary())
# >>> print(gauss_results.summary())
# Generalized Linear Model Regression Results
# ==============================================================================
# Dep. Variable: resist No. Observations: 25
# Model: GLM Df Residuals: 23
# Model Family: Gaussian Df Model: 1
# Link Function: identity Scale: 26.232
# Method: IRLS Log-Likelihood: -75.269
# Date: Mon, 25 Oct 2021 Deviance: 603.34
# Time: 18:55:24 Pearson chi2: 603.
# No. Iterations: 3 Pseudo R-squ. (CS): 0.05318
# Covariance Type: nonrobust
# ==============================================================================
# coef std err z P>|z| [0.025 0.975]
# ------------------------------------------------------------------------------
# const 10.9400 3.764 2.907 0.004 3.563 18.317
# percent 0.1640 0.145 1.132 0.258 -0.120 0.448
# ==============================================================================
algodao['percent^2'] = np.power(algodao['percent'], 2)
# >>> algodao.head()
# percent resist percent^2
# 0 15 7 225
# 1 20 12 400
# 2 25 14 625
# 3 30 19 900
# 4 35 7 1225
gauss = sm.GLM(algodao['resist'], sm.add_constant(algodao[['percent', 'percent^2']], prepend=True), family=sm.families.Gaussian())
gauss_results = gauss.fit()
print(gauss_results.summary())
# >>> print(gauss_results.summary())
# Generalized Linear Model Regression Results
# ==============================================================================
# Dep. Variable: resist No. Observations: 25
# Model: GLM Df Residuals: 22
# Model Family: Gaussian Df Model: 2
# Link Function: identity Scale: 11.824
# Method: IRLS Log-Likelihood: -64.752
# Date: Mon, 25 Oct 2021 Deviance: 260.13
# Time: 19:02:22 Pearson chi2: 260.
# No. Iterations: 3 Pseudo R-squ. (CS): 0.7227
# Covariance Type: nonrobust
# ==============================================================================
# coef std err z P>|z| [0.025 0.975]
# ------------------------------------------------------------------------------
# const -39.9886 9.785 -4.087 0.000 -59.166 -20.811
# percent 4.5926 0.828 5.549 0.000 2.970 6.215
# percent^2 -0.0886 0.016 -5.388 0.000 -0.121 -0.056
# ==============================================================================
cypemethrin = pd.DataFrame({ 'Dose' : np.concatenate([
np.repeat(1, 20), np.repeat(2, 20), np.repeat(4, 20), np.repeat(8, 20), np.repeat(16, 20), np.repeat(32, 20),
np.repeat(1, 20), np.repeat(2, 20), np.repeat(4, 20), np.repeat(8, 20), np.repeat(16, 20), np.repeat(32, 20)]),
'mortos' : np.concatenate([
np.repeat(1, 1) , np.repeat(0, 19),
np.repeat(1, 4) , np.repeat(0, 16),
np.repeat(1, 9) , np.repeat(0, 20-9),
np.repeat(1, 13), np.repeat(0, 20-13),
np.repeat(1, 18), np.repeat(0, 20-18),
np.repeat(1, 20), np.repeat(0, 20-20),
np.repeat(1, 0), np.repeat(0, 20-0),
np.repeat(1, 2), np.repeat(0, 20-2),
np.repeat(1, 6), np.repeat(0, 20-6),
np.repeat(1, 10), np.repeat(0, 20-10),
np.repeat(1, 12), np.repeat(0, 20-12),
np.repeat(1, 16), np.repeat(0, 20-16)]),
'sexo' : np.concatenate([np.repeat('F', 20*6), np.repeat('M', 20*6)])})
# >>> cypemethrin.head()
# Dose mortos sexo
# 0 1 1 F
# 1 1 0 F
# 2 1 0 F
# 3 1 0 F
# 4 1 0 F
cypemethrin['logDose'] = np.log2(cypemethrin.Dose)
# >>> cypemethrin.tail()
# Dose mortos sexo logDose
# 235 32 1 M 5.0
# 236 32 0 M 5.0
# 237 32 0 M 5.0
# 238 32 0 M 5.0
# 239 32 0 M 5.0
cypemethrin['sexo'] = pd.get_dummies(data=cypemethrin['sexo'], drop_first=True)
# >>> cypemethrin.head()
# Dose mortos sexo logDose
# 0 1 1 0 0.0
# 1 1 0 0 0.0
# 2 1 0 0 0.0
# 3 1 0 0 0.0
# 4 1 0 0 0.0
glm_binom = sm.GLM(cypemethrin['mortos'], sm.add_constant(cypemethrin[['logDose', 'sexo']]), family=sm.families.Binomial())
res = glm_binom.fit()
print(res.summary())
# >>> print(res.summary())
# Generalized Linear Model Regression Results
# ==============================================================================
# Dep. Variable: mortos No. Observations: 240
# Model: GLM Df Residuals: 237
# Model Family: Binomial Df Model: 2
# Link Function: logit Scale: 1.0000
# Method: IRLS Log-Likelihood: -106.62
# Date: Mon, 25 Oct 2021 Deviance: 213.24
# Time: 19:25:46 Pearson chi2: 213.
# No. Iterations: 5 Pseudo R-squ. (CS): 0.3887
# Covariance Type: nonrobust
# ==============================================================================
# coef std err z P>|z| [0.025 0.975]
# ------------------------------------------------------------------------------
# const -2.3724 0.386 -6.154 0.000 -3.128 -1.617
# logDose 1.0642 0.131 8.119 0.000 0.807 1.321
# sexo -1.1007 0.356 -3.093 0.002 -1.798 -0.403
# ==============================================================================
cypemethrin = pd.DataFrame({
'mortos' : [1, 4, 9, 13, 18, 20, 0, 2, 6, 10, 12, 16],
'Nmortos' : [19, 16, 11, 7, 2, 0, 20, 18, 14, 10, 8, 4],
'sexo': [1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0 ],
'Dose' : [1, 2, 4, 8, 16, 32, 1, 2, 4, 8, 16, 32 ]})
# >>> cypemethrin
# mortos Nmortos sexo Dose
# 0 1 19 1 1
# 1 4 16 1 2
# 2 9 11 1 4
# 3 13 7 1 8
# 4 18 2 1 16
# 5 20 0 1 32
# 6 0 20 0 1
# 7 2 18 0 2
# 8 6 14 0 4
# 9 10 10 0 8
# 10 12 8 0 16
# 11 16 4 0 32
cypemethrin['logDose'] = np.log2(cypemethrin.Dose)
glm_binom = sm.GLM(cypemethrin[['mortos', 'Nmortos']], sm.add_constant(cypemethrin[['logDose', 'sexo']]), family=sm.families.Binomial())
res = glm_binom.fit()
print(res.summary())
# >>> print(res.summary())
# Generalized Linear Model Regression Results
# =================================================================================
# Dep. Variable: ['mortos', 'Nmortos'] No. Observations: 12
# Model: GLM Df Residuals: 9
# Model Family: Binomial Df Model: 2
# Link Function: logit Scale: 1.0000
# Method: IRLS Log-Likelihood: -18.434
# Date: Mon, 25 Oct 2021 Deviance: 6.7571
# Time: 19:56:12 Pearson chi2: 5.31
# No. Iterations: 5 Pseudo R-squ. (CS): 0.9999
# Covariance Type: nonrobust
# ==============================================================================
# coef std err z P>|z| [0.025 0.975]
# ------------------------------------------------------------------------------
# const -3.4732 0.469 -7.413 0.000 -4.391 -2.555
# logDose 1.0642 0.131 8.119 0.000 0.807 1.321
# sexo 1.1007 0.356 3.093 0.002 0.403 1.798
# ==============================================================================
# Quedas = pd.DataFrame({
# 'Quedas' : [1, 1, 2, 0, 2],
# 'Intervencao' : [1, 1, 1, 1, 1],
# 'Sexo' : [0, 0, 1, 1, 0],
# 'Balanco' : [45, 62, 43, 76, 51],
# 'Forca' : [70, 66, 64, 48, 72]})
Quedas = pd.DataFrame(np.loadtxt('https://www.ime.usp.br/~giapaula/geriatra.dat', unpack = True).T, columns = ['Quedas'
,'Intervencao'
,'Sexo'
,'Balanco'
,'Forca'], dtype= np.int8)
# >>> Quedas
# Quedas Intervencao Sexo Balanco Forca
# 0 1 1 0 45 70
# 1 1 1 0 62 66
# 2 2 1 1 43 64
# 3 0 1 1 76 48
# 4 2 1 0 51 72
# .. ... ... ... ... ...
# 95 5 0 1 76 46
# 96 2 0 1 33 55
# 97 4 0 0 69 48
# 98 4 0 1 50 52
# 99 2 0 0 37 56
#
# [100 rows x 5 columns]
glm_poisson = sm.GLM(Quedas['Quedas'], sm.add_constant(Quedas[['Intervencao' ,'Sexo' ,'Balanco' ,'Forca']]),
family=sm.families.Poisson())
res = glm_poisson.fit()
print(res.summary())
# >>> print(res.summary())
# Generalized Linear Model Regression Results
# ==============================================================================
# Dep. Variable: Quedas No. Observations: 100
# Model: GLM Df Residuals: 95
# Model Family: Poisson Df Model: 4
# Link Function: log Scale: 1.0000
# Method: IRLS Log-Likelihood: -183.64
# Date: Mon, 25 Oct 2021 Deviance: 108.79
# Time: 20:13:51 Pearson chi2: 106.
# No. Iterations: 5 Pseudo R-squ. (CS): 0.5951
# Covariance Type: nonrobust
# ===============================================================================
# coef std err z P>|z| [0.025 0.975]
# -------------------------------------------------------------------------------
# const 0.4895 0.337 1.453 0.146 -0.171 1.150
# Intervencao -1.0694 0.133 -8.031 0.000 -1.330 -0.808
# Sexo -0.0466 0.120 -0.388 0.698 -0.282 0.189
# Balanco 0.0095 0.003 3.207 0.001 0.004 0.015
# Forca 0.0086 0.004 1.986 0.047 0.000 0.017
# ===============================================================================
# How to predict
res.predict([ 1, 1, 0, 45, 70])
#array([1.56177349])
# does the math still check out?
np.exp(((res.params)*[ 1, 1, 0, 45, 70]).sum())
# 1.561773493060502
# >>> res.params
# const 0.489467
# Intervencao -1.069403
# Sexo -0.046606
# Balanco 0.009470
# Forca 0.008566
# dtype: float64
# >>> (res.params)*[ 1, 1, 0, 45, 70]
# const 0.489467
# Intervencao -1.069403
# Sexo -0.000000
# Balanco 0.426149
# Forca 0.599608
# dtype: float64
# >>> ((res.params)*[ 1, 1, 0, 45, 70]).sum()
# 0.44582203005765614
|
StarcoderdataPython
|
368750
|
#?install ~/.pdbrc.py -*-python-*-
#
# pdbrc.py - Advanced configuration for the Python debugger
#
# https://wiki.python.org/moin/PdbRcIdea
# Readline: see also pythonrc
try:
import readline
except ImportError:
print("Warning: readline module not available")
else:
# FIXME: improve completion
# Use a history file
import os, atexit
histfile = os.path.expanduser('~/.pdb_history')
try:
readline.read_history_file(histfile)
except IOError:
pass
# Write history file at exit. FIXME: merge history file
atexit.register(readline.write_history_file, histfile)
del histfile, atexit, os
readline.set_history_length(500)
del readline
|
StarcoderdataPython
|
12865533
|
<reponame>WWGolay/iota
#!/usr/bin/python
import pycurl
from io import BytesIO
def checkOpen():
isOpen = False
buffer = BytesIO()
c = pycurl.Curl()
c.setopt(c.URL, 'https://www.winer.org/Site/Roof.php')
c.setopt(c.WRITEDATA, buffer)
c.perform()
c.close()
body = buffer.getvalue()
if body.find(b'ROOFPOSITION=OPEN') > -1:
isOpen = True
    return isOpen
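# Example call (hypothetical): poll the roof state before starting an imaging run.
# if checkOpen():
#     print("Roof is open; observations can proceed.")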
|
StarcoderdataPython
|
8181360
|
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from .forms import UserCreationForm
from .models import MyUser
# Register your models here.
class UserAdmin(BaseUserAdmin):
add_form = UserCreationForm
list_display = ('username', 'email', 'is_staff', 'is_admin')
list_filter = ('is_admin', )
fieldsets = (
(None, {'fields': ('username', 'email', 'password')}),
('Permissions', {'fields': ('is_staff', 'is_admin',)})
)
search_fields = ('username', 'email')
ordering = ('username', 'email')
filter_horizontal = ()
admin.site.register(MyUser, UserAdmin)
admin.site.unregister(Group)
|
StarcoderdataPython
|
4943628
|
import collections
from typing import List
class Solution:
def numSubmatrixSumTarget(self, matrix: List[List[int]], target: int) -> int:
presum = [[0] * (len(matrix[0]) + 1) for _ in range(len(matrix))]
for i in range(len(matrix)):
for j in range(len(matrix[0])):
presum[i][j + 1] = presum[i][j] + matrix[i][j]
total = 0
for j in range(len(matrix[0])):
for i in range(j + 1):
curr = 0
table = collections.Counter([0])
for k in range(len(matrix)):
curr += presum[k][j + 1] - presum[k][i]
total += table[curr - target]
table[curr] += 1
return total
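# Quick check against the known LeetCode 1074 example (illustrative):
# Solution().numSubmatrixSumTarget([[0,1,0],[1,1,1],[0,1,0]], 0)  # -> 4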
|
StarcoderdataPython
|
6509139
|
<reponame>chiranjeevbitp/Python27new<gh_stars>0
from matplotlib import pyplot as plt
plt.bar([1,3,5,7,9],[5,7,2,8,2],label='line one')
plt.bar([2,4,6,8,10],[8,6,2,5,4],label='line two',color='b')
plt.title('Epic info')
plt.ylabel('Y axis')
plt.xlabel('X axis')
plt.legend()
#plt.grid(True,color='g')
plt.show()
|
StarcoderdataPython
|
6688263
|
import os
import sys
def setup_env(env, version):
if sys.platform == 'darwin':
core_dir = '/Applications/redshift'
else:
core_dir = '/usr/redshift'
module_dir = os.path.join(core_dir, 'redshift4maya')
version_dir = os.path.join(module_dir, version)
if not os.path.exists(version_dir):
return
# These variables are based off of the `redshift4maya.mod.template`
# installed by the 2.6 versions::
#
# + MAYAVERSION:2018 redshift4maya any /Applications/redshift/redshift4maya
# scripts: common/scripts
# icons: common/icons
# plug-ins: 2018
# REDSHIFT_COREDATAPATH = /Applications/redshift
# MAYA_CUSTOM_TEMPLATE_PATH +:= common/scripts/NETemplates
# REDSHIFT_MAYAEXTENSIONSPATH +:= 2018/extensions
# REDSHIFT_PROCEDURALSPATH += $REDSHIFT_COREDATAPATH/procedurals
# See: http://help.autodesk.com/view/MAYAUL/2017/ENU/?guid=__files_GUID_130A3F57_2A5D_4E56_B066_6B86F68EEA22_htm
env.append('MAYA_SCRIPT_PATH', os.path.join(module_dir, 'common/scripts'))
env.append('XBMLANGPATH', os.path.join(module_dir, 'common/icons'))
env.append('MAYA_PLUG_IN_PATH', version_dir)
env['REDSHIFT_COREDATAPATH'] = core_dir
env.append('MAYA_CUSTOM_TEMPLATE_PATH', os.path.join(module_dir, 'common/scripts/NETemplates'))
env.append('REDSHIFT_MAYAEXTENSIONSPATH', os.path.join(version_dir, 'extensions'))
env.append('REDSHIFT_PROCEDURALSPATH', os.path.join(core_dir, 'procedurals'))
# This isn't in the .mod file, but it is handy anyways.
env.append('MAYA_RENDER_DESC_PATH', os.path.join(module_dir, 'common/rendererDesc')) # For `Render -r redshift`.
|
StarcoderdataPython
|
6401841
|
<filename>api/plugins/memory.py
from cmdb import models
from django.db import transaction
class Memory(object):
def __init__(self, server_obj, info, u_obj=None):
self.server_obj = server_obj
self.mem_dict = info
self.u_obj = u_obj
def process(self):
new_mem_info_dict = self.mem_dict['data']
"""
{'data':
'ChannelA-DIMM0':
{'speed': 'Unknown', 'capacity': 0, 'sn': '[Empty]', 'slot': 'ChannelA-DIMM0', 'model': 'Unknown', 'manufacturer': '[Empty]'},
'ChannelA-DIMM1':
{'speed': 'Unknown', 'capacity': 0, 'sn': '[Empty]', 'slot': 'ChannelA-DIMM1', 'model': 'Unknown', 'manufacturer': '[Empty]'},
'ChannelB-DIMM1':
{'speed': '1600 MHz', 'capacity': 8192, 'sn': '78166EEA', 'slot': 'ChannelB-DIMM1', 'model': 'DDR3', 'manufacturer': 'Kingston'}},
'status': True,
'msg': None},
"""
db_mem_obj_list = self.server_obj.memory.all()
"""
[
obj,
obj,
obj,
]
"""
new_mem_set = set(new_mem_info_dict.keys())
old_mem_set = {obj.slot for obj in db_mem_obj_list}
# add_slot_list = new_disk_slot_set - old_disk_slot_set
add_mem_list = new_mem_set.difference(old_mem_set)
del_mem_list = old_mem_set.difference(new_mem_set)
update_mem_list = old_mem_set.intersection(new_mem_set)
# add_record_list = []
        # Add [2,5]
if add_mem_list:
for slot in add_mem_list:
value = new_mem_info_dict[slot]
self.add_mem(value)
# for slot in add_slot_list:
# value = new_disk_info_dict[slot]
        # tmp = "Added disk slot {0} to {1}".format(slot,self.server_obj.hostname)
# add_record_list.append(tmp)
# value['server_obj'] = self.server_obj
# models.Disk.objects.create(**value)
        # Delete [4,6]
if del_mem_list:
self.del_mem(del_mem_list)
# models.Disk.objects.filter(server_obj=self.server_obj, slot__in=del_slot_list).delete()
        # Update [7,8]
if update_mem_list:
for slot in update_mem_list:
value = new_mem_info_dict[slot]
self.update_mem(value)
# for slot in update_slot_list:
# value = new_disk_info_dict[
# slot] # {'slot': '0', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE ST300MM0006 LS08S0K2B5NV'}
# obj = models.Disk.objects.filter(server_obj=self.server_obj, slot=slot).first()
# for k, new_val in value.items():
# old_val = getattr(obj, k)
# if old_val != new_val:
# setattr(obj, k, new_val)
# obj.save()
def add_mem(self, val_dict):
try:
with transaction.atomic():
record = "添加内存{0}至{1}".format(val_dict['slot'], self.server_obj.manage_ip)
val_dict['server_obj'] = self.server_obj
models.Memory.objects.create(**val_dict)
models.ServerRecord.objects.create(server_obj=self.server_obj,
content=record,
creator=self.u_obj)
except Exception as e:
print(e)
def del_mem(self, del_mem_list):
try:
with transaction.atomic():
record = "删除内存:{0}从{1}".format(del_mem_list, self.server_obj.manage_ip)
models.Memory.objects.filter(server_obj=self.server_obj,
slot__in=del_mem_list).delete()
models.ServerRecord.objects.create(server_obj=self.server_obj,
content=record,
creator=self.u_obj)
except Exception as e:
print(e)
def update_mem(self, val_dict):
# {'slot': '0', 'pd_type': 'SAS', 'capacity': '279.396', 'model': 'SEAGATE ST300MM0006 LS08S0K2B5NV'}
obj = models.Memory.objects.filter(server_obj=self.server_obj,
slot=val_dict['slot']).first()
record_list = []
try:
with transaction.atomic():
for k, new_val in val_dict.items():
old_val = getattr(obj, k)
if old_val != new_val:
record = "[%s]:[%s]的[%s]由[%s]变更为[%s]" % (self.server_obj.manage_ip,
val_dict['slot'], k, old_val,
new_val)
record_list.append(record)
setattr(obj, k, new_val)
obj.save()
if record_list:
models.ServerRecord.objects.create(server_obj=self.server_obj,
content=';\n'.join(record_list),
creator=self.u_obj)
except Exception as e:
print(e)
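# Hedged usage sketch (hypothetical objects: assumes a cmdb Server row exists
# and `report_payload` is shaped like the example dict in process()):
#
#   server = models.Server.objects.get(hostname="web-01")
#   Memory(server, info=report_payload, u_obj=request.user).process()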
|
StarcoderdataPython
|
5166575
|
<gh_stars>1-10
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
class Profile(models.Model):
bio = models.TextField(blank=True)
birth_date = models.DateTimeField(null=True, blank=True)
country = models.CharField(max_length=100, blank=True)
job = models.CharField(max_length=100, blank=True)
# add photo field
    # A string default is not a valid FK value; the post_save receiver below
    # creates the related Profile instead.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
def __str__(self):
return f"<Profile: {self.user.first_name}>"
class SocialAccount(models.Model):
SOCIAL_ACCOUNTS = [
('FA', 'Facebook'),
('TW', 'Twitter'),
('GI', 'Github'),
('CO', 'Coretabs'),
('NO', 'None')
]
account_type = models.CharField(max_length=2, choices=SOCIAL_ACCOUNTS, default='NO')
account_link = models.URLField(max_length=200)
profile = models.ForeignKey(Profile, on_delete=models.CASCADE, related_name="social_accounts")
def __str__(self):
return f"<Social Account: {self.account_type}>"
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
instance.profile.save()
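# Hedged sketch: because of the post_save receivers above, saving a new User
# also creates and saves its Profile (assumes a configured Django project):
#
#   user = User.objects.create(username="demo")
#   user.profile  # auto-created by create_user_profile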
|
StarcoderdataPython
|
6428761
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from accounts.models import User
from .models import Organization
class OrganizationRegistrationForm(forms.ModelForm):
    class Meta:
        model = Organization
        fields = '__all__'  # ModelForm requires explicit fields/exclude on modern Django
class OwnerRegistrationForm(forms.ModelForm):
password1 = forms.CharField(
label=_(u'Password'), widget=forms.PasswordInput()
)
password2 = forms.CharField(
label=_(u'Password confirmation'), widget=forms.PasswordInput(),
)
class Meta:
model = User
fields = ['email', 'full_name', 'login']
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
_(u"The two password fields didn't match.")
)
return password2
    def save(self, commit=True):
        user = super(OwnerRegistrationForm, self).save(commit=False)
        user.email = User.objects.normalize_email(user.email)
        # The dump redacted the key as '<PASSWORD>'; 'password1' is the usual
        # Django field here (equal to password2 after clean_password2).
        user.set_password(self.cleaned_data['password1'])
        if commit:
            user.save()
        return user
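# Hedged usage sketch for OwnerRegistrationForm (hypothetical field values;
# assumes the custom accounts.User model):
#
#   form = OwnerRegistrationForm(data={
#       "email": "owner@example.com", "full_name": "Demo Owner",
#       "login": "demo", "password1": "s3cret!", "password2": "s3cret!",
#   })
#   if form.is_valid():
#       owner = form.save()  # password is hashed via set_password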
class InviteForm(forms.ModelForm):
class Meta:
model = User
fields = ['full_name', 'login', 'email']
def __init__(self, *args, **kwargs):
self.organization = kwargs.pop('organization')
super(InviteForm, self).__init__(*args, **kwargs)
def clean_email(self):
email = self.cleaned_data.get('email')
qs = self.organization.members.filter(email__iexact=email)
if qs.exists():
raise forms.ValidationError(_(u'This email already registered'))
return email
def save(self, commit=True):
user = super(InviteForm, self).save(commit=False)
user.organization = self.organization
user.set_unusable_password()
if commit:
user.save()
return user
class OrganizationForm(forms.ModelForm):
class Meta:
model = Organization
fields = ['name']
class OrganisationLogoForm(forms.ModelForm):
class Meta:
model = Organization
fields = ['logo']
|
StarcoderdataPython
|
306494
|
<filename>tools/record_create.py
"""
Tool: Record responses of creating a gist for unit test use
"""
import json
import pathlib
from typing import Dict
from typing import NamedTuple
from egggist.egggist import EggGist
from egggist.egggist import File
FILE_PATH = "tests/fixtures"
FILE_SUCCESS = "create_success.json"
FILE_FAIL = "create_fail_token.json"
TEST_FILE = "test_file.md"
TEST_CONTENT = "# Test Gist"
class Secrets(NamedTuple):
"""Hold our secrets"""
username: str
usertoken: str
def build_client() -> EggGist:
"""Fixture of our client"""
if not pathlib.Path(".env").exists():
raise FileNotFoundError("'.env' file required, check module docstring")
input_file = open(".env", "r", encoding="utf-8").read()
secrets = Secrets(**load_secrets(input_file))
gist_client = EggGist(check_config=False)
gist_client.config.username = secrets.username
gist_client.config.usertoken = secrets.usertoken
return gist_client
def load_secrets(input_file: str) -> Dict[str, str]:
"""Parses env file for required values"""
values: Dict[str, str] = {}
for line in input_file.split("\n"):
if not line or line.strip().startswith("#") or len(line.split("=", 1)) != 2:
continue
        key, value = line.split("=", 1)
        values[key.strip().lower()] = value.strip()
return values
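# Example (hypothetical .env contents):
#   load_secrets("username=alice\nusertoken=abc123\n# a comment\n")
#   -> {"username": "alice", "usertoken": "abc123"}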
def record_successful_create() -> None:
"""Record success"""
filepath = pathlib.Path(FILE_PATH, FILE_SUCCESS)
if filepath.exists():
print(f"Skipping 'record_successful_create, file exists: {filepath}")
return
gist_client = build_client()
gist_client.files.append(File("test1.md", "# Test 1"))
gist_client.files.append(File("test2.md", "# Test 2"))
result = gist_client.post_gist()
assert result is not None
open(filepath, "w").write(json.dumps(result.as_dict, indent=4))
if __name__ == "__main__":
if not pathlib.Path(FILE_PATH).exists():
raise ValueError(f"Missing '{FILE_PATH}' path")
record_successful_create()
|
StarcoderdataPython
|
5141128
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import argparse
import pandas as pd
from lib.programme_data import *
def filter_data(all_data):
"""
Remove all the students not applying for this current year
"""
# Remove all students not from this academic year
YEAR_COLUMN = 'Entry Year'
THIS_YEAR = '2018/9'
filtered_data = all_data.loc[all_data[YEAR_COLUMN] == THIS_YEAR]
return filtered_data
def filter_students_by_status(all_data, status):
"""
Filter the data to all students with the given status
"""
# Select all students with the given registration status
STATUS_COLUMN = 'Dec/'
return all_data.loc[all_data[STATUS_COLUMN] == status]
def group_and_count_by_status(all_data):
"""
Group by programme code and registration status and then
count how many of each combination there are
"""
grouped = all_data.groupby(['Prog Code', 'Dec/', 'App Cat']).size()
grouped = grouped.sort_index()
return grouped
def print_no_breakdown(grouped_data, statuses_to_print):
"""
Dump a count of each programme (grouped by 'real' programme)
and status to command line
"""
for prog_name in prog_names_short:
prog_codes = prog_name_short_2_prog_codes[prog_name]
total = 0
home = 0
international = 0
for status in statuses_to_print:
for prog_code in prog_codes:
                if prog_code in grouped_data.index:
                    if status in grouped_data.loc[prog_code]:
                        status_counts = grouped_data.loc[prog_code][status]
                        # 'H' is assumed to mean home and 'O' overseas in the
                        # App Cat codes; the original added 'O' to home and 'H'
                        # to international, which looks inverted.
                        if 'H' in status_counts:
                            total += status_counts['H']
                            home += status_counts['H']
                        if 'O' in status_counts:
                            total += status_counts['O']
                            international += status_counts['O']
print('-----------------------------------------------------')
print('%s (all): %d' % (prog_name, total))
print('%s (home): %d' % (prog_name, home))
print('%s (international): %d' % (prog_name, international))
print('-----------------------------------------------------')
def print_no_breakdown_all_statuses(grouped_data):
print_no_breakdown(grouped_data, application_statuses)
def print_no_breakdown_only_unconditional_and_conditional_firm(grouped_data):
print_no_breakdown(grouped_data, ['UF', 'CFUF', 'CF'])
def print_no_breakdown_only_unconditional_firm(grouped_data):
print_no_breakdown(grouped_data, ['UF', 'CFUF'])
def output_csvs(all_data):
"""
Write out csv files containing the registration status counts for
each programme code and also grouped by course title
"""
grouped = all_data.groupby(['Prog Code', 'App Cat'])['Dec/'].value_counts()
grouped = grouped.sort_index()
csv_data = grouped.unstack(fill_value=0)
no_breakdown_data = csv_data.groupby([prog_codes_2_prog_name_short, 'App Cat'], level=[0, 1]).sum()
with open('application_status_per_programme_from_lotoap.csv', 'w') as output_file:
no_breakdown_data.to_csv(output_file)
def main():
# read in filename as command line argument
parser = argparse.ArgumentParser(description='Analysing MSc numbers in SIMS')
parser.add_argument('-i', '--input', help='Input file to be analysed', required=True, action='store')
parser.add_argument('-u', '--unconditional', help='show unconditional firm students only', action='store_true')
parser.add_argument('-l', '--conditional', help='show unconditional and conditional firm students', action='store_true')
parser.add_argument('-c', '--csv', help='output csv of application status per programme', action='store_true')
args = parser.parse_args()
# open and read the input file
input_file = os.path.join(os.getcwd(), args.input)
with open(input_file, 'r') as raw_data_file:
sims_data = pd.read_csv(raw_data_file)
# restrict it to this academic year block 1
filtered_data = filter_data(sims_data)
grouped = group_and_count_by_status(filtered_data)
if not args.unconditional:
print_no_breakdown_all_statuses(grouped)
elif args.unconditional and not args.conditional:
print_no_breakdown_only_unconditional_firm(grouped)
elif args.unconditional and args.conditional:
print_no_breakdown_only_unconditional_and_conditional_firm(grouped)
if args.csv:
output_csvs(filtered_data)
if __name__ == '__main__':
main()
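# Example invocation (hypothetical script/export file names):
#   python msc_numbers.py -i sims_export.csv --unconditional --csv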
|
StarcoderdataPython
|
6562726
|
# View more python learning tutorial on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
"""
Please note, this code is only for python 3+. If you are using python 2+, please modify the code accordingly.
"""
from __future__ import print_function
from sklearn.model_selection import learning_curve
from sklearn.datasets import load_digits
from sklearn.svm import SVC
import matplotlib.pyplot as plt
import numpy as np
digits = load_digits()
X = digits.data
y = digits.target
train_sizes, train_loss, test_loss= learning_curve(
SVC(gamma=0.01), X, y, cv=10, scoring='neg_mean_squared_error',
train_sizes=[0.1, 0.25, 0.5, 0.75, 1])
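# scikit-learn scorers are maximized, so 'neg_mean_squared_error' returns the
# negated MSE; flip the sign below to plot a conventional positive loss.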
train_loss_mean = -np.mean(train_loss, axis=1)
test_loss_mean = -np.mean(test_loss, axis=1)
plt.plot(train_sizes, train_loss_mean, 'o-', color="r",
label="Training")
plt.plot(train_sizes, test_loss_mean, 'o-', color="g",
label="Cross-validation")
plt.xlabel("Training examples")
plt.ylabel("Loss")
plt.legend(loc="best")
plt.show()
|
StarcoderdataPython
|
1692163
|
import os
from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping, ProgressBarBase, ProgressBar
from pytorch_lightning.utilities.exceptions import MisconfigurationException
class CallbackConnector:
def __init__(self, trainer):
self.trainer = trainer
def on_trainer_init(
self,
callbacks,
early_stop_callback,
checkpoint_callback,
progress_bar_refresh_rate,
process_position,
default_root_dir,
weights_save_path,
resume_from_checkpoint
):
self.trainer.resume_from_checkpoint = resume_from_checkpoint
# init folder paths for checkpoint + weights save callbacks
self.trainer._default_root_dir = default_root_dir or os.getcwd()
self.trainer._weights_save_path = weights_save_path or self.trainer._default_root_dir
# init callbacks
self.trainer.callbacks = callbacks or []
# configure early stop callback
# creates a default one if none passed in
early_stop_callback = self.configure_early_stopping(early_stop_callback)
if early_stop_callback:
self.trainer.callbacks.append(early_stop_callback)
# configure checkpoint callback
# it is important that this is the last callback to run
# pass through the required args to figure out defaults
checkpoint_callback = self.init_default_checkpoint_callback(checkpoint_callback)
if checkpoint_callback:
self.trainer.callbacks.append(checkpoint_callback)
# TODO refactor codebase (tests) to not directly reach into these callbacks
self.trainer.checkpoint_callback = checkpoint_callback
self.trainer.early_stop_callback = early_stop_callback
# init progress bar
self.trainer._progress_bar_callback = self.configure_progress_bar(
progress_bar_refresh_rate, process_position
)
def init_default_checkpoint_callback(self, checkpoint_callback):
if checkpoint_callback is True:
checkpoint_callback = ModelCheckpoint(filepath=None)
elif checkpoint_callback is False:
checkpoint_callback = None
if checkpoint_callback:
checkpoint_callback.save_function = self.trainer.save_checkpoint
return checkpoint_callback
    def configure_early_stopping(self, early_stop_callback):
        # `early_stop_callback is True or None` only ever tested the first
        # operand; spell out both cases so None also yields the default
        # callback, as the call-site comment ("creates a default one if none
        # passed in") intends.
        if early_stop_callback is True or early_stop_callback is None:
            early_stop_callback = EarlyStopping(
                monitor='early_stop_on',
                patience=3,
                strict=True,
                verbose=True,
                mode='min'
            )
        elif not early_stop_callback:
            early_stop_callback = None
        return early_stop_callback
def configure_progress_bar(self, refresh_rate=1, process_position=0):
progress_bars = [c for c in self.trainer.callbacks if isinstance(c, ProgressBarBase)]
if len(progress_bars) > 1:
raise MisconfigurationException(
'You added multiple progress bar callbacks to the Trainer, but currently only one'
' progress bar is supported.'
)
elif len(progress_bars) == 1:
progress_bar_callback = progress_bars[0]
elif refresh_rate > 0:
progress_bar_callback = ProgressBar(
refresh_rate=refresh_rate,
process_position=process_position,
)
self.trainer.callbacks.append(progress_bar_callback)
else:
progress_bar_callback = None
return progress_bar_callback
|
StarcoderdataPython
|
1868277
|
import time
from conans import load
from conans.errors import ConanException, NotFoundException
from conans.model.ref import PackageReference, ConanFileReference
from conans.util.log import logger
from conans.client.source import complete_recipe_sources
from conans.search.search import search_recipes, search_packages
def _is_a_reference(ref):
try:
ConanFileReference.loads(ref)
return "*" not in ref # If is a pattern, it is not a reference
except ConanException:
pass
return False
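# Example: _is_a_reference("pkg/1.0@user/stable") -> True
#          _is_a_reference("pkg/*@user/stable") -> False (a pattern, not a reference)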
UPLOAD_POLICY_FORCE = "force-upload"
UPLOAD_POLICY_NO_OVERWRITE = "no-overwrite"
UPLOAD_POLICY_NO_OVERWRITE_RECIPE = "no-overwrite-recipe"
UPLOAD_POLICY_SKIP = "skip-upload"
class CmdUpload(object):
def __init__(self, client_cache, user_io, remote_manager, registry, loader, plugin_manager):
self._client_cache = client_cache
self._user_io = user_io
self._remote_manager = remote_manager
self._registry = registry
self._loader = loader
self._plugin_manager = plugin_manager
def upload(self, recorder, reference_or_pattern, package_id=None, all_packages=None,
confirm=False, retry=0, retry_wait=0, integrity_check=False, policy=None,
remote_name=None, query=None):
"""If package_id is provided, conan_reference_or_pattern is a ConanFileReference"""
if package_id and not _is_a_reference(reference_or_pattern):
raise ConanException("-p parameter only allowed with a valid recipe reference, "
"not with a pattern")
t1 = time.time()
if package_id or _is_a_reference(reference_or_pattern): # Upload package
ref = ConanFileReference.loads(reference_or_pattern)
references = [ref, ]
confirm = True
else:
references = search_recipes(self._client_cache, reference_or_pattern)
if not references:
raise NotFoundException(("No packages found matching pattern '%s'" %
reference_or_pattern))
for conan_ref in references:
upload = True
if not confirm:
msg = "Are you sure you want to upload '%s'?" % str(conan_ref)
upload = self._user_io.request_boolean(msg)
if upload:
try:
conanfile_path = self._client_cache.conanfile(conan_ref)
conan_file = self._loader.load_class(conanfile_path)
except NotFoundException:
raise NotFoundException(("There is no local conanfile exported as %s" %
str(conan_ref)))
if all_packages:
packages_ids = self._client_cache.conan_packages(conan_ref)
elif query:
packages = search_packages(self._client_cache, conan_ref, query)
packages_ids = list(packages.keys())
elif package_id:
packages_ids = [package_id, ]
else:
packages_ids = []
self._upload(conan_file, conan_ref, packages_ids, retry, retry_wait,
integrity_check, policy, remote_name, recorder)
logger.debug("====> Time manager upload: %f" % (time.time() - t1))
def _upload(self, conan_file, conan_ref, packages_ids, retry, retry_wait,
integrity_check, policy, remote_name, recorder):
"""Uploads the recipes and binaries identified by conan_ref"""
defined_remote = self._registry.get_recipe_remote(conan_ref)
if remote_name: # If remote_name is given, use it
upload_remote = self._registry.remote(remote_name)
elif defined_remote: # Else, if the package had defined a remote, use it
upload_remote = defined_remote
else: # Or use the default otherwise
upload_remote = self._registry.default_remote
conanfile_path = self._client_cache.conanfile(conan_ref)
self._plugin_manager.execute("pre_upload", conanfile_path=conanfile_path,
reference=conan_ref, remote=upload_remote)
if policy != UPLOAD_POLICY_FORCE:
self._check_recipe_date(conan_ref, upload_remote)
self._user_io.out.info("Uploading %s to remote '%s'" % (str(conan_ref), upload_remote.name))
self._upload_recipe(conan_ref, retry, retry_wait, policy, upload_remote)
recorder.add_recipe(str(conan_ref), upload_remote.name, upload_remote.url)
if packages_ids:
# Can't use build_policy_always here because it's not loaded (only load_class)
if conan_file.build_policy == "always":
raise ConanException("Conanfile has build_policy='always', "
"no packages can be uploaded")
total = len(packages_ids)
for index, package_id in enumerate(packages_ids):
ret_upload_package = self._upload_package(PackageReference(conan_ref, package_id),
index + 1, total, retry, retry_wait,
integrity_check,
policy, upload_remote)
if ret_upload_package:
recorder.add_package(str(conan_ref), package_id)
if not defined_remote and policy != UPLOAD_POLICY_SKIP:
self._registry.set_ref(conan_ref, upload_remote.name)
self._plugin_manager.execute("post_upload", conanfile_path=conanfile_path,
reference=conan_ref, remote=upload_remote)
def _upload_recipe(self, conan_reference, retry, retry_wait, policy, remote):
conan_file_path = self._client_cache.conanfile(conan_reference)
current_remote = self._registry.get_recipe_remote(conan_reference)
if remote != current_remote:
conanfile = self._loader.load_class(conan_file_path)
complete_recipe_sources(self._remote_manager, self._client_cache, self._registry,
conanfile, conan_reference)
result = self._remote_manager.upload_recipe(conan_reference, remote, retry, retry_wait,
policy=policy)
return result
def _upload_package(self, package_ref, index=1, total=1, retry=None, retry_wait=None,
integrity_check=False, policy=None, remote=None):
"""Uploads the package identified by package_id"""
msg = ("Uploading package %d/%d: %s" % (index, total, str(package_ref.package_id)))
t1 = time.time()
self._user_io.out.info(msg)
result = self._remote_manager.upload_package(package_ref, remote, retry, retry_wait,
integrity_check, policy)
logger.debug("====> Time uploader upload_package: %f" % (time.time() - t1))
return result
def _check_recipe_date(self, conan_ref, remote):
try:
remote_recipe_manifest = self._remote_manager.get_conan_manifest(conan_ref, remote)
except NotFoundException:
return # First time uploading this package
local_manifest = self._client_cache.load_manifest(conan_ref)
if (remote_recipe_manifest != local_manifest and
remote_recipe_manifest.time > local_manifest.time):
raise ConanException("Remote recipe is newer than local recipe: "
"\n Remote date: %s\n Local date: %s" %
(remote_recipe_manifest.time, local_manifest.time))
|
StarcoderdataPython
|
4915095
|
<filename>modules/modelchecker/statespace.py<gh_stars>1-10
import re
class StateSpace:
    def __init__(self, _stateSpace=None, _declarations=""):
        # Avoid a mutable default argument; copy whatever the caller provides.
        self.statespace = dict(_stateSpace) if _stateSpace is not None else {}
        self.declarations = _declarations
def addState(self, position, state):
self.statespace[position] = state
def getStateSpace(self):
return self.statespace
def getStateAtPosition(self, position):
return self.statespace.get(position, [])
def getAllStates(self):
allstates = []
allstates.extend(self.statespace.values())
return allstates
def getStatesRange(self, start=0, howmany=0):
states = []
if howmany > 0:
for stateNo in range(start, start + howmany):
states.append(self.getStateAtPosition(stateNo))
return states
def setDeclarations(self, _declarations=""):
self.declarations = _declarations
def getDeclarations(self):
return self.declarations
def __getStatesForParsing(self, start=0, howmany=0):
statesForParsing = []
if howmany > 0:
statesForParsing = self.getStatesRange(start, howmany)
else:
statesForParsing = self.getAllStates()
return statesForParsing
def __extractVariables(self, assertion):
_vars = set()
smtKeywords = ["ite", "assert", "and", "or"]
generalPattern = re.compile("[a-zA-Z0-9_]+")
numberPattern = re.compile("^[-]?[0-9]*([.,][0-9]+)?$")
matches = generalPattern.findall(assertion)
for m in matches:
            if (numberPattern.match(m) is None and
                    m.lower() not in smtKeywords):
_vars.add(m)
return _vars
def __generateDeclarations(self, variables):
declarations = []
for _var in variables:
declarations.append("(declare-const {0} Real)".format(_var))
return declarations
    def __loadCustomFunctions(self, customFunctionsFileLocation):
        cFunctions = ""
        try:
            with open(customFunctionsFileLocation, "r") as _file:
                cFunctions = _file.read()
        except OSError:
            # The original printed an undefined name (pathToJsonFile) here.
            print("{0} could not be loaded.".format(customFunctionsFileLocation))
        return cFunctions
    def generateSMT2Script(self, start=0, howmany=0):
        statesForParsing = self.__getStatesForParsing(start, howmany)
        customFunctions = self.__loadCustomFunctions("./models/custom-functions.smt2")
        script = "{0} \n {1} \n".format(self.declarations, customFunctions)
        # The original looped with `pass` and joined only the final state
        # outside the loop; accumulate every state's assertions instead.
        for state in statesForParsing:
            script += "\n".join(state)
        return script
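if __name__ == "__main__":
    # Hedged usage sketch: one state holding a single assertion; the custom
    # functions file is optional and merely logs a message when missing.
    ss = StateSpace(_declarations="(declare-const x Real)")
    ss.addState(0, ["(assert (> x 0))"])
    print(ss.generateSMT2Script())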
|
StarcoderdataPython
|
5153290
|
<reponame>nickersk/streamlink-27
import logging
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
log = logging.getLogger(__name__)
STREAMS_URL = "https://piczel.tv/api/streams?followedStreams=false&live_only=false&sfw=false"
HLS_URL = "https://piczel.tv/hls/{0}/index.m3u8"
_streams_schema = validate.Schema([
{
"id": int,
"live": bool,
"slug": validate.text
}
])
@pluginmatcher(re.compile(
r"https?://piczel\.tv/watch/(\w+)"
))
class Piczel(Plugin):
def _get_streams(self):
channel_name = self.match.group(1)
res = self.session.http.get(STREAMS_URL)
streams = self.session.http.json(res, schema=_streams_schema)
for stream in streams:
if stream["slug"] != channel_name:
continue
if not stream["live"]:
return
            log.debug("HLS stream URL: {0}".format(HLS_URL.format(stream["id"])))
return {"live": HLSStream(self.session, HLS_URL.format(stream["id"]))}
__plugin__ = Piczel
|
StarcoderdataPython
|
1608872
|
<reponame>alliance-genome/agr_literature_service<filename>backend/app/literature/crud/reference_manual_term_tag_crud.py
from sqlalchemy.orm import Session
from fastapi import HTTPException
from fastapi import status
from fastapi.encoders import jsonable_encoder
from literature.schemas import ReferenceManualTermTagSchemaPost
from literature.schemas import ReferenceManualTermTagSchemaPatch
from literature.models import ReferenceManualTermTagModel
from literature.models import ReferenceModel
def create(db: Session, reference_manual_term_tag: ReferenceManualTermTagSchemaPost):
reference_manual_term_tag_data = jsonable_encoder(reference_manual_term_tag)
reference_curie = reference_manual_term_tag_data['reference_curie']
reference = db.query(ReferenceModel).filter(ReferenceModel.curie == reference_curie).first()
if not reference:
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail=f"Reference_curie {reference_curie} does not exist")
del reference_manual_term_tag_data['reference_curie']
db_obj = ReferenceManualTermTagModel(**reference_manual_term_tag_data)
db_obj.reference = reference
db.add(db_obj)
db.commit()
db.refresh(db_obj)
return db_obj.reference_manual_term_tag_id
def destroy(db: Session, reference_manual_term_tag_id: int):
db_obj = db.query(ReferenceManualTermTagModel).filter(ReferenceManualTermTagModel.reference_manual_term_tag_id == reference_manual_term_tag_id).first()
if not db_obj:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail=f"Reference Manual Term Tag with reference_manual_term_tag_id {reference_manual_term_tag_id} not found")
db.delete(db_obj)
db.commit()
return None
def patch(db: Session, reference_manual_term_tag_id: int, reference_manual_term_tag_update: ReferenceManualTermTagSchemaPatch):
db_obj = db.query(ReferenceManualTermTagModel).filter(ReferenceManualTermTagModel.reference_manual_term_tag_id == reference_manual_term_tag_id).first()
if not db_obj:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
detail=f"Reference Manual Term Tag ID with reference_manual_term_tag_id {reference_manual_term_tag_id} not found")
for field, value in reference_manual_term_tag_update.dict().items():
if field == "reference_curie" and value:
reference_curie_to = value
reference = db.query(ReferenceModel).filter(ReferenceModel.curie == reference_curie_to).first()
if not reference:
                raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                                    detail=f"Reference with curie {reference_curie_to} does not exist")
db_obj.reference = reference
else:
setattr(db_obj, field, value)
db.commit()
return {"message": "updated"}
def show(db: Session, reference_manual_term_tag_id: int):
    db_obj = db.query(ReferenceManualTermTagModel).filter(ReferenceManualTermTagModel.reference_manual_term_tag_id == reference_manual_term_tag_id).first()
    if not db_obj:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail=f"Reference Manual Term Tag with the reference_manual_term_tag_id {reference_manual_term_tag_id} is not available")
    # Encode only after the existence check; the original encoded None first.
    data = jsonable_encoder(db_obj)
data['reference_curie'] = db.query(ReferenceModel.curie).filter(ReferenceModel.reference_id == data['reference_id']).first()[0]
del data['reference_id']
return data
def show_changesets(db: Session, reference_manual_term_tag_id: int):
db_obj = db.query(ReferenceManualTermTagModel).filter(ReferenceManualTermTagModel.reference_manual_term_tag_id == reference_manual_term_tag_id).first()
if not db_obj:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail=f"Reference Manual Term Tag with the reference_manual_term_tag_id {reference_manual_term_tag_id} is not available")
history = []
for version in db_obj.versions:
tx = version.transaction
history.append({'transaction': {'id': tx.id,
'issued_at': tx.issued_at,
'user_id': tx.user_id},
'changeset': version.changeset})
return history
|
StarcoderdataPython
|
3238349
|
"""Shonan Rotation Averaging.
The algorithm was proposed in "Shonan Rotation Averaging:Global Optimality by
Surfing SO(p)^n" and is implemented by wrapping up over implementation provided
by GTSAM.
References:
- https://arxiv.org/abs/2008.02737
- https://gtsam.org/
Authors: <NAME>, <NAME>, <NAME>
"""
from typing import Dict, List, Optional, Tuple
import gtsam
import numpy as np
from gtsam import (
BetweenFactorPose3,
LevenbergMarquardtParams,
Rot3,
Pose3,
ShonanAveraging3,
ShonanAveragingParameters3,
)
from gtsfm.averaging.rotation.rotation_averaging_base import RotationAveragingBase
class ShonanRotationAveraging(RotationAveragingBase):
"""Performs Shonan rotation averaging."""
def __init__(self) -> None:
"""
Note: `p_min` and `p_max` describe the minimum and maximum relaxation rank.
"""
self._p_min = 5
self._p_max = 30
def __run_with_consecutive_ordering(
self, num_connected_nodes: int, i2Ri1_dict: Dict[Tuple[int, int], Optional[Rot3]]
) -> List[Optional[Rot3]]:
"""Run the rotation averaging on a connected graph w/ N keys ordered consecutively [0,...,N-1].
Note: GTSAM requires the N input nodes to be connected and ordered from [0 ... N-1].
Modifying GTSAM would require a major philosophical overhaul, so we perform the re-ordering
here in a sort of "wrapper". See https://github.com/borglab/gtsam/issues/784 for more details.
Args:
num_connected_nodes: number of unique connected nodes (i.e. images) in the graph
(<= the number of images in the dataset)
i2Ri1_dict: relative rotations for each edge between nodes as dictionary (i1, i2): i2Ri1.
Note: i1 < num_connected_nodes, and also i2 < num_connected_nodes.
Returns:
Global rotations for each **CONNECTED** camera pose, i.e. wRi, as a list. The number of entries in
the list is `num_connected_nodes`. The list may contain `None` where the global rotation could
not be computed (either underconstrained system or ill-constrained system).
"""
lm_params = LevenbergMarquardtParams.CeresDefaults()
shonan_params = ShonanAveragingParameters3(lm_params)
shonan_params.setUseHuber(False)
shonan_params.setCertifyOptimality(True)
noise_model = gtsam.noiseModel.Unit.Create(6)
between_factors = gtsam.BetweenFactorPose3s()
for (i1, i2), i2Ri1 in i2Ri1_dict.items():
if i2Ri1 is not None:
# ignore translation during rotation averaging
i2Ti1 = Pose3(i2Ri1, np.zeros(3))
between_factors.append(BetweenFactorPose3(i2, i1, i2Ti1, noise_model))
obj = ShonanAveraging3(between_factors, shonan_params)
initial = obj.initializeRandomly()
result_values, _ = obj.run(initial, self._p_min, self._p_max)
wRi_list_consecutive = [None] * num_connected_nodes
for i in range(num_connected_nodes):
if result_values.exists(i):
wRi_list_consecutive[i] = result_values.atRot3(i)
return wRi_list_consecutive
def run(self, num_images: int, i2Ri1_dict: Dict[Tuple[int, int], Optional[Rot3]]) -> List[Optional[Rot3]]:
"""Run the rotation averaging on a connected graph with arbitrary keys, where each key is a image/pose index.
Note: run() functions as a wrapper that re-orders keys to prepare a graph w/ N keys ordered [0,...,N-1].
All input nodes must belong to a single connected component, in order to obtain an absolute pose for each
camera in a single, global coordinate frame.
Args:
num_images: number of images. Since we have one pose per image, it is also the number of poses.
i2Ri1_dict: relative rotations for each image pair-edge as dictionary (i1, i2): i2Ri1.
Returns:
Global rotations for each camera pose, i.e. wRi, as a list. The number of entries in the list is
`num_images`. The list may contain `None` where the global rotation could not be computed (either
underconstrained system or ill-constrained system), or where the camera pose had no valid observation
in the input to run().
"""
connected_nodes = set()
for (i1, i2) in i2Ri1_dict.keys():
connected_nodes.add(i1)
connected_nodes.add(i2)
connected_nodes = sorted(list(connected_nodes))
# given original index, this map gives back a new temporary index, starting at 0
reordered_idx_map = {}
for (new_idx, i) in enumerate(connected_nodes):
reordered_idx_map[i] = new_idx
# now, map the original indices to reordered indices
i2Ri1_dict_reordered = {}
for (i1, i2), i2Ri1 in i2Ri1_dict.items():
i1_ = reordered_idx_map[i1]
i2_ = reordered_idx_map[i2]
i2Ri1_dict_reordered[(i1_, i2_)] = i2Ri1
wRi_list_subset = self.__run_with_consecutive_ordering(
num_connected_nodes=len(connected_nodes), i2Ri1_dict=i2Ri1_dict_reordered
)
wRi_list = [None] * num_images
for remapped_i, original_i in enumerate(connected_nodes):
wRi_list[original_i] = wRi_list_subset[remapped_i]
return wRi_list
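if __name__ == "__main__":
    # Hedged usage sketch (hypothetical relative rotations for a 3-image chain);
    # Rot3.RzRyRx builds a rotation from roll/pitch/yaw angles in radians.
    averaging = ShonanRotationAveraging()
    i2Ri1 = {
        (0, 1): Rot3.RzRyRx(0.0, 0.0, 0.1),
        (1, 2): Rot3.RzRyRx(0.0, 0.0, 0.2),
    }
    print(averaging.run(num_images=3, i2Ri1_dict=i2Ri1))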
|
StarcoderdataPython
|
326398
|
etree.SubElement(dictionary, u'Name').text = dict_name
|
StarcoderdataPython
|
8054645
|
<reponame>Pahandrovich/omniscidb
def get_source_version():
import os
d = dict(MAJOR='5', MINOR='6', MICRO='0', EXTRA='none')
here = os.path.abspath(os.path.dirname(__file__))
    try:
        with open(os.path.join(here, '..', '..', 'CMakeLists.txt')) as f:
            lines = f.readlines()
    except FileNotFoundError:
        return None
    for line in lines:
        if line.lstrip().startswith('set(MAPD_VERSION_'):
            k = line.split()[0].rsplit('_', 1)[-1]
            n = line.split('"')[1]
            d[k] = n
return '{MAJOR}.{MINOR}.{MICRO}{EXTRA}'.format(**d)
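# Example: with the defaults above and no matching set(MAPD_VERSION_...) lines,
# the template yields '5.6.0none'.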
def get_package_version():
from pkg_resources import get_distribution, DistributionNotFound
try:
return get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
return get_source_version()
|
StarcoderdataPython
|
9682044
|
<filename>src/verify/design/message.py
"""
"""
from dataclasses import dataclass
from data_pipe.packer import BufferPacker
@dataclass(frozen=True)
class MessageBuffer(BufferPacker):
""
buffer:memoryview
|
StarcoderdataPython
|
1812035
|
<gh_stars>0
# Generated by Django 3.0 on 2021-08-16 23:50
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('shop', '0002_auto_20210723_1518'),
('orders', '0001_initial'),
]
operations = [
migrations.RenameModel(
old_name='OrderItems',
new_name='OrderItem',
),
migrations.AlterModelOptions(
name='order',
options={'ordering': ('-created',)},
),
]
|
StarcoderdataPython
|
14870
|
# Copyright 2015 - Mirantis, Inc.
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import json
import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
import sys
from oslo_config import cfg
from oslo_log import log as logging
from mistral.db.sqlalchemy import model_base as mb
from mistral.db.sqlalchemy import types as st
from mistral import exceptions as exc
from mistral.services import security
from mistral import utils
# Definition objects.
LOG = logging.getLogger(__name__)
def _get_hash_function_by(column_name):
def calc_hash(context):
val = context.current_parameters[column_name] or {}
if isinstance(val, dict):
# If the value is a dictionary we need to make sure to have
# keys in the same order in a string representation.
hash_base = json.dumps(sorted(val.items()))
else:
hash_base = str(val)
return hashlib.sha256(hash_base.encode('utf-8')).hexdigest()
return calc_hash
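# Example: {'b': 1, 'a': 2} and {'a': 2, 'b': 1} hash identically because
# sorted(val.items()) fixes the key order before serializing.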
def validate_long_type_length(cls, field_name, value):
"""Makes sure the value does not exceeds the maximum size."""
if value:
# Get the configured limit.
size_limit_kb = cfg.CONF.engine.execution_field_size_limit_kb
# If the size is unlimited.
if size_limit_kb < 0:
return
size_kb = int(sys.getsizeof(str(value)) / 1024)
if size_kb > size_limit_kb:
LOG.error(
"Size limit %dKB exceed for class [%s], "
"field %s of size %dKB.",
size_limit_kb, str(cls), field_name, size_kb
)
raise exc.SizeLimitExceededException(
field_name,
size_kb,
size_limit_kb
)
def register_length_validator(attr_name):
"""Register an event listener on the attribute.
This event listener will validate the size every
time a 'set' occurs.
"""
    for cls in utils.iter_subclasses(Execution):
        if hasattr(cls, attr_name):
            event.listen(
                getattr(cls, attr_name),
                'set',
                # Bind cls at definition time; a plain closure would late-bind
                # every listener to the last subclass in the loop.
                lambda t, v, o, i, cls=cls: validate_long_type_length(cls, attr_name, v)
            )
class Definition(mb.MistralSecureModelBase):
__abstract__ = True
id = mb.id_column()
name = sa.Column(sa.String(255))
definition = sa.Column(st.MediumText(), nullable=True)
spec = sa.Column(st.JsonMediumDictType())
tags = sa.Column(st.JsonListType())
is_system = sa.Column(sa.Boolean())
# There's no WorkbookExecution so we safely omit "Definition" in the name.
class Workbook(Definition):
"""Contains info about workbook (including definition in Mistral DSL)."""
__tablename__ = 'workbooks_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
class WorkflowDefinition(Definition):
"""Contains info about workflow (including definition in Mistral DSL)."""
__tablename__ = 'workflow_definitions_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_is_system' % __tablename__, 'is_system'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
class ActionDefinition(Definition):
"""Contains info about registered Actions."""
__tablename__ = 'action_definitions_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_is_system' % __tablename__, 'is_system'),
sa.Index('%s_action_class' % __tablename__, 'action_class'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
# Main properties.
description = sa.Column(sa.Text())
input = sa.Column(sa.Text())
# Service properties.
action_class = sa.Column(sa.String(200))
attributes = sa.Column(st.JsonDictType())
# Execution objects.
class Execution(mb.MistralSecureModelBase):
__abstract__ = True
# Common properties.
id = mb.id_column()
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255), nullable=True)
workflow_name = sa.Column(sa.String(255))
workflow_id = sa.Column(sa.String(80))
spec = sa.Column(st.JsonMediumDictType())
state = sa.Column(sa.String(20))
state_info = sa.Column(sa.Text(), nullable=True)
tags = sa.Column(st.JsonListType())
# Internal properties which can be used by engine.
runtime_context = sa.Column(st.JsonLongDictType())
class ActionExecution(Execution):
"""Contains action execution information."""
__tablename__ = 'action_executions_v2'
__table_args__ = (
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_state' % __tablename__, 'state'),
sa.Index('%s_updated_at' % __tablename__, 'updated_at')
)
# Main properties.
accepted = sa.Column(sa.Boolean(), default=False)
input = sa.Column(st.JsonLongDictType(), nullable=True)
output = sa.orm.deferred(sa.Column(st.JsonLongDictType(), nullable=True))
class WorkflowExecution(Execution):
"""Contains workflow execution information."""
__tablename__ = 'workflow_executions_v2'
__table_args__ = (
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_state' % __tablename__, 'state'),
sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
)
# Main properties.
accepted = sa.Column(sa.Boolean(), default=False)
input = sa.Column(st.JsonLongDictType(), nullable=True)
output = sa.orm.deferred(sa.Column(st.JsonLongDictType(), nullable=True))
params = sa.Column(st.JsonLongDictType())
# Initial workflow context containing workflow variables, environment,
# openstack security context etc.
# NOTES:
# * Data stored in this structure should not be copied into inbound
# contexts of tasks. No need to duplicate it.
# * This structure does not contain workflow input.
context = sa.Column(st.JsonLongDictType())
class TaskExecution(Execution):
"""Contains task runtime information."""
__tablename__ = 'task_executions_v2'
__table_args__ = (
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_state' % __tablename__, 'state'),
sa.Index('%s_updated_at' % __tablename__, 'updated_at'),
sa.UniqueConstraint('unique_key')
)
# Main properties.
action_spec = sa.Column(st.JsonLongDictType())
unique_key = sa.Column(sa.String(250), nullable=True)
type = sa.Column(sa.String(10))
# Whether the task is fully processed (publishing and calculating commands
# after it). It allows to simplify workflow controller implementations
# significantly.
processed = sa.Column(sa.BOOLEAN, default=False)
# Data Flow properties.
in_context = sa.Column(st.JsonLongDictType())
published = sa.Column(st.JsonLongDictType())
@property
def executions(self):
return (
self.action_executions
if not self.spec.get('workflow')
else self.workflow_executions
)
for cls in utils.iter_subclasses(Execution):
event.listen(
# Catch and trim Execution.state_info to always fit allocated size.
# Note that the limit is 65500 which is less than 65535 (2^16 -1).
# The reason is that utils.cut() is not exactly accurate in case if
# the value is not a string, but, for example, a dictionary. If we
# limit it exactly to 65535 then once in a while it may go slightly
# beyond the allowed maximum size. It may depend on the order of
# keys in a string representation and other things that are hidden
# inside utils.cut_dict() method.
cls.state_info,
'set',
lambda t, v, o, i: utils.cut(v, 65500),
retval=True
)
# Many-to-one for 'ActionExecution' and 'TaskExecution'.
ActionExecution.task_execution_id = sa.Column(
sa.String(36),
sa.ForeignKey(TaskExecution.id, ondelete='CASCADE'),
nullable=True
)
TaskExecution.action_executions = relationship(
ActionExecution,
backref=backref('task_execution', remote_side=[TaskExecution.id]),
cascade='all, delete-orphan',
foreign_keys=ActionExecution.task_execution_id,
lazy='select'
)
sa.Index(
'%s_task_execution_id' % ActionExecution.__tablename__,
'task_execution_id'
)
# Many-to-one for 'WorkflowExecution' and 'TaskExecution'.
WorkflowExecution.task_execution_id = sa.Column(
sa.String(36),
sa.ForeignKey(TaskExecution.id, ondelete='CASCADE'),
nullable=True
)
TaskExecution.workflow_executions = relationship(
WorkflowExecution,
backref=backref('task_execution', remote_side=[TaskExecution.id]),
cascade='all, delete-orphan',
foreign_keys=WorkflowExecution.task_execution_id,
lazy='select'
)
sa.Index(
'%s_task_execution_id' % WorkflowExecution.__tablename__,
'task_execution_id'
)
# Many-to-one for 'TaskExecution' and 'WorkflowExecution'.
TaskExecution.workflow_execution_id = sa.Column(
sa.String(36),
sa.ForeignKey(WorkflowExecution.id, ondelete='CASCADE')
)
WorkflowExecution.task_executions = relationship(
TaskExecution,
backref=backref('workflow_execution', remote_side=[WorkflowExecution.id]),
cascade='all, delete-orphan',
foreign_keys=TaskExecution.workflow_execution_id,
lazy='select'
)
sa.Index(
'%s_workflow_execution_id' % TaskExecution.__tablename__,
TaskExecution.workflow_execution_id
)
# Other objects.
class DelayedCall(mb.MistralModelBase):
"""Contains info about delayed calls."""
__tablename__ = 'delayed_calls_v2'
id = mb.id_column()
factory_method_path = sa.Column(sa.String(200), nullable=True)
target_method_name = sa.Column(sa.String(80), nullable=False)
method_arguments = sa.Column(st.JsonDictType())
serializers = sa.Column(st.JsonDictType())
key = sa.Column(sa.String(250), nullable=True)
auth_context = sa.Column(st.JsonDictType())
execution_time = sa.Column(sa.DateTime, nullable=False)
processing = sa.Column(sa.Boolean, default=False, nullable=False)
sa.Index(
'%s_execution_time' % DelayedCall.__tablename__,
DelayedCall.execution_time
)
class Environment(mb.MistralSecureModelBase):
"""Contains environment variables for workflow execution."""
__tablename__ = 'environments_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.Index('%s_name' % __tablename__, 'name'),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
)
# Main properties.
id = mb.id_column()
name = sa.Column(sa.String(200))
description = sa.Column(sa.Text())
variables = sa.Column(st.JsonLongDictType())
class CronTrigger(mb.MistralSecureModelBase):
"""Contains info about cron triggers."""
__tablename__ = 'cron_triggers_v2'
__table_args__ = (
sa.UniqueConstraint('name', 'project_id'),
sa.UniqueConstraint(
'workflow_input_hash', 'workflow_name', 'pattern', 'project_id',
'workflow_params_hash', 'remaining_executions',
'first_execution_time'
),
sa.Index(
'%s_next_execution_time' % __tablename__,
'next_execution_time'
),
sa.Index('%s_project_id' % __tablename__, 'project_id'),
sa.Index('%s_scope' % __tablename__, 'scope'),
sa.Index('%s_workflow_name' % __tablename__, 'workflow_name'),
)
id = mb.id_column()
name = sa.Column(sa.String(200))
pattern = sa.Column(
sa.String(100),
nullable=True,
default='0 0 30 2 0' # Set default to 'never'.
)
first_execution_time = sa.Column(sa.DateTime, nullable=True)
next_execution_time = sa.Column(sa.DateTime, nullable=False)
workflow_name = sa.Column(sa.String(255))
remaining_executions = sa.Column(sa.Integer)
workflow_id = sa.Column(
sa.String(36),
sa.ForeignKey(WorkflowDefinition.id)
)
workflow = relationship('WorkflowDefinition', lazy='joined')
workflow_params = sa.Column(st.JsonDictType())
workflow_params_hash = sa.Column(
sa.CHAR(64),
default=_get_hash_function_by('workflow_params')
)
workflow_input = sa.Column(st.JsonDictType())
workflow_input_hash = sa.Column(
sa.CHAR(64),
default=_get_hash_function_by('workflow_input')
)
trust_id = sa.Column(sa.String(80))
def to_dict(self):
d = super(CronTrigger, self).to_dict()
utils.datetime_to_str_in_dict(d, 'first_execution_time')
utils.datetime_to_str_in_dict(d, 'next_execution_time')
return d
# Register all hooks related to secure models.
mb.register_secure_model_hooks()
# TODO(rakhmerov): This is a bad solution. It's hard to find in the code,
# configure flexibly etc. Fix it.
# Register an event listener to verify that the size of all the long columns
# affected by the user do not exceed the limit configuration.
for attr_name in ['input', 'output', 'params', 'published']:
register_length_validator(attr_name)
class ResourceMember(mb.MistralModelBase):
"""Contains info about resource members."""
__tablename__ = 'resource_members_v2'
__table_args__ = (
sa.UniqueConstraint(
'resource_id',
'resource_type',
'member_id'
),
)
id = mb.id_column()
resource_id = sa.Column(sa.String(80), nullable=False)
resource_type = sa.Column(
sa.String(50),
nullable=False,
default='workflow'
)
project_id = sa.Column(sa.String(80), default=security.get_project_id)
member_id = sa.Column(sa.String(80), nullable=False)
status = sa.Column(sa.String(20), nullable=False, default="pending")
class EventTrigger(mb.MistralSecureModelBase):
"""Contains info about event triggers."""
__tablename__ = 'event_triggers_v2'
__table_args__ = (
sa.UniqueConstraint('exchange', 'topic', 'event', 'workflow_id',
'project_id'),
sa.Index('%s_project_id_workflow_id' % __tablename__, 'project_id',
'workflow_id'),
)
id = mb.id_column()
name = sa.Column(sa.String(200))
workflow_id = sa.Column(
sa.String(36),
sa.ForeignKey(WorkflowDefinition.id)
)
workflow_params = sa.Column(st.JsonDictType())
workflow_input = sa.Column(st.JsonDictType())
exchange = sa.Column(sa.String(80), nullable=False)
topic = sa.Column(sa.String(80), nullable=False)
event = sa.Column(sa.String(80), nullable=False)
trust_id = sa.Column(sa.String(80))
class NamedLock(mb.MistralModelBase):
"""Contains info about named locks.
Usage of named locks is based on properties of READ COMMITTED
transactions of the most generally used SQL databases such as
Postgres, MySQL, Oracle etc.
The locking scenario is as follows:
1. Transaction A (TX-A) inserts a row with unique 'id' and
some value that identifies a locked object stored in 'name'.
2. Transaction B (TX-B) and any subsequent transactions tries
to insert a row with unique 'id' and the same value of 'name'
field and it waits till TX-A is completed due to transactional
properties of READ COMMITTED.
3. If TX-A then immediately deletes the record and commits then
TX-B and or one of the subsequent transactions are released
and its 'insert' is completed.
4. Then the scenario repeats with step #2 where the role of TX-A
will be playing a transaction that just did insert.
Practically, this table should never contain any committed rows.
All its usage is around the play with transactional storages.
"""
__tablename__ = 'named_locks'
sa.UniqueConstraint('name')
id = mb.id_column()
name = sa.Column(sa.String(250))
sa.UniqueConstraint(NamedLock.name)
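# Hedged sketch of the NamedLock protocol described in the docstring
# (pseudo-SQL; table contents and values are illustrative):
#   TX-A: INSERT INTO named_locks (id, name) VALUES ('uuid-a', 'wf-123');
#   TX-B: INSERT INTO named_locks (id, name) VALUES ('uuid-b', 'wf-123');  -- blocks on TX-A
#   TX-A: DELETE FROM named_locks WHERE id = 'uuid-a'; COMMIT;             -- releases TX-B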
|
StarcoderdataPython
|
1984066
|
"""Unique Paths
A robot is located at the top-left corner of a m x n grid (marked 'Start' in the diagram below).
The robot can only move either down or right at any point in time. The robot is trying to reach the bottom-right corner of the grid (marked 'Finish' in the diagram below).
How many possible unique paths are there?"""
class Solution(object):
def uniquePaths(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
"""
## Practice:
grid = [[1]*n for _ in range(m)]
for i in range(1, m):
for j in range(1, n):
grid[i][j] = grid[i][j-1] + grid[i-1][j]
return grid[-1][-1]
# R3:
dp = [1]*n
for i in range(1, m):
for j in range(1, n):
dp[j] += dp[j-1]
return dp[-1]
# R2: Done
# R1:
        matrix = [[1 for _ in range(n)] for _ in range(m)]  # m rows x n cols (the original swapped m and n)
for i in range(1, m):
for j in range(1, n):
matrix[i][j] = matrix[i][j-1] + matrix[i-1][j]
return matrix[m-1][n-1]
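if __name__ == "__main__":
    # Example: a 3 x 7 grid has C(8, 2) = 28 monotone lattice paths.
    print(Solution().uniquePaths(3, 7))  # -> 28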
|
StarcoderdataPython
|
272718
|
<reponame>NiklasRosenstein/houdini-manage<filename>houdini_manage/gui.py
# Copyright (C) 2017 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
import os
import webbrowser
from . import __version__, library
from .config import config
from .envfile import SectionEnvfile
resdir = os.path.join(os.path.dirname(__file__), 'res')
def _fileselectFor(edit):
def handler():
path = QFileDialog.getExistingDirectory()
if path:
edit.setText(path)
return handler
class LibraryModel(QAbstractListModel):
  def __init__(self, envfile):
    # The original called QAbstractTableModel.__init__ on a QAbstractListModel
    # subclass, which PyQt rejects with a TypeError.
    QAbstractListModel.__init__(self)
    self.envfile = envfile
    self.update()
def update(self):
self.sections = list(s for s in self.envfile.iter_named_sections() if s.is_library())
self.layoutChanged.emit()
def getFromIndex(self, index):
index = index.row()
if index < 0 or index >= len(self.sections):
return
return self.sections[index]
def removeIndex(self, index):
section = self.getFromIndex(index)
if section:
self.envfile.remove_section(section.name)
def rowCount(self, parent=QModelIndex()):
return len(self.sections)
def data(self, index, role=Qt.DisplayRole):
if not index.isValid():
return None
section = self.sections[index.row()]
col = index.column()
if col == 0 and role == Qt.DisplayRole:
return '{} v{} ({})'.format(
section.get_library_name(),
section.get_library_version() or '???',
section.get_library_path() or '???'
)
class Window(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.setWindowTitle('Houdini Manage v' + __version__)
self.setWindowIcon(QIcon(os.path.join(resdir, 'icon_manage.png')))
self.resize(500, 300)
# Create widgets.
self.houdiniVersion = QComboBox()
self.houdiniPath = QLineEdit()
self.listView = QListView()
self.menuBar = QMenuBar()
self._model = None
self._envfile = None
self._envfilename = None
self._lastHoudiniVersionIndex = None
btnInstall = QPushButton('')
btnInstall.setIcon(QIcon(os.path.join(resdir, 'install.png')))
btnInstall.setFixedSize(32, 32)
btnInstall.setToolTip('Install Library')
btnInstall.clicked.connect(self._install)
btnRemove = QPushButton('')
btnRemove.setIcon(QIcon(os.path.join(resdir, 'remove.png')))
btnRemove.setFixedSize(32, 32)
btnRemove.setToolTip('Remove Library')
btnRemove.clicked.connect(self._remove)
btnBuild = QPushButton('')
btnBuild.setIcon(QIcon(os.path.join(resdir, 'build.png')))
btnBuild.setFixedSize(32, 32)
btnBuild.setToolTip('(Re)build DSO')
btnBuild.clicked.connect(self._buildDso)
btnSave = QPushButton('')
btnSave.setIcon(QIcon(os.path.join(resdir, 'save.png')))
btnSave.setFixedSize(32, 32)
btnSave.setToolTip('Save Environment')
btnSave.clicked.connect(self._save)
btnHelp = QPushButton('')
btnHelp.setIcon(QIcon(os.path.join(resdir, 'question.png')))
btnHelp.setFixedSize(32, 32)
btnHelp.setToolTip('Help')
btnHelp.clicked.connect(self._help)
# Layout.
layout = QVBoxLayout(self)
if True: # Houdini version selector
line = QVBoxLayout()
layout.addLayout(line)
box = QHBoxLayout()
line.addLayout(box)
box.addWidget(QLabel('Houdini Version'))
box.addWidget(self.houdiniVersion)
box = QHBoxLayout()
line.addLayout(box)
btn = QPushButton('...')
btn.clicked.connect(_fileselectFor(self.houdiniPath))
box.addWidget(QLabel('Houdini Application Directory'))
box.addWidget(self.houdiniPath)
box.addWidget(btn)
if True: # List view and right bar
line = QHBoxLayout()
layout.addLayout(line)
line.addWidget(self.listView)
vert = QVBoxLayout()
vert.setAlignment(Qt.AlignTop)
line.addLayout(vert)
vert.addWidget(btnInstall)
vert.addWidget(btnRemove)
vert.addWidget(btnBuild)
vert.addWidget(make_spacer(vertical=True))
vert.addWidget(btnSave)
vert.addWidget(btnHelp)
# Init values.
self.houdiniPrefPaths = library.get_houdini_user_prefs_directories()
self.houdiniVersion.addItems([x[0] for x in self.houdiniPrefPaths])
self.houdiniVersion.currentIndexChanged.connect(self._updateEnv)
self.houdiniVersion.setCurrentIndex(0)
self.houdiniPath.setText(library.get_houdini_application_dir())
self._updateEnv()
def closeEvent(self, event):
if self._envfile and self._envfile.changed:
reply = QMessageBox.question(self, 'Unsaved Changes',
'You have unsaved changes in this environment. Do you want to '
'quit?', QMessageBox.Yes | QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
else:
event.accept()
def _updateEnv(self):
index = self.houdiniVersion.currentIndex()
if self._envfile and self._envfile.changed and index != self._lastHoudiniVersionIndex:
reply = QMessageBox.question(self, 'Unsaved Changes',
'You have unsaved changes in this environment. Do you want to '
'switch versions?', QMessageBox.Yes | QMessageBox.No)
if reply != QMessageBox.Yes:
self.houdiniVersion.setCurrentIndex(self._lastHoudiniVersionIndex)
return
if index == self._lastHoudiniVersionIndex:
return
path = self.houdiniPrefPaths[index][1]
self._lastHoudiniVersionIndex = index
if os.path.isfile(path):
self._envfilename = path
with open(path) as fp:
self._envfile = SectionEnvfile.parse(fp)
self._model = LibraryModel(self._envfile)
else:
self._envfilename = None
self._envfile = None
self._model = None
self.listView.setModel(self._model)
def _install(self):
if not self._envfile:
return
directory = QFileDialog.getExistingDirectory(self)
if not directory:
return
hou_app_dir = self.houdiniPath.text()
if not hou_app_dir:
print('No houdini application directory specified, skipping DSO builds.')
try:
library.install_library(self._envfile, directory)
if hou_app_dir:
if not library.build_dso(hou_app_dir, directory):
error_dialog('DSO build failed', 'Check console for more information.')
except library.NotALibraryError as exc:
error_dialog('Not a Houdini Library', str(exc))
except library.PreviousInstallationFoundError as exc:
error_dialog('Previous installation found', 'Please uninstall "{}" first.'.format(exc.library_name))
except OSError as exc:
error_dialog('Fatal error', str(exc))
else:
self._model.update()
def _remove(self):
index = self.listView.selectionModel().selectedIndexes()
if len(index) != 1:
return
self._model.removeIndex(index[0])
self._model.update()
def _buildDso(self):
hou_app_dir = self.houdiniPath.text()
if not hou_app_dir:
error_dialog('Error', 'Specify the Houdini Application Path to build DSOs.')
return
count = 0
num_built = 0
for index in self.listView.selectionModel().selectedIndexes():
section = self._model.getFromIndex(index)
try:
num, ok = library.build_dso(hou_app_dir, section.get_library_path())
num_built += num
count += 1
except OSError as exc:
error_dialog('Fatal error', str(exc))
break
if not count:
error_dialog('Error', 'Please select a library to rebuild the DSOs for.')
elif not num_built:
message_dialog('Note', 'No DSOs in the selected libraries.')
def _save(self):
if not self._envfile or not self._envfilename:
return
with open(self._envfilename, 'w') as fp:
self._envfile.render(fp)
def _help(self):
webbrowser.open('https://niklasrosenstein.github.io/houdini-manage/')
class FilenameWidget(QWidget): # Currently not used
textChanged = pyqtSignal()
def __init__(self, parent=None, type='file'):
assert type in ('file', 'directory')
QWidget.__init__(self, parent)
QHBoxLayout(self)
        self.type = type
self.edit = QLineEdit()
self.edit.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
self.edit.textChanged.connect(self.textChanged.emit)
self.button = QPushButton('...')
self.button.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
self.button.clicked.connect(self._clicked)
self.layout().addWidget(self.edit)
self.layout().addWidget(self.button)
self.layout().setContentsMargins(0, 0, 0, 0)
def _clicked(self):
if self.type == 'directory':
path = QFileDialog.getExistingDirectory(self)
else:
path = QFileDialog.getOpenFileName(self)[0]
if path:
self.edit.setText(path)
def make_separator():
frame = QFrame()
frame.setFrameShape(QFrame.HLine)
frame.setFrameShadow(QFrame.Sunken)
return frame
def make_spacer(vertical=False):
label = QLabel('')
policy = QSizePolicy.Expanding, QSizePolicy.Preferred
if vertical:
policy = reversed(policy)
label.setSizePolicy(*policy)
return label
def message_dialog(title, message):
QMessageBox.information(None, title, message)
def error_dialog(title, message):
QMessageBox.critical(None, title, message)
def main():
app = QApplication([])
wnd = Window()
wnd.show()
app.exec_()
return 0
|
StarcoderdataPython
|
8035697
|
<reponame>a-farahani/Time-series-object-detection
import setuptools
with open("README.md", 'r') as fp:
long_description = fp.read()
setuptools.setup(
name = "rhodes",
version = "1.0.2",
author="<NAME>, <NAME>, <NAME>",
author_email="<EMAIL>, <EMAIL>, <EMAIL>",
license='MIT',
description="A package for neuron detection.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/dsp-uga/team-rhodes-P3",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
test_suite='nose.collector',
tests_require=['nose'],
install_requires=['imageio', 'thunder-extraction', 'joblib', 'image_slicer'],
)
|
StarcoderdataPython
|
1719633
|
<gh_stars>0
"""
The classification test module.
"""
|
StarcoderdataPython
|
5153512
|
from flask import Blueprint
bp = Blueprint('case_new', __name__)
from app.case_new import routes
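# routes is imported after bp is created so the view module can import bp from
# this package without a circular import (the usual Flask blueprint layout).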
|
StarcoderdataPython
|
4807347
|
"""
MIT License
Copyright (c) 2021, <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
------------------------------------------------------------------------------------------------------------------------
Vanilla Stat attack
=====================
This class implements Vanilla Stat attack which is inspired by these three papers listed below
@inproceedings{serwadda2013when,
title={When kids' toys breach mobile phone security},
author={<NAME> and <NAME>},
booktitle={ACM SIGSAC Conference on Computer \& Communications Security},
year={2013},
organization={ACM}
}
@article{serwadda2013examining,
title={Examining a large keystroke biometrics dataset for statistical-attack openings},
author={<NAME> and <NAME>},
journal={ACM Transactions on Information and System Security},
volume={16},
number={2},
pages={8},
year={2013},
publisher={ACM}
}
@article{serwadda2016toward,
title={Toward robotic robbery on the touch screen},
author={<NAME> and <NAME> and <NAME> and <NAME> <NAME>},
journal={ACM Transactions on Information and System Security (TISSEC)},
volume={18},
number={4},
pages={1--25},
year={2016},
publisher={ACM New York, NY, USA}
}
"""
from source_code.adversaries.adversarial_attacks import Attacks
from source_code.synth_data_gen.gauss_blob_generator import GaussBlob
import pandas as pd
import numpy as np
import os
class StatAttack(Attacks):
def __init__(self, data, required_attack_samples, bootstrap_data_path, run_bootstrap=True,
bootstrap_iter=10000, random_state=42):
"""
@param required_attack_samples: Expects an integer for number of attack samples to generate
@param data: Expects a Pandas dataframe
"""
self.attack_df = data
self.attack_samples = required_attack_samples
self.attack_df_stat = None
self.boot_strap_st_at = run_bootstrap
self.bootstrap_iterations = bootstrap_iter
self.bs_data_path = bootstrap_data_path
self.rand_state = random_state
def generate_attack(self):
if 'user' in self.attack_df.columns:
# Using numpy arrays for more efficient usage
feat_list = self.attack_df.columns.drop('user').to_list()
        else:
            # no 'user' column to drop; every column is a feature
            feat_list = self.attack_df.columns.to_list()
# Generating attack set
bs_results = dict()
attack_feat_stats = pd.DataFrame(columns=["mean", "std"], index=feat_list)
if self.boot_strap_st_at is True:
            # calculating mean and std by bootstrap
for feat in feat_list:
bs_results[feat] = pd.DataFrame(columns=['mean', "std"])
bs_iter = self.bootstrap_iterations
print("starting bootstrap experiment")
for feat in feat_list:
print(f"starting bootstrap for {feat}")
for itera in range(bs_iter):
                    ar = np.random.choice(self.attack_df[feat].to_numpy(), replace=True,
                                          size=len(self.attack_df))
bs_results[feat].loc[itera, 'mean'] = ar.mean()
bs_results[feat].loc[itera, 'std'] = ar.std()
bs_results[feat].to_csv(os.path.join(self.bs_data_path, f"{feat}_bs.csv"), index=False, mode='w+')
stats = bs_results[feat].to_numpy().mean(axis=0)
attack_feat_stats.loc[feat, "mean"] = stats[0]
attack_feat_stats.loc[feat, "std"] = stats[1]
print("starting bootstrap experiment done")
else:
print(f"Reading bootstrap data from disk")
for feat in feat_list:
bs_results[feat] = pd.read_csv(os.path.join(self.bs_data_path, f"{feat}_bs.csv"))
stats = bs_results[feat].to_numpy().mean(axis=0)
attack_feat_stats.loc[feat, "mean"] = stats[0]
attack_feat_stats.loc[feat, "std"] = stats[1]
print(f"Reading bootstrap data from disk")
print(f"Generating Stats attack data")
self.attack_df_stat = pd.DataFrame(columns=feat_list)
num_samples = self.attack_samples
for feat in feat_list:
X, y = GaussBlob().generate_data(n_classes=1, n_features=1, n_samples=num_samples,
centers=[attack_feat_stats.loc[feat, 'mean']],
random_state=self.rand_state,
cluster_std=[attack_feat_stats.loc[feat, 'std']])
self.attack_df_stat[feat] = X.Dim_00
return self.attack_df_stat
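# Minimal usage sketch (not part of the original module; the dataframe and the
# bootstrap directory are hypothetical). generate_attack expects a feature
# dataframe with a 'user' column, as handled above.
if __name__ == '__main__':
    demo_df = pd.DataFrame({'user': [1, 1, 2, 2],
                            'hold_time': [0.11, 0.13, 0.09, 0.12],
                            'flight_time': [0.21, 0.25, 0.19, 0.22]})
    attack = StatAttack(data=demo_df, required_attack_samples=100,
                        bootstrap_data_path='.', run_bootstrap=True,
                        bootstrap_iter=50)
    print(attack.generate_attack().head())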
|
StarcoderdataPython
|
3392044
|
<reponame>Ernestyj/PyStudy<filename>finance/TradingAlgo.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.finance as finance
import zipline as zp
import math
from datetime import datetime
from zipline import TradingAlgorithm
class MyTradingAlgo(TradingAlgorithm):
    def __init__(self):
        # let zipline initialize the algorithm state rather than dropping it
        TradingAlgorithm.__init__(self)
    def initialize(self):
        pass
    def handle_data(self, data):
        pass
    def analyze(self, perf):
        pass
df = pd.DataFrame({code: wsdClose['2015']})
perf = MyTradingAlgo().run(df)
import zipline
from zipline import TradingAlgorithm
from zipline.api import sid, order, order_target, record, symbol, history, add_history
from zipline.api import *
from zipline.pipeline import Pipeline
code='000001.SH'
df = pd.DataFrame({code:wsdClose['2015']})
shortWin=20
longWin=40
def initialize(context):
context.day = -1
context.code = symbol(code)
context.maDF = MA(df[code], shortWin=shortWin, longWin=longWin)
context.maShort = context.maDF[str(shortWin)+'MA']
context.maLong = context.maDF[str(longWin)+'MA']
context.invested = False
set_slippage(slippage.VolumeShareSlippage(volume_limit=1.0, price_impact=0.0))
set_commission(commission.PerDollar(cost=0.003))
pass
def handle_data(context, data):
#print context.portfolio.cash
context.day += 1
i = context.day
s = context.maShort[i]
l = context.maLong[i]
pres = s
prel = l
if i!=0:
pres = context.maShort[i-1]
prel = context.maLong[i-1]
if i>=longWin-1:
if s>l and pres<=prel and not context.invested:
order_percent(symbol(code), 1.0)
context.invested = True
elif s<l and context.invested:
order_percent(symbol(code), -1.0)
context.invested = False
record(maShort=s, maLong=l)
pass
def analyze(context, perf):
perf_trans = perf.ix[[t!=[] for t in perf.transactions]]
buys = perf_trans.ix[[t[0]['amount'] > 0 for t in perf_trans.transactions]]
sells = perf_trans.ix[[t[0]['amount'] < 0 for t in perf_trans.transactions]]
fig = plt.figure(figsize=(20,15))
ax1 = fig.add_subplot(311)
#data['AAPL'].plot(ax=ax1, color='r', lw=2.)
perf[['maShort', 'maLong']].plot(ax=ax1, lw=2.)
ax1.plot(buys.index, perf.maShort.ix[buys.index], '^', markersize=10, color='m')
ax1.plot(sells.index, perf.maLong.ix[sells.index], 'v', markersize=10, color='k')
ax2 = fig.add_subplot(312)
portfolio_ratio = perf.portfolio_value/100000.0
portfolio_ratio.plot(ax=ax2, lw=2.)
ax2.plot(buys.index, portfolio_ratio.ix[buys.index], '^', markersize=10, color='m')
ax2.plot(sells.index, portfolio_ratio.ix[sells.index], 'v', markersize=10, color='k')
# ax3 = fig.add_subplot(313)
# perf.portfolio_value.plot(ax=ax3, lw=2.)
# ax3.plot(buys.index, perf.portfolio_value.ix[buys.index], '^', markersize=10, color='m')
# ax3.plot(sells.index, perf.portfolio_value.ix[sells.index], 'v', markersize=10, color='k')
pass
algo = TradingAlgorithm(initialize=initialize, handle_data=handle_data)
algo._analyze = analyze
perf = algo.run(df)
perf_trans = perf.ix[[t!=[] for t in perf.transactions]]
buys = perf_trans.ix[[t[0]['amount'] > 0 for t in perf_trans.transactions]]
sells = perf_trans.ix[[t[0]['amount'] < 0 for t in perf_trans.transactions]]
investDays = validInvestDays(buys, sells, perf)
print investDays
cashes = perf.portfolio_value.ix[sells.index]
returnRatArr = returnRatioArr(cashes.values)
final_return_ratio = returnRatio(perf.portfolio_value[-1])
print 'total return ratio:', final_return_ratio
print 'annualized return ratio:', annualizedReturnRatio([final_return_ratio], T=investDays, D=250.0)
from zipline.api import order_target, record, symbol, history, add_history
import numpy as np
def initialize(context):
# Register 2 histories that track daily prices,
# one with a 100 window and one with a 300 day window
add_history(100, '1d', 'price')
add_history(300, '1d', 'price')
context.i = 0
def handle_data(context, data):
# Skip first 300 days to get full windows
context.i += 1
if context.i < 300:
return
# Compute averages
# history() has to be called with the same params
# from above and returns a pandas dataframe.
short_mavg = history(100, '1d', 'price').mean()
long_mavg = history(300, '1d', 'price').mean()
# price_history = data.history(assets=symbol('TEST'), fields="price", bar_count=5, frequency="1d")
# Trading logic
if short_mavg[0] > long_mavg[0]:
# order_target orders as many shares as needed to
# achieve the desired number of shares.
order_target(symbol('AAPL'), 100)
elif short_mavg[0] < long_mavg[0]:
order_target(symbol('AAPL'), 0)
# Save values for later inspection
record(AAPL=data[symbol('AAPL')].price,
short_mavg=short_mavg[0],
long_mavg=long_mavg[0])
def analyze(context, perf):
fig = plt.figure()
ax1 = fig.add_subplot(211)
perf.portfolio_value.plot(ax=ax1)
ax1.set_ylabel('portfolio value in $')
ax2 = fig.add_subplot(212)
perf['AAPL'].plot(ax=ax2)
perf[['short_mavg', 'long_mavg']].plot(ax=ax2)
perf_trans = perf.ix[[t != [] for t in perf.transactions]]
buys = perf_trans.ix[[t[0]['amount'] > 0 for t in perf_trans.transactions]]
sells = perf_trans.ix[
[t[0]['amount'] < 0 for t in perf_trans.transactions]]
ax2.plot(buys.index, perf.short_mavg.ix[buys.index],
'^', markersize=10, color='m')
ax2.plot(sells.index, perf.short_mavg.ix[sells.index],
'v', markersize=10, color='k')
ax2.set_ylabel('price in $')
plt.legend(loc=0)
plt.show()
|
StarcoderdataPython
|
11361709
|
<reponame>dawidkski/space<filename>python/src/tensor/nn.py<gh_stars>1-10
from typing import List
from . import tensor as ts
from . import libtensor as _ts
from .autograd import Op, Variable
from .libtensor import Activation
class Conv2D(Op):
def __init__(self, in_channels: int, out_channels: int, kernel_size: int, stride: int,
pad: int, dilatation: int, activation: Activation = Activation.NONE,
use_bias: bool = True):
super().__init__()
self._layer = _ts.Conv2D(in_channels, out_channels, kernel_size, stride, pad,
dilatation, activation, use_bias)
def forward(self, *inputs: Variable):
tensor: Variable
tensor = self._check_inputs(*inputs, num=1)
if len(self._inputs) == 0:
self._inputs.append(tensor)
value = self._layer(tensor.value.data)
return Variable(ts.Tensor(value), self)
def backward(self, *grads: ts.Tensor):
d_output = self._check_grads(*grads, num=1)
d_input = self._layer.backward(d_output.data)
self._inputs[0].grad = ts.Tensor(d_input)
def weights(self) -> List[_ts.GradHolderF]:
return self._layer.weights()
def parameters(self) -> List[_ts.DataHolderF]:
return self._layer.parameters()
class Linear(Op):
def __init__(self, dim_in: int, dim_out: int, activation: Activation = Activation.NONE,
use_bias: bool = True):
super().__init__()
self._layer = _ts.FeedForward(dim_in, dim_out, activation, use_bias)
def forward(self, *inputs: Variable):
tensor: Variable
tensor = self._check_inputs(*inputs, num=1)
if len(self._inputs) == 0:
self._inputs.append(tensor)
value = self._layer(tensor.value.data)
return Variable(ts.Tensor(value), self)
def backward(self, *grads: ts.Tensor):
d_output = self._check_grads(*grads, num=1)
d_input = self._layer.backward(d_output.data)
self._inputs[0].grad = ts.Tensor(d_input)
def weights(self) -> List[_ts.GradHolderF]:
return self._layer.weights()
def parameters(self) -> List[_ts.DataHolderF]:
return self._layer.parameters()
class MaxPool2D(Op):
def __init__(self, kernel_size: int, stride: int, pad: int):
super(MaxPool2D, self).__init__()
self._layer = _ts.MaxPool2D(kernel_size, stride, pad)
def forward(self, *inputs: Variable):
tensor: Variable
tensor = self._check_inputs(*inputs, num=1)
if len(self._inputs) == 0:
self._inputs.append(tensor)
value = self._layer(tensor.value.data)
return Variable(ts.Tensor(value), self)
def backward(self, *grads: ts.Tensor):
d_output = self._check_grads(*grads, num=1)
d_input = self._layer.backward(d_output.data)
self._inputs[0].grad = ts.Tensor(d_input)
class ReLU(Op):
def __init__(self):
super().__init__()
self._relu = None
def forward(self, *inputs: Variable):
tensor: Variable
tensor = self._check_inputs(*inputs, num=1)
if len(self._inputs) == 0:
self._inputs.append(tensor)
if self._relu is None:
if tensor.value.dim == 2:
self._relu = _ts.ReLU_f2()
elif tensor.value.dim == 3:
self._relu = _ts.ReLU_f3()
else:
raise ValueError(f"Incompatible input dim (dim={tensor.value.dim}) with ReLU op")
value = self._relu(tensor.value.data)
return Variable(ts.Tensor(value), self)
def backward(self, *grads: ts.Tensor):
d_output = self._check_grads(*grads, num=1)
d_input = self._relu.backward(d_output.data)
self._inputs[0].grad = ts.Tensor(d_input)
class CrossEntropyLoss(Op):
EXPECTED_INPUTS_LENGTH: int = 2
EXPECTED_GRADS_LENGTH: int = 1
def __init__(self):
super().__init__()
self._loss = _ts.CrossEntropyLoss()
def forward(self, *inputs: Variable):
logits: Variable
labels: Variable
logits, labels = self._check_inputs(*inputs,
num=self.EXPECTED_INPUTS_LENGTH) # type: ignore
if len(self._inputs) == 0:
self._inputs.extend([logits, labels])
loss_value = self._loss.forward(logits.value.data, labels.value.data)
return Variable(ts.Tensor(loss_value), self)
def backward(self, *grads: ts.Tensor):
self._check_grads(*grads, num=self.EXPECTED_GRADS_LENGTH)
grad: _ts.MatrixF = self._loss.backward()
self.inputs[0].grad = ts.Tensor(grad)
def __str__(self):
return f"CrossEntropyLoss"
def relu(x: Variable):
op = ReLU()
return op(x)
def softmax(tensor: ts.Tensor) -> ts.Tensor:
return ts.Tensor(_ts.softmax(tensor.data))
def cross_entropy_loss(y: Variable, labels: Variable) -> Variable:
loss_fn = CrossEntropyLoss()
return loss_fn(y, labels)
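# Minimal end-to-end sketch (not part of the original module). The numpy-based
# ts.Tensor construction, the tensor shapes, and Variable.backward() as the
# entry point are assumptions based on the wrappers above.
def _demo_forward_backward():
    import numpy as np
    layer = Linear(dim_in=4, dim_out=2)
    x = Variable(ts.Tensor(np.random.rand(8, 4).astype(np.float32)))
    labels = Variable(ts.Tensor(np.array([0, 1, 0, 1, 0, 1, 0, 1], dtype=np.int32)))
    logits = relu(layer(x))          # Linear then ReLU, both tracked by the Ops
    loss = cross_entropy_loss(logits, labels)
    loss.backward()                  # assumed Variable entry point for the backward pass
    return loss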
|
StarcoderdataPython
|
5037368
|
<gh_stars>0
import route
if __name__ == '__main__':
route.run()
|
StarcoderdataPython
|
30965
|
import requests
from teste_app import settigns
def google(q: str):
"""Faz uma pesquisa no google"""
return requests.get(settigns.GOOGLE, params={"q": q})
|
StarcoderdataPython
|
1926895
|
#self._write_u8(_DRV2605_REG_AUDIOMAX, 0x64)
#self._write_u8(_DRV2605_REG_AUTOCALCOMP, 1) # = const(0x18)
#self._write_u8(_DRV2605_REG_AUTOCALEMP, 1) # = const(0x19)
#self._write_u8(_DRV2605_REG_FEEDBACK, 1) # = const(0x1A)
# G0832012 LRA
# rated voltage: 1.8 VrmsAC Sine
# input frequency: 235 Hz
# axis of vibration: Z (perp to surface -> parallel to user skin -> vibration toward user)
self._write_u8(_DRV2605_REG_MODE, 0x07) # into auto-cal
self.mode = MODE_AUTOCAL # auto-cal
self._write_u8(_DRV2605_REG_RATEDV, 0x12) # rated voltage (1.8V) !! CALCULATE ME !!
self._write_u8(_DRV2605_REG_CLAMPV, 0x19) # overdrive v (2.5V) !! CALCULATE ME !!
# want LRA closed loop instead
# need to auto-control resonant frequency
# maybe change DRIVE_TIME !! CALCULATE ME !!
        # 1/235 Hz => *1e3 = 4.26 ms period => *0.5 = 2.13 ms half-period | 0x15
# default: 1X0 10011
# ^000 00110
# want: 1X0 10101
control1 = self._read_u8(_DRV2605_REG_CONTROL1)
self._write_u8(_DRV2605_REG_CONTROL1, control1 ^ 0x06)
# maybe need to change LRA auto-resonance SAMPLE_TIME => default is 300 us
# 11 11 0101; 250us = 11 10 0101 = E5; 200us = 11 01 0101 = D5; 150us = 11 00 0101 = C5
#&11 10 0101
# 11 10 0101
#control2 = self._read_u8(_DRV2605_REG_CONTROL2)
#self._write_u8(_DRV2605_REG_CONTROL2, control2 & 0xE5)
# want LRA closed-loop default
# also maybe set LRA DRIVE MODE to twice per cycle => default is once per cycle
# set default thresh, closed-loop, default comp, default format, LRA twice cycle, default analog, default auto
# 10 1 0 0 0 0 0
#^00 1 0 0 1 0 0
# 10 0 0 0 1 0 0
control3 = self._read_u8(_DRV2605_REG_CONTROL3)
self._write_u8(_DRV2605_REG_CONTROL3, control3 ^ 0x24)
# maybe need to change auto calibration time => default 500ms - 700ms
# !!!!
# BE CAREFUL WITH CONTROL 4 AS IT HAS A _ONE TIME PROGRAM_ BIT => CANNOT REVERSE!!!!
# !!!!
#control4 = self._read_u8(_DRV2605_REG_CONTROL4)
#self._write_u8(_DRV2605_REG_CONTROL4, control4 | XxXX )
#self.autocal()
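        # Sketch of the DRIVE_TIME arithmetic this block hand-tunes (my reading of
        # the DRV2605 datasheet; verify against it before relying on this):
        #   drive_time_ms = 0.5 + 0.1 * DRIVE_TIME[4:0]
        #   => DRIVE_TIME = round((half_period_ms - 0.5) / 0.1), clamped to 5 bits
        #   for a 235 Hz LRA: half_period_ms = 1000.0 / 235 / 2 ~= 2.13 ms => ~0x10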
|
StarcoderdataPython
|
9643105
|
x = 10
print(x)
y = 5
print(x)
print(y)
my_name = "kittaphot_saeng"
age = 17
height = 162
weight = 89.2
print(my_name,age,height,weight)
|
StarcoderdataPython
|
6555117
|
"""empty message
Revision ID: e60c3d8b005
Revises: 52ae2f07ac3a
Create Date: 2014-12-09 15:39:16.272520
"""
# revision identifiers, used by Alembic.
revision = 'e60c3d8b005'
down_revision = '52ae2f07ac3a'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('case_files',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('kind', sa.Text(), nullable=True),
sa.Column('name', sa.Text(), nullable=True),
sa.Column('path', sa.Text(), nullable=True),
sa.Column('attributes', sa.Text(), nullable=True),
sa.Column('case_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['case_id'], ['cases.id'], ),
sa.PrimaryKeyConstraint('id'),
sqlite_autoincrement=True
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('case_files')
### end Alembic commands ###
|
StarcoderdataPython
|
8195668
|
exp_name = 'glean_in128out1024_4x2_300k_ffhq_celebahq'
scale = 8
# model settings
model = dict(
type='GLEAN',
generator=dict(
type='GLEANStyleGANv2',
in_size=128,
out_size=1024,
style_channels=512,
pretrained=dict(
ckpt_path='http://download.openmmlab.com/mmgen/stylegan2/'
'official_weights/stylegan2-ffhq-config-f-official_20210327'
'_171224-bce9310c.pth',
prefix='generator_ema')),
discriminator=dict(
type='StyleGAN2Discriminator',
in_size=1024,
pretrained=dict(
ckpt_path='http://download.openmmlab.com/mmgen/stylegan2/'
'official_weights/stylegan2-ffhq-config-f-official_20210327'
'_171224-bce9310c.pth',
prefix='discriminator')),
pixel_loss=dict(type='MSELoss', loss_weight=1.0, reduction='mean'),
perceptual_loss=dict(
type='PerceptualLoss',
layer_weights={'21': 1.0},
vgg_type='vgg16',
perceptual_weight=1e-2,
style_weight=0,
norm_img=True,
criterion='mse',
pretrained='torchvision://vgg16'),
gan_loss=dict(
type='GANLoss',
gan_type='vanilla',
loss_weight=1e-2,
real_label_val=1.0,
fake_label_val=0),
pretrained=None,
)
# model training and testing settings
train_cfg = None
test_cfg = dict(metrics=['PSNR'], crop_border=0)
# dataset settings
train_dataset_type = 'SRFolderDataset'
val_dataset_type = 'SRAnnotationDataset'
train_pipeline = [
dict(
type='LoadImageFromFile',
io_backend='disk',
key='gt',
channel_order='rgb'),
dict(type='RescaleToZeroOne', keys=['gt']),
dict(type='CopyValues', src_keys=['gt'], dst_keys=['lq']),
dict(
type='RandomBlur',
params=dict(
kernel_size=[41],
kernel_list=['iso', 'aniso'],
kernel_prob=[0.5, 0.5],
sigma_x=[0.2, 10],
sigma_y=[0.2, 10],
rotate_angle=[-3.1416, 3.1416],
),
keys=['lq'],
),
dict(
type='RandomResize',
params=dict(
resize_mode_prob=[0, 1, 0], # up, down, keep
resize_scale=[0.03125, 1],
resize_opt=['bilinear', 'area', 'bicubic'],
resize_prob=[1 / 3., 1 / 3., 1 / 3.]),
keys=['lq'],
),
dict(
type='RandomNoise',
params=dict(
noise_type=['gaussian'],
noise_prob=[1],
gaussian_sigma=[0, 50],
gaussian_gray_noise_prob=0),
keys=['lq'],
),
dict(
type='RandomJPEGCompression',
params=dict(quality=[5, 50]),
keys=['lq']),
dict(
type='RandomResize',
params=dict(
target_size=(1024, 1024),
resize_opt=['bilinear', 'area', 'bicubic'],
resize_prob=[1 / 3., 1 / 3., 1 / 3.]),
keys=['lq'],
),
dict(type='Quantize', keys=['lq']),
dict(
type='RandomResize',
params=dict(
target_size=(128, 128), resize_opt=['area'], resize_prob=[1]),
keys=['lq'],
),
dict(
type='Flip', keys=['lq', 'gt'], flip_ratio=0.5,
direction='horizontal'),
dict(
type='Normalize',
keys=['lq', 'gt'],
mean=[0.5, 0.5, 0.5],
std=[0.5, 0.5, 0.5],
),
dict(type='ImageToTensor', keys=['lq', 'gt']),
dict(type='Collect', keys=['lq', 'gt'], meta_keys=['gt_path'])
]
test_pipeline = [
dict(type='LoadImageFromFile', io_backend='disk', key='lq'),
dict(type='LoadImageFromFile', io_backend='disk', key='gt'),
dict(type='RescaleToZeroOne', keys=['lq', 'gt']),
dict(
type='Normalize',
keys=['lq', 'gt'],
mean=[0.5, 0.5, 0.5],
std=[0.5, 0.5, 0.5],
to_rgb=True),
dict(type='ImageToTensor', keys=['lq', 'gt']),
dict(type='Collect', keys=['lq', 'gt'], meta_keys=['lq_path', 'gt_path'])
]
demo_pipeline = [
dict(
type='RandomResize',
params=dict(
target_size=(128, 128), resize_opt=['area'], resize_prob=[1]),
keys=['lq'],
),
dict(type='RescaleToZeroOne', keys=['lq']),
dict(
type='Normalize',
keys=['lq'],
mean=[0.5, 0.5, 0.5],
std=[0.5, 0.5, 0.5],
to_rgb=True),
dict(type='ImageToTensor', keys=['lq']),
dict(type='Collect', keys=['lq'], meta_keys=[])
]
data = dict(
workers_per_gpu=6,
train_dataloader=dict(samples_per_gpu=2, drop_last=True), # 4 gpus
val_dataloader=dict(samples_per_gpu=1, persistent_workers=False),
test_dataloader=dict(samples_per_gpu=1),
train=dict(
type='RepeatDataset',
times=30,
dataset=dict(
type=train_dataset_type,
lq_folder='data/FFHQ_CelebAHQ/GT',
gt_folder='data/FFHQ_CelebAHQ/GT',
pipeline=train_pipeline,
scale=scale)),
val=dict(
type=val_dataset_type,
lq_folder='data/CelebA-HQ/BIx8_down',
gt_folder='data/CelebA-HQ/GT',
ann_file='data/CelebA-HQ/meta_info_CelebAHQ_val100_GT.txt',
pipeline=test_pipeline,
scale=scale),
test=dict(
type=val_dataset_type,
lq_folder='data/CelebA-HQ/BIx8_down',
gt_folder='data/CelebA-HQ/GT',
ann_file='data/CelebA-HQ/meta_info_CelebAHQ_val100_GT.txt',
pipeline=test_pipeline,
scale=scale))
# optimizer
optimizers = dict(
generator=dict(type='Adam', lr=1e-4, betas=(0.9, 0.99)),
discriminator=dict(type='Adam', lr=1e-4, betas=(0.9, 0.99)))
# learning policy
total_iters = 300000
lr_config = dict(
policy='CosineRestart',
by_epoch=False,
periods=[300000],
restart_weights=[1],
min_lr=1e-7)
checkpoint_config = dict(interval=5000, save_optimizer=True, by_epoch=False)
evaluation = dict(interval=5000, save_image=False, gpu_collect=True)
log_config = dict(
interval=100,
hooks=[
dict(type='TextLoggerHook', by_epoch=False),
# dict(type='TensorboardLoggerHook'),
])
visual_config = None
# runtime settings
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = f'./work_dirs/{exp_name}'
load_from = None
resume_from = None
workflow = [('train', 1)]
find_unused_parameters = True
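# Assumed launch command for an mmediting-style repo layout (not part of the
# config; adjust the path to wherever this file lives):
#     python tools/train.py configs/restorers/glean/glean_in128out1024_4x2_300k_ffhq_celebahq.py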
|
StarcoderdataPython
|
249930
|
<gh_stars>0
import plotly_express as px
import pandas as pd
basic_plot = px.line(x=[1,2,3,4,5],
y=[2,3,4,5,6],
title="Basic line plots")
basic_plot.show()
|
StarcoderdataPython
|
304232
|
import os
import time
import random
import logging
import traceback
import subprocess
from multiprocessing import Pool
class GymFCMiner:
def __init__(self):
pass
def run_sh(self, args: tuple):
command = args[0]
        if len(args) == 2:
thread_name = args[1]
else:
thread_name = 'Thread #0'
print('[{}] >>> user@pc:\033[0m \033[34m{}\033[0m'.format(thread_name, command))
try:
output = subprocess.check_output(command, shell=True).decode().strip()
return output
except (BaseException, Exception):
return traceback.format_exc()
if __name__ == '__main__':
dir_path = os.path.dirname(os.path.abspath(__file__))
log_filename = "logs/miner_{}.log".format(time.strftime('%H_%M_%S'))
log_filename = os.path.join(dir_path, log_filename)
# format='[LINE:%(lineno)d][%(asctime)s] %(message)s',
logging.basicConfig(filename=log_filename, filemode='w',
format='%(message)s',
level=logging.INFO)
logging.info('Logging initialized in file "{}"'.format(log_filename))
dataminer = GymFCMiner()
PROCESSES = 10
    SEEDS_IN_PROCESSES = 5
w_filenames = ['mined/thread{}_{}.csv'.format(thind, time.strftime('%H_%M_%S')) for thind in range(PROCESSES)]
seeds = []
    while len(seeds) < PROCESSES:
        seed_start = random.randint(0, 10000000)
        seed_end = seed_start + SEEDS_IN_PROCESSES
        # accept only seed ranges that do not overlap any range picked so far
        if all([seed_start < s[0] or seed_start > s[1] for s in seeds]):
            if all([seed_end < s[0] or seed_end > s[1] for s in seeds]):
                seeds.append((seed_start, seed_end))
commands = ['python3 dataminer.py --seed-from {} --seed-to {} --w-file {}'.format(
seeds[i][0], seeds[i][1], w_filenames[i]) for i in range(PROCESSES)]
pool_args = []
for ind in range(PROCESSES):
pool_args.append((commands[ind], 'Thread #{}'.format(ind),))
with Pool(PROCESSES) as pool:
logging.info('Pool initialized')
stdout = pool.map(dataminer.run_sh, pool_args)
logging.info('Pool closed')
logging.info('Mining ended')
|
StarcoderdataPython
|
1792552
|
import gumtreescraper
from gumtreescraper import SearchListing
from gumtreescraper import SearchAd
search = SearchListing()
searchResult = search.doSearch()
print (searchResult)
for i in searchResult:
ad = SearchAd(i.url)
ad.parsAd()
print(i)
|
StarcoderdataPython
|
9772712
|
<reponame>ZhiangChen/FCN.tensorflow
"""
Define Constants
"""
CAFFE_ROOT = '/home/huxley/caffe/'
# Params
PERCENTAGE_TRAIN = 0.2
NUM_TRAIN_PER_IMAGE = 100
CROP_WIDTH = 240
CROP_HEIGHT = 320
""" BLOB CONSTANTS """
BLOB_DATA_ROOT = '/notebooks/FCN.tensorflow/dataroot/'
BLOB_MODEL_ROOT = '/notebooks/FCN.tensorflow/dataroot/'
DATA_IMAGE_DIR = BLOB_DATA_ROOT + 'Oranges/'
DATA_LABELS_DIR = BLOB_DATA_ROOT + 'OrangesLabel/'
BLOB_TRAIN_IMAGE_DIR = BLOB_MODEL_ROOT + 'images/training/'
BLOB_TRAIN_LABELS_DIR = BLOB_MODEL_ROOT + 'annotations/training/'
BLOB_TEST_IMAGE_DIR = BLOB_MODEL_ROOT + 'images/validation/'
BLOB_TEST_LABELS_DIR = BLOB_MODEL_ROOT + 'annotations/validation/'
BLOB_TRAIN_INFO = BLOB_MODEL_ROOT + 'train_info.p'
BLOB_FILE_WITH_TRAIN_INDICES = BLOB_MODEL_ROOT + 'train.txt'
BLOB_FILE_WITH_TEST_INDICES = BLOB_MODEL_ROOT + 'test.txt'
BLOB_COST_MAP_DIR = BLOB_MODEL_ROOT + 'cost_map/'
BLOB_WORKDIR = '/home/huxley/fcn_model_files/orange/blob_model/'
BLOB_MODEL_NAME = 'blob_orange.caffemodel'
# Image Parameters
#BLOB_PICTURE_MEAN = (197.471060768, 219.051099514, 163.143913032)
BLOB_PICTURE_MEAN = (101.085444336, 113.0388712566, 82.5194905598)
# Solver Parameters
BLOB_SOLVER_DISPLAY = "20"
BLOB_SOLVER_AVERAGE_LOSS = "20"
BLOB_SOLVER_BASE_LR = "1e-10"
BLOB_SOLVER_MOMENTUM = "0.99"
BLOB_SOLVER_DEBUG_INFO = "false"
# Random Initial Model Parameters
BLOB_RANDOM_INITIAL_TRAIN = BLOB_MODEL_ROOT + 'random_train.prototxt'
BLOB_RANDOM_INITIAL_VAL = BLOB_MODEL_ROOT + 'random_val.prototxt'
BLOB_RANDOM_INITIAL_SOLVER = BLOB_MODEL_ROOT + 'random_solve.prototxt'
BLOB_RANDOM_INITIAL_MODEL = BLOB_MODEL_ROOT + 'random_model.caffemodel'
# Actual Model Parameters
BLOB_ACTUAL_TRAIN = BLOB_MODEL_ROOT + 'trainnet.prototxt'
BLOB_ACTUAL_VAL = BLOB_MODEL_ROOT + 'valnet.prototxt'
BLOB_ACTUAL_MODEL = BLOB_MODEL_ROOT + BLOB_MODEL_NAME
BLOB_ACTUAL_SOLVER = BLOB_MODEL_ROOT + 'solver.prototxt'
BLOB_SURGERY_MODEL_ORIGIN = BLOB_MODEL_ROOT + 'surgery_origin/fcn8s-heavy-pascal.caffemodel'
# Train Parameters
BLOB_NUM_ITERATIONS = 500
BLOB_NUM_STEPS = 100
# Test Parameters
BLOB_NUMPY_SAVE_FILE_DIR = BLOB_MODEL_ROOT + 'scores/'
BLOB_SCORE_IMAGE_DIR = BLOB_MODEL_ROOT + 'score_images/'
#BLOB_INFER_MODEL = BLOB_MODEL_ROOT + BLOB_MODEL_NAME
BLOB_INFER_MODEL = '/home/huxley/fcn_segmentation/code/python/blob_code/orange_models/snapshot_iter_50000.caffemodel'
BLOB_MODEL_DEPLOY = BLOB_MODEL_ROOT + 'deploy.prototxt'
# Analysis Parameters
BLOB_PICKLE_SAVE_LOCATION = BLOB_MODEL_ROOT + 'blob_analysis.p' # File to save data
BLOB_OVER_TIME_SAVE_LOCATION = BLOB_MODEL_ROOT + 'blob_over_time.p'
|
StarcoderdataPython
|
5030686
|
<reponame>mssung94/daishin-trading-system<filename>tutorial/33.py
# Daishin Securities API
# Real-time stock balance view (current price and order executions reflected live)
# This example uses the PLUS API to query the stock balance and track current-price
# and order-execution changes in real time.
# ■ Classes used
# ▥ CpEvent - real-time event receiver (handles current price and order executions)
# ▥ Cp6033 - stock balance query
# ▥ CpRPCurrentPrice - current price query for a single symbol
# ▥ CpMarketEye - current price query for multiple symbols
# ■ Usage
# After querying the balance and placing buy or sell orders, you can verify in
# real time whether they are reflected in the balance.
#
# ※ Note: this example is provided only to help with PLUS usage.
# It also handles cash balances only; margin balances etc. require separate code.
import sys
from PyQt5.QtWidgets import *
import win32com.client
import ctypes
################################################
# PLUS common objects
g_objCodeMgr = win32com.client.Dispatch('CpUtil.CpCodeMgr')
g_objCpStatus = win32com.client.Dispatch('CpUtil.CpCybos')
g_objCpTrade = win32com.client.Dispatch('CpTrade.CpTdUtil')
################################################
# Basic sanity checks before using PLUS
def InitPlusCheck():
    # is the process running with administrator rights?
    if ctypes.windll.shell32.IsUserAnAdmin():
        print('OK: process is running with administrator rights.')
    else:
        print('Error: running with normal rights. Please run as administrator.')
        return False
    # check the connection
    if (g_objCpStatus.IsConnect == 0):
        print("PLUS is not connected properly.")
        return False
    # initialize order handling
    if (g_objCpTrade.TradeInit(0) != 0):
        print("Order initialization failed")
        return False
    return True
################################################
# CpEvent: real-time event receiver class
class CpEvent:
def set_params(self, client, name, caller):
        self.client = client  # CP real-time communication object
        self.name = name  # name used to tell this service's events apart
        self.caller = caller  # kept for the callback
        # flag codes: dictionaries map the raw codes to display text
self.dicflag12 = {'1': '매도', '2': '매수'}
self.dicflag14 = {'1': '체결', '2': '확인', '3': '거부', '4': '접수'}
self.dicflag15 = {'00': '현금', '01': '유통융자', '02': '자기융자', '03': '유통대주',
'04': '자기대주', '05': '주식담보대출', '07': '채권담보대출',
'06': '매입담보대출', '08': '플러스론',
'13': '자기대용융자', '15': '유통대용융자'}
self.dicflag16 = {'1': '정상주문', '2': '정정주문', '3': '취소주문'}
self.dicflag17 = {'1': '현금', '2': '신용', '3': '선물대용', '4': '공매도'}
self.dicflag18 = {'01': '보통', '02': '임의', '03': '시장가', '05': '조건부지정가'}
self.dicflag19 = {'0': '없음', '1': 'IOC', '2': 'FOK'}
def OnReceived(self):
        # real-time handling - current-price tick
        if self.name == 'stockcur':
            code = self.client.GetHeaderValue(0)  # symbol code
            name = self.client.GetHeaderValue(1)  # symbol name
            timess = self.client.GetHeaderValue(18)  # time (sec)
            exFlag = self.client.GetHeaderValue(19)  # projected-execution flag
            cprice = self.client.GetHeaderValue(13)  # current price
            diff = self.client.GetHeaderValue(2)  # change vs previous close
            cVol = self.client.GetHeaderValue(17)  # instantaneous traded volume
            vol = self.client.GetHeaderValue(9)  # cumulative volume
            item = {}
            item['code'] = code
            # rpName = self.objRq.GetDataValue(1, i)  # symbol name
            # rpDiffFlag = self.objRq.GetDataValue(3, i)  # change sign
            item['diff'] = diff
            item['cur'] = cprice
            item['vol'] = vol
            # update the current price
            self.caller.updateJangoCurPBData(item)
        # real-time handling - order execution
        elif self.name == 'conclution':
            # real-time order-execution update
            conc = {}
            # execution flag
            conc['체결플래그'] = self.dicflag14[self.client.GetHeaderValue(14)]
            conc['주문번호'] = self.client.GetHeaderValue(5)  # order number
            conc['주문수량'] = self.client.GetHeaderValue(3)  # order/executed quantity
            conc['주문가격'] = self.client.GetHeaderValue(4)  # order/executed price
            conc['원주문'] = self.client.GetHeaderValue(6)
            conc['종목코드'] = self.client.GetHeaderValue(9)  # symbol code
            conc['종목명'] = g_objCodeMgr.CodeToName(conc['종목코드'])
            conc['매수매도'] = self.dicflag12[self.client.GetHeaderValue(12)]
            flag15 = self.client.GetHeaderValue(15)  # credit-loan type code
            if (flag15 in self.dicflag15):
                conc['신용대출'] = self.dicflag15[flag15]
            else:
                conc['신용대출'] = '기타'
            conc['정정취소'] = self.dicflag16[self.client.GetHeaderValue(16)]
            conc['현금신용'] = self.dicflag17[self.client.GetHeaderValue(17)]
            conc['주문조건'] = self.dicflag19[self.client.GetHeaderValue(19)]
            conc['체결기준잔고수량'] = self.client.GetHeaderValue(23)
            loandate = self.client.GetHeaderValue(20)
            if (loandate == 0):
                conc['대출일'] = ''
            else:
                conc['대출일'] = str(loandate)
            flag18 = self.client.GetHeaderValue(18)
            if (flag18 in self.dicflag18):
                conc['주문호가구분'] = self.dicflag18[flag18]
            else:
                conc['주문호가구분'] = '기타'
            conc['장부가'] = self.client.GetHeaderValue(21)
            conc['매도가능수량'] = self.client.GetHeaderValue(22)
            print(conc)
            self.caller.updateJangoCont(conc)
        return
################################################
# PLUS real-time subscription base class
class CpPublish:
def __init__(self, name, serviceID):
self.name = name
self.obj = win32com.client.Dispatch(serviceID)
self.bIsSB = False
def Subscribe(self, var, caller):
if self.bIsSB:
self.Unsubscribe()
if (len(var) > 0):
self.obj.SetInputValue(0, var)
handler = win32com.client.WithEvents(self.obj, CpEvent)
handler.set_params(self.obj, self.name, caller)
self.obj.Subscribe()
self.bIsSB = True
def Unsubscribe(self):
if self.bIsSB:
self.obj.Unsubscribe()
self.bIsSB = False
################################################
# CpPBStockCur: real-time current-price subscription class
class CpPBStockCur(CpPublish):
def __init__(self):
super().__init__('stockcur', 'DsCbo1.StockCur')
################################################
# CpPBConclusion: real-time order-execution subscription class
class CpPBConclusion(CpPublish):
def __init__(self):
super().__init__('conclution', 'DsCbo1.CpConclusion')
################################################
# Cp6033: stock balance query
class Cp6033:
def __init__(self):
        acc = g_objCpTrade.AccountNumber[0]  # account number
        accFlag = g_objCpTrade.GoodsList(acc, 1)  # product types for equities
        print(acc, accFlag[0])
        self.objRq = win32com.client.Dispatch("CpTrade.CpTd6033")
        self.objRq.SetInputValue(0, acc)  # account number
        self.objRq.SetInputValue(1, accFlag[0])  # product type - first of the equity products
        self.objRq.SetInputValue(2, 50)  # number of records requested (max 50)
self.dicflag1 = {ord(' '): '현금',
ord('Y'): '융자',
ord('D'): '대주',
ord('B'): '담보',
ord('M'): '매입담보',
ord('P'): '플러스론',
ord('I'): '자기융자',
}
    # the actual 6033 request/response handling
    def requestJango(self, caller):
        while True:
            self.objRq.BlockRequest()
            # communication and error handling
            rqStatus = self.objRq.GetDibStatus()
            rqRet = self.objRq.GetDibMsg1()
            print("communication status", rqStatus, rqRet)
            if rqStatus != 0:
                return False
            cnt = self.objRq.GetHeaderValue(7)
            print(cnt)
            for i in range(cnt):
                item = {}
                code = self.objRq.GetDataValue(12, i)  # symbol code
                item['종목코드'] = code
                item['종목명'] = self.objRq.GetDataValue(0, i)  # symbol name
                item['현금신용'] = self.dicflag1[self.objRq.GetDataValue(1, i)]  # credit type
                print(code, '현금신용', item['현금신용'])
                item['대출일'] = self.objRq.GetDataValue(2, i)  # loan date
                item['잔고수량'] = self.objRq.GetDataValue(7, i)  # executed balance quantity
                item['매도가능'] = self.objRq.GetDataValue(15, i)
                item['장부가'] = self.objRq.GetDataValue(17, i)  # book price per share
                # item['평가금액'] = self.objRq.GetDataValue(9, i)  # valuation (truncated below 1000 won)
                # item['평가손익'] = self.objRq.GetDataValue(11, i)  # valuation P&L (truncated below 1000 won)
                # purchase amount = book price * balance quantity
                item['매입금액'] = item['장부가'] * item['잔고수량']
                item['현재가'] = 0
                item['대비'] = 0
                item['거래량'] = 0
                # add to the balance map
                # key = (code, item['현금신용'], item['대출일'])
                key = code
                caller.jangoData[key] = item
                if len(caller.jangoData) >= 200:  # cap at 200 symbols
                    break
            if len(caller.jangoData) >= 200:
                break
            if (self.objRq.Continue == False):
                break
        return True
################################################
# current price - single-symbol request
class CpRPCurrentPrice:
def __init__(self):
self.objStockMst = win32com.client.Dispatch('DsCbo1.StockMst')
return
def Request(self, code, caller):
self.objStockMst.SetInputValue(0, code)
ret = self.objStockMst.BlockRequest()
        if self.objStockMst.GetDibStatus() != 0:
            print('communication status', self.objStockMst.GetDibStatus(), self.objStockMst.GetDibMsg1())
            return False
        item = {}
        item['code'] = code
        # caller.curData['종목명'] = g_objCodeMgr.CodeToName(code)
        item['cur'] = self.objStockMst.GetHeaderValue(11)  # last price
        item['diff'] = self.objStockMst.GetHeaderValue(12)  # change vs previous close
        item['vol'] = self.objStockMst.GetHeaderValue(18)  # volume
        caller.curDatas[code] = item
        '''
        caller.curData['기준가'] = self.objStockMst.GetHeaderValue(27)  # reference price
        caller.curData['예상플래그'] = self.objStockMst.GetHeaderValue(58)  # projected flag
        caller.curData['예상체결가'] = self.objStockMst.GetHeaderValue(55)  # projected execution price
        caller.curData['예상대비'] = self.objStockMst.GetHeaderValue(56)  # projected change
        # 10 levels of the order book
        for i in range(10):
            key1 = '매도호가%d' % (i + 1)
            key2 = '매수호가%d' % (i + 1)
            caller.curData[key1] = (self.objStockMst.GetDataValue(0, i))  # ask price
            caller.curData[key2] = (self.objStockMst.GetDataValue(1, i))  # bid price
        '''
return True
################################################
# CpMarketEye: current-price request for multiple symbols
class CpMarketEye:
def __init__(self):
        # request field array - symbol code, time, change sign, change, current price, volume, symbol name
        self.rqField = [0, 1, 2, 3, 4, 10, 17]  # request fields
        # get the watch-list object
        self.objRq = win32com.client.Dispatch("CpSysDib.MarketEye")
    def Request(self, codes, caller):
        # set the request fields - symbol code, symbol name, time, change sign, change, current price, volume
        self.objRq.SetInputValue(0, self.rqField)  # request fields
        self.objRq.SetInputValue(1, codes)  # symbol code or list of symbol codes
        self.objRq.BlockRequest()
        # current-price communication and error handling
        rqStatus = self.objRq.GetDibStatus()
        rqRet = self.objRq.GetDibMsg1()
        print("communication status", rqStatus, rqRet)
        if rqStatus != 0:
            return False
        cnt = self.objRq.GetHeaderValue(2)
        for i in range(cnt):
            item = {}
            item['code'] = self.objRq.GetDataValue(0, i)  # code
            # rpName = self.objRq.GetDataValue(1, i)  # symbol name
            # rpDiffFlag = self.objRq.GetDataValue(3, i)  # change sign
            item['diff'] = self.objRq.GetDataValue(3, i)  # change
            item['cur'] = self.objRq.GetDataValue(4, i)  # current price
            item['vol'] = self.objRq.GetDataValue(5, i)  # volume
caller.curDatas[item['code']] = item
return True
################################################
# main window for testing
class MyWindow(QMainWindow):
def __init__(self):
super().__init__()
        # check PLUS status
        if InitPlusCheck() == False:
            exit()
        self.setWindowTitle("Stock balance (real-time) example")
        self.setGeometry(300, 300, 300, 180)
        # 6033 balance object
        self.obj6033 = Cp6033()
        self.jangoData = {}
        self.isSB = False
        self.objCur = {}
        # current-price data
        self.curDatas = {}
        self.objRPCur = CpRPCurrentPrice()
        # real-time order execution
        self.objConclusion = CpPBConclusion()
        nH = 20
        btnExcel = QPushButton('Export to Excel', self)
        btnExcel.move(20, nH)
        btnExcel.clicked.connect(self.btnExcel_clicked)
        nH += 50
        btnPrint = QPushButton('Print balance', self)
        btnPrint.move(20, nH)
        btnPrint.clicked.connect(self.btnPrint_clicked)
        nH += 50
        btnExit = QPushButton('Exit', self)
        btnExit.move(20, nH)
        btnExit.clicked.connect(self.btnExit_clicked)
        nH += 50
        # request the balance
        self.requestJango()
def StopSubscribe(self):
if self.isSB:
for key, obj in self.objCur.items():
obj.Unsubscribe()
self.objCur = {}
self.isSB = False
self.objConclusion.Unsubscribe()
    def requestJango(self):
        self.StopSubscribe()
        # stock balance request
        if self.obj6033.requestJango(self) == False:
            return
        # current prices for the balance symbols
        codes = set()
        for code, value in self.jangoData.items():
            codes.add(code)
        objMarkeyeye = CpMarketEye()
        codelist = list(codes)
        if (objMarkeyeye.Request(codelist, self) == False):
            exit()
        # subscribe to real-time current prices
        cnt = len(codelist)
        for i in range(cnt):
            code = codelist[i]
            self.objCur[code] = CpPBStockCur()
            self.objCur[code].Subscribe(code, self)
        self.isSB = True
        # subscribe to real-time order executions
        self.objConclusion.Subscribe('', self)
def btnExcel_clicked(self):
return
    def btnPrint_clicked(self):
        print('Balance')
        for code, value in self.jangoData.items():
            print(code, value)
        print('Symbols with live current-price subscriptions')
        for key, obj in self.objCur.items():
            print(key)
        return
def btnExit_clicked(self):
self.StopSubscribe()
exit()
return
    # real-time order-execution handling
    def updateJangoCont(self, pbCont):
        # map the credit type from the execution event to the balance credit code
dicBorrow = {
'현금': ord(' '),
'유통융자': ord('Y'),
'자기융자': ord('Y'),
'주식담보대출': ord('B'),
'채권담보대출': ord('B'),
'매입담보대출': ord('M'),
'플러스론': ord('P'),
'자기대용융자': ord('I'),
'유통대용융자': ord('I'),
'기타': ord('Z')
}
        # key for the balance map
        # key = (pbCont['종목코드'], dicBorrow[pbCont['현금신용']], pbCont['대출일'])
        # key = pbCont['종목코드']
        code = pbCont['종목코드']
        # accepted/rejected/confirmed events only update the sellable quantity
        if pbCont['체결플래그'] == '접수' or pbCont['체결플래그'] == '거부' or pbCont['체결플래그'] == '확인':
            if (code not in self.jangoData):
                return
            self.jangoData[code]['매도가능'] = pbCont['매도가능수량']
            return
        if (pbCont['체결플래그'] == '체결'):
            if (code not in self.jangoData):  # new balance entry
                if (pbCont['체결기준잔고수량'] == 0):
                    return
                print('adding new balance entry', code)
                # add the new balance entry
                item = {}
                item['종목코드'] = pbCont['종목코드']
                item['종목명'] = pbCont['종목명']
                item['현금신용'] = dicBorrow[pbCont['현금신용']]
                item['대출일'] = pbCont['대출일']
                item['잔고수량'] = pbCont['체결기준잔고수량']
                item['매도가능'] = pbCont['매도가능수량']
                item['장부가'] = pbCont['장부가']
                # purchase amount = book price * balance quantity
                item['매입금액'] = item['장부가'] * item['잔고수량']
                print('requesting current price for new entry', code)
                self.objRPCur.Request(code, self)
                self.objCur[code] = CpPBStockCur()
                self.objCur[code].Subscribe(code, self)
                item['현재가'] = self.curDatas[code]['cur']
                item['대비'] = self.curDatas[code]['diff']
                item['거래량'] = self.curDatas[code]['vol']
                self.jangoData[code] = item
            else:
                # update the existing balance entry
                item = self.jangoData[code]
                item['종목코드'] = pbCont['종목코드']
                item['종목명'] = pbCont['종목명']
                item['현금신용'] = dicBorrow[pbCont['현금신용']]
                item['대출일'] = pbCont['대출일']
                item['잔고수량'] = pbCont['체결기준잔고수량']
                item['매도가능'] = pbCont['매도가능수량']
                item['장부가'] = pbCont['장부가']
                # purchase amount = book price * balance quantity
                item['매입금액'] = item['장부가'] * item['잔고수량']
                # remove the entry when the balance quantity drops to zero
                if item['잔고수량'] == 0:
                    del self.jangoData[code]
                    self.objCur[code].Unsubscribe()
                    del self.objCur[code]
        return
    # real-time current-price handling
def updateJangoCurPBData(self, curData):
code = curData['code']
self.curDatas[code] = curData
self.upjangoCurData(code)
def upjangoCurData(self, code):
        # find the matching symbol in the balance and update it - current price/change/volume/valuation/P&L
curData = self.curDatas[code]
item = self.jangoData[code]
item['현재가'] = curData['cur']
item['대비'] = curData['diff']
item['거래량'] = curData['vol']
if __name__ == "__main__":
app = QApplication(sys.argv)
myWindow = MyWindow()
myWindow.show()
app.exec_()
|
StarcoderdataPython
|
4828694
|
<gh_stars>0
import Camera
class Configuration(object):
def __init__(self, filename):
import xml.etree.ElementTree as ET
configxml = ET.parse(filename)
self.data = {}
self.data["name"] = configxml.find("name").text
self.data["video_len"] = int(configxml.find("video/length").text)
self.data["width"] = int(configxml.find("video/width").text)
self.data["height"] = int(configxml.find("video/height").text)
self.data["minfreespace"] = int(configxml.find("video/minfreespace").text)
self.data["storage_path"] = configxml.find("video/storage").text
self.data["webport"] = int(configxml.find("webport").text)
if not self.data["storage_path"].endswith("/"):
self.data["storage_path"] += "/"
self.data["cameras"] = Camera.load_cameras(filename)
def __str__(self):
ret = "System Name: {0}\n".format(self.data["name"])
ret += "Web Interface port: {0}\n".format(self.data["webport"])
ret += "Video Length: {0} min.\n".format(self.data["video_len"])
ret += "Width x Height: {0}x{1}\n".format(self.data["width"], self.data["height"])
ret += "Sorage Path: {0}\n".format(self.data["storage_path"])
ret += "Minimal Free Disk Space: {0} GB\n".format(self.data["minfreespace"])
ret += "Cameras:\n"
for cam in self.data["cameras"]:
ret += "\tName: {0}\n".format(cam.name)
ret += "\tSource: {0}\n\n".format(cam.source)
return ret
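# Minimal usage sketch (the XML schema is inferred from the fields read above;
# the filename is hypothetical):
#     cfg = Configuration("config.xml")
#     print(cfg)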
|
StarcoderdataPython
|
391917
|
from tkinter import *
root = Tk()
root.title("calculator")
root.geometry('280x430')
root.resizable(width=0, height=0)
root.iconbitmap('calc.ico')
root.iconbitmap()
e = Entry(root, width=35, borderwidth=5)
e.grid(row=0, column=0, columnspan=3, padx=10, pady=10)
# click function
def button_click(number):
#e.delete(0, END)
current = e.get()
e.delete(0, END)
e.insert(0, str(current) + str(number))
# clear function
def button_clear():
e.delete(0, END)
# add function
def button_add():
first_number = e.get()
global f_num
global math
math = 'add'
f_num = int(first_number)
e.delete(0, END)
# equal function
def button_equal():
second_number = e.get()
e.delete(0, END)
if math == "add":
e.insert(0, f_num + int(second_number))
elif math == "sub":
e.insert(0, f_num - int(second_number))
elif math == "mul":
e.insert(0, f_num * int(second_number))
else:
e.insert(0, f_num / int(second_number))
# subtraction function
def button_sub():
first_number = e.get()
global f_num
global math
math = 'sub'
f_num = int(first_number)
e.delete(0, END)
# multiplication function
def button_mul():
first_number = e.get()
global f_num
global math
math = 'mul'
f_num = int(first_number)
e.delete(0, END)
# division function
def button_div():
first_number = e.get()
global f_num
global math
math = 'div'
f_num = int(first_number)
e.delete(0, END)
button_1 = Button(root, text="1", padx=40, pady=20,
command=lambda: button_click(1))
button_2 = Button(root, text="2", padx=40, pady=20,
command=lambda: button_click(2))
button_3 = Button(root, text="3", padx=40, pady=20,
command=lambda: button_click(3))
button_4 = Button(root, text="4", padx=40, pady=20,
command=lambda: button_click(4))
button_5 = Button(root, text="5", padx=40, pady=20,
command=lambda: button_click(5))
button_6 = Button(root, text="6", padx=40, pady=20,
command=lambda: button_click(6))
button_7 = Button(root, text="7", padx=40, pady=20,
command=lambda: button_click(7))
button_8 = Button(root, text="8", padx=40, pady=20,
command=lambda: button_click(8))
button_9 = Button(root, text="9", padx=40, pady=20,
command=lambda: button_click(9))
button_0 = Button(root, text="0", padx=40, pady=20,
command=lambda: button_click(0))
button_add = Button(root, text="+", padx=39, pady=20,
command=button_add)
button_equal = Button(root, text="=", padx=87, pady=20,
command=button_equal)
button_clear = Button(root, text="clear", padx=79,
pady=20, command=button_clear)
button_subtract = Button(root, text="-", padx=40, pady=20,
command=button_sub)
button_multiply = Button(root, text="X", padx=39, pady=20,
command=button_mul)
button_divide = Button(root, text="/", padx=41, pady=20,
command=button_div)
# put buttons on screen
button_1.grid(row=3, column=0)
button_2.grid(row=3, column=1)
button_3.grid(row=3, column=2)
button_4.grid(row=2, column=0)
button_5.grid(row=2, column=1)
button_6.grid(row=2, column=2)
button_7.grid(row=1, column=0)
button_8.grid(row=1, column=1)
button_9.grid(row=1, column=2)
button_0.grid(row=4, column=0)
button_clear.grid(row=4, column=1, columnspan=2)
button_add.grid(row=5, column=0)
button_equal.grid(row=5, column=1, columnspan=2)
button_subtract.grid(row=6, column=0)
button_multiply.grid(row=6, column=1)
button_divide.grid(row=6, column=2)
root.mainloop()
|
StarcoderdataPython
|
1762111
|
<filename>src/batch/app/ProducerConsumer.py<gh_stars>0
import json
from elasticsearch import Elasticsearch
from kafka import KafkaConsumer
consumer = KafkaConsumer('new-posts',
group_id='creating-posts',
bootstrap_servers=['kafka:9092'])
esbody = {
"mappings": {
"post": {
"properties": {
"title": {
"type": "text"
},
"details": {
"type": "text"
},
"zip_code": {
"type": "integer"
},
"user": {
"type": "integer"
},
"date_created": {
"type": "date"
},
"deadline": {
"type": "date"
},
"preferred_contact": {
"type": "text"
},
"category": {
"type": "text"
},
"request_type": {
"type": "text"
}
}
}
}
}
es = Elasticsearch(['elasticsearch'])
x = es.indices.create(index="models", body=esbody, ignore=[400, 404])
# print(str(x))
while (True):
for message in consumer:
tmp = json.loads((message.value).decode('utf-8'))
post_id = tmp['id']
post = tmp['post']
es.index(index="models", id=post_id, body=post, doc_type="post")
es.indices.refresh(index="models")
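# Sketch of the matching producer side (runs in a separate service; not part of
# this consumer module). The topic name and message shape are inferred from the
# loop above; everything else is an assumption.
#
#     from kafka import KafkaProducer
#     producer = KafkaProducer(bootstrap_servers=['kafka:9092'])
#     payload = json.dumps({'id': 42, 'post': {'title': 'Need a hand',
#                                              'details': 'Help me move boxes',
#                                              'zip_code': 30602,
#                                              'user': 7}}).encode('utf-8')
#     producer.send('new-posts', payload)
#     producer.flush()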
|
StarcoderdataPython
|
3294053
|
<filename>tests/hooks/test_get_app.py
from ocean_spark.hooks import OceanSparkHook
from unittest.mock import MagicMock
def test_get_app(successful_get_app: None, get_connection_mock: None) -> None:
hook = OceanSparkHook()
app_dict = hook.get_app("test-app-name")
assert app_dict is not None
assert app_dict["displayName"] == "test app name"
|
StarcoderdataPython
|
3335967
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
import argparse
import sys
import os
import numpy as np
import modules.segmentation_points as segment
from modules.data_set import DataSet
from modules.segmentation_kind import *
from modules.sensors import sensors
from modules.constants import *
from plotter import main_impl as plotter_main
from modules.euclidean_norm import euclidean_norm
def validate_sensor(sensor):
if sensor not in sensors():
print(f"segment.py: {sensor} is not a valid sensor.", file=sys.stderr)
return False
return True
def validate(csv_file_path, sensor, imu, segmentation_kind, window_size):
if not os.path.isfile(csv_file_path):
print(f"segment.py: \"{csv_file_path}\" is not a file.", file=sys.stderr)
return False
if not validate_sensor(sensor):
return False
if imu not in [accelerometer_string(), gyroscope_string()]:
print(f"segment.py: {imu} is not a valid IMU.", file=sys.stderr)
return False
try:
segmentation_kind_from_str(segmentation_kind)
except Exception:
print(
f"segment.py: \"{segmentation_kind}\" is not a valid segmentation kind.",
file=sys.stderr)
return False
if window_size < 3:
print(
f"segment.py: window size was {window_size}, but must be at least 3!",
file=sys.stderr)
return False
if not window_size % 2 == 1:
print(
f"segment.py: window size {window_size} is an even number, but an odd number is required.",
file=sys.stderr)
return False
return True
def delete_segmentation_points_if(segmentation_points, condition):
maximum_index = len(segmentation_points) - 1
current_index = 1
while current_index != maximum_index + 1:
previous_segmentation_point = segmentation_points[current_index - 1]
current_segmentation_point = segmentation_points[current_index]
if condition(previous_segmentation_point, current_segmentation_point):
segmentation_points.pop(current_index)
maximum_index -= 1
else:
current_index += 1
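# Illustrative use of the predicate hook above (hypothetical threshold): e.g.
#     delete_segmentation_points_if(points, lambda prev, cur: cur - prev < 100)
# drops any point closer than 100 samples to its predecessor; the two helpers
# below follow exactly this pattern.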
def delete_too_close_segmenting_hardware_timestamps(data_set,
segmentation_points):
minimum_distance_milliseconds = 250 # TODO: This may need to change.
def is_distance_too_small(previous_segmentation_point,
current_segmentation_point):
previous_hardware_timestamp = data_set.hardware_timestamp[
previous_segmentation_point]
current_hardware_timestamp = data_set.hardware_timestamp[
current_segmentation_point]
distance = current_hardware_timestamp - previous_hardware_timestamp
return distance < minimum_distance_milliseconds
delete_segmentation_points_if(segmentation_points, is_distance_too_small)
def delete_low_variance_segmentation_points(normed_data, segmentation_points):
minimum_variance = 0.002 # TODO: This may need to change.
def is_variance_too_low(previous_segmentation_point,
current_segmentation_point):
variance = np.var(
normed_data[previous_segmentation_point:current_segmentation_point])
return variance < minimum_variance
delete_segmentation_points_if(segmentation_points, is_variance_too_low)
def is_felix_1(csv_file_path):
return "Felix/2020-07-02_11.17.39" in csv_file_path
def is_felix_2(csv_file_path):
return "Felix/2020-07-02_12.50.00" in csv_file_path
def is_felix_3(csv_file_path):
return "Felix/2020-07-02_13.00.09" in csv_file_path
def is_marcelle_1(csv_file_path):
return "Marcelle/2020-07-02_14.59.59" in csv_file_path
def is_marcelle_2(csv_file_path):
return "Marcelle/2020-07-02_15.13.22" in csv_file_path
def is_marcelle_3(csv_file_path):
return "Marcelle/2020-07-02_15.31.36" in csv_file_path
def is_marcelle_4(csv_file_path):
return "Marcelle/2020-07-02_15.39.22" in csv_file_path
def is_mike_1(csv_file_path):
return "Mike/2020-07-02_14.07.33" in csv_file_path
def is_mike_2(csv_file_path):
return "Mike/2020-07-02_14.14.32" in csv_file_path
def is_mike_3(csv_file_path):
return "Mike/2020-07-02_14.20.28" in csv_file_path
def is_mike_4(csv_file_path):
return "Mike/2020-07-02_14.38.40" in csv_file_path
def exercise_range(csv_file_path):
# [begin;end)
if is_felix_1(csv_file_path):
return 58227, 108143
if is_felix_2(csv_file_path):
return 16003, 63001
if is_felix_3(csv_file_path):
return 26999, 62415
if is_marcelle_1(csv_file_path):
return 74001, 100001
if is_marcelle_2(csv_file_path):
return 23003, 66101
if is_marcelle_3(csv_file_path):
return 72271, 113979
if is_marcelle_4(csv_file_path):
return 0, 0 # TODO: HERE
if is_mike_1(csv_file_path):
return 7003, 44121
if is_mike_2(csv_file_path):
return 32003, 65001
if is_mike_3(csv_file_path):
return 61995, 84015
if is_mike_4(csv_file_path):
return 0, 0 # TODO: HERE
raise Exception(f'"{csv_file_path}" is not a known CSV file.')
def create_euclidean_norm(data_set, imu):
if imu == accelerometer_string():
return euclidean_norm(data_set.accelerometer_x, data_set.accelerometer_y,
data_set.accelerometer_z)
elif imu == gyroscope_string():
return euclidean_norm(data_set.gyroscope_x, data_set.gyroscope_y,
data_set.gyroscope_z)
def main(arguments):
parser = argparse.ArgumentParser(description='Segment a MoGaSens CSV file.')
parser.add_argument('--skip_window', dest='skip_window', action='store_true')
parser.add_argument('--no-skip_window',
dest='skip_window',
action='store_false')
parser.add_argument('--delete_too_close',
dest='delete_too_close',
action='store_true')
parser.add_argument('--no-delete_too_close',
dest='delete_too_close',
action='store_false')
parser.add_argument('--delete_low_variance',
dest='delete_low_variance',
action='store_true')
parser.add_argument('--no-delete_low_variance',
dest='delete_low_variance',
action='store_false')
parser.add_argument('--image_format',
type=str,
help='The image format to use e.g. svg',
default='png',
required=False)
parser.add_argument('--csv_file_path',
type=str,
help='Path to the CSV file to segment.',
required=True)
parser.add_argument('--sensor',
type=int,
help='The sensor to use (769 | 770 | 771 | 772)',
required=True)
parser.add_argument('--imu',
type=str,
help='The IMU to use (accelerometer | gyroscope)',
required=True)
parser.add_argument('--segmentation_kind',
type=str,
help='The segmentation kind to use (min | max | both)',
required=True)
parser.add_argument('--window_size',
type=int,
help='The window size to use for segmenting.',
required=True)
args = parser.parse_args(arguments)
csv_file_path = args.csv_file_path
sensor = args.sensor
imu = args.imu
segmentation_kind = args.segmentation_kind
window_size = args.window_size
skip_window = args.skip_window # Whether to skip the window used for segmentation when a segmentation point is found.
delete_too_close = args.delete_too_close
delete_low_variance = args.delete_low_variance
if not validate(csv_file_path, sensor, imu, segmentation_kind, window_size):
sys.exit(1)
entire_data_set = DataSet.from_file(csv_file_path)
desired_sensor_data_set = entire_data_set.filter_by_sensor(sensor)
exercise_begin, exercise_end = exercise_range(csv_file_path)
if exercise_begin == 0 and exercise_end == 0:
# TODO: HERE This is commented out temporarily
#print(
# f"segment.py: exercise_range returned the empty range for \"{csv_file_path}\", skipping file."
#)
#print(f"segment.py: Exiting.")
sys.exit(0)
desired_sensor_data_set.crop_front(exercise_begin)
desired_sensor_data_set.crop_back(exercise_end)
normed_data = create_euclidean_norm(desired_sensor_data_set, imu)
segmentation_points = segment.segmentation_points(
normed_data, segmentation_kind_from_str(segmentation_kind), window_size,
skip_window)
if delete_too_close:
delete_too_close_segmenting_hardware_timestamps(desired_sensor_data_set,
segmentation_points)
if delete_low_variance:
delete_low_variance_segmentation_points(normed_data, segmentation_points)
segmenting_hardware_timestamps = desired_sensor_data_set.segmenting_hardware_timestamps(
segmentation_points)
print(
f"segment.py: {len(segmenting_hardware_timestamps)} segmentation points found in \"{csv_file_path}\"."
)
# TODO: HERE This stuff is commented out temporarily
# imus = [accelerometer_string(), gyroscope_string()]
# for sensor in sensors():
# for imu in imus:
# plotter_main(
# arguments=[
# '--image_format',
# args.image_format,
# '--no-moving_average_filter', # Don't use a filter
# '--time_based_split', # Time based split setting
# csv_file_path, # Path to the entire CSV file
# f'{sensor}', # The sensor (left arm, belly, right arm, chest)
# imu, # The imu (accelerometer / gyroscope)
# '0' # Filter sample count (must be 0 if none is used)
# ],
# segmenting_hwstamps=segmenting_hardware_timestamps)
sys.exit(0)
if __name__ == "__main__":
main(sys.argv[1:])
# --- StarcoderdataPython entry 9615984 ---
# Source: venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_firewall_proxy_policy.py
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_proxy_policy
short_description: Configure proxy policies in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify firewall feature and proxy_policy category.
      Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.0
version_added: "2.10"
author:
- <NAME> (@chillancezen)
- <NAME> (@JieX19)
- <NAME> (@fgtdev-hblu)
- <NAME> (@frankshen01)
- <NAME> (@mamunozgonzalez)
- <NAME> (@thomnico)
notes:
- Legacy fortiosapi has been deprecated, httpapi is the preferred way to run playbooks
requirements:
- ansible>=2.9.0
options:
access_token:
description:
- Token-based authentication.
          Generated from the FortiGate GUI.
type: str
required: false
enable_log:
description:
- Enable/Disable logging for task.
type: bool
required: false
default: false
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
member_path:
type: str
description:
        - Member attribute path to operate on.
        - Delimited by a slash character if there is more than one attribute.
        - Parameters marked with member_path are eligible for member operations.
member_state:
type: str
description:
- Add or delete a member under specified attribute path.
- When member_state is specified, the state option is ignored.
choices:
- present
- absent
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
firewall_proxy_policy:
description:
- Configure proxy policies.
default: null
type: dict
suboptions:
access_proxy:
description:
- Access Proxy.
type: list
suboptions:
name:
description:
- Access Proxy name. Source firewall.access-proxy.name.
required: true
type: str
access_proxy6:
description:
- IPv6 access proxy.
type: list
suboptions:
name:
description:
- Access proxy name. Source firewall.access-proxy6.name.
required: true
type: str
action:
description:
- Accept or deny traffic matching the policy parameters.
type: str
choices:
- accept
- deny
- redirect
application_list:
description:
- Name of an existing Application list. Source application.list.name.
type: str
av_profile:
description:
- Name of an existing Antivirus profile. Source antivirus.profile.name.
type: str
cifs_profile:
description:
- Name of an existing CIFS profile. Source cifs.profile.name.
type: str
comments:
description:
- Optional comments.
type: str
decrypted_traffic_mirror:
description:
- Decrypted traffic mirror. Source firewall.decrypted-traffic-mirror.name.
type: str
device_ownership:
description:
- When enabled, the ownership enforcement will be done at policy level.
type: str
choices:
- enable
- disable
disclaimer:
description:
- 'Web proxy disclaimer setting: by domain, policy, or user.'
type: str
choices:
- disable
- domain
- policy
- user
dlp_sensor:
description:
- Name of an existing DLP sensor. Source dlp.sensor.name.
type: str
dstaddr:
description:
- Destination address objects.
type: list
suboptions:
name:
description:
- Address name. Source firewall.address.name firewall.addrgrp.name firewall.proxy-address.name firewall.proxy-addrgrp.name
firewall.vip.name firewall.vipgrp.name firewall.vip46.name firewall.vipgrp46.name system.external-resource.name.
required: true
type: str
dstaddr_negate:
description:
- When enabled, destination addresses match against any address EXCEPT the specified destination addresses.
type: str
choices:
- enable
- disable
dstaddr6:
description:
- IPv6 destination address objects.
type: list
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name firewall.vip6.name firewall.vipgrp6.name firewall.vip64.name
firewall.vipgrp64.name system.external-resource.name.
required: true
type: str
dstintf:
description:
- Destination interface names.
type: list
suboptions:
name:
description:
- Interface name. Source system.interface.name system.zone.name.
required: true
type: str
emailfilter_profile:
description:
- Name of an existing email filter profile. Source emailfilter.profile.name.
type: str
file_filter_profile:
description:
- Name of an existing file-filter profile. Source file-filter.profile.name.
type: str
global_label:
description:
- Global web-based manager visible label.
type: str
groups:
description:
- Names of group objects.
type: list
suboptions:
name:
description:
- Group name. Source user.group.name.
required: true
type: str
http_tunnel_auth:
description:
- Enable/disable HTTP tunnel authentication.
type: str
choices:
- enable
- disable
icap_profile:
description:
- Name of an existing ICAP profile. Source icap.profile.name.
type: str
internet_service:
description:
- Enable/disable use of Internet Services for this policy. If enabled, destination address and service are not used.
type: str
choices:
- enable
- disable
internet_service_custom:
description:
- Custom Internet Service name.
type: list
suboptions:
name:
description:
- Custom name. Source firewall.internet-service-custom.name.
required: true
type: str
internet_service_custom_group:
description:
- Custom Internet Service group name.
type: list
suboptions:
name:
description:
- Custom Internet Service group name. Source firewall.internet-service-custom-group.name.
required: true
type: str
internet_service_group:
description:
- Internet Service group name.
type: list
suboptions:
name:
description:
- Internet Service group name. Source firewall.internet-service-group.name.
required: true
type: str
internet_service_id:
description:
- Internet Service ID.
type: list
suboptions:
id:
description:
- Internet Service ID. Source firewall.internet-service.id.
required: true
type: int
internet_service_name:
description:
- Internet Service name.
type: list
suboptions:
name:
description:
- Internet Service name. Source firewall.internet-service-name.name.
required: true
type: str
internet_service_negate:
description:
- When enabled, Internet Services match against any internet service EXCEPT the selected Internet Service.
type: str
choices:
- enable
- disable
ips_sensor:
description:
- Name of an existing IPS sensor. Source ips.sensor.name.
type: str
label:
description:
- VDOM-specific GUI visible label.
type: str
logtraffic:
description:
- Enable/disable logging traffic through the policy.
type: str
choices:
- all
- utm
- disable
logtraffic_start:
description:
- Enable/disable policy log traffic start.
type: str
choices:
- enable
- disable
mms_profile:
description:
- Name of an existing MMS profile. Source firewall.mms-profile.name.
type: str
name:
description:
- Policy name.
type: str
policyid:
description:
- Policy ID.
required: true
type: int
poolname:
description:
- Name of IP pool object.
type: list
suboptions:
name:
description:
- IP pool name. Source firewall.ippool.name.
required: true
type: str
profile_group:
description:
- Name of profile group. Source firewall.profile-group.name.
type: str
profile_protocol_options:
description:
- Name of an existing Protocol options profile. Source firewall.profile-protocol-options.name.
type: str
profile_type:
description:
- Determine whether the firewall policy allows security profile groups or single profiles only.
type: str
choices:
- single
- group
proxy:
description:
- Type of explicit proxy.
type: str
choices:
- explicit-web
- transparent-web
- ftp
- ssh
- ssh-tunnel
- wanopt
- access-proxy
redirect_url:
description:
- Redirect URL for further explicit web proxy processing.
type: str
replacemsg_override_group:
description:
- Authentication replacement message override group. Source system.replacemsg-group.name.
type: str
scan_botnet_connections:
description:
- Enable/disable scanning of connections to Botnet servers.
type: str
choices:
- disable
- block
- monitor
schedule:
description:
- Name of schedule object. Source firewall.schedule.onetime.name firewall.schedule.recurring.name firewall.schedule.group.name.
type: str
sctp_filter_profile:
description:
- Name of an existing SCTP filter profile. Source sctp-filter.profile.name.
type: str
service:
description:
- Name of service objects.
type: list
suboptions:
name:
description:
- Service name. Source firewall.service.custom.name firewall.service.group.name.
required: true
type: str
service_negate:
description:
- When enabled, services match against any service EXCEPT the specified destination services.
type: str
choices:
- enable
- disable
session_ttl:
description:
                - TTL in seconds for sessions accepted by this policy (0 means use the system default).
type: int
spamfilter_profile:
description:
- Name of an existing Spam filter profile. Source spamfilter.profile.name.
type: str
srcaddr:
description:
- Source address objects.
type: list
suboptions:
name:
description:
- Address name. Source firewall.address.name firewall.addrgrp.name firewall.proxy-address.name firewall.proxy-addrgrp.name system
.external-resource.name.
required: true
type: str
srcaddr_negate:
description:
- When enabled, source addresses match against any address EXCEPT the specified source addresses.
type: str
choices:
- enable
- disable
srcaddr6:
description:
- IPv6 source address objects.
type: list
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name system.external-resource.name.
required: true
type: str
srcintf:
description:
- Source interface names.
type: list
suboptions:
name:
description:
- Interface name. Source system.interface.name system.zone.name.
required: true
type: str
ssh_filter_profile:
description:
- Name of an existing SSH filter profile. Source ssh-filter.profile.name.
type: str
ssh_policy_redirect:
description:
- Redirect SSH traffic to matching transparent proxy policy.
type: str
choices:
- enable
- disable
ssl_ssh_profile:
description:
- Name of an existing SSL SSH profile. Source firewall.ssl-ssh-profile.name.
type: str
status:
description:
- Enable/disable the active status of the policy.
type: str
choices:
- enable
- disable
transparent:
description:
- Enable to use the IP address of the client to connect to the server.
type: str
choices:
- enable
- disable
users:
description:
- Names of user objects.
type: list
suboptions:
name:
description:
- Group name. Source user.local.name.
required: true
type: str
utm_status:
description:
- Enable the use of UTM profiles/sensors/lists.
type: str
choices:
- enable
- disable
uuid:
description:
- Universally Unique Identifier (UUID; automatically assigned but can be manually reset).
type: str
videofilter_profile:
description:
- Name of an existing VideoFilter profile. Source videofilter.profile.name.
type: str
voip_profile:
description:
- Name of an existing VoIP profile. Source voip.profile.name.
type: str
waf_profile:
description:
- Name of an existing Web application firewall profile. Source waf.profile.name.
type: str
webcache:
description:
- Enable/disable web caching.
type: str
choices:
- enable
- disable
webcache_https:
description:
- Enable/disable web caching for HTTPS (Requires deep-inspection enabled in ssl-ssh-profile).
type: str
choices:
- disable
- enable
webfilter_profile:
description:
- Name of an existing Web filter profile. Source webfilter.profile.name.
type: str
webproxy_forward_server:
description:
- Name of web proxy forward server. Source web-proxy.forward-server.name web-proxy.forward-server-group.name.
type: str
webproxy_profile:
description:
- Name of web proxy profile. Source web-proxy.profile.name.
type: str
ztna_ems_tag:
description:
- ZTNA EMS Tag names.
type: list
suboptions:
name:
description:
- EMS Tag name. Source firewall.address.name firewall.addrgrp.name.
required: true
type: str
'''
EXAMPLES = '''
- collections:
- fortinet.fortios
connection: httpapi
hosts: fortigate01
vars:
ansible_httpapi_port: 443
ansible_httpapi_use_ssl: true
ansible_httpapi_validate_certs: false
vdom: root
tasks:
- name: fortios_firewall_proxy_policy
fortios_firewall_proxy_policy:
vdom: root
state: present
firewall_proxy_policy:
action: deny
disclaimer: disable
dstaddr:
- name: all
dstaddr_negate: disable
dstintf:
- name: port4
http_tunnel_auth: disable
internet_service: disable
internet_service_negate: disable
logtraffic: disable
logtraffic_start: disable
policyid: 1
profile_type: single
proxy: transparent-web
schedule: always
service:
- name: webproxy
service_negate: disable
session_ttl: 0
srcaddr:
- name: all
srcaddr_negate: disable
srcintf:
- name: port3
ssh_policy_redirect: disable
status: enable
transparent: disable
utm_status: disable
webcache: disable
webcache_https: disable
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_legacy_fortiosapi
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import schema_to_module_spec
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_schema_versioning
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.comparison import is_same_comparison
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.comparison import serialize
def filter_firewall_proxy_policy_data(json):
option_list = ['access_proxy', 'access_proxy6', 'action',
'application_list', 'av_profile', 'cifs_profile',
'comments', 'decrypted_traffic_mirror', 'device_ownership',
'disclaimer', 'dlp_sensor', 'dstaddr',
'dstaddr_negate', 'dstaddr6', 'dstintf',
'emailfilter_profile', 'file_filter_profile', 'global_label',
'groups', 'http_tunnel_auth', 'icap_profile',
'internet_service', 'internet_service_custom', 'internet_service_custom_group',
'internet_service_group', 'internet_service_id', 'internet_service_name',
'internet_service_negate', 'ips_sensor', 'label',
'logtraffic', 'logtraffic_start', 'mms_profile',
'name', 'policyid', 'poolname',
'profile_group', 'profile_protocol_options', 'profile_type',
'proxy', 'redirect_url', 'replacemsg_override_group',
'scan_botnet_connections', 'schedule', 'sctp_filter_profile',
'service', 'service_negate', 'session_ttl',
'spamfilter_profile', 'srcaddr', 'srcaddr_negate',
'srcaddr6', 'srcintf', 'ssh_filter_profile',
'ssh_policy_redirect', 'ssl_ssh_profile', 'status',
'transparent', 'users', 'utm_status',
'uuid', 'videofilter_profile', 'voip_profile',
'waf_profile', 'webcache', 'webcache_https',
'webfilter_profile', 'webproxy_forward_server', 'webproxy_profile',
'ztna_ems_tag']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
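# Illustrative behaviour (hypothetical input): only keys listed in option_list
# with non-None values survive, e.g.
# filter_firewall_proxy_policy_data({'policyid': 1, 'comments': None,
# 'unknown': 'x'}) returns {'policyid': 1}.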
def underscore_to_hyphen(data):
if isinstance(data, list):
for i, elem in enumerate(data):
data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
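# Illustrative behaviour (hypothetical input):
# underscore_to_hyphen({'internet_service': 'enable'}) returns
# {'internet-service': 'enable'}, recursively converting module argument names
# to the hyphenated keys the FortiOS API expects.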
def firewall_proxy_policy(data, fos, check_mode=False):
vdom = data['vdom']
state = data['state']
firewall_proxy_policy_data = data['firewall_proxy_policy']
filtered_data = underscore_to_hyphen(filter_firewall_proxy_policy_data(firewall_proxy_policy_data))
# check_mode starts from here
if check_mode:
mkey = fos.get_mkey('firewall', 'proxy_policy', filtered_data, vdom=vdom)
current_data = fos.get('firewall', 'proxy_policy', vdom=vdom, mkey=mkey)
is_existed = current_data and current_data.get('http_status') == 200 \
and isinstance(current_data.get('results'), list) \
and len(current_data['results']) > 0
        # If the record exists and state is 'present', compare current settings with the desired ones.
if state == 'present' or state is True:
if mkey is None:
return False, True, filtered_data
            # The mkey exists; check whether the existing record matches the desired configuration.
if is_existed:
is_same = is_same_comparison(
serialize(current_data['results'][0]), serialize(filtered_data))
return False, not is_same, filtered_data
# record does not exist
return False, True, filtered_data
if state == 'absent':
if mkey is None:
return False, False, filtered_data
if is_existed:
return False, True, filtered_data
return False, False, filtered_data
        return True, False, {'reason': 'Must provide state parameter'}
if state == "present" or state is True:
return fos.set('firewall',
'proxy-policy',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('firewall',
'proxy-policy',
mkey=filtered_data['policyid'],
vdom=vdom)
else:
fos._module.fail_json(msg='state must be present or absent!')
def is_successful_status(resp):
    return ('status' in resp and resp['status'] == 'success') or \
        ('http_status' in resp and resp['http_status'] == 200) or \
        ('http_method' in resp and resp['http_method'] == 'DELETE'
         and resp.get('http_status') == 404)
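# Note: a DELETE answered with HTTP 404 counts as success, so removing an
# already-absent object stays idempotent.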
def fortios_firewall(data, fos, check_mode):
fos.do_member_operation('firewall_proxy_policy')
if data['firewall_proxy_policy']:
resp = firewall_proxy_policy(data, fos, check_mode)
else:
fos._module.fail_json(msg='missing task body: %s' % ('firewall_proxy_policy'))
if check_mode:
return resp
return not is_successful_status(resp), \
is_successful_status(resp) and \
(resp['revision_changed'] if 'revision_changed' in resp else True), \
resp
versioned_schema = {
"type": "list",
"children": {
"redirect_url": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ssh_filter_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"srcaddr_negate": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"icap_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"application_list": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"internet_service_negate": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"service_negate": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"session_ttl": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"av_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"device_ownership": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"proxy": {
"type": "string",
"options": [
{
"value": "explicit-web",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "transparent-web",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ftp",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ssh",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ssh-tunnel",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wanopt",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "access-proxy",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mms_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"webproxy_forward_server": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"cifs_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"internet_service_custom": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dstaddr": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"uuid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"service": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"poolname": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"webproxy_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dlp_sensor": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ssl_ssh_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"access_proxy6": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v7.0.1": True
}
}
},
"revisions": {
"v7.0.1": True
}
},
"comments": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"label": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"profile_group": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"http_tunnel_auth": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dstaddr6": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"global_label": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"srcintf": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"status": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"users": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"profile_type": {
"type": "string",
"options": [
{
"value": "single",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "group",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"schedule": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"disclaimer": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "domain",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "policy",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "user",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"policyid": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"utm_status": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"videofilter_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"waf_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"srcaddr": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"spamfilter_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": False,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"emailfilter_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"dstintf": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"groups": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"access_proxy": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"voip_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"transparent": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"internet_service_custom_group": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"srcaddr6": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"decrypted_traffic_mirror": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"file_filter_profile": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"webfilter_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"logtraffic_start": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"name": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"sctp_filter_profile": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"scan_botnet_connections": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.11": True,
"v6.0.0": True,
"v6.0.5": True
}
},
{
"value": "block",
"revisions": {
"v6.0.11": True,
"v6.0.0": True,
"v6.0.5": True
}
},
{
"value": "monitor",
"revisions": {
"v6.0.11": True,
"v6.0.0": True,
"v6.0.5": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": False,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"internet_service_id": {
"type": "list",
"children": {
"id": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dstaddr_negate": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"webcache_https": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ips_sensor": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ztna_ems_tag": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"ssh_policy_redirect": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"internet_service_name": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
},
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"internet_service_group": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"internet_service": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"action": {
"type": "string",
"options": [
{
"value": "accept",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "deny",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "redirect",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"webcache": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"replacemsg_override_group": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"profile_protocol_options": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"logtraffic": {
"type": "string",
"options": [
{
"value": "all",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "utm",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
def main():
module_spec = schema_to_module_spec(versioned_schema)
mkeyname = 'policyid'
fields = {
"access_token": {"required": False, "type": "str", "no_log": True},
"enable_log": {"required": False, "type": bool},
"vdom": {"required": False, "type": "str", "default": "root"},
"member_path": {"required": False, "type": "str"},
"member_state": {
"type": "str",
"required": False,
"choices": ["present", "absent"]
},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"firewall_proxy_policy": {
"required": False, "type": "dict", "default": None,
"options": {
}
}
}
for attribute_name in module_spec['options']:
fields["firewall_proxy_policy"]['options'][attribute_name] = module_spec['options'][attribute_name]
if mkeyname and mkeyname == attribute_name:
fields["firewall_proxy_policy"]['options'][attribute_name]['required'] = True
check_legacy_fortiosapi()
module = AnsibleModule(argument_spec=fields,
supports_check_mode=True)
versions_check_result = None
if module._socket_path:
connection = Connection(module._socket_path)
if 'access_token' in module.params:
connection.set_option('access_token', module.params['access_token'])
if 'enable_log' in module.params:
connection.set_option('enable_log', module.params['enable_log'])
else:
connection.set_option('enable_log', False)
fos = FortiOSHandler(connection, module, mkeyname)
versions_check_result = check_schema_versioning(fos, versioned_schema, "firewall_proxy_policy")
is_error, has_changed, result = fortios_firewall(module.params, fos, module.check_mode)
else:
module.fail_json(**FAIL_SOCKET_MSG)
if versions_check_result and versions_check_result['matched'] is False:
module.warn("Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv")
if not is_error:
if versions_check_result and versions_check_result['matched'] is False:
module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
else:
module.exit_json(changed=has_changed, meta=result)
else:
if versions_check_result and versions_check_result['matched'] is False:
module.fail_json(msg="Error in repo", version_check_warning=versions_check_result, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
# --- StarcoderdataPython entry 5029874 ---
from numpy import zeros
# scipy implementation has issues with different sized matrices
def block_diag(matrix_list):
    """Assemble a block-diagonal matrix from a list of 2-D arrays.

    Handles blocks of different shapes; the result has zeros everywhere
    outside the diagonal blocks.
    """
row_length = sum(mat.shape[0] for mat in matrix_list)
col_length = sum(mat.shape[1] for mat in matrix_list)
result = zeros((row_length, col_length))
row_idx = col_idx = 0
for mat in matrix_list:
row_len = mat.shape[0]
col_len = mat.shape[1]
result[row_idx : (row_idx + row_len), col_idx : (col_idx + col_len)] = mat[:, :]
row_idx = row_idx + row_len
col_idx = col_idx + col_len
return result
def jacobian(func, x, h=1e-6, relative=False, *args):
    """Approximate the Jacobian of ``func`` at ``x`` via central differences.

    The step is ``h``, or ``h * x[idx]`` per component when ``relative`` is
    True. Extra positional ``args`` are forwarded to ``func``.
    """
    y = func(x, *args)
    result = zeros((len(y), len(x)))
    for row in range(len(y)):
        def row_func(x, *args):
            return func(x, *args)[row]
        for idx in range(len(x)):
            dx = zeros(x.shape)
            hx = x[idx] * h if relative else h
            dx[idx] = hx
            f1 = row_func(x + dx, *args)
            f2 = row_func(x - dx, *args)
            result[row, idx] = 0.5 * (f1 - f2) / hx
    return result
def hessian(func, x, h=1e-5, relative=False, *args):
    """Approximate the Hessian of scalar-valued ``func`` at ``x``.

    Mixed partials use the four-point central-difference formula; diagonal
    entries use a standard second difference. Extra positional ``args`` are
    forwarded to ``func``.
    """
    len_x = len(x)
result = zeros((len_x, len_x))
for row in range(1, len_x):
for col in range(row):
dx = zeros(len_x)
dy = zeros(len_x)
if relative:
hx = h * x[col]
hy = h * x[row]
else:
hx = hy = h
dx[col] = hx
dy[row] = hy
f1 = func(x + dx + dy, *args)
f2 = func(x + dx - dy, *args)
f3 = func(x - dx + dy, *args)
f4 = func(x - dx - dy, *args)
result[row, col] = 0.25 * (f1 - f2 - f3 + f4) / hx / hy
result += result.T
for idx in range(len_x):
dx = zeros(len_x)
hx = h * x[idx] if relative else h
dx[idx] = hx
f1 = func(x + dx, *args)
f2 = func(x, *args)
f3 = func(x - dx, *args)
result[idx, idx] = (f1 - 2 * f2 + f3) / hx / hx
return result
def identity_fn(x):
    """Return ``x`` unchanged (the identity function)."""
    return x
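# Minimal self-check sketch (assumed, not part of the original module): run the
# file directly to exercise the helpers numerically.
if __name__ == "__main__":
    from numpy import array, ones

    print(block_diag([ones((2, 2)), ones((1, 3))]).shape)  # (3, 5)

    def f(v):
        return array([v[0] ** 2, v[0] * v[1]])

    # Analytic Jacobian at (1, 2) is [[2, 0], [2, 1]].
    print(jacobian(f, array([1.0, 2.0])))

    def g(v):
        return v[0] ** 2 * v[1]

    # Analytic Hessian of x**2 * y at (1, 2) is [[4, 2], [2, 0]].
    print(hessian(g, array([1.0, 2.0])))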
# --- StarcoderdataPython entry 294423 ---
from crispy_forms.bootstrap import InlineCheckboxes
from crispy_forms.helper import FormHelper
from crispy_forms.layout import HTML, ButtonHolder, Div, Fieldset, Layout, Submit
from datetimewidget.widgets import DateWidget
from django import forms
from .models import (
CHF,
CKD,
IBD,
PVD,
Alcohol,
AllopurinolHypersensitivity,
Angina,
Anticoagulation,
Bleed,
ColchicineInteractions,
Cyclosporine,
Diabetes,
Diuretics,
Erosions,
FebuxostatHypersensitivity,
Fructose,
Gout,
HeartAttack,
Hypertension,
Hyperuricemia,
OrganTransplant,
Osteoporosis,
Shellfish,
Stroke,
Tophi,
UrateKidneyStones,
XOIInteractions,
)
### Medical History ModelForms ###
class AnginaForm(forms.ModelForm):
prefix = "Angina"
class Meta:
model = Angina
fields = ("value",)
def __init__(self, *args, **kwargs):
super(AnginaForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"",
"value",
id="angina_for_profile",
),
)
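# Illustrative usage (assumed view code): AnginaForm(request.POST or None)
# renders a single "value" checkbox; form_tag = False lets the template wrap
# several of these history sub-forms inside one enclosing <form> element.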
class AnticoagulationForm(forms.ModelForm):
prefix = "Anticoagulation"
class Meta:
model = Anticoagulation
fields = (
"value",
"apixaban",
"clopidogrel",
"dabigatran",
"enoxaparin",
"rivaroxaban",
"warfarin",
)
def __init__(self, *args, **kwargs):
super(AnticoagulationForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"Anticoagulation",
"value",
"apixaban",
"clopidogrel",
"dabigatran",
"enoxaparin",
"rivaroxaban",
"warfarin",
id="anticoagulation_for_profile",
),
)
class AnticoagulationSimpleForm(AnticoagulationForm):
class Meta:
model = Anticoagulation
fields = ("value",)
def __init__(self, *args, **kwargs):
super(AnticoagulationSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"",
"value",
id="anticoagulation_for_profile",
),
)
class CKDSimpleForm(forms.ModelForm):
prefix = "CKD"
class Meta:
model = CKD
fields = (
"value",
"dialysis",
)
def __init__(self, *args, **kwargs):
super(CKDSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.fields["dialysis"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
# You can dynamically adjust your layout
self.helper.layout = Layout(
Fieldset(
"",
"value",
"dialysis",
id="CKD_for_profile",
),
)
class CKDForm(CKDSimpleForm):
class Meta:
model = CKD
fields = (
"value",
"dialysis",
"stage",
)
def __init__(self, *args, **kwargs):
super(CKDForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.fields["dialysis"].widget = forms.CheckboxInput()
self.fields["stage"].empty_label = None
self.fields["stage"].required = False
self.fields["stage"].widget = forms.RadioSelect()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"",
"value",
InlineCheckboxes("stage"),
"dialysis",
id="CKD_for_profile",
),
)
class ColchicineInteractionsForm(forms.ModelForm):
prefix = "Colchicine Interactions"
class Meta:
model = ColchicineInteractions
fields = (
"value",
"clarithromycin",
"simvastatin",
)
def __init__(self, *args, **kwargs):
super(ColchicineInteractionsForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
# You can dynamically adjust your layout
self.helper.layout = Layout(
Fieldset(
"Colchicine Interactions",
"value",
"clarithromycin",
"simvastatin",
id="colchicine_interactions_for_profile",
),
)
class ColchicineInteractionsSimpleForm(ColchicineInteractionsForm):
class Meta:
model = ColchicineInteractions
fields = ("value",)
def __init__(self, *args, **kwargs):
super(ColchicineInteractionsSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"",
"value",
id="colchicine_interactions_for_profile",
),
)
class HypertensionForm(forms.ModelForm):
prefix = "hypertension"
class Meta:
model = Hypertension
fields = (
"value",
"medication",
)
def __init__(self, *args, **kwargs):
super(HypertensionForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.fields["medication"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
# You can dynamically adjust your layout
self.helper.layout = Layout(
Fieldset("", "value", "medication", id="hypertension_for_profile"),
)
class HypertensionSimpleForm(HypertensionForm):
class Meta:
model = Hypertension
fields = ("value",)
def __init__(self, *args, **kwargs):
        super(HypertensionSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
# You can dynamically adjust your layout
self.helper.layout = Layout(
Fieldset("", "value", id="hypertension_for_profile"),
)
class PVDForm(forms.ModelForm):
prefix = "PVD"
class Meta:
model = PVD
fields = ("value",)
def __init__(self, *args, **kwargs):
super(PVDForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
# You can dynamically adjust your layout
self.helper.layout = Layout(
Fieldset("", "value", id="PVD_for_profile"),
)
class HyperuricemiaForm(forms.ModelForm):
prefix = "hyperuricemia"
class Meta:
model = Hyperuricemia
fields = ("value",)
def __init__(self, *args, **kwargs):
super(HyperuricemiaForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="hyperuricemia_for_profile"),
)
class IBDForm(forms.ModelForm):
prefix = "IBD"
class Meta:
model = IBD
fields = ("value",)
def __init__(self, *args, **kwargs):
super(IBDForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Inflammatory Bowel Disease", "value", id="IBD_for_profile"),
)
class IBDSimpleForm(IBDForm):
def __init__(self, *args, **kwargs):
super(IBDSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="IBD_for_profile"),
)
class OsteoporosisForm(forms.ModelForm):
prefix = "Osteoporosis"
class Meta:
model = Osteoporosis
fields = ("value",)
def __init__(self, *args, **kwargs):
super(OsteoporosisForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Osteoporosis", "value", id="osteoporosis_for_profile"),
)
class OsteoporosisSimpleForm(OsteoporosisForm):
def __init__(self, *args, **kwargs):
super(OsteoporosisSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="osteoporosis_for_profile"),
)
class CHFForm(forms.ModelForm):
prefix = "CHF"
class Meta:
model = CHF
fields = (
"value",
"systolic",
)
def __init__(self, *args, **kwargs):
super(CHFForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.fields["systolic"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
# You can dynamically adjust your layout
self.helper.layout = Layout(
Fieldset("", "value", "systolic", id="CHF_for_profile"),
)
class CHFSimpleForm(CHFForm):
class Meta:
model = CHF
fields = ("value",)
def __init__(self, *args, **kwargs):
        super(CHFSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
# You can dynamically adjust your layout
self.helper.layout = Layout(
Fieldset("", "value", id="CHF_for_profile"),
)
class DiabetesForm(forms.ModelForm):
prefix = "diabetes"
class Meta:
model = Diabetes
fields = (
"value",
"type",
"insulin",
)
def __init__(self, *args, **kwargs):
super(DiabetesForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Diabetes", "value", "type", "insulin", id="diabetes_for_profile"),
)
class DiabetesSimpleForm(DiabetesForm):
class Meta:
model = Diabetes
fields = ("value",)
def __init__(self, *args, **kwargs):
super(DiabetesSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="diabetes_for_profile"),
)
class ErosionsForm(forms.ModelForm):
prefix = "erosions"
class Meta:
model = Erosions
fields = ("value",)
def __init__(self, *args, **kwargs):
super(ErosionsForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="erosions_for_profile"),
)
class OrganTransplantForm(forms.ModelForm):
prefix = "organ_transplant"
class Meta:
model = OrganTransplant
fields = (
"value",
"organ",
)
def __init__(self, *args, **kwargs):
super(OrganTransplantForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.fields["organ"].required = False
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", InlineCheckboxes("organ"), id="organ_transplant_for_profile"),
)
class TophiForm(forms.ModelForm):
prefix = "tophi"
class Meta:
model = Tophi
fields = ("value",)
def __init__(self, *args, **kwargs):
super(TophiForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="tophi_for_profile"),
)
class UrateKidneyStonesForm(forms.ModelForm):
prefix = "urate_kidney_stones"
class Meta:
model = UrateKidneyStones
fields = ("value",)
def __init__(self, *args, **kwargs):
super(UrateKidneyStonesForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="urate_kidney_stones_for_profile"),
)
class StrokeForm(forms.ModelForm):
prefix = "stroke"
class Meta:
model = Stroke
fields = (
"value",
"number",
"date",
)
widgets = {
"date": DateWidget(attrs={"id": "stroke_date.pk"}, usel10n=True, bootstrap_version=3),
}
def __init__(self, *args, **kwargs):
super(StrokeForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Stroke", "value", "number", "date", id="stroke_for_contraindications"),
)
class StrokeSimpleForm(StrokeForm):
class Meta:
model = Stroke
fields = ("value",)
def __init__(self, *args, **kwargs):
super(StrokeSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="stroke_for_contraindications"),
)
class HeartAttackForm(forms.ModelForm):
prefix = "heartattack"
class Meta:
model = HeartAttack
fields = (
"value",
"number",
"date",
"stent",
"stent_date",
"cabg",
"cabg_date",
)
dateTimeOptions = {
"autoclose": True,
"pickerPosition": "bottom-left",
}
        widgets = {
            # pass dateTimeOptions so the options declared above actually take effect
            "date": DateWidget(options=dateTimeOptions, attrs={"id": "heartattack_date.pk"}, usel10n=True, bootstrap_version=3),
            "stent_date": DateWidget(options=dateTimeOptions, attrs={"id": "stent_date.pk"}, usel10n=True, bootstrap_version=3),
            "cabg_date": DateWidget(options=dateTimeOptions, attrs={"id": "cabg_date.pk"}, usel10n=True, bootstrap_version=3),
        }
def __init__(self, *args, **kwargs):
super(HeartAttackForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"Heart Attack",
"value",
"number",
"date",
"stent",
"stent_date",
"cabg",
"cabg_date",
id="heart_attack_for_contraindications",
),
)
class HeartAttackSimpleForm(HeartAttackForm):
class Meta:
model = HeartAttack
fields = ("value",)
def __init__(self, *args, **kwargs):
super(HeartAttackSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"",
"value",
id="heart_attack_for_contraindications",
),
)
class BleedForm(forms.ModelForm):
prefix = "bleed"
class Meta:
model = Bleed
fields = (
"value",
"number",
"date",
"GIB",
"GIB_date",
"CNS",
"CNS_date",
"transfusion",
)
dateTimeOptions = {
"autoclose": True,
"pickerPosition": "bottom-left",
}
widgets = {
"date": DateWidget(
options=dateTimeOptions, attrs={"id": "bleed_date.pk"}, usel10n=True, bootstrap_version=3
),
"GIB_date": DateWidget(
options=dateTimeOptions, attrs={"id": "GIB_date.pk"}, usel10n=True, bootstrap_version=3
),
"CNS_date": DateWidget(
options=dateTimeOptions, attrs={"id": "CNS_date.pk"}, usel10n=True, bootstrap_version=3
),
}
def __init__(self, *args, **kwargs):
super(BleedForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"Bleed (major)",
"value",
"number",
"date",
"GIB",
"GIB_date",
"CNS",
"CNS_date",
"transfusion",
id="bleed_for_profile",
),
)
class BleedSimpleForm(BleedForm):
class Meta:
model = Bleed
fields = ("value",)
def __init__(self, *args, **kwargs):
super(BleedSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"",
"value",
id="bleed_for_contraindications",
),
)
class AllopurinolHypersensitivityForm(forms.ModelForm):
prefix = "AllopruinolHypersensitivity"
class Meta:
model = AllopurinolHypersensitivity
fields = (
"value",
"rash",
"transaminitis",
"cytopenia",
)
def __init__(self, *args, **kwargs):
super(AllopurinolHypersensitivityForm, self).__init__(*args, **kwargs)
self.fields["value"].label = "Allopurinol Side Effects"
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"Allopurinol Hypersensitivity",
"value",
"rash",
"transaminitis",
"cytopenia",
id="allopurinolhypersensitivity_for_profile",
),
)
class AllopurinolHypersensitivitySimpleForm(AllopurinolHypersensitivityForm):
class Meta:
model = AllopurinolHypersensitivity
fields = ("value",)
def __init__(self, *args, **kwargs):
        super(AllopurinolHypersensitivitySimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].label = "Allopurinol Side Effects"
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="gout_for_profile"),
)
class FebuxostatHypersensitivityForm(forms.ModelForm):
prefix = "FebuxostatHypersensitivity"
class Meta:
model = FebuxostatHypersensitivity
fields = (
"value",
"rash",
"transaminitis",
"cytopenia",
)
def __init__(self, *args, **kwargs):
super(FebuxostatHypersensitivityForm, self).__init__(*args, **kwargs)
self.fields["value"].label = "Febuxostat Side Effects"
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset(
"Febuxostat Hypersensitivity",
"value",
"rash",
"transaminitis",
"cytopenia",
id="febuxostathypersensitivity_for_profile",
),
)
class FebuxostatHypersensitivitySimpleForm(FebuxostatHypersensitivityForm):
class Meta:
model = FebuxostatHypersensitivity
fields = ("value",)
def __init__(self, *args, **kwargs):
        super(FebuxostatHypersensitivitySimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].label = "Febuxostat Side Effects"
self.fields["value"].widget = forms.CheckboxInput()
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="febuxostathypersensitivity_for_profile"),
)
class XOIInteractionsSimpleForm(forms.ModelForm):
class Meta:
model = XOIInteractions
fields = ("value",)
def __init__(self, *args, **kwargs):
super(XOIInteractionsSimpleForm, self).__init__(*args, **kwargs)
self.fields["value"].widget = forms.CheckboxInput()
self.fields["value"].label = "XOI Interactions"
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("", "value", id="XOIInteractions_for_profile"),
)
### Family History Forms ###
class GoutForm(forms.ModelForm):
prefix = "gout"
class Meta:
model = Gout
fields = ("value",)
def __init__(self, *args, **kwargs):
super(GoutForm, self).__init__(*args, **kwargs)
self.fields["value"].label = "Gout Family History"
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Gout", "value", id="gout_for_profile"),
)
### Social History Forms ###
class AlcoholForm(forms.ModelForm):
prefix = "alcohol"
class Meta:
model = Alcohol
fields = (
"value",
"number",
"wine",
"beer",
"liquor",
)
def __init__(self, *args, **kwargs):
super(AlcoholForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Alcohol", "value", "number", "wine", "beer", "liquor", id="alcohol_for_profile"),
)
class FructoseForm(forms.ModelForm):
prefix = "fructose"
class Meta:
model = Fructose
fields = ("value",)
def __init__(self, *args, **kwargs):
super(FructoseForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Fructose", "value", id="fructose_for_profile"),
)
class ShellfishForm(forms.ModelForm):
prefix = "shellfish"
class Meta:
model = Shellfish
fields = ("value",)
def __init__(self, *args, **kwargs):
super(ShellfishForm, self).__init__(*args, **kwargs)
self.helper = FormHelper(self)
self.helper.form_tag = False
self.helper.layout = Layout(
Fieldset("Shellfish", "value", id="shellfish_for_profile"),
)
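# A hedged usage sketch (the view and template names are hypothetical, not part
# of this module): every form sets helper.form_tag = False, so the surrounding
# template is expected to provide the <form> tag and submit button.
# from django.shortcuts import render
# def angina_update(request, pk):
#     form = AnginaForm(request.POST or None, instance=Angina.objects.get(pk=pk))
#     if request.method == "POST" and form.is_valid():
#         form.save()
#     return render(request, "history/angina_form.html", {"form": form})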
<reponame>MarcinOrlowski/prop-tool<filename>tests/checks/test_quotation_marks.py<gh_stars>1-10
"""
# trans-tool
# The translation files checker and syncing tool.
#
# Copyright ©2021 <NAME> <mail [@] <EMAIL>>
# https://github.com/MarcinOrlowski/trans-tool/
#
"""
from typing import Dict, Union
from transtool.checks.base.check import Check
from transtool.checks.quotation_marks import QuotationMarks
from transtool.decorators.overrides import overrides
from transtool.prop.items import Blank, Comment, Translation
from tests.checks.checks_test_case import ChecksTestCase
class TestQuotationMarks(ChecksTestCase):
@overrides(ChecksTestCase)
def get_checker(self, config: Union[Dict, None] = None) -> Check:
return QuotationMarks(config)
# #################################################################################################
def test_translation_no_faults(self) -> None:
self.check_single_file(Translation('key', '""'))
def test_translation_with_faults(self) -> None:
self.check_single_file(Translation('key', '"""'), exp_errors = 1)
# #################################################################################################
def test_comment_no_faults(self) -> None:
tests = [
Comment('# "foo" '),
]
self._do_checker_comment_test(tests, 0)
def test_comment_with_faults(self) -> None:
tests = [
Comment('# "foo `" '),
]
self._do_checker_comment_test(tests, 1)
# #################################################################################################
def test_handling_of_unsupported_types(self) -> None:
self.check_single_file(Blank())
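    # A hedged sketch of one more case (not in the original file; whether single
    # quotes are flagged depends on the QuotationMarks configuration):
    # def test_translation_unbalanced_single_quote(self) -> None:
    #     self.check_single_file(Translation('key', "it's"), exp_errors = 1)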
import numpy as np
from speech_datasets.transform.interface import FuncTrans
def delta(feat, window):
assert window > 0
delta_feat = np.zeros_like(feat)
for i in range(1, window + 1):
delta_feat[:-i] += i * feat[i:]
delta_feat[i:] += -i * feat[:-i]
delta_feat[-i:] += i * feat[-1]
delta_feat[:i] += -i * feat[0]
delta_feat /= 2 * sum(i ** 2 for i in range(1, window + 1))
return delta_feat
def add_deltas(x, window=2, order=2):
"""
:param x: Features
:param window: size of the window to use to approximate time derivative computation
:param order: highest order time derivative to compute
:return: Features, concatenated with all the relevant derivatives
"""
feats = [x]
for _ in range(order):
feats.append(delta(feats[-1], window))
return np.concatenate(feats, axis=1)
class AddDeltas(FuncTrans):
_func = add_deltas
__doc__ = add_deltas.__doc__
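# A minimal usage sketch (assuming the speech_datasets package is importable):
# append first- and second-order deltas to an 80-dim feature matrix.
if __name__ == "__main__":
    feats = np.random.randn(100, 80)            # (frames, dims)
    out = add_deltas(feats, window=2, order=2)
    print(out.shape)                            # (100, 240): base + delta + delta-delta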
<filename>days_conversion.py<gh_stars>0
no_of_days = int(input())
years = no_of_days // 365
weeks = (no_of_days % 365) // 7
days = (no_of_days % 365) % 7
# str() is required here: concatenating int and str raises a TypeError, and the
# original string ended with the day count twice instead of the word "days"
result = str(years) + " years " + str(weeks) + " weeks " + str(days) + " days"
print(result)
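# Sample run: an input of 1329 prints "3 years 33 weeks 3 days",
# since 1329 = 3*365 + 33*7 + 3.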
import os
import gdal
import osr
import logging
import glob
import shapefile
import geopandas as gp
import numpy as np
import settings
from cv2 import imread
from shapely.geometry import Polygon
from PIL import Image, ImageDraw
logging.getLogger('shapely.geos').setLevel(logging.CRITICAL)
class Tiling:
""" Command-line routine to tile images and shapefiles according to desired width and heights """
def __init__(self):
"""
Constructor method
"""
pass
def slice_array(self, array, positions):
"""
        Auxiliary method to slice an array into several smaller arrays
        :param array: the input array
        :param positions: the positions at which to slice
        :return: the list of sliced sub-arrays
"""
new_arrays = []
positions.append(len(array) - 1)
for i in range(len(positions) - 1):
new_arrays.append(array[positions[i]:positions[i + 1]])
return new_arrays
def draw_class_id_label(self, width, height, classes, shapes, ext, x_ratio, y_ratio, records, output):
"""
        Auxiliary method to create a new label image in class-id format, where the
        classes are represented by integers in a grayscale image. The created image
        has dimensions width x height, based on the classes present in the vector features
        :param width: an integer representing the image's width
        :param height: an integer representing the image's height
        :param classes: the classes to be checked in the vector records and interpreted by the method
        :param shapes: the vector features
        :param ext: the extent of the corresponding vector file
        :param x_ratio: the aspect ratio along the x axis, which determines the pixel size/dimension
        :param y_ratio: the aspect ratio along the y axis, which determines the pixel size/dimension
        :param records: a list of records/rows from the vector content
        :param output: the absolute path to the output file
"""
img = Image.new("L", (width, height), "black")
classes_content = classes['type']
draw = ImageDraw.Draw(img)
for i, record in enumerate(records):
if record[1] in classes_content:
parts = shapes[i].parts
pixels = []
fill_color = classes_content[record[1]]
for x, y in shapes[i].points:
px = int(width - ((ext[3][0] - x) * x_ratio))
py = int((ext[3][1] - y) * y_ratio)
pixels.append((px, py))
if len(parts) > 1:
polygons_parts = self.slice_array(pixels, parts)
for k in range(len(polygons_parts)):
draw.polygon(polygons_parts[k], outline=None, fill=fill_color)
else:
draw.polygon(pixels, outline=None, fill=fill_color)
img.save(output)
def draw_rgb_label(self, width, height, classes, shapes, ext, x_ratio, y_ratio, records, output):
"""
        Auxiliary method to create a new label image in RGB format, where the
        classes are represented by RGB colors in a 3-channel image. The created image
        has dimensions width x height, based on the classes present in the vector features
        :param width: an integer representing the image's width
        :param height: an integer representing the image's height
        :param classes: the classes to be checked in the vector records and interpreted by the method
        :param shapes: the vector features
        :param ext: the extent of the corresponding vector file
        :param x_ratio: the aspect ratio along the x axis, which determines the pixel size/dimension
        :param y_ratio: the aspect ratio along the y axis, which determines the pixel size/dimension
        :param records: a list of records/rows from the vector content
        :param output: the absolute path to the output file
"""
img = Image.new("RGB", (width, height), "black")
classes_content = classes['color']
draw = ImageDraw.Draw(img)
for i, record in enumerate(records):
if record[1] in classes_content:
parts = shapes[i].parts
pixels = []
fill_color = "rgb(" + str(classes_content[record[1]][0]) + ", " + \
str(classes_content[record[1]][1]) + ", " + str(classes_content[record[1]][2]) + ")"
for x, y in shapes[i].points:
px = int(width - ((ext[3][0] - x) * x_ratio))
py = int((ext[3][1] - y) * y_ratio)
pixels.append((px, py))
if len(parts) > 1:
polygons_parts = self.slice_array(pixels, parts)
for k in range(len(polygons_parts)):
draw.polygon(polygons_parts[k], outline=None, fill=fill_color)
else:
draw.polygon(pixels, outline=None, fill=fill_color)
img.save(output)
def draw_one_hot_label(self, image_path, classes):
"""
        Auxiliary method to convert a class-id label image into its one-hot
        encoded representation, with one output channel per class present
        :param image_path: the absolute path to the image file
        :param classes: the classes represented in the image, one channel each
"""
im = imread(image_path, 0)
one_hot = np.zeros((im.shape[0], im.shape[1], len(classes)))
for i, unique_value in enumerate(np.unique(im)):
one_hot[:, :, i][im == unique_value] = 1
        # PIL cannot build an image directly from float64 data; cast before saving
        im = Image.fromarray(one_hot.astype(np.uint8))
        im.save(image_path)
def shp2png(self, raster_folder, shapefile_folder, output_folder, width, height, classes, label_type):
"""
        Transform the vector files in shapefile_folder, each of which has a corresponding image
        with the same name in raster_folder (with geographic metadata information), into PNG
        label images, where each polygon in the vector is read and drawn according to the defined classes.
Source:
- https://github.com/GeospatialPython/geospatialpython/blob/master/shp2img.py
Example of classes variable:
classes = {
"def": [255, 255, 0],
"water": [102, 204, 255],
"cloud": [255, 255, 255],
"shadow": [128, 128, 128],
"other": [0, 0, 0],
}
        :param raster_folder: the absolute path to the rasters, with all georeferenced information
        :param shapefile_folder: the absolute path to the vector files, with polygons representing
        the location of the objects of interest
        :param output_folder: the absolute path to the outputs
        :param width: the width of each tile presented in shapefile_folder
        :param height: the height of each tile presented in shapefile_folder
        :param classes: the classes to be found in the vector file and drawn in png format
        :param label_type: the type of label image. Options are: class_id, rgb or one_hot
"""
files = os.listdir(shapefile_folder)
shp_list = [file for file in files if file.endswith(settings.VALID_VECTOR_EXTENSION)]
for shape in shp_list:
name, file_extension = os.path.splitext(shape)
shape_path = os.path.join(shapefile_folder, shape)
output = os.path.join(output_folder, name + ".png")
raster = os.path.join(raster_folder, name + ".tif")
# TODO: the raster extension could vary from TIF to TIFF, tif, tiff, so on.
if os.path.isfile(raster):
tile = gdal.Open(raster)
gt = tile.GetGeoTransform()
cols_tile = tile.RasterXSize
rows_tile = tile.RasterYSize
ext = self.get_extent(gt, cols_tile, rows_tile)
else:
continue
if os.path.isfile(shape_path):
# TODO: to predict the encoding - hardcoded
r = shapefile.Reader(shape_path, encoding='ISO8859-1')
if not r:
logging.info('>>>> Error: could not open the shapefile')
continue
else:
            logging.info('>>>> Error: shapefile {} does not exist'.format(shape_path))
continue
x_dist = ext[3][0] - ext[1][0]
y_dist = ext[3][1] - ext[1][1]
x_ratio = width / x_dist
y_ratio = height / y_dist
shapes = r.shapes()
records = r.records()
if label_type == 'class_id':
self.draw_class_id_label(width, height, classes, shapes, ext, x_ratio, y_ratio, records, output)
elif label_type == 'rgb':
self.draw_rgb_label(width, height, classes, shapes, ext, x_ratio, y_ratio, records, output)
elif label_type == 'one_hot':
self.draw_rgb_label(width, height, classes, shapes, ext, x_ratio, y_ratio, records, output)
self.draw_one_hot_label(output, classes)
else:
logging.warning(">>>>>> Wrong label type: {} . Options are: class_id, rgb or one_hot"
.format(label_type))
def get_extent(self, gt, cols, rows):
"""
        Read and return the extent bounds of a geographic raster in x,y coordinates
Source:
- https://gis.stackexchange.com/questions/57834/
how-to-get-raster-corner-coordinates-using-python-gdal-bindings
:param gt: the GeoTransform metadata from geographic raster tile
:param cols: the number of columns in tile
:param rows: the number of rows in tile
        :return: the converted extent bounds in x,y coordinates
"""
ext = []
x_arr = [0, cols]
y_arr = [0, rows]
for px in x_arr:
for py in y_arr:
x = gt[0] + (px * gt[1]) + (py * gt[2])
y = gt[3] + (px * gt[4]) + (py * gt[5])
ext.append([x, y])
y_arr.reverse()
return ext
    def tiling_raster(self, image, output_folder, width, height, stretching=True):
        """
        Take an image with high dimensions and tile it into smaller pieces of width x height
        :param image: the absolute path to the image file (raster)
        :param output_folder: the absolute path to the outputs
        :param width: the width of each tile
        :param height: the height of each tile
        :param stretching: default True. If False, the outputs keep the input's original value range
"""
if os.path.isfile(image) and image.endswith(settings.VALID_RASTER_EXTENSION):
filename = os.path.basename(image)
name, file_extension = os.path.splitext(filename)
ds = gdal.Open(image)
datatype = ds.GetRasterBand(1).DataType
dtype = gdal.GetDataTypeName(datatype)
            if ds is None:
                logging.warning(">>>>>> Could not open image file {}. Skipped!".format(image))
                return
            cols = ds.RasterXSize   # raster width in pixels
            rows = ds.RasterYSize   # raster height in pixels
            tiles_cols = cols / width
            tiles_rows = rows / height
            logging.info(">>>> File {} opened! Image with [{} x {}] size and {} type!".format(image, cols, rows, dtype))
            logging.info(">> Tiling image {}. {} x {} pixels. Estimated {} tiles of {} x {}..."
                         .format(image, cols, rows, round(tiles_cols * tiles_rows), width, height))
gdal.UseExceptions()
            for i in range(0, cols, width):
                for j in range(0, rows, height):
try:
output = os.path.join(output_folder, name + "_" + str(i) + "_" + str(j) + file_extension)
if settings.ALL_BANDS is True:
options = ['-epo', '-eco']
else:
options = ['-epo', '-eco',
'-b', settings.RASTER_TILES_COMPOSITION[0],
'-b', settings.RASTER_TILES_COMPOSITION[1],
'-b', settings.RASTER_TILES_COMPOSITION[2]]
                        if stretching is True:
stats = [ds.GetRasterBand(i + 1).GetStatistics(True, True) for i in range(ds.RasterCount)]
vmin, vmax, vmean, vstd = zip(*stats)
scale_params = [[list(zip(*[vmin, vmax]))]]
else:
scale_params = None
gdal.Translate(output, ds, format='GTIFF', srcWin=[i, j, width, height],
outputType=datatype, scaleParams=scale_params,
options=options)
except RuntimeError:
continue
else:
logging.info(">>>> Image file {} does not exist or it is an invalid extension!".format(image))
logging.info(">>>> Done")
def tiling_vector(self, image_tiles_folder, shp_reference, output_folder):
"""
        Take a vector file and tile it according to the extents of the images present in image_tiles_folder
        :param image_tiles_folder: the absolute path to the folder of image tiles (rasters)
        :param shp_reference: the vector file to be tiled, which covers the same region as the
        images present in image_tiles_folder
        :param output_folder: the absolute path to the outputs
"""
if not os.path.isdir(image_tiles_folder):
logging.warning(">>>> {} is not a folder!".format(image_tiles_folder))
return
if not os.path.isfile(shp_reference):
logging.warning(">>>> {} is not a file!".format(shp_reference))
return
filename = os.path.basename(shp_reference)
name, file_extension = os.path.splitext(filename)
if file_extension.lower() not in settings.VALID_VECTOR_EXTENSION:
logging.warning(">>>> {} not a valid extension for a vector!".format(file_extension))
return
list_correspondent_raster = glob.glob(os.path.join(image_tiles_folder, name + '*'))
if len(list_correspondent_raster) == 0:
logging.info(">>>> No raster tiles with shapefile suffix {}".format(name))
return
logging.info(">> Tiling vector {} respecting to the tiles extends".format(shp_reference))
for image in list_correspondent_raster:
filename = os.path.basename(image)
name, ext = os.path.splitext(filename)
if ext.lower() not in settings.VALID_RASTER_EXTENSION:
logging.warning(">>>> {} not a valid extension for a raster!".format(ext))
continue
output = os.path.join(output_folder, name + ".shp")
complete_path = os.path.join(image_tiles_folder, image)
tile = gdal.Open(complete_path)
gt = tile.GetGeoTransform()
cols_tile = tile.RasterXSize
rows_tile = tile.RasterYSize
ext = self.get_extent(gt, cols_tile, rows_tile)
bounds = Polygon(ext)
baseshp = gp.read_file(shp_reference)
# baseshp = baseshp.to_crs(epsg=32722)
if os.path.isfile(output):
continue
else:
ids = []
classes = []
polygons_intersecting = []
for i in range(len(baseshp)):
p1 = baseshp['geometry'][i]
p2 = bounds
if p1 is None:
logging.info(">>>>>> Geometry is empty! File {}".format(os.path.basename(shp_reference)))
continue
if p1.is_valid is False:
p1 = p1.buffer(0)
if not p1.intersection(p2).is_empty:
ids.append(i)
classes.append(baseshp[settings.CLASS_NAME][i])
polygons_intersecting.append(p1.intersection(p2))
if len(polygons_intersecting) != 0:
gdf = gp.GeoDataFrame()
gdf.crs = baseshp.crs
gdf['id'] = ids
gdf['class'] = classes
gdf['geometry'] = polygons_intersecting
gdf.to_file(output, driver='ESRI Shapefile')
logging.info(">>>> Raster {} intersect {}. Vector tile {} created!".
format(complete_path, os.path.basename(shp_reference), output))
else:
logging.warning(">>>> Raster {} does not intersect {}. Discarted!".
format(complete_path, os.path.basename(shp_reference)))
os.remove(complete_path)
logging.info(">>>> Done")
# coding=utf-8
# Copyright 2018-2020 EVA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from typing import List
import numpy as np
import pandas as pd
import torch
from torch import nn, Tensor
from PIL import Image
from torchvision import transforms
from src.udfs.filters.abstract_filter import AbstractFilter
from src.udfs.gpu_compatible import GPUCompatible
from src.configuration.configuration_manager import ConfigurationManager
class PytorchAbstractFilter(AbstractFilter, nn.Module, GPUCompatible, ABC):
"""
A PyTorch based filter.
"""
def __init__(self):
AbstractFilter.__init__(self)
nn.Module.__init__(self)
def get_device(self):
return next(self.parameters()).device
@property
def transforms(self) -> transforms.Compose:
return transforms.Compose([
transforms.Lambda(lambda x: Image.fromarray(x[:, :, ::-1])),
transforms.Resize((224, 224)),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[
0.229, 0.224, 0.225]),
transforms.Lambda(lambda x: x.unsqueeze(0))
])
def transform(self, images: np.ndarray):
return self.transforms(images)
def forward(self, frames: List[np.ndarray]):
tens_batch = torch.cat([self.transform(x) for x in frames])\
.to(self.get_device())
return self.classify(tens_batch)
@abstractmethod
def _get_predictions(self, frames: Tensor) -> pd.DataFrame:
"""
Abstract method to work with tensors.
Specified transformations are already applied.
Arguments:
frames (Tensor): tensor on which transformation is performed
Returns:
pd.DataFrame: outcome after prediction
"""
def classify(self, frames: Tensor) -> pd.DataFrame:
"""
Given the gpu_batch_size, we split the input tensor into chunks,
and call the _get_predictions and merge the results.
Arguments:
frames (Tensor): tensor on which transformation is performed
Returns:
pd.DataFrame: outcome after prediction
"""
gpu_batch_size = ConfigurationManager()\
.get_value('executor', 'gpu_batch_size')
if gpu_batch_size:
chunks = torch.split(frames, gpu_batch_size)
outcome = pd.DataFrame()
for tensor in chunks:
outcome = outcome.append(self._get_predictions(tensor),
ignore_index=True)
return outcome
else:
return self._get_predictions(frames)
def as_numpy(self, val: Tensor) -> np.ndarray:
"""
Given a tensor in GPU, detach and get the numpy output.
Arguments:
val (Tensor): tensor to be converted
Returns:
np.ndarray: numpy array representation
"""
return val.detach().cpu().numpy()
def to_device(self, device: str):
"""
Transfer filter to specified device.
Arguments:
device (str): device's string identifier
Returns:
New instance on desired device
"""
return self.to(torch.device("cuda:{}".format(device)))
def __call__(self, *args, **kwargs):
frames = None
if len(args):
frames = args[0]
if isinstance(frames, pd.DataFrame):
frames = frames.transpose().values.tolist()[0]
return nn.Module.__call__(self, frames, **kwargs)
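# A hedged sketch of a concrete subclass (the model choice is an assumption and
# not part of this file): implement _get_predictions to complete the filter.
# class ToyFilter(PytorchAbstractFilter):
#     def __init__(self):
#         super().__init__()
#         self.linear = nn.Linear(3 * 224 * 224, 2)   # transforms yield (N, 3, 224, 224)
#     def _get_predictions(self, frames: Tensor) -> pd.DataFrame:
#         scores = self.linear(frames.flatten(1))
#         return pd.DataFrame({'label': self.as_numpy(scores.argmax(dim=1))})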
try:
import mock
except ImportError: # PY3
from unittest import mock
from simple_module.level_two.other import other_func
def test_other_func(mock_func):
"""
You can mock functions imported from a different
module.
"""
mock_func.return_value = mock.sentinel.retval
ret = other_func(mock.sentinel.a, mock.sentinel.b)
mock_func.assert_called_once_with(
mock.sentinel.a)
assert ret == (mock.sentinel.b, mock.sentinel.retval)
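# Note: as written, test_other_func expects mock_func to be injected, so the
# snippet is missing a mock.patch decorator. A hedged reconstruction (the
# patched name is hypothetical -- it must be whatever other_func calls):
# @mock.patch('simple_module.level_two.other.some_dependency')
# def test_other_func(mock_func):
#     ...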
# -*- coding: utf-8 -*-
import curses
from curses import panel
from strategy import get_pytify_class_by_platform
"""
TODO: Rewrite this crappy menu class
"""
class Menu(object):
def __init__(self, items, stdscreen):
self.pytify = get_pytify_class_by_platform()()
self.window = stdscreen.subwin(0, 0)
self.window.keypad(1)
self.panel = panel.new_panel(self.window)
self.panel.hide()
panel.update_panels()
self.position = 2
self.items = items
self.song_length = len(items) - 1
self.items.append(' ')
self.items.append('<UP> and <DOWN> for navigation.')
self.items.append('<Enter> to select song.')
self.items.append('<Esc> for search.')
self.items.append('<LEFT> and <RIGHT> for prev/next song.')
self.items.append('<SPACEBAR> for play/pause.')
def navigate(self, n):
self.position += n
if self.position < 2:
self.position = 2
elif self.position > self.song_length:
self.position = self.song_length
def display(self):
self.panel.top()
self.panel.show()
self.window.clear()
while True:
self.window.refresh()
curses.doupdate()
for index, item in enumerate(self.items):
if index == self.position:
mode = curses.A_REVERSE
else:
mode = curses.A_NORMAL
self.window.addstr(index, 1, str(item), mode)
key = self.window.getch()
if key in [curses.KEY_ENTER, ord('\n')]:
self.pytify.listen(int(self.position - 1))
elif key == curses.KEY_UP:
self.navigate(-1)
elif key == curses.KEY_DOWN:
self.navigate(1)
elif key == curses.KEY_LEFT:
self.pytify.prev()
elif key == curses.KEY_RIGHT:
self.pytify.next()
# spacebar
elif key == 32:
self.pytify.play_pause()
# escape
elif key == 27:
break
self.window.clear()
self.panel.hide()
panel.update_panels()
curses.doupdate()
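# A hedged usage sketch (the item list is a placeholder; Menu expects two
# header rows, since navigation starts at index 2). curses.wrapper restores the
# terminal state even if display() raises.
# def main(stdscr):
#     curses.curs_set(0)
#     Menu(['Search results', '--------------', '1. Song A', '2. Song B'], stdscr).display()
# curses.wrapper(main)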
<filename>models/equipment.py
from pydantic import BaseModel, Field
from typing import List, Optional
from helpers.objectid import PyObjectId
from bson import ObjectId
class Equipment(BaseModel):
id: Optional[PyObjectId] = Field(alias='_id')
mac_address: str
name: str
description: str
    sampling_frequency: str
available: bool = True
sensors: Optional[List[PyObjectId]]
class Config:
arbitrary_types_allowed = True
json_encoders = {
ObjectId: str
}
schema_extra = {
"example": {
"mac_address": "string",
"name": "string",
"description": "string",
"samppling_frequency": "string",
"available": "false"
}
}
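# A minimal usage sketch (assuming this project's helpers.objectid module is
# importable): pydantic validates the payload and serializes with aliases.
# eq = Equipment(mac_address="AA:BB:CC:DD:EE:FF", name="pump-1",
#                description="vibration node", sampling_frequency="100Hz")
# print(eq.json(by_alias=True))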
<reponame>emina13/ITMO_ICT_WebDevelopment_2021-2022
from rest_framework.authentication import TokenAuthentication
from rest_framework.generics import *
from rest_framework.permissions import *
from .serializers import *
class IsManager(BasePermission):
def has_permission(self, request, view):
return request.user.type == 'manager'
class IsDeputy(BasePermission):
    def has_permission(self, request, view):
        return request.user.type == 'deputy'
class StudentListView(ListAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
permission_classes = [AllowAny]
class StudentAllView(RetrieveUpdateDestroyAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
permission_classes = [IsDeputy]
class StudentCreateView(CreateAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
permission_classes = [IsDeputy]
class TeacherListView(ListAPIView):
queryset = Teacher.objects.all()
serializer_class = TeacherSerializer
permission_classes = [AllowAny]
class TeacherAllView(RetrieveUpdateDestroyAPIView):
queryset = Teacher.objects.all()
serializer_class = TeacherSerializer
permission_classes = [IsDeputy]
class TeacherCreateView(CreateAPIView):
queryset = Teacher.objects.all()
serializer_class = TeacherSerializer
permission_classes = [IsDeputy]
class SubjectListView(ListAPIView):
queryset = Subject.objects.all()
serializer_class = SubjectSerializer
permission_classes = [AllowAny]
class SubjectAllView(RetrieveUpdateDestroyAPIView):
queryset = Subject.objects.all()
serializer_class = SubjectSerializer
permission_classes = [IsDeputy]
class SubjectCreateView(CreateAPIView):
queryset = Subject.objects.all()
serializer_class = SubjectSerializer
permission_classes = [IsDeputy]
class MarkListView(ListAPIView):
queryset = Mark.objects.all()
serializer_class = MarkSerializer
permission_classes = [AllowAny]
class MarkAllView(RetrieveUpdateDestroyAPIView):
queryset = Mark.objects.all()
serializer_class = MarkSerializer
permission_classes = [IsDeputy]
class MarkCreateView(CreateAPIView):
queryset = Mark.objects.all()
serializer_class = MarkSerializer
permission_classes = [IsDeputy]
class PairListView(ListAPIView):
queryset = Pair.objects.all()
serializer_class = PairSerializer
permission_classes = [AllowAny]
class PairAllView(RetrieveUpdateDestroyAPIView):
queryset = Pair.objects.all()
serializer_class = PairSerializer
permission_classes = [IsManager]
class PairCreateView(CreateAPIView):
queryset = Pair.objects.all()
serializer_class = PairSerializer
permission_classes = [IsManager]
class SubjectToTeacherListView(ListAPIView):
queryset = SubjectToTeacher.objects.all()
serializer_class = SubjectToTeacherSerializer
permission_classes = [AllowAny]
class SubjectToTeacherAllView(RetrieveUpdateDestroyAPIView):
queryset = SubjectToTeacher.objects.all()
serializer_class = SubjectToTeacherSerializer
permission_classes = [IsDeputy]
class SubjectToTeacherCreateView(CreateAPIView):
queryset = SubjectToTeacher.objects.all()
serializer_class = SubjectToTeacherSerializer
permission_classes = [IsDeputy]
class GroupListView(ListAPIView):
queryset = Group.objects.all()
serializer_class = GroupSerializer
permission_classes = [AllowAny]
class GroupCreateView(CreateAPIView):
queryset = Group.objects.all()
serializer_class = GroupSerializer
permission_classes = [IsDeputy]
class GroupAllView(RetrieveUpdateDestroyAPIView):
queryset = Group.objects.all()
serializer_class = GroupSerializer
permission_classes = [IsDeputy]
class StudentToGroupListView(ListAPIView):
queryset = StudentToGroup.objects.all()
serializer_class = StudentToGroupSerializer
permission_classes = [AllowAny]
class StudentToGroupAllView(RetrieveUpdateDestroyAPIView):
queryset = StudentToGroup.objects.all()
serializer_class = StudentToGroupSerializer
permission_classes = [IsDeputy]
class StudentToGroupCreateView(CreateAPIView):
queryset = StudentToGroup.objects.all()
serializer_class = StudentToGroupSerializer
permission_classes = [IsDeputy]
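# A hedged sketch of how these views might be wired in urls.py (module path and
# routes are assumptions; this file only defines the views):
# from django.urls import path
# urlpatterns = [
#     path('students/', StudentListView.as_view()),
#     path('students/create/', StudentCreateView.as_view()),
#     path('students/<int:pk>/', StudentAllView.as_view()),
# ]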
from collections import defaultdict
from typing import Iterable, List
import torch
from ppq.core import (COMPELING_OP_TYPES, LINEAR_ACTIVATIONS,
ORT_OOS_FUSE_START_OPS, PPLCUDA_ACTIVATIONS,
QuantizationProperty, QuantizationStates, RoundingPolicy,
TargetPlatform, TensorQuantizationConfig,
empty_ppq_cache)
from ppq.executor import BaseGraphExecutor
from ppq.IR import GraphCommandProcesser, QuantableOperation, Variable
from ppq.IR.base.graph import Operation
from ppq.IR.quantize import QuantableVariable
from ppq.IR.search import SearchableGraph, TraversalCommand
from ppq.quantization.observer.range import minmax_to_scale_offset
from .base import QuantizationOptimizationPass
class QuantizeReducePass(QuantizationOptimizationPass):
"""
    QuantizeReducePass simplifies quantization fixation: every quantable operator
    normally carries fixation info on both its input and output, and at runtime
    half of those points can usually be blocked to speed inference up.
    QuantizeReducePass is designed to find the fixation points that can be blocked.

    For two neighbouring operators (op_1 -> op_2) there are four situations:
        1. neither op_1 nor op_2 is quantized: the data flow needs no extra handling
        2. op_1 is quantized, op_2 is not: op_1 must quantize its result
        3. op_1 is not quantized, op_2 is: the data flow is quantized with op_2's parameters
        4. both are quantized, depending on bit width:
            4.1. op_1's bit width is higher than op_2's: quantize the data flow with op_2's parameters
            4.2. op_1's bit width is lower than op_2's: quantize the data flow with op_1's parameters
            4.3. equal bit widths: quantize the data flow with op_1's parameters

    For more complex structures such as          ------> op_2
                                        op_1 ----+
                                                 ------> op_3
    if op_1 carries fixation info, the data flow must be quantized at op_1, and
    op_2 / op_3 then each decide separately whether their input needs to be
    quantized again.

    Conclusion:
        when the bit width of the downstream op is greater than or equal to that
        of the upstream op, quantize with the upstream op's parameters
        (quantization happens upstream); otherwise quantize with the downstream
        op's parameters (quantization happens downstream, and the upstream
        quantization may still not be omitted).
"""
def __init__(self) -> None:
super().__init__(name='PPQ Quantize Point Reduce Pass')
def optimize(
self,
processer: GraphCommandProcesser,
dataloader: Iterable,
executor: BaseGraphExecutor,
**kwargs
) -> None:
graph = processer.graph
        for _, variable in graph.variables.items():
            assert isinstance(variable, Variable)
            source_op = variable.source_op
            if source_op is None: continue # input variables in network, they do not have a source
            if not isinstance(source_op, QuantableOperation): continue
            source_config = source_op.config.output_quantization_config[source_op.outputs.index(variable)]
            for downstream_op, dest_idx in zip(variable.dest_ops, variable.dest_idx):
if downstream_op is None: continue # output variables in network, they do not have a destination
if not isinstance(downstream_op, QuantableOperation): continue
input_config = downstream_op.config.input_quantization_config[dest_idx]
if source_op.platform == downstream_op.platform:
if input_config.state == QuantizationStates.INITIAL:
input_config.dominated_by = source_config
class QuantizeRefinePass(QuantizationOptimizationPass):
"""
    Fix quantization errors on operators. Some special Onnx operators quantize
    only part of their inputs: Reshape, for example, takes a data input and a
    shape input, and shape must not be quantized, so QuantizeRefinePass corrects
    Reshape's quantization config to avoid quantizing the shape input by mistake.
    The 'Reshape', 'Slice', 'Gather', 'Clip', 'Pad', 'Resize' and 'Split'
    operators are handled in detail below, fixing all known quantization-behavior
    errors; the quantizer of every backend platform should apply this pass.
"""
def __init__(self) -> None:
super().__init__(name='PPQ Quantization Config Refine Pass')
@ empty_ppq_cache
def optimize(
self,
processer: GraphCommandProcesser,
dataloader: Iterable,
executor: BaseGraphExecutor,
**kwargs
) -> None:
graph = processer.graph
for _, operation in graph.operations.items():
if not isinstance(operation, QuantableOperation): continue
if operation.type in {'Reshape', 'Slice', 'Gather', 'Clip', 'Pad', 'Resize', 'Split'}:
if operation.type == 'Reshape':
# Inputs:
# data (differentiable) : T
# An input tensor.
# shape (non-differentiable) : tensor(int64)
# Specified shape for output.
                    # see also https://github.com/onnx/onnx/blob/master/docs/Operators.md#Reshape
                    assert len(operation.config.input_quantization_config) == 2, f'Reshape Operation {operation.name} should have exactly 2 inputs, '\
                        f'while {len(operation.config.input_quantization_config)} was given, is graph definition different from onnx opset 11?'
operation.config.input_quantization_config[-1].state = QuantizationStates.SOI
continue
if operation.type == 'Slice':
# Inputs (3 - 5)
# data (differentiable) : T
# Tensor of data to extract slices from.
# starts (non-differentiable) : Tind
# 1-D tensor of starting indices of corresponding axis in `axes`
# ends (non-differentiable) : Tind
# 1-D tensor of ending indices (exclusive) of corresponding axis in `axes`
# axes (optional, non-differentiable) : Tind
# 1-D tensor of axes that `starts` and `ends` apply to. Negative value means
# counting dimensions from the back. Accepted range is [-r, r-1] where r = rank(data).
# steps (optional, non-differentiable) : Tind
# 1-D tensor of slice step of corresponding axis in `axes`.
# Negative value means slicing backward. 'steps' cannot be 0. Defaults to 1.
                    # see also https://github.com/onnx/onnx/blob/master/docs/Changelog.md#Slice-11
                    assert len(operation.config.input_quantization_config) in {3, 4, 5}, f'Slice Operation {operation.name} should have 3 - 5 inputs, '\
                        f'while {len(operation.config.input_quantization_config)} was given, is graph definition different from onnx opset 11?'
for config in operation.config.input_quantization_config[1: ]:
config.state = QuantizationStates.SOI
continue
if operation.type == 'Gather':
# Inputs
# data (differentiable) : T
# Tensor of rank r >= 1.
# indices (non-differentiable) : Tind
# Tensor of int32/int64 indices, of any rank q.
# All index values are expected to be within bounds [-s, s-1] along axis of size s.
# It is an error if any of the index values are out of bounds.
# see also https://github.com/onnx/onnx/blob/master/docs/Changelog.md#Gather-11
                    assert len(operation.config.input_quantization_config) == 2, f'Gather Operation {operation.name} should have 2 inputs, '\
                        f'while {len(operation.config.input_quantization_config)} was given, is graph definition different from onnx opset 11?'
operation.config.input_quantization_config[-1].state = QuantizationStates.SOI
continue
if operation.type == 'Clip':
# Inputs (1 - 3)
# input : T
# Input tensor whose elements to be clipped
# min (optional) : T
# Minimum value, under which element is replaced by min.
# It must be a scalar(tensor of empty shape).
# max (optional) : T
# Maximum value, above which element is replaced by max.
# It must be a scalar(tensor of empty shape).
                    # see also https://github.com/onnx/onnx/blob/master/docs/Changelog.md#Clip-11
                    assert len(operation.config.input_quantization_config) in {1, 2, 3}, f'Clip Operation {operation.name} should have 1 - 3 inputs, '\
                        f'while {len(operation.config.input_quantization_config)} was given, is graph definition different from onnx opset 11?'
for config in operation.config.input_quantization_config[1: ]:
config.state = QuantizationStates.PASSIVE_INIT
continue
if operation.type == 'Pad':
# Inputs (2 - 3)
# data : T
# Input tensor.
# pads : tensor(int64)
# Tensor of integers indicating the number of padding elements to add or remove
# (if negative) at the beginning and end of each axis.
# For 2D input tensor, it is the number of pixels. `pads` should be a 1D tensor of shape [2 * input_rank].
# `pads` format should be: [x1_begin, x2_begin,...,x1_end, x2_end,...],
# where xi_begin is the number of pad values added at the beginning of axis `i` and xi_end,
# the number of pad values added at the end of axis `i`.
# constant_value (optional) : T
# (Optional) A scalar value to be used if the mode chosen is `constant` (by default it is 0).
# https://github.com/onnx/onnx/blob/master/docs/Changelog.md#Pad-11
                    assert len(operation.config.input_quantization_config) in {2, 3}, f'Pad Operation {operation.name} should have 2 - 3 inputs, '\
                        f'while {len(operation.config.input_quantization_config)} was given, is graph definition different from onnx opset 11?'
operation.config.input_quantization_config[1].state = QuantizationStates.SOI
if len(operation.config.input_quantization_config) == 3:
operation.config.input_quantization_config[-1].state = QuantizationStates.PASSIVE_INIT
continue
if operation.type == 'Resize':
# Inputs (3 - 4)
# X : T1
# N-D tensor
# roi : T2
# 1-D tensor given as [start1, ..., startN, end1, ..., endN],
# where N is the rank of X. The RoIs' coordinates are normalized in the coordinate system of the input image.
# It only takes effect when coordinate_transformation_mode is "tf_crop_and_resize"
# scales : tensor(float)
# The scale array along each dimension.
# It takes value greater than 0. If it's less than 1, it's sampling down,
# otherwise, it's upsampling. The number of elements of 'scales' should be the same as the rank of input 'X'.
# Only one of 'scales' and 'sizes' can be specified.
# If 'size' is needed, the user can use an empty string as the name of 'scales' in this operator's input list.
# sizes (optional) : tensor(int64)
# The size of the output tensor.
# The number of elements of 'sizes' should be the same as the rank of input 'X'.
# Only one of 'scales' and 'sizes' can be specified.
# https://github.com/onnx/onnx/blob/master/docs/Changelog.md#Resize-11
                    assert len(operation.config.input_quantization_config) in {3, 4}, f'Resize Operation {operation.name} should have 3 - 4 inputs, '\
                        f'while {len(operation.config.input_quantization_config)} was given, is graph definition different from onnx opset 11?'
for config in operation.config.input_quantization_config[1: ]:
config.state = QuantizationStates.SOI
continue
if operation.type == 'Split':
# Inputs (1 - 2)
# input (differentiable) : T
# The tensor to split
# split (optional, non-differentiable) : tensor(int64) (opset 13)
# Optional length of each output.
# Values should be >= 0.Sum of the values must be equal to the dim value at 'axis' specified.
# see also: https://github.com/onnx/onnx/blob/master/docs/Changelog.md#Split-11
# see also: https://github.com/onnx/onnx/blob/master/docs/Operators.md#Split
                    assert len(operation.config.input_quantization_config) in {1, 2}, f'Split Operation {operation.name} should have 1 - 2 inputs, '\
                        f'while {len(operation.config.input_quantization_config)} was given, is graph definition different from onnx opset 11?'
for config in operation.config.input_quantization_config[1: ]:
config.state = QuantizationStates.SOI
continue
class NxpInputRoundingRefinePass(QuantizationOptimizationPass):
def __init__(self) -> None:
super().__init__(name='PPQ Input Quantization Refine Pass')
def optimize(self, processer: GraphCommandProcesser,
dataloader: Iterable, executor: BaseGraphExecutor, **kwargs) -> None:
graph = processer.graph
for variable in graph.variables.values():
if isinstance(variable, QuantableVariable):
if variable.source_op is None or not isinstance(variable.source_op, QuantableOperation):
for config in variable.dest_op_configs:
if config is None: continue
config.rounding = RoundingPolicy.ROUND_HALF_DOWN
class NxpQuantizeFusionPass(QuantizationOptimizationPass):
def __init__(self) -> None:
super().__init__(name='PPQ Quantization Fusion Pass')
@ empty_ppq_cache
def optimize(
self,
processer: GraphCommandProcesser,
dataloader: Iterable,
executor: BaseGraphExecutor,
**kwargs
) -> None:
graph = processer.graph
processer = SearchableGraph(processer)
relu_fusion_matching = processer.activation_matching(
start_op_types=['Conv', 'Add'], end_types=['Relu'])
for conv_name, activation_names in relu_fusion_matching.items():
conv = graph.operations[conv_name]
if not isinstance(conv, QuantableOperation): continue
if len(activation_names) == 1:
activation = graph.operations[activation_names[0]]
if not isinstance(activation, QuantableOperation): continue
activation_cfg = activation.config.output_quantization_config[0]
conv_cfg = conv.config.output_quantization_config[0]
conv_cfg.dominated_by = activation_cfg
conv_cfg.state = QuantizationStates.OVERLAPPED
concat_fusion_matching = processer.concat_matching(
relay_pattern=lambda x, y: False, end_pattern=lambda _: True)
for concat_name, upstream_layer_collection in concat_fusion_matching.items():
concat = graph.operations[concat_name]
if not isinstance(concat, QuantableOperation): continue
for upstream_layer_name in upstream_layer_collection:
upstream_layer = graph.operations[upstream_layer_name]
if not isinstance(upstream_layer, QuantableOperation): continue
upstream_cfg = upstream_layer.config.output_quantization_config[0]
concat_cfg = concat.config.output_quantization_config[0]
upstream_cfg.dominated_by = concat_cfg
upstream_cfg.state = QuantizationStates.OVERLAPPED
class QuantizeFusionPass(QuantizationOptimizationPass):
def __init__(self, platform: TargetPlatform,
fuse_concat: bool = False,
fuse_activation: bool = True,
fuse_passive_op: bool = True,
) -> None:
self.platform = platform
self.fuse_concat = fuse_concat
self.fuse_activation = fuse_activation
self.fuse_passive_op = fuse_passive_op
super().__init__(name='PPQ Quantization Fusion Pass')
def is_same_platform(self, operations: List[Operation]):
platforms = [operation.platform for operation in operations]
return all([platform == platforms[0] for platform in platforms])
    @empty_ppq_cache
def optimize(
self,
processer: GraphCommandProcesser,
dataloader: Iterable,
executor: BaseGraphExecutor,
**kwargs
) -> None:
graph = processer.graph
processer = SearchableGraph(processer)
        # fuse computing operations and their following activations.
if self.fuse_activation:
# find all activation operations
act_ops = []
for op in graph.operations.values():
if not isinstance(op, QuantableOperation): continue
if op.type in LINEAR_ACTIVATIONS: act_ops.append(op)
elif self.platform == TargetPlatform.PPL_CUDA_INT8:
if op.type in PPLCUDA_ACTIVATIONS: act_ops.append(op)
else: continue
# fusion
for op in act_ops:
assert isinstance(op, QuantableOperation)
upstream_ops = graph.get_upstream_operations(op)
                assert len(upstream_ops) == 1, f'Activation operation {op.name} should have exactly one upstream operation.'
upstream_op = upstream_ops[0]
if self.platform == TargetPlatform.ORT_OOS_INT8:
                    if upstream_op.type not in ORT_OOS_FUSE_START_OPS: continue
if (isinstance(upstream_op, QuantableOperation) and
len(graph.get_downstream_operations(upstream_op)) == 1 and
upstream_op.platform == op.platform):
upstream_op.config.output_quantization_config[0].dominated_by = (
op.config.output_quantization_config[0])
if self.fuse_passive_op:
            # passive operations should never change the quantization configuration of their inputs;
            # in other words, their input and output share the same scale.
for op in graph.operations.values():
upstream_layers = graph.get_upstream_operations(op)
                if len(upstream_layers) == 0: continue # beginning op, cannot merge.
if (isinstance(op, QuantableOperation) and
not op.config.is_active_quant_op and
self.is_same_platform(upstream_layers + [op])):
# There are many types of passive operations.
# 'Resize', 'MaxPool', 'GlobalMaxPool',
# 'Slice', 'Pad', 'Split'
# Their first input variable should be data.
input_cfg = op.config.input_quantization_config[0]
for output_cfg in op.config.output_quantization_config:
output_cfg.dominated_by = input_cfg
class InplaceQuantizationSettingPass(QuantizationOptimizationPass):
def __init__(self) -> None:
        super().__init__(name='Inplace Quantization Setting Pass')
def optimize(self, processer: GraphCommandProcesser, dataloader: Iterable,
executor: BaseGraphExecutor, **kwargs) -> None:
for op in processer.graph.operations.values():
if isinstance(op, QuantableOperation):
# set all tensor to be inplace quantized for memory saving.
for cfg, var in op.config_with_variable:
if not var.is_parameter:
cfg.inplace = True
class PPLCudaAddConvReluMerge(QuantizationOptimizationPass):
def __init__(self) -> None:
super().__init__(name='PPL CUDA Conv(Relu) - Add - Relu Merge')
def is_same_platform(self, operations: List[Operation]):
platforms = [operation.platform for operation in operations]
return all([platform == platforms[0] for platform in platforms])
def optimize(self,
processer: GraphCommandProcesser,
dataloader: Iterable,
executor: BaseGraphExecutor,
**kwargs) -> None:
def ep_expr(operation: Operation):
if not isinstance(operation, QuantableOperation): return False
if operation.type == 'Conv': return True
if operation.type in PPLCUDA_ACTIVATIONS:
upstream_ops = graph.get_upstream_operations(operation=operation)
                if len(upstream_ops) == 1:
                    if upstream_ops[0].type == 'Conv': return True
                    if upstream_ops[0] in merged: return True
return False
        def retrospect(operation: QuantableOperation) -> QuantableOperation:
            if not isinstance(operation, QuantableOperation): return None
if len(graph.get_upstream_operations(operation)) != 1: return None
parent = graph.get_upstream_operations(operation)[0]
if parent.type != 'Conv': return None
if not isinstance(parent, QuantableOperation): return None
return parent
def merge_fn(operation: QuantableOperation):
assert isinstance(operation, QuantableOperation) and operation.type == 'Add'
# check if upstream ops can be merged
up_ops = graph.get_upstream_operations(operation)
if not self.is_same_platform(up_ops + [operation]): return
# Conv - Add - Relu Merge
config = operation.config.output_quantization_config[0]
# Step - 1: merge add output to next activation.
down_ops = graph.get_downstream_operations(operation)
if (len(down_ops) == 1 and
down_ops[0].type in PPLCUDA_ACTIVATIONS and
isinstance(down_ops[0], QuantableOperation) and
down_ops[0].platform == operation.platform):
config.dominated_by = down_ops[0].config.output_quantization_config[0]
# Step - 2: disable input conv's quantization(only one).
up_ops = graph.get_upstream_operations(operation)
            assert len(up_ops) == 2, f'Operation {operation.name} should have exactly 2 input operations.'
target_operation = None
for op in up_ops:
if op.type == 'Conv':
target_operation = op
elif op.type in PPLCUDA_ACTIVATIONS:
target_operation = retrospect(operation)
if target_operation is not None:
break
if target_operation is not None:
target_operation.config.output_quantization_config[0].dominated_by = config
graph, merged, unchanged = processer.graph, set(), False
        # merge conv - add iteratively, until none are left.
while not unchanged:
unchanged = True
search_engine = SearchableGraph(processer)
matchings = search_engine(TraversalCommand(
sp_expr=lambda x: (x.type == 'Add' and
isinstance(x, QuantableOperation) and
x not in merged),
rp_expr=lambda x, y: False,
ep_expr=ep_expr,
direction='up'))
            # count how many matched inputs an add operation has.
            counter = defaultdict(int)
# path[0] is add operation.
for path in matchings: counter[path[0]] += 1
for operation, count in counter.items():
if count == 2:
merge_fn(operation)
merged.add(operation)
unchanged = False
class QuantAlignmentPass(QuantizationOptimizationPass):
"""
    Force a shared quantization configuration across multi-input operators (forced fixed-point overriding).
"""
def __init__(self,
elementwise_merge_method: str = 'Align to Large',
concat_merge_method: str = 'Align to Output',
force_overlap: bool = False) -> None:
self.elementwise_merge_method = elementwise_merge_method
self.concat_merge_method = concat_merge_method
self.force_overlap = force_overlap
assert self.elementwise_merge_method in {'Align to Large', 'Align to Output'}, (
'elementwise_merge_method can only be Align to Large or Align to Output')
assert self.concat_merge_method in {'Align to Large', 'Align to Output'}, (
'concat_merge_method can only be Align to Large or Align to Output')
super().__init__(name='PPQ Quantization Alignment Pass')
def align_to_large(self, op: QuantableOperation) -> TensorQuantizationConfig:
"""
        Align quant scale and offset to the largest input range.
        The first input config will be set as the master config;
        all slave configs will share the same scale and offset as the master.
        Any change to a slave config will be rejected from then on.
"""
global_min, global_max, master_config = 0, 0, op.config.input_quantization_config[0]
for config in op.config.input_quantization_config:
assert config.policy.has_property(QuantizationProperty.PER_TENSOR), (
'Quant Alignment can only happen with per tensor quantization.')
local_min = config.scale * (config.quant_min - config.offset)
local_max = config.scale * (config.quant_max - config.offset)
assert isinstance(local_min, torch.Tensor)
assert isinstance(local_max, torch.Tensor)
global_max = max(global_max, local_max.item())
global_min = min(global_min, local_min.item())
# recompute scale and offset
scale, offset = minmax_to_scale_offset(
global_min, global_max, op.config.input_quantization_config[0])
device = master_config.scale.device
master_config._father_config = master_config
master_config.state = QuantizationStates.ACTIVATED
master_config.scale = torch.tensor([scale], dtype=torch.float32, device=device)
master_config.offset = torch.tensor([offset], dtype=torch.float32, device=device)
for slave_config in op.config.input_quantization_config[1: ]:
slave_config.set_master(master=master_config)
return master_config
def align_to_output(self, op: QuantableOperation) -> TensorQuantizationConfig:
"""
        Align quant scale and offset to the output config.
        All input configs will share the same scale and offset as the
        output config (as slaves to the output config).
        Any change to a slave config will be rejected from then on.
"""
master_config = op.config.output_quantization_config[0]
for slave_config in op.config.input_quantization_config:
slave_config.set_master(master=master_config)
return master_config
def optimize(
self, processer: GraphCommandProcesser,
dataloader: Iterable,
executor: BaseGraphExecutor, **kwargs) -> None:
graph = processer.graph
for operation in processer.graph.operations.values():
if isinstance(operation, QuantableOperation) and operation.type in COMPELING_OP_TYPES:
assert operation.config.output_quantization_config[0].state != QuantizationStates.INITIAL, (
                    f'Cannot modify quantization state of operation {operation.name}, '
                    'because it has not been correctly quantized.')
method = self.elementwise_merge_method if operation.type != 'Concat' else self.concat_merge_method
if method == 'Align to Large':
master_config = self.align_to_large(operation)
else: master_config = self.align_to_output(operation)
                # override upstream layer's config if possible
for up_op in graph.get_upstream_operations(operation):
if not isinstance(up_op, QuantableOperation): continue
if self.force_overlap:
for cfg, var in up_op.config_with_variable:
if operation in var.dest_ops:
cfg.set_master(master=master_config, recursive=True)
else:
if len(graph.get_downstream_operations(up_op)) != 1: continue
for cfg, var in up_op.config_with_variable:
if operation in var.dest_ops:
cfg.set_master(master=master_config, recursive=False)
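# A minimal numeric sketch (not part of the pass above) of the 'Align to
# Large' rule, in plain Python. It assumes per-tensor int8 configs and
# approximates minmax_to_scale_offset with the usual asymmetric formula
# scale = (max - min) / (quant_max - quant_min); the real PPQ helper may
# differ in details.
def _toy_align_to_large(configs, quant_min=-128, quant_max=127):
    # configs: list of (scale, offset) pairs, one per input tensor
    global_min = min(s * (quant_min - o) for s, o in configs)
    global_max = max(s * (quant_max - o) for s, o in configs)
    scale = (global_max - global_min) / (quant_max - quant_min)
    offset = round(quant_min - global_min / scale)
    return scale, offset
# Two inputs with scales 0.1 and 0.05 (offset 0) align to the larger range:
# _toy_align_to_large([(0.1, 0), (0.05, 0)]) -> approximately (0.1, 0)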
|
StarcoderdataPython
|
325690
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
# @package coursera2012
# @author <NAME> <<EMAIL>>
# @version 1.00
# @date 2015-01-01
# @copyright Apache License, Version 2.0
#
# Implementation of the game
# "Guess The Number"
# for the Coursera course
# "An Introduction to Interactive Programming in Python"
#
# Copyright 2012-2015 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
# 1 import modules
import random
from guessthenumber import myRandomSeed
from guessthenumber import guessTheNumber
# 8 run your code
# This statement should be enabled only for testing by the developer,
# not by the evaluation tester.
# It makes the sequence of random numbers repeatable.
random.seed(myRandomSeed)
gTN = guessTheNumber()
gTN.main()
gTN.get_input("Cheat")
gTN.get_input(50)
gTN.get_input(25)
gTN.get_input(13)
gTN.get_input(17)
gTN.get_input(20)
gTN.get_input(22)
gTN.get_input(23)
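# The scripted guesses above follow a bisection strategy over 1..100. Below is
# a minimal sketch of that strategy in plain Python (illustration only; the
# exact midpoints depend on the rounding convention, so the sequence may
# differ slightly from the one scripted above):
def bisect_guesses(secret, low=1, high=100):
    guesses = []
    while low <= high:
        guess = (low + high) // 2
        guesses.append(guess)
        if guess == secret:
            break
        if guess < secret:
            low = guess + 1
        else:
            high = guess - 1
    return guesses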
|
StarcoderdataPython
|
11347224
|
import aiohttp
import config
import ujson
GIPHY_RANDOM_URL = 'http://api.giphy.com/v1/gifs/random?tag=cat&limit=5&api_key='
# Random cat gif from giphy.com
class GiphyConnector:
"""Base gifs and mp4 sources class"""
    # asynchronously load content from a url
@staticmethod
async def get(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
return await resp.text()
@staticmethod
async def get_random_gif() -> str:
response = await GiphyConnector.get(GIPHY_RANDOM_URL + config.GIPHY_API_KEY)
parse_json = ujson.loads(response)
return parse_json['image_original_url']
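# A minimal usage sketch (assumes a valid GIPHY_API_KEY in config and
# Python 3.7+ for asyncio.run):
if __name__ == '__main__':
    import asyncio
    print(asyncio.run(GiphyConnector.get_random_gif()))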
|
StarcoderdataPython
|
9670994
|
<filename>2020/10/p2.py
# Python 3.8.3
from collections import defaultdict
def count_paths(chain):
    d = defaultdict(int)
    d[0] = 1
    # the adapters must be processed in increasing order for the DP to hold
    for i in sorted(chain):
        d[i] = d[i - 3] + d[i - 2] + d[i - 1]
    return d[max(chain)]
def get_input():
with open("input.txt", "r") as f:
return set(int(i) for i in f.read().split())
def main():
    puzzle = get_input()
    return count_paths(puzzle)
if __name__ == "__main__":
import time
start = time.perf_counter()
print(main())
print(time.perf_counter() - start)
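# A quick sanity check of count_paths on the small example from the puzzle
# statement; the expected count of 8 distinct arrangements is an assumption
# taken from the published puzzle text, not from this script:
def _check_example():
    sample = [16, 10, 15, 5, 1, 11, 7, 19, 6, 12, 4]
    assert count_paths(sample) == 8
if __name__ == "__main__":
    _check_example()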
|
StarcoderdataPython
|
11292901
|
<reponame>royerloic/aydin<filename>aydin/util/denoise_nd/test/test_denoise_nd.py
# flake8: noqa
import numpy
from scipy.ndimage import gaussian_filter
from aydin.util.denoise_nd.denoise_nd import extend_nd
def test_denoise_nd():
# raw function that only supports 2D images:
def function(image, sigma):
if image.ndim != 2:
raise RuntimeError("Function only supports arrays of dimensions 2")
return gaussian_filter(image, sigma)
    # extended function that supports all dimensions (with all the caveats associated with how we actually do this extension...)
@extend_nd(available_dims=[2])
def extended_function(image, sigma):
return function(image, sigma)
# Wrongly extended function: we pretend that it can do dim 1 when in fact it can't!
@extend_nd(available_dims=[1, 2])
def wrongly_extended_function(image, sigma):
return function(image, sigma)
image = numpy.zeros((32,))
image[16] = 1
try:
function(image, sigma=1)
assert False
except RuntimeError as e:
# expected!
assert True
try:
extended_function(image, sigma=1)
assert True
except RuntimeError as e:
assert False
try:
wrongly_extended_function(image, sigma=1)
assert False
except RuntimeError as e:
assert True
image = numpy.zeros((32, 5, 64))
image[16, 2, 32] = 1
try:
function(image, sigma=1)
assert False
except RuntimeError as e:
# expected!
assert True
try:
extended_function(image, sigma=1)
assert True
except RuntimeError as e:
assert False
|
StarcoderdataPython
|
4931605
|
"""
Rocketfuel topology and traffic matrix
======================================
This example shows how to import a topology from RocketFuel, configure it
(assign capacities, weights and delays), generate a traffic matrix and
save topology and traffic matrix to XML files.
"""
import fnss
import random
# Import RocketFuel topology
# Replace the filename with the actual location of the file you want to parse
topology = fnss.parse_rocketfuel_isp_map("rocket-fuel-topo-file.cch")
# add capacities
capacities = [1, 10, 40]
capacity_unit = 'Gbps'
fnss.set_capacities_edge_betweenness(topology, capacities, capacity_unit,
weighted=False)
# add weights proportional to inverse of capacity
fnss.set_weights_inverse_capacity(topology)
# add constant link delays of 2 ms
fnss.set_delays_constant(topology, 2, delay_unit='ms')
# generate cyclostationary traffic matrix (period 7 days, 24 samples per day)
tmc = fnss.sin_cyclostationary_traffic_matrix(
topology,
    mean=0.5, # average flow in TM is 0.5 Gbps
stddev=0.05, # this is the std among average flows of different OD pairs
gamma=0.8, # gamma and log_psi are parameters for fitting the std of
log_psi=-0.33, # volume fluctuations over time. Look at Nucci et al. paper
delta=0.2, # traffic variation from period max and avg as fraction of average
n=24, # number of samples per each period
periods=7, # number of periods
max_u=0.9, # max link utilization desired
origin_nodes=None, # Specify origin and destination nodes. If None,
destination_nodes=None # all nodes of the topology are both
) # origin and destination nodes of traffic
# now we generate a static traffic matrix, but this time instead of generating
# a matrix where all nodes are both origin and destinations, we pick up only
# few nodes as sources and destinations of traffic.
# Let's select 5 sources and 5 destinations
nodes = list(topology.nodes()) # materialize the node view so random.sample can draw from it
origins = random.sample(nodes, 5)
destinations = random.sample(nodes, 5)
# generate traffic matrix
tms = fnss.static_traffic_matrix(topology, mean=0.5, stddev=0.05, max_u=0.9,
origin_nodes=origins,
destination_nodes=destinations)
# save topology on a file
fnss.write_topology(topology, 'topology.xml')
# save traffic matrices on files
fnss.write_traffic_matrix(tmc, 'cyclostationary-traffic-matrix.xml')
fnss.write_traffic_matrix(tms, 'static-traffic-matrix.xml')
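# The XML files written above can be read back with fnss's matching readers
# (a minimal sketch; the reader names are assumed from fnss's read/write
# symmetry and should be checked against the installed version):
topology_check = fnss.read_topology('topology.xml')
tm_check = fnss.read_traffic_matrix('cyclostationary-traffic-matrix.xml')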
|
StarcoderdataPython
|
12861724
|
<reponame>gliahq/Glia
from PyQt5.QtWidgets import QTabWidget
from glia.widgets.editor import Editor
class EditorTabs(QTabWidget):
def __init__(self, parent=None):
"""
        Generates a tab with an editor depending on the selected file
        and its type.
"""
super(EditorTabs, self).__init__(parent)
# Tab Properties
self.setTabsClosable(True)
self.setMovable(True)
# Default Editor
self.editor = Editor(self)
self.addTab(self.editor, "main.py")
# Slots
self.tabCloseRequested.connect(self.handle_tab_closed)
def handle_tab_closed(self, index):
self.removeTab(index)
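# A minimal usage sketch (assumes a working PyQt5 installation and that
# glia.widgets.editor.Editor needs no extra setup):
if __name__ == '__main__':
    import sys
    from PyQt5.QtWidgets import QApplication
    app = QApplication(sys.argv)
    tabs = EditorTabs()
    tabs.show()
    sys.exit(app.exec_())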
|
StarcoderdataPython
|
11327597
|
from docker.api import APIClient
from docker.client import DockerClient
import pytest
from deck_chores.utils import split_string
@pytest.fixture
def cfg(mocker):
from deck_chores.config import cfg
cfg.client = mocker.MagicMock(DockerClient)
cfg.client.api = mocker.MagicMock(APIClient)
cfg.debug = True
cfg.default_max = 1
cfg.default_flags = split_string('image,service', sort=True)
cfg.default_user = 'root'
cfg.label_ns = 'deck-chores.'
cfg.service_identifiers = split_string(
'com.docker.compose.project,com.docker.compose.service'
)
cfg.timezone = 'UTC'
yield cfg
|
StarcoderdataPython
|
6696007
|
from mindspore.train.serialization import load_checkpoint, load_param_into_net
def load_ckpt(network, pretrain_ckpt_path, trainable=True):
"""
    Load a pretrained checkpoint into the network; when trainable is False, freeze all parameters (incremental learning or not).
"""
param_dict = load_checkpoint(pretrain_ckpt_path)
load_param_into_net(network, param_dict)
if not trainable:
for param in network.get_parameters():
param.requires_grad = False
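# A minimal usage sketch (the network and the checkpoint path below are
# placeholders for illustration, not part of this module):
if __name__ == '__main__':
    import mindspore.nn as nn
    net = nn.Dense(16, 10)
    load_ckpt(net, 'pretrain.ckpt', trainable=False) # load weights, then freeze them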
|
StarcoderdataPython
|
11288163
|
<filename>money/money.py
from __future__ import annotations
from .currency import Currency
import operator
import math
class Money:
def __init__(self, amount: int, currency: Currency):
self.__assert_amount(amount)
self.__amount = amount
self.__currency = currency
def instance(self, amount: int) -> Money:
"""
Return new money object using the
same currency and given amount
"""
self.__assert_amount(amount)
return self.__class__(
amount, self.currency
)
def __assert_currency(self, money: Money):
"""
Assert that given money has the same currency
"""
if not self.currency.equals(money.currency):
raise ValueError('Currencies do not match')
@staticmethod
def __assert_amount(amount):
"""
Assert that given amount is an integer
"""
if not isinstance(amount, int):
raise ValueError('Amount must be an integer')
@staticmethod
def __assert_operand(operand):
"""
Assert that given operand is a numeric type
"""
if not isinstance(operand, (int, float)):
raise ValueError('Operand must be a numeric value')
@property
def amount(self) -> int:
"""
Return money amount
"""
return self.__amount
@property
def currency(self) -> Currency:
"""
Return currency object
"""
return self.__currency
def __add__(self, money: Money) -> Money:
"""
        Return a new money object that amounts to the
        sum of this object and the given money object
"""
self.__assert_currency(money)
return self.__class__(
self.amount + money.amount, self.currency
)
def __sub__(self, money: Money) -> Money:
"""
        Return a new money object that amounts to the
        difference of this object and the given money object
"""
self.__assert_currency(money)
return self.__class__(
self.amount - money.amount, self.currency
)
    def __mul__(self, factor: int | float) -> Money:
        """
        Return a new money object whose amount is the
        product of this object's amount and the given numeric factor
        """
self.__assert_operand(factor)
return self.__class__(
round(self.amount * factor), self.currency
)
def __eq__(self, money: Money) -> bool:
"""
Check if given money object value
and currency matches this object
"""
if not self.currency.equals(money.currency):
return False
return self.amount == money.amount
def __gt__(self, money: Money) -> bool:
"""
Check if object amount is
greater than given money amount
"""
return self.__compare(money, operator.gt)
def __ge__(self, money: Money) -> bool:
"""
Check if object amount is greater
or if it equals to given money amount
"""
return self.__compare(money, operator.ge)
def __lt__(self, money: Money) -> bool:
"""
Check if object amount is
less than given money amount
"""
return self.__compare(money, operator.lt)
def __le__(self, money: Money) -> bool:
"""
Check if object amount is less or
if it equals to given money amount
"""
return self.__compare(money, operator.le)
def __compare(self, money: Money, operator) -> bool:
"""
Compare object amount to given money
amount using the provided comparison operator
"""
self.__assert_currency(money)
return operator(self.amount, money.amount)
def allocate(self, ratios: list) -> list[Money]:
"""
Allocate object amount to given ratios
and return a collection of new money objects
"""
results = []
fractions = []
total = sum(ratios)
remainder = self.amount
if total == 0:
raise ValueError('Sum of ratios must be greater than zero')
"""
Share for each ratio is calculated
and stored as a new money object
"""
for ratio in ratios:
if ratio < 0:
raise ValueError('Ratio must be zero or positive')
fraction = self.amount * ratio / total
fraction = fraction - math.floor(fraction)
fractions.append(fraction)
share = self.amount * ratio // total
results.append(self.instance(share))
remainder = remainder - share
"""
Distribute the remainder one by one, starting with
the biggest fraction first until all is allocated
"""
while remainder > 0:
index = fractions.index(max(fractions))
fractions[index] = 0
results[index] += self.instance(1)
remainder -= 1
return results
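# A short, self-contained allocation example (illustration only; _StubCurrency
# stands in for the real Currency from money/currency.py, which is assumed to
# expose an equals() method):
def _demo_allocate():
    class _StubCurrency:
        def equals(self, other):
            return True
    money = Money(100, _StubCurrency())
    shares = money.allocate([1, 1, 1])
    # the 1-unit remainder goes to the share with the biggest fraction
    assert [s.amount for s in shares] == [34, 33, 33]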
|
StarcoderdataPython
|
194330
|
<reponame>ripolln/hywaves
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pip
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# import constants
from .config import _faspect, _fsize, _fdpi
def axplot_scatter_mda_vs_data(ax, x_mda, y_mda, x_data, y_data):
    'axes scatter plot of variable1 vs variable2: MDA selection vs full dataset'
# full dataset
ax.scatter(
x_data, y_data,
marker = '.',
c = 'lightblue',
s = 2, label = 'dataset'
)
# mda selection
ax.scatter(
x_mda, y_mda,
marker = '.',
c = 'k',
s = 6, label='subset'
)
def Plot_MDA_Data(pd_data, pd_mda, show=True):
'''
Plot scatter with MDA selection vs original data
pd_data - pandas.DataFrame, complete data
pd_mda - pandas.DataFrame, mda selected data
pd_data and pd_mda should share columns names
'''
# TODO: activate dlab ?
## figure conf.
#d_lab = {
# 'pressure_min': 'Pmin (mbar)',
# 'gamma': 'gamma (º)',
# 'delta': 'delta (º)',
# 'velocity_mean': 'Vmean (km/h)',
#}
# variables to plot
vns = pd_data.columns
# filter
vfs = ['n_sim']
vns = [v for v in vns if v not in vfs]
n = len(vns)
# figure
fig = plt.figure(figsize=(_faspect*_fsize, _faspect*_fsize))
gs = gridspec.GridSpec(n-1, n-1, wspace=0.2, hspace=0.2)
for i in range(n):
for j in range(i+1, n):
# get variables to plot
vn1 = vns[i]
vn2 = vns[j]
# mda and entire-dataset
vv1_mda = pd_mda[vn1].values[:]
vv2_mda = pd_mda[vn2].values[:]
vv1_dat = pd_data[vn1].values[:]
vv2_dat = pd_data[vn2].values[:]
# scatter plot
ax = plt.subplot(gs[i, j-1])
axplot_scatter_mda_vs_data(ax, vv2_mda, vv1_mda, vv2_dat, vv1_dat)
# custom axes
if j==i+1:
ax.set_xlabel(
#d_lab[vn2],
vn2,
{'fontsize':10, 'fontweight':'bold'}
)
if j==i+1:
ax.set_ylabel(
#d_lab[vn1],
vn1,
{'fontsize':10, 'fontweight':'bold'}
)
if i==0 and j==n-1:
ax.legend()
# show and return figure
if show: plt.show()
return fig
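# A minimal usage sketch with synthetic data; the column names below are
# arbitrary placeholders, and only numpy/pandas are assumed in addition to
# the imports above.
if __name__ == '__main__':
    import numpy as np
    import pandas as pd
    rng = np.random.default_rng(0)
    pd_data = pd.DataFrame(rng.normal(size=(500, 3)), columns=['hs', 'tp', 'dr'])
    pd_mda = pd_data.sample(25, random_state=0)
    Plot_MDA_Data(pd_data, pd_mda, show=True)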
|
StarcoderdataPython
|
11329116
|
import pkgutil
import importlib
from base64 import b64encode, b64decode
from functools import partial
from gql.client import Client
from . import api
from .api.input.turn import Turn
def wrapper(self, func):
def inner(*args, **kwargs):
result = func(self, *args, **kwargs)
if hasattr(result, "result"):
result = result.result
return result
return inner
class GraphqlClient(Client):
_api_map = {
m.name: getattr(
getattr(importlib.import_module(f"{api.__name__}.{m.name}"), m.name),
"execute",
)
for m in pkgutil.iter_modules(api.__path__)
if not m.ispkg
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.custom_wrappers = {
name.split("_")[-1]: partial(method, self)
for name, method in self.__class__.__dict__.items()
if name.startswith("_wrap_")
}
def _wrap_similarity(self, orig_method):
def wrapper(*args, **kwargs):
result = orig_method(*args, **kwargs)
if not result:
return
return [s.to_dict() for s in result]
return wrapper
_wrap_sentiment = _wrap_similarity
_wrap_topics = _wrap_similarity
def _wrap_chitchat(self, orig_method):
def wrapper(input, history):
return orig_method(input, [Turn.from_dict(h) for h in history])
return wrapper
def _wrap_sensibility(self, orig_method):
def wrapper(input, history):
return [
alt.to_dict()
for alt in orig_method(input, [Turn.from_dict(h) for h in history])
]
return wrapper
def _wrap_speak(self, orig_method):
def wrapper(input, stream):
return stream.write(b64decode(orig_method(input)))
return wrapper
def _wrap_transcribe(self, orig_method):
def wrapper(input):
return orig_method(b64encode(input.read()).decode())
return wrapper
def __getattr__(self, name):
try:
custom_wrapper = self.custom_wrappers.get(name)
method = self._api_map[name]
if not custom_wrapper:
return wrapper(self, method)
else:
return custom_wrapper(wrapper(self, method))
except KeyError:
raise AttributeError(name)
|
StarcoderdataPython
|
4946963
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright 2021-... <NAME> <<EMAIL>>.
# This program is distributed under the MIT license.
# Glory to Ukraine!
import os
import lxml.etree
from termcolor import colored
import DipTrace
def store_temp(data, name) -> str:
if not os.path.exists('temp'):
os.mkdir('temp')
filename = f'temp/{name}'
with open(filename, 'w', encoding='utf-8') as f:
f.write(data)
return filename
def compare(filename: str, xpath_first: str, xpath_second: str):
xml = lxml.etree.parse(filename)
    first_matches = xml.xpath(xpath_first)
    second_matches = xml.xpath(xpath_second)
    # xpath() returns a list; indexing an empty result would raise IndexError,
    # so check for emptiness before taking the first match.
    if not first_matches:
        print(colored(f"Can't find first element: '{xpath_first}'", 'red'))
        return
    if not second_matches:
        print(colored(f"Can't find second element: '{xpath_second}'", 'red'))
        return
    first = first_matches[0]
    second = second_matches[0]
first_text = lxml.etree.tostring(first, xml_declaration=True, encoding='utf-8', pretty_print=True).decode('utf8')
second_text = lxml.etree.tostring(second, xml_declaration=True, encoding='utf-8', pretty_print=True).decode('utf8')
first_file = store_temp(first_text, 'first')
second_file = store_temp(second_text, 'second')
DipTrace.format_xml(first_file)
DipTrace.format_xml(second_file)
DipTrace.compare(first_file, second_file)
if __name__ == '__main__':
compare('expected/LEDs.elixml', '/Library/Components/Component[1]/Part', '/Library/Components/Component[8]/Part')
|
StarcoderdataPython
|
4895642
|
<filename>es_maml/blackbox/blackbox_functions.py
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Library for creating different tensorized versions of blackbox functions.
Responsible for creating different tensorized versions of blackbox functions.
Each blackbox function accepts a list of tensors and outputs a list of tensors.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
class BlackBoxFunction(object):
r"""Abstract class for different blackbox functions.
Class is responsible for creating different blackbox functions. In particular,
it provides a way to define functions that take as arguments the parameters
of neural networks and output the total reward obtained by applying
policies defined by these neural networks in a particular RL environment.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def get_function(self):
"""Outputs the blackbox function.
Outputs the blackbox function. The blackbox function takes as an input
the list of numpy arrays (each numpy array is a different argument)
and outputs a list of numpy arrays.
    Returns:
      The blackbox function.
    """
raise NotImplementedError('Abstract method')
class TesterBlackBox(BlackBoxFunction):
r"""Class responsible for creating simple tester blackbox functions.
Class inheriting from BlackBoxFunction and responsible for creating function
of the form:
f(x_0, x_1, x_2, x_3, x_4) =
-[(x_0-1.0)^2+(x_1-2.0)^2+(x_2-3.0)^2+(x_3-4.0)^2+(x_4-5.0)^2].
"""
def __init__(self):
pass
def get_function(self):
def blackbox_function(parameters, _):
value_0 = (parameters[0] - 1.0) * (parameters[0] - 1.0)
value_1 = (parameters[1] - 2.0) * (parameters[1] - 2.0)
value_2 = (parameters[2] - 3.0) * (parameters[2] - 3.0)
value_3 = (parameters[3] - 4.0) * (parameters[3] - 4.0)
value_4 = (parameters[4] - 5.0) * (parameters[4] - 5.0)
return -(value_0 + value_1 + value_2 + value_3 + value_4)
return blackbox_function
def rl_rollout(policy, environment, number_of_steps):
"""Runs <number_of_steps> steps in the <environment> by conducting <policy>.
Args:
policy: applied policy
environment: environment in which policy is deployed. Object environment
should provide three methods, namely *restart* - responsible for resetting
the environment and not taking any arguments, *deterministic_start* -
responsible for setting up deterministic initial configuration of the
environment and *step()* - taking as an argument an action and outputting
a list of at least three elements. These elements are: [new_state, reward,
done, _] where: <new_state> - new state after applying <action>, <reward>
- the immediate reward obtained after applying <action> in the current
state and transitioning to <new_state>, <done> - boolean that indicates
whether the current episode has been completed. Examples of RL
environments that match this framework are all OpenAI Gym tasks
number_of_steps: upper bound on the number of steps of the single rollout
(the episode might be potentially completed before <number_of_steps> steps
are conducted)
Returns:
Total cost of the rollout (negated total reward).
"""
state = environment.deterministic_start()
sum_reward = 0
steps = number_of_steps
for _ in range(steps):
proposed_action = policy.get_action(state)
proposed_action = np.reshape(proposed_action, (len(proposed_action)))
state, reward, done, _ = environment.step(proposed_action)
sum_reward += reward
if done:
break
return float(0.0 - sum_reward)
def renormalize(state, mean_state_vector, std_state_vector):
"""Outputs renormalized state vector using mean and std dev information.
Outputs renormalized state vector given by the following formula:
state_renormalized = (state - mean_state_vector) / renormalized_state_vector
(all operations conducted element-wise).
Args:
state: state vector to be renormalized
mean_state_vector: vector of mean dimension values
std_state_vector: vector of std devs for different dimensions
Returns:
renormalized state vector
"""
if mean_state_vector is None:
return state
if (isinstance(mean_state_vector, list) and not mean_state_vector):
return state
elif (isinstance(mean_state_vector, np.ndarray) and
mean_state_vector.size == 0):
return state
else:
state_shape = state.shape
centralized_state_vector = [
a - b for a, b in zip(state.flatten().tolist(), mean_state_vector)
]
for i in range(len(std_state_vector)):
if std_state_vector[i] == 0.0:
std_state_vector[i] = 1.0
renormalized_state = [
a / b for a, b in zip(centralized_state_vector, std_state_vector)
]
renorm_state = np.array(renormalized_state).reshape(state_shape)
return renorm_state
def renormalize_with_epsilon(state, mean_state_vector, std_state_vector):
"""Outputs renormalized state vector using mean and std dev information.
Outputs renormalized state vector given by the following formula:
  state_renormalized = (state - mean_state_vector) /
                       (std_state_vector + epsilon)
(all operations conducted element-wise).
epsilon prevents divide by zero errors and is set to 1e-8
Args:
state: matrix, state to be renormalized
mean_state_vector: list of mean dimension values
std_state_vector: list of std devs for different dimensions
Returns:
renormalized state vector
"""
if (isinstance(mean_state_vector, np.ndarray) and
mean_state_vector.size > 0) or (isinstance(mean_state_vector, list) and
mean_state_vector):
state_shape = state.shape
state = state.flatten()
mean = np.asarray(mean_state_vector)
std = np.asarray(std_state_vector)
norm_state = (state - mean) / (std + 1e-8)
norm_state = norm_state.reshape(state_shape)
return norm_state
else:
return state
def rl_extended_rollout(policy, hyperparameters, environment, number_of_steps):
"""Runs <number_of_steps> steps in the <environment> by conducting <policy>.
Args:
policy: applied policy
hyperparameters: the list of hyperparameters
environment: environment in which policy is deployed. Object environment
should provide three methods, namely *restart* - responsible for resetting
the environment and not taking any arguments, *deterministic_start* -
responsible for setting up deterministic initial configuration of the
environment and *step()* - taking as an argument an action and outputting
a list of at least three elements. These elements are
[new_state, reward, done, _] where: <new_state> - new state
after applying <action>, <reward> - the immediate reward
obtained after applying <action> in the current state and
transitioning to <new_state>, <done> - boolean that
indicates whether the current episode has been completed.
Examples of RL environments that match this framework are
all OpenAI Gym tasks
number_of_steps: upper bound on the number of steps of the single rollout
(the episode might be potentially completed before <number_of_steps> steps
are conducted)
Returns:
Total cost of the rollout (negated total reward).
"""
state = environment.deterministic_start()
sum_reward = 0
steps = number_of_steps
# Vector such that its i^th entry stores the sum of i^th dimensions of the
# states visited so far.
sum_state_vector = []
# Vector such that its i^th entry stores the sum of squares of i^th dimensions
# of the states visited so far.
squares_state_vector = []
nb_points = 0
mean_state_vector = []
std_state_vector = []
hyperparameters = hyperparameters[1:]
if hyperparameters:
state_dim = int(len(hyperparameters) / 2)
mean_state_vector = hyperparameters[:state_dim]
std_state_vector = hyperparameters[state_dim:]
sum_state_vector = [0.0] * state_dim
squares_state_vector = [0.0] * state_dim
for _ in range(steps):
proposed_action = policy.get_action(
renormalize(state, mean_state_vector, std_state_vector))
proposed_action = np.reshape(proposed_action, (len(proposed_action)))
state, reward, done, _ = environment.step(proposed_action)
if hyperparameters:
# Updating sum_state_vector and squares_state_vector based on the new
# visited state (look above for the definition of: sum_state_vector and
# squares_state_vector).
nb_points += 1
squared = [x * x for x in state.tolist()]
sum_state_vector = [sum(x) for x in zip(sum_state_vector, state.tolist())]
squares_state_vector = [
sum(x) for x in zip(squares_state_vector, squared)
]
sum_reward += reward
if done:
break
if hyperparameters:
return [
float(sum_reward),
[float(nb_points)] + sum_state_vector + squares_state_vector
]
else:
return [float(sum_reward), []]
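# A quick check of TesterBlackBox (illustration only, not part of the original
# module): the optimum is at (1, 2, 3, 4, 5), where the function value is 0.
if __name__ == '__main__':
    f = TesterBlackBox().get_function()
    assert f([1.0, 2.0, 3.0, 4.0, 5.0], None) == 0.0
    print(f([0.0] * 5, None)) # -(1 + 4 + 9 + 16 + 25) = -55.0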
|
StarcoderdataPython
|
6536877
|
<filename>tetris.py
import pygame
import random
pygame.init()
class Point: # Structure for tab[]
def __init__(self, x=0, y=0):
self.x = x
self.y = y
self.name = 0
class Pole: # Structure for blocksTab
def __init__(self, color, empty):
self.empty = empty
self.color = color
def InsertionSort(tab): # in-place insertion sort, descending (highest score first)
for x in range(1,len(tab)):
key = tab[x]
y = x-1
while y >= 0 and tab[y] < key:
tab[y+1] = tab[y]
y = y-1
tab[y+1] = key
class Menu:
def __init__(self):
self.open_function = 0
self.music = True
self.max_lvl = 3
self.engine()
def main_menu(self):
self.background()
for x in range(5):
x_pos = pos_x + 50
y_pos = pos_y + 150 + 80 * x
if self.mouse_contained(x_pos, y_pos, x_pos + menu_button_w, y_pos + menu_button_h):
gameDisplay.blit(button_1, (x_pos, y_pos))
if pygame.mouse.get_pressed() == (1, 0, 0):
if self.music and x % 5 != 0:
button_sound.play()
self.open_function = x + 1
else:
gameDisplay.blit(button_2, (x_pos, y_pos))
text = font.render(menu_texts[x], True, white)
gameDisplay.blit(text, (x_pos + (menu_button_w - text.get_width()) // 2, y_pos))
pygame.display.update()
def mouse_contained(self, x1, y1, x2, y2):
x = pygame.mouse.get_pos()[0]
y = pygame.mouse.get_pos()[1]
if x1 <= x <= x2:
if y1 <= y <= y2:
return True
else:
return False
else:
return False
def engine(self):
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
pygame.quit()
quit()
if self.open_function == 0:
self.main_menu()
elif self.open_function == 1:
Game(self, self.max_lvl)
self.open_function = 0
elif self.open_function == 2:
self.high_scores_menu()
elif self.open_function == 3:
self.options_menu()
elif self.open_function == 4:
self.about()
elif self.open_function == 5:
self.quit_menu()
def high_scores_menu(self):
self.background()
self.return_button(pos_x, pos_y)
self.reset_button(pos_x, pos_y + 400)
max = len(high_scores)
for i in range(1,11):
if i < max + 1:
text = menu_font.render(str(i)+". "+str(high_scores[i-1]), True, white)
else:
text = menu_font.render(str(i) + ". ", True, white)
gameDisplay.blit(text, (pos_x + 20, pos_y + 100 + 30 * i))
pygame.display.update()
def options_menu(self):
self.background()
self.return_button(pos_x, pos_y)
music_text = ['Music ON', 'Music OFF']
lvl_text = ['Max lvl - 1', 'Max lvl - 2', 'Max lvl - 3', 'Max lvl - 4']
x_pos = pos_x + 50
y_pos = pos_y + 150
if self.music:
text = font.render(music_text[1], True, white)
else:
text = font.render(music_text[0], True, white)
if self.mouse_contained(x_pos, y_pos, x_pos + menu_button_w, y_pos + menu_button_h):
gameDisplay.blit(button_1, (x_pos, y_pos))
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
if self.music:
button_sound.play()
if self.music:
text = font.render(music_text[0], True, white)
self.music = False
else:
text = font.render(music_text[1], True, white)
self.music = True
else:
gameDisplay.blit(button_2, (x_pos, y_pos))
if self.music:
text = font.render(music_text[0], True, white)
else:
text = font.render(music_text[1], True, white)
gameDisplay.blit(text, (x_pos + (menu_button_w - text.get_width()) // 2, y_pos))
x_pos = pos_x + 50
y_pos = pos_y + 250
text2 = font.render(lvl_text[self.max_lvl], True, white)
if self.mouse_contained(x_pos, y_pos, x_pos + menu_button_w, y_pos + menu_button_h):
gameDisplay.blit(button_1, (x_pos, y_pos))
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
if self.music:
button_sound.play()
self.max_lvl += 1
self.max_lvl = self.max_lvl % 4
text2 = font.render(lvl_text[self.max_lvl], True, white)
else:
gameDisplay.blit(button_2, (x_pos, y_pos))
gameDisplay.blit(text2, (x_pos + (menu_button_w - text2.get_width()) // 2, y_pos))
pygame.display.update()
def about(self):
x_pos = pos_x + 50
y_pos = pos_y + 50
self.background()
self.return_button(pos_x, pos_y)
length = 0
height = 0
to_print = about_text.split(" ")
for i in range(len(to_print)):
text = menu_font.render(to_print[i]+" ", True, white)
if pos_x + 20 + length > pos_x + board_width - 70:
height += 1
length = 0
gameDisplay.blit(text, (pos_x + 20 + length, y_pos + 100 + 30 * height))
length += text.get_width()
pygame.display.update()
def quit_menu(self):
pygame.quit()
quit(0)
def background(self):
gameDisplay.blit(background, (0, 0))
gameDisplay.blit(menu_panel, (pos_x, pos_y))
font = pygame.font.SysFont("comicsansms", 60)
text = font.render('Tetris', True, white)
gameDisplay.blit(text, (pos_x + (board_width - text.get_width()) // 2, pos_y + 30))
def return_button(self, x, y):
x_pos = x + 50
y_pos = y + 50
if self.mouse_contained(x_pos, y_pos, x_pos + menu_button_w, y_pos + menu_button_h):
gameDisplay.blit(button_1, (x_pos, y_pos))
if pygame.mouse.get_pressed() == (1, 0, 0):
if self.music:
button_sound.play()
self.open_function = 0
else:
gameDisplay.blit(button_2, (x_pos, y_pos))
text = font.render("Return", True, white)
gameDisplay.blit(text, (x_pos + (menu_button_w - text.get_width()) // 2, y_pos))
def reset_button(self, x, y):
x_pos = x + 50
y_pos = y + 50
if self.mouse_contained(x_pos, y_pos, x_pos + menu_button_w, y_pos + menu_button_h):
gameDisplay.blit(button_1, (x_pos, y_pos))
if pygame.mouse.get_pressed() == (1, 0, 0):
if self.music:
button_sound.play()
plik = open("Docs\\wyniki.txt", "w")
plik.write("")
plik.close()
high_scores.clear()
else:
gameDisplay.blit(button_2, (x_pos, y_pos))
text = font.render("Reset", True, white)
gameDisplay.blit(text, (x_pos + (menu_button_w - text.get_width()) // 2, y_pos))
class Game:
def __init__(self, menu, max_lvl):
self.Menu = menu
self.max_lvl = max_lvl
self.bc_blue = pygame.transform.scale(pygame.image.load('Graphics\\blue.bmp'), (block_size, block_size))
self.bc_azure = pygame.transform.scale(pygame.image.load('Graphics\\azure.bmp'), (block_size, block_size))
self.bc_green = pygame.transform.scale(pygame.image.load('Graphics\\green.bmp'), (block_size, block_size))
self.bc_red = pygame.transform.scale(pygame.image.load('Graphics\\red.bmp'), (block_size, block_size))
self.bc_violet = pygame.transform.scale(pygame.image.load('Graphics\\violet.bmp'), (block_size, block_size))
self.bc_orange = pygame.transform.scale(pygame.image.load('Graphics\\orange.bmp'), (block_size, block_size))
self.grid = pygame.image.load('Graphics\\grid.bmp')
self.blockColors = [bc_yellow, self.bc_blue, self.bc_azure, self.bc_green, self.bc_red, self.bc_violet, self.bc_orange]
self.blocksTab = [[Pole(0, False) for col in range(N)] for row in range(M)]
self.figures = [[1, 3, 5, 7], [2, 4, 5, 7], [3, 5, 4, 6], [3, 5, 4, 7], [2, 3, 5, 7], [3, 2, 4, 6], [2, 3, 4, 5]]
self.tab = self.figure()
self.tab_next = self.figure()
self.color = self.color_gen()
self.color_next = self.color_gen(self.color)
self.clock = pygame.time.Clock()
self.move_speed = 3
self.fps = 25
self.tick = 0
self.isEnd = True
self.points = 0
self.paused = False
self.stop = False
self.level = 1
self.level_points = 0
self.deadline = 0
self.speed = 0
if self.Menu.music:
start_game.play()
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
pygame.quit()
quit()
elif event.key == pygame.K_UP:
self.tab = self.rotate(self.isEnd, self.tab)
self.clock.tick(self.fps)
xmv = 0
ymv = 0
self.speed = self.fps + 15 - 7 * self.level
self.tick += 1
if self.tick % self.move_speed == 0:
if pygame.key.get_pressed()[pygame.K_RIGHT]:
xmv = 1
if pygame.key.get_pressed()[pygame.K_LEFT]:
xmv = -1
if pygame.key.get_pressed()[pygame.K_DOWN]:
self.speed = 1
if self.tick % self.speed == 0:
ymv = 1
if pygame.key.get_pressed()[pygame.K_DOWN]:
self.points += self.level
self.level_points += self.level
if xmv != 0 or ymv != 0:
self.move(xmv, ymv)
self.isEnd = self.check_height()
if not self.isEnd:
self.last_move()
self.view_background()
self.print_block(self.color, self.tab, pos_x, pos_y)
self.end_block()
self.delete_line()
self.print_all()
if self.check_gameover():
return
self.pause_button(pause_x, pause_y)
self.stop_button(stop_x, stop_y)
if self.stop is True:
return
pygame.display.update()
    def color_gen(self, last=-1): # Generates a number corresponding to a color, skipping the last one.
col = random.randint(0, len(self.blockColors) - 1)
while last == col:
col = random.randint(0, len(self.blockColors) - 1)
return col
    def figure(self, n=-1): # Figure generator
if n == -1:
n = random.randint(0, len(self.figures) - 1)
tab = [Point() for i in range(4)]
for i in range(4):
tab[i].x = self.figures[n][i] % 2 + 4
tab[i].y = self.figures[n][i] // 2
tab[0].name = n
tab = self.rotate(True, tab, True)
self.set_default_position(tab)
return tab
    def print_all(self): # Draws the blocks already placed, which are stored in blocksTab.
for y in range(M):
for x in range(N):
if self.blocksTab[y][x].empty is True:
pygame.Surface.blit(gameDisplay, self.blockColors[self.blocksTab[y][x].color],
(pos_x + x * block_size, pos_y + y * block_size))
    def print_block(self, color, tab, x, y): # Draws the whole figure according to the pattern in tab[].
for i in range(4):
pygame.Surface.blit(gameDisplay, self.blockColors[color],
(x + tab[i].x * block_size, y + tab[i].y * block_size))
    def rotate(self, is_end, tab, first=False): # Rotates the block if is_end == True.
if is_end and self.can_rotate(tab, first):
if tab[0].name == 6:
if self.Menu.music:
rotate_sound.play()
return tab
point = Point(tab[1].x, tab[1].y)
tmp = [Point() for _ in range(4)]
check_x = True
check_y = True
for i in range(4):
x = tab[i].y - point.y
y = tab[i].x - point.x
tmp[i].x = point.x - x
tmp[i].y = point.y + y
if tmp[i].x < 0 or tmp[i].x > N - 1:
check_x = False
if tmp[i].y > M - 1:
check_y = False
if check_x and check_y:
if self.Menu.music and not first:
rotate_sound.play()
for i in range(4):
tab[i].x = tmp[i].x
tab[i].y = tmp[i].y
return tab
    def move(self, xmv, ymv): # Checks that the move stays inside the game grid and moves the block.
        check_x = True
        check_y = True
        for i in range(4):
            if self.tab[i].x + xmv < 0 or self.tab[i].x + xmv > N - 1:
                check_x = False
            if self.tab[i].y + ymv > M - 1:
                check_y = False
        if check_x and self.check_sites(xmv):
            for i in range(4):
                self.tab[i].x += xmv
        if check_y:
            for i in range(4):
                self.tab[i].y += ymv
    def check_height(self): # Checks whether the block touched the floor or a block beneath it; returns False if so, True otherwise.
for i in range(4):
if self.tab[i].y + 1 > M - 1:
return False
elif self.blocksTab[self.tab[i].y + 1][self.tab[i].x].empty:
return False
return True
    def check_sites(self, xmv): # Checks whether horizontal movement is possible; returns True if so, False otherwise.
for i in range(4):
x = self.tab[i].x + xmv
if 0 > x or x >= N:
return False
elif self.blocksTab[self.tab[i].y][x].empty is True:
return False
return True
    def delete_line(self): # Removes fully filled rows.
for y in range(M):
counter = 0
for x in range(N):
if self.blocksTab[y][x].empty:
counter += 1
if counter == N:
self.points += 200
self.level_points += 200
for line in range(y - 1, -1, -1):
for x in range(0, N):
self.blocksTab[line + 1][x].empty = self.blocksTab[line][x].empty
self.blocksTab[line + 1][x].color = self.blocksTab[line][x].color
def view_background(self):
gameDisplay.blit(background, (0, 0))
if self.level < 3:
self.next_block_panel()
self.score_panel()
gameDisplay.blit(play_panel, (pos_x - 25, pos_y - 25))
pygame.draw.rect(gameDisplay, (98, 98, 98), (pos_x - 5, pos_y - 5, board_width + 10, board_height + 10))
pygame.draw.rect(gameDisplay, (45, 53, 73), (pos_x, pos_y, board_width, board_height))
pygame.Surface.blit(gameDisplay, self.grid, (pos_x, pos_y))
def print_panel(self, x, y):
gameDisplay.blit(obszar, (x, y))
gameDisplay.blit(obszar_light, (x + 10, y + 10))
def can_rotate_simple(self, tab):
        if tab[0].name in {2, 3, 6}:
            return True
        else:
            if tab[0].name in {1, 4}:
if tab[1].y >= 1:
return True
return False
else:
if tab[1].y >= 2:
return True
return False
def can_rotate(self, tab, check):
if check:
return True
if self.can_rotate_simple(tab):
tmp = [Point() for _ in range(4)]
for i in range(4):
tmp[i].x = tab[i].x
tmp[i].y = tab[i].y
point = Point(tmp[1].x, tmp[1].y)
for i in range(4):
x = tab[i].y - point.y
y = tab[i].x - point.x
tmp[i].x = point.x - x
tmp[i].y = point.y + y
if tmp[i].x < 0 or tmp[i].x > N - 1:
return False
if tmp[i].y > M - 1:
return False
for i in range(4):
if self.blocksTab[tmp[i].y][tmp[i].x].empty:
return False
return True
def check_gameover(self):
for x in range(N):
if self.blocksTab[0][x].empty is True:
self.gameover()
return True
return False
def gameover(self, paused=False):
if not paused:
pygame.image.save(gameDisplay, 'Graphics\\surface.bmp')
gameDisplay.blit(pygame.image.load('Graphics\\surface.bmp'), (0, 0))
gameDisplay.blit(button_background, (pause_x, pause_y))
gameDisplay.blit(button_inside1, (pause_x + 3, pause_y + 3))
gameDisplay.blit(pause, (pause_x, pause_y))
gameDisplay.blit(button_background, (stop_x, stop_y))
gameDisplay.blit(button_inside1, (stop_x + 3, stop_y + 3))
gameDisplay.blit(stop, (stop_x, stop_y))
font = pygame.font.SysFont("comicsansms", 60)
text = font.render('Game over', True, white)
gameDisplay.blit(text, (pos_x + (board_width - text.get_width()) // 2, 300))
pygame.display.update()
high_scores.append(self.points)
InsertionSort(high_scores)
text = open("Docs\\wyniki.txt", "w")
for x in range(len(high_scores) - 1):
text.write(str(high_scores[x])+",")
text.write(str(high_scores[len(high_scores) - 1]))
text.close()
pygame.time.wait(2000)
def pause_button(self, x=690, y=600):
gameDisplay.blit(button_background, (x, y))
if Menu.mouse_contained(self.Menu, x, y, x + 50, y + 50):
gameDisplay.blit(button_inside2, (x + 3, y + 3))
gameDisplay.blit(pause, (x, y))
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
if self.Menu.music:
button_sound.play()
pygame.image.save(gameDisplay, 'Graphics\\surface.bmp')
if self.paused is False:
self.paused = True
else:
self.paused = False
return self.game_pause(x, y, pygame.image.load('Graphics\\surface.bmp'))
else:
gameDisplay.blit(button_inside1, (x + 3, y + 3))
gameDisplay.blit(pause, (x, y))
def stop_button(self, x=780, y=600):
gameDisplay.blit(button_background, (x, y))
if Menu.mouse_contained(self.Menu, x, y, x + 50, y + 50):
gameDisplay.blit(button_inside2, (x + 3, y + 3))
gameDisplay.blit(stop, (x, y))
for event in pygame.event.get():
if event.type == pygame.MOUSEBUTTONDOWN:
if self.Menu.music:
button_sound.play()
if self.paused:
self.paused = False
self.gameover(True)
else:
self.gameover(False)
pygame.display.update()
self.stop = True
else:
gameDisplay.blit(button_inside1, (x + 3, y + 3))
gameDisplay.blit(stop, (x, y))
def game_pause(self, x, y, surface):
while self.paused:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
pygame.quit()
quit()
gameDisplay.blit(surface, (0, 0))
font = pygame.font.SysFont("comicsansms", 60)
text = font.render('Game paused', True, white)
gameDisplay.blit(text, (pos_x + (board_width - text.get_width()) // 2, 300))
self.pause_button(x, y)
self.stop_button(stop_x, stop_y)
pygame.display.update()
def print_score(self):
text = font.render('Score', True, white)
x = ((pos_x - 200) // 2) + (200 - text.get_width()) // 2
gameDisplay.blit(text, (x, pos_y + 340))
text = font.render(str(self.points), True, white)
x = ((pos_x - 200) // 2) + (200 - text.get_width()) // 2
gameDisplay.blit(text, (x, pos_y + 380))
def check_block_size(self, tab):
x_min = tab[0].x
x_max = tab[0].x
y_min = tab[0].y
y_max = tab[0].y
for i in range(1,4):
if tab[i].x > x_max: x_max = tab[i].x
if tab[i].x < x_min: x_min = tab[i].x
if tab[i].y > y_max: y_max = tab[i].y
if tab[i].y < y_min: y_min = tab[i].y
return x_min, x_max, y_min, y_max
def next_block_panel(self):
size = self.check_block_size(self.tab_next)
self.print_panel((pos_x - 200) // 2, pos_y + 66)
text = font.render('Next block', True, white)
gameDisplay.blit(text, (50 + (200 - text.get_width()) // 2, 130))
self.print_block(self.color_next, self.tab_next, 35 - size[0] * 30 + (200 - (size[1] - size[0]) * 30) // 2,
160 - size[2] * 30 + (100 - (size[3] - size[2]) * 30) // 2)
def score_panel(self):
self.print_panel((pos_x - 200) // 2, pos_y + 332)
self.print_score()
self.print_level()
if self.level < self.max_lvl + 1:
if self.level_points >= 1000:
self.level_points -= 1000
self.level += 1
def set_default_position(self, tab):
size = self.check_block_size(tab)
        r_x = size[1] - size[0] + 1 # block width
        rx = (N - r_x) // 2 # where the minimum x should be
for j in range(size[2]):
for i in range(4):
tab[i].y -= 1
if rx - size[0] < 0:
x = 1
else:
x = -1
size = abs(rx - size[0])
for j in range(size):
for i in range(4):
tab[i].x -= x
def print_level(self):
text = font.render('Level', True, white)
x = ((pos_x - 200) // 2) + (200 - text.get_width()) // 2
gameDisplay.blit(text, (x, pos_y + 420))
text = font.render(str(self.level), True, white)
x = ((pos_x - 200) // 2) + (200 - text.get_width()) // 2
gameDisplay.blit(text, (x, pos_y + 460))
def end_block(self):
if not self.isEnd:
for i in range(4):
self.blocksTab[self.tab[i].y][self.tab[i].x].empty = True
self.blocksTab[self.tab[i].y][self.tab[i].x].color = self.color
self.tab = self.tab_next
self.tab_next = self.figure()
self.color = self.color_next
self.color_next = self.color_gen(self.color)
def last_move(self):
if pygame.key.get_pressed()[pygame.K_RIGHT]:
xmv = 1
self.move(xmv, 0)
if pygame.key.get_pressed()[pygame.K_LEFT]:
xmv = -1
self.move(xmv, 0)
if pygame.key.get_pressed()[pygame.K_UP]:
self.tab = self.rotate(self.isEnd, self.tab)
self.isEnd = self.check_height()
# Config
M = 20
N = 10
block_size = 30
display_width = 900
display_height = 700
board_width = N * block_size
board_height = M * block_size
pos_x = (display_width - board_width) // 2
pos_y = (display_height - board_height) // 2
menu_button_w = 200
menu_button_h = 50
black = (0, 0, 0)
white = (255, 255, 255)
red = (255, 0, 0)
stop_x = 780
stop_y = 600
pause_x = 690
pause_y = 600
font = pygame.font.SysFont("comicsansms", 30)
menu_font = pygame.font.SysFont("comicsansms", 20)
menu_texts = ["Play", "High Scores", "Options", "About", "Quit"]
about_text = "Game created by <NAME> (MrRedonis)."
high_scores = []
text = open("Docs\\wyniki.txt", "r")
readings = (text.readline())
text.close()
if not readings == "":
words = readings.split(",")
for x in range(len(words)):
high_scores.append(int(words[x]))
# Images
bc_yellow = pygame.transform.scale(pygame.image.load('Graphics\\yellow.bmp'), (block_size, block_size))
background = pygame.transform.scale(pygame.image.load('Graphics\\background.bmp'), (display_width, display_height))
menu_panel = pygame.transform.scale(pygame.image.load('Graphics\\panel.bmp'), (board_width, board_height))
play_panel = pygame.transform.scale(pygame.image.load('Graphics\\panel.bmp'), (board_width + 50, board_height + 50))
obszar = pygame.transform.scale(pygame.image.load('Graphics\\panel_kwadrat.bmp'), (200, 200))
obszar_light = pygame.transform.scale(pygame.image.load('Graphics\\panel_kwadrat2.bmp'), (180, 180))
button_1 = pygame.transform.scale(pygame.image.load('Graphics\\button.bmp'), (menu_button_w, menu_button_h))
button_2 = pygame.transform.scale(pygame.image.load('Graphics\\button2.bmp'), (menu_button_w, menu_button_h))
# Play/pause/stop buttons
button_background = pygame.transform.scale(pygame.image.load('Graphics\\panel_kwadrat.bmp'), (50, 50))
button_inside1 = pygame.transform.scale(pygame.image.load('Graphics\\panel_kwadrat2.bmp'), (44, 44))
button_inside2 = pygame.transform.scale(pygame.image.load('Graphics\\panel_kwadrat3.bmp'), (44, 44))
pause = pygame.transform.scale(pygame.image.load('Graphics\\pause-play_button.bmp'), (50, 50))
stop = pygame.transform.scale(pygame.image.load('Graphics\\stop.bmp'), (50, 50))
# Initialization
gameDisplay = pygame.display.set_mode((display_width, display_height), pygame.SRCALPHA)
pygame.display.set_caption('Tetris')
pygame.display.set_icon(bc_yellow)
# Music
start_game = pygame.mixer.Sound('Sounds\\start_game.ogg')
rotate_sound = pygame.mixer.Sound('Sounds\\rotate.ogg')
button_sound = pygame.mixer.Sound('Sounds\\button.ogg')
move_sound = pygame.mixer.Sound('Sounds\\move.ogg')
if __name__ == "__main__":
Menu()
|
StarcoderdataPython
|
1762761
|
<reponame>su-vikas/pytlspect
# Authors:
# <NAME>
# Google - defining ClientCertificateType
# Google (adapted by <NAME>) - NPN support
# <NAME> - Anon ciphersuites
# <NAME> (Arcode Corporation) - canonicalCipherName
#
# See the LICENSE file for legal information regarding use of this file.
"""Constants used in various places."""
class SSLVersions:
SSLV3 = (3,0)
TLSV10 = (3,1)
TLSV11 = (3,2)
TLSV12 = (3,3)
class CertificateType:
x509 = 0
openpgp = 1
class ClientCertificateType:
rsa_sign = 1
dss_sign = 2
rsa_fixed_dh = 3
dss_fixed_dh = 4
class HandshakeType:
hello_request = 0
client_hello = 1
server_hello = 2
certificate = 11
server_key_exchange = 12
certificate_request = 13
server_hello_done = 14
certificate_verify = 15
client_key_exchange = 16
finished = 20
next_protocol = 67
class ContentType:
change_cipher_spec = 20
alert = 21
handshake = 22
application_data = 23
all = (20,21,22,23)
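# A minimal usage sketch (an assumption, not part of the original module):
# a TLS record header is one ContentType byte, the two SSLVersions bytes,
# and a two-byte big-endian payload length. _record_header is a
# hypothetical helper added here purely for illustration.
import struct

def _record_header(content_type, version, payload_len):
    major, minor = version
    return struct.pack('>BBBH', content_type, major, minor, payload_len)

# e.g. a 512-byte handshake record under TLS 1.0:
# _record_header(ContentType.handshake, SSLVersions.TLSV10, 512)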
class ExtensionType: # RFC 6066 / 4366
server_name = 0 # RFC 6066 / 4366
srp = 12 # RFC 5054
cert_type = 9 # RFC 6091
tack = 0xF300
supports_npn = 13172
renegotiation_info = 0xFF01
heartbeat = 0x000F
ocsp = 0x0005
session_ticket_tls = 0x0023
elliptic_curves = 0x000a
ec_point_formats = 0x000b
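# Illustrative sketch only (an assumption, not in the original module):
# the server_name (SNI) extension body is a two-byte list length, then a
# NameType byte and a length-prefixed host name per entry. NameType is
# defined further down in this file and is resolved at call time.
import struct

def _encode_sni_extension(hostname):
    name = hostname.encode('ascii')
    entry = struct.pack('>BH', NameType.host_name, len(name)) + name
    body = struct.pack('>H', len(entry)) + entry
    return struct.pack('>HH', ExtensionType.server_name, len(body)) + body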
class EllipticCurves:
# http://tools.ietf.org/html/rfc4492#page-11
sect163k1 = 0x0001
sect163r1 = 0x0002
sect163r2 = 0x0003
sect193r1 = 0x0004
sect193r2 = 0x0005
sect233k1 = 0x0006
sect233r1 = 0x0007
sect239k1 = 0x0008
sect283k1 = 0x0009
sect283r1 = 0x000a
sect409k1 = 0x000b
sect409r1 = 0x000c
sect571k1 = 0x000d
sect571r1 = 0x000e
secp160k1 = 0x000f
secp160r1 = 0x0010
secp160r2 = 0x0011
secp192k1 = 0x0012
secp192r1 = 0x0013
secp224k1 = 0x0014
secp224r1 = 0x0015
secp256k1 = 0x0016
secp256r1 = 0x0017
secp384r1 = 0x0018
secp521r1 = 0x0019
def get_opera_curves(self):
    # Curves advertised by Opera. The class attributes must be referenced
    # through the class name: method bodies do not see the class scope.
    return [EllipticCurves.secp256r1,
            EllipticCurves.secp384r1,
            EllipticCurves.secp521r1]
def get_all_curves(self):
    # Every curve id defined on this class, in numeric order.
    return sorted(value for name, value in vars(EllipticCurves).items()
                  if not name.startswith('_') and isinstance(value, int))
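# Hypothetical illustration (not part of the original module): per RFC 4492
# the elliptic_curves extension body is a two-byte list length followed by
# one two-byte id per curve, e.g. the ids defined in EllipticCurves.
import struct

def _encode_curves_extension(curve_ids):
    curves = b''.join(struct.pack('>H', c) for c in curve_ids)
    body = struct.pack('>H', len(curves)) + curves
    return struct.pack('>HH', ExtensionType.elliptic_curves, len(body)) + body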
class NameType:
host_name = 0
class AlertLevel:
warning = 1
fatal = 2
class AlertDescription:
"""
@cvar bad_record_mac: A TLS record failed to decrypt properly.
If this occurs during a SRP handshake it most likely
indicates a bad password. It may also indicate an implementation
error, or some tampering with the data in transit.
This alert will be signalled by the server if the SRP password is bad. It
may also be signalled by the server if the SRP username is unknown to the
server, but it doesn't wish to reveal that fact.
@cvar handshake_failure: A problem occurred while handshaking.
This typically indicates a lack of common ciphersuites between client and
server, or some other disagreement (about SRP parameters or key sizes,
for example).
@cvar protocol_version: The other party's SSL/TLS version was unacceptable.
This indicates that the client and server couldn't agree on which version
of SSL or TLS to use.
@cvar user_canceled: The handshake is being cancelled for some reason.
"""
close_notify = 0
unexpected_message = 10
bad_record_mac = 20
decryption_failed = 21
record_overflow = 22
decompression_failure = 30
handshake_failure = 40
no_certificate = 41 #SSLv3
bad_certificate = 42
unsupported_certificate = 43
certificate_revoked = 44
certificate_expired = 45
certificate_unknown = 46
illegal_parameter = 47
unknown_ca = 48
access_denied = 49
decode_error = 50
decrypt_error = 51
export_restriction = 60
protocol_version = 70
insufficient_security = 71
internal_error = 80
user_canceled = 90
no_renegotiation = 100
inappropriate_fallback = 86  # RFC 7507: signalled on an inappropriate protocol fallback (POODLE-style downgrade)
unknown_psk_identity = 115
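# A small debugging helper, added as a sketch (not in the original
# module): reverse-map an alert code from the wire to its symbolic name.
def _alert_name(code):
    for name, value in vars(AlertDescription).items():
        if not name.startswith('_') and isinstance(value, int) and value == code:
            return name
    return 'unknown_alert_%d' % code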
class OIDMap:
# taken from https://github.com/hiviah/pyx509/tree/master/pkcs7/asn1_models
'''
Map of OIDs and their names
'''
oid_map = {
"1.3.14.3.2.26" : "SHA-1",
"2.16.840.1.101.3.4.2.1" : "SHA-256",
"2.16.840.1.101.3.4.2.2" : "SHA-384",
"2.16.840.1.101.3.4.2.3" : "SHA-512",
"1.2.840.113549.1.7.1" : "data",
"1.2.840.113549.1.7.2" : "signedData",
"1.2.840.113549.1.1.5" : "SHA1withRSA",
"1.2.840.113549.1.1.1" : "RSA",
"1.2.840.113549.1.1.11" : "SHA256withRSA",
"1.2.840.10040.4.1" : "DSA",
"1.2.840.10040.4.3" : "SHA1withDSA",
"1.2.840.10045.4.1" : "SHA1withECDSA",
"1.2.840.10045.2.1" : "EC",
"2.5.4.6" : "id-at-countryName",
"2.5.4.8": "id-at-stateorProvinceName",
"2.5.4.7" : "id-at-localityName",
"2.5.4.10" : "id-at-organizationName ",
"2.5.4.3" : "id-at-commonName",
"172.16.31.10" : "id-at-organizationalUnitName",
"172.16.58.3" : "id-ce-subjectAltName",
"192.168.127.12" : "basicConstraints",
"172.16.17.32" : "Certificate policies",
"1.3.6.1.5.5.7.1.3" : "id-pe-qcStatements",
"172.16.58.3" : "id-ce-keyUsage",
"172.16.58.3" : "id-ce-subjectKeyIdentifier ",
"172.16.58.3" : "id-ce-CRLDistributionPoints ",
"192.168.127.12" : "id-ce-authorityKeyIdentifier ",
"192.168.127.12" : "CRL Number",
"192.168.127.12" : "Reason Code",
"172.16.17.32" : "Invalidity Data",
"1.2.840.113549.1.9.3" : "contentType",
"1.2.840.113549.1.9.4" : "messageDigest",
"1.2.840.113549.1.9.5" : "Signing Time"
}
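# Usage sketch (an assumption, not in the original module): dotted OID
# strings decoded from a certificate can be made human-readable via the
# map, falling back to the raw OID when unknown.
def _oid_to_name(dotted_oid):
    return OIDMap.oid_map.get(dotted_oid, dotted_oid)

# e.g. _oid_to_name("2.5.4.3") -> "id-at-commonName"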
class AlgorithmIdentifier:
#https://www.ipa.go.jp/security/rfc/RFC3279EN.html#211
# sha-1 with rsa encryption: 1.2.840.113549.1.1.5
shawithrsa = bytearray(b'\x2a\x86\x48\x86\xf7\x0d\x01\x01\x05')
#sha256 with rsa encryption
sha256withrsa = bytearray(b'\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b')
# md-5 with rsa encryption: 1.2.840.113549.1.1.4
md5withrsa = bytearray(b'\x2a\x86\x48\x86\xf7\x0d\x01\x01\x04')
# md-2 with rsa encryption: 1.2.840.113549.1.1.2
md2withrsa = bytearray(b'\x2a\x86\x48\x86\xf7\x0d\x01\x01\x02')
# dsa with sha 1
# ecdsa with sha1
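# Sketch of how the dotted OIDs above map to the byte strings in
# AlgorithmIdentifier (an illustration, not part of the original module):
# DER packs the first two arcs as 40*arc1 + arc2, then every further arc
# in big-endian base-128 with the high bit set on all but the last byte.
def _encode_oid(dotted):
    arcs = [int(part) for part in dotted.split('.')]
    body = [40 * arcs[0] + arcs[1]]
    for arc in arcs[2:]:
        chunk = [arc & 0x7F]
        arc >>= 7
        while arc:
            chunk.append((arc & 0x7F) | 0x80)
            arc >>= 7
        body.extend(reversed(chunk))
    return bytearray(body)

# _encode_oid("1.2.840.113549.1.1.5") == AlgorithmIdentifier.shawithrsa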
class CipherSuite:
# Weird pseudo-ciphersuite from RFC 5746: it signals that "secure
# renegotiation" is supported. We don't actually do any renegotiation,
# but advertising it prevents renegotiation attacks.
# Cipher suite ids and names from wireshark/epan/dissectors/packet-ssl-utils.c + GOST.
# Classification is based on OpenSSL's ciphers(1) man page.
cipher_suites = {
'000000': {'name': 'TLS_NULL_WITH_NULL_NULL', 'protocol': 'TLS', 'kx': 'NULL', 'au': 'NULL', 'enc': 'NULL', 'bits': '0', 'mac': 'NULL', 'kxau_strength': 'NULL', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'000001': {'name': 'TLS_RSA_WITH_NULL_MD5', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'NULL', 'bits': '0', 'mac': 'MD5', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'000002': {'name': 'TLS_RSA_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'000003': {'name': 'TLS_RSA_EXPORT_WITH_RC4_40_MD5', 'protocol': 'TLS', 'kx': 'RSA_EXPORT', 'au': 'RSA_EXPORT', 'enc': 'RC4_40', 'bits': '40', 'mac': 'MD5', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000004': {'name': 'TLS_RSA_WITH_RC4_128_MD5', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'000005': {'name': 'TLS_RSA_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'000006': {'name': 'TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5', 'protocol': 'TLS', 'kx': 'RSA_EXPORT', 'au': 'RSA_EXPORT', 'enc': 'RC2_CBC_40', 'bits': '40', 'mac': 'MD5', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000007': {'name': 'TLS_RSA_WITH_IDEA_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'IDEA_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000008': {'name': 'TLS_RSA_EXPORT_WITH_DES40_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA_EXPORT', 'au': 'RSA_EXPORT', 'enc': 'DES40_CBC', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000009': {'name': 'TLS_RSA_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'00000A': {'name': 'TLS_RSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00000B': {'name': 'TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'DES40_CBC', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'00000C': {'name': 'TLS_DH_DSS_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'00000D': {'name': 'TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00000E': {'name': 'TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'DES40_CBC', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'00000F': {'name': 'TLS_DH_RSA_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'000010': {'name': 'TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000011': {'name': 'TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'DES40_CBC', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000012': {'name': 'TLS_DHE_DSS_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'000013': {'name': 'TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000014': {'name': 'TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'DES40_CBC', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000015': {'name': 'TLS_DHE_RSA_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'000016': {'name': 'TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000017': {'name': 'TLS_DH_Anon_EXPORT_WITH_RC4_40_MD5', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'RC4_40', 'bits': '40', 'mac': 'MD5', 'kxau_strength': 'MITM', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000018': {'name': 'TLS_DH_Anon_WITH_RC4_128_MD5', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'RC4_128', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'MITM', 'enc_strength': 'MEDIUM', 'overall_strength': 'MITM'},
'000019': {'name': 'TLS_DH_Anon_EXPORT_WITH_DES40_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'DES40_CBC', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'00001A': {'name': 'TLS_DH_Anon_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'LOW', 'overall_strength': 'MITM'},
'00001B': {'name': 'TLS_DH_Anon_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00001C': {'name': 'SSL_FORTEZZA_KEA_WITH_NULL_SHA', 'protocol': 'SSL', 'kx': 'FORTEZZA', 'au': 'KEA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00001D': {'name': 'SSL_FORTEZZA_KEA_WITH_FORTEZZA_CBC_SHA', 'protocol': 'SSL', 'kx': 'FORTEZZA', 'au': 'KEA', 'enc': 'FORTEZZA_CBC', 'bits': '80', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00001E': {'name': 'TLS_KRB5_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'00001F': {'name': 'TLS_KRB5_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000020': {'name': 'TLS_KRB5_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'000021': {'name': 'TLS_KRB5_WITH_IDEA_CBC_SHA', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': 'IDEA_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000022': {'name': 'TLS_KRB5_WITH_DES_CBC_MD5', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'MD5', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'000023': {'name': 'TLS_KRB5_WITH_3DES_EDE_CBC_MD5', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'MD5', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000024': {'name': 'TLS_KRB5_WITH_RC4_128_MD5', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': 'RC4_128', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'000025': {'name': 'TLS_KRB5_WITH_IDEA_CBC_MD5', 'protocol': 'TLS', 'kx': 'KRB5', 'au': 'KRB5', 'enc': 'IDEA_CBC', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000026': {'name': 'TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA', 'protocol': 'TLS', 'kx': 'KRB5_EXPORT', 'au': 'KRB5_EXPORT', 'enc': 'DES_CBC_40', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000027': {'name': 'TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA', 'protocol': 'TLS', 'kx': 'KRB5_EXPORT', 'au': 'KRB5_EXPORT', 'enc': 'RC2_CBC_40', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000028': {'name': 'TLS_KRB5_EXPORT_WITH_RC4_40_SHA', 'protocol': 'TLS', 'kx': 'KRB5_EXPORT', 'au': 'KRB5_EXPORT', 'enc': 'RC4_40', 'bits': '40', 'mac': 'SHA', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000029': {'name': 'TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5', 'protocol': 'TLS', 'kx': 'KRB5_EXPORT', 'au': 'KRB5_EXPORT', 'enc': 'DES_CBC_40', 'bits': '40', 'mac': 'MD5', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'00002A': {'name': 'TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5', 'protocol': 'TLS', 'kx': 'KRB5_EXPORT', 'au': 'KRB5_EXPORT', 'enc': 'RC2_CBC_40', 'bits': '40', 'mac': 'MD5', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'00002B': {'name': 'TLS_KRB5_EXPORT_WITH_RC4_40_MD5', 'protocol': 'TLS', 'kx': 'KRB5_EXPORT', 'au': 'KRB5_EXPORT', 'enc': 'RC4_40', 'bits': '40', 'mac': 'MD5', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'00002C': {'name': 'TLS_PSK_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00002D': {'name': 'TLS_DHE_PSK_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00002E': {'name': 'TLS_RSA_PSK_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00002F': {'name': 'TLS_RSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000030': {'name': 'TLS_DH_DSS_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000031': {'name': 'TLS_DH_RSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000032': {'name': 'TLS_DHE_DSS_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000033': {'name': 'TLS_DHE_RSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000034': {'name': 'TLS_DH_Anon_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'000035': {'name': 'TLS_RSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000036': {'name': 'TLS_DH_DSS_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000037': {'name': 'TLS_DH_RSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000038': {'name': 'TLS_DHE_DSS_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000039': {'name': 'TLS_DHE_RSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00003A': {'name': 'TLS_DH_Anon_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00003B': {'name': 'TLS_RSA_WITH_NULL_SHA256', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00003C': {'name': 'TLS_RSA_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00003D': {'name': 'TLS_RSA_WITH_AES_256_CBC_SHA256', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00003E': {'name': 'TLS_DH_DSS_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00003F': {'name': 'TLS_DH_RSA_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000040': {'name': 'TLS_DHE_DSS_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000041': {'name': 'TLS_RSA_WITH_CAMELLIA_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'CAMELLIA_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000042': {'name': 'TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'CAMELLIA_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000043': {'name': 'TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'CAMELLIA_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000044': {'name': 'TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'CAMELLIA_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000045': {'name': 'TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'CAMELLIA_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000046': {'name': 'TLS_DH_Anon_WITH_CAMELLIA_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'CAMELLIA_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'000047': {'name': 'TLS_ECDH_ECDSA_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'000048': {'name': 'TLS_ECDH_ECDSA_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'000049': {'name': 'TLS_ECDH_ECDSA_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'00004A': {'name': 'TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00004B': {'name': 'TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00004C': {'name': 'TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000060': {'name': 'TLS_RSA_EXPORT1024_WITH_RC4_56_MD5', 'protocol': 'TLS', 'kx': 'RSA_EXPORT1024', 'au': 'RSA_EXPORT1024', 'enc': 'RC4_56', 'bits': '56', 'mac': 'MD5', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000061': {'name': 'TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5', 'protocol': 'TLS', 'kx': 'RSA_EXPORT1024', 'au': 'RSA_EXPORT1024', 'enc': 'RC2_CBC_56', 'bits': '56', 'mac': 'MD5', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000062': {'name': 'TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA_EXPORT1024', 'au': 'RSA_EXPORT1024', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'EXPORT', 'enc_strength': 'LOW', 'overall_strength': 'EXPORT'},
'000063': {'name': 'TLS_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'000064': {'name': 'TLS_RSA_EXPORT1024_WITH_RC4_56_SHA', 'protocol': 'TLS', 'kx': 'RSA_EXPORT1024', 'au': 'RSA_EXPORT1024', 'enc': 'RC4_56', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'EXPORT', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000065': {'name': 'TLS_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'RC4_56', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'000066': {'name': 'TLS_DHE_DSS_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'000067': {'name': 'TLS_DHE_RSA_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000068': {'name': 'TLS_DH_DSS_WITH_AES_256_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000069': {'name': 'TLS_DH_RSA_WITH_AES_256_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00006A': {'name': 'TLS_DHE_DSS_WITH_AES_256_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00006B': {'name': 'TLS_DHE_RSA_WITH_AES_256_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00006C': {'name': 'TLS_DH_Anon_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00006D': {'name': 'TLS_DH_Anon_WITH_AES_256_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA256', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'000080': {'name': 'TLS_GOSTR341094_WITH_28147_CNT_IMIT', 'protocol': 'TLS', 'kx': 'VKO GOST R 34.10-94', 'au': 'VKO GOST R 34.10-94', 'enc': 'GOST28147', 'bits': '256', 'mac': 'IMIT_GOST28147', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000081': {'name': 'TLS_GOSTR341001_WITH_28147_CNT_IMIT', 'protocol': 'TLS', 'kx': 'VKO GOST R 34.10-2001', 'au': 'VKO GOST R 34.10-2001', 'enc': 'GOST28147', 'bits': '256', 'mac': 'IMIT_GOST28147', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000082': {'name': 'TLS_GOSTR341094_WITH_NULL_GOSTR3411', 'protocol': 'TLS', 'kx': 'VKO GOST R 34.10-94', 'au': 'VKO GOST R 34.10-94', 'enc': 'NULL', 'bits': '0', 'mac': 'HMAC_GOSTR3411', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'000083': {'name': 'TLS_GOSTR341001_WITH_NULL_GOSTR3411', 'protocol': 'TLS', 'kx': 'VKO GOST R 34.10-2001', 'au': 'VKO GOST R 34.10-2001', 'enc': 'NULL', 'bits': '0', 'mac': 'HMAC_GOSTR3411', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'000084': {'name': 'TLS_RSA_WITH_CAMELLIA_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'CAMELLIA_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000085': {'name': 'TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'CAMELLIA_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000086': {'name': 'TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'CAMELLIA_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000087': {'name': 'TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'CAMELLIA_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000088': {'name': 'TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'CAMELLIA_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000089': {'name': 'TLS_DH_Anon_WITH_CAMELLIA_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'CAMELLIA_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00008A': {'name': 'TLS_PSK_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'00008B': {'name': 'TLS_PSK_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00008C': {'name': 'TLS_PSK_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00008D': {'name': 'TLS_PSK_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00008E': {'name': 'TLS_DHE_PSK_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'00008F': {'name': 'TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000090': {'name': 'TLS_DHE_PSK_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000091': {'name': 'TLS_DHE_PSK_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000092': {'name': 'TLS_RSA_PSK_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'000093': {'name': 'TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000094': {'name': 'TLS_RSA_PSK_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000095': {'name': 'TLS_RSA_PSK_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000096': {'name': 'TLS_RSA_WITH_SEED_CBC_SHA', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'SEED_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000097': {'name': 'TLS_DH_DSS_WITH_SEED_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'SEED_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000098': {'name': 'TLS_DH_RSA_WITH_SEED_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'SEED_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'000099': {'name': 'TLS_DHE_DSS_WITH_SEED_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'SEED_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00009A': {'name': 'TLS_DHE_RSA_WITH_SEED_CBC_SHA', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'SEED_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00009B': {'name': 'TLS_DH_Anon_WITH_SEED_CBC_SHA', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'SEED_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00009C': {'name': 'TLS_RSA_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00009D': {'name': 'TLS_RSA_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'RSA', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00009E': {'name': 'TLS_DHE_RSA_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00009F': {'name': 'TLS_DHE_RSA_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'RSA', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A0': {'name': 'TLS_DH_RSA_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A1': {'name': 'TLS_DH_RSA_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'DH', 'au': 'RSA', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A2': {'name': 'TLS_DHE_DSS_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A3': {'name': 'TLS_DHE_DSS_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'DSS', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A4': {'name': 'TLS_DH_DSS_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A5': {'name': 'TLS_DH_DSS_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'DH', 'au': 'DSS', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A6': {'name': 'TLS_DH_Anon_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'0000A7': {'name': 'TLS_DH_Anon_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'DH', 'au': 'Anon', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'0000A8': {'name': 'TLS_PSK_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000A9': {'name': 'TLS_PSK_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000AA': {'name': 'TLS_DHE_PSK_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000AB': {'name': 'TLS_DHE_PSK_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000AC': {'name': 'TLS_RSA_PSK_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000AD': {'name': 'TLS_RSA_PSK_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000AE': {'name': 'TLS_PSK_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000AF': {'name': 'TLS_PSK_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000B0': {'name': 'TLS_PSK_WITH_NULL_SHA256', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'0000B1': {'name': 'TLS_PSK_WITH_NULL_SHA384', 'protocol': 'TLS', 'kx': 'PSK', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'0000B2': {'name': 'TLS_DHE_PSK_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000B3': {'name': 'TLS_DHE_PSK_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000B4': {'name': 'TLS_DHE_PSK_WITH_NULL_SHA256', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'0000B5': {'name': 'TLS_DHE_PSK_WITH_NULL_SHA384', 'protocol': 'TLS', 'kx': 'DHE', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'0000B6': {'name': 'TLS_RSA_PSK_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000B7': {'name': 'TLS_RSA_PSK_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'0000B8': {'name': 'TLS_RSA_PSK_WITH_NULL_SHA256', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'0000B9': {'name': 'TLS_RSA_PSK_WITH_NULL_SHA384', 'protocol': 'TLS', 'kx': 'RSA', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C001': {'name': 'TLS_ECDH_ECDSA_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C002': {'name': 'TLS_ECDH_ECDSA_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'00C003': {'name': 'TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C004': {'name': 'TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C005': {'name': 'TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C006': {'name': 'TLS_ECDHE_ECDSA_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C007': {'name': 'TLS_ECDHE_ECDSA_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'00C008': {'name': 'TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C009': {'name': 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C00A': {'name': 'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C00B': {'name': 'TLS_ECDH_RSA_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C00C': {'name': 'TLS_ECDH_RSA_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'00C00D': {'name': 'TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C00E': {'name': 'TLS_ECDH_RSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C00F': {'name': 'TLS_ECDH_RSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C010': {'name': 'TLS_ECDHE_RSA_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C011': {'name': 'TLS_ECDHE_RSA_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'00C012': {'name': 'TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C013': {'name': 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C014': {'name': 'TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C015': {'name': 'TLS_ECDH_Anon_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'Anon', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C016': {'name': 'TLS_ECDH_Anon_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'Anon', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'MEDIUM', 'overall_strength': 'MITM'},
'00C017': {'name': 'TLS_ECDH_Anon_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'Anon', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00C018': {'name': 'TLS_ECDH_Anon_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'Anon', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00C019': {'name': 'TLS_ECDH_Anon_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'Anon', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'MITM', 'enc_strength': 'HIGH', 'overall_strength': 'MITM'},
'00C01A': {'name': 'TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C01B': {'name': 'TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C01C': {'name': 'TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C01D': {'name': 'TLS_SRP_SHA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C01E': {'name': 'TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C01F': {'name': 'TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C020': {'name': 'TLS_SRP_SHA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C021': {'name': 'TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C022': {'name': 'TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'SRP', 'au': 'SHA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C023': {'name': 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C024': {'name': 'TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C025': {'name': 'TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C026': {'name': 'TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C027': {'name': 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C028': {'name': 'TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C029': {'name': 'TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C02A': {'name': 'TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C02B': {'name': 'TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C02C': {'name': 'TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'ECDSA', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C02D': {'name': 'TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C02E': {'name': 'TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'ECDSA', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C02F': {'name': 'TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C030': {'name': 'TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'RSA', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C031': {'name': 'TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'AES_128_GCM', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C032': {'name': 'TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384', 'protocol': 'TLS', 'kx': 'ECDH', 'au': 'RSA', 'enc': 'AES_256_GCM', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C033': {'name': 'TLS_ECDHE_PSK_WITH_RC4_128_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'RC4_128', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'MEDIUM', 'overall_strength': 'MEDIUM'},
'00C034': {'name': 'TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C035': {'name': 'TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C036': {'name': 'TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C037': {'name': 'TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'AES_128_CBC', 'bits': '128', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C038': {'name': 'TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'AES_256_CBC', 'bits': '256', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00C039': {'name': 'TLS_ECDHE_PSK_WITH_NULL_SHA', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C03A': {'name': 'TLS_ECDHE_PSK_WITH_NULL_SHA256', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA256', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00C03B': {'name': 'TLS_ECDHE_PSK_WITH_NULL_SHA384', 'protocol': 'TLS', 'kx': 'ECDHE', 'au': 'PSK', 'enc': 'NULL', 'bits': '0', 'mac': 'SHA384', 'kxau_strength': 'HIGH', 'enc_strength': 'NULL', 'overall_strength': 'NULL'},
'00FEFE': {'name': 'SSL_RSA_FIPS_WITH_DES_CBC_SHA', 'protocol': 'SSL', 'kx': 'RSA_FIPS', 'au': 'RSA_FIPS', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'00FEFF': {'name': 'SSL_RSA_FIPS_WITH_3DES_EDE_CBC_SHA', 'protocol': 'SSL', 'kx': 'RSA_FIPS', 'au': 'RSA_FIPS', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00FFE0': {'name': 'SSL_RSA_FIPS_WITH_3DES_EDE_CBC_SHA', 'protocol': 'SSL', 'kx': 'RSA_FIPS', 'au': 'RSA_FIPS', 'enc': '3DES_EDE_CBC', 'bits': '168', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'HIGH', 'overall_strength': 'HIGH'},
'00FFE1': {'name': 'SSL_RSA_FIPS_WITH_DES_CBC_SHA', 'protocol': 'SSL', 'kx': 'RSA_FIPS', 'au': 'RSA_FIPS', 'enc': 'DES_CBC', 'bits': '56', 'mac': 'SHA', 'kxau_strength': 'HIGH', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'010080': {'name': 'SSL2_RC4_128_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'RC4_128', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'MEDIUM', 'overall_strength': 'LOW'},
'020080': {'name': 'SSL2_RC4_128_EXPORT40_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'RC4_128_EXPORT40', 'bits': '40', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'EXPORT', 'overall_strength': 'EXPORT'},
'030080': {'name': 'SSL2_RC2_CBC_128_CBC_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'RC2_CBC_128_CBC', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'040080': {'name': 'SSL2_RC2_CBC_128_CBC_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'RC2_CBC_128_CBC', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'050080': {'name': 'SSL2_IDEA_128_CBC_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'IDEA_128_CBC', 'bits': '128', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'HIGH', 'overall_strength': 'LOW'},
'060040': {'name': 'SSL2_DES_64_CBC_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'DES_64_CBC', 'bits': '64', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'0700C0': {'name': 'SSL2_DES_192_EDE3_CBC_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'DES_192_EDE3_CBC', 'bits': '192', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'HIGH', 'overall_strength': 'LOW'},
'080080': {'name': 'SSL2_RC4_64_WITH_MD5', 'protocol': 'SSL2', 'kx': 'RSA', 'au': 'RSA', 'enc': 'RC4_64', 'bits': '64', 'mac': 'MD5', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'800001': {'name': 'PCT_SSL_CERT_TYPE | PCT1_CERT_X509', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'800003': {'name': 'PCT_SSL_CERT_TYPE | PCT1_CERT_X509_CHAIN', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'810001': {'name': 'PCT_SSL_HASH_TYPE | PCT1_HASH_MD5', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'810003': {'name': 'PCT_SSL_HASH_TYPE | PCT1_HASH_SHA', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'820001': {'name': 'PCT_SSL_EXCH_TYPE | PCT1_EXCH_RSA_PKCS1', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'830004': {'name': 'PCT_SSL_CIPHER_TYPE_1ST_HALF | PCT1_CIPHER_RC4', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'842840': {'name': 'PCT_SSL_CIPHER_TYPE_2ND_HALF | PCT1_ENC_BITS_40 | PCT1_MAC_BITS_128', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'848040': {'name': 'PCT_SSL_CIPHER_TYPE_2ND_HALF | PCT1_ENC_BITS_128 | PCT1_MAC_BITS_128', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
'8F8001': {'name': 'PCT_SSL_COMPAT | PCT_VERSION_1', 'protocol': 'PCT', 'kx': '', 'au': '', 'enc': '', 'bits': '', 'mac': '', 'kxau_strength': 'LOW', 'enc_strength': 'LOW', 'overall_strength': 'LOW'},
}
    # Added by Vikas
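    # Cipher suite code points below follow the assignments in the IANA TLS Cipher Suite Registry.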
TLS_NULL_WITH_NULL_NULL = 0x0000
TLS_RSA_WITH_NULL_MD5 = 0x0001
TLS_RSA_WITH_NULL_SHA = 0x0002
TLS_RSA_EXPORT_WITH_RC4_40_MD5 = 0x0003
TLS_RSA_WITH_RC4_128_MD5 = 0x0004
TLS_RSA_WITH_RC4_128_SHA = 0x0005
TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5 = 0x0006
TLS_RSA_WITH_IDEA_CBC_SHA = 0x0007
TLS_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0008
TLS_RSA_WITH_DES_CBC_SHA = 0x0009
TLS_RSA_WITH_3DES_EDE_CBC_SHA = 0x000A
    TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x000B
TLS_DH_DSS_WITH_DES_CBC_SHA = 0x000C
TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA = 0x000D
TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x000E
TLS_DH_RSA_WITH_DES_CBC_SHA = 0x000F
TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA = 0x0010
TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA = 0x0011
TLS_DHE_DSS_WITH_DES_CBC_SHA = 0x0012
TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA = 0x0013
TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA = 0x0014
TLS_RSA_EXPORT1024_WITH_RC4_56_MD5 = 0x0060
TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5 = 0x0061
TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA = 0x0062
TLS_RSA_EXPORT1024_WITH_RC4_56_SHA = 0x0064
TLS_DHE_RSA_WITH_DES_CBC_SHA = 0x0015
TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA = 0x0016
TLS_DH_anon_EXPORT_WITH_RC4_40_MD5 = 0x0017
TLS_DH_anon_WITH_RC4_128_MD5 = 0x0018
TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA = 0x0019
TLS_DH_anon_WITH_DES_CBC_SHA = 0x001A
TLS_DH_anon_WITH_3DES_EDE_CBC_SHA = 0x001B
TLS_KRB5_WITH_DES_CBC_SHA = 0x001E
TLS_KRB5_WITH_3DES_EDE_CBC_SHA = 0x001F
TLS_KRB5_WITH_RC4_128_SHA = 0x0020
TLS_KRB5_WITH_IDEA_CBC_SHA = 0x0021
TLS_KRB5_WITH_DES_CBC_MD5 = 0x0022
TLS_KRB5_WITH_3DES_EDE_CBC_MD5 = 0x0023
TLS_KRB5_WITH_RC4_128_MD5 = 0x0024
TLS_KRB5_WITH_IDEA_CBC_MD5 = 0x0025
TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA = 0x0026
TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA = 0x0027
TLS_KRB5_EXPORT_WITH_RC4_40_SHA = 0x0028
TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5 = 0x0029
TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5 = 0x002A
TLS_KRB5_EXPORT_WITH_RC4_40_MD5 = 0x002B
TLS_PSK_WITH_NULL_SHA = 0x002C
TLS_DHE_PSK_WITH_NULL_SHA = 0x002D
TLS_RSA_PSK_WITH_NULL_SHA = 0x002E
TLS_PSK_WITH_RC4_128_SHA = 0x008A
TLS_PSK_WITH_3DES_EDE_CBC_SHA = 0x008B
TLS_PSK_WITH_AES_128_CBC_SHA = 0x008C
TLS_PSK_WITH_AES_256_CBC_SHA = 0x008D
TLS_DHE_PSK_WITH_RC4_128_SHA = 0x008E
TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA = 0x008F
TLS_DHE_PSK_WITH_AES_128_CBC_SHA = 0x0090
TLS_DHE_PSK_WITH_AES_256_CBC_SHA = 0x0091
TLS_RSA_PSK_WITH_RC4_128_SHA = 0x0092
TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA = 0x0093
TLS_RSA_PSK_WITH_AES_128_CBC_SHA = 0x0094
TLS_RSA_PSK_WITH_AES_256_CBC_SHA = 0x0095
TLS_PSK_WITH_AES_128_GCM_SHA256 = 0x00A8
TLS_PSK_WITH_AES_256_GCM_SHA384 = 0x00A9
TLS_DHE_PSK_WITH_AES_128_GCM_SHA256 = 0x00AA
TLS_DHE_PSK_WITH_AES_256_GCM_SHA384 = 0x00AB
TLS_RSA_PSK_WITH_AES_128_GCM_SHA256 = 0x00AC
TLS_RSA_PSK_WITH_AES_256_GCM_SHA384 = 0x00AD
TLS_PSK_WITH_AES_128_CBC_SHA256 = 0x00AE
TLS_PSK_WITH_AES_256_CBC_SHA384 = 0x00AF
TLS_PSK_WITH_NULL_SHA256 = 0x00B0
TLS_PSK_WITH_NULL_SHA384 = 0x00B1
TLS_DHE_PSK_WITH_AES_128_CBC_SHA256 = 0x00B2
TLS_DHE_PSK_WITH_AES_256_CBC_SHA384 = 0x00B3
TLS_DHE_PSK_WITH_NULL_SHA256 = 0x00B4
TLS_DHE_PSK_WITH_NULL_SHA384 = 0x00B5
TLS_RSA_PSK_WITH_AES_128_CBC_SHA256 = 0x00B6
TLS_RSA_PSK_WITH_AES_256_CBC_SHA384 = 0x00B7
TLS_RSA_PSK_WITH_NULL_SHA256 = 0x00B8
TLS_RSA_PSK_WITH_NULL_SHA384 = 0x00B9
TLS_ECDHE_PSK_WITH_RC4_128_SHA = 0xC033
TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA = 0xC034
TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA = 0xC035
TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA = 0xC036
TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256 = 0xC037
TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384 = 0xC038
TLS_ECDHE_PSK_WITH_NULL_SHA = 0xC039
TLS_ECDHE_PSK_WITH_NULL_SHA256 = 0xC03A
TLS_ECDHE_PSK_WITH_NULL_SHA384 = 0xC03B
TLS_PSK_WITH_ARIA_128_CBC_SHA256 = 0xC064
TLS_PSK_WITH_ARIA_256_CBC_SHA384 = 0xC065
TLS_DHE_PSK_WITH_ARIA_128_CBC_SHA256 = 0xC066
TLS_DHE_PSK_WITH_ARIA_256_CBC_SHA384 = 0xC067
TLS_RSA_PSK_WITH_ARIA_128_CBC_SHA256 = 0xC068
TLS_RSA_PSK_WITH_ARIA_256_CBC_SHA384 = 0xC069
TLS_PSK_WITH_ARIA_128_GCM_SHA256 = 0xC06A
TLS_PSK_WITH_ARIA_256_GCM_SHA384 = 0xC06B
TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256 = 0xC06C
TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384 = 0xC06D
TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256 = 0xC06E
TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384 = 0xC06F
TLS_ECDHE_PSK_WITH_ARIA_128_CBC_SHA256 = 0xC070
TLS_ECDHE_PSK_WITH_ARIA_256_CBC_SHA384 = 0xC071
TLS_PSK_WITH_CAMELLIA_128_GCM_SHA256 = 0xC08E
TLS_PSK_WITH_CAMELLIA_256_GCM_SHA384 = 0xC08F
TLS_DHE_PSK_WITH_CAMELLIA_128_GCM_SHA256 = 0xC090
TLS_DHE_PSK_WITH_CAMELLIA_256_GCM_SHA384 = 0xC091
TLS_RSA_PSK_WITH_CAMELLIA_128_GCM_SHA256 = 0xC092
TLS_RSA_PSK_WITH_CAMELLIA_256_GCM_SHA384 = 0xC093
TLS_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC094
TLS_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC095
TLS_DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC096
TLS_DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC097
TLS_RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC098
TLS_RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC099
TLS_ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256 = 0xC09A
TLS_ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384 = 0xC09B
TLS_PSK_WITH_AES_128_CCM = 0xC0A4
TLS_PSK_WITH_AES_256_CCM = 0xC0A5
TLS_DHE_PSK_WITH_AES_128_CCM = 0xC0A6
TLS_DHE_PSK_WITH_AES_256_CCM = 0xC0A7
TLS_PSK_WITH_AES_128_CCM_8 = 0xC0A8
TLS_PSK_WITH_AES_256_CCM_8 = 0xC0A9
TLS_PSK_DHE_WITH_AES_128_CCM_8 = 0xC0AA
TLS_PSK_DHE_WITH_AES_256_CCM_8 = 0xC0AB
TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
TLS_DH_DSS_WITH_AES_128_CBC_SHA = 0x0030
TLS_DH_RSA_WITH_AES_128_CBC_SHA = 0x0031
TLS_DHE_DSS_WITH_AES_128_CBC_SHA = 0x0032
TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
TLS_DH_anon_WITH_AES_128_CBC_SHA = 0x0034
TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
TLS_DH_DSS_WITH_AES_256_CBC_SHA = 0x0036
TLS_DH_RSA_WITH_AES_256_CBC_SHA = 0x0037
TLS_DHE_DSS_WITH_AES_256_CBC_SHA = 0x0038
TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
TLS_DH_anon_WITH_AES_256_CBC_SHA = 0x003A
TLS_RSA_WITH_NULL_SHA256 = 0x003B
TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
TLS_DH_DSS_WITH_AES_128_CBC_SHA256 = 0x003E
TLS_DH_RSA_WITH_AES_128_CBC_SHA256 = 0x003F
TLS_RSA_WITH_CAMELLIA_128_CBC_SHA = 0x0041
TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA = 0x0042
TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA = 0x0043
TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA = 0x0044
TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA = 0x0045
TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA = 0x0046
TLS_RSA_WITH_CAMELLIA_256_CBC_SHA = 0x0084
TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA = 0x0085
TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA = 0x0086
TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA = 0x0087
TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA = 0x0088
TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA = 0x0089
TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BA
TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BB
TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BC
TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BD
TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BE
TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA256 = 0x00BF
TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C0
TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C1
TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C2
TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C3
TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C4
TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA256 = 0x00C5
TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC072
TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC073
TLS_ECDH_ECDSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC074
TLS_ECDH_ECDSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC075
TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC076
TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC077
TLS_ECDH_RSA_WITH_CAMELLIA_128_CBC_SHA256 = 0xC078
TLS_ECDH_RSA_WITH_CAMELLIA_256_CBC_SHA384 = 0xC079
TLS_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC07A
TLS_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC07B
TLS_DHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC07C
TLS_DHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC07D
TLS_DH_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC07E
TLS_DH_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC07F
TLS_DHE_DSS_WITH_CAMELLIA_128_GCM_SHA256 = 0xC080
TLS_DHE_DSS_WITH_CAMELLIA_256_GCM_SHA384 = 0xC081
TLS_DH_DSS_WITH_CAMELLIA_128_GCM_SHA256 = 0xC082
TLS_DH_DSS_WITH_CAMELLIA_256_GCM_SHA384 = 0xC083
TLS_DH_anon_WITH_CAMELLIA_128_GCM_SHA256 = 0xC084
TLS_DH_anon_WITH_CAMELLIA_256_GCM_SHA384 = 0xC085
TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC086
TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC087
TLS_ECDH_ECDSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC088
TLS_ECDH_ECDSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC089
TLS_ECDHE_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC08A
TLS_ECDHE_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC08B
TLS_ECDH_RSA_WITH_CAMELLIA_128_GCM_SHA256 = 0xC08C
TLS_ECDH_RSA_WITH_CAMELLIA_256_GCM_SHA384 = 0xC08D
TLS_DHE_DSS_WITH_AES_128_CBC_SHA256 = 0x0040
TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
TLS_DH_DSS_WITH_AES_256_CBC_SHA256 = 0x0068
TLS_DH_RSA_WITH_AES_256_CBC_SHA256 = 0x0069
TLS_DHE_DSS_WITH_AES_256_CBC_SHA256 = 0x006A
TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
TLS_DH_anon_WITH_AES_128_CBC_SHA256 = 0x006C
TLS_DH_anon_WITH_AES_256_CBC_SHA256 = 0x006D
TLS_RSA_WITH_SEED_CBC_SHA = 0x0096
TLS_DH_DSS_WITH_SEED_CBC_SHA = 0x0097
TLS_DH_RSA_WITH_SEED_CBC_SHA = 0x0098
TLS_DHE_DSS_WITH_SEED_CBC_SHA = 0x0099
TLS_DHE_RSA_WITH_SEED_CBC_SHA = 0x009A
TLS_DH_anon_WITH_SEED_CBC_SHA = 0x009B
TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
TLS_DH_RSA_WITH_AES_128_GCM_SHA256 = 0x00A0
TLS_DH_RSA_WITH_AES_256_GCM_SHA384 = 0x00A1
TLS_DHE_DSS_WITH_AES_128_GCM_SHA256 = 0x00A2
TLS_DHE_DSS_WITH_AES_256_GCM_SHA384 = 0x00A3
TLS_DH_DSS_WITH_AES_128_GCM_SHA256 = 0x00A4
TLS_DH_DSS_WITH_AES_256_GCM_SHA384 = 0x00A5
TLS_DH_anon_WITH_AES_128_GCM_SHA256 = 0x00A6
TLS_DH_anon_WITH_AES_256_GCM_SHA384 = 0x00A7
TLS_EMPTY_RENEGOTIATION_INFO_SCSV = 0x00FF
TLS_ECDH_ECDSA_WITH_NULL_SHA = 0xC001
TLS_ECDH_ECDSA_WITH_RC4_128_SHA = 0xC002
TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC003
TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA = 0xC004
TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA = 0xC005
TLS_ECDHE_ECDSA_WITH_NULL_SHA = 0xC006
TLS_ECDHE_ECDSA_WITH_RC4_128_SHA = 0xC007
TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA = 0xC008
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
TLS_ECDH_RSA_WITH_NULL_SHA = 0xC00B
TLS_ECDH_RSA_WITH_RC4_128_SHA = 0xC00C
TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA = 0xC00D
TLS_ECDH_RSA_WITH_AES_128_CBC_SHA = 0xC00E
TLS_ECDH_RSA_WITH_AES_256_CBC_SHA = 0xC00F
TLS_ECDHE_RSA_WITH_NULL_SHA = 0xC010
TLS_ECDHE_RSA_WITH_RC4_128_SHA = 0xC011
TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA = 0xC012
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
TLS_ECDH_anon_WITH_NULL_SHA = 0xC015
TLS_ECDH_anon_WITH_RC4_128_SHA = 0xC016
TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA = 0xC017
TLS_ECDH_anon_WITH_AES_128_CBC_SHA = 0xC018
TLS_ECDH_anon_WITH_AES_256_CBC_SHA = 0xC019
TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA = 0xC01A
TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA = 0xC01B
TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA = 0xC01C
TLS_SRP_SHA_WITH_AES_128_CBC_SHA = 0xC01D
TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA = 0xC01E
TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA = 0xC01F
TLS_SRP_SHA_WITH_AES_256_CBC_SHA = 0xC020
TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA = 0xC021
TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA = 0xC022
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC025
TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC026
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256 = 0xC029
TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384 = 0xC02A
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02D
TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02E
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256 = 0xC031
TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384 = 0xC032
TLS_RSA_WITH_ARIA_128_CBC_SHA256 = 0xC03C
TLS_RSA_WITH_ARIA_256_CBC_SHA384 = 0xC03D
TLS_DH_DSS_WITH_ARIA_128_CBC_SHA256 = 0xC03E
TLS_DH_DSS_WITH_ARIA_256_CBC_SHA384 = 0xC03F
TLS_DH_RSA_WITH_ARIA_128_CBC_SHA256 = 0xC040
TLS_DH_RSA_WITH_ARIA_256_CBC_SHA384 = 0xC041
TLS_DHE_DSS_WITH_ARIA_128_CBC_SHA256 = 0xC042
TLS_DHE_DSS_WITH_ARIA_256_CBC_SHA384 = 0xC043
TLS_DHE_RSA_WITH_ARIA_128_CBC_SHA256 = 0xC044
TLS_DHE_RSA_WITH_ARIA_256_CBC_SHA384 = 0xC045
TLS_DH_anon_WITH_ARIA_128_CBC_SHA256 = 0xC046
TLS_DH_anon_WITH_ARIA_256_CBC_SHA384 = 0xC047
TLS_ECDHE_ECDSA_WITH_ARIA_128_CBC_SHA256 = 0xC048
TLS_ECDHE_ECDSA_WITH_ARIA_256_CBC_SHA384 = 0xC049
TLS_ECDH_ECDSA_WITH_ARIA_128_CBC_SHA256 = 0xC04A
TLS_ECDH_ECDSA_WITH_ARIA_256_CBC_SHA384 = 0xC04B
TLS_ECDHE_RSA_WITH_ARIA_128_CBC_SHA256 = 0xC04C
TLS_ECDHE_RSA_WITH_ARIA_256_CBC_SHA384 = 0xC04D
TLS_ECDH_RSA_WITH_ARIA_128_CBC_SHA256 = 0xC04E
TLS_ECDH_RSA_WITH_ARIA_256_CBC_SHA384 = 0xC04F
TLS_RSA_WITH_ARIA_128_GCM_SHA256 = 0xC050
TLS_RSA_WITH_ARIA_256_GCM_SHA384 = 0xC051
TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256 = 0xC052
TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384 = 0xC053
TLS_DH_RSA_WITH_ARIA_128_GCM_SHA256 = 0xC054
TLS_DH_RSA_WITH_ARIA_256_GCM_SHA384 = 0xC055
TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256 = 0xC056
TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384 = 0xC057
TLS_DH_DSS_WITH_ARIA_128_GCM_SHA256 = 0xC058
TLS_DH_DSS_WITH_ARIA_256_GCM_SHA384 = 0xC059
TLS_DH_anon_WITH_ARIA_128_GCM_SHA256 = 0xC05A
TLS_DH_anon_WITH_ARIA_256_GCM_SHA384 = 0xC05B
TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256 = 0xC05C
TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384 = 0xC05D
TLS_ECDH_ECDSA_WITH_ARIA_128_GCM_SHA256 = 0xC05E
TLS_ECDH_ECDSA_WITH_ARIA_256_GCM_SHA384 = 0xC05F
TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256 = 0xC060
TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384 = 0xC061
TLS_ECDH_RSA_WITH_ARIA_128_GCM_SHA256 = 0xC062
TLS_ECDH_RSA_WITH_ARIA_256_GCM_SHA384 = 0xC063
TLS_RSA_WITH_AES_128_CCM = 0xC09C
TLS_RSA_WITH_AES_256_CCM = 0xC09D
TLS_DHE_RSA_WITH_AES_128_CCM = 0xC09E
TLS_DHE_RSA_WITH_AES_256_CCM = 0xC09F
TLS_RSA_WITH_AES_128_CCM_8 = 0xC0A0
TLS_RSA_WITH_AES_256_CCM_8 = 0xC0A1
TLS_DHE_RSA_WITH_AES_128_CCM_8 = 0xC0A2
TLS_DHE_RSA_WITH_AES_256_CCM_8 = 0xC0A3
TLS_ECDHE_ECDSA_WITH_AES_128_CCM = 0xC0AC
TLS_ECDHE_ECDSA_WITH_AES_256_CCM = 0xC0AD
TLS_ECDHE_ECDSA_WITH_AES_128_CCM_8 = 0xC0AE
TLS_ECDHE_ECDSA_WITH_AES_256_CCM_8 = 0xC0AF
    # POODLE-defense signaling cipher suite (RFC 7507)
TLS_FALLBACK_SCSV = 0x5600
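    # all_suites below collects every suite defined above into one master list.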
all_suites = []
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_ECDHE_PSK_WITH_RC4_128_SHA)
all_suites.append(TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_PSK_WITH_NULL_SHA)
all_suites.append(TLS_ECDHE_PSK_WITH_NULL_SHA256)
all_suites.append(TLS_ECDHE_PSK_WITH_NULL_SHA384)
all_suites.append(TLS_ECDHE_RSA_WITH_NULL_SHA)
all_suites.append(TLS_ECDHE_RSA_WITH_RC4_128_SHA)
all_suites.append(TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_ECDHE_ECDSA_WITH_NULL_SHA)
all_suites.append(TLS_ECDHE_ECDSA_WITH_RC4_128_SHA)
all_suites.append(TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_NULL_WITH_NULL_NULL)
all_suites.append(TLS_RSA_WITH_NULL_MD5)
all_suites.append(TLS_RSA_WITH_NULL_SHA)
all_suites.append(TLS_RSA_EXPORT_WITH_RC4_40_MD5)
all_suites.append(TLS_RSA_WITH_RC4_128_MD5)
all_suites.append(TLS_RSA_WITH_RC4_128_SHA)
all_suites.append(TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5)
all_suites.append(TLS_RSA_WITH_IDEA_CBC_SHA)
all_suites.append(TLS_RSA_EXPORT_WITH_DES40_CBC_SHA)
all_suites.append(TLS_RSA_WITH_DES_CBC_SHA)
all_suites.append(TLS_RSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA)
all_suites.append(TLS_DH_DSS_WITH_DES_CBC_SHA)
all_suites.append(TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA)
all_suites.append(TLS_DH_RSA_WITH_DES_CBC_SHA)
all_suites.append(TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA)
all_suites.append(TLS_DHE_DSS_WITH_DES_CBC_SHA)
all_suites.append(TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_DES_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_DH_anon_EXPORT_WITH_RC4_40_MD5)
all_suites.append(TLS_DH_anon_WITH_RC4_128_MD5)
all_suites.append(TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA)
all_suites.append(TLS_DH_anon_WITH_DES_CBC_SHA)
all_suites.append(TLS_DH_anon_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_KRB5_WITH_DES_CBC_SHA)
all_suites.append(TLS_KRB5_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_KRB5_WITH_RC4_128_SHA)
all_suites.append(TLS_KRB5_WITH_IDEA_CBC_SHA)
all_suites.append(TLS_KRB5_WITH_DES_CBC_MD5)
all_suites.append(TLS_KRB5_WITH_3DES_EDE_CBC_MD5)
all_suites.append(TLS_KRB5_WITH_RC4_128_MD5)
all_suites.append(TLS_KRB5_WITH_IDEA_CBC_MD5)
all_suites.append(TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA)
all_suites.append(TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA)
all_suites.append(TLS_KRB5_EXPORT_WITH_RC4_40_SHA)
all_suites.append(TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5)
all_suites.append(TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5)
all_suites.append(TLS_KRB5_EXPORT_WITH_RC4_40_MD5)
all_suites.append(TLS_PSK_WITH_NULL_SHA)
all_suites.append(TLS_DHE_PSK_WITH_NULL_SHA)
all_suites.append(TLS_RSA_PSK_WITH_NULL_SHA)
all_suites.append(TLS_RSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_DH_DSS_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_DH_RSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_DHE_DSS_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_DH_anon_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_RSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_DH_DSS_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_DH_RSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_DHE_DSS_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_DH_anon_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_RSA_WITH_NULL_SHA256)
all_suites.append(TLS_RSA_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_RSA_WITH_AES_256_CBC_SHA256)
all_suites.append(TLS_DH_DSS_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_DH_RSA_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_RSA_WITH_CAMELLIA_128_CBC_SHA)
all_suites.append(TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA)
all_suites.append(TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA)
all_suites.append(TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA)
all_suites.append(TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_DH_DSS_WITH_AES_256_CBC_SHA256)
all_suites.append(TLS_DH_RSA_WITH_AES_256_CBC_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_AES_256_CBC_SHA256)
all_suites.append(TLS_DHE_RSA_WITH_AES_256_CBC_SHA256)
all_suites.append(TLS_DH_anon_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_DH_anon_WITH_AES_256_CBC_SHA256)
all_suites.append(TLS_RSA_WITH_CAMELLIA_256_CBC_SHA)
all_suites.append(TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA)
all_suites.append(TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA)
all_suites.append(TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA)
all_suites.append(TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA)
all_suites.append(TLS_PSK_WITH_RC4_128_SHA)
all_suites.append(TLS_PSK_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_PSK_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_PSK_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_DHE_PSK_WITH_RC4_128_SHA)
all_suites.append(TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_DHE_PSK_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_DHE_PSK_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_RSA_PSK_WITH_RC4_128_SHA)
all_suites.append(TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_RSA_PSK_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_RSA_PSK_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_RSA_WITH_SEED_CBC_SHA)
all_suites.append(TLS_DH_DSS_WITH_SEED_CBC_SHA)
all_suites.append(TLS_DH_RSA_WITH_SEED_CBC_SHA)
all_suites.append(TLS_DHE_DSS_WITH_SEED_CBC_SHA)
all_suites.append(TLS_DHE_RSA_WITH_SEED_CBC_SHA)
all_suites.append(TLS_DH_anon_WITH_SEED_CBC_SHA)
all_suites.append(TLS_RSA_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_RSA_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_DHE_RSA_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_DHE_RSA_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_DH_RSA_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_DH_RSA_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_DHE_DSS_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_DH_DSS_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_DH_DSS_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_DH_anon_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_DH_anon_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_PSK_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_PSK_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_DHE_PSK_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_DHE_PSK_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_RSA_PSK_WITH_AES_128_GCM_SHA256)
all_suites.append(TLS_RSA_PSK_WITH_AES_256_GCM_SHA384)
all_suites.append(TLS_PSK_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_PSK_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_PSK_WITH_NULL_SHA256)
all_suites.append(TLS_PSK_WITH_NULL_SHA384)
all_suites.append(TLS_DHE_PSK_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_DHE_PSK_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_DHE_PSK_WITH_NULL_SHA256)
all_suites.append(TLS_DHE_PSK_WITH_NULL_SHA384)
all_suites.append(TLS_RSA_PSK_WITH_AES_128_CBC_SHA256)
all_suites.append(TLS_RSA_PSK_WITH_AES_256_CBC_SHA384)
all_suites.append(TLS_RSA_PSK_WITH_NULL_SHA256)
all_suites.append(TLS_RSA_PSK_WITH_NULL_SHA384)
all_suites.append(TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256)
all_suites.append(TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA256)
all_suites.append(TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256)
all_suites.append(TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256)
all_suites.append(TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA256)
all_suites.append(TLS_EMPTY_RENEGOTIATION_INFO_SCSV)
all_suites.append(TLS_ECDH_ECDSA_WITH_NULL_SHA)
all_suites.append(TLS_ECDH_ECDSA_WITH_RC4_128_SHA)
all_suites.append(TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_ECDH_RSA_WITH_NULL_SHA)
all_suites.append(TLS_ECDH_RSA_WITH_RC4_128_SHA)
all_suites.append(TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_ECDH_RSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_ECDH_RSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_ECDH_anon_WITH_NULL_SHA)
all_suites.append(TLS_ECDH_anon_WITH_RC4_128_SHA)
all_suites.append(TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_ECDH_anon_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_ECDH_anon_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA)
all_suites.append(TLS_SRP_SHA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA)
all_suites.append(TLS_SRP_SHA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA)
all_suites.append(TLS_RSA_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_RSA_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_DH_DSS_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_DH_DSS_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_DH_RSA_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_DH_RSA_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_DHE_DSS_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_DHE_RSA_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_DHE_RSA_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_DH_anon_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_DH_anon_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_ECDSA_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_ECDSA_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_ECDH_ECDSA_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_ECDH_ECDSA_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_RSA_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_RSA_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_ECDH_RSA_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_ECDH_RSA_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_RSA_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_RSA_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_DH_RSA_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_DH_RSA_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_DH_DSS_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_DH_DSS_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_DH_anon_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_DH_anon_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_ECDH_ECDSA_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_ECDH_ECDSA_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_ECDH_RSA_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_ECDH_RSA_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_PSK_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_PSK_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_DHE_PSK_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_DHE_PSK_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_RSA_PSK_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_RSA_PSK_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_PSK_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_PSK_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256)
all_suites.append(TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384)
all_suites.append(TLS_ECDHE_PSK_WITH_ARIA_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_PSK_WITH_ARIA_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_ECDH_ECDSA_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_ECDH_ECDSA_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_ECDH_RSA_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_ECDH_RSA_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_RSA_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_RSA_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_DHE_RSA_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_DHE_RSA_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_DH_RSA_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_DH_RSA_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_DHE_DSS_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_DHE_DSS_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_DH_DSS_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_DH_DSS_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_DH_anon_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_DH_anon_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_ECDH_ECDSA_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_ECDH_ECDSA_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_ECDHE_RSA_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_ECDHE_RSA_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_ECDH_RSA_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_ECDH_RSA_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_PSK_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_PSK_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_DHE_PSK_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_DHE_PSK_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_RSA_PSK_WITH_CAMELLIA_128_GCM_SHA256)
all_suites.append(TLS_RSA_PSK_WITH_CAMELLIA_256_GCM_SHA384)
all_suites.append(TLS_PSK_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_PSK_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256)
all_suites.append(TLS_ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384)
all_suites.append(TLS_RSA_WITH_AES_128_CCM)
all_suites.append(TLS_RSA_WITH_AES_256_CCM)
all_suites.append(TLS_DHE_RSA_WITH_AES_128_CCM)
all_suites.append(TLS_DHE_RSA_WITH_AES_256_CCM)
all_suites.append(TLS_RSA_WITH_AES_128_CCM_8)
all_suites.append(TLS_RSA_WITH_AES_256_CCM_8)
all_suites.append(TLS_DHE_RSA_WITH_AES_128_CCM_8)
all_suites.append(TLS_DHE_RSA_WITH_AES_256_CCM_8)
all_suites.append(TLS_PSK_WITH_AES_128_CCM)
all_suites.append(TLS_PSK_WITH_AES_256_CCM)
all_suites.append(TLS_DHE_PSK_WITH_AES_128_CCM)
all_suites.append(TLS_DHE_PSK_WITH_AES_256_CCM)
all_suites.append(TLS_PSK_WITH_AES_128_CCM_8)
all_suites.append(TLS_PSK_WITH_AES_256_CCM_8)
all_suites.append(TLS_PSK_DHE_WITH_AES_128_CCM_8)
all_suites.append(TLS_PSK_DHE_WITH_AES_256_CCM_8)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_128_CCM)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_256_CCM)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_128_CCM_8)
all_suites.append(TLS_ECDHE_ECDSA_WITH_AES_256_CCM_8)
#existing ciphers
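    # The groupings below classify suites by symmetric cipher and MAC so _filterSuites can match them against client settings.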
tripleDESSuites = []
tripleDESSuites.append(TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA)
tripleDESSuites.append(TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA)
tripleDESSuites.append(TLS_RSA_WITH_3DES_EDE_CBC_SHA)
aes128Suites = []
aes128Suites.append(TLS_SRP_SHA_WITH_AES_128_CBC_SHA)
aes128Suites.append(TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA)
aes128Suites.append(TLS_RSA_WITH_AES_128_CBC_SHA)
aes128Suites.append(TLS_DH_anon_WITH_AES_128_CBC_SHA)
aes256Suites = []
aes256Suites.append(TLS_SRP_SHA_WITH_AES_256_CBC_SHA)
aes256Suites.append(TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA)
aes256Suites.append(TLS_RSA_WITH_AES_256_CBC_SHA)
aes256Suites.append(TLS_DH_anon_WITH_AES_256_CBC_SHA)
rc4Suites = []
rc4Suites.append(TLS_RSA_WITH_RC4_128_SHA)
rc4Suites.append(TLS_RSA_WITH_RC4_128_MD5)
shaSuites = []
shaSuites.append(TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA)
shaSuites.append(TLS_SRP_SHA_WITH_AES_128_CBC_SHA)
shaSuites.append(TLS_SRP_SHA_WITH_AES_256_CBC_SHA)
shaSuites.append(TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA)
shaSuites.append(TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA)
shaSuites.append(TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA)
shaSuites.append(TLS_RSA_WITH_3DES_EDE_CBC_SHA)
shaSuites.append(TLS_RSA_WITH_AES_128_CBC_SHA)
shaSuites.append(TLS_RSA_WITH_AES_256_CBC_SHA)
shaSuites.append(TLS_RSA_WITH_RC4_128_SHA)
shaSuites.append(TLS_DH_anon_WITH_AES_128_CBC_SHA)
shaSuites.append(TLS_DH_anon_WITH_AES_256_CBC_SHA)
ecdheSuites = []
ecdheSuites.append(TLS_ECDHE_RSA_WITH_NULL_SHA)
ecdheSuites.append(TLS_ECDHE_RSA_WITH_RC4_128_SHA)
ecdheSuites.append(TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA)
ecdheSuites.append(TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA)
ecdheSuites.append(TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA)
ecdheSuites.append(TLS_ECDHE_ECDSA_WITH_NULL_SHA)
ecdheSuites.append(TLS_ECDHE_ECDSA_WITH_RC4_128_SHA)
ecdheSuites.append(TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA)
ecdheSuites.append(TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA)
ecdheSuites.append(TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA)
poodleTestSuites = []
poodleTestSuites.append(TLS_RSA_WITH_RC4_128_SHA)
poodleTestSuites.append(TLS_RSA_WITH_3DES_EDE_CBC_SHA)
poodleTestSuites.append(TLS_RSA_WITH_AES_128_CBC_SHA)
poodleTestSuites.append(TLS_RSA_WITH_AES_256_CBC_SHA)
    poodleTestSuites.append(TLS_FALLBACK_SCSV)  # appended last, since cipher suites are listed in the client's order of preference
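    # Export-grade RSA key-exchange suites used to probe for the FREAK vulnerability (CVE-2015-0204).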
freakTestSuites = []
freakTestSuites.append(TLS_RSA_EXPORT_WITH_RC4_40_MD5)
freakTestSuites.append(TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5)
freakTestSuites.append(TLS_RSA_EXPORT_WITH_DES40_CBC_SHA)
freakTestSuites.append(TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA)
freakTestSuites.append(TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA)
freakTestSuites.append(TLS_RSA_EXPORT1024_WITH_RC4_56_MD5)
freakTestSuites.append(TLS_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5)
freakTestSuites.append(TLS_RSA_EXPORT1024_WITH_DES_CBC_SHA)
freakTestSuites.append(TLS_RSA_EXPORT1024_WITH_RC4_56_SHA)
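    # DHE_EXPORT suites used to probe for the Logjam vulnerability (weak export Diffie-Hellman).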
logjamTestSuites = []
    #logjamTestSuites.append(TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA) # uses DH_EXPORT, hence omitted.
logjamTestSuites.append(TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA)
#ecdheSuites.append(ECDHE-ECDSA-DES-CBC3-SHA)
#ecdheSuites.append(ECDHE-ECDSA-AES128-SHA)
#ecdheSuites.append(ECDHE-ECDSA-AES256-SHA)
#ecdheSuites.append(ECDHE-RSA-RC4-SHA)
#ecdheSuites.append(ECDHE-RSA-DES-CBC3-SHA)
#ecdheSuites.append(ECDHE-RSA-AES128-SHA)
#ecdheSuites.append(ECDHE-RSA-AES256-SHA)
#ecdheSuites.append(ECDHE-ECDSA-NULL-SHA)
#ecdheSuites.append(ECDHE-ECDSA-RC4-SHA)
#ecdheSuites.append(ECDHE-RSA-NULL-SHA)
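    # NOTE: the appends below add plain RSA/DH/DHE/ECDH SHA256 and GCM suites to
    # ecdheSuites as well, apparently to widen the TLS 1.2 probe; the list name no
    # longer matches its contents.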
ecdheSuites.append(TLS_RSA_WITH_NULL_SHA256)
ecdheSuites.append(TLS_RSA_WITH_AES_128_CBC_SHA256)
ecdheSuites.append(TLS_RSA_WITH_AES_256_CBC_SHA256)
ecdheSuites.append(TLS_RSA_WITH_AES_128_GCM_SHA256)
ecdheSuites.append(TLS_RSA_WITH_AES_256_GCM_SHA384)
ecdheSuites.append(TLS_DH_RSA_WITH_AES_128_CBC_SHA256)
ecdheSuites.append(TLS_DH_RSA_WITH_AES_256_CBC_SHA256)
ecdheSuites.append(TLS_DH_RSA_WITH_AES_128_GCM_SHA256)
ecdheSuites.append(TLS_DH_RSA_WITH_AES_256_GCM_SHA384)
ecdheSuites.append(TLS_DH_DSS_WITH_AES_128_CBC_SHA256)
ecdheSuites.append(TLS_DH_DSS_WITH_AES_256_CBC_SHA256)
ecdheSuites.append(TLS_DH_DSS_WITH_AES_128_GCM_SHA256)
ecdheSuites.append(TLS_DH_DSS_WITH_AES_256_GCM_SHA384)
ecdheSuites.append(TLS_DHE_RSA_WITH_AES_128_CBC_SHA256)
ecdheSuites.append(TLS_DHE_RSA_WITH_AES_256_CBC_SHA256)
ecdheSuites.append(TLS_DHE_RSA_WITH_AES_128_GCM_SHA256)
ecdheSuites.append(TLS_DHE_RSA_WITH_AES_256_GCM_SHA384)
ecdheSuites.append(TLS_DHE_DSS_WITH_AES_128_CBC_SHA256)
ecdheSuites.append(TLS_DHE_DSS_WITH_AES_256_CBC_SHA256)
ecdheSuites.append(TLS_DHE_DSS_WITH_AES_128_GCM_SHA256)
ecdheSuites.append(TLS_DHE_DSS_WITH_AES_256_GCM_SHA384)
ecdheSuites.append(TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256)
ecdheSuites.append(TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384)
ecdheSuites.append(TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256)
ecdheSuites.append(TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384)
ecdheSuites.append(TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256)
ecdheSuites.append(TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384)
ecdheSuites.append(TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256)
ecdheSuites.append(TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384)
#ecdheSuites.append(NULL-SHA256)
#ecdheSuites.append(AES128-SHA256)
#ecdheSuites.append(AES256-SHA256)
#ecdheSuites.append(AES128-GCM-SHA256)
#ecdheSuites.append(AES256-GCM-SHA384)
#ecdheSuites.append(DH-RSA-AES128-SHA256)
#ecdheSuites.append(DH-RSA-AES256-SHA256)
#ecdheSuites.append(DH-RSA-AES128-GCM-SHA256)
#ecdheSuites.append(DH-RSA-AES256-GCM-SHA384)
#ecdheSuites.append(DH-DSS-AES128-SHA256)
#ecdheSuites.append(DH-DSS-AES256-SHA256)
#ecdheSuites.append(DH-DSS-AES128-GCM-SHA256)
#ecdheSuites.append(DH-DSS-AES256-GCM-SHA384)
#ecdheSuites.append(DHE-RSA-AES128-SHA256)
#ecdheSuites.append(DHE-RSA-AES256-SHA256)
#ecdheSuites.append(DHE-RSA-AES128-GCM-SHA256)
#ecdheSuites.append(DHE-RSA-AES256-GCM-SHA384)
#ecdheSuites.append(DHE-DSS-AES128-SHA256)
#ecdheSuites.append(DHE-DSS-AES256-SHA256)
#ecdheSuites.append(DHE-DSS-AES128-GCM-SHA256)
#ecdheSuites.append(DHE-DSS-AES256-GCM-SHA384)
#ecdheSuites.append(ECDH-RSA-AES128-SHA256)
#ecdheSuites.append(ECDH-RSA-AES256-SHA384)
#ecdheSuites.append(ECDH-RSA-AES128-GCM-SHA256)
#ecdheSuites.append(ECDH-RSA-AES256-GCM-SHA384)
#ecdheSuites.append(ECDH-ECDSA-AES128-SHA256)
#ecdheSuites.append(ECDH-ECDSA-AES256-SHA384)
#ecdheSuites.append(ECDH-ECDSA-AES128-GCM-SHA256)
#ecdheSuites.append(ECDH-ECDSA-AES256-GCM-SHA384)
md5Suites = []
md5Suites.append(TLS_RSA_WITH_RC4_128_MD5)
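    # The suite lists above and the static methods below are attributes of a
    # CipherSuite class defined earlier in this module, hence the CipherSuite.* references.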
@staticmethod
def _filterSuites(suites, settings):
macNames = settings.macNames
cipherNames = settings.cipherNames
macSuites = []
if "sha" in macNames:
macSuites += CipherSuite.shaSuites
if "md5" in macNames:
macSuites += CipherSuite.md5Suites
cipherSuites = []
if "aes128" in cipherNames:
cipherSuites += CipherSuite.aes128Suites
if "aes256" in cipherNames:
cipherSuites += CipherSuite.aes256Suites
if "3des" in cipherNames:
cipherSuites += CipherSuite.tripleDESSuites
if "rc4" in cipherNames:
cipherSuites += CipherSuite.rc4Suites
return [s for s in suites if s in macSuites and s in cipherSuites]
srpSuites = []
srpSuites.append(TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA)
srpSuites.append(TLS_SRP_SHA_WITH_AES_128_CBC_SHA)
srpSuites.append(TLS_SRP_SHA_WITH_AES_256_CBC_SHA)
@staticmethod
def getSrpSuites(settings):
return CipherSuite._filterSuites(CipherSuite.srpSuites, settings)
srpCertSuites = []
srpCertSuites.append(TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA)
srpCertSuites.append(TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA)
srpCertSuites.append(TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA)
@staticmethod
def getSrpCertSuites(settings):
return CipherSuite._filterSuites(CipherSuite.srpCertSuites, settings)
srpAllSuites = srpSuites + srpCertSuites
@staticmethod
def getSrpAllSuites(settings):
return CipherSuite._filterSuites(CipherSuite.srpAllSuites, settings)
certSuites = []
certSuites.append(TLS_RSA_WITH_3DES_EDE_CBC_SHA)
certSuites.append(TLS_RSA_WITH_AES_128_CBC_SHA)
certSuites.append(TLS_RSA_WITH_AES_256_CBC_SHA)
certSuites.append(TLS_RSA_WITH_RC4_128_SHA)
certSuites.append(TLS_RSA_WITH_RC4_128_MD5)
certAllSuites = srpCertSuites + certSuites
@staticmethod
def getCertSuites(settings):
return CipherSuite._filterSuites(CipherSuite.certSuites, settings)
anonSuites = []
anonSuites.append(TLS_DH_anon_WITH_AES_128_CBC_SHA)
anonSuites.append(TLS_DH_anon_WITH_AES_256_CBC_SHA)
@staticmethod
def getAnonSuites(settings):
return CipherSuite._filterSuites(CipherSuite.anonSuites, settings)
@staticmethod
def canonicalCipherName(ciphersuite):
"Return the canonical name of the cipher whose number is provided."
if ciphersuite in CipherSuite.aes128Suites:
return "aes128"
elif ciphersuite in CipherSuite.aes256Suites:
return "aes256"
elif ciphersuite in CipherSuite.rc4Suites:
return "rc4"
elif ciphersuite in CipherSuite.tripleDESSuites:
return "3des"
else:
return None
@staticmethod
def canonicalMacName(ciphersuite):
"Return the canonical name of the MAC whose number is provided."
if ciphersuite in CipherSuite.shaSuites:
return "sha"
elif ciphersuite in CipherSuite.md5Suites:
return "md5"
else:
return None
# The following faults are induced as part of testing. The faultAlerts
# dictionary describes the allowed alerts that may be triggered by these
# faults.
class Fault:
badUsername = 101
    badPassword = 102
badA = 103
clientSrpFaults = list(range(101,104))
badVerifyMessage = 601
clientCertFaults = list(range(601,602))
badPremasterPadding = 501
shortPremasterSecret = 502
clientNoAuthFaults = list(range(501,503))
badB = 201
serverFaults = list(range(201,202))
badFinished = 300
badMAC = 301
badPadding = 302
genericFaults = list(range(300,303))
    faultAlerts = {
        badUsername: (AlertDescription.unknown_psk_identity,
                      AlertDescription.bad_record_mac),
        badPassword: (AlertDescription.bad_record_mac,),
        badA: (AlertDescription.illegal_parameter,),
        badPremasterPadding: (AlertDescription.bad_record_mac,),
        shortPremasterSecret: (AlertDescription.bad_record_mac,),
        badVerifyMessage: (AlertDescription.decrypt_error,),
        badFinished: (AlertDescription.decrypt_error,),
        badMAC: (AlertDescription.bad_record_mac,),
        badPadding: (AlertDescription.bad_record_mac,)
    }
    faultNames = {
        badUsername: "bad username",
        badPassword: "bad password",
        badA: "bad A",
        badPremasterPadding: "bad premaster padding",
        shortPremasterSecret: "short premaster secret",
        badVerifyMessage: "bad verify message",
        badFinished: "bad finished message",
        badMAC: "bad MAC",
        badPadding: "bad padding"
    }
|
StarcoderdataPython
|
132347
|
<filename>build/driver/joystick/joystick_drivers/wiimote/cmake/wiimote-genmsg-context.py
# generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = "/home/lty/catkin_ws/src/driver/joystick/joystick_drivers/wiimote/msg/IrSourceInfo.msg;/home/lty/catkin_ws/src/driver/joystick/joystick_drivers/wiimote/msg/State.msg;/home/lty/catkin_ws/src/driver/joystick/joystick_drivers/wiimote/msg/TimedSwitch.msg"
services_str = ""
pkg_name = "wiimote"
dependencies_str = "geometry_msgs;std_msgs;sensor_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "wiimote;/home/lty/catkin_ws/src/driver/joystick/joystick_drivers/wiimote/msg;geometry_msgs;/opt/ros/kinetic/share/geometry_msgs/cmake/../msg;std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg;sensor_msgs;/opt/ros/kinetic/share/sensor_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python"
package_has_static_sources = 'TRUE' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
|
StarcoderdataPython
|
12840316
|
from configutator import ConfigMap, ArgMap, EnvMap, loadConfig
import sys
def test(param1: int, param2: str):
"""
This is a test
:param param1: An integer
:param param2: A string
:return: Print the params
"""
print(param1, param2)
if __name__ == '__main__':
for argMap in loadConfig(sys.argv, (test,), "Test"):
test(**argMap[test])
|
StarcoderdataPython
|
173488
|
"""
Functions to read talks data.
"""
import tempfile
import json
from ..server_utils import epcon_fetch_file
def _call_for_talks(out_filepath, status="accepted", conference="ep2017", host="europython.io", with_votes=False):
""" Create json file with talks data. `status` choices: ['accepted', 'proposed']
"""
cmd = "talk_abstracts {} --talk_status {}".format(conference, status)
if with_votes:
cmd += " --votes"
return epcon_fetch_file(cmd=cmd, fpath=out_filepath, host=host)
def load_events(talks_filepath):
""" Return a list of event records from the talks file."""
    with open(talks_filepath, "r") as f:
        sessions = json.load(f)
events = [event for name in sessions for event in sessions[name].values()]
return events
def fetch_talks_json(out_filepath="", status="proposed", conference="ep2017", host="europython.io", with_votes=False):
""" Return the talks in a json format. `status` choices: ['accepted', 'proposed']
"""
if not out_filepath:
out_filepath = tempfile.NamedTemporaryFile(suffix=".json").name
_call_for_talks(out_filepath=out_filepath, status=status, conference=conference, host=host, with_votes=with_votes)
with open(out_filepath, "r") as f:
talks = json.load(f)
return talks
|
StarcoderdataPython
|
12264
|
<filename>app/request/queue.py
import logging
from time import sleep
logger = logging.getLogger(__name__)
class StopRequestQueue:
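    # Issues queued stop requests against a service one at a time, fanning each
    # response out to the request's components and pacing calls by request_delay seconds.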
cursor = 0
queue = None
service = None
current_request = None
request_delay = 0 # seconds
def __init__(self, service, request_delay=10):
self.queue = []
self.service = service
self.request_delay = request_delay
def add_request(self, request):
self.queue.append(request)
logger.info("Request added for %r. Queue length at %d" % (request.stop_id, len(self.queue)))
def success(self, data):
logger.debug("Success returned")
# could become none upon stop(), considered inactive
if self.current_request is not None:
for component in self.current_request.components:
component.data(data)
sleep(self.request_delay)
self.next()
def failure(self, error):
logger.debug("Failure returned")
# could become none upon stop(), considered inactive
if self.current_request is not None:
for component in self.current_request.components:
component.error(error)
sleep(self.request_delay)
self.next()
def next(self, increment=True):
logger.info('next()')
self.cursor = self.cursor + 1 if increment else self.cursor
if self.cursor < len(self.queue):
self.current_request = self.queue[self.cursor]
self.service.access(self.current_request.stop_id, self)
"""
Not allowing wrapped cursor.
:next() is run through, then this queue is exited and the service
availability is checked again, starting the sequence again.
"""
# self.cursor = 0 if self.cursor == len(self.queue) - 1 else self.cursor + 1
def start(self):
logger.info('start()')
self.cursor = 0
self.next(False)
logger.info('start() - out')
def stop(self):
del self.queue[:]
self.current_request = None
|
StarcoderdataPython
|
8159095
|
<reponame>dqshuai/MetaFormer
import os
import torch
import importlib
import torch.distributed as dist
try:
# noinspection PyUnresolvedReferences
from apex import amp
except ImportError:
amp = None
def relative_bias_interpolate(checkpoint,config):
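    # Resizes the relative position bias tables in a checkpoint via bicubic
    # interpolation so weights pretrained at one image size can be loaded at
    # another; the class-token bias row is split off and re-attached unchanged.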
for k in list(checkpoint['model']):
if 'relative_position_index' in k:
del checkpoint['model'][k]
if 'relative_position_bias_table' in k:
relative_position_bias_table = checkpoint['model'][k]
cls_bias = relative_position_bias_table[:1,:]
relative_position_bias_table = relative_position_bias_table[1:,:]
size = int(relative_position_bias_table.shape[0]**0.5)
img_size = (size+1)//2
if 'stage_3' in k:
downsample_ratio = 16
elif 'stage_4' in k:
downsample_ratio = 32
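            # NOTE: assumes only stage_3/stage_4 keys carry bias tables; any other
            # stage would leave downsample_ratio undefined below.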
new_img_size = config.DATA.IMG_SIZE//downsample_ratio
new_size = 2*new_img_size-1
if new_size == size:
continue
relative_position_bias_table = relative_position_bias_table.reshape(size,size,-1)
relative_position_bias_table = relative_position_bias_table.unsqueeze(0).permute(0,3,1,2)#bs,nhead,h,w
relative_position_bias_table = torch.nn.functional.interpolate(
relative_position_bias_table, size=(new_size, new_size), mode='bicubic', align_corners=False)
relative_position_bias_table = relative_position_bias_table.permute(0,2,3,1)
relative_position_bias_table = relative_position_bias_table.squeeze(0).reshape(new_size*new_size,-1)
relative_position_bias_table = torch.cat((cls_bias,relative_position_bias_table),dim=0)
checkpoint['model'][k] = relative_position_bias_table
return checkpoint
def load_pretained(config,model,logger=None,strict=False):
if logger is not None:
logger.info(f"==============> pretrain form {config.MODEL.PRETRAINED}....................")
checkpoint = torch.load(config.MODEL.PRETRAINED, map_location='cpu')
if 'model' not in checkpoint:
if 'state_dict_ema' in checkpoint:
checkpoint['model'] = checkpoint['state_dict_ema']
else:
checkpoint['model'] = checkpoint
if config.MODEL.DORP_HEAD:
if 'head.weight' in checkpoint['model'] and 'head.bias' in checkpoint['model']:
if logger is not None:
logger.info(f"==============> drop head....................")
del checkpoint['model']['head.weight']
del checkpoint['model']['head.bias']
if 'head.fc.weight' in checkpoint['model'] and 'head.fc.bias' in checkpoint['model']:
if logger is not None:
logger.info(f"==============> drop head....................")
del checkpoint['model']['head.fc.weight']
del checkpoint['model']['head.fc.bias']
if config.MODEL.DORP_META:
if logger is not None:
logger.info(f"==============> drop meta head....................")
for k in list(checkpoint['model']):
if 'meta' in k:
del checkpoint['model'][k]
checkpoint = relative_bias_interpolate(checkpoint,config)
if 'point_coord' in checkpoint['model']:
if logger is not None:
logger.info(f"==============> drop point coord....................")
del checkpoint['model']['point_coord']
msg = model.load_state_dict(checkpoint['model'], strict=strict)
del checkpoint
torch.cuda.empty_cache()
def load_checkpoint(config, model, optimizer, lr_scheduler, logger):
logger.info(f"==============> Resuming form {config.MODEL.RESUME}....................")
if config.MODEL.RESUME.startswith('https'):
checkpoint = torch.hub.load_state_dict_from_url(
config.MODEL.RESUME, map_location='cpu', check_hash=True)
else:
checkpoint = torch.load(config.MODEL.RESUME, map_location='cpu')
if 'model' not in checkpoint:
if 'state_dict_ema' in checkpoint:
checkpoint['model'] = checkpoint['state_dict_ema']
else:
checkpoint['model'] = checkpoint
msg = model.load_state_dict(checkpoint['model'], strict=False)
logger.info(msg)
max_accuracy = 0.0
if not config.EVAL_MODE and 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint and 'epoch' in checkpoint:
optimizer.load_state_dict(checkpoint['optimizer'])
lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
config.defrost()
config.TRAIN.START_EPOCH = checkpoint['epoch'] + 1
config.freeze()
if 'amp' in checkpoint and config.AMP_OPT_LEVEL != "O0" and checkpoint['config'].AMP_OPT_LEVEL != "O0":
amp.load_state_dict(checkpoint['amp'])
logger.info(f"=> loaded successfully '{config.MODEL.RESUME}' (epoch {checkpoint['epoch']})")
if 'max_accuracy' in checkpoint:
max_accuracy = checkpoint['max_accuracy']
del checkpoint
torch.cuda.empty_cache()
return max_accuracy
def save_checkpoint(config, epoch, model, max_accuracy, optimizer, lr_scheduler, logger):
save_state = {'model': model.state_dict(),
'optimizer': optimizer.state_dict(),
'lr_scheduler': lr_scheduler.state_dict(),
'max_accuracy': max_accuracy,
'epoch': epoch,
'config': config}
if config.AMP_OPT_LEVEL != "O0":
save_state['amp'] = amp.state_dict()
save_path = os.path.join(config.OUTPUT, f'ckpt_epoch_{epoch}.pth')
logger.info(f"{save_path} saving......")
torch.save(save_state, save_path)
logger.info(f"{save_path} saved !!!")
    latest_save_path = os.path.join(config.OUTPUT, 'latest.pth')
    logger.info(f"{latest_save_path} saving......")
    torch.save(save_state, latest_save_path)
    logger.info(f"{latest_save_path} saved !!!")
def get_grad_norm(parameters, norm_type=2):
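    # Returns the total p-norm of all parameter gradients, typically used for logging or clipping decisions.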
if isinstance(parameters, torch.Tensor):
parameters = [parameters]
parameters = list(filter(lambda p: p.grad is not None, parameters))
norm_type = float(norm_type)
total_norm = 0
for p in parameters:
param_norm = p.grad.data.norm(norm_type)
total_norm += param_norm.item() ** norm_type
total_norm = total_norm ** (1. / norm_type)
return total_norm
def auto_resume_helper(output_dir):
checkpoints = os.listdir(output_dir)
checkpoints = [ckpt for ckpt in checkpoints if ckpt.endswith('pth')]
print(f"All checkpoints founded in {output_dir}: {checkpoints}")
if len(checkpoints) > 0:
latest_checkpoint = max([os.path.join(output_dir, d) for d in checkpoints], key=os.path.getmtime)
print(f"The latest checkpoint founded: {latest_checkpoint}")
resume_file = latest_checkpoint
else:
resume_file = None
return resume_file
def reduce_tensor(tensor):
rt = tensor.clone()
dist.all_reduce(rt, op=dist.ReduceOp.SUM)
rt /= dist.get_world_size()
return rt
def load_ext(name, funcs):
ext = importlib.import_module(name)
for fun in funcs:
assert hasattr(ext, fun), f'{fun} miss in module {name}'
return ext
|
StarcoderdataPython
|
11264705
|
<filename>data/temp/czml3_test.py<gh_stars>0
# from czml3.examples import simple
# output=simple
import gdal
from gdalos.calc import gdal_to_czml  # import the module so gdal_to_czml.gdal_to_czml(...) below resolves
raster_filename = r'd:\dev\czml\1.tif'
ds = gdal.Open(raster_filename, gdal.GA_ReadOnly)
ds, output = gdal_to_czml.gdal_to_czml(ds, name="")
del ds
print(output)
output_filename = "czml3.czml"
with open(output_filename, 'w') as f:
print(output, file=f)
|
StarcoderdataPython
|
3251211
|
<gh_stars>10-100
"""Initial migration.
Revision ID: <PASSWORD>
Revises:
Create Date: 2018-02-07 21:05:49.629867
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '<PASSWORD>'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('oauth_clients',
sa.Column('name', sa.String(), nullable=True),
sa.Column('client_id', sa.String(length=40), nullable=False),
sa.Column('client_secret', sa.String(length=55), nullable=False),
sa.Column('redirect_uris', postgresql.ARRAY(sa.String()), nullable=True),
sa.Column('is_confidential', sa.Boolean(), nullable=True),
sa.Column('default_scopes', postgresql.ARRAY(sa.String()), nullable=True),
sa.PrimaryKeyConstraint('client_id')
)
op.create_index(op.f('ix_oauth_clients_client_secret'), 'oauth_clients', ['client_secret'], unique=True)
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('pebble_auth_uid', sa.String(length=24), nullable=True),
sa.Column('pebble_dev_portal_uid', sa.String(length=24), nullable=True),
sa.Column('pebble_token', sa.String(), nullable=True),
sa.Column('has_logged_in', sa.Boolean(), server_default='false', nullable=False),
sa.Column('account_type', sa.Integer(), server_default='0', nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_pebble_token'), 'users', ['pebble_token'], unique=False)
op.create_table('issued_tokens',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=40), nullable=False),
sa.Column('scopes', postgresql.ARRAY(sa.String()), nullable=True),
sa.Column('expires', sa.DateTime(), nullable=True),
sa.Column('access_token', sa.String(), nullable=True),
sa.Column('refresh_token', sa.String(), nullable=True),
sa.ForeignKeyConstraint(['client_id'], ['oauth_clients.client_id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('access_token'),
sa.UniqueConstraint('refresh_token')
)
op.create_table('user_identities',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('idp', sa.String(), nullable=True),
sa.Column('idp_user_id', sa.String(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('user_identity_idp_id_index', 'user_identities', ['idp', 'idp_user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('user_identity_idp_id_index', table_name='user_identities')
op.drop_table('user_identities')
op.drop_table('issued_tokens')
op.drop_index(op.f('ix_users_pebble_token'), table_name='users')
op.drop_table('users')
op.drop_index(op.f('ix_oauth_clients_client_secret'), table_name='oauth_clients')
op.drop_table('oauth_clients')
# ### end Alembic commands ###
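# Typical usage with the standard Alembic CLI (assuming an alembic.ini is
# configured for this migrations directory):
#     alembic upgrade head      # apply this migration
#     alembic downgrade base    # revert it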
|
StarcoderdataPython
|
8002526
|
from clubs.models import Club, FinancialStatementFact, FinancialStatementLine
from rest_framework import viewsets, permissions
from .searilizers import ClubSerializer, FinancialStatementFactSerializer, FinancialStatementLineSerializer
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework import renderers
from rest_framework.filters import SearchFilter, OrderingFilter
# from django_filters import rest_framework as filters
def is_valid_queryparam(param):
return param != '' and param is not None
def filter(kwargs):  # note: shadows the built-in filter; kept for the commented-out get_queryset below
id_exact_query = kwargs.get("id", "")
# print(id_exact_query)
qs = FinancialStatementFact.objects.all()
# id_exact_query = ""
fiscal_year_query = ""
'''
fiscal_year_query = request.GET.get('fiscal_year')
fiscal_period_query = request.GET.get('fiscal_period')
start_date_query = request.GET.get('start_date')
end_date_query = request.GET.get('end_date')
amount_query = request.GET.get('amount')
club_id_query = request.GET.get('club_id')
financial_statement_line_id_query = request.GET.get('financial_statement_line_id')
period_length_months_query = request.GET.get('period_length_months')
reporting_standard_query = request.GET.get('reporting_standard')
reporting_standard_query = request.GET.get('currency')
financial_statement_type_query = request.GET.get('financial_statement_type')'''
if is_valid_queryparam(id_exact_query):
qs = qs.filter(id=id_exact_query)
if is_valid_queryparam(fiscal_year_query):
qs = qs.filter(fiscal_year=fiscal_year_query)
'''
if is_valid_queryparam(fiscal_period_query):
qs = qs.filter(fiscal_period=fiscal_period_query)
if is_valid_queryparam(start_date_query):
qs = qs.filter(start_date=start_date_query)
if is_valid_queryparam(end_date_query):
qs = qs.filter(end_date=end_date_query)
if is_valid_queryparam(amount_query):
qs = qs.filter(amount=amount_query)
if is_valid_queryparam(club_id_query):
qs = qs.filter(club_id=club_id_query)
if is_valid_queryparam(financial_statement_line_id_query):
qs = qs.filter(financial_statement_line_id=financial_statement_line_id_query)
if is_valid_queryparam(period_length_months_query):
qs = qs.filter(period_length_months=period_length_months_query)
if is_valid_queryparam(reporting_standard_query):
qs = qs.filter(reporting_standard=reporting_standard_query)
if is_valid_queryparam(reporting_standard_query):
qs = qs.filter(currency=reporting_standard_query)
if is_valid_queryparam(financial_statement_type_query):
qs = qs.filter(financial_statement_type=financial_statement_type_query)'''
return qs
def filter_clubs(**kwargs):
clubs = Club.objects.all()
clubs = clubs.filter(**kwargs)
return clubs
def filter_fsdata(**kwargs):
qs = FinancialStatementFact.objects.all()
qs = qs.filter(**kwargs)
return qs
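# Example (hypothetical parameter values): both helpers forward validated
# query params directly as ORM filters, so
#     filter_fsdata(fiscal_year=2019, club_id=3)
# is equivalent to
#     FinancialStatementFact.objects.filter(fiscal_year=2019, club_id=3)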
# Club Viewset
class ClubViewSet(viewsets.ModelViewSet):
# queryset = Club.objects.all()
permission_classes = [
permissions.AllowAny
]
serializer_class = ClubSerializer
def get_queryset(self):
clubs = Club.objects.all()
params = self.request.query_params.dict()
invalid_param_keys = []
for key in params:
print(key)
print(params[key])
            if is_valid_queryparam(params[key]):
                print("key and value:")
                print(key)
                print(params[key])
            else:
                print("not valid")
                print(params[key])
                invalid_param_keys.append(key)
print(invalid_param_keys)
for item in invalid_param_keys:
params.pop(item)
print(params)
clubs = filter_clubs(**params)
return clubs
# renderer_classes=[renderers.StaticHTMLRenderer]
# @action(detail=False, methods=['get'])
def retrieve(self, request, *args, **kwargs):
print("search_val::")
# search_val = request.GET('name', '')
# print(search_val)
print("get_routes_clubs!!")
params = kwargs
print("paramas:::")
print(params)
print("params_list:")
params_list = params['pk'].split('&')
print(params_list)
clubs = Club.objects.all()
serializer = ClubSerializer(clubs, many=True)
return Response(serializer.data)
class FinancialStatementFactViewSet(viewsets.ModelViewSet):
'''def get_queryset(self):
qs = filter({})
print("QQQQQQX:::")
# print(qs)
return qs'''
def get_queryset(self):
qs = FinancialStatementFact.objects.all()
params = self.request.query_params.dict()
print("params")
print(params)
invalid_param_keys = []
for key in params:
print(key)
print(params[key])
            if is_valid_queryparam(params[key]):
                print("key and value:")
                print(key)
                print(params[key])
            else:
                print("not valid")
                print(params[key])
                invalid_param_keys.append(key)
print(invalid_param_keys)
for item in invalid_param_keys:
params.pop(item)
print(params)
qs = filter_fsdata(**params)
return qs
queryset = FinancialStatementFact.objects.all()
# print(queryset)
# queryset = self.get_queryset()
serializer_class = FinancialStatementFactSerializer
permission_classes = [
permissions.AllowAny
]
@action(detail=True, renderer_classes=[renderers.StaticHTMLRenderer])
    def get_routes(self, request):
print("get_routes!!")
print(request.query_params)
@action(detail=True, renderer_classes=[renderers.StaticHTMLRenderer])
def highlight(self, request, *args, **kwargs):
print("HIGHLIGHT:::")
fact = self.get_object()
return Response(fact.highlighted)
'''
def perform_create(self, serializer):
serializer.save(owner=self.request.user)'''
class FinancialStatementLineViewSet(viewsets.ModelViewSet):
'''def get_queryset(self):
qs = filter({})
return qs'''
serializer_class = FinancialStatementLineSerializer
permission_classes = [
permissions.AllowAny
]
queryset = FinancialStatementLine.objects.all()
class FinancialStatementFact_Clubs_ViewSet(FinancialStatementFactViewSet):
queryset = FinancialStatementFactViewSet.queryset.values(
'club_id').distinct().order_by('club_id')
print(queryset)
|
StarcoderdataPython
|
93073
|
<filename>websauna/system/core/traversal.py<gh_stars>100-1000
"""Traversing core logic."""
# Pyramid
from pyramid.interfaces import ILocation
from zope.interface import implementer
@implementer(ILocation)
class Resource:
"""Traversable resource in a nested tree hierarchy with basic breadcrumbs support.
    All traversable context classes should inherit from this class. Note that this is not a strict requirement, as often anything implementing :py:class:`pyramid.interfaces.ILocation` and ``get_title()`` will work.
For more information see :ref:`Traversal <traversal>`.
.. _traversal:
"""
    # TODO: Cannot annotate request as it breaks sphinx-autodoc-typehints (==1.1.0) when doing make html
def __init__(self, request):
        #: Pointer to the parent object in traverse hierarchy. This is None until make_lineage is called.
self.__parent__ = None
#: The id of this resource as its appear in URL and traversing path
self.__name__ = None
self.request = request
def get_title(self) -> str:
"""Return human-readable title of this resource.
This is viewed in admin breadcrumbs path, etc.
"""
title = getattr(self, "title", None)
if title:
return title
raise NotImplementedError("get_title() implementation missing for {}".format(self))
@classmethod
def make_lineage(self, parent, child, name, allow_new_parent=False) -> "Resource":
"""Set traversing pointers between the child and the parent resources.
Builds __parent__ and __name__ pointer and sets it on the child resource.
        * If the lineage relationship is not lazy and the referenced child is stored in the parent, the lineage must be set when the child is put into the parent container.
        * If the lineage relationship is lazy and the child resource is constructed upon lookup in ``__getitem__``, the lineage is constructed before the child is returned.
        :param parent: Parent resource that the child becomes part of
        :param child: Child resource, mutated in place
        :param name: Id of the child resource as it will appear in the URL traversing path
        :param allow_new_parent: If the child already has a parent assigned, allow overriding it (effectively moving an existing resource). You don't usually want this for in-memory resources; this check exists mainly for catching bugs.
:return: The mutated child resource
"""
assert child
assert parent
assert name
if not allow_new_parent:
            # Catch bugs when you try to double-init lineage for a persistent parent -> child relationship
assert not getattr(child, "__parent__", None), "Tried to double init lineage for {} -> {}, previous parent was {}".format(parent, child, child.__parent__)
child.__parent__ = parent
child.__name__ = name
return child
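# A minimal usage sketch (FolderResource and its title are hypothetical, not
# part of this module): a lazy container wires lineage in __getitem__ before
# handing the child to the traverser.
#
#     class FolderResource(Resource):
#         title = "My folder"
#
#         def __getitem__(self, name):
#             child = Resource(self.request)
#             return Resource.make_lineage(self, child, name)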
|
StarcoderdataPython
|
9641636
|
import pytest
import xarray as xr
from ipyfastscape.common import (
AppComponent,
AppLinker,
Coloring,
DimensionExplorer,
TimeStepper,
VizApp,
)
from .utils import counter_callback
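# For reference, a counter_callback helper like the one imported above could
# be as simple as this sketch (the real implementation in .utils may differ):
#     def counter_callback():
#         counter = {'called': 0}
#         def clb(*args, **kwargs):
#             counter['called'] += 1
#         return counter, clb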
def test_app_component(dataset_init):
with pytest.raises(NotImplementedError):
AppComponent(dataset_init)
class DummyAppComponent(AppComponent):
def setup(self):
return 'widget'
component = DummyAppComponent(dataset_init)
xr.testing.assert_identical(component.dataset, dataset_init)
assert component.widget == 'widget'
assert component.linkable_traits == []
def test_dimension_explorer(dataset_init):
counter, clb = counter_callback()
dim_explorer = DimensionExplorer(dataset_init, canvas_callback=clb)
assert list(dim_explorer.sliders) == ['batch']
assert dim_explorer.sliders['batch'].max == dataset_init.sizes['batch'] - 1
assert dim_explorer.value_labels['batch'][0].value == '1'
assert dim_explorer.linkable_traits == [(dim_explorer.sliders['batch'], 'value')]
# test changing slider value
dim_explorer.sliders['batch'].value = 1
assert dim_explorer.value_labels['batch'][0].value == '2'
xr.testing.assert_equal(dataset_init.isel(batch=1), dataset_init._widgets.view)
assert counter['called'] == 1
def test_timestepper(dataset_init):
counter, clb = counter_callback()
timestepper = TimeStepper(dataset_init, canvas_callback=clb)
nsteps = dataset_init.time.size
assert timestepper.label.value == '0 / 0'
assert timestepper.slider.max == nsteps - 1
assert timestepper.play.max == nsteps - 1
assert timestepper.linkable_traits == [
(timestepper.slider, 'value'),
(timestepper.play, 'value'),
(timestepper.play_speed, 'value'),
]
# test changing slider value
timestepper.slider.value = 1
assert dataset_init._widgets.timestep == 1
assert timestepper.label.value == '1 / 100'
assert counter['called'] == 1
# test update play speed
previous_interval = timestepper.play.interval
timestepper.play_speed.value = timestepper.play_speed.max
assert timestepper.play.interval != previous_interval
# test extra methods
timestepper.go_to_step(2)
assert timestepper.slider.value == 2
timestepper.go_to_time(99)
assert timestepper.slider.value == 1
def test_coloring(dataset_init):
counter_var, clb_var = counter_callback()
counter_range, clb_range = counter_callback()
counter_scale, clb_scale = counter_callback()
coloring = Coloring(
dataset_init,
colormaps=['c1', 'c2'],
default_colormap='c1',
canvas_callback_var=clb_var,
canvas_callback_range=clb_range,
canvas_callback_scale=clb_scale,
)
assert set(coloring.color_vars) == {'topography__elevation', 'other_var'}
assert coloring.var_dropdown.value == 'topography__elevation'
assert coloring.var_dropdown.options == coloring.color_vars
assert coloring.colormaps_dropdown.value == 'c1'
assert coloring.colormaps_dropdown.options == ('c1', 'c2')
assert coloring.min_input.value == dataset_init['topography__elevation'].min()
assert coloring.max_input.value == dataset_init['topography__elevation'].max()
# test log scale checkbox
coloring.log_scale_checkbox.value = True
assert counter_scale['called'] == 1
# test changing var dropdown (should reset color scale)
coloring.var_dropdown.value = 'other_var'
assert dataset_init._widgets.color_var == 'other_var'
assert counter_var['called'] == 1
assert counter_range['called'] == 1
assert coloring.log_scale_checkbox.value is False
assert counter_scale['called'] == 2
# test rescale buttons
coloring.rescale_button.click()
assert counter_range['called'] == 2
coloring.rescale_step_button.click()
assert counter_range['called'] == 3
# test extra methods
coloring.set_color_var('topography__elevation')
assert coloring.var_dropdown.value == 'topography__elevation'
with pytest.raises(ValueError, match='Invalid variable name.*'):
coloring.set_color_var('not_a_var')
coloring.set_colormap('c2')
assert coloring.colormaps_dropdown.value == 'c2'
with pytest.raises(ValueError, match='.*is not a valid colormap.*'):
coloring.set_colormap('not_a_colormap')
coloring.set_color_limits(1, 2)
assert coloring.min_input.value == 1
assert coloring.max_input.value == 2
coloring.set_color_scale(log=True)
assert counter_scale['called'] == 3
def test_viz_app_init(dataset):
app = VizApp()
assert app.dataset is None
assert app.components == {}
assert app.widget is app._output
# check keyword arguments are passed to load_dataset
app = VizApp(dataset, time_dim='time')
assert app.dataset._widgets.time_dim == 'time'
def test_viz_app_load_dataset(dataset):
app = VizApp()
app.load_dataset(dataset, time_dim='time')
xr.testing.assert_equal(app.dataset, dataset)
assert app.dataset is not dataset # must be a copy!
assert 'timestepper' in app.components
assert 'dimensions' in app.components
with pytest.raises(TypeError, match='.*is not a xarray.Dataset object'):
app.load_dataset('not_a_dataset')
def test_app_linker(dataset):
app1 = VizApp(dataset, time_dim='time')
app2 = VizApp(dataset, time_dim='time')
linker = AppLinker([app1, app2], link_server=True)
# test linked
for b in linker.buttons:
b.value = True
app1.components['timestepper'].slider.value = 2
assert app2.components['timestepper'].slider.value == 2
app1.components['dimensions'].sliders['batch'].value = 1
assert app2.components['dimensions'].sliders['batch'].value == 1
# test unlinked
for b in linker.buttons:
b.value = False
app1.components['timestepper'].slider.value = 0
assert app2.components['timestepper'].slider.value != 0
app1.components['dimensions'].sliders['batch'].value = 0
assert app2.components['dimensions'].sliders['batch'].value != 0
def test_app_linker_error():
with pytest.raises(TypeError, match='.*only accepts VizApp objects'):
AppLinker([VizApp(), 'not_an_app'])
with pytest.raises(ValueError, match='AppLinker works with at least two VizApp objects'):
AppLinker([VizApp()])
with pytest.raises(ValueError, match='AppLinker works with distinct VizApp objects'):
app = VizApp()
AppLinker([app, app])
|
StarcoderdataPython
|
12829298
|
"""
Description:
"""
__author__ = "<NAME>, <NAME>, <NAME>"
|
StarcoderdataPython
|
8191804
|
<gh_stars>0
import copy
import time
from collections import Counter
"""Errors are still being found, validation processes are currently being built"""
"""If an error is found please notify"
class Puzzles:
Suduko_Input_Matrix_0 = [[0, 0, 0, 2, 6, 0, 7, 0, 1],
[6, 8, 0, 0, 7, 0, 0, 9, 0],
[1, 9, 0, 0, 0, 4, 5, 0, 0],
[8, 2, 0, 1, 0, 0, 0, 4, 0],
[0, 0, 4, 6, 0, 2, 9, 0, 0],
[0, 5, 0, 0, 0, 3, 0, 2, 8],
[0, 0, 9, 3, 0, 0, 0, 7, 4],
[0, 4, 0, 0, 5, 0, 0, 3, 6],
[7, 0, 3, 0, 1, 8, 0, 0, 0]]
# Completed
Suduko_Input_Matrix_1 = [[0, 0, 0, 6, 0, 0, 4, 0, 0],
[7, 0, 0, 0, 0, 3, 6, 0, 0],
[0, 0, 0, 0, 9, 1, 0, 8, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 5, 0, 1, 8, 0, 0, 0, 3],
[0, 0, 0, 3, 0, 6, 0, 4, 5],
[0, 4, 0, 2, 0, 0, 0, 6, 0],
[9, 0, 3, 0, 0, 0, 0, 0, 0],
[0, 2, 0, 0, 0, 0, 1, 0, 0]]
Suduko_Input_Matrix_2 = [[0, 2, 0, 6, 0, 8, 0, 0, 0],
[5, 8, 0, 0, 0, 9, 7, 0, 0],
[0, 0, 0, 0, 4, 0, 0, 0, 0],
[3, 7, 0, 0, 0, 0, 5, 0, 0],
[6, 0, 0, 0, 0, 0, 0, 0, 4],
[0, 0, 8, 0, 0, 0, 0, 1, 3],
[0, 0, 0, 0, 2, 0, 0, 0, 0],
[0, 0, 9, 8, 0, 0, 0, 3, 6],
[0, 0, 0, 3, 0, 6, 0, 9, 0]
]
Suduko_Input_Matrix_Easy = [[0, 6, 1, 7, 2, 0, 3, 0, 0],
[0, 7, 0, 0, 9, 0, 0, 0, 0],
[0, 5, 3, 0, 4, 0, 0, 0, 1],
[0, 0, 4, 9, 6, 0, 0, 0, 2],
[3, 8, 5, 0, 0, 0, 6, 1, 9],
[6, 0, 0, 0, 1, 3, 8, 0, 0],
[1, 0, 0, 0, 5, 0, 9, 8, 0],
[0, 0, 0, 0, 8, 0, 0, 2, 0],
[0, 0, 9, 0, 3, 2, 7, 6, 0]]
# Completed
Suduko_Input_Matrix_Medium = [[0, 0, 0, 0, 9, 0, 4, 2, 0],
[0, 0, 0, 0, 0, 4, 5, 0, 0],
[0, 5, 0, 0, 2, 0, 0, 0, 3],
[9, 0, 0, 0, 0, 7, 0, 0, 2],
[8, 1, 6, 0, 0, 0, 7, 5, 4],
[2, 0, 0, 4, 0, 0, 0, 0, 1],
[4, 0, 0, 0, 8, 0, 0, 1, 0],
[0, 0, 3, 1, 0, 0, 0, 0, 0],
[0, 2, 1, 0, 5, 0, 0, 0, 0]]
# Completed
class Puzzle:
"""
    A place to store the Sudoku puzzles and answers; this will be used to validate the correct outcome.
"""
class Easy:
"""Easy Problems"""
pass
class Medium:
"""Medium Problems"""
pass
class Hard:
"""Hard Problems"""
pass
def cubic_array():
"""
Creates a 9x9x9 array
:return: suduko_cube == 9x9x9 array
"""
suduko_cube = []
for y in range(9):
suduko_cube.append([])
for x in range(9):
suduko_cube[y].append([])
for z in range(9):
suduko_cube[y][x].append(0)
return suduko_cube
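# Equivalent one-liner for the nested loops above (a sketch; builds the same
# 9x9x9 structure of zeros):
#     suduko_cube = [[[0] * 9 for _ in range(9)] for _ in range(9)]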
def print_suduko(array_2d):
"""
Print 2D array line by line
:param array_2d: Array 2D
:return: None
"""
for col in range(9):
print(array_2d[col])
print()
def print_cube(array_3d):
"""
Print 3D array array by array
:param array_3d: Array 3D
:return: None
"""
for col in range(9):
print(array_3d[col])
print()
def print_cube_2(array_3d):
"""
Print 3D array, array by array
:param array_3d: Array 3D
:return: None
"""
line = '\t' + "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"*2
for col in range(9):
print(line, '\n' + line if col % 3 == 0 else '')
print('\t┃ ┃' +
''.join(map(str, array_3d[col][0])) + '┃' +
''.join(map(str, array_3d[col][1])) + '┃' +
''.join(map(str, array_3d[col][2])) +
'┃ ┃' +
''.join(map(str, array_3d[col][3])) + '┃' +
''.join(map(str, array_3d[col][4])) + '┃' +
''.join(map(str, array_3d[col][5])) +
'┃ ┃' +
''.join(map(str, array_3d[col][6])) + '┃' +
''.join(map(str, array_3d[col][7])) + '┃' +
''.join(map(str, array_3d[col][8])) + '┃ ┃')
print(line, '\n' + line, '\n')
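def print_cube_row(array_3d, col, label=''):
    """
    Helper extracted from the duplicated Before/After prints in
    isolate_double_by_row (per that function's own todo): renders a single
    column of the 3D array in the same boxed format as print_cube_2,
    optionally prefixed with a label such as 'Before - ' or 'After - '.
    :param array_3d: Array 3D
    :param col: index of the column to render
    :param label: text prepended to the rendered line
    :return: None
    """
    parts = ['┃ ┃']
    for row in range(9):
        parts.append(''.join(map(str, array_3d[col][row])))
        # close the box after every third cell, otherwise a single bar
        parts.append('┃ ┃' if row % 3 == 2 else '┃')
    print('\t' + label + ''.join(parts))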
def fill_known_values(array_2d, array_3d):
"""
    Iterates through each column and row; if a value other than 0 is found in array_2d, the candidate list at the
    same coordinate in array_3d is set to the full list 1 through 9, reflecting that no other value can be entered.
:param array_2d:
:param array_3d:
:return:
"""
for y in range(9):
for x in range(9):
if array_2d[y][x] != 0:
array_3d[y][x] = [x for x in range(1, 10)]
return array_3d
def by_row(array_2d, array_3d):
"""
Iterates through each row filling in values from the 2d array to the 3d array
:param array_2d: 2D Array 9x9
:param array_3d: 3D Array 9x9x9
:return: array_3d
"""
for col in range(9):
for row in range(9):
value = array_2d[col][row]
if value == 0:
pass
else:
for depth in range(9):
array_3d[col][depth][value-1] = value
return array_3d
def by_column(array_2d, array_3d):
"""
Iterates through each column filling in values from the 2d array to the 3d array
:param array_2d: 2D Array 9x9
:param array_3d: 3D Array 9x9x9
:return: array_3d
"""
for col in range(0, 9):
for row in range(0, 9):
value = array_2d[col][row]
if value == 0:
pass
else:
for depth in range(9):
array_3d[depth][row][value-1] = value
return array_3d
def by_square(array_2d, array_3d):
"""
Iterates through each 3x3 square filling in values from the 2d array to the 3d array
:param array_2d: 2D Array 9x9
:param array_3d: 3D Array 9x9x9
:return: array_3d
"""
for y in range(0, 3):
for x in range(0, 3):
for sub_y in range(3):
for sub_x in range(3):
coordinate_y = y*3 + sub_y
coordinate_x = x*3 + sub_x
value = array_2d[coordinate_y][coordinate_x]
if value == 0:
pass
else:
for sub_y_placement in range(3):
for sub_x_placement in range(3):
array_3d[y*3 + sub_y_placement][x*3 + sub_x_placement][value-1] = value
return array_3d
def isolate_zeros(array_2d, array_3d):
"""
    Finds zeros in each candidate list across the 3D array. If a list contains exactly one zero while the value in
    the corresponding 2D cell is 0, the forced value (index + 1) is written into the 2D array.
:param array_2d: 2D Array 9x9
:param array_3d: 3D Array 9x9x9
:return: array_3d
"""
for y in range(9):
for x in range(9):
zero_count = array_3d[y][x].count(0)
if array_2d[y][x] == 0 and zero_count == 1:
value = array_3d[y][x].index(0) + 1
array_2d[y][x] = value
print()
return array_2d
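# Worked example (hypothetical cell state): if array_3d[y][x] == [1, 2, 3, 4, 5, 0, 7, 8, 9]
# and array_2d[y][x] == 0, the single remaining zero sits at index 5, so the
# forced value 6 (index + 1) is written into array_2d[y][x].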
def isolate_double_by_row(array_3d):
"""
    Iterates through each column, then each row, looking for candidate lists that contain exactly two zeros and
    cross-referencing each against the other lists in that row. If the row holds a matching pair (or several
    matching pairs), the second part of the function walks every other cell in that row and removes those two
    candidates from it, since the pair must occupy the two matched cells.
:param array_3d: 3D Array 9x9x9
:return: array_3d
"""
for col in range(9):
# print('\nColumn', col)
double_index = []
suduko_y = []
suduko_x = []
for row in range(9):
if array_3d[col][row].count(0) == 2:
for double_validation in range(9):
if double_validation == row:
pass
elif array_3d[col][row] == array_3d[col][double_validation]:
double_index += [[i for i, x in enumerate(array_3d[col][row]) if x == 0]]
suduko_y += [col]
suduko_x += [row]
else:
continue
else:
continue
if len(double_index) == 0:
# print('\t No Values to adjust')
pass
            # todo: maybe bring the number of values found to my attention here.
            # todo: move the zip below here, then count in twos to check for matching pairs; if none, delete index 1.
            # If a ValueError occurs when there is only one option, add a condition that works through each list to
            # make sure the conditions are met, i.e. there are two indexes with the same value.
else:
double_index, suduko_y, suduko_x = zip(*sorted(zip(double_index, suduko_y, suduko_x)))
print('\t0 Index: ', double_index)
print('\tCol Index: ', suduko_y)
print('\tRow Index: ', suduko_x)
for value in range(len(double_index)):
print('\t', array_3d[suduko_y[value]][suduko_x[value]])
            print_cube_row(array_3d, col, 'Before - ')
for value_removal in range(0, len(double_index), 2):
for row in range(9):
if row == suduko_x[value_removal] or row == suduko_x[value_removal + 1]:
pass
else:
array_3d[col][row][double_index[value_removal][0]] = double_index[value_removal][0] + 1
array_3d[col][row][double_index[value_removal][1]] = double_index[value_removal][1] + 1
continue
            print_cube_row(array_3d, col, 'After - ')
print()
return array_3d
def isolate_double_by_column(array_3d):
"""
:param array_3d:
:return:
"""
for row in range(9):
# print('\nRow', row)
double_index = []
suduko_y = []
suduko_x = []
for col in range(9):
if array_3d[col][row].count(0) == 2:
for double_validation in range(9):
if double_validation == col:
pass
elif array_3d[col][row] == array_3d[double_validation][row]:
double_index += [[i for i, x in enumerate(array_3d[col][row]) if x == 0]]
suduko_y += [col]
suduko_x += [row]
else:
continue
# print(len(double_index))
if len(double_index) == 0 or len(double_index) % 2 != 0:
# print('\t No Values to adjust')
            # todo: move the zip below here, then count in twos to check for matching pairs; if none, delete index 1.
            # If a ValueError occurs when there is only one option, add a condition that works through each list to
            # make sure the conditions are met, i.e. there are two indexes with the same value.
pass
else:
pass
double_index, suduko_y, suduko_x = zip(*sorted(zip(double_index, suduko_y, suduko_x)))
print('\t0 Index: ', double_index)
print('\tCol Index: ', suduko_y)
print('\tRow Index: ', suduko_x)
for value in range(len(double_index)):
print('\t', array_3d[suduko_y[value]][suduko_x[value]])
print('\n\tBefore - |')
for col_i in range(9):
print('\t', ''.join(map(str, array_3d[col_i][row])))
for value_removal in range(0, len(double_index), 2):
for col in range(9):
if col == suduko_y[value_removal] or col == suduko_y[value_removal + 1]:
pass
else:
array_3d[col][row][double_index[value_removal][0]] = double_index[value_removal][0] + 1
array_3d[col][row][double_index[value_removal][1]] = double_index[value_removal][1] + 1
continue
print('\n\tAfter - |')
for col_i in range(9):
print('\t', ''.join(map(str, array_3d[col_i][row])))
print()
return array_3d
def isolate_double_by_square(array_3d):
"""
:param array_3d:
:return:
"""
# todo ^ fill this in.
for y in range(0, 3):
for x in range(0, 3):
double_index = []
suduko_y = []
suduko_x = []
for sub_y in range(3):
for sub_x in range(3):
coordinate_y = y * 3 + sub_y
coordinate_x = x * 3 + sub_x
# print('\n\tStart Calculations for', coordinate_y, coordinate_x)
# print('\t\ty:', coordinate_y, 'x:', coordinate_x, '==', array_3d[coordinate_y][coordinate_x])
if array_3d[coordinate_y][coordinate_x].count(0) == 2:
# print('\t\t\tMatch value contains two zeros')
# print('\t\t\t\tchecking sub_coordinates')
for double_validation_sub_y in range(3):
for double_validation_sub_x in range(3):
sub_coordinate_y = y * 3 + double_validation_sub_y
sub_coordinate_x = x * 3 + double_validation_sub_x
if coordinate_y == sub_coordinate_y and coordinate_x == sub_coordinate_x:
pass
elif array_3d[coordinate_y][coordinate_x] == \
array_3d[sub_coordinate_y][sub_coordinate_x]:
# print("\t\t\t\t\tMatch =", sub_coordinate_y, sub_coordinate_x,
# array_3d[sub_coordinate_y][sub_coordinate_x])
double_index += [[i for i, x in enumerate(array_3d[coordinate_y][coordinate_x])
if x == 0]]
suduko_y += [coordinate_y]
suduko_x += [coordinate_x]
else:
continue
if len(double_index) == 0 or len(double_index) % 2 != 0:
# print('\n\n\n HERE \n\n\n')
# print('\t No Values to adjust')
                # todo: move the zip below here, then count in twos to check for matching pairs; if none, delete index 1.
                # If a ValueError occurs when there is only one option, add a condition that works through each list
                # to make sure the conditions are met, i.e. there are two indexes with the same value.
pass
else:
# print('\n\n\n True \n\n\n')
double_index, suduko_y, suduko_x = zip(*sorted(zip(double_index, suduko_y, suduko_x)))
# print('\n\t0 Index: ', double_index)
# print('\tCol Index: ', suduko_y)
# print('\tRow Index: ', suduko_x)
for value_removal in range(0, len(double_index), 2):
for sub_y_2 in range(3):
for sub_x_2 in range(3):
coordinate_y = y * 3 + sub_y_2
coordinate_x = x * 3 + sub_x_2
if coordinate_y == suduko_y[value_removal] and \
coordinate_x == suduko_x[value_removal]:
pass
elif coordinate_y == suduko_y[value_removal + 1] and \
coordinate_x == suduko_x[value_removal + 1]:
pass
else:
# print('\n\t\tStart Calculations for', coordinate_y, coordinate_x)
# print('\t\t\ty:', coordinate_y, 'x:', coordinate_x, '==',
# array_3d[coordinate_y][coordinate_x])
array_3d[coordinate_y][coordinate_x][double_index[value_removal][0]] = \
double_index[value_removal][0] + 1
array_3d[coordinate_y][coordinate_x][double_index[value_removal][1]] = \
double_index[value_removal][1] + 1
continue
return array_3d
def isolated_integer_by_row(array_2d, array_3d):
"""
:param array_2d:
:param array_3d:
:return:
"""
for y in range(9):
for x in range(9):
for z in range(9):
if array_3d[y][x][z] == 0:
value_counter = 0
value_to_check = z + 1
for i in range(9):
if array_3d[y][i][z] == 0:
value_counter += 1
else:
continue
if value_counter == 1 and array_3d[y][x][z] == 0:
print(y, x, z+1)
array_3d[y][x] = [x for x in range(1, 10)]
for value_removal in range(9):
array_3d[value_removal][x][z] = value_to_check
array_3d[y][value_removal][z] = value_to_check
                        # todo: removal of values in the square using [y][x][z]
array_2d[y][x] = value_to_check
#
print_cube_2(array_3d)
print_suduko(array_2d)
# input('Value Found')
# input('pause')
continue
return array_2d, array_3d
def isolated_integer_by_column(array_2d, array_3d):
"""
:param array_2d:
:param array_3d:
:return:
"""
for x in range(9):
for y in range(9):
for z in range(9):
if array_3d[y][x][z] == 0:
value_counter = 0
value_to_check = z + 1
for i in range(9):
if array_3d[y][i][z] == 0:
value_counter += 1
else:
continue
if value_counter == 1 and array_3d[y][x][z] == 0:
print(y, x, z+1)
array_3d[y][x] = [x for x in range(1, 10)]
for value_removal in range(9):
array_3d[value_removal][x][z] = value_to_check
array_3d[y][value_removal][z] = value_to_check
                        # todo: removal of values in the square using [y][x][z]
array_2d[y][x] = value_to_check
#
print_cube_2(array_3d)
print_suduko(array_2d)
# input('Value Found')
# input('pause')
continue
return array_2d, array_3d
def isolated_integer_by_square(array_2d, array_3d):
"""
:param array_2d:
:param array_3d:
:return:
"""
for y in range(3):
for x in range(3):
for sub_y in range(3):
for sub_x in range(3):
coordinate_y = y * 3 + sub_y
coordinate_x = x * 3 + sub_x
# print('Checking', coordinate_y, coordinate_x)
for z in range(9):
"""To check each list values"""
if array_3d[coordinate_y][coordinate_x][z] == 0:
value_counter = 0
value_to_check = z + 1
for sub_y_2 in range(3):
for sub_x_2 in range(3):
coordinate_y_2 = y * 3 + sub_y_2
coordinate_x_2 = x * 3 + sub_x_2
# print('\tAgainst', coordinate_y_2, coordinate_x_2, z)
if array_3d[coordinate_y_2][coordinate_x_2][z] == 0:
value_counter += 1
else:
continue
if value_counter == 1 and array_3d[coordinate_y][coordinate_x][z] == 0:
# print('\t\t', 'y', coordinate_y, 'x', coordinate_x, 'count', value_counter, 'value', value_to_check)
# print('\t\tValue_Count = ', value_counter)
# print('\t\t', coordinate_y, coordinate_x)
array_3d[coordinate_y][coordinate_x] = [x for x in range(1, 10)]
# print(array_3d[coordinate_y][coordinate_x])
#
# print()
# print_suduko(array_2d)
for value_removal in range(9):
array_3d[value_removal][coordinate_x][z] = value_to_check
array_3d[coordinate_y][value_removal][z] = value_to_check
array_2d[coordinate_y][coordinate_x] = value_to_check
#
# print_suduko(array_2d)
# input('Value Found')
pass
else:
continue
return array_2d, array_3d
# todo: Create a validation process to make sure that each column, row, and square contains values one through nine;
# todo: place these checks between each function run over array_2d to see where an issue may arise.
# todo: However, if a value was misplaced earlier this won't help.
Suduko_Cube = cubic_array()
Puzzle_Select = Puzzles.Suduko_Input_Matrix_Medium
Suduko_Starting_Matrix = copy.deepcopy(Puzzle_Select)
Suduko_Resulting_Matrix = copy.deepcopy(Puzzle_Select)
Suduko_Cube = fill_known_values(Suduko_Resulting_Matrix, Suduko_Cube)
print("Suduko".upper())
print_suduko(Suduko_Resulting_Matrix)
input("Enter to Start:")
instance_count = 1
while True:
print("===================================================================================================")
print("STARTING INSTANCE", instance_count)
store_suduko = copy.deepcopy(Suduko_Resulting_Matrix)
"""=============================================================================================================="""
print("ROWS")
Suduko_Cube = by_row(Suduko_Resulting_Matrix, Suduko_Cube)
print_cube_2(Suduko_Cube)
print("Columns")
Suduko_Cube = by_column(Suduko_Resulting_Matrix, Suduko_Cube)
print_cube_2(Suduko_Cube)
print("Squares")
Suduko_Cube = by_square(Suduko_Resulting_Matrix, Suduko_Cube)
print_cube_2(Suduko_Cube)
"""=============================================================================================================="""
print("Isolating Doubles by row")
Suduko_Cube = isolate_double_by_row(Suduko_Cube)
print_cube_2(Suduko_Cube)
print("Isolating Doubles by column")
Suduko_Cube = isolate_double_by_column(Suduko_Cube)
print_cube_2(Suduko_Cube)
print("Isolating Doubles by square")
Suduko_Cube = isolate_double_by_square(Suduko_Cube)
print_cube_2(Suduko_Cube)
input('Enter to Continue')
"""=============================================================================================================="""
print("Isolating Single Integer by Row")
Suduko_Resulting_Matrix, Suduko_Cube = isolated_integer_by_row(Suduko_Resulting_Matrix, Suduko_Cube)
print_cube_2(Suduko_Cube)
print("Isolating Single Integer by Column")
Suduko_Resulting_Matrix, Suduko_Cube = isolated_integer_by_column(Suduko_Resulting_Matrix, Suduko_Cube)
print_cube_2(Suduko_Cube)
print("Isolating Single Integer by Square")
Suduko_Resulting_Matrix, Suduko_Cube = isolated_integer_by_square(Suduko_Resulting_Matrix, Suduko_Cube)
print_cube_2(Suduko_Cube)
# input('Enter to Continue')
"""=============================================================================================================="""
print("Isolate Zeros")
Suduko_Resulting_Matrix = isolate_zeros(Suduko_Resulting_Matrix, Suduko_Cube)
print_suduko(Suduko_Resulting_Matrix)
input(('pause instance', instance_count))
"""=============================================================================================================="""
"""=============================================================================================================="""
if store_suduko == Suduko_Resulting_Matrix:
        print('No changes made')
exit()
count = 0
for y in range(9):
if Suduko_Resulting_Matrix[y].count(0) == 0:
count += 1
else:
continue
if count == 9:
print("Computation Completed")
break
print("===================================================================================================")
time.sleep(0.5)
instance_count += 1
print("===================================================================================================")
print("\nResult!".upper())
for y in range(9):
print(Suduko_Starting_Matrix[y], ' > ', Suduko_Resulting_Matrix[y])
input("Enter to Exit:")
|
StarcoderdataPython
|