Dataset schema (one record per source file):

  blob_id               string (length 40)
  directory_id          string (length 40)
  path                  string (length 3 to 616)
  content_id            string (length 40)
  detected_licenses     sequence (length 0 to 112)
  license_type          string (2 classes)
  repo_name             string (length 5 to 115)
  snapshot_id           string (length 40)
  revision_id           string (length 40)
  branch_name           string (777 classes)
  visit_date            timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
  revision_date         timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
  committer_date        timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
  github_id             int64 (4.92k to 681M, nullable)
  star_events_count     int64 (0 to 209k)
  fork_events_count     int64 (0 to 110k)
  gha_license_id        string (22 classes)
  gha_event_created_at  timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable)
  gha_created_at        timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable)
  gha_language          string (149 classes)
  src_encoding          string (26 classes)
  language              string (1 value)
  is_vendor             bool
  is_generated          bool
  length_bytes          int64 (3 to 10.2M)
  extension             string (188 classes)
  content               string (length 3 to 10.2M)
  authors               sequence (length 1)
  author_id             string (length 1 to 132)

Sample records follow; each is shown as a one-line header (path, repo, license)
followed by the file content.
# ---- python/python_8283.py | repo: AK-1121/code_extraction | license: none ----
# Python module BeautifulSoup: extracting anchor href values
from bs4 import BeautifulSoup

soup = BeautifulSoup(html, "html.parser")  # 'html' is assumed to hold the page markup
links = []
for a in soup.find_all("a"):
    if a.has_attr("href"):  # bs4 Tags use has_attr(); the old has_key() is gone
        links.append(a["href"])
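
# A hedged, self-contained demo of the same extraction; the sample markup
# below is illustrative only and not part of the original snippet.
def _demo_extract_hrefs():
    sample = '<a href="/a">A</a><a name="no-href">B</a>'
    s = BeautifulSoup(sample, "html.parser")
    return [a["href"] for a in s.find_all("a") if a.has_attr("href")]  # -> ["/a"]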
# ---- gluon/gluoncv2/models/lednet.py | repo: osmr/imgclsmob | license: MIT (permissive) ----
"""
LEDNet for image segmentation, implemented in Gluon.
Original paper: 'LEDNet: A Lightweight Encoder-Decoder Network for Real-Time Semantic Segmentation,'
https://arxiv.org/abs/1905.02423.
"""
__all__ = ['LEDNet', 'lednet_cityscapes']
import os
from mxnet import cpu
from mxnet.gluon import nn, HybridBlock
from .common import conv3x3, conv1x1_block, conv3x3_block, conv5x5_block, conv7x7_block, ConvBlock, NormActivation,\
ChannelShuffle, InterpolationBlock, Hourglass, BreakBlock
class AsymConvBlock(HybridBlock):
"""
Asymmetric separable convolution block.
Parameters:
----------
channels : int
Number of input/output channels.
kernel_size : int
Convolution window size.
padding : int
Padding value for convolution layer.
dilation : int, default 1
Dilation value for convolution layer.
groups : int, default 1
Number of groups.
use_bias : bool, default False
Whether the layer uses a bias vector.
lw_use_bn : bool, default True
Whether to use BatchNorm layer (leftwise convolution block).
rw_use_bn : bool, default True
Whether to use BatchNorm layer (rightwise convolution block).
bn_epsilon : float, default 1e-5
Small float added to variance in Batch norm.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool, default False
Whether to disable CUDNN batch normalization operator.
lw_activation : function or str or None, default nn.Activation('relu')
Activation function after the leftwise convolution block.
rw_activation : function or str or None, default nn.Activation('relu')
Activation function after the rightwise convolution block.
"""
def __init__(self,
channels,
kernel_size,
padding,
dilation=1,
groups=1,
use_bias=False,
lw_use_bn=True,
rw_use_bn=True,
bn_epsilon=1e-5,
bn_use_global_stats=False,
bn_cudnn_off=False,
lw_activation=(lambda: nn.Activation("relu")),
rw_activation=(lambda: nn.Activation("relu")),
**kwargs):
super(AsymConvBlock, self).__init__(**kwargs)
with self.name_scope():
self.lw_conv = ConvBlock(
in_channels=channels,
out_channels=channels,
kernel_size=(kernel_size, 1),
strides=1,
padding=(padding, 0),
dilation=(dilation, 1),
groups=groups,
use_bias=use_bias,
use_bn=lw_use_bn,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off,
activation=lw_activation)
self.rw_conv = ConvBlock(
in_channels=channels,
out_channels=channels,
kernel_size=(1, kernel_size),
strides=1,
padding=(0, padding),
dilation=(1, dilation),
groups=groups,
use_bias=use_bias,
use_bn=rw_use_bn,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off,
activation=rw_activation)
def hybrid_forward(self, F, x):
x = self.lw_conv(x)
x = self.rw_conv(x)
return x
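
# A minimal shape-check sketch for AsymConvBlock (assumes an MXNet runtime;
# the sizes below are illustrative). A (k x 1) convolution followed by a
# (1 x k) convolution with padding k // 2 preserves the spatial size while
# using roughly 2k weights per channel pair instead of k * k.
def _asym_conv_block_example():
    import mxnet as mx
    blk = AsymConvBlock(channels=16, kernel_size=3, padding=1)
    blk.initialize()
    x = mx.nd.random.normal(shape=(1, 16, 32, 32))
    assert blk(x).shape == (1, 16, 32, 32)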
def asym_conv3x3_block(padding=1,
**kwargs):
"""
3x3 asymmetric separable convolution block.
Parameters:
----------
channels : int
Number of input/output channels.
padding : int, default 1
Padding value for convolution layer.
dilation : int, default 1
Dilation value for convolution layer.
groups : int, default 1
Number of groups.
use_bias : bool, default False
Whether the layer uses a bias vector.
lw_use_bn : bool, default True
Whether to use BatchNorm layer (leftwise convolution block).
rw_use_bn : bool, default True
Whether to use BatchNorm layer (rightwise convolution block).
bn_epsilon : float, default 1e-5
Small float added to variance in Batch norm.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool, default False
Whether to disable CUDNN batch normalization operator.
lw_activation : function or str or None, default nn.Activation('relu')
Activation function after the leftwise convolution block.
rw_activation : function or str or None, default nn.Activation('relu')
Activation function after the rightwise convolution block.
"""
return AsymConvBlock(
kernel_size=3,
padding=padding,
**kwargs)
class LEDDownBlock(HybridBlock):
"""
LEDNet specific downscale block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
    correct_size_mismatch : bool
Whether to correct downscaled sizes of images.
bn_epsilon : float
Small float added to variance in Batch norm.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool, default False
Whether to disable CUDNN batch normalization operator.
"""
def __init__(self,
in_channels,
out_channels,
correct_size_mismatch,
bn_epsilon,
bn_use_global_stats=False,
bn_cudnn_off=False,
**kwargs):
super(LEDDownBlock, self).__init__(**kwargs)
self.correct_size_mismatch = correct_size_mismatch
with self.name_scope():
self.pool = nn.MaxPool2D(
pool_size=2,
strides=2)
self.conv = conv3x3(
in_channels=in_channels,
out_channels=(out_channels - in_channels),
strides=2,
use_bias=True)
self.norm_activ = NormActivation(
in_channels=out_channels,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off)
def hybrid_forward(self, F, x):
y1 = self.pool(x)
y2 = self.conv(x)
        if self.correct_size_mismatch:
            # MXNet NDArrays expose .shape (there is no torch-style .size()
            # method); pad the pooled branch to match the strided conv branch.
            # mxnet pad_width for NCHW is (N, N, C, C, H, H, W, W) pairs.
            diff_h = y2.shape[2] - y1.shape[2]
            diff_w = y2.shape[3] - y1.shape[3]
            y1 = F.pad(
                y1,
                mode="constant",
                pad_width=(0, 0, 0, 0, diff_h // 2, diff_h - diff_h // 2, diff_w // 2, diff_w - diff_w // 2),
                constant_value=0)
x = F.concat(y2, y1, dim=1)
x = self.norm_activ(x)
return x
class LEDBranch(HybridBlock):
"""
LEDNet encoder branch.
Parameters:
----------
channels : int
Number of input/output channels.
dilation : int
Dilation value for convolution layer.
dropout_rate : float
        Parameter of Dropout layer. Fraction of the input units to drop.
bn_epsilon : float
Small float added to variance in Batch norm.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool, default False
Whether to disable CUDNN batch normalization operator.
"""
def __init__(self,
channels,
dilation,
dropout_rate,
bn_epsilon,
bn_use_global_stats=False,
bn_cudnn_off=False,
**kwargs):
super(LEDBranch, self).__init__(**kwargs)
self.use_dropout = (dropout_rate != 0.0)
with self.name_scope():
self.conv1 = asym_conv3x3_block(
channels=channels,
use_bias=True,
lw_use_bn=False,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off)
self.conv2 = asym_conv3x3_block(
channels=channels,
padding=dilation,
dilation=dilation,
use_bias=True,
lw_use_bn=False,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off,
rw_activation=None)
if self.use_dropout:
self.dropout = nn.Dropout(rate=dropout_rate)
def hybrid_forward(self, F, x):
x = self.conv1(x)
x = self.conv2(x)
if self.use_dropout:
x = self.dropout(x)
return x
class LEDUnit(HybridBlock):
"""
LEDNet encoder unit (Split-Shuffle-non-bottleneck).
Parameters:
----------
channels : int
Number of input/output channels.
dilation : int
Dilation value for convolution layer.
dropout_rate : float
        Parameter of Dropout layer. Fraction of the input units to drop.
bn_epsilon : float
Small float added to variance in Batch norm.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool, default False
Whether to disable CUDNN batch normalization operator.
"""
def __init__(self,
channels,
dilation,
dropout_rate,
bn_epsilon,
bn_use_global_stats=False,
bn_cudnn_off=False,
**kwargs):
super(LEDUnit, self).__init__(**kwargs)
mid_channels = channels // 2
with self.name_scope():
self.left_branch = LEDBranch(
channels=mid_channels,
dilation=dilation,
dropout_rate=dropout_rate,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off)
self.right_branch = LEDBranch(
channels=mid_channels,
dilation=dilation,
dropout_rate=dropout_rate,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off)
self.activ = nn.Activation("relu")
self.shuffle = ChannelShuffle(
channels=channels,
groups=2)
def hybrid_forward(self, F, x):
identity = x
x1, x2 = F.split(x, axis=1, num_outputs=2)
x1 = self.left_branch(x1)
x2 = self.right_branch(x2)
x = F.concat(x1, x2, dim=1)
x = x + identity
x = self.activ(x)
x = self.shuffle(x)
return x
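
# A hedged sketch of the split-shuffle-non-bottleneck unit: the input is
# split into two channel halves, each half runs through an asymmetric
# branch, the halves are concatenated, added back to the identity, and the
# channels are shuffled across the two groups. Sizes below are illustrative.
def _led_unit_example():
    import mxnet as mx
    unit = LEDUnit(channels=64, dilation=2, dropout_rate=0.0, bn_epsilon=1e-3)
    unit.initialize()
    x = mx.nd.random.normal(shape=(2, 64, 32, 32))
    assert unit(x).shape == (2, 64, 32, 32)  # channels and size are preserved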
class PoolingBranch(HybridBlock):
"""
Pooling branch.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
use_bias : bool
Whether the layer uses a bias vector.
bn_epsilon : float
Small float added to variance in Batch norm.
bn_use_global_stats : bool
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool
Whether to disable CUDNN batch normalization operator.
in_size : tuple of 2 int or None
Spatial size of input image.
down_size : int
Spatial size of downscaled image.
"""
def __init__(self,
in_channels,
out_channels,
use_bias,
bn_epsilon,
bn_use_global_stats,
bn_cudnn_off,
in_size,
down_size,
**kwargs):
super(PoolingBranch, self).__init__(**kwargs)
self.in_size = in_size
self.down_size = down_size
with self.name_scope():
self.conv = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels,
use_bias=use_bias,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off)
self.up = InterpolationBlock(
scale_factor=None,
out_size=in_size)
def hybrid_forward(self, F, x):
in_size = self.in_size if self.in_size is not None else x.shape[2:]
x = F.contrib.AdaptiveAvgPooling2D(x, output_size=self.down_size)
x = self.conv(x)
x = self.up(x, in_size)
return x
class APN(HybridBlock):
"""
Attention pyramid network block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
bn_epsilon : float
Small float added to variance in Batch norm.
bn_use_global_stats : bool
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool
Whether to disable CUDNN batch normalization operator.
in_size : tuple of 2 int or None
Spatial size of input image.
"""
def __init__(self,
in_channels,
out_channels,
bn_epsilon,
bn_use_global_stats,
bn_cudnn_off,
in_size,
**kwargs):
super(APN, self).__init__(**kwargs)
self.in_size = in_size
att_out_channels = 1
with self.name_scope():
self.pool_branch = PoolingBranch(
in_channels=in_channels,
out_channels=out_channels,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off,
in_size=in_size,
down_size=1)
self.body = conv1x1_block(
in_channels=in_channels,
out_channels=out_channels,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off)
down_seq = nn.HybridSequential(prefix="")
down_seq.add(conv7x7_block(
in_channels=in_channels,
out_channels=att_out_channels,
strides=2,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
down_seq.add(conv5x5_block(
in_channels=att_out_channels,
out_channels=att_out_channels,
strides=2,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
down3_subseq = nn.HybridSequential(prefix="")
down3_subseq.add(conv3x3_block(
in_channels=att_out_channels,
out_channels=att_out_channels,
strides=2,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
down3_subseq.add(conv3x3_block(
in_channels=att_out_channels,
out_channels=att_out_channels,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
down_seq.add(down3_subseq)
up_seq = nn.HybridSequential(prefix="")
up = InterpolationBlock(scale_factor=2)
up_seq.add(up)
up_seq.add(up)
up_seq.add(up)
skip_seq = nn.HybridSequential(prefix="")
skip_seq.add(BreakBlock())
skip_seq.add(conv7x7_block(
in_channels=att_out_channels,
out_channels=att_out_channels,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
skip_seq.add(conv5x5_block(
in_channels=att_out_channels,
out_channels=att_out_channels,
use_bias=True,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
self.hg = Hourglass(
down_seq=down_seq,
up_seq=up_seq,
skip_seq=skip_seq)
def hybrid_forward(self, F, x):
y = self.pool_branch(x)
w = self.hg(x)
x = self.body(x)
x = x * w
x = x + y
return x
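
# A hedged sketch of the APN data flow (shapes are illustrative, and it
# assumes the Hourglass helper wires the three scales as registered above):
# the body branch maps features to per-class logits, the hourglass yields a
# single-channel attention map that scales those logits, and the pooling
# branch adds image-level context. The input spatial size must be divisible
# by 8 to survive the three stride-2 downscale stages.
def _apn_example():
    import mxnet as mx
    apn = APN(in_channels=128, out_channels=19, bn_epsilon=1e-3,
              bn_use_global_stats=False, bn_cudnn_off=False, in_size=(16, 16))
    apn.initialize()
    x = mx.nd.random.normal(shape=(1, 128, 16, 16))
    assert apn(x).shape == (1, 19, 16, 16)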
class LEDNet(HybridBlock):
"""
LEDNet model from 'LEDNet: A Lightweight Encoder-Decoder Network for Real-Time Semantic Segmentation,'
https://arxiv.org/abs/1905.02423.
Parameters:
----------
channels : list of int
Number of output channels for each unit.
dilations : list of int
Dilations for units.
dropout_rates : list of list of int
Dropout rates for each unit in encoder.
    correct_size_mismatch : bool
Whether to correct downscaled sizes of images in encoder.
bn_epsilon : float, default 1e-5
Small float added to variance in Batch norm.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
bn_cudnn_off : bool, default False
Whether to disable CUDNN batch normalization operator.
aux : bool, default False
Whether to output an auxiliary result.
fixed_size : bool, default False
Whether to expect fixed spatial size of input image.
in_channels : int, default 3
Number of input channels.
in_size : tuple of two ints, default (1024, 2048)
Spatial size of the expected input image.
classes : int, default 19
Number of segmentation classes.
"""
def __init__(self,
channels,
dilations,
dropout_rates,
correct_size_mismatch=False,
bn_epsilon=1e-5,
bn_use_global_stats=False,
bn_cudnn_off=False,
aux=False,
fixed_size=False,
in_channels=3,
in_size=(1024, 2048),
classes=19,
**kwargs):
super(LEDNet, self).__init__(**kwargs)
assert (aux is not None)
assert (fixed_size is not None)
assert ((in_size[0] % 8 == 0) and (in_size[1] % 8 == 0))
self.in_size = in_size
self.classes = classes
self.fixed_size = fixed_size
with self.name_scope():
self.encoder = nn.HybridSequential(prefix="")
for i, dilations_per_stage in enumerate(dilations):
out_channels = channels[i]
dropout_rate = dropout_rates[i]
stage = nn.HybridSequential(prefix="stage{}_".format(i + 1))
for j, dilation in enumerate(dilations_per_stage):
if j == 0:
stage.add(LEDDownBlock(
in_channels=in_channels,
out_channels=out_channels,
correct_size_mismatch=correct_size_mismatch,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
in_channels = out_channels
else:
stage.add(LEDUnit(
channels=in_channels,
dilation=dilation,
dropout_rate=dropout_rate,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off))
self.encoder.add(stage)
self.apn = APN(
in_channels=in_channels,
out_channels=classes,
bn_epsilon=bn_epsilon,
bn_use_global_stats=bn_use_global_stats,
bn_cudnn_off=bn_cudnn_off,
in_size=(in_size[0] // 8, in_size[1] // 8) if fixed_size else None)
self.up = InterpolationBlock(scale_factor=8)
def hybrid_forward(self, F, x):
x = self.encoder(x)
x = self.apn(x)
x = self.up(x)
return x
def get_lednet(model_name=None,
pretrained=False,
ctx=cpu(),
root=os.path.join("~", ".mxnet", "models"),
**kwargs):
"""
Create LEDNet model with specific parameters.
Parameters:
----------
model_name : str or None, default None
Model name for loading pretrained model.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
channels = [32, 64, 128]
dilations = [[0, 1, 1, 1], [0, 1, 1], [0, 1, 2, 5, 9, 2, 5, 9, 17]]
dropout_rates = [0.03, 0.03, 0.3]
bn_epsilon = 1e-3
net = LEDNet(
channels=channels,
dilations=dilations,
dropout_rates=dropout_rates,
bn_epsilon=bn_epsilon,
**kwargs)
if pretrained:
if (model_name is None) or (not model_name):
raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.")
from .model_store import get_model_file
net.load_parameters(
filename=get_model_file(
model_name=model_name,
local_model_store_dir_path=root),
ctx=ctx,
ignore_extra=True)
return net
def lednet_cityscapes(classes=19, **kwargs):
"""
LEDNet model for Cityscapes from 'LEDNet: A Lightweight Encoder-Decoder Network for Real-Time Semantic
Segmentation,' https://arxiv.org/abs/1905.02423.
Parameters:
----------
classes : int, default 19
Number of segmentation classes.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
return get_lednet(classes=classes, model_name="lednet_cityscapes", **kwargs)
def _calc_width(net):
import numpy as np
net_params = net.collect_params()
weight_count = 0
for param in net_params.values():
if (param.shape is None) or (not param._differentiable):
continue
weight_count += np.prod(param.shape)
return weight_count
def _test():
import mxnet as mx
pretrained = False
fixed_size = True
correct_size_mismatch = False
in_size = (1024, 2048)
classes = 19
models = [
lednet_cityscapes,
]
for model in models:
net = model(pretrained=pretrained, in_size=in_size, fixed_size=fixed_size,
correct_size_mismatch=correct_size_mismatch)
ctx = mx.cpu()
if not pretrained:
net.initialize(ctx=ctx)
# net.hybridize()
weight_count = _calc_width(net)
print("m={}, {}".format(model.__name__, weight_count))
assert (model != lednet_cityscapes or weight_count == 922821)
batch = 4
x = mx.nd.random.normal(shape=(batch, 3, in_size[0], in_size[1]), ctx=ctx)
y = net(x)
assert (y.shape == (batch, classes, in_size[0], in_size[1]))
if __name__ == "__main__":
_test()
# ---- benchmark/startQiskit_Class36.py | repo: UCLA-SEAL/QDiff | license: BSD-3-Clause (permissive) ----
# qubit number=3
# total number=9
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.swap(input_qubit[3],input_qubit[0]) # number=5
prog.swap(input_qubit[3],input_qubit[0]) # number=6
prog.cx(input_qubit[1],input_qubit[0]) # number=7
prog.cx(input_qubit[1],input_qubit[0]) # number=8
# circuit end
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
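    # A hedged reading of the selection above: F1 acts as an analytic p=1
    # QAOA objective for this fixed 4-node, 5-edge graph, evaluated over a
    # (gamma, beta) grid with step 0.1; np.amax/np.where locate a maximizing
    # grid point, and those angles parameterize the circuit built below.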
prog = make_circuit(4)
sample_shot =5600
writefile = open("../data/startQiskit_Class36.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = BasicAer.get_backend('statevector_simulator')
circuit1 = transpile(prog, FakeYorktown())
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
# ---- Python_codes/p03946/s811408190.py | repo: Aasthaengg/IBMdataset | license: none ----
N, T = map(int, input().split())
A = list(map(int, input().split()))
data = [0] * (N - 1)
# Suffix maxima: M[i] is the largest value in A[i:], M_index[i] is where it occurs.
M = [0] * N
M[N - 1] = A[N - 1]
M_index = [N - 1] * N
for i in range(2, N + 1):
    if A[N - i] > M[N - i + 1]:
        M[N - i] = A[N - i]
        M_index[N - i] = N - i
    else:
        M[N - i] = M[N - i + 1]
        M_index[N - i] = M_index[N - i + 1]
    data[N - i] = M[N - i] - A[N - i]  # best difference achievable from position N-i
m = max(data)  # the optimal difference overall
l = []
for i in range(N - 1):
    if data[i] == m:
        l.append(i)  # every starting index that achieves the optimum
ans = [0] * N
for x in l:
    ans[M_index[x]] = 1  # mark the distinct positions where those maxima occur
#####
#print("A",A)
#print("M",M)
#print("M_index",M_index)
#print("data",data)
#print("ans",ans)
#####
print(sum(ans))
# ---- archive/pl-implementation/dinr/modeling/modules/ms_inr_generator.py | repo: kylemin/inr-gan | license: none ----
import math
import random
import numpy as np
import torch
from torch import nn
import torch.nn.functional as F
from .conv import StyledConv
from .linear import EqualLinear, StyledLinear
from .style import PixelNorm, CoordBasedConstantInput, CoordFuser
from .torgb import ToRGB
class MSINRGenerator(nn.Module):
def __init__(self,
fallback: bool,
size,
style_dim,
n_mlp,
channel_multiplier=2,
blur_kernel=(1, 3, 3, 1),
lr_mlp=0.01,
transform_type='linear',
modulation_type='fmm_inrgan',
is_multiscale: bool=False,
interpolation_type='nearest',
factorization_rank: int=3,
fourier_scale: int=10.0,
coord_fuse_type: str="concat",
multi_scale_coord_embs: bool=False,
use_noise_injection: bool=True,
coord_emb_dim_multiplier: int=0.5,
linear_conv_block: bool=False,
no_fourier_embs: bool=False):
super().__init__()
self.size = size
self.style_dim = style_dim
self.is_multiscale = is_multiscale
self.interpolation_type = interpolation_type
mapping_network_layers = [PixelNorm()]
for i in range(n_mlp):
mapping_network_layers.append(EqualLinear(
self.style_dim, self.style_dim, lr_mul=lr_mlp, activation='fused_lrelu'))
self.style = nn.Sequential(*mapping_network_layers)
self.log_size = int(math.log(self.size, 2))
self.num_layers = (self.log_size - 2) * 2 + 1
if not fallback:
if self.is_multiscale:
self.channels = {
4: 512,
8: 512,
16: 512,
32: 512,
64: int(256 * channel_multiplier),
128: int(128 * channel_multiplier),
256: int(64 * channel_multiplier),
512: int(32 * channel_multiplier),
1024: int(16 * channel_multiplier),
}
self.coord_concat_emb_dims = {
4: int(128 * coord_emb_dim_multiplier),
8: int(128 * coord_emb_dim_multiplier),
16: int(128 * coord_emb_dim_multiplier),
32: int(256 * coord_emb_dim_multiplier),
64: int(256 * channel_multiplier * coord_emb_dim_multiplier),
128: int(128 * channel_multiplier * coord_emb_dim_multiplier),
256: int(64 * channel_multiplier * coord_emb_dim_multiplier),
512: int(32 * channel_multiplier * coord_emb_dim_multiplier),
1024: int(16 * channel_multiplier * coord_emb_dim_multiplier),
}
resolutions = [2 ** log_res for log_res in range(3, self.log_size + 1)]
input_resolution = 4
use_coordinates_input = False
else:
self.channels = {256: 512}
resolutions = [256] * len(range(3, self.log_size + 1))
input_resolution = 256
use_coordinates_input = True
kernel_size = 1
use_upsample = False
factorization_ranks = {
4: 3,
8: 3,
16: 3,
32: 5,
64: 5,
128: 5,
256: 10,
512: 10,
1024: 10,
}
else:
self.channels = {
4: 512,
8: 512,
16: 512,
32: 512,
64: 256 * channel_multiplier,
128: 128 * channel_multiplier,
256: 64 * channel_multiplier,
512: 32 * channel_multiplier,
1024: 16 * channel_multiplier,
}
resolutions = [2 ** log_res for log_res in range(3, self.log_size + 1)]
kernel_size = 3
input_resolution = 4
use_upsample = True
use_coordinates_input = False
use_noise_injection = True
self.input = CoordBasedConstantInput(self.channels[input_resolution], input_resolution, use_coordinates_input)
if fallback or transform_type == 'conv':
self.conv1 = StyledConv(
self.input.get_total_dim(),
self.channels[input_resolution],
kernel_size,
self.style_dim,
blur_kernel=blur_kernel,
noise_injection=use_noise_injection)
else:
self.conv1 = StyledLinear(
self.input.get_total_dim(),
self.channels[input_resolution],
self.style_dim,
modulation_type,
factorization_ranks[input_resolution])
self.to_rgb1 = ToRGB(self.channels[input_resolution], self.style_dim, upsample=False)
self.convs = nn.ModuleList()
self.upsamples = nn.ModuleList()
self.to_rgbs = nn.ModuleList()
self.noises = nn.Module()
self.coord_concats = nn.ModuleList()
for layer_idx in range(self.num_layers):
res = (layer_idx + 5) // 2
shape = [1, 1, 2 ** res, 2 ** res]
self.noises.register_buffer(f'noise_{layer_idx}', torch.randn(*shape))
in_channel = self.channels[resolutions[0]]
for res in resolutions:
out_channel = self.channels[res]
if self.is_multiscale:
self.coord_concats.append(CoordFuser(
emb_dim=self.coord_concat_emb_dims[res],
style_dim=self.style_dim,
coord_dim=2,
fourier_scale=fourier_scale,
fuse_type=coord_fuse_type,
multi_scale_coord_embs=multi_scale_coord_embs,
img_size=res,
no_fourier_embs=no_fourier_embs,
))
in_channel += self.coord_concats[-1].get_total_dim()
else:
self.coord_concats.append(nn.Identity())
if fallback or transform_type == 'conv':
self.convs.append(StyledConv(
in_channel,
out_channel,
kernel_size,
self.style_dim,
upsample=use_upsample,
blur_kernel=blur_kernel,
noise_injection=use_noise_injection
))
self.convs.append(StyledConv(
out_channel,
out_channel,
kernel_size,
self.style_dim,
blur_kernel=blur_kernel,
noise_injection=use_noise_injection))
elif transform_type == 'linear':
self.convs.append(StyledLinear(in_channel, out_channel, self.style_dim, modulation_type, factorization_ranks[res]))
if linear_conv_block:
self.convs.append(StyledConv(
out_channel,
out_channel,
3,
self.style_dim,
blur_kernel=blur_kernel,
noise_injection=use_noise_injection))
else:
self.convs.append(StyledLinear(out_channel, out_channel, self.style_dim, modulation_type, factorization_ranks[res]))
else:
raise NotImplementedError(f'Unknown transform type: {transform_type}')
self.to_rgbs.append(ToRGB(out_channel, self.style_dim, upsample=use_upsample))
in_channel = out_channel
self.n_latent = self.log_size * 2 - 2
def forward(
self,
styles,
return_latents=False,
inject_index=None,
truncation: float=1.0,
truncation_latent=None,
input_is_latent=False,
noise=None,
randomize_noise=True,
):
if isinstance(styles, torch.Tensor):
styles = [styles]
if not input_is_latent:
styles = [self.style(s) for s in styles]
if noise is None:
if randomize_noise:
noise = [None] * self.num_layers
else:
noise = [getattr(self.noises, f"noise_{i}") for i in range(self.num_layers)]
if truncation < 1.0:
style_t = []
for style in styles:
style_t.append(truncation_latent + truncation * (style - truncation_latent))
styles = style_t
if len(styles) < 2:
inject_index = self.n_latent
if styles[0].ndim < 3:
latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
else:
latent = styles[0]
else:
if inject_index is None:
inject_index = random.randint(1, self.n_latent - 1)
latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
latent2 = styles[1].unsqueeze(1).repeat(1, self.n_latent - inject_index, 1)
latent = torch.cat([latent, latent2], 1)
out = self.input(latent)
out = self.conv1(out, latent[:, 0], noise=noise[0])
skip = self.to_rgb1(out, latent[:, 1])
i = 1
for conv1, conv2, noise1, noise2, to_rgb, coord_concat in zip(
self.convs[::2], self.convs[1::2], noise[1::2], noise[2::2], self.to_rgbs, self.coord_concats):
if self.is_multiscale:
out = F.interpolate(out, scale_factor=2, mode=self.interpolation_type) # [batch_size, c, h * 2, w * 2]
skip = F.interpolate(skip, scale_factor=2, mode=self.interpolation_type) # [batch_size, 3, h * 2, w * 2]
out = coord_concat(out, latent[:, i]) # [batch_size, c + emb_dim, h, w]
out = conv1(out, latent[:, i], noise=noise1) # [batch_size, c, h, w]
out = conv2(out, latent[:, i + 1], noise=noise2) # [batch_size, c, h, w]
skip = to_rgb(out, latent[:, i + 2], skip) # [batch_size, 3, h * 2, w * 2]
i += 2
image = skip
if return_latents:
return image, latent
else:
return image, None
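
# A hedged construction sketch (illustrative sizes; assumes the fallback
# StyleGAN2-like path, which avoids the INR-specific factorization ranks,
# and that the sibling .conv/.linear/.style/.torgb modules are importable).
def _ms_inr_generator_example():
    import torch
    g = MSINRGenerator(fallback=True, size=64, style_dim=512, n_mlp=2)
    z = torch.randn(2, 512)
    img, _ = g([z])  # expected: a (2, 3, 64, 64) image batch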
# ---- moodledata/vpl_data/137/usersdata/218/47092/submittedfiles/Maratona.py | repo: rafaelperazzo/programacao-web | license: none ----
# -*- coding: utf-8 -*-
N = int(input('enter the number of posts on the course: '))
M = int(input('enter the maximum reach distance between posts: '))
# Track the lowest and highest post positions along the 42195 m course.
maxima = 0
minima = 42195
cont = 0
for i in range(0, N, 1):
    p = int(input('enter the position of the post: '))
    if p < minima:
        minima = p
    if p > maxima:
        maxima = p
# 'S' when the spread between the extreme posts stays within reach M.
if maxima - minima > M:
    cont = 1
if cont == 0:
    print('S')
else:
    print('N')
# ---- solutions_python/Problem_118/2306.py | repo: dr-dos-ok/Code_Jam_Webscraper | license: none ----
import sys
import math
def is_palindrome(number):
    # True when the decimal digits read the same forwards and backwards.
    return str(number) == str(number)[::-1]
def make_work(input='input.txt', output='output.txt'):
file_in = open(input)
cases_number = int(file_in.readline().strip())
for n in xrange(cases_number):
case_number = n + 1
a, b = map(int, file_in.readline().split(' '))
sq_a, sq_b = map(math.sqrt, (a, b))
sq_a, sq_b = map(int, (math.ceil(sq_a), math.floor(sq_b)))
fair_square = 0
        # count squares x*x in [a, b] where both x and x*x are palindromes
        for x in xrange(sq_a, sq_b + 1):
            if is_palindrome(x) and is_palindrome(x*x):
                fair_square += 1
print "Case #%s: %s" % (case_number, fair_square)
if len(sys.argv) >= 2:
make_work(input=sys.argv[1])
else:
make_work()
# ---- cheshire3/lucene/indexStore.py | repo: bitwhite/cheshire3 | licenses: ICU, X11 (permissive) ----
from cheshire3.baseObjects import IndexStore
from cheshire3.lucene.utils import NullC3Analyzer, C3TokenStream, cqlToLucene
from cheshire3.resultSet import SimpleResultSet, SimpleResultSetItem
import lucene
class LuceneIndexStore(IndexStore):
def __init__(self, session, config, parent):
IndexStore.__init__(self, session, config, parent)
path = self.get_path(session, 'defaultPath')
self.analyzer = NullC3Analyzer()
self.dir = lucene.FSDirectory.getDirectory(path, False)
self.parser = lucene.QueryParser("", lucene.StandardAnalyzer())
self.searcher = lucene.IndexSearcher(self.dir)
self.writer = None
self.currDoc = None
self.currRec = None
def create_index(self, session, index):
# created in begin_indexing()
pass
def begin_indexing(self, session, index):
# will append if exists, or create if not
if not self.writer:
self.writer = lucene.IndexWriter(self.dir, self.analyzer, lucene.IndexWriter.MaxFieldLength.UNLIMITED)
def commit_indexing(self, session, index):
if self.currDoc:
self.writer.addDocument(self.currDoc)
self.currDoc = None
elif self.writer:
self.writer.optimize()
self.writer.close()
self.writer = None
print "called commit"
    def store_terms(self, session, index, terms, rec):
        strm = C3TokenStream(terms)
        if rec != self.currRec:
            # New record: flush the document built for the previous record,
            # then start a fresh one that stores the record identifier.
            if self.currDoc:
                self.writer.addDocument(self.currDoc)
            doc = lucene.Document()
            self.currDoc = doc
            self.currRec = rec
            doc.add(lucene.Field(index.id, strm))
            doc.add(lucene.Field('id', str(rec),
                                 lucene.Field.Store.YES,
                                 lucene.Field.Index.UN_TOKENIZED))
        else:
            # Same record: append this index's terms to the current document.
            self.currDoc.add(lucene.Field(index.id, strm))
def search(self, session, query, db):
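        """
        Resolve a CQL query against the Lucene index.

        Summary of the flow below: the CQL query is rewritten to Lucene
        query syntax via cqlToLucene, parsed and executed against the shared
        IndexSearcher, and each hit's stored 'id' field ("recStore/recId") is
        unpacked into a SimpleResultSetItem that carries the hit score as its
        weight.
        """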
# take CQL query and translate to Lucene
pm = db.get_path(session, 'protocolMap')
if not pm:
db._cacheProtocolMaps(session)
pm = db.protocolMaps.get('http://www.loc.gov/zing/srw/')
query.config = pm
lq = cqlToLucene(session, query, pm)
q = self.parser.parse(lq)
results = self.searcher.search(q, lucene.Sort.RELEVANCE)
# now map to a ResultSet
items = []
for x in range(len(results)):
hit = results[x]
w = results.score(x)
rsid = hit.getField('id').stringValue()
(recStore, id) = rsid.split('/')
if id.isdigit():
id = int(id)
rsi = SimpleResultSetItem(session, id, recStore, weight=w)
items.append(rsi)
rs = SimpleResultSet(session, items)
return rs
def index_record(self, session, rec):
pass
def delete_record(self, session, rec):
pass
def fetch_term(self, session, term, summary, prox):
pass
def fetch_summary(self, session, index):
raise NotImplementedError()
# ---- runs/sim-study/configs/test-sim-6-8-6/tsne/viz.py | repo: luiarthur/CytofRepFAM.jl | license: MIT (permissive) ----
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import os
import sys
sys.path.append('../../../../PlotUtils')
import plot_yz
def graph_tsne(tsne_df, clust, i, method, outpath, method_suffix=''):
if clust is None:
tsne_df[method] = tsne_df[method].astype(int)
else:
tsne_df[method] = clust.astype(int)
mask_i = (tsne_df.sample_ind == i)
df = tsne_df[mask_i]
markersize = 15
sns.pairplot(x_vars="tsne1", y_vars="tsne2", data=df,
hue=method,
plot_kws=dict(linewidth=0, s=markersize),
aspect=1, height=3)
plt.savefig(outpath, bbox_inches="tight")
plt.close()
def get_data_dict(tsne_df, y_header='Y', marker_header='M',
sample_ind_name='sample_ind',
true_labels_name='true_labels'):
y_columns = filter(lambda x: x.startswith(y_header), tsne_df.columns)
Y = tsne_df[y_columns].to_numpy()
m_columns = filter(lambda x: x.startswith(marker_header), tsne_df.columns)
M = tsne_df[m_columns].to_numpy() == 1
Y[M] = np.nan
sample_ind = tsne_df[sample_ind_name].to_numpy().astype(int)
true_labels = tsne_df[true_labels_name].to_numpy().astype(int)
return dict(Y=Y, M=M, sample_ind=sample_ind, true_labels=true_labels)
def make_heatmap(data_dict, clust, i, outpath):
if clust is None:
clust = data_dict['true_labels']
else:
clust = clust.astype(int)
K = clust.max()
wi_mean = np.zeros(K)
lami = clust[data_dict['sample_ind'] == i]
for k in range(K):
wi_mean[k] = (lami == k + 1).mean()
yi = data_dict['Y'][data_dict['sample_ind'] == i]
plt.figure(figsize=(6, 6))
plot_yz.plot_y(yi, wi_mean, lami, vlim=(-3, 3),
cm=plot_yz.blue2red.cm(5),
fs_lab=15, fs_cbar=15, lw=3, fs_xlab=15, fs_ylab=15,
interpolation='nearest')
plt.savefig(outpath, bbox_inches="tight")
plt.close()
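
# A small self-contained illustration of the per-cluster weight vector
# computed inside make_heatmap (the toy labels below are illustrative only):
# w[k] is the fraction of cells assigned to cluster k + 1.
def _cluster_proportion_example():
    lami = np.array([1, 1, 2, 3, 3, 3])
    K = lami.max()
    w = np.array([(lami == k + 1).mean() for k in range(K)])
    assert np.isclose(w.sum(), 1.0)  # proportions sum to one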
if __name__ == "__main__":
path_to_csv = 'viz/csv'
# methods = ['mclust', 'flowsom', 'rfam']
methods = ['mclust', 'flowsom', 'true_labels']
os.makedirs('viz/img', exist_ok=True)
method = methods[0]
for pmiss in [0.0, 0.2]:
for zind in [1, 2, 3]:
simname = f'pmiss{pmiss}-phi0-zind{zind}'
tsne_path = f'{path_to_csv}/tsne-{simname}.csv'
tsne_df = pd.read_csv(tsne_path)
data_dict = get_data_dict(tsne_df)
for method in methods:
if method == 'true_labels':
clust = None
else:
clust_path = f'{path_to_csv}/{method}-{simname}.csv'
print(clust_path)
clust = np.loadtxt(clust_path)
for i in range(2):
outpath = f'viz/img/tsne-{method}{i + 1}-{simname}.pdf'
graph_tsne(tsne_df, clust, i + 1, method, outpath)
heatmap_outpath = f'viz/img/heatmap-{method}{i + 1}-{simname}.pdf'
make_heatmap(data_dict, clust, i + 1, heatmap_outpath)
# rfam
method = "rfam"
for phi in [0, 1, 10, 25, 100]: # NOTE: mind this!
clust_simname = f'pmiss{pmiss}-phi{phi}-zind{zind}'
clust_path = f'{path_to_csv}/{method}-{clust_simname}.csv'
print(clust_path)
clust = np.loadtxt(clust_path)
tsne_path = f'{path_to_csv}/tsne-{simname}.csv'
tsne_df = pd.read_csv(tsne_path)
for i in range(2):
outpath = f'viz/img/tsne-{method}{i + 1}-{clust_simname}.pdf'
graph_tsne(tsne_df, clust, i + 1, method, outpath,
method_suffix=f'phi={phi}')
heatmap_outpath = f'viz/img/heatmap-{method}{i + 1}-{clust_simname}.pdf'
make_heatmap(data_dict, clust, i + 1, heatmap_outpath)
# ---- yq/operators/subsequence.py | repo: abesto/yq | license: none ----
from yq.operators.base import Operator
class Subsequence(Operator):
def __init__(self, low, hi):
if low is None:
self.low = low
else:
self.low = int(low)
if hi is None:
self.hi = None
else:
self.hi = int(hi)
def _apply_item(self, data):
try:
return data[self.low:self.hi]
except IndexError:
return None
def __repr__(self):
return '[%s:%s]' % (self.low, self.hi)
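
# Hedged usage sketch, assuming _apply_item is the per-item hook invoked by
# the Operator base class; the inputs below are illustrative.
def _subsequence_example():
    op = Subsequence(1, 3)
    assert op._apply_item([10, 20, 30, 40]) == [20, 30]
    assert op._apply_item('hello') == 'el'
    assert repr(op) == '[1:3]'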
# ---- rig/mocapOverride.py | repo: rituparna/glTools | license: MIT (permissive) ----
import maya.cmds as mc
import maya.mel as mm
import glTools.rig.utils
import glTools.tools.constraint
import glTools.utils.attribute
import glTools.utils.base
import glTools.utils.channelState
import glTools.utils.constraint
import glTools.utils.joint
import glTools.utils.stringUtils
import os.path
import types
def overrideAttribute():
'''
Return the control override toggle attribute name.
'''
return 'enableMocapOverride'
def overrideOffsetAttribute():
'''
Return the control override offset attribute name.
'''
return 'mocapOverrideOffset'
def buildOverrideTransform(transform,prefix=None):
'''
Build a mocap override transform from the specified input transform
@param transform: Control transform to create override transforms from
@type transform: str
'''
# ==========
# - Checks -
# ==========
if not mc.objExists(transform):
raise Exception('Rig control transform "'+transform+'" does not exist!')
if not prefix:
prefix = glTools.utils.stringUtils.stripSuffix(transform)
# =============================
# - Create Override Transform -
# =============================
# Duplicate Transform
overrideTransform = mc.duplicate(transform,po=True,n=prefix+'_grp')[0]
# Set Display Mode
glTools.utils.base.displayOverride(overrideTransform,overrideEnable=1,overrideLOD=1)
if glTools.utils.joint.isJoint(overrideTransform):
mc.setAttr(overrideTransform+'.drawStyle',2) # None
# Delete Unused Attributes
glTools.utils.attribute.deleteUserAttrs(overrideTransform)
# =================
# - Return Result -
# =================
return overrideTransform
def getOverrideConstraint(control):
'''
Return the mocap override constraint(s) for the specified control.
@param control: The control to get the mocap override constraint for.
@type control: list
'''
# Check Control
if not mc.objExists(control):
raise Exception('Rig control transform "'+control+'" does not exist!')
# Override Enable Attribute
overrideAttr = overrideAttribute()
if not mc.objExists(control+'.'+overrideAttr):
raise Exception('OverrideEnable attribute "'+control+'.'+overrideAttr+'" does not exist!')
# Override Constraint
overrideConstraint = mc.listConnections(control+'.'+overrideAttr,s=False,d=True)
if not overrideConstraint:
raise Exception('Override constraint could not be determined from overrideEnabled attribute "'+control+'.'+overrideAttr+'"!')
overrideConstraint = mc.ls(overrideConstraint,type='constraint')
if not overrideConstraint:
raise Exception('Override constraint could not be determined from overrideEnabled attribute "'+control+'.'+overrideAttr+'"!')
# Return Result
return list(set(overrideConstraint))
def getOverrideTarget(control):
'''
Return the mocap override target for the specified control.
    @param control: The control to get the mocap override target for.
    @type control: str
'''
# Get Override Constraint
overrideConstraint = getOverrideConstraint(control)
# Get Override Target
overrideTarget = glTools.utils.constraint.targetList(overrideConstraint[0])
if not overrideTarget:
raise Exception('Unable to determine override target transform from constraint "'+overrideConstraint[0]+'"!')
# Return Result
return overrideTarget[0]
def getOverrideTargetNew(control):
'''
Return the mocap override target for the specified control.
    @param control: The control to get the mocap override target for.
    @type control: str
'''
# Get Override Target
overrideTargetAttr = 'mocapOverrideTarget'
if not mc.attributeQuery(overrideTargetAttr,n=control,ex=True):
raise Exception('Unable to determine override target transform! Attribute "'+control+'.'+overrideTargetAttr+'" does not exist.')
overrideTarget = mc.listConnections(control+'.'+overrideTargetAttr,s=True,d=False)
if not overrideTarget:
raise Exception('Unable to determine override target transform! Attribute "'+control+'.'+overrideTargetAttr+'" has no incoming connections.')
# Return Result
return overrideTarget[0]
def createControlOverride(transform,prefix=None):
'''
Create mocap anim override transforms for the specified control transform.
@param transform: Control transform to create mocap override transforms for
@type transform: str
@param prefix: Name prefix for override nodes created by function. If empty, prefix is taken from the input transform name.
@type prefix: str
'''
# ==========
# - Checks -
# ==========
if not mc.objExists(transform):
raise Exception('Rig control transform "'+transform+'" does not exist!')
if not prefix:
prefix = glTools.utils.stringUtils.stripSuffix(transform)
# ======================================
# - Create Control Override Transforms -
# ======================================
overrideTarget = buildOverrideTransform(transform,prefix=prefix+'_overrideTarget')
overrideTransform = buildOverrideTransform(transform,prefix=prefix+'_overrideTransform')
# Set Channel States
channelState = glTools.utils.channelState.ChannelState()
channelState.setFlags([0,0,0,0,0,0,2,2,2,1],objectList=[overrideTarget])
channelState.setFlags([1,1,1,1,1,1,2,2,2,1],objectList=[overrideTransform])
channelState.set(1,[overrideTarget,overrideTransform])
# Parent Control to Override Transform
mc.parent(transform,overrideTransform)
# ======================================
# - Create Control Override Constraint -
# ======================================
# Create ParentConstraint
overrideConstraint = mc.parentConstraint(overrideTarget,overrideTransform,n=prefix+'_overrideTransform_parentConstraint')[0]
overrideAlias = mc.parentConstraint(overrideConstraint,q=True,wal=True)
# Reset Rest Values
mc.setAttr(overrideConstraint+'.restTranslate',0,0,0)
mc.setAttr(overrideConstraint+'.restRotate',0,0,0)
# ==========================
# - Create Override Toggle -
# ==========================
# Create Toggle Attribute
overrideAttr = overrideAttribute()
if mc.objExists(transform+'.'+overrideAttr):
mc.deleteAttr(transform+'.'+overrideAttr)
mc.addAttr(transform,ln=overrideAttr,at='bool')
mc.setAttr(transform+'.'+overrideAttr,False)
# Connect Toggle Attribute
mc.connectAttr(transform+'.'+overrideAttr,overrideConstraint+'.'+overrideAlias[0],f=True)
# ==========================
# - Create Override Offset -
# ==========================
offsetAttr = overrideOffsetAttribute()
if mc.objExists(transform+'.'+offsetAttr):
mc.deleteAttr(transform+'.'+offsetAttr)
mc.addAttr(transform,ln=offsetAttr,at='double3')
mc.addAttr(transform,ln=offsetAttr+'X',at='double',p=offsetAttr,dv=0)
mc.addAttr(transform,ln=offsetAttr+'Y',at='double',p=offsetAttr,dv=0)
mc.addAttr(transform,ln=offsetAttr+'Z',at='double',p=offsetAttr,dv=0)
# Connect Offset
mc.connectAttr(transform+'.'+offsetAttr,overrideConstraint+'.target[0].targetOffsetTranslate',f=True)
# =================
# - Return Result -
# =================
result = {}
result['overrideTarget'] = overrideTarget
result['overrideTransform'] = overrideTransform
result['overrideConstraint'] = overrideConstraint
return result
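def _createControlOverrideExample():
    '''
    Hedged usage sketch. The control and joint names below are illustrative
    placeholders, not nodes from any particular rig; requires a Maya session.
    '''
    nodes = createControlOverride('lf_arm_ctrl', prefix='lf_arm')
    # Drive the override target from a mocap joint, then enable the override.
    mc.parentConstraint('mocap_lf_arm_jnt', nodes['overrideTarget'], mo=True)
    mc.setAttr('lf_arm_ctrl.' + overrideAttribute(), True)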
def createControlOverrideNew(transform,prefix=''):
'''
Create mocap anim override transforms for the specified control transform.
@param transform: Control transform to create mocap override transforms for
@type transform: str
@param prefix: Name prefix for override nodes created by function. If empty, prefix is taken from the input transform name.
@type prefix: str
'''
# ==========
# - Checks -
# ==========
if not mc.objExists(transform):
raise Exception('Rig control transform "'+transform+'" does not exist!')
if not prefix:
prefix = glTools.utils.stringUtils.stripSuffix(transform)
# ======================================
# - Create Control Override Transforms -
# ======================================
overrideTarget = buildOverrideTransform(transform,prefix=prefix+'_overrideTarget')
overrideTransform = buildOverrideTransform(transform,prefix=prefix+'_overrideTransform')
# Parent Control to Override Transform
mc.parent(transform,overrideTransform)
# ==========================
# - Create Override Toggle -
# ==========================
# Create Toggle Attribute
overrideAttr = overrideAttribute()
if mc.objExists(transform+'.'+overrideAttr):
mc.deleteAttr(transform+'.'+overrideAttr)
mc.addAttr(transform,ln=overrideAttr,at='enum',en='Off:On')
mc.setAttr(transform+'.'+overrideAttr,k=False,cb=True)
mc.setAttr(transform+'.'+overrideAttr,False)
# Mocap Override Message Connection
overrideTargetAttr = 'mocapOverrideTarget'
if mc.objExists(transform+'.'+overrideTargetAttr):
mc.deleteAttr(transform+'.'+overrideTargetAttr)
mc.addAttr(transform,ln=overrideTargetAttr,at='message')
mc.connectAttr(overrideTarget+'.message',transform+'.'+overrideTargetAttr,f=True)
# ===================================
# - Create Control Override Network -
# ===================================
# Create Override Conditions
overrideTranslate = mc.createNode('condition',n=prefix+'_overrideTranslate_condition')
overrideRotate = mc.createNode('condition',n=prefix+'_overrideRotate_condition')
# Override Translate
baseTranslate = mc.getAttr(overrideTransform+'.t')[0]
mc.setAttr(overrideTranslate+'.colorIfFalse',*baseTranslate,l=True,cb=False)
mc.connectAttr(overrideTarget+'.t',overrideTranslate+'.colorIfTrue',f=True)
mc.setAttr(overrideTranslate+'.colorIfTrue',l=True,cb=False)
mc.setAttr(overrideTranslate+'.operation',0,l=True,cb=False) # Equal
mc.setAttr(overrideTranslate+'.secondTerm',1,l=True,cb=False)
mc.connectAttr(transform+'.'+overrideAttr,overrideTranslate+'.firstTerm',f=True)
mc.setAttr(overrideTranslate+'.firstTerm',l=True,cb=False)
# Override Rotate
baseRotate = mc.getAttr(overrideTransform+'.r')[0]
mc.setAttr(overrideRotate+'.colorIfFalse',*baseRotate,l=True,cb=False)
    mc.connectAttr(overrideTarget+'.r',overrideRotate+'.colorIfTrue',f=True)  # feed rotation (not translation) into the rotate condition
mc.setAttr(overrideRotate+'.colorIfTrue',l=True,cb=False)
mc.setAttr(overrideRotate+'.secondTerm',1,l=True,cb=False)
mc.setAttr(overrideRotate+'.operation',0,l=True,cb=False) # Equal
mc.connectAttr(transform+'.'+overrideAttr,overrideRotate+'.firstTerm',f=True)
mc.setAttr(overrideRotate+'.firstTerm',l=True,cb=False)
# ==================
# - Channel States -
# ==================
chanState = glTools.utils.channelState.ChannelState()
chanState.setFlags([0,0,0,0,0,0,2,2,2,1],objectList=[overrideTarget])
chanState.setFlags([2,2,2,2,2,2,2,2,2,1],objectList=[overrideTransform])
#chanState.set(1,[overrideTarget,overrideTransform])
# =================
# - Return Result -
# =================
result = {}
result['overrideTarget'] = overrideTarget
result['overrideTransform'] = overrideTransform
result['overrideTranslate'] = overrideTranslate
result['overrideRotate'] = overrideRotate
return result
def constrainControlOverrideTarget( control,
constraintTarget,
constraintType = 'parent',
maintainOffset = False,
skipTranslate = [],
skipRotate = [],
interpType = None,
prefix = '' ):
'''
Constrain the override target transform of the specified control to a given target transform.
This function can be used to constrain rig controls to a mocap driven skeleton.
    @param control: The control that will have its override target transform constrained to the specified target transform.
@type control: str
    @param constraintTarget: The target transform that the override target transform will be constrained to.
@type constraintTarget: str
@param constraintType: The constraint type to apply to the override target transform.
@type constraintType: str
    @param maintainOffset: Initialize the constraint offset necessary for the slave transform to maintain its current position and orientation.
@type maintainOffset: bool
@param skipTranslate: List the translate channels to leave unaffected by the constraint.
@type skipTranslate: list
@param skipRotate: List the rotate channels to leave unaffected by the constraint.
@type skipRotate: list
@param interpType: Orientation interpolation type. "average", "shortest", "longest"
@type interpType: str or None
@param prefix: Name prefix for override nodes created by the function. If empty, prefix is taken from the input control name.
@type prefix: str
'''
# ==========
# - Checks -
# ==========
# Control
if not mc.objExists(control):
raise Exception('Rig control transform "'+control+'" does not exist!')
# Constraint Target
if isinstance(constraintTarget,types.StringTypes):
if not mc.objExists(constraintTarget):
raise Exception('Override transform target "'+constraintTarget+'" does not exist!')
elif isinstance(constraintTarget,types.ListType):
for target in constraintTarget:
if not mc.objExists(target):
raise Exception('Override transform target "'+target+'" does not exist!')
else:
raise Exception('Invalid argument type for constraintTarget!')
# Constraint Type
if not constraintType in ['point','orient','parent']:
raise Exception('Unsupported constraint type "'+constraintType+'"!')
# Override Enable Attribute
overrideAttr = overrideAttribute()
if not mc.objExists(control+'.'+overrideAttr):
raise Exception('OverrideEnable attribute "'+control+'.'+overrideAttr+'" does not exist!')
# Override Constraint
overrideConstraint = mc.listConnections(control+'.'+overrideAttr,s=False,d=True)
if not overrideConstraint:
raise Exception('Override constraint could not be determined from overrideEnabled attribute "'+control+'.'+overrideAttr+'"!')
overrideConstraint = mc.ls(overrideConstraint,type='constraint')
if not overrideConstraint:
raise Exception('Override constraint could not be determined from overrideEnabled attribute "'+control+'.'+overrideAttr+'"!')
# Override Target
overrideTarget = glTools.utils.constraint.targetList(overrideConstraint[0])
if not overrideTarget:
raise Exception('Unable to determine override target transform from constraint "'+overrideConstraint[0]+'"!')
# InterpType
interpIndex = {'average':1,'shortest':2,'longest':3}
if constraintType == 'parent' or constraintType == 'orient':
if interpType and not interpType in interpIndex.keys():
raise Exception('Invalid interpolation type "'+interpType+'"!')
# Prefix
if not prefix:
prefix = glTools.utils.stringUtils.stripSuffix(control)
# =====================================
# - Create Override Target Constraint -
# =====================================
overrideTargetConstraint = ''
# Create pointConstraint
if constraintType == 'point':
overrideTargetConstraint = mc.pointConstraint( constraintTarget,
overrideTarget[0],
mo = maintainOffset,
sk = skipTranslate,
n = prefix+'_overrideTarget_pointConstraint' )[0]
# Create orientConstraint
elif constraintType == 'orient':
overrideTargetConstraint = mc.orientConstraint( constraintTarget,
overrideTarget[0],
mo = maintainOffset,
sk = skipRotate,
n = prefix+'_overrideTarget_orientConstraint' )[0]
# Interp Type
if interpType: mc.setAttr(overrideTargetConstraint+'.interpType',interpIndex[interpType])
# Create parentConstraint
elif constraintType == 'parent':
overrideTargetConstraint = mc.parentConstraint( constraintTarget,
overrideTarget[0],
mo = maintainOffset,
st = skipTranslate,
sr = skipRotate,
n = prefix+'_overrideTarget_parentConstraint' )[0]
# Interp Type
if interpType: mc.setAttr(overrideTargetConstraint+'.interpType',interpIndex[interpType])
# Unsupported Constraint Type
else:
raise Exception('Unsupported constraint type "'+constraintType+'"!')
# Enable overrideConstraint
mc.setAttr(control+'.'+overrideAttr,True)
# =================
# - Return Result -
# =================
return overrideTargetConstraint
def constrainControlToTarget( control,
constraintTarget,
constraintType = 'parent',
maintainOffset = False,
skipTranslate = [],
skipRotate = [],
interpType = None,
prefix = '' ):
'''
	Constrain the specified control to a given target transform.
	This function can be used to constrain rig controls to a mocap-driven skeleton.
@param control: The control that will be constrainted to the specified target transform.
@type control: str
@param constraintTarget: The target transform that the control transform will be constrainted to.
@type constraintTarget: str
@param constraintType: The constraint type to apply to the control transform.
@type constraintType: str
	@param maintainOffset: Initialize the constraint offset necessary for the slave transform to maintain its current position and orientation.
@type maintainOffset: bool
@param skipTranslate: List the translate channels to leave unaffected by the constraint.
@type skipTranslate: list
@param skipRotate: List the rotate channels to leave unaffected by the constraint.
@type skipRotate: list
@param interpType: Orientation interpolation type. "average", "shortest", "longest"
@type interpType: str or None
@param prefix: Name prefix for override nodes created by the function. If empty, prefix is taken from the input control name.
@type prefix: str
'''
# ==========
# - Checks -
# ==========
# Control
if not mc.objExists(control):
raise Exception('Rig control transform "'+control+'" does not exist!')
# Constraint Target
if isinstance(constraintTarget,types.StringTypes):
if not mc.objExists(constraintTarget):
raise Exception('Override transform target "'+constraintTarget+'" does not exist!')
elif isinstance(constraintTarget,types.ListType):
for target in constraintTarget:
if not mc.objExists(target):
raise Exception('Override transform target "'+target+'" does not exist!')
# Constraint Type
if not constraintType in ['point','orient','parent']:
raise Exception('Unsupported constraint type "'+constraintType+'"!')
# InterpType
interpIndex = {'average':1,'shortest':2,'longest':3}
if constraintType == 'parent' or constraintType == 'orient':
if interpType and not interpType in interpIndex.keys():
raise Exception('Invalid interpolation type "'+interpType+'"!')
# Prefix
if not prefix:
prefix = glTools.utils.stringUtils.stripSuffix(control)
# =====================================
# - Create Override Target Constraint -
# =====================================
overrideCtrlConstraint = None
# Create pointConstraint
if constraintType == 'point':
try:
overrideCtrlConstraint = mc.pointConstraint( constraintTarget,
control,
mo = maintainOffset,
sk = skipTranslate,
n = prefix+'_overrideCtrl_pointConstraint' )[0]
except Exception, e:
raise Exception('Error creating point constraint for control "'+control+'"! '+str(e))
# Create orientConstraint
elif constraintType == 'orient':
try:
overrideCtrlConstraint = mc.orientConstraint( constraintTarget,
control,
mo = maintainOffset,
sk = skipRotate,
n = prefix+'_overrideCtrl_orientConstraint' )[0]
except Exception, e:
raise Exception('Error creating orient constraint for control "'+control+'"! '+str(e))
# Interp Type
if interpType: mc.setAttr(overrideCtrlConstraint+'.interpType',interpIndex[interpType])
# Create parentConstraint
elif constraintType == 'parent':
try:
attrList = ['tx','ty','tz','rx','ry','rz']
if skipTranslate: [attrList.remove('t'+i) for i in skipTranslate if 't'+i in attrList]
if skipRotate: [attrList.remove('r'+i) for i in skipRotate if 'r'+i in attrList]
overrideCtrlConstraint = glTools.tools.constraint.parentConstraint( master = constraintTarget,
slave = control,
mo = maintainOffset,
attrList = attrList )
except Exception, e:
raise Exception('Error creating parent constraint for control "'+control+'"! '+str(e))
# Interp Type
if interpType: mc.setAttr(overrideCtrlConstraint+'.interpType',interpIndex[interpType])
# =================
# - Return Result -
# =================
return overrideCtrlConstraint
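# Illustrative usage sketch (node names are hypothetical, not from a real scene):
# constrainControlToTarget( control = 'cn_spine01_ctrl',
#                           constraintTarget = 'mocap:Spine1',
#                           constraintType = 'parent',
#                           maintainOffset = True,
#                           interpType = 'shortest' )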
def bakeControlOverrideTarget(controlList,start=None,end=None,bakeSim=True):
'''
Bake control override target constraint to transform channel keys.
@param controlList: The control list that will have its override target transform constraints baked to keyframes.
@type controlList: list
	@param start: Start frame of the bake animation range. If greater than end, use current playback settings.
@type start: float
	@param end: End frame of the bake animation range. If less than start, use current playback settings.
@type end: float
@param bakeSim: Bake results using simulation option which updates the entire scene at each bake sample.
@type bakeSim: bool
'''
	print('!!==== DEPRECATED ====!! (glTools.rig.mocapOverride.bakeControlOverrideTarget)')
# ==========
# - Checks -
# ==========
# Start/End
	if start is None: start = mc.playbackOptions(q=True,min=True)
	if end is None: end = mc.playbackOptions(q=True,max=True)
# For Each Control
overrideTargetList = []
overrideConstraintList = []
for control in controlList:
# Control
if not mc.objExists(control):
raise Exception('Rig control transform "'+control+'" does not exist!')
# Override Enable Attribute
overrideAttr = overrideAttribute()
if not mc.objExists(control+'.'+overrideAttr):
raise Exception('OverrideEnable attribute "'+control+'.'+overrideAttr+'" does not exist!')
# Override Constraint
overrideConstraint = mc.listConnections(control+'.'+overrideAttr,s=False,d=True)
if not overrideConstraint:
raise Exception('Override constraint could not be determined from overrideEnabled attribute "'+control+'.'+overrideAttr+'"!')
overrideConstraint = mc.ls(overrideConstraint,type='constraint')
if not overrideConstraint:
raise Exception('Override constraint could not be determined from overrideEnabled attribute "'+control+'.'+overrideAttr+'"!')
# Override Target
overrideTarget = glTools.utils.constraint.targetList(overrideConstraint[0])
if not overrideTarget:
raise Exception('Unable to determine override target transform from constraint "'+overrideConstraint[0]+'"!')
# Override Target Constraint
overrideTargetConstraint = mc.ls(mc.listConnections(overrideTarget[0],s=True,d=False),type='constraint')
if not overrideTargetConstraint:
# Check PairBlend (intermediate) Connection
# - This fix was made in preparation for baking keys from multiple mocap sources (bake in frame chunks).
overrideTargetPairBlend = mc.ls(mc.listConnections(overrideTarget[0],s=True,d=False),type='pairBlend')
if overrideTargetPairBlend:
overrideTargetConstraint = mc.ls(mc.listConnections(overrideTargetPairBlend,s=True,d=False),type='constraint')
if not overrideTargetConstraint:
print('Unable to determine override target constraint from override target transform "'+overrideTarget[0]+'"!')
continue
# Append to Override Target List
overrideTargetList.append(overrideTarget[0])
# Append to Override Constraint List
[overrideConstraintList.append(i) for i in overrideTargetConstraint if not i in overrideConstraintList]
# =================================
# - Bake Override Target Channels -
# =================================
mc.bakeResults( overrideTargetList,
t = (start,end),
at = ['tx','ty','tz','rx','ry','rz'],
preserveOutsideKeys = True,
simulation = bakeSim )
# ======================
# - Delete Constraints -
# ======================
mc.delete(overrideConstraintList)
def bakeControlOverride(controlList,start=None,end=None,bakeSim=True):
'''
Bake control constraint to transform channel keys.
@param controlList: The control list that will have its constraints baked to keyframes.
@type controlList: list
	@param start: Start frame of the bake animation range. If greater than end, use current playback settings.
@type start: float
	@param end: End frame of the bake animation range. If less than start, use current playback settings.
@type end: float
@param bakeSim: Bake results using simulation option which updates the entire scene at each bake sample.
@type bakeSim: bool
'''
	print('!!==== DEPRECATED ====!! (glTools.rig.mocapOverride.bakeControlOverride)')
# ==========
# - Checks -
# ==========
# Start/End
	if start is None: start = mc.playbackOptions(q=True,min=True)
	if end is None: end = mc.playbackOptions(q=True,max=True)
# For Each Control
bakeControlList = []
constraintList = []
for control in controlList:
# Control
if not mc.objExists(control):
raise Exception('Rig control transform "'+control+'" does not exist!')
# Override Target Constraint
overrideConstraint = mc.ls(mc.listConnections(control+'.'+overrideAttribute(),d=True,s=False),type='constraint')
if not overrideConstraint:
# Check PairBlend (intermediate) Connection
# - This fix was made in preparation for baking keys from multiple mocap sources (bake in frame chunks).
overridePairBlend = mc.ls(mc.listConnections(control,s=True,d=False),type='pairBlend')
if overridePairBlend:
				overrideConstraint = mc.ls(mc.listConnections(overridePairBlend,s=True,d=False) or [],type='constraint')
if not overrideConstraint:
print('Unable to determine override constraint from control transform "'+control+'"!')
continue
# Append to Override Target List
bakeControlList.append(control)
# Append to Override Constraint List
[constraintList.append(i) for i in overrideConstraint if not i in constraintList]
# =================================
# - Bake Override Target Channels -
# =================================
# Check Bake Control List
if not bakeControlList:
print('Found no controls to bake! Skipping...')
return None
# Bake to Controls
mc.bakeResults( bakeControlList,
t = (start,end),
at = ['tx','ty','tz','rx','ry','rz'],
preserveOutsideKeys = True,
simulation = bakeSim )
# ======================
# - Delete Constraints -
# ======================
if constraintList: mc.delete(constraintList)
# =================
# - Return Result -
# =================
return bakeControlList
def bakeExportSkeleton(exportNS,start=1,end=0):
'''
	Bake export skeleton constraints to joint channel keyframes.
@param exportNS: The namespace of the export skeleton.
@type exportNS: str
	@param start: Start frame of the bake animation range. If greater than end, use current playback settings.
@type start: float
	@param end: End frame of the bake animation range. If less than start, use current playback settings.
@type end: float
'''
# ==========
# - Checks -
# ==========
# Start/End
if start > end:
start = mc.playbackOptions(q=True,min=True)
end = mc.playbackOptions(q=True,max=True)
# Namespace
if not exportNS:
raise Exception('An export namespace must be specified')
exportNS = exportNS+':'
if not mc.namespace(exists=':'+exportNS):
raise Exception('Export namespace "'+exportNS+'" does not exist!')
# ======================
# - Bake Export Joints -
# ======================
jointList = mc.ls(exportNS+'*',type='joint')
bakeList = [i for i in jointList if mc.ls(mc.listConnections(i,s=True,d=False),type='constraint')]
mc.bakeResults(bakeList,t=(start,end),at=['tx','ty','tz','rx','ry','rz','sx','sy','sz'],simulation=True)
# ======================
# - Delete Constraints -
# ======================
constraintList = mc.ls(exportNS+'*',type='constraint')
if not constraintList:
		raise Exception('Unable to determine export skeleton constraint list!')
mc.delete(constraintList)
# =================
# - Return Result -
# =================
return bakeList
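# Illustrative usage sketch (namespace is hypothetical); the function bakes the
# export joints and then deletes the export constraints itself:
# bakeExportSkeleton('exportChar', start=1, end=100)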
def fbxExportSkeleton(exportNS,exportPath,force=False):
'''
Export an animated skeleton as FBX (v2009) to the specified file path.
@param exportNS: Namespace of the skeleton to be exported.
@type exportNS: str
@param exportPath: The destination file path.
@type exportPath: str
@param force: Force save if file already exists. (Overwrite).
@type force: bool
'''
# ==========
# - Checks -
# ==========
# Namespace
if not exportNS:
raise Exception('An export namespace must be specified')
exportNS = exportNS+':'
if not mc.namespace(exists=':'+exportNS):
raise Exception('Export namespace "'+exportNS+'" does not exist!')
# Check Directory Path
exportDir = os.path.dirname(exportPath)
if not os.path.isdir(exportDir):
if force:
os.makedirs(exportDir)
else:
raise Exception('Export directory "'+exportDir+'" does not exist! Use "force=True" to automatically create the required directory structure.')
# Check File Extension
if not os.path.splitext(exportPath)[-1].lower() == '.fbx':
exportPath += '.fbx'
# Check File Path
if os.path.isfile(exportPath) and not force:
		raise Exception('File "'+exportPath+'" already exists! Use "force=True" to overwrite the existing file.')
# ==========
# - Export -
# ==========
# Define export selection
exportList = mc.ls(exportNS+'*',type='joint')
if not exportList:
raise Exception('Export namespace "'+exportNS+'" is empty!')
mc.select(exportList)
# Set FBX export options
mm.eval('FBXExportFileVersion "FBX200900"')
mm.eval('FBXExportInAscii -v 1')
mm.eval('FBXExportSkins -v 1')
mm.eval('FBXExportDxfDeformation -v 1')
mm.eval('FBXExportSmoothMesh -v 1')
mm.eval('FBXExportCameras -v 0')
mm.eval('FBXExportLights -v 0')
mm.eval('FBXExportAnimationOnly -v 0')
# Set Scale Conversion
#mm.eval('FBXExportConvertUnitString "m"')
# Export Selected Nodes to FBX file
mm.eval('FBXExport -f "'+exportPath+'" -s;')
# Clear Selection
mc.select(cl=True)
# =================
# - Return Result -
# =================
return exportPath
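# Illustrative usage sketch (namespace and path are hypothetical):
# fbxExportSkeleton('exportChar', '/jobs/shot010/anim/exportChar.fbx', force=True)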
| [
"[email protected]"
] | |
0a5b64d704824ce71bb59cd4c8f347f2cc8e081b | 0001e8b4a35ea0530cb0f6f803b896da96d06ce3 | /sommashampoosales.py | be0a16dd95394ca00a7b52f08f417ddef8f97b50 | [] | no_license | elisagiacomini/programming.lab | 7374340ed875b17702566f2e760b184a4abbb70d | 03cef1f1befa3aeb0ccc74c89f5663bdc3e50712 | refs/heads/main | 2023-01-27T17:46:24.651148 | 2020-12-01T16:03:04 | 2020-12-01T16:03:04 | 311,688,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | # Initialize an empty list to store the values
values = []
# Open and read the file, line by line
my_file = open('shampoo_sales.csv', 'r')
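# Assumed layout of shampoo_sales.csv (illustrative; only the header cell 'Date' is checked below):
# Date,Sales
# 1-01,266.0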
for line in my_file:
    # Split each row on the comma
elements = line.split(',')
    # If we are NOT processing the header...
if elements[0] != 'Date':
        # Set the date and the value
date = elements[0]
value = elements[1]
        # Append this value to the list of values
values.append(float(value))
print(values)
somma = sum(values)
print('The sum of the values in the list is: {}'.format(somma))
| [
"[email protected]"
] | |
9c633d0178aaab7a6564f7243bc00f052ac8dad1 | 55628a9a08a6b6646b4a8aa74bedbf2e3fd7d850 | /.history/master_20200121133148.py | 4d804da697adee5d30b336771852358949557519 | [] | no_license | StRobertCHSCS/final-project-team | c115dc11b318f7ac782c94860a8801bb558bd107 | 48907e72813c4dd3b48ff36f794f6fce04533219 | refs/heads/master | 2020-12-03T22:35:37.833893 | 2020-01-31T04:05:38 | 2020-01-31T04:05:38 | 231,506,873 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,791 | py | '''
- make snake longer when eaten
- FIGURE OUT HOW TO KNOW WHERE TO ADD THE NEXT BLOCK (MOVE LAST LOCATION TO BACK) - DONE
- fix player_location lists, so that the list only holds the current snake locations, not an infinite list (done)
- fix apple so it disappears when you go over it (done)
- add score
'''
import arcade
import random
# Starting screen
buttons = []
button_text = ["Noob: 0.5 speed", "Normal speed: 1", "Hard: 1.5 speed", "Expert: 2.5 speed"]
for i in range (2, 10, 2):
    my_button = [i*100, 200, 150, 50, button_text[(i // 2) - 1]] # x, y, width, height, label
buttons.append(my_button)
show_text = False
# Set how many rows and columns we will have
ROW_COUNT = 29
COLUMN_COUNT = 51
# This sets the WIDTH and HEIGHT of each grid location
WIDTH = 20
HEIGHT = 20
# This sets the margin between each cell
# and on the edges of the screen.
MARGIN = 5
# Do the math to figure out our screen dimensions
SCREEN_WIDTH = (WIDTH + MARGIN) * COLUMN_COUNT + MARGIN
SCREEN_HEIGHT = (HEIGHT + MARGIN) * ROW_COUNT + MARGIN
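# Worked example with the values above:
# SCREEN_WIDTH = (20 + 5) * 51 + 5 = 1280 px, SCREEN_HEIGHT = (20 + 5) * 29 + 5 = 730 px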
# Direction the snake is moving in
up = False
down = False
left = False
right = False
# Use snakes position shown on grid, not the python coordinates
player_x_column = 5
player_y_row = 5
# Length of the snake body
body = 1
# Current snake location
snake_pos = []
# Determine where the starting apple will be drawn in
apple_x = random.randint(0, COLUMN_COUNT - 1)
apple_y = random.randint(0, ROW_COUNT - 1)
# Boolean to see if apple needs to be moved
apple_display = True
# Background grid
grid_texture = arcade.load_texture("29x51_grid.jpg")
score = 0
page = 0
SPEED = 0
def on_update(delta_time):
snake_move()
def on_draw():
arcade.start_render()
if page == 0:
start_screen()
elif page == 1:
main_game()
print(page)
def main_game():
grid_background()
snake()
apple()
def start_screen():
global buttons
arcade.draw_text("Welcome to snake \n choose your level", 2*(SCREEN_WIDTH//5), 3*(SCREEN_HEIGHT//4), arcade.color.WHITE, 25, font_name='calibri')
for i in range (0, 4):
arcade.draw_xywh_rectangle_filled(buttons[i][0],
buttons[i][1],
buttons[i][2],
buttons[i][3],
arcade.color.WHITE)
arcade.draw_text(buttons[i][4], buttons[i][0] + (buttons[i][2] // 2), buttons[i][1] + (buttons[i][3] // 2),
arcade.color.BLACK, 15, font_name='calibri', anchor_x="center", anchor_y="center")
if show_text:
print("click")
arcade.draw_text("the button was clicked", 500, 600, arcade.color.RED, 12)
def grid_background():
arcade.draw_texture_rectangle(SCREEN_WIDTH//2, SCREEN_HEIGHT//2, grid_texture.width, grid_texture.height, grid_texture, 0)
def snake_move():
global player_x, player_y, player_x_column, player_y_row
global snake_pos
if (0 <= player_x_column < COLUMN_COUNT) and (0 <= player_y_row < ROW_COUNT):
if up:
player_y_row += 1
elif down:
player_y_row -= 1
elif right:
player_x_column += 1
elif left:
player_x_column -= 1
else:
restart()
suicide_check = []
for position in snake_pos:
if position not in suicide_check:
suicide_check.append(position)
else:
restart()
# Player coordinates
player_x = (MARGIN + WIDTH) * player_x_column + MARGIN + WIDTH // 2
player_y = (MARGIN + HEIGHT) * player_y_row + MARGIN + HEIGHT // 2
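    # Worked example: column 5 maps to (5 + 20) * 5 + 5 + 20 // 2 = 140 px,
    # i.e. the horizontal centre of the sixth cell from the left.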
def restart():
global player_x_column, player_y_row, snake_len, body, snake_pos
global up, down, left, right
player_x_column = 5
player_y_row = 5
snake_len = []
body = 1
snake_pos = []
up = False
down = False
left = False
right = False
print ("You died")
def snake():
global player_x_column, player_y_row, snake_len, body
global apple_x, apple_y
arcade.draw_rectangle_filled(player_x , player_y, WIDTH, HEIGHT, arcade.color.BLUE)
snake_len = [[player_x_column, player_y_row]]
snake_pos.append([player_x_column, player_y_row])
if body < len(snake_pos):
snake_pos.pop(0)
if (body > 1):
for num in range (1, body):
snake_len.append([snake_pos[num - 1][0], snake_pos[num - 1][1]])
for i in range (body):
arcade.draw_rectangle_filled(
(MARGIN + WIDTH) * snake_len[i][0] + MARGIN + WIDTH // 2,
(MARGIN + HEIGHT) * snake_len[i][1] + MARGIN + HEIGHT // 2 ,
WIDTH, HEIGHT, arcade.color.BLUE)
def apple():
global apple_x, apple_y, apple_x_coordinate, apple_y_coordinate, body, snake_len
global score
global SPEED
apple_x_coordinate = (MARGIN + WIDTH) * apple_x + MARGIN + WIDTH // 2
apple_y_coordinate = (MARGIN + HEIGHT) * apple_y + MARGIN + HEIGHT // 2
if (player_x_column == apple_x) and (player_y_row == apple_y):
apple_display = False
body += 1
print ("hit")
else:
apple_display = True
if apple_display is True:
arcade.draw_rectangle_filled(apple_x_coordinate, apple_y_coordinate, WIDTH, HEIGHT, arcade.color.RED)
elif apple_display is False:
        apple_x = random.randint(0, COLUMN_COUNT - 1)
        apple_y = random.randint(0, ROW_COUNT - 1)
# Make sure that apple doesn't spawn where the snake is
for apple in range (len(snake_pos)):
            if apple_x == snake_pos[apple][0] and apple_y == snake_pos[apple][1]:
                apple_x = random.randint(0, COLUMN_COUNT - 1)
                apple_y = random.randint(0, ROW_COUNT - 1)
apple_x_coordinate = (MARGIN + WIDTH) * apple_x + MARGIN + WIDTH // 2
apple_y_coordinate = (MARGIN + HEIGHT) * apple_y + MARGIN + HEIGHT // 2
        apple_display = True
score += 1
def high_score(score):
arcade.draw_text("Score: " + score, 9 * (SCREEN_WIDTH//10), 3*(SCREEN_HEIGHT//4), arcade.color.WHITE, 10, font_name= "comic sans")
def on_key_press(key, modifiers):
global up, down, left, right
if (key == arcade.key.W) and (down == False):
up = True
down = False
right = False
left = False
elif (key == arcade.key.S) and (up == False):
down = True
up = False
right = False
left = False
elif (key == arcade.key.A) and (right == False):
left = True
up = False
down = False
right = False
elif (key == arcade.key.D) and (left == False):
right = True
up = False
down = False
left = False
def on_key_release(key, modifiers):
pass
def on_mouse_press(x, y, button, modifiers):
    global show_text, my_button, page, SPEED
    if page == 0:
# For starting screen, check which button has been clicked
if (x > buttons[0][0] and x < buttons[0][0] + buttons[0][2] and
y > buttons[0][1] and y < buttons[0][1] + buttons[0][3]):
show_text = True
page += 1
SPEED = 0.5
print("noob")
elif (x > buttons[1][0] and x < buttons[1][0] + buttons[1][2] and
y > buttons[1][1] and y < buttons[1][1] + buttons[1][3]):
show_text = True
page += 1
SPEED = 1
print("normal")
elif (x > buttons[2][0] and x < buttons[2][0] + buttons[2][2] and
y > buttons[2][1] and y < buttons[2][1] + buttons[2][3]):
show_text = True
page += 1
SPEED = 2
print("hard")
elif (x > buttons[3][0] and x < buttons[3][0] + buttons[3][2] and
y > buttons[3][1] and y < buttons[3][1] + buttons[3][3]):
show_text = True
page += 1
SPEED = 2.5
print("expert")
else:
show_text = False
def setup():
global grid
# SPEED = float(input("What fast do you want? \n Noob: Type 0.5 \n Normal: Type 1 \n Hard: Type 1.5 - 2 \n Expert: Type 2.5 or more \n *Changes the refresh rate* \n"))
arcade.open_window(SCREEN_WIDTH, SCREEN_HEIGHT, "snake")
arcade.set_background_color(arcade.color.BLACK)
arcade.schedule(on_update, 1/(10 + SPEED))
# Override arcade window methods
window = arcade.get_window()
window.on_draw = on_draw
window.on_key_press = on_key_press
window.on_key_release = on_key_release
window.on_mouse_press = on_mouse_press
arcade.run()
if __name__ == '__main__':
setup()
| [
"[email protected]"
] | |
4e9a8679f6d3e1a43540bb78022b35d6cb07679d | a74b980fd95d5d810315f181449fc9d1710e6923 | /savecode/threeyears/idownclient/scan/plugin/zgrab2/zgrab2scanner/zgrab2scannerpop3.py | b822e7e9071e3ecd8759508da388a167c5059ce4 | [
"Apache-2.0"
] | permissive | cbbbbbbbb/sspywork | b70f5539203b47b21eec2f0514ddca155affc2b8 | 8f05a6b91fc205960edd57f9076facec04f49a1a | refs/heads/master | 2023-03-22T19:45:13.024076 | 2021-03-08T01:24:21 | 2021-03-08T01:24:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,680 | py | """
Use zgrab2 to scan POP3.
Created by judy 2019/11/19
"""
import os
import signal
import traceback
import uuid
from datacontract.iscandataset.iscantask import IscanTask
from .zgrab2scannerbase import Zgrab2ScannerBase
from ..zgrab2parser import Zgrab2ParserPop3
class Zgrab2ScannerPop3(Zgrab2ScannerBase):
"""zgrab2 http scanner"""
def __init__(self, zgrab_path: str):
Zgrab2ScannerBase.__init__(self, "zgrab2pop3")
self._parser: Zgrab2ParserPop3 = Zgrab2ParserPop3()
def get_banner_pop3(
self,
task: IscanTask,
level,
pinfo_dict,
port,
*args,
zgrab2path: str = "zgrab2",
sudo: bool = False,
timeout: float = 600,
) -> iter:
"""scan http services and get the banner"""
try:
if not isinstance(port, int) or port < 0 or port > 65535:
raise Exception("Invalid port: {}".format(port))
hosts: iter = pinfo_dict.keys()
hostfi = self._write_hosts_to_file(task, hosts)
if hostfi is None:
return
outfi = self._scan_pop3(
task,
level,
hostfi,
port,
*args,
zgrab2path=zgrab2path,
sudo=sudo,
timeout=timeout,
)
if outfi is None or not os.path.isfile(outfi):
return
            # should this be modified?
self._parser.parse_banner_pop3(task, level, pinfo_dict, outfi)
except Exception:
self._logger.error("Scan pop3 error: {}".format(traceback.format_exc()))
def _scan_pop3(
self,
task: IscanTask,
level,
host_file: str,
port: int,
*args,
zgrab2path: str = "zgrab2",
sudo: bool = False,
timeout: float = 600,
) -> str:
"""scan the ips or domains, and write the output files to specified output directory.
host_file: the full path of a file with list of ['1.1.1.1','www.xxx.com'] in the file per line
port: '80' or '443'
outfi: result file path
"""
outfi: str = None
try:
enhanced_args = []
# add hosts and ports to args
enhanced_args.append("pop3")
enhanced_args.append(f"-p {port}")
# zgrab2 pop3 -p 110 -n pop3 -t 10 -o ./mt1.json -f ./mtip.txt
enhanced_args.append("-n pop3")
enhanced_args.append(f"-t {timeout}")
if "--debug" not in enhanced_args:
enhanced_args.append("--debug")
if "--send-help" not in enhanced_args:
enhanced_args.append("--send-help")
if "--send-noop" not in enhanced_args:
enhanced_args.append("--send-noop")
if "--send-quit" not in enhanced_args:
enhanced_args.append("--send-quit")
if port == 110 and "--starttls" not in enhanced_args:
enhanced_args.append("--starttls")
elif port == 995 and "--pop3s" not in enhanced_args:
enhanced_args.append("--pop3s")
            # args is usually empty; it only carries values when the caller explicitly passes extras, so keep extending it here for now
enhanced_args.extend(args)
if "--input-file=" not in args or "-f" not in args:
enhanced_args.append(f"-f {host_file}") # input file
# outfi = os.path.join(self._tmpdir, "{}_{}.pop3".format(task.taskid, port))
with self._outfile_locker:
outfi = os.path.join(
self._tmpdir, "{}_{}.pop3".format(str(uuid.uuid1()), port)
)
while os.path.isfile(outfi):
outfi = os.path.join(
self._tmpdir, "{}_{}.pop3".format(str(uuid.uuid1()), port)
)
if "--output-file=" not in args or "-o" not in args:
                # must use -o here; using '--output-file' causes a 'No such file or directory' exception
                # this may be a zgrab2 bug
                # the zgrab2 docs do not say '--output-file' is supported
enhanced_args.append(f"-o {outfi}") # output file
            # create the output directory if it does not exist
outdir = os.path.dirname(outfi)
if not os.path.exists(outdir) or not os.path.isdir(outdir):
os.makedirs(outdir)
curr_process = None
try:
curr_process = self._run_process(
zgrab2path, *enhanced_args, rootDir=outdir, sudo=sudo
)
stdout, stderr = curr_process.communicate(timeout=timeout)
exitcode = curr_process.wait(timeout=10)
if stdout is not None:
self._logger.trace(stdout)
if stderr is not None:
self._logger.trace(stderr)
if exitcode != 0:
raise Exception(f"Scan pop3 error: {stdout}\n{stderr}")
self._logger.info(
f"Scan pop3 exitcode={str(exitcode)}\ntaskid:{task.taskid}\nbatchid:{task.batchid}\nport:{port}"
)
finally:
if curr_process is not None:
curr_process.kill()
except Exception:
if outfi is not None and os.path.isfile(outfi):
os.remove(outfi)
outfi = None
self._logger.info(
f"Scan pop3 error\ntaskid:{task.taskid}\nbatchid:{task.batchid}\nport:{port}"
)
return outfi
| [
"[email protected]"
] | |
481788fbe8f4d3aa3fde5561c1b9c96254847246 | 32abbd752bb333b45872abeb1295596b529c60a9 | /modules/pdb2pqr/src/na.py | ee17076a9cafb73079b73faa4b859e4c55ed584c | [] | no_license | tevang/Pymol-script-repo | a5adb80856b0e0d1dd8d60c9aa1053200524e19b | a7a94dec980c717131d0605b487d6df77bfeff38 | refs/heads/master | 2023-05-14T15:37:07.934823 | 2023-04-27T13:20:27 | 2023-04-27T13:20:27 | 179,960,720 | 2 | 0 | null | 2023-04-28T04:09:46 | 2019-04-07T12:07:22 | Python | UTF-8 | Python | false | false | 12,412 | py | """
Nucleic Acid Structures for PDB2PQR
This module contains the base nucleic acid structures for
pdb2pqr.
----------------------------
PDB2PQR -- An automated pipeline for the setup, execution, and analysis of
Poisson-Boltzmann electrostatics calculations
Copyright (c) 2002-2011, Jens Erik Nielsen, University College Dublin;
Nathan A. Baker, Battelle Memorial Institute, Developed at the Pacific
Northwest National Laboratory, operated by Battelle Memorial Institute,
Pacific Northwest Division for the U.S. Department Energy.;
Paul Czodrowski & Gerhard Klebe, University of Marburg.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of University College Dublin, Battelle Memorial Institute,
Pacific Northwest National Laboratory, US Department of Energy, or University
of Marburg nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------
"""
__date__ = "28 February 2006"
__author__ = "Todd Dolinsky"
import string
from structures import *
class Nucleic(Residue):
"""
Nucleic class
This class provides standard features of the nucleic acids listed
below
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
ref: The reference object for the amino acid. Used to
convert from the alternate naming scheme to the
main naming scheme.
"""
def __init__(self, atoms, ref):
sampleAtom = atoms[-1]
self.atoms = []
self.name = sampleAtom.resName
self.chainID = sampleAtom.chainID
self.resSeq = sampleAtom.resSeq
self.iCode = sampleAtom.iCode
self.ffname = self.name
self.map = {}
self.dihedrals = []
self.patches = []
self.is3term = 0
self.is5term = 0
self.isCterm = 0
self.isNterm = 0
self.missing = []
self.reference = ref
# Create each atom
for a in atoms:
if a.name in ref.altnames: # Rename atoms
a.name = ref.altnames[a.name]
if a.name not in self.map:
atom = Atom(a, "ATOM", self)
self.addAtom(atom)
def createAtom(self, atomname, newcoords):
"""
Create an atom. Overrides the generic residue's createAtom().
Parameters
atomname: The name of the atom to add (string)
newcoords: The coordinates of the atom (list)
"""
oldatom = self.atoms[0]
newatom = Atom(oldatom, "ATOM", self)
newatom.set("x",newcoords[0])
newatom.set("y",newcoords[1])
newatom.set("z",newcoords[2])
newatom.set("name", atomname)
newatom.set("occupancy",1.00)
newatom.set("tempFactor",0.00)
newatom.added = 1
self.addAtom(newatom)
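        # Illustrative call: residue.createAtom("P", [0.0, 0.0, 0.0]) clones the
        # first stored atom, renames it "P", and places it at the given coordinates.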
def addAtom(self, atom):
"""
Override the existing addAtom - include the link to the
reference object
"""
self.atoms.append(atom)
atomname = atom.get("name")
self.map[atomname] = atom
try:
atom.reference = self.reference.map[atomname]
for bond in atom.reference.bonds:
if self.hasAtom(bond):
bondatom = self.map[bond]
if bondatom not in atom.bonds: atom.bonds.append(bondatom)
if atom not in bondatom.bonds: bondatom.bonds.append(atom)
except KeyError:
atom.reference = None
def addDihedralAngle(self, value):
"""
Add the value to the list of chiangles
Parameters
value: The value to be added (float)
"""
self.dihedrals.append(value)
def setState(self):
"""
Adds the termini for all inherited objects
"""
if self.is5term: self.ffname = self.ffname + "5"
if self.is3term: self.ffname = self.ffname + "3"
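    # Example (illustrative): a residue whose ffname is "RA" becomes "RA5" at the
    # 5' terminus and "RA3" at the 3' terminus.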
class A(Nucleic):
"""
Adenosine class
This class gives data about the Adenosine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA.
"""
if self.hasAtom("O2'"): self.ffname = "RA"
else: self.ffname = "DA"
Nucleic.setState(self)
class C(Nucleic):
"""
Cytidine class
This class gives data about the Cytidine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA.
"""
if self.hasAtom("O2'"): self.ffname = "RC"
else: self.ffname = "DC"
Nucleic.setState(self)
class G(Nucleic):
"""
Guanosine class
This class gives data about the Guanosine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA.
"""
if self.hasAtom("O2'"): self.ffname = "RG"
else: self.ffname = "DG"
Nucleic.setState(self)
class T(Nucleic):
"""
Thymine class
This class gives data about the Thymine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA. In this case it is
always DNA.
"""
self.ffname = "DT"
Nucleic.setState(self)
class U(Nucleic):
"""
Uridine class
This class gives data about the Uridine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA. In this case it is
always RNA.
"""
self.ffname = "RU"
Nucleic.setState(self)
class ADE(Nucleic):
"""
Adenosine class
This class gives data about the Adenosine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA.
"""
if self.hasAtom("O2'"): self.ffname = "RA"
else: self.ffname = "DA"
Nucleic.setState(self)
class CYT(Nucleic):
"""
Cytidine class
This class gives data about the Cytidine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA.
"""
if self.hasAtom("O2'"): self.ffname = "RC"
else: self.ffname = "DC"
Nucleic.setState(self)
class GUA(Nucleic):
"""
Guanosine class
This class gives data about the Guanosine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA.
"""
if self.hasAtom("O2'"): self.ffname = "RG"
else: self.ffname = "DG"
Nucleic.setState(self)
class THY(Nucleic):
"""
Thymine class
This class gives data about the Thymine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA. In this case it is
always DNA.
"""
self.ffname = "DT"
Nucleic.setState(self)
class URA(Nucleic):
"""
Uridine class
This class gives data about the Uridine object, and inherits
    from the base residue class.
"""
def __init__(self, atoms, ref):
"""
Initialize the class
Parameters
atoms: A list of Atom objects to be stored in this class
(list)
"""
Nucleic.__init__(self, atoms, ref)
self.reference = ref
def setState(self):
"""
Set the state to distinguish RNA from DNA. In this case it is
always RNA.
"""
self.ffname = "RU"
Nucleic.setState(self)
| [
"[email protected]"
] | |
7cb2899c4727fad9262175e7b21678934c7180e0 | 03e3138f99f275d15d41a5c5bfb212f85d64d02e | /source/res/scripts/client/bootcamp/Bootcamp.py | ffa79356e78da65ba8ef200b12a24e38d16c6bc7 | [] | no_license | TrenSeP/WorldOfTanks-Decompiled | e428728e7901146d0b599d02c930d70532232a97 | 1faa748acec1b7e435b657fd054ecba23dd72778 | refs/heads/1.4.1 | 2020-04-27T08:07:49.813023 | 2019-03-05T17:37:06 | 2019-03-05T17:37:06 | 174,159,837 | 1 | 0 | null | 2019-03-06T14:33:33 | 2019-03-06T14:24:36 | Python | UTF-8 | Python | false | false | 23,750 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/bootcamp/Bootcamp.py
import base64
import cPickle
from collections import namedtuple
import BigWorld
import BattleReplay
import TriggersManager
import WWISE
import MusicControllerWWISE as MC
from account_helpers.AccountSettings import CURRENT_VEHICLE, AccountSettings
from account_helpers.settings_core.settings_constants import BATTLE_EVENTS
from account_helpers.settings_core.ServerSettingsManager import SETTINGS_SECTIONS
from account_helpers.settings_core import ISettingsCore
from account_helpers.counter_settings import dropCounters as dropNewSettingsCounters
from adisp import process, async
from debug_utils_bootcamp import LOG_DEBUG_DEV_BOOTCAMP
from PlayerEvents import g_playerEvents
from bootcamp_shared import BOOTCAMP_BATTLE_ACTION
from gui import makeHtmlString
from gui.Scaleform.daapi.view.lobby.referral_program.referral_program_helpers import isReferralProgramEnabled, isCurrentUserRecruit
from gui.app_loader import g_appLoader
from gui.prb_control.dispatcher import g_prbLoader
from gui.Scaleform.Waiting import Waiting
from gui.Scaleform.daapi.view.login.EULADispatcher import EULADispatcher
from gui.Scaleform.framework.entities.EventSystemEntity import EventSystemEntity
from gui.Scaleform.locale.RES_ICONS import RES_ICONS
from gui.prb_control import prbEntityProperty
from gui.shared.items_cache import CACHE_SYNC_REASON
from gui.battle_control.arena_info import player_format
from helpers import dependency, aop, i18n
from skeletons.connection_mgr import IConnectionManager
from skeletons.gui.battle_session import IBattleSessionProvider
from .BootcampGUI import BootcampGUI
from .BootcampReplayController import BootcampReplayController
from .BootcampConstants import BOOTCAMP_BATTLE_RESULT_MESSAGE
from .BootCampEvents import g_bootcampEvents
from .BootcampContext import Chapter
from .BootcampSettings import getBattleSettings
from .BootcampGarageLessons import GarageLessons
from .ReloadLobbyHelper import ReloadLobbyHelper
from .states import STATE
from .states.StateInGarage import StateInGarage
from .states.StateInitial import StateInitial
from .states.StateOutroVideo import StateOutroVideo
from .states.StateResultScreen import StateResultScreen
from .aop.common import weave
from . import GAME_SETTINGS_NEWBIE, GAME_SETTINGS_COMMON
DISABLED_TANK_LEVELS = (1,)
class _BCNameFormatter(player_format.PlayerFullNameFormatter):
@staticmethod
def _normalizePlayerName(name):
msgid = '#bootcamp:{0}'.format(name)
locname = i18n.makeString(msgid)
if locname != msgid:
name = locname
return name
_BattleResults = namedtuple('_BattleResults', ('type', 'typeStr'))
class BOOTCAMP_LESSON(object):
BATTLE = 0
GARAGE = 1
class Bootcamp(EventSystemEntity):
settingsCore = dependency.descriptor(ISettingsCore)
connectionMgr = dependency.descriptor(IConnectionManager)
sessionProvider = dependency.descriptor(IBattleSessionProvider)
BOOTCAMP_SOUND_BANKS = ('bootcamp.pck', 'bootcamp_gui.bnk', 'bootcamp_hangar.bnk', 'bootcamp_hangar_voiceover.bnk', 'bootcamp_voiceover.bnk', 'bootcamp_result_screen.bnk')
def __init__(self):
super(Bootcamp, self).__init__()
self.__currentState = StateInitial()
self.__running = False
self.__account = None
self.__avatar = None
self.__lessonId = 0
self.__isRecruit = False
self.__isBattleLesson = False
self.__context = {}
self.__chapter = None
self.__gui = None
self.__arenaUniqueID = None
self.__lobbyReloader = ReloadLobbyHelper()
self.__battleResults = None
self.__hangarSpace = None
self.__hangarSpacePremium = None
self.__bonuses = None
self.__isIntroVideoPlayed = False
self.__requestBootcampFinishFromBattle = False
self.__transitionFlash = None
self.__isSniperModeUsed = False
self.__showingWaitingActionWindow = False
self.__nation = 0
self.__nationsData = {}
self.__checkpoint = ''
self.__replayController = None
self.__minimapSize = 0.0
self.__garageLessons = GarageLessons()
self.__finalVideoCallback = None
self.__p = {'manualStart': False,
'finished': False}
self.__weaver = aop.Weaver()
return
@async
def nextFrame(self, callback):
BigWorld.callback(0.0, lambda : callback(True))
def replayCallbackSubscribe(self):
BattleReplay.g_replayCtrl.setDataCallback('bootcamp_setContext', self.replaySetContext)
def serializeContext(self):
return base64.b64encode(cPickle.dumps(self.__context, -1))
def deserializeContext(self, contextBin):
return cPickle.loads(base64.b64decode(contextBin))
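    # Round-trip sketch: deserializeContext(serializeContext()) yields an equal
    # copy of the context dict (pickled at highest protocol, then base64-encoded).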
@property
def requestBootcampFinishFromBattle(self):
return self.__requestBootcampFinishFromBattle
@requestBootcampFinishFromBattle.setter
def requestBootcampFinishFromBattle(self, value):
self.__requestBootcampFinishFromBattle = value
def replaySetContext(self, contextBin):
self.setContext(self.deserializeContext(contextBin))
def setContext(self, context):
self.__context = context
self.__checkpoint = ''
if context is None:
return
else:
if 'checkpoint' in self.__context:
self.__checkpoint = self.__context['checkpoint']
return
def __cm_onDisconnected(self):
self.__lobbyReloader.cancel()
self.stop(0)
def setAccount(self, account):
self.__account = account
@property
def account(self):
return self.__account
def getContext(self):
return self.__context
def getContextIntParameter(self, parameter, default=0):
return self.__context.get(parameter, default)
def getLessonNum(self):
return self.__lessonId
def getLessonType(self):
return self.__lessonType
def getCheckpoint(self):
return self.__checkpoint
def saveCheckpoint(self, checkpoint):
self.__checkpoint = checkpoint
if self.__account is not None:
self.__account.base.saveBootcampCheckpoint(self.__checkpoint, self.__lessonId)
return
def setBattleResults(self, arenaUniqueID, resultType, resultReason):
self.__arenaUniqueID = arenaUniqueID
from gui.battle_results.settings import PLAYER_TEAM_RESULT
if not resultType:
teamResult = PLAYER_TEAM_RESULT.DRAW
elif resultType == BOOTCAMP_BATTLE_RESULT_MESSAGE.VICTORY:
teamResult = PLAYER_TEAM_RESULT.WIN
else:
teamResult = PLAYER_TEAM_RESULT.DEFEAT
self.__battleResults = _BattleResults(resultType, makeHtmlString('html_templates:bootcamp/battle_results', teamResult))
def getBattleResults(self):
return self.__battleResults
def getBattleResultsExtra(self, lessonId):
return self.__garageLessons.getBattleResult(lessonId)
def isManualStart(self):
return self.__p['manualStart']
def isIntroVideoPlayed(self):
return self.__isIntroVideoPlayed
def setIntroVideoPlayed(self):
self.__isIntroVideoPlayed = True
def resetSniperModeUsed(self):
self.__isSniperModeUsed = False
def isSniperModeUsed(self):
return self.__isSniperModeUsed
def setSniperModeUsed(self, value):
self.__isSniperModeUsed = value
@process
def start(self, lessonNum, isBattleLesson):
LOG_DEBUG_DEV_BOOTCAMP('Starting bootcamp', lessonNum, isBattleLesson)
from gui.shared.personality import ServicesLocator
if BattleReplay.g_replayCtrl.isPlaying:
self.__replayController = BootcampReplayController()
self.__replayController.init()
g_bootcampEvents.onBattleLessonFinished += self.onBattleLessonFinished
g_bootcampEvents.onGarageLessonFinished += self.onGarageLessonFinished
g_bootcampEvents.onBattleLoaded += self.onBattleLoaded
g_bootcampEvents.onResultScreenFinished += self.onResultScreenFinished
g_bootcampEvents.onRequestBootcampFinish += self.onRequestBootcampFinish
g_bootcampEvents.onOutroVideoStop += self.onOutroVideoStop
g_bootcampEvents.onBootcampBecomeNonPlayer += self.onBootcampBecomeNonPlayer
g_playerEvents.onAvatarBecomeNonPlayer += self.__onAvatarBecomeNonPlayer
g_playerEvents.onArenaCreated += self.__onArenaCreated
self.connectionMgr.onDisconnected += self.__cm_onDisconnected
self.__requestBootcampFinishFromBattle = False
ctx = self.getContext()
isRelogin = ctx['relogin']
LOG_DEBUG_DEV_BOOTCAMP('IsRelogin', isRelogin)
autoStartBattle = isRelogin or BattleReplay.isPlaying()
if not autoStartBattle:
self.showActionWaitWindow()
yield self.settingsCore.serverSettings.settingsCache.update()
self.settingsCore.serverSettings.applySettings()
isNewbie = False
if ctx['isNewbieSettings'] and not ctx['completed'] and ctx['runCount'] == 1:
isNewbie = True
self.__setupPreferences(isNewbie)
self.hideActionWaitWindow()
eula = EULADispatcher()
yield eula.processLicense()
eula.fini()
self.__running = True
self.__lessonId = lessonNum
self.__lessonType = BOOTCAMP_LESSON.BATTLE if isBattleLesson else BOOTCAMP_LESSON.GARAGE
if (lessonNum == 0 or not isBattleLesson) and not autoStartBattle:
self.showActionWaitWindow()
yield ServicesLocator.itemsCache.update(CACHE_SYNC_REASON.SHOW_GUI)
self.__isRecruit = isCurrentUserRecruit()
self.hideActionWaitWindow()
if self.__currentState is not None:
self.__currentState.deactivate()
self.__hangarSpace = ctx['hangarSpace']
self.__hangarSpacePremium = ctx['hangarSpacePremium']
self.__bonuses = ctx['bonuses']
showRewards = ctx['needAwarding']
previousLesson = self.getContextIntParameter('lastLessonNum') - 1
if previousLesson >= 0 and previousLesson < len(self.__bonuses['battle']):
self.__bonuses['battle'][previousLesson]['showRewards'] = showRewards
self.__nation = ctx['nation']
self.__nationsData = ctx['nationsData']
self.__p['completed'] = ctx['completed']
self.__p['needAwarding'] = ctx['needAwarding']
weave(self.__weaver)
if AccountSettings.isCleanPC():
dropNewSettingsCounters()
g_bootcampEvents.onBootcampStarted()
if not autoStartBattle:
if isBattleLesson:
g_prbLoader.createBattleDispatcher()
g_prbLoader.setEnabled(True)
self.enqueueBattleLesson()
else:
self.showActionWaitWindow()
yield self.nextFrame()
self.__currentState = StateInGarage()
self.__lobbyReloader.reload()
self.hideActionWaitWindow()
self.__currentState.activate()
else:
from states.StateBattlePreparing import StateBattlePreparing
self.__currentState = StateBattlePreparing(self.__lessonId, BigWorld.player())
self.__currentState.activate()
BigWorld.overloadBorders(True)
if self.__chapter is None:
self.__chapter = Chapter()
if self.__gui is None:
self.__gui = BootcampGUI()
WWISE.loadSoundPool(self.BOOTCAMP_SOUND_BANKS, 'Bootcamp')
self.sessionProvider.getCtx().setPlayerFullNameFormatter(_BCNameFormatter())
return
def stop(self, reason):
g_bootcampEvents.onBootcampFinished()
self.__weaver.clear()
BigWorld.overloadBorders(False)
if self.__gui is not None:
self.__gui.clear()
self.__gui = None
if self.__chapter is not None:
self.__chapter.clear()
self.__chapter = None
g_bootcampEvents.onBattleLessonFinished -= self.onBattleLessonFinished
g_bootcampEvents.onGarageLessonFinished -= self.onGarageLessonFinished
g_bootcampEvents.onBattleLoaded -= self.onBattleLoaded
g_bootcampEvents.onResultScreenFinished -= self.onResultScreenFinished
g_bootcampEvents.onRequestBootcampFinish -= self.onRequestBootcampFinish
g_bootcampEvents.onOutroVideoStop -= self.onOutroVideoStop
g_playerEvents.onAvatarBecomeNonPlayer -= self.__onAvatarBecomeNonPlayer
g_playerEvents.onArenaCreated -= self.__onArenaCreated
self.connectionMgr.onDisconnected -= self.__cm_onDisconnected
if self.__running:
self.__running = False
if self.__currentState:
self.__currentState.deactivate()
self.__currentState = StateInitial()
self.__account = None
self.__context = {}
self.__isIntroVideoPlayed = False
if self.__replayController is not None:
self.__replayController.fini()
self.__replayController = None
MC.g_musicController.stopAmbient(True)
WWISE.unloadSoundPool()
self.sessionProvider.getCtx().resetPlayerFullNameFormatter()
return
def isRunning(self):
return self.__running
def isFinished(self):
return self.__p['completed']
def handleKeyEvent(self, event):
if self.isRunning() and self.__currentState is not None:
self.__currentState.handleKeyEvent(event)
return
@property
def replayCtrl(self):
return self.__replayController
@prbEntityProperty
def prbEntity(self):
pass
def showActionWaitWindow(self):
if not self.__showingWaitingActionWindow:
self.__showingWaitingActionWindow = True
Waiting.show('sinhronize')
def hideActionWaitWindow(self):
if self.__showingWaitingActionWindow:
self.__showingWaitingActionWindow = False
Waiting.hide('sinhronize')
def onBootcampBecomeNonPlayer(self):
self.hideActionWaitWindow()
def onBattleLoaded(self, lessonId):
from states.StateInBattle import StateInBattle
LOG_DEBUG_DEV_BOOTCAMP('onBattleLoaded called')
self.__currentState.deactivate()
if self.__currentState.skipBootcamp:
self.requestBootcampFinishFromBattle = True
self.sessionProvider.exit()
else:
self.__avatar = BigWorld.player()
self.__currentState = StateInBattle(lessonId, self.__avatar, self.__chapter, self.__gui)
self.__currentState.activate()
def onBattleLessonFinished(self, lessonId, lessonResults):
self.__lessonId = lessonId
self.__currentState.deactivate()
if self.requestBootcampFinishFromBattle:
self.onRequestBootcampFinish()
return
self.__currentState = StateResultScreen(lessonResults)
self.__currentState.activate()
def __onAvatarBecomeNonPlayer(self):
if self.__currentState is not None:
self.__currentState.onAvatarBecomeNonPlayer()
return
def __onArenaCreated(self):
from states.StateBattlePreparing import StateBattlePreparing
if self.__currentState:
self.__currentState.deactivate()
self.__currentState = StateBattlePreparing(self.__lessonId, self.__account)
self.__currentState.activate()
def onBattleAction(self, actionId, actionArgs):
if actionId == BOOTCAMP_BATTLE_ACTION.PLAYER_OBSERVED_ACTION:
if actionArgs[0] == 1:
TriggersManager.g_manager.activateTrigger(TriggersManager.TRIGGER_TYPE.PLAYER_VEHICLE_OBSERVED)
else:
TriggersManager.g_manager.deactivateTrigger(TriggersManager.TRIGGER_TYPE.PLAYER_VEHICLE_OBSERVED)
else:
g_bootcampEvents.onBattleAction(actionId, actionArgs)
self.__currentState.onBattleAction(actionId, actionArgs)
def isInBattleResultState(self):
return isinstance(self.__currentState, StateResultScreen)
def onResultScreenFinished(self):
self.__currentState.deactivate()
isVictory = self.__battleResults.type == BOOTCAMP_BATTLE_RESULT_MESSAGE.VICTORY
if self.__lessonId == 0:
if not isVictory:
g_prbLoader.createBattleDispatcher()
g_prbLoader.setEnabled(True)
g_appLoader.showLobby()
self.__currentState = StateInitial()
self.enqueueBattleLesson()
else:
self.__currentState = StateInGarage()
else:
self.__currentState = StateInGarage()
self.__currentState.activate()
def enqueueBattleLesson(self):
if self.prbEntity is not None:
self.prbEntity.doAction()
return
def showFinalVideo(self, callback):
LOG_DEBUG_DEV_BOOTCAMP('showFinalVideo')
MC.g_musicController.muteMusic(True)
self.__finalVideoCallback = callback
self.__currentState.deactivate()
self.__currentState = StateOutroVideo()
self.__currentState.activate()
def onGarageLessonFinished(self, lessonId):
LOG_DEBUG_DEV_BOOTCAMP('onGarageLessonFinished', lessonId)
self.__account.base.completeBootcampLesson(0)
lastLesson = self.getContextIntParameter('lastLessonNum')
if self.__lessonId == lastLesson:
LOG_DEBUG_DEV_BOOTCAMP('Finished last lesson', lessonId)
else:
self.enqueueBattleLesson()
def onRequestBootcampFinish(self):
LOG_DEBUG_DEV_BOOTCAMP('onRequestBootcampFinish')
self.__account.base.requestBootcampQuit(AccountSettings.getFavorites(CURRENT_VEHICLE))
def finishBootcamp(self):
LOG_DEBUG_DEV_BOOTCAMP('finishBootcamp', self.__currentState.id())
self.finishFromGarageLesson()
self.setContext({})
self.setAccount(None)
return
def finishFromGarageLesson(self):
self.__running = False
self.setDefaultHangarSpace()
self.stop(0)
self.__lobbyReloader.reload()
def onOutroVideoStop(self):
if self.__finalVideoCallback is not None:
self.__finalVideoCallback()
self.__finalVideoCallback = None
return
def getBattleSettings(self):
settings = getBattleSettings(self.__lessonId)
return (settings.visiblePanels, settings.hiddenPanels)
def getBattleRibbonsSettings(self):
return getBattleSettings(self.__lessonId).ribbons
def getBattleLoadingPages(self):
return getBattleSettings(self.__lessonId).lessonPages
def getIntroPageData(self, isChoice=False):
parameters = self.getParameters()
autoStart = parameters.get('introAutoStart', False)
if BattleReplay.isPlaying():
autoStart = True
introPageData = {'backgroundImage': RES_ICONS.MAPS_ICONS_BOOTCAMP_LOADING_INTROLOADING,
'video': '',
'autoStart': autoStart,
'lessonNumber': self.__lessonId,
'tutorialPages': self.getBattleLoadingPages(),
'showSkipOption': True,
'isReferralEnabled': self.isReferralEnabled(),
'isChoice': isChoice}
return introPageData
def getIntroVideoData(self):
if self.__currentState:
introVideoPageData = self.getIntroPageData()
introVideoPageData.update(self.__currentState.getIntroVideoData())
return introVideoPageData
return {}
def getUI(self):
return self.__gui
def setHangarSpace(self, hangarSpace, hangarSpacePremium):
from gui.ClientHangarSpace import g_clientHangarSpaceOverride
g_clientHangarSpaceOverride.setPath(hangarSpacePremium, -1, True, False)
g_clientHangarSpaceOverride.setPath(hangarSpace, -1, False, False)
def setBootcampHangarSpace(self):
BigWorld.updateTerrainBorders((-127, -237, -37, -157))
self.setHangarSpace(self.__hangarSpace, self.__hangarSpacePremium)
def setDefaultHangarSpace(self):
self.setHangarSpace(None, None)
return
def changeNation(self, nationIndex):
self.__nation = nationIndex
Waiting.show('sinhronize')
self.__account.base.changeBootcampLessonBonus(nationIndex)
g_playerEvents.onClientUpdated += self.onNationChanged
def onNationChanged(self, _, __):
g_playerEvents.onClientUpdated -= self.onNationChanged
Waiting.hide('sinhronize')
def getBonuses(self):
return self.__bonuses
@property
def nation(self):
return self.__nation
def getNationData(self):
return self.__nationsData[self.__nation]
def isReferralEnabled(self):
return isReferralProgramEnabled() and self.__isRecruit
def isMarkerLittlePiercingEnabled(self):
return self.__context.get('little_pierced', False)
def getPredefinedPiercingPower(self):
return self.__context.get('piercingPower', None)
def isLittlePiercingDisabledModules(self, armor):
return armor in (10, 15, 20)
def checkLittlePiercingVehicle(self, entity):
return entity.typeDescriptor.name == self.__context['little_pierced_vehicle']
def isInGarageState(self):
return self.__currentState.id() == STATE.IN_GARAGE
def isResearchFreeLesson(self):
return self.getLessonNum() >= self.getContextIntParameter('researchFreeLesson')
def isEnableDamageIcon(self):
return self.getLessonNum() >= self.getContextIntParameter('enableDamageIconLesson')
def checkBigConsumablesIconsLesson(self):
return self.__lessonId == self.getContextIntParameter('researchSecondVehicleLesson')
def getBattleStatsLesson(self):
return self.__context.get('battleStats', [])
def setBootcampParams(self, params):
self.__p.update(params)
def getParameters(self):
return self.__p
def addPointcut(self, pointcut, *args, **kwargs):
return self.__weaver.weave(pointcut=pointcut, *args, **kwargs)
def removePointcut(self, pointcutIndex):
if pointcutIndex is not None:
self.__weaver.clear(pointcutIndex)
return
def showBattleResultTransition(self):
from .BattleResultTransition import BattleResultTransition
if self.__transitionFlash is not None:
self.__transitionFlash.close()
self.__transitionFlash = BattleResultTransition()
self.__transitionFlash.active(True)
return
def hideBattleResultTransition(self):
if self.__transitionFlash is not None:
self.__transitionFlash.active(False)
self.__transitionFlash.close()
self.__transitionFlash = None
return
def __setupPreferences(self, isNewbie):
if isNewbie:
self.settingsCore.serverSettings.setSectionSettings(SETTINGS_SECTIONS.BATTLE_EVENTS, {setting:True for _, setting in BATTLE_EVENTS.getIterator()})
settingsTemplate = GAME_SETTINGS_NEWBIE
else:
settingsTemplate = GAME_SETTINGS_COMMON
settings = {}
for k, v in settingsTemplate.iteritems():
i = k.find(':')
if i > -1:
settings.setdefault(k[:i], {})[k[i + 1:]] = v
settings[k] = v
self.settingsCore.applySettings(settings)
self.settingsCore.confirmChanges(self.settingsCore.applyStorages(restartApproved=False))
self.settingsCore.clearStorages()
g_bootcamp = Bootcamp()
| [
"[email protected]"
] | |
33a2d5d740644f1f35166c70f71a5928b409bcb6 | 072f8bffbfef6e149ad1934ea9183a79864c1acd | /venv/Lib/site-packages/watcherclient/tests/unit/v1/test_strategy_shell.py | 7a1568714e2603e647f5b0d99f8bceeb0ede953e | [] | no_license | numvc/LuxoftBot | 77d9bf8f5f63aee63350f1ec82f4b940afe203d2 | 29d7ca8868ab86bc076509d103f7596039333417 | refs/heads/master | 2020-09-21T21:37:12.527546 | 2019-12-04T23:24:35 | 2019-12-04T23:24:35 | 224,939,956 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,741 | py | # Copyright (c) 2016 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import mock
import six
from oslo_serialization import jsonutils
from watcherclient import shell
from watcherclient.tests.unit.v1 import base
from watcherclient import v1 as resource
from watcherclient.v1 import resource_fields
STRATEGY_1 = {
'uuid': '2cf86250-d309-4b81-818e-1537f3dba6e5',
'name': 'basic',
'display_name': 'Basic consolidation',
'goal_uuid': 'fc087747-61be-4aad-8126-b701731ae836',
'goal_name': 'SERVER_CONSOLIDATION',
'created_at': datetime.datetime.now().isoformat(),
'updated_at': None,
'deleted_at': None,
'parameters_spec': {},
}
STRATEGY_2 = {
'uuid': 'b20bb987-ea8f-457a-a4ea-ab3ffdfeff8b',
'name': 'dummy',
'display_name': 'Dummy',
'goal_uuid': '407b03b1-63c6-49b2-adaf-4df5c0090047',
'goal_name': 'DUMMY',
'created_at': datetime.datetime.now().isoformat(),
'updated_at': None,
'deleted_at': None,
'parameters_spec': {},
}
class StrategyShellTest(base.CommandTestCase):
SHORT_LIST_FIELDS = resource_fields.STRATEGY_SHORT_LIST_FIELDS
SHORT_LIST_FIELD_LABELS = (
resource_fields.STRATEGY_SHORT_LIST_FIELD_LABELS)
FIELDS = resource_fields.STRATEGY_FIELDS
FIELD_LABELS = resource_fields.STRATEGY_FIELD_LABELS
STATE_FIELDS = resource_fields.STRATEGY_STATE_FIELDS
STATE_FIELD_LABELS = resource_fields.STRATEGY_STATE_FIELD_LABELS
def setUp(self):
        # name the class explicitly: super(self.__class__, ...) would recurse
        # forever if this test case were ever subclassed
        super(StrategyShellTest, self).setUp()
p_strategy_manager = mock.patch.object(resource, 'StrategyManager')
self.m_strategy_mgr_cls = p_strategy_manager.start()
self.addCleanup(p_strategy_manager.stop)
self.m_strategy_mgr = mock.Mock()
self.m_strategy_mgr_cls.return_value = self.m_strategy_mgr
self.stdout = six.StringIO()
self.cmd = shell.WatcherShell(stdout=self.stdout)
def test_do_strategy_list(self):
strategy1 = resource.Strategy(mock.Mock(), STRATEGY_1)
strategy2 = resource.Strategy(mock.Mock(), STRATEGY_2)
self.m_strategy_mgr.list.return_value = [
strategy1, strategy2]
exit_code, results = self.run_cmd('strategy list')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(strategy1, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS),
self.resource_as_dict(strategy2, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS)],
results)
self.m_strategy_mgr.list.assert_called_once_with(detail=False)
def test_do_strategy_list_marker(self):
strategy2 = resource.Strategy(mock.Mock(), STRATEGY_2)
self.m_strategy_mgr.list.return_value = [strategy2]
exit_code, results = self.run_cmd(
'strategy list --marker 2cf86250-d309-4b81-818e-1537f3dba6e5')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(strategy2, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS)],
results)
self.m_strategy_mgr.list.assert_called_once_with(
detail=False,
marker='2cf86250-d309-4b81-818e-1537f3dba6e5')
def test_do_strategy_list_detail(self):
strategy1 = resource.Strategy(mock.Mock(), STRATEGY_1)
strategy2 = resource.Strategy(mock.Mock(), STRATEGY_2)
self.m_strategy_mgr.list.return_value = [
strategy1, strategy2]
exit_code, results = self.run_cmd('strategy list --detail')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(strategy1, self.FIELDS,
self.FIELD_LABELS),
self.resource_as_dict(strategy2, self.FIELDS,
self.FIELD_LABELS)],
results)
self.m_strategy_mgr.list.assert_called_once_with(detail=True)
def test_do_strategy_list_filter_by_goal_name(self):
strategy2 = resource.Strategy(mock.Mock(), STRATEGY_2)
self.m_strategy_mgr.list.return_value = [strategy2]
exit_code, results = self.run_cmd(
'strategy list --goal '
'DUMMY')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(strategy2, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS)],
results)
self.m_strategy_mgr.list.assert_called_once_with(
detail=False,
goal='DUMMY',
)
def test_do_strategy_list_filter_by_goal_uuid(self):
strategy1 = resource.Strategy(mock.Mock(), STRATEGY_1)
self.m_strategy_mgr.list.return_value = [strategy1]
exit_code, results = self.run_cmd(
'strategy list --goal '
'fc087747-61be-4aad-8126-b701731ae836')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(strategy1, self.SHORT_LIST_FIELDS,
self.SHORT_LIST_FIELD_LABELS)],
results)
self.m_strategy_mgr.list.assert_called_once_with(
detail=False,
goal='fc087747-61be-4aad-8126-b701731ae836',
)
def test_do_strategy_show_by_uuid(self):
strategy = resource.Strategy(mock.Mock(), STRATEGY_1)
self.m_strategy_mgr.get.return_value = strategy
exit_code, result = self.run_cmd(
'strategy show f8e47706-efcf-49a4-a5c4-af604eb492f2')
self.assertEqual(0, exit_code)
self.assertEqual(
self.resource_as_dict(strategy, self.FIELDS, self.FIELD_LABELS),
result)
self.m_strategy_mgr.get.assert_called_once_with(
'f8e47706-efcf-49a4-a5c4-af604eb492f2')
def test_do_strategy_state(self):
strategy1 = resource.Strategy(mock.Mock(), STRATEGY_1)
strategy_req = [
{'type': 'Datasource', 'mandatory': True,
'comment': '', 'state': 'gnocchi: True'},
{'type': 'Metrics', 'mandatory': False,
'comment': '', 'state': jsonutils.dumps([
{'compute.node.cpu.percent': 'available'},
{'cpu_util': 'available'},
{'memory.resident': 'available'},
{'hardware.memory.used': 'not available'}])},
{'type': 'CDM', 'mandatory': True,
'comment': '',
'state': jsonutils.dumps([{'compute_model': 'available'},
{'storage_model': 'not available'}])},
{'type': 'Name', 'mandatory': '', 'comment': '',
'state': strategy1.name}]
requirements = [resource.Strategy(mock.Mock(), req)
for req in strategy_req]
self.m_strategy_mgr.state.return_value = requirements
exit_code, results = self.run_cmd('strategy state basic')
self.assertEqual(0, exit_code)
self.assertEqual(
[self.resource_as_dict(req, self.STATE_FIELDS,
self.STATE_FIELD_LABELS)
for req in requirements],
results)
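# Hedged note: in OpenStack-style projects these tests are normally collected
# by the project's test runner (e.g. tox/stestr); a minimal direct run,
# assuming mock, six and oslo.serialization are installed, would be:
#
#   python -m unittest watcherclient.tests.unit.v1.test_strategy_shell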
| [
"[email protected]"
] | |
688a3352badf0b2f3f6ee521627b783d65c0d14a | d18ed72d6f8d27dd8a13eab5c6366f9dca48aa6b | /espresso/lab/hydra/usecases/cassandra/cassandra/time/Scheduler.py | 29bbabb9f1513382a67171657fec3a84c5a2406f | [] | no_license | danse-inelastic/AbInitio | 6f1dcdd26a8163fa3026883fb3c40f63d1105b0c | 401e8d5fa16b9d5ce42852b002bc2e4274afab84 | refs/heads/master | 2021-01-10T19:16:35.770411 | 2011-04-12T11:04:52 | 2011-04-12T11:04:52 | 34,972,670 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,527 | py |
class Scheduler(object):
def __init__(self):
self.now = self.getCurrentTime()
self.alarmIndex = []
self.alarms = {}
return
def alarm(self, interval, callback):
"""Call the given callback after the specified time interval
elapses."""
from pyre.units.time import second
alarmTime = self.now + interval/second
newAlarm = self.Alarm(alarmTime)
alarm = self.alarms.setdefault(alarmTime, newAlarm)
alarm.append(callback)
if alarm is newAlarm:
self.alarmIndex.append(alarmTime)
self.alarmIndex.sort(reverse = True)
return
def poll(self):
"""Call the callbacks for any alarms that have gone off.
Answer the number of seconds we can sleep until the next
alarm. If there are no more alarms, answer None."""
self.updateInternalClock()
activeAlarm = self.activeAlarm
if activeAlarm is None:
return None # sleep indefinitely
while activeAlarm.time <= self.now:
for callback in activeAlarm:
callback()
# activate the next alarm
activeAlarm = self.popActiveAlarm()
if activeAlarm is None:
return None # sleep indefinitely
return activeAlarm.time - self.now
# private
def updateInternalClock(self):
"""Advance our internal clock to the current system time."""
now = self.getCurrentTime()
if now < self.now:
self.clockSetBack(self.now - now)
self.now = now
return
def clockSetBack(self, delta):
"""The system clock was set back; update our internal data
structures."""
if not self.alarms:
return # nothing to do
        # self.alarms maps alarm times to Alarm objects, so iterate the
        # values (iterating the dict itself yields its float keys) and
        # rebuild both the sorted index and the time-keyed dict.
        self.alarmIndex = []
        alarms = {}
        for alarm in self.alarms.values():
            alarm.time -= delta
            alarms[alarm.time] = alarm
            self.alarmIndex.append(alarm.time)
        self.alarms = alarms
        self.alarmIndex.sort(reverse = True)
        return
def getActiveAlarm(self):
if self.alarmIndex:
return self.alarms[self.alarmIndex[-1]]
return None
activeAlarm = property(getActiveAlarm)
def popActiveAlarm(self):
"""Discard the currently active alarm. Answer the new active
alarm, if any."""
time = self.alarmIndex.pop()
self.alarms.pop(time)
return self.activeAlarm
from time import time as getCurrentTime
from Alarm import Alarm
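# Hedged usage sketch (assumes the pyre framework and the sibling Alarm module
# are importable, which is why it is left as comments):
#
#   import time
#   from pyre.units.time import second
#
#   def fire():
#       print("tick")
#
#   scheduler = Scheduler()
#   scheduler.alarm(2 * second, fire)
#   while True:
#       timeout = scheduler.poll()
#       if timeout is None:
#           break
#       time.sleep(timeout)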
| [
"[email protected]"
] | |
2c492ea4b85b3ed7315977442b45bb309b1f88a1 | 3c59b7bde01cfbc1fbd170883393e8ebf7a0a92f | /HackerRank/Binary Search Tree-Lowest Common Ancestor.py | 50b0ac929272da74203ba36f002280fdd857dc6f | [] | no_license | gf234/python_problem_solving | 93ae00d940091131d8f8b06e478e385e4c2a4503 | 4c95751f5a687215c14bf61c37e6dc2e7e752342 | refs/heads/main | 2023-05-10T07:28:12.351006 | 2021-06-14T04:59:33 | 2021-06-14T04:59:33 | 314,479,583 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | def lca(root, v1, v2):
if root.info < v1 and root.info < v2:
return lca(root.right, v1, v2)
elif root.info > v1 and root.info > v2:
return lca(root.left, v1, v2)
return root
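# Hedged usage sketch: HackerRank supplies its own BinarySearchTree/Node
# classes, so the tiny Node below is illustrative only.
#
#   class Node:
#       def __init__(self, info):
#           self.info, self.left, self.right = info, None, None
#
#   root = Node(4); root.left = Node(2); root.right = Node(7)
#   root.left.left = Node(1); root.left.right = Node(3)
#   assert lca(root, 1, 3).info == 2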
| [
"[email protected]"
] | |
bf62b7c5129f5a203112132403faaf45ab790855 | ab9a9736c58f388785890d42287f3ba8d5caa48d | /freqopttest/data.py | be7886f8c0fb13880b49222fad36b891c856dc65 | [
"MIT"
] | permissive | liyu95/interpretable-test | 845fc4ed67a2f1bbd9997daadf84b48aeb82e3a4 | 8141cefe138da7962ea321a179366d0951f85213 | refs/heads/master | 2021-09-04T17:38:17.164487 | 2018-01-20T15:04:30 | 2018-01-20T15:04:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,160 | py | __author__ = 'wittawat'
from abc import ABCMeta, abstractmethod
import math
import matplotlib.pyplot as plt
import numpy as np
import freqopttest.util as util
import scipy.stats as stats
class SampleSource(object):
"""A data source where it is possible to resample. Subclasses may prefix
class names with SS"""
__metaclass__ = ABCMeta
@abstractmethod
def sample(self, n, seed):
"""Return a TSTData. Returned result should be deterministic given
the input (n, seed)."""
raise NotImplementedError()
@abstractmethod
def dim(self):
"""Return the dimension of the problem"""
raise NotImplementedError()
def visualize(self, n=400):
"""Visualize the data, assuming 2d. If not possible graphically,
subclasses should print to the console instead."""
data = self.sample(n, seed=1)
x, y = data.xy()
d = x.shape[1]
if d==2:
plt.plot(x[:, 0], x[:, 1], '.r', label='X')
plt.plot(y[:, 0], y[:, 1], '.b', label='Y')
plt.legend(loc='best')
else:
# not 2d. Print stats to the console.
print(data)
class SSResample(SampleSource):
"""
A SampleSource which subsamples without replacement from two samples independently
through the specified TSTData.
"""
def __init__(self, tst_data):
self.tst_data = tst_data
def dim(self):
return self.tst_data.dim()
def sample(self, n, seed=900):
tst_sub = self.tst_data.subsample(n, seed)
return tst_sub
class SSNullResample(SampleSource):
"""
A SampleSource which subsamples without replacement from only one sample.
Randomly partition the one sample into two samples.
This is meant to simulate the case where H0: P=Q is true.
"""
def __init__(self, X):
"""
X: nxd numpy array
"""
self.X = X
def dim(self):
return self.X.shape[1]
def sample(self, n, seed=981):
if 2*n > self.X.shape[0]:
raise ValueError('2*n=%d exceeds the size of X = %d'%(2*n, self.X.shape[0]))
ind = util.subsample_ind(self.X.shape[0], 2*n, seed)
#print ind
x = self.X[ind[:n]]
y = self.X[ind[n:]]
assert(x.shape[0]==y.shape[0])
return TSTData(x, y)
class SSUnif(SampleSource):
"""
Multivariate (or univariate) uniform distributions for both P, Q
on the specified boundaries
"""
def __init__(self, plb, pub, qlb, qub):
"""
plb: a numpy array of lower bounds of p
pub: a numpy array of upper bounds of p
qlb: a numpy array of lower bounds of q
qub: a numpy array of upper bounds of q
"""
convertif = lambda a: np.array(a) if isinstance(a, list) else a
plb, pub, qlb, qub = map(convertif, [plb, pub, qlb, qub])
if not np.all([plb.shape[0]==pub.shape[0], pub.shape[0]==qlb.shape[0],
qlb.shape[0]==qub.shape[0]]):
raise ValueError('all lower and upper bounds must have the same length')
if not np.all(pub - plb > 0):
raise ValueError('Require upper - lower to be positive. False for p')
if not np.all(qub - qlb > 0):
raise ValueError('Require upper - lower to be positive. False for q')
self.plb = plb
self.pub = pub
self.qlb = qlb
self.qub = qub
def dim(self):
return self.plb.shape[0]
def sample(self, n, seed):
rstate = np.random.get_state()
np.random.seed(seed)
d = self.dim()
X = np.zeros((n, d))
Y = np.zeros((n, d))
pscale = self.pub - self.plb
qscale = self.qub - self.qlb
for i in range(d):
X[:, i] = stats.uniform.rvs(loc=self.plb[i], scale=pscale[i], size=n)
Y[:, i] = stats.uniform.rvs(loc=self.qlb[i], scale=qscale[i], size=n)
np.random.set_state(rstate)
return TSTData(X, Y, label='unif_d%d'%d)
# end of SSUnif
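# Hedged usage sketch for SSUnif (the bounds below are illustrative):
#
#   ss = SSUnif(plb=[0, 0], pub=[1, 1], qlb=[0.5, 0.5], qub=[1.5, 1.5])
#   data = ss.sample(n=100, seed=3)   # TSTData with X, Y each of shape (100, 2)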
class SSMixUnif1D(SampleSource):
"""
A one-dimensional mixture of uniforms problem.
P, Q are mixtures of uniform distributions.
"""
def __init__(self, pbs, qbs, pmix=None, qmix=None):
"""
pbs: a 2d numpy array (cx x 2) of lower/upper bounds.
pbs[i, 0] and pbs[i, 1] specifies the lower and upper bound
for the ith mixture component of P.
qbs: a 2d numpy array (cy x 2) of lower/upper bounds.
pmix: The mixture weight vector for P. A numpy array of length cx.
Automatically set to uniform weights if unspecified.
qmix: The mixture weight vector for Q. A numpy array of length cy.
"""
cx = pbs.shape[0]
cy = qbs.shape[0]
if pbs.shape[1] != 2:
raise ValueError('pbs must have 2 columns')
if qbs.shape[1] != 2:
raise ValueError('qbs must have 2 columns')
if pmix is None:
pmix = np.ones(cx)/float(cx)
if qmix is None:
qmix = np.ones(cy)/float(cy)
if len(pmix) != cx:
raise ValueError('Length of pmix does not match #rows of pbs')
if len(qmix) != cy:
raise ValueError('Length of qmix does not match #rows of qbs')
if np.abs(np.sum(pmix) - 1) > 1e-6:
raise ValueError('mixture weights pmix has to sum to 1')
if np.abs(np.sum(qmix) - 1) > 1e-6:
raise ValueError('mixture weights qmix has to sum to 1')
if not np.all(pbs[:, 1] - pbs[:, 0] > 0):
raise ValueError('Require upper - lower to be positive. False for p')
if not np.all(qbs[:, 1] - qbs[:, 0] > 0):
raise ValueError('Require upper - lower to be positive. False for q')
self.pbs = pbs
self.qbs = qbs
self.pmix = pmix
self.qmix = qmix
def dim(self):
return 1
def sample(self, n, seed):
with util.NumpySeedContext(seed=seed):
X = SSMixUnif1D.draw_mix_uniforms(self.pbs, self.pmix, n, seed)
X = X[:, np.newaxis]
Y = SSMixUnif1D.draw_mix_uniforms(self.qbs, self.qmix, n, seed+2)
Y = Y[:, np.newaxis]
return TSTData(X, Y, label='mix_unif1d')
def density_p(self, x):
"""
x: a one-dimensional array.
Return density values in a numpy array at x.
"""
return SSMixUnif1D.density_mix_uniforms(x, self.pbs, self.pmix)
def density_q(self, y):
"""
y: a one-dimensional array.
Return density values in a numpy array at y.
"""
return SSMixUnif1D.density_mix_uniforms(y, self.qbs, self.qmix)
@staticmethod
def density_mix_uniforms(x, pbs, pmix):
"""
Evaluate the density of the mixture of uniforms at the locations in x
(a one-dimensional numpy array).
"""
scale = pbs[:, 1] - pbs[:, 0]
den = np.zeros(len(x))
for i, pi in enumerate(pbs):
den += pmix[i]*stats.uniform.pdf(x, loc=pbs[i, 0], scale=scale[i])
return den
@staticmethod
def draw_mix_uniforms(pbs, pmix, n, seed):
"""
Draw n samples from a mixture of uniform distributions whose bounds
are specified in pbs, and mixing proportion is specified in pmix.
Return a one-dimensional numpy array of n samples.
"""
assert pbs.shape[0] == len(pmix)
c = len(pmix)
sam_list = []
scale = pbs[:, 1] - pbs[:, 0]
with util.NumpySeedContext(seed=seed):
# counts for each mixture component
counts = np.random.multinomial(n, pmix, size=1)
# counts is a 2d array
counts = counts[0]
# For each component, draw from its corresponding uniform
# distribution.
for i, nc in enumerate(counts):
#print i
sam_i = stats.uniform.rvs(loc=pbs[i, 0], scale=scale[i], size=nc)
#print 'pbs[{0},0]: {1}'.format(i, pbs[i, 0])
#print 'sam_i: {0}'.format(sam_i)
sam_list.append(sam_i)
sample = np.hstack(sam_list)
np.random.shuffle(sample)
return sample
# end of SSMixUnif1D
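# Hedged usage sketch for SSMixUnif1D (component bounds are illustrative):
#
#   pbs = np.array([[0.0, 1.0], [2.0, 3.0]])   # P: mixture of two uniforms
#   qbs = np.array([[0.5, 2.5]])               # Q: a single wider uniform
#   ss = SSMixUnif1D(pbs, qbs)
#   X, Y = ss.sample(n=200, seed=7).xy()       # each of shape (200, 1)
#   densities = ss.density_p(np.linspace(0, 3, 50))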
class SSBlobs(SampleSource):
"""Mixture of 2d Gaussians arranged in a 2d grid. This dataset is used
in Chwialkovski et al., 2015 as well as Gretton et al., 2012.
Part of the code taken from Dino Sejdinovic and Kacper Chwialkovski's code."""
def __init__(self, blob_distance=5, num_blobs=4, stretch=2, angle=math.pi/4.0):
self.blob_distance = blob_distance
self.num_blobs = num_blobs
self.stretch = stretch
self.angle = angle
def dim(self):
return 2
def sample(self, n, seed):
rstate = np.random.get_state()
np.random.seed(seed)
x = gen_blobs(stretch=1, angle=0, blob_distance=self.blob_distance,
num_blobs=self.num_blobs, num_samples=n)
y = gen_blobs(stretch=self.stretch, angle=self.angle,
blob_distance=self.blob_distance, num_blobs=self.num_blobs,
num_samples=n)
np.random.set_state(rstate)
return TSTData(x, y, label='blobs_s%d'%seed)
def gen_blobs(stretch, angle, blob_distance, num_blobs, num_samples):
"""Generate 2d blobs dataset """
# rotation matrix
r = np.array( [[math.cos(angle), -math.sin(angle)], [math.sin(angle), math.cos(angle)]] )
eigenvalues = np.diag(np.array([np.sqrt(stretch), 1]))
    mod_matrix = r.dot(eigenvalues)
    mean = float(blob_distance * (num_blobs-1)) / 2
    mu = np.random.randint(0, num_blobs,(num_samples, 2))*blob_distance - mean
    return np.random.randn(num_samples,2).dot(mod_matrix) + mu
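# Hedged sketch: SSBlobs above pairs an axis-aligned grid of Gaussians (P)
# with a stretched/rotated one (Q) built from gen_blobs; for example:
#
#   blobs = SSBlobs()
#   data = blobs.sample(n=500, seed=2)   # TSTData labeled 'blobs_s2'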
class SSSameGauss(SampleSource):
"""Two same standard Gaussians for P, Q. The null hypothesis
H0: P=Q is true."""
def __init__(self, d):
"""
d: dimension of the data
"""
self.d = d
def dim(self):
return self.d
def sample(self, n, seed):
rstate = np.random.get_state()
np.random.seed(seed)
d = self.d
X = np.random.randn(n, d)
Y = np.random.randn(n, d)
np.random.set_state(rstate)
return TSTData(X, Y, label='sg_d%d'%self.d)
class SSGaussMeanDiff(SampleSource):
"""Toy dataset one in Chwialkovski et al., 2015.
P = N(0, I), Q = N( (my,0,0, 000), I). Only the first dimension of the means
differ."""
def __init__(self, d, my=1.0):
"""
d: dimension of the data
"""
self.d = d
self.my = my
def dim(self):
return self.d
def sample(self, n, seed):
rstate = np.random.get_state()
np.random.seed(seed)
d = self.d
mean_y = np.hstack((self.my, np.zeros(d-1) ))
X = np.random.randn(n, d)
Y = np.random.randn(n, d) + mean_y
np.random.set_state(rstate)
return TSTData(X, Y, label='gmd_d%d'%self.d)
class SSGaussVarDiff(SampleSource):
"""Toy dataset two in Chwialkovski et al., 2015.
P = N(0, I), Q = N(0, diag((2, 1, 1, ...))). Only the variances of the first
dimension differ."""
def __init__(self, d):
"""
d: dimension of the data
"""
self.d = d
def dim(self):
return self.d
def sample(self, n, seed):
rstate = np.random.get_state()
np.random.seed(seed)
d = self.d
std_y = np.diag(np.hstack((np.sqrt(2.0), np.ones(d-1) )))
X = np.random.randn(n, d)
Y = np.random.randn(n, d).dot(std_y)
np.random.set_state(rstate)
return TSTData(X, Y, label='gvd')
class TSTData(object):
"""Class representing data for two-sample test"""
"""
properties:
X, Y: numpy array
"""
def __init__(self, X, Y, label=None):
"""
:param X: n x d numpy array for dataset X
:param Y: n x d numpy array for dataset Y
"""
self.X = X
self.Y = Y
# short description to be used as a plot label
self.label = label
nx, dx = X.shape
ny, dy = Y.shape
#if nx != ny:
# raise ValueError('Data sizes must be the same.')
if dx != dy:
raise ValueError('Dimension sizes of the two datasets must be the same.')
def __str__(self):
mean_x = np.mean(self.X, 0)
std_x = np.std(self.X, 0)
mean_y = np.mean(self.Y, 0)
std_y = np.std(self.Y, 0)
prec = 4
desc = ''
desc += 'E[x] = %s \n'%(np.array_str(mean_x, precision=prec ) )
desc += 'E[y] = %s \n'%(np.array_str(mean_y, precision=prec ) )
desc += 'Std[x] = %s \n' %(np.array_str(std_x, precision=prec))
desc += 'Std[y] = %s \n' %(np.array_str(std_y, precision=prec))
return desc
def dimension(self):
"""Return the dimension of the data."""
dx = self.X.shape[1]
return dx
def dim(self):
"""Same as dimension()"""
return self.dimension()
def stack_xy(self):
"""Stack the two datasets together"""
return np.vstack((self.X, self.Y))
def xy(self):
"""Return (X, Y) as a tuple"""
return (self.X, self.Y)
def mean_std(self):
"""Compute the average standard deviation """
#Gaussian width = mean of stds of all dimensions
X, Y = self.xy()
stdx = np.mean(np.std(X, 0))
stdy = np.mean(np.std(Y, 0))
mstd = (stdx + stdy)/2.0
return mstd
#xy = self.stack_xy()
#return np.mean(np.std(xy, 0)**2.0)**0.5
def split_tr_te(self, tr_proportion=0.5, seed=820):
"""Split the dataset into training and test sets. Assume n is the same
for both X, Y.
Return (TSTData for tr, TSTData for te)"""
X = self.X
Y = self.Y
nx, dx = X.shape
ny, dy = Y.shape
if nx != ny:
raise ValueError('Require nx = ny')
Itr, Ite = util.tr_te_indices(nx, tr_proportion, seed)
label = '' if self.label is None else self.label
tr_data = TSTData(X[Itr, :], Y[Itr, :], 'tr_' + label)
te_data = TSTData(X[Ite, :], Y[Ite, :], 'te_' + label)
return (tr_data, te_data)
def subsample(self, n, seed=87):
"""Subsample without replacement. Return a new TSTData """
if n > self.X.shape[0] or n > self.Y.shape[0]:
raise ValueError('n should not be larger than sizes of X, Y.')
ind_x = util.subsample_ind( self.X.shape[0], n, seed )
ind_y = util.subsample_ind( self.Y.shape[0], n, seed )
return TSTData(self.X[ind_x, :], self.Y[ind_y, :], self.label)
### end TSTData class
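# Hedged end-to-end sketch tying the sample sources and TSTData together
# (dimension and sample sizes are illustrative):
#
#   ss = SSGaussMeanDiff(d=5, my=1.0)
#   tst_data = ss.sample(n=400, seed=11)
#   tr, te = tst_data.split_tr_te(tr_proportion=0.5, seed=12)
#   width = tr.mean_std()   # simple Gaussian-width heuristic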
def toy_2d_gauss_mean_diff(n_each, shift_2d=[0, 0], seed=1):
"""Generate a 2d toy data X, Y. 2 unit-variance Gaussians with different means."""
rand_state = np.random.get_state()
np.random.seed(seed)
d = 2
    mean = [0, 0]
X = np.random.randn(n_each, d) + mean
Y = np.random.randn(n_each, d) + mean + shift_2d
tst_data = TSTData(X, Y, '2D-N. shift: %s'%(str(shift_2d)) )
np.random.set_state(rand_state)
return tst_data
def toy_2d_gauss_variance_diff(n_each, std_factor=2, seed=1):
"""Generate a 2d toy data X, Y. 2 zero-mean Gaussians with different variances."""
rand_state = np.random.get_state()
np.random.seed(seed)
d = 2
X = np.random.randn(n_each, d)
Y = np.random.randn(n_each, d)*std_factor
tst_data = TSTData(X, Y, '2D-N. std_factor: %.3g'%(std_factor) )
np.random.set_state(rand_state)
return tst_data
def plot_2d_data(tst_data):
"""
tst_data: an instance of TSTData
Return a figure handle
"""
X, Y = tst_data.xy()
n, d = X.shape
if d != 2:
raise ValueError('d must be 2 to plot.')
# plot
fig = plt.figure()
plt.plot(X[:, 0], X[:, 1], 'xb', label='X')
plt.plot(Y[:, 0], Y[:, 1], 'xr', label='Y')
plt.title(tst_data.label)
plt.legend()
return fig
| [
"[email protected]"
] | |
69eac2df32559b7d28c3633a5c1a52fe3f26db22 | b144c5142226de4e6254e0044a1ca0fcd4c8bbc6 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/trafficitem.py | 65a21b5e1c5dea57cd19f551f1c9e9b09ae2fd0f | [
"MIT"
] | permissive | iwanb/ixnetwork_restpy | fa8b885ea7a4179048ef2636c37ef7d3f6692e31 | c2cb68fee9f2cc2f86660760e9e07bd06c0013c2 | refs/heads/master | 2021-01-02T17:27:37.096268 | 2020-02-11T09:28:15 | 2020-02-11T09:28:15 | 239,721,780 | 0 | 0 | NOASSERTION | 2020-02-11T09:20:22 | 2020-02-11T09:20:21 | null | UTF-8 | Python | false | false | 37,680 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class TrafficItem(Base):
"""This object specifies the particular traffic item related properties.
The TrafficItem class encapsulates a list of trafficItem resources that is be managed by the user.
A list of resources can be retrieved from the server using the TrafficItem.find() method.
The list can be managed by the user by using the TrafficItem.add() and TrafficItem.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'trafficItem'
def __init__(self, parent):
super(TrafficItem, self).__init__(parent)
@property
def AppLibProfile(self):
"""An instance of the AppLibProfile class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.applibprofile.applibprofile.AppLibProfile)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.applibprofile.applibprofile import AppLibProfile
return AppLibProfile(self)
@property
def ConfigElement(self):
"""An instance of the ConfigElement class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.configelement.configelement.ConfigElement)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.configelement.configelement import ConfigElement
return ConfigElement(self)
@property
def DynamicUpdate(self):
"""An instance of the DynamicUpdate class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.dynamicupdate.dynamicupdate.DynamicUpdate)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.dynamicupdate.dynamicupdate import DynamicUpdate
return DynamicUpdate(self)
@property
def EgressTracking(self):
"""An instance of the EgressTracking class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.egresstracking.egresstracking.EgressTracking)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.egresstracking.egresstracking import EgressTracking
return EgressTracking(self)
@property
def EndpointSet(self):
"""An instance of the EndpointSet class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.endpointset.endpointset.EndpointSet)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.endpointset.endpointset import EndpointSet
return EndpointSet(self)
@property
def HighLevelStream(self):
"""An instance of the HighLevelStream class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.highlevelstream.highlevelstream.HighLevelStream)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.highlevelstream.highlevelstream import HighLevelStream
return HighLevelStream(self)
@property
def Tracking(self):
"""An instance of the Tracking class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.tracking.tracking.Tracking)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.tracking.tracking import Tracking
return Tracking(self)
@property
def TransmissionDistribution(self):
"""An instance of the TransmissionDistribution class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.transmissiondistribution.transmissiondistribution.TransmissionDistribution)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.traffic.trafficitem.transmissiondistribution.transmissiondistribution import TransmissionDistribution
return TransmissionDistribution(self)
@property
def AllowSelfDestined(self):
"""If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
Returns:
bool
"""
return self._get_attribute('allowSelfDestined')
@AllowSelfDestined.setter
def AllowSelfDestined(self, value):
self._set_attribute('allowSelfDestined', value)
@property
def BiDirectional(self):
"""If true, this enables traffic to be sent in forward and reverse destination.
Returns:
bool
"""
return self._get_attribute('biDirectional')
@BiDirectional.setter
def BiDirectional(self, value):
self._set_attribute('biDirectional', value)
@property
def EgressEnabled(self):
"""Enables the egress.
Returns:
bool
"""
return self._get_attribute('egressEnabled')
@EgressEnabled.setter
def EgressEnabled(self, value):
self._set_attribute('egressEnabled', value)
@property
def EnableDynamicMplsLabelValues(self):
"""Enables the dynamic MPLS label values.
Returns:
bool
"""
return self._get_attribute('enableDynamicMplsLabelValues')
@EnableDynamicMplsLabelValues.setter
def EnableDynamicMplsLabelValues(self, value):
self._set_attribute('enableDynamicMplsLabelValues', value)
@property
def Enabled(self):
"""If true, this enables the selected traffic item.
Returns:
bool
"""
return self._get_attribute('enabled')
@Enabled.setter
def Enabled(self, value):
self._set_attribute('enabled', value)
@property
def Errors(self):
"""Displays the errors.
Returns:
list(str)
"""
return self._get_attribute('errors')
@property
def FlowGroupCount(self):
"""Indicates the number of flow groups.
Returns:
number
"""
return self._get_attribute('flowGroupCount')
@property
def FrerDuplicateElimination(self):
"""
Returns:
bool
"""
return self._get_attribute('frerDuplicateElimination')
@FrerDuplicateElimination.setter
def FrerDuplicateElimination(self, value):
self._set_attribute('frerDuplicateElimination', value)
@property
def HasOpenFlow(self):
"""Indicates whether or not this trafficItem has openflow.
Returns:
bool
"""
return self._get_attribute('hasOpenFlow')
@HasOpenFlow.setter
def HasOpenFlow(self, value):
self._set_attribute('hasOpenFlow', value)
@property
def HostsPerNetwork(self):
"""The number of emulated hosts for the traffic stream.
Returns:
number
"""
return self._get_attribute('hostsPerNetwork')
@HostsPerNetwork.setter
def HostsPerNetwork(self, value):
self._set_attribute('hostsPerNetwork', value)
@property
def InterAsBgpPreference(self):
"""Signifies the inter as BGP prefence
Returns:
str(one|two)
"""
return self._get_attribute('interAsBgpPreference')
@InterAsBgpPreference.setter
def InterAsBgpPreference(self, value):
self._set_attribute('interAsBgpPreference', value)
@property
def InterAsLdpPreference(self):
"""Preferences inter as LDP
Returns:
str(one|two)
"""
return self._get_attribute('interAsLdpPreference')
@InterAsLdpPreference.setter
def InterAsLdpPreference(self, value):
self._set_attribute('interAsLdpPreference', value)
@property
def MaxNumberOfVpnLabelStack(self):
"""Signifies the maximum number of VPN label stack
Returns:
number
"""
return self._get_attribute('maxNumberOfVpnLabelStack')
@MaxNumberOfVpnLabelStack.setter
def MaxNumberOfVpnLabelStack(self, value):
self._set_attribute('maxNumberOfVpnLabelStack', value)
@property
def MergeDestinations(self):
"""If true, merges the traffic flow in the destination ranges.
Returns:
bool
"""
return self._get_attribute('mergeDestinations')
@MergeDestinations.setter
def MergeDestinations(self, value):
self._set_attribute('mergeDestinations', value)
@property
def MulticastForwardingMode(self):
"""
Returns:
str(loadBalancing|replication)
"""
return self._get_attribute('multicastForwardingMode')
@MulticastForwardingMode.setter
def MulticastForwardingMode(self, value):
self._set_attribute('multicastForwardingMode', value)
@property
def Name(self):
"""The name of the traffic item.
Returns:
str
"""
return self._get_attribute('name')
@Name.setter
def Name(self, value):
self._set_attribute('name', value)
@property
def NumVlansForMulticastReplication(self):
"""Set the number of vlans for multicast replication
Returns:
number
"""
return self._get_attribute('numVlansForMulticastReplication')
@NumVlansForMulticastReplication.setter
def NumVlansForMulticastReplication(self, value):
self._set_attribute('numVlansForMulticastReplication', value)
@property
def OrdinalNo(self):
"""Signifies the ordinal number
Returns:
number
"""
return self._get_attribute('ordinalNo')
@OrdinalNo.setter
def OrdinalNo(self, value):
self._set_attribute('ordinalNo', value)
@property
def OriginatorType(self):
"""Indicates who created this trafficItem.
Returns:
str(endUser|quickTest)
"""
return self._get_attribute('originatorType')
@OriginatorType.setter
def OriginatorType(self, value):
self._set_attribute('originatorType', value)
@property
def RoundRobinPacketOrdering(self):
"""This option enables Round Robin Packet Ordering within endpoints across Rx ports.
Returns:
bool
"""
return self._get_attribute('roundRobinPacketOrdering')
@RoundRobinPacketOrdering.setter
def RoundRobinPacketOrdering(self, value):
self._set_attribute('roundRobinPacketOrdering', value)
@property
def RouteMesh(self):
"""The traffic flow type between each pair of source route endpoint and destination route endpoint.
Returns:
str(fullMesh|oneToOne)
"""
return self._get_attribute('routeMesh')
@RouteMesh.setter
def RouteMesh(self, value):
self._set_attribute('routeMesh', value)
@property
def SrcDestMesh(self):
"""Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
Returns:
str(fullMesh|manyToMany|none|oneToOne)
"""
return self._get_attribute('srcDestMesh')
@SrcDestMesh.setter
def SrcDestMesh(self, value):
self._set_attribute('srcDestMesh', value)
@property
def State(self):
"""(Read only) A read-only field which indicates the current state of the traffic item.
Returns:
str
"""
return self._get_attribute('state')
@property
def Suspend(self):
"""Suspends all traffic on this stream.
Returns:
bool
"""
return self._get_attribute('suspend')
@Suspend.setter
def Suspend(self, value):
self._set_attribute('suspend', value)
@property
def TrafficItemType(self):
"""Helps to configure and edit a traffic item that is sent across Ixia ports.
Returns:
str(application|applicationLibrary|l2L3|quick)
"""
return self._get_attribute('trafficItemType')
@TrafficItemType.setter
def TrafficItemType(self, value):
self._set_attribute('trafficItemType', value)
@property
def TrafficType(self):
"""Helps to select the type of traffic endpoint to be configured.
Returns:
str(atm|avb1722|avbRaw|ethernetVlan|fc|fcoe|frameRelay|hdlc|ipv4|ipv4ApplicationTraffic|ipv6|ipv6ApplicationTraffic|ppp|raw)
"""
return self._get_attribute('trafficType')
@TrafficType.setter
def TrafficType(self, value):
self._set_attribute('trafficType', value)
@property
def TransmitMode(self):
"""The transmit mode for this traffic item
Returns:
str(interleaved|sequential)
"""
return self._get_attribute('transmitMode')
@TransmitMode.setter
def TransmitMode(self, value):
self._set_attribute('transmitMode', value)
@property
def TransportLdpPreference(self):
"""Transports LDP preference
Returns:
str(one|two)
"""
return self._get_attribute('transportLdpPreference')
@TransportLdpPreference.setter
def TransportLdpPreference(self, value):
self._set_attribute('transportLdpPreference', value)
@property
def TransportRsvpTePreference(self):
"""Transports RSVP TE preference
Returns:
str(one|two)
"""
return self._get_attribute('transportRsvpTePreference')
@TransportRsvpTePreference.setter
def TransportRsvpTePreference(self, value):
self._set_attribute('transportRsvpTePreference', value)
@property
def UseControlPlaneFrameSize(self):
"""
Returns:
bool
"""
return self._get_attribute('useControlPlaneFrameSize')
@UseControlPlaneFrameSize.setter
def UseControlPlaneFrameSize(self, value):
self._set_attribute('useControlPlaneFrameSize', value)
@property
def UseControlPlaneRate(self):
"""
Returns:
bool
"""
return self._get_attribute('useControlPlaneRate')
@UseControlPlaneRate.setter
def UseControlPlaneRate(self, value):
self._set_attribute('useControlPlaneRate', value)
@property
def Warnings(self):
"""Displays the warnings.
Returns:
list(str)
"""
return self._get_attribute('warnings')
def update(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, Enabled=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None):
"""Updates a child instance of trafficItem on the server.
Args:
AllowSelfDestined (bool): If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
BiDirectional (bool): If true, this enables traffic to be sent in forward and reverse destination.
EgressEnabled (bool): Enables the egress.
EnableDynamicMplsLabelValues (bool): Enables the dynamic MPLS label values.
Enabled (bool): If true, this enables the selected traffic item.
FrerDuplicateElimination (bool):
HasOpenFlow (bool): Indicates whether or not this trafficItem has openflow.
HostsPerNetwork (number): The number of emulated hosts for the traffic stream.
            InterAsBgpPreference (str(one|two)): Signifies the inter-AS BGP preference
            InterAsLdpPreference (str(one|two)): Signifies the inter-AS LDP preference
MaxNumberOfVpnLabelStack (number): Signifies the maximum number of VPN label stack
MergeDestinations (bool): If true, merges the traffic flow in the destination ranges.
MulticastForwardingMode (str(loadBalancing|replication)):
Name (str): The name of the traffic item.
NumVlansForMulticastReplication (number): Set the number of vlans for multicast replication
OrdinalNo (number): Signifies the ordinal number
OriginatorType (str(endUser|quickTest)): Indicates who created this trafficItem.
RoundRobinPacketOrdering (bool): This option enables Round Robin Packet Ordering within endpoints across Rx ports.
RouteMesh (str(fullMesh|oneToOne)): The traffic flow type between each pair of source route endpoint and destination route endpoint.
SrcDestMesh (str(fullMesh|manyToMany|none|oneToOne)): Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
Suspend (bool): Suspends all traffic on this stream.
TrafficItemType (str(application|applicationLibrary|l2L3|quick)): Helps to configure and edit a traffic item that is sent across Ixia ports.
TrafficType (str(atm|avb1722|avbRaw|ethernetVlan|fc|fcoe|frameRelay|hdlc|ipv4|ipv4ApplicationTraffic|ipv6|ipv6ApplicationTraffic|ppp|raw)): Helps to select the type of traffic endpoint to be configured.
TransmitMode (str(interleaved|sequential)): The transmit mode for this traffic item
TransportLdpPreference (str(one|two)): Transports LDP preference
TransportRsvpTePreference (str(one|two)): Transports RSVP TE preference
UseControlPlaneFrameSize (bool):
UseControlPlaneRate (bool):
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
self._update(locals())
def add(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, Enabled=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None):
"""Adds a new trafficItem node on the server and retrieves it in this instance.
Args:
AllowSelfDestined (bool): If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
BiDirectional (bool): If true, this enables traffic to be sent in forward and reverse destination.
EgressEnabled (bool): Enables the egress.
EnableDynamicMplsLabelValues (bool): Enables the dynamic MPLS label values.
Enabled (bool): If true, this enables the selected traffic item.
FrerDuplicateElimination (bool):
HasOpenFlow (bool): Indicates whether or not this trafficItem has openflow.
HostsPerNetwork (number): The number of emulated hosts for the traffic stream.
            InterAsBgpPreference (str(one|two)): Signifies the inter-AS BGP preference
            InterAsLdpPreference (str(one|two)): Signifies the inter-AS LDP preference
MaxNumberOfVpnLabelStack (number): Signifies the maximum number of VPN label stack
MergeDestinations (bool): If true, merges the traffic flow in the destination ranges.
MulticastForwardingMode (str(loadBalancing|replication)):
Name (str): The name of the traffic item.
NumVlansForMulticastReplication (number): Set the number of vlans for multicast replication
OrdinalNo (number): Signifies the ordinal number
OriginatorType (str(endUser|quickTest)): Indicates who created this trafficItem.
RoundRobinPacketOrdering (bool): This option enables Round Robin Packet Ordering within endpoints across Rx ports.
RouteMesh (str(fullMesh|oneToOne)): The traffic flow type between each pair of source route endpoint and destination route endpoint.
SrcDestMesh (str(fullMesh|manyToMany|none|oneToOne)): Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
Suspend (bool): Suspends all traffic on this stream.
TrafficItemType (str(application|applicationLibrary|l2L3|quick)): Helps to configure and edit a traffic item that is sent across Ixia ports.
TrafficType (str(atm|avb1722|avbRaw|ethernetVlan|fc|fcoe|frameRelay|hdlc|ipv4|ipv4ApplicationTraffic|ipv6|ipv6ApplicationTraffic|ppp|raw)): Helps to select the type of traffic endpoint to be configured.
TransmitMode (str(interleaved|sequential)): The transmit mode for this traffic item
TransportLdpPreference (str(one|two)): Transports LDP preference
TransportRsvpTePreference (str(one|two)): Transports RSVP TE preference
UseControlPlaneFrameSize (bool):
UseControlPlaneRate (bool):
Returns:
self: This instance with all currently retrieved trafficItem data using find and the newly added trafficItem data available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._create(locals())
def remove(self):
"""Deletes all the trafficItem data in this instance from server.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, AllowSelfDestined=None, BiDirectional=None, EgressEnabled=None, EnableDynamicMplsLabelValues=None, Enabled=None, Errors=None, FlowGroupCount=None, FrerDuplicateElimination=None, HasOpenFlow=None, HostsPerNetwork=None, InterAsBgpPreference=None, InterAsLdpPreference=None, MaxNumberOfVpnLabelStack=None, MergeDestinations=None, MulticastForwardingMode=None, Name=None, NumVlansForMulticastReplication=None, OrdinalNo=None, OriginatorType=None, RoundRobinPacketOrdering=None, RouteMesh=None, SrcDestMesh=None, State=None, Suspend=None, TrafficItemType=None, TrafficType=None, TransmitMode=None, TransportLdpPreference=None, TransportRsvpTePreference=None, UseControlPlaneFrameSize=None, UseControlPlaneRate=None, Warnings=None):
"""Finds and retrieves trafficItem data from the server.
All named parameters support regex and can be used to selectively retrieve trafficItem data from the server.
By default the find method takes no parameters and will retrieve all trafficItem data from the server.
Args:
AllowSelfDestined (bool): If true, this helps to send traffic from routes on an Ixia port to other routes on the same Ixia port.
BiDirectional (bool): If true, this enables traffic to be sent in forward and reverse destination.
EgressEnabled (bool): Enables the egress.
EnableDynamicMplsLabelValues (bool): Enables the dynamic MPLS label values.
Enabled (bool): If true, this enables the selected traffic item.
Errors (list(str)): Displays the errors.
FlowGroupCount (number): Indicates the number of flow groups.
FrerDuplicateElimination (bool):
HasOpenFlow (bool): Indicates whether or not this trafficItem has openflow.
HostsPerNetwork (number): The number of emulated hosts for the traffic stream.
            InterAsBgpPreference (str(one|two)): Signifies the inter-AS BGP preference
            InterAsLdpPreference (str(one|two)): Signifies the inter-AS LDP preference
MaxNumberOfVpnLabelStack (number): Signifies the maximum number of VPN label stack
MergeDestinations (bool): If true, merges the traffic flow in the destination ranges.
MulticastForwardingMode (str(loadBalancing|replication)):
Name (str): The name of the traffic item.
NumVlansForMulticastReplication (number): Set the number of vlans for multicast replication
OrdinalNo (number): Signifies the ordinal number
OriginatorType (str(endUser|quickTest)): Indicates who created this trafficItem.
RoundRobinPacketOrdering (bool): This option enables Round Robin Packet Ordering within endpoints across Rx ports.
RouteMesh (str(fullMesh|oneToOne)): The traffic flow type between each pair of source route endpoint and destination route endpoint.
SrcDestMesh (str(fullMesh|manyToMany|none|oneToOne)): Select the options to set the traffic mesh type between the Source Endpoint and Destination endpoint.
State (str): (Read only) A read-only field which indicates the current state of the traffic item.
Suspend (bool): Suspends all traffic on this stream.
TrafficItemType (str(application|applicationLibrary|l2L3|quick)): Helps to configure and edit a traffic item that is sent across Ixia ports.
TrafficType (str(atm|avb1722|avbRaw|ethernetVlan|fc|fcoe|frameRelay|hdlc|ipv4|ipv4ApplicationTraffic|ipv6|ipv6ApplicationTraffic|ppp|raw)): Helps to select the type of traffic endpoint to be configured.
TransmitMode (str(interleaved|sequential)): The transmit mode for this traffic item
TransportLdpPreference (str(one|two)): Transports LDP preference
TransportRsvpTePreference (str(one|two)): Transports RSVP TE preference
UseControlPlaneFrameSize (bool):
UseControlPlaneRate (bool):
Warnings (list(str)): Displays the warnings.
Returns:
self: This instance with matching trafficItem data retrieved from the server available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._select(locals())
def read(self, href):
"""Retrieves a single instance of trafficItem data from the server.
Args:
href (str): An href to the instance to be retrieved
Returns:
self: This instance with the trafficItem data from the server available through an iterator or index
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def ConvertToRaw(self):
"""Executes the convertToRaw operation on the server.
Converts a non-raw traffic item to a raw traffic item.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('convertToRaw', payload=payload, response_object=None)
def Duplicate(self, *args, **kwargs):
"""Executes the duplicate operation on the server.
Duplicates a specific traffic item.
duplicate(Arg2:number)
Args:
args[0] is Arg2 (number): The number of times to duplicate the traffic item.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('duplicate', payload=payload, response_object=None)
def DuplicateItems(self):
"""Executes the duplicateItems operation on the server.
Duplicates a list of traffic items.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('duplicateItems', payload=payload, response_object=None)
def Generate(self):
"""Executes the generate operation on the server.
Generate traffic for specific traffic items.
The IxNetwork modeling infrastructure allows for multiple method Signatures with the same name while python does not.
The following correlates the modeling Signatures to the python *args variable length list:
generate()
generate()
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('generate', payload=payload, response_object=None)
def ResolveAptixiaEndpoints(self):
"""Executes the resolveAptixiaEndpoints operation on the server.
Resolves /vport/protocolStack/. endpoints being used by a specific traffic item.
Returns:
str: This exec returns a string containing the resolved endpoints.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('resolveAptixiaEndpoints', payload=payload, response_object=None)
def StartDefaultLearning(self):
"""Executes the startDefaultLearning operation on the server.
Starts default learning for a list of traffic items.
The IxNetwork modeling infrastructure allows for multiple method Signatures with the same name while python does not.
The following correlates the modeling Signatures to the python *args variable length list:
startDefaultLearning()
startDefaultLearning()
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('startDefaultLearning', payload=payload, response_object=None)
def StartLearning(self, *args, **kwargs):
"""Executes the startLearning operation on the server.
Sends learning frames.
The IxNetwork modeling infrastructure allows for multiple method Signatures with the same name while python does not.
The following correlates the modeling Signatures to the python *args variable length list:
startLearning(Arg2:number, Arg3:number, Arg4:number)
Args:
args[0] is Arg2 (number): The framesize of the learning frame.
args[1] is Arg3 (number): The framecount of the learning frames.
args[2] is Arg4 (number): The frames per second of the learning frames.
startLearning(Arg2:number, Arg3:number, Arg4:number, Arg5:bool, Arg6:bool, Arg7:bool)
Args:
args[0] is Arg2 (number): The framesize of the learning frame.
args[1] is Arg3 (number): The framecount of the learning frames.
args[2] is Arg4 (number): The frames per second of the learning frames.
args[3] is Arg5 (bool): Send gratuitous ARP frames.
args[4] is Arg6 (bool): Send MAC frames.
args[5] is Arg7 (bool): Send Fast Path frames.
startLearning(Arg2:number, Arg3:number, Arg4:number, Arg5:bool, Arg6:bool, Arg7:bool, Arg8:bool)
Args:
args[0] is Arg2 (number): The framesize of the learning frame.
args[1] is Arg3 (number): The framecount of the learning frames.
args[2] is Arg4 (number): The frames per second of the learning frames.
args[3] is Arg5 (bool): Send gratuitous ARP frames.
args[4] is Arg6 (bool): Send MAC frames.
args[5] is Arg7 (bool): Send Fast Path frames.
args[6] is Arg8 (bool): Send full mesh.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('startLearning', payload=payload, response_object=None)
def StartStatelessTraffic(self):
"""Executes the startStatelessTraffic operation on the server.
Start the traffic configuration for stateless traffic items only.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('startStatelessTraffic', payload=payload, response_object=None)
def StartStatelessTrafficBlocking(self):
"""Executes the startStatelessTrafficBlocking operation on the server.
Start the traffic configuration for stateless traffic items only. This will block until traffic is fully started.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('startStatelessTrafficBlocking', payload=payload, response_object=None)
def StopStatelessTraffic(self):
"""Executes the stopStatelessTraffic operation on the server.
Stop the stateless traffic items.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('stopStatelessTraffic', payload=payload, response_object=None)
def StopStatelessTrafficBlocking(self):
"""Executes the stopStatelessTrafficBlocking operation on the server.
Stop the traffic configuration for stateless traffic items only. This will block until traffic is fully stopped.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('stopStatelessTrafficBlocking', payload=payload, response_object=None)
| [
"[email protected]"
] | |
94403f1b21bd73a763eaa34c5f40bef0076041d1 | f3b233e5053e28fa95c549017bd75a30456eb50c | /jnk1_input/24/24-36_MD_NVT_rerun/set_2.py | 3ad7ebd55a5bdc277eef71cc5e0ea648a32b10d0 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | import os
dir = '/mnt/scratch/songlin3/run/jnkl/L624/MD_NVT_rerun/ti_one-step/24_36/'  # note: shadows the built-in dir()
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_2.in'
temp_pbs = filesdir + 'temp_2.pbs'
# lambda windows for the one-step TI run; these values look like 12-point
# Gaussian-quadrature abscissas on [0, 1], as commonly used in Amber TI
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#prodin
prodin = workdir + "%6.5f_prod_2.in" %(j)
os.system("cp %s %s" %(temp_prodin, prodin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
#PBS
pbs = workdir + "%6.5f_2.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"[email protected]"
] | |
53d454a183cbff0e567046f5fd4165f5a6a8bd6d | 41d96fec63aff02b9eb0152002f45b1b867bfafe | /experiments/tools/noise.py | b725ec234fe89a9a7e61b4a4de3dd5803acf4c04 | [] | no_license | jon--lee/dart_dev | fef7b18e0c05ac64b3a1b1c42c1e0ac071504619 | 2539f1d918c07c15dea8d72865bb2faf1a49d22c | refs/heads/master | 2020-03-14T04:29:23.200345 | 2018-06-21T07:03:38 | 2018-06-21T07:03:38 | 131,442,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,239 | py | import numpy as np
import statistics
import IPython
def sample_covariance_lnr(env, lnr, sup, samples, T):
    d = env.action_space.shape[0]
    cov = np.zeros((d, d))  # accumulate a full d x d matrix, matching sample_covariance_trajs
for s in range(samples):
states, tmp_actions, _, _ = statistics.collect_traj(env, lnr, T)
sup_actions = np.array([sup.intended_action(s) for s in states])
lnr_actions = np.array(tmp_actions)
length = len(tmp_actions)
diff = sup_actions - lnr_actions
cov = cov + np.dot(diff.T, diff) / float(length)
return cov / float(samples)
def sample_covariance_sup(env, lnr, sup, samples, T):
    d = env.action_space.shape[0]
    cov = np.zeros((d, d))  # accumulate a full d x d matrix, matching sample_covariance_trajs
for s in range(samples):
states, tmp_actions, _, _ = statistics.collect_traj(env, sup, T)
sup_actions = np.array(tmp_actions)
lnr_actions = np.array([lnr.intended_action(s) for s in states])
length = len(tmp_actions)
diff = sup_actions - lnr_actions
cov = cov + np.dot(diff.T, diff) / float(length)
return cov / float(samples)
def sample_covariance_trajs(env, lnr, trajs, T):
d = env.action_space.shape[0]
cov = np.zeros((d, d))
for states, i_actions in trajs:
length = len(i_actions)
if not length == 0:
sup_actions = np.array([a for a in i_actions])
lnr_actions = np.array([lnr.intended_action(s) for s in states])
diff = sup_actions - lnr_actions
cov = cov + np.dot(diff.T, diff) / float(length)
print "Trajs: " + str(len(trajs))
return cov / float(len(trajs))
def sample_iso_cov_lnr(env, lnr, sup, samples, T):
d = env.action_space.shape[0]
cov = sample_covariance_lnr(env, lnr, sup, samples, T)
return np.trace(cov) / float(d) * np.identity(d)
def sample_iso_cov_sup(env, lnr, sup, samples, T):
d = env.action_space.shape[0]
cov = sample_covariance_sup(env, lnr, sup, samples, T)
return np.trace(cov) / float(d) * np.identity(d)
def sample_epsilon_lnr(env, lnr, sup, samples, T):
surr_loss = statistics.evaluate_agent_disc(env, lnr, sup, T, samples)
return surr_loss
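# Usage sketch (added; `env`, `lnr`, `sup` are assumed to follow the
# interfaces used above: a gym-style env plus agents exposing
# intended_action()). Estimate an isotropic covariance of the
# supervisor/learner action gap, then draw DART-style noise from it:
#
#   cov = sample_iso_cov_sup(env, lnr, sup, samples=5, T=100)
#   noise = np.random.multivariate_normal(np.zeros(cov.shape[0]), cov)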
def sample_epsilon_sup(env, lnr, sup, samples, T):
loss = statistics.evaluate_sup_disc(env, lnr, sup, T, samples)
    return loss
| [
"[email protected]"
] | |
17c41d0c9e81aa5d9e9f1da1181bc65e32417ecb | d2a0c5b7f9977720eb588a664b6ad40486691baa | /sports/tasks.py | 30dd137c0455fd2cbc90eb28af09f7dac89ebf4a | [] | no_license | cjredmond/final_project | 37df2ab88ff6e1aa06af12a196d5d7bc4cda71e0 | ae9bbabae8f611d88d12e045ffedf87a077b0580 | refs/heads/master | 2020-07-26T22:25:44.357141 | 2016-12-15T19:24:25 | 2016-12-15T19:24:25 | 73,711,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,221 | py | import time
from celery import Celery
import random
import datetime
import requests
from celery import shared_task
from sports.scraper import usable_data, fix_names, nba_scores
from sports.nhl_scraper import nhl_scores, nhl_usable_data
from sports.nfl_scraper import nfl_scores, nfl_usable_data
from sports.models import Score, Team, Squad, Clock
from django.utils import timezone
@shared_task
def cal():
items_nba = usable_data(fix_names(nba_scores()))
for dictionary in items_nba:
if dictionary['winner'] == 'LA Lakers':
winner = Team.objects.get(name='Lakers')
loser = Team.objects.get(city=dictionary['loser'], sport='k')
elif dictionary['loser'] == 'LA Lakers':
winner = Team.objects.get(city=dictionary['winner'], sport='k')
loser = Team.objects.get(name='Lakers')
else:
winner = Team.objects.get(city=dictionary['winner'], sport='k')
loser = Team.objects.get(city=dictionary['loser'], sport='k')
if Score.objects.filter(tag=dictionary['tag']):
x = Score.objects.filter(tag=dictionary['tag'])
prev_winner = Score.objects.get(tag=dictionary['tag'], team=winner)
y = list(prev_winner.active_squad.all())
prev_winner.delete()
Score.objects.create(team=winner, pts=dictionary['winner_pts'],tag=dictionary['tag'], time=timezone.now())
squads = list(Squad.objects.filter(roster__name=winner.name))
current = Score.objects.get(team=winner,tag=dictionary['tag'])
current.active_squad.add(*y)
current.save()
prev_loser = Score.objects.get(tag=dictionary['tag'], team=loser)
y = list(prev_loser.active_squad.all())
prev_loser.delete()
Score.objects.create(team=loser, pts=dictionary['loser_pts'],tag=dictionary['tag'], time=timezone.now())
current = Score.objects.get(team=loser,tag=dictionary['tag'])
current.active_squad.add(*y)
current.save()
else:
Score.objects.filter(tag=dictionary['tag']).delete()
Score.objects.create(team=winner, pts=dictionary['winner_pts'],tag=dictionary['tag'], time=timezone.now())
squads = list(Squad.objects.filter(roster__name=winner.name))
current = Score.objects.get(team=winner,tag=dictionary['tag'])
current.active_squad.add(*squads)
current.save()
Score.objects.create(team=loser, pts=dictionary['loser_pts'],tag=dictionary['tag'], time=timezone.now())
squads = list(Squad.objects.filter(roster__name=loser.name))
current = Score.objects.get(team=loser,tag=dictionary['tag'])
current.active_squad.add(*squads)
current.save()
########## HOCKEY ##########
items_nhl = nhl_usable_data(fix_names(nhl_scores()))
for dictionary in items_nhl:
winner = Team.objects.get(city=dictionary['winner'], sport='h')
loser = Team.objects.get(city=dictionary['loser'], sport='h')
        # Note (fix): deleting first would make the existence check below
        # always fail and discard the stored active_squad references, so
        # only query here, mirroring the NBA branch above.
        x = Score.objects.filter(tag=dictionary['tag'])
        if x:
prev_winner = Score.objects.get(tag=dictionary['tag'], team=winner)
y = list(prev_winner.active_squad.all())
prev_winner.delete()
Score.objects.create(team=winner, pts=dictionary['winner_pts'],tag=dictionary['tag'], time=timezone.now())
squads = list(Squad.objects.filter(roster__name=winner.name))
current = Score.objects.get(team=winner,tag=dictionary['tag'])
current.active_squad.add(*y)
current.save()
prev_loser = Score.objects.get(tag=dictionary['tag'], team=loser)
y = list(prev_loser.active_squad.all())
prev_loser.delete()
Score.objects.create(team=loser, pts=dictionary['loser_pts'],tag=dictionary['tag'], time=timezone.now())
current = Score.objects.get(team=loser,tag=dictionary['tag'])
current.active_squad.add(*y)
current.save()
else:
Score.objects.filter(tag=dictionary['tag']).delete()
Score.objects.create(team=winner, pts=dictionary['winner_pts'],tag=dictionary['tag'], time=timezone.now())
squads = list(Squad.objects.filter(roster__name=winner.name))
current = Score.objects.get(team=winner,tag=dictionary['tag'])
current.active_squad.add(*squads)
current.save()
Score.objects.create(team=loser, pts=dictionary['loser_pts'],tag=dictionary['tag'], time=timezone.now())
squads = list(Squad.objects.filter(roster__name=loser.name))
current = Score.objects.get(team=loser,tag=dictionary['tag'])
current.active_squad.add(*squads)
current.save()
### FOOTBALL ######
# items_nfl = nfl_usable_data(fix_names(nfl_scores()))
# for dictionary in items_nfl:
# winner = Team.objects.get(city=dictionary['winner'], sport='f')
# loser = Team.objects.get(city=dictionary['loser'], sport='f')
# Score.objects.filter(tag=dictionary['tag']).delete()
#
# x = Score.objects.filter(tag=dictionary['tag'])
# if x:
# prev_winner = Score.objects.get(tag=dictionary['tag'], team=winner)
# y = list(prev_winner.active_squad.all())
# prev_winner.delete()
# Score.objects.create(team=winner, pts=dictionary['winner_pts'],tag=dictionary['tag'], time=timezone.now())
# squads = list(Squad.objects.filter(roster__name=winner.name))
# current = Score.objects.get(team=winner,tag=dictionary['tag'])
# current.active_squad.add(*y)
# current.save()
#
# prev_loser = Score.objects.get(tag=dictionary['tag'], team=loser)
# y = list(prev_loser.active_squad.all())
# prev_loser.delete()
# Score.objects.create(team=loser, pts=dictionary['loser_pts'],tag=dictionary['tag'], time=timezone.now())
# current = Score.objects.get(team=loser,tag=dictionary['tag'])
# current.active_squad.add(*y)
# current.save()
#
# else:
# Score.objects.filter(tag=dictionary['tag']).delete()
# Score.objects.create(team=winner, pts=dictionary['winner_pts'],tag=dictionary['tag'], time=timezone.now())
# squads = list(Squad.objects.filter(roster__name=winner.name))
# current = Score.objects.get(team=winner,tag=dictionary['tag'])
# current.active_squad.add(*squads)
# current.save()
#
# Score.objects.create(team=loser, pts=dictionary['loser_pts'],tag=dictionary['tag'], time=timezone.now())
# squads = list(Squad.objects.filter(roster__name=loser.name))
# current = Score.objects.get(team=loser,tag=dictionary['tag'])
# current.active_squad.add(*squads)
# current.save()
# @shared_task
# def timer():
# clock = Clock.objects.get(id=1)
# if clock.time == 0:
# pass
# else:
# clock.time = clock.time - 1
# clock.save()
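# A working variant of the commented-out countdown task above -- a sketch
# assuming a single Clock row with id=1, as the original draft implies:
@shared_task
def countdown_tick():
    clock = Clock.objects.filter(id=1).first()
    if clock is not None and clock.time > 0:
        clock.time -= 1
        clock.save()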
| [
"[email protected]"
] | |
fa474f19b5c1d31fb875338677ede2815df5a871 | ae9bd6f9a8fc4133a03db60910c25617b65732f1 | /pyro_models/bcm/bernoulli.py | 9f6e2ccf839bc0c52db09b353c0c27383d26dbc1 | [
"MIT"
] | permissive | afcarl/pyro-models | 0af1a239af81436b1980752c132f288ea59bcbc3 | 1fc3f4b27fe946a99b2ddd6b57cca16f8c7da181 | refs/heads/master | 2022-01-27T13:15:33.641771 | 2019-07-09T18:55:39 | 2019-07-09T18:55:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 750 | py | # model file: example-models/basic_estimators/bernoulli.stan
import torch
import pyro
import pyro.distributions as dist
def init_vector(name, dims=None):
return pyro.sample(name, dist.Normal(torch.zeros(dims), 0.2 * torch.ones(dims)).to_event(1))
def validate_data_def(data):
assert 'N' in data, 'variable not found in data: key=N'
assert 'y' in data, 'variable not found in data: key=y'
# initialize data
N = data["N"]
y = data["y"]
def init_params(data):
params = {}
return params
def model(data, params):
# initialize data
N = data["N"]
y = data["y"]
with pyro.plate("data", N):
theta = pyro.sample("theta", dist.Beta(1., 1.))
pyro.sample('obs', dist.Bernoulli(theta), obs=y)
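# Usage sketch (added; not part of the generated model file): fit theta
# on toy data with NUTS.
if __name__ == "__main__":
    from pyro.infer import MCMC, NUTS
    toy = {"N": 5, "y": torch.tensor([0., 1., 1., 0., 1.])}
    mcmc = MCMC(NUTS(lambda: model(toy, init_params(toy))),
                num_samples=100, warmup_steps=100)
    mcmc.run()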
| [
"[email protected]"
] | |
d5b69845bfc9fdf1ab9d7e507f7523442b3dc405 | def27d5864764b877b6786835ec97f2bd74c6ba8 | /medium/RotateImage.py | 4c0c646667bb3773d3d30ad5ef12082eb7604994 | [] | no_license | bolan2014/leetcode | f6cf38a49a9250abeb36543ea2498062c58e811d | 1c35fde3a65c4f216218f459736d4c39a29980d5 | refs/heads/master | 2021-04-09T16:59:41.494568 | 2017-05-10T03:47:14 | 2017-05-10T03:47:14 | 46,648,353 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | class Solution(object):
def rotate(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: void Do not return anything, modify matrix in-place instead.
"""
n = len(matrix)
for i in range(n):
for j in range(i, n):
matrix[i][j], matrix[j][i] = matrix[j][i], matrix[i][j]
for i in range(n):
            for j in range(n // 2):  # floor division keeps this valid on Python 2 and 3
matrix[i][j], matrix[i][n - 1 - j] = matrix[i][n - 1 - j], matrix[i][j]
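# Quick check (added; not part of the original solution file):
if __name__ == "__main__":
    m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
    Solution().rotate(m)
    print(m)  # expected: [[7, 4, 1], [8, 5, 2], [9, 6, 3]]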
| [
"[email protected]"
] | |
74a9115ea8165c1ac45388b3eadab8e4a69c0bcc | ce2617bfb141c931dba3e1a1a5cc2331cece1fba | /notejam/python2.7/lib/python2.7/site-packages/ansible/modules/cloud/amazon/cloudtrail.py | 6e9b0c170e183526f0154a599df949e3adf2726f | [] | no_license | fercunha/notejam-django | 11e6f343a6e886d7e491ef6f1320d2bacaa625cb | 6e51763b530d7b4c5a0cfc9abb47caa797baaadc | refs/heads/master | 2020-05-04T19:54:58.561206 | 2019-04-04T03:19:09 | 2019-04-04T03:19:09 | 178,685,844 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,723 | py | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudtrail
short_description: manage CloudTrail create, delete, update
description:
- Creates, deletes, or updates CloudTrail configuration. Ensures logging is also enabled.
version_added: "2.0"
author:
- Ansible Core Team
- Ted Timmons (@tedder)
- Daniel Shepherd (@shepdelacreme)
requirements:
- boto3
- botocore
options:
state:
description:
- Add or remove CloudTrail configuration.
- The following states have been preserved for backwards compatibility. C(state=enabled) and C(state=disabled).
- enabled=present and disabled=absent.
required: true
choices: ['present', 'absent', 'enabled', 'disabled']
name:
description:
- Name for the CloudTrail.
- Names are unique per-region unless the CloudTrail is a multi-region trail, in which case it is unique per-account.
required: true
enable_logging:
description:
- Start or stop the CloudTrail logging. If stopped the trail will be paused and will not record events or deliver log files.
default: true
version_added: "2.4"
s3_bucket_name:
description:
- An existing S3 bucket where CloudTrail will deliver log files.
- This bucket should exist and have the proper policy.
- See U(http://docs.aws.amazon.com/awscloudtrail/latest/userguide/aggregating_logs_regions_bucket_policy.html)
- Required when C(state=present)
version_added: "2.4"
s3_key_prefix:
description:
- S3 Key prefix for delivered log files. A trailing slash is not necessary and will be removed.
is_multi_region_trail:
description:
- Specify whether the trail belongs only to one region or exists in all regions.
default: false
version_added: "2.4"
enable_log_file_validation:
description:
- Specifies whether log file integrity validation is enabled.
      - CloudTrail will create a hash for every log file delivered and produce a signed digest file that can be used to ensure log files have not been tampered with.
version_added: "2.4"
aliases: [ "log_file_validation_enabled" ]
include_global_events:
description:
- Record API calls from global services such as IAM and STS.
default: true
aliases: [ "include_global_service_events" ]
sns_topic_name:
description:
- SNS Topic name to send notifications to when a log file is delivered
version_added: "2.4"
cloudwatch_logs_role_arn:
description:
- Specifies a full ARN for an IAM role that assigns the proper permissions for CloudTrail to create and write to the log group.
- See U(https://docs.aws.amazon.com/awscloudtrail/latest/userguide/send-cloudtrail-events-to-cloudwatch-logs.html)
      - Required when C(cloudwatch_logs_log_group_arn) is specified.
version_added: "2.4"
cloudwatch_logs_log_group_arn:
description:
- A full ARN specifying a valid CloudWatch log group to which CloudTrail logs will be delivered. The log group should already exist.
- See U(https://docs.aws.amazon.com/awscloudtrail/latest/userguide/send-cloudtrail-events-to-cloudwatch-logs.html)
- Required when C(cloudwatch_logs_role_arn)
version_added: "2.4"
kms_key_id:
description:
- Specifies the KMS key ID to use to encrypt the logs delivered by CloudTrail. This also has the effect of enabling log file encryption.
- The value can be an alias name prefixed by "alias/", a fully specified ARN to an alias, a fully specified ARN to a key, or a globally unique identifier.
- See U(https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html)
version_added: "2.4"
tags:
description:
- A hash/dictionary of tags to be applied to the CloudTrail resource.
- Remove completely or specify an empty dictionary to remove all tags.
default: {}
version_added: "2.4"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- name: create single region cloudtrail
cloudtrail:
state: present
name: default
s3_bucket_name: mylogbucket
s3_key_prefix: cloudtrail
region: us-west-1
- name: create multi-region trail with validation and tags
cloudtrail:
state: present
name: default
s3_bucket_name: mylogbucket
region: us-west-1
is_multi_region_trail: true
enable_log_file_validation: true
cloudwatch_logs_role_arn: "arn:aws:iam::123456789012:role/CloudTrail_CloudWatchLogs_Role"
cloudwatch_logs_log_group_arn: "arn:aws:logs:us-west-1:123456789012:log-group:CloudTrail/DefaultLogGroup:*"
kms_key_id: "alias/MyAliasName"
tags:
environment: dev
Name: default
- name: show another valid kms_key_id
cloudtrail:
state: present
name: default
s3_bucket_name: mylogbucket
kms_key_id: "arn:aws:kms:us-west-1:123456789012:key/12345678-1234-1234-1234-123456789012"
# simply "12345678-1234-1234-1234-123456789012" would be valid too.
- name: pause logging the trail we just created
cloudtrail:
state: present
name: default
enable_logging: false
s3_bucket_name: mylogbucket
region: us-west-1
is_multi_region_trail: true
enable_log_file_validation: true
tags:
environment: dev
Name: default
- name: delete a trail
cloudtrail:
state: absent
name: default
'''
RETURN = '''
exists:
description: whether the resource exists
returned: always
type: bool
sample: true
trail:
description: CloudTrail resource details
returned: always
type: complex
sample: hash/dictionary of values
contains:
trail_arn:
description: Full ARN of the CloudTrail resource
returned: success
type: string
sample: arn:aws:cloudtrail:us-west-1:123456789012:trail/default
name:
description: Name of the CloudTrail resource
returned: success
type: string
sample: default
is_logging:
description: Whether logging is turned on or paused for the Trail
returned: success
type: bool
sample: True
s3_bucket_name:
description: S3 bucket name where log files are delivered
returned: success
type: string
sample: myBucket
s3_key_prefix:
description: Key prefix in bucket where log files are delivered (if any)
returned: success when present
type: string
sample: myKeyPrefix
log_file_validation_enabled:
description: Whether log file validation is enabled on the trail
returned: success
type: bool
sample: true
include_global_service_events:
description: Whether global services (IAM, STS) are logged with this trail
returned: success
type: bool
sample: true
is_multi_region_trail:
description: Whether the trail applies to all regions or just one
returned: success
type: bool
sample: true
has_custom_event_selectors:
description: Whether any custom event selectors are used for this trail.
returned: success
type: bool
sample: False
home_region:
description: The home region where the trail was originally created and must be edited.
returned: success
type: string
sample: us-west-1
sns_topic_name:
description: The SNS topic name where log delivery notifications are sent.
returned: success when present
type: string
sample: myTopic
sns_topic_arn:
description: Full ARN of the SNS topic where log delivery notifications are sent.
returned: success when present
type: string
sample: arn:aws:sns:us-west-1:123456789012:topic/myTopic
cloud_watch_logs_log_group_arn:
description: Full ARN of the CloudWatch Logs log group where events are delivered.
returned: success when present
type: string
sample: arn:aws:logs:us-west-1:123456789012:log-group:CloudTrail/DefaultLogGroup:*
cloud_watch_logs_role_arn:
description: Full ARN of the IAM role that CloudTrail assumes to deliver events.
returned: success when present
type: string
sample: arn:aws:iam::123456789012:role/CloudTrail_CloudWatchLogs_Role
kms_key_id:
description: Full ARN of the KMS Key used to encrypt log files.
returned: success when present
type: string
sample: arn:aws:kms::123456789012:key/12345678-1234-1234-1234-123456789012
tags:
description: hash/dictionary of tags applied to this resource
returned: success
type: dict
sample: {'environment': 'dev', 'Name': 'default'}
'''
import traceback
try:
from botocore.exceptions import ClientError
except ImportError:
# Handled in main() by imported HAS_BOTO3
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (boto3_conn, ec2_argument_spec, get_aws_connection_info,
HAS_BOTO3, ansible_dict_to_boto3_tag_list,
boto3_tag_list_to_ansible_dict, camel_dict_to_snake_dict)
def create_trail(module, client, ct_params):
"""
Creates a CloudTrail
module : AnsibleModule object
client : boto3 client connection object
ct_params : The parameters for the Trail to create
"""
resp = {}
try:
resp = client.create_trail(**ct_params)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
return resp
def tag_trail(module, client, tags, trail_arn, curr_tags=None, dry_run=False):
"""
Creates, updates, removes tags on a CloudTrail resource
module : AnsibleModule object
client : boto3 client connection object
tags : Dict of tags converted from ansible_dict to boto3 list of dicts
trail_arn : The ARN of the CloudTrail to operate on
curr_tags : Dict of the current tags on resource, if any
dry_run : true/false to determine if changes will be made if needed
"""
adds = []
removes = []
updates = []
changed = False
if curr_tags is None:
# No current tags so just convert all to a tag list
adds = ansible_dict_to_boto3_tag_list(tags)
else:
curr_keys = set(curr_tags.keys())
new_keys = set(tags.keys())
add_keys = new_keys - curr_keys
remove_keys = curr_keys - new_keys
update_keys = dict()
for k in curr_keys.intersection(new_keys):
if curr_tags[k] != tags[k]:
update_keys.update({k: tags[k]})
adds = get_tag_list(add_keys, tags)
removes = get_tag_list(remove_keys, curr_tags)
updates = get_tag_list(update_keys, tags)
if removes or updates:
changed = True
if not dry_run:
try:
client.remove_tags(ResourceId=trail_arn, TagsList=removes + updates)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
if updates or adds:
changed = True
if not dry_run:
try:
client.add_tags(ResourceId=trail_arn, TagsList=updates + adds)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
return changed
def get_tag_list(keys, tags):
"""
Returns a list of dicts with tags to act on
keys : set of keys to get the values for
tags : the dict of tags to turn into a list
"""
tag_list = []
for k in keys:
tag_list.append({'Key': k, 'Value': tags[k]})
return tag_list
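# Worked example for get_tag_list (added):
#   get_tag_list({'env'}, {'env': 'dev', 'Name': 'x'})
#   -> [{'Key': 'env', 'Value': 'dev'}]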
def set_logging(module, client, name, action):
"""
Starts or stops logging based on given state
module : AnsibleModule object
client : boto3 client connection object
name : The name or ARN of the CloudTrail to operate on
action : start or stop
"""
if action == 'start':
try:
client.start_logging(Name=name)
return client.get_trail_status(Name=name)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
elif action == 'stop':
try:
client.stop_logging(Name=name)
return client.get_trail_status(Name=name)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
else:
module.fail_json(msg="Unsupported logging action")
def get_trail_facts(module, client, name):
"""
Describes existing trail in an account
module : AnsibleModule object
client : boto3 client connection object
name : Name of the trail
"""
# get Trail info
try:
trail_resp = client.describe_trails(trailNameList=[name])
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
# Now check to see if our trail exists and get status and tags
if len(trail_resp['trailList']):
trail = trail_resp['trailList'][0]
try:
status_resp = client.get_trail_status(Name=trail['Name'])
tags_list = client.list_tags(ResourceIdList=[trail['TrailARN']])
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
trail['IsLogging'] = status_resp['IsLogging']
trail['tags'] = boto3_tag_list_to_ansible_dict(tags_list['ResourceTagList'][0]['TagsList'])
# Check for non-existent values and populate with None
optional_vals = set(['S3KeyPrefix', 'SnsTopicName', 'SnsTopicARN', 'CloudWatchLogsLogGroupArn', 'CloudWatchLogsRoleArn', 'KmsKeyId'])
for v in optional_vals - set(trail.keys()):
trail[v] = None
return trail
else:
# trail doesn't exist return None
return None
def delete_trail(module, client, trail_arn):
"""
Delete a CloudTrail
module : AnsibleModule object
client : boto3 client connection object
trail_arn : Full CloudTrail ARN
"""
try:
client.delete_trail(Name=trail_arn)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
def update_trail(module, client, ct_params):
"""
Delete a CloudTrail
module : AnsibleModule object
client : boto3 client connection object
ct_params : The parameters for the Trail to update
"""
try:
client.update_trail(**ct_params)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(default='present', choices=['present', 'absent', 'enabled', 'disabled']),
name=dict(default='default'),
enable_logging=dict(default=True, type='bool'),
s3_bucket_name=dict(),
s3_key_prefix=dict(),
sns_topic_name=dict(),
is_multi_region_trail=dict(default=False, type='bool'),
enable_log_file_validation=dict(type='bool', aliases=['log_file_validation_enabled']),
include_global_events=dict(default=True, type='bool', aliases=['include_global_service_events']),
cloudwatch_logs_role_arn=dict(),
cloudwatch_logs_log_group_arn=dict(),
kms_key_id=dict(),
tags=dict(default={}, type='dict'),
))
required_if = [('state', 'present', ['s3_bucket_name']), ('state', 'enabled', ['s3_bucket_name'])]
required_together = [('cloudwatch_logs_role_arn', 'cloudwatch_logs_log_group_arn')]
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_together=required_together, required_if=required_if)
if not HAS_BOTO3:
module.fail_json(msg='boto3 is required for this module')
# collect parameters
if module.params['state'] in ('present', 'enabled'):
state = 'present'
elif module.params['state'] in ('absent', 'disabled'):
state = 'absent'
tags = module.params['tags']
enable_logging = module.params['enable_logging']
ct_params = dict(
Name=module.params['name'],
S3BucketName=module.params['s3_bucket_name'],
IncludeGlobalServiceEvents=module.params['include_global_events'],
IsMultiRegionTrail=module.params['is_multi_region_trail'],
)
if module.params['s3_key_prefix']:
ct_params['S3KeyPrefix'] = module.params['s3_key_prefix'].rstrip('/')
if module.params['sns_topic_name']:
ct_params['SnsTopicName'] = module.params['sns_topic_name']
if module.params['cloudwatch_logs_role_arn']:
ct_params['CloudWatchLogsRoleArn'] = module.params['cloudwatch_logs_role_arn']
if module.params['cloudwatch_logs_log_group_arn']:
ct_params['CloudWatchLogsLogGroupArn'] = module.params['cloudwatch_logs_log_group_arn']
if module.params['enable_log_file_validation'] is not None:
ct_params['EnableLogFileValidation'] = module.params['enable_log_file_validation']
if module.params['kms_key_id']:
ct_params['KmsKeyId'] = module.params['kms_key_id']
try:
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
client = boto3_conn(module, conn_type='client', resource='cloudtrail', region=region, endpoint=ec2_url, **aws_connect_params)
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
results = dict(
changed=False,
exists=False
)
# Get existing trail facts
trail = get_trail_facts(module, client, ct_params['Name'])
# If the trail exists set the result exists variable
if trail is not None:
results['exists'] = True
if state == 'absent' and results['exists']:
# If Trail exists go ahead and delete
results['changed'] = True
results['exists'] = False
results['trail'] = dict()
if not module.check_mode:
delete_trail(module, client, trail['TrailARN'])
elif state == 'present' and results['exists']:
# If Trail exists see if we need to update it
do_update = False
for key in ct_params:
tkey = str(key)
# boto3 has inconsistent parameter naming so we handle it here
if key == 'EnableLogFileValidation':
tkey = 'LogFileValidationEnabled'
# We need to make an empty string equal None
if ct_params.get(key) == '':
val = None
else:
val = ct_params.get(key)
if val != trail.get(tkey):
do_update = True
results['changed'] = True
# If we are in check mode copy the changed values to the trail facts in result output to show what would change.
if module.check_mode:
trail.update({tkey: ct_params.get(key)})
if not module.check_mode and do_update:
update_trail(module, client, ct_params)
trail = get_trail_facts(module, client, ct_params['Name'])
# Check if we need to start/stop logging
if enable_logging and not trail['IsLogging']:
results['changed'] = True
trail['IsLogging'] = True
if not module.check_mode:
set_logging(module, client, name=ct_params['Name'], action='start')
if not enable_logging and trail['IsLogging']:
results['changed'] = True
trail['IsLogging'] = False
if not module.check_mode:
set_logging(module, client, name=ct_params['Name'], action='stop')
# Check if we need to update tags on resource
tag_dry_run = False
if module.check_mode:
tag_dry_run = True
tags_changed = tag_trail(module, client, tags=tags, trail_arn=trail['TrailARN'], curr_tags=trail['tags'], dry_run=tag_dry_run)
if tags_changed:
results['changed'] = True
trail['tags'] = tags
# Populate trail facts in output
results['trail'] = camel_dict_to_snake_dict(trail)
elif state == 'present' and not results['exists']:
# Trail doesn't exist just go create it
results['changed'] = True
if not module.check_mode:
# If we aren't in check_mode then actually create it
created_trail = create_trail(module, client, ct_params)
# Apply tags
tag_trail(module, client, tags=tags, trail_arn=created_trail['TrailARN'])
# Get the trail status
try:
status_resp = client.get_trail_status(Name=created_trail['Name'])
except ClientError as err:
module.fail_json(msg=err.message, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))
# Set the logging state for the trail to desired value
if enable_logging and not status_resp['IsLogging']:
set_logging(module, client, name=ct_params['Name'], action='start')
if not enable_logging and status_resp['IsLogging']:
set_logging(module, client, name=ct_params['Name'], action='stop')
# Get facts for newly created Trail
trail = get_trail_facts(module, client, ct_params['Name'])
# If we are in check mode create a fake return structure for the newly minted trail
if module.check_mode:
acct_id = '123456789012'
try:
sts_client = boto3_conn(module, conn_type='client', resource='sts', region=region, endpoint=ec2_url, **aws_connect_params)
acct_id = sts_client.get_caller_identity()['Account']
except ClientError:
pass
trail = dict()
trail.update(ct_params)
if 'EnableLogFileValidation' not in ct_params:
ct_params['EnableLogFileValidation'] = False
            trail['LogFileValidationEnabled'] = ct_params['EnableLogFileValidation']  # returned facts use the boto3 response key name
trail.pop('EnableLogFileValidation')
fake_arn = 'arn:aws:cloudtrail:' + region + ':' + acct_id + ':trail/' + ct_params['Name']
trail['HasCustomEventSelectors'] = False
trail['HomeRegion'] = region
trail['TrailARN'] = fake_arn
trail['IsLogging'] = enable_logging
trail['tags'] = tags
# Populate trail facts in output
results['trail'] = camel_dict_to_snake_dict(trail)
module.exit_json(**results)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
cccb144b3b818fd1c34f78878597e6c7566e5ae1 | 18239524612cf572bfeaa3e001a3f5d1b872690c | /clients/keto/python/ory_keto_client/models/list_ory_access_control_policy_roles.py | 012f3b490b2695312354cb16433a47b77482f36d | [
"Apache-2.0"
] | permissive | simoneromano96/sdk | 2d7af9425dabc30df830a09b26841fb2e8781bf8 | a6113d0daefbbb803790297e4b242d4c7cbbcb22 | refs/heads/master | 2023-05-09T13:50:45.485951 | 2021-05-28T12:18:27 | 2021-05-28T12:18:27 | 371,689,133 | 0 | 0 | Apache-2.0 | 2021-05-28T12:11:41 | 2021-05-28T12:11:40 | null | UTF-8 | Python | false | false | 5,559 | py | # coding: utf-8
"""
ORY Keto
A cloud native access control server providing best-practice patterns (RBAC, ABAC, ACL, AWS IAM Policies, Kubernetes Roles, ...) via REST APIs. # noqa: E501
The version of the OpenAPI document: v0.0.0-alpha.37
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from ory_keto_client.configuration import Configuration
class ListOryAccessControlPolicyRoles(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'flavor': 'str',
'limit': 'int',
'offset': 'int'
}
attribute_map = {
'flavor': 'flavor',
'limit': 'limit',
'offset': 'offset'
}
def __init__(self, flavor=None, limit=None, offset=None, local_vars_configuration=None): # noqa: E501
"""ListOryAccessControlPolicyRoles - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._flavor = None
self._limit = None
self._offset = None
self.discriminator = None
self.flavor = flavor
if limit is not None:
self.limit = limit
if offset is not None:
self.offset = offset
@property
def flavor(self):
"""Gets the flavor of this ListOryAccessControlPolicyRoles. # noqa: E501
The ORY Access Control Policy flavor. Can be \"regex\", \"glob\", and \"exact\" in: path # noqa: E501
:return: The flavor of this ListOryAccessControlPolicyRoles. # noqa: E501
:rtype: str
"""
return self._flavor
@flavor.setter
def flavor(self, flavor):
"""Sets the flavor of this ListOryAccessControlPolicyRoles.
The ORY Access Control Policy flavor. Can be \"regex\", \"glob\", and \"exact\" in: path # noqa: E501
:param flavor: The flavor of this ListOryAccessControlPolicyRoles. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and flavor is None: # noqa: E501
raise ValueError("Invalid value for `flavor`, must not be `None`") # noqa: E501
self._flavor = flavor
@property
def limit(self):
"""Gets the limit of this ListOryAccessControlPolicyRoles. # noqa: E501
The maximum amount of policies returned. in: query # noqa: E501
:return: The limit of this ListOryAccessControlPolicyRoles. # noqa: E501
:rtype: int
"""
return self._limit
@limit.setter
def limit(self, limit):
"""Sets the limit of this ListOryAccessControlPolicyRoles.
The maximum amount of policies returned. in: query # noqa: E501
:param limit: The limit of this ListOryAccessControlPolicyRoles. # noqa: E501
:type: int
"""
self._limit = limit
@property
def offset(self):
"""Gets the offset of this ListOryAccessControlPolicyRoles. # noqa: E501
The offset from where to start looking. in: query # noqa: E501
:return: The offset of this ListOryAccessControlPolicyRoles. # noqa: E501
:rtype: int
"""
return self._offset
@offset.setter
def offset(self, offset):
"""Sets the offset of this ListOryAccessControlPolicyRoles.
The offset from where to start looking. in: query # noqa: E501
:param offset: The offset of this ListOryAccessControlPolicyRoles. # noqa: E501
:type: int
"""
self._offset = offset
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListOryAccessControlPolicyRoles):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ListOryAccessControlPolicyRoles):
return True
return self.to_dict() != other.to_dict()
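if __name__ == "__main__":
    # Usage sketch (added; not part of the generated client code):
    params = ListOryAccessControlPolicyRoles(flavor="exact", limit=10, offset=0)
    print(params.to_dict())  # -> {'flavor': 'exact', 'limit': 10, 'offset': 0}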
| [
"[email protected]"
] | |
d99415ddf223c396bd34c3886c0908198511a6c6 | de6fb3a55196b6bd36a4fda0e08ad658679fb7a1 | /modules/resource/orchestrator/src/delegate/geni/v3/utils/ro.py | 5e877ef9e520129ca30707d53a4bf5b7ffbcc9fc | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | dana-i2cat/felix | 4a87af639e4c7db686bfa03f1ae4ce62711615e3 | 059ed2b3308bda2af5e1942dc9967e6573dd6a53 | refs/heads/master | 2021-01-02T23:12:43.840754 | 2016-02-04T10:04:24 | 2016-02-04T10:04:24 | 17,132,912 | 4 | 4 | null | null | null | null | UTF-8 | Python | false | false | 6,153 | py | from db.db_manager import db_sync_manager
from delegate.geni.v3.rm_adaptor import AdaptorFactory
from rspecs.ro.manifest_parser import ROManifestParser
from commons import CommonUtils
from tn import TNUtils
from vl import VLUtils
import core
logger = core.log.getLogger("ro-utils")
class ROUtils(CommonUtils):
def __init__(self):
super(ROUtils, self).__init__()
def manage_describe(self, peer, urns, creds):
try:
adaptor, uri = AdaptorFactory.create_from_db(peer)
logger.debug("Adaptor=%s, uri=%s" % (adaptor, uri))
m, urn, ss = adaptor.describe(urns, creds[0]["geni_value"])
ret = self.generate_internal_return(m)
return (ret, urn, ss)
except Exception as e:
logger.critical("manage_describe exception: %s", e)
raise e
def manage_provision(self, peer, urns, creds, beffort, etime, gusers):
try:
adaptor, uri = AdaptorFactory.create_from_db(peer)
logger.debug("Adaptor=%s, uri=%s" % (adaptor, uri))
m, urn = adaptor.provision(
urns, creds[0]["geni_value"], beffort, etime, gusers)
ret = self.generate_internal_return(m)
return (ret, urn)
except Exception as e:
# It is possible that RO does not implement this method!
if beffort:
logger.error("manage_provision exception: %s", e)
                # "ret" is never assigned when the call above raises, so
                # return an empty result instead of an unbound name.
                return ({}, [])
else:
logger.critical("manage_provision exception: %s", e)
raise e
def generate_internal_return(self, m):
ret = {"com_nodes": [], "sdn_slivers": [],
"tn_nodes": [], "tn_links": [],
"se_nodes": [], "se_links": []}
manifest = ROManifestParser(from_string=m)
logger.debug("ROManifestParser=%s" % (manifest,))
self.validate_rspec(manifest.get_rspec())
ret["com_nodes"] = manifest.com_nodes()
logger.info("COMNodes(%d)=%s" %
(len(ret["com_nodes"]), ret["com_nodes"],))
ret["sdn_slivers"] = manifest.sdn_slivers()
logger.info("SDNSlivers(%d)=%s" %
(len(ret["sdn_slivers"]), ret["sdn_slivers"],))
ret["tn_nodes"] = manifest.tn_nodes()
logger.info("TNNodes(%d)=%s" %
(len(ret["tn_nodes"]), ret["tn_nodes"],))
ret["tn_links"] = manifest.tn_links()
logger.info("TNLinks(%d)=%s" %
(len(ret["tn_links"]), ret["tn_links"],))
ret["se_nodes"] = manifest.se_nodes()
logger.info("SENodes(%d)=%s" %
(len(ret["se_nodes"]), ret["se_nodes"],))
ret["se_links"] = manifest.se_links()
logger.info("SELinks(%d)=%s" %
(len(ret["se_links"]), ret["se_links"],))
return ret
@staticmethod
def generate_list_resources(rspec, geni_available=False, show_interdomain=False, inner_call=True):
for n in db_sync_manager.get_com_nodes():
logger.debug("COM resources node=%s" % (n,))
rspec.com_node(n, inner_call)
for d in db_sync_manager.get_sdn_datapaths():
logger.debug("OF resources dpid=%s" % (d,))
rspec.datapath(d, inner_call)
for l in db_sync_manager.get_com_links():
logger.debug("COM resources link=%s" % (l,))
rspec.com_link(l, inner_call)
(of_links, fed_links) = db_sync_manager.get_sdn_links()
for l in of_links:
logger.debug("OF resources of-link=%s" % (l,))
rspec.of_link(l, inner_call)
for l in fed_links:
logger.debug("OF resources fed-link=%s" % (l,))
rspec.fed_link(l, inner_call)
# Internal use (M/RO) -- OR show inter-domain resources, through config flag
if inner_call or show_interdomain:
ROUtils.generate_list_resources_internal(rspec, inner_call)
# External use (experimenter) -- OR show inter-domain resources, through config flag
if geni_available or show_interdomain:
ROUtils.generate_list_resources_external(rspec, inner_call)
return rspec
@staticmethod
def generate_list_resources_internal(rspec, inner_call=True):
"""
Appends TN and SE (internal) information when any of the following:
* It is an internal call (MRO->RO, MRO->MRO)
* Configuration flag "interdomain_available_to_user" is set to True
"""
for n in db_sync_manager.get_tn_nodes():
logger.debug("TN resources node=%s" % (n,))
rspec.tn_node(n, inner_call)
for n in db_sync_manager.get_se_nodes():
logger.debug("SE resources node=%s" % (n,))
rspec.se_node(n, inner_call)
for l in db_sync_manager.get_tn_links():
logger.debug("TN resources tn-link=%s" % (l,))
rspec.tn_link(l, inner_call)
for l in db_sync_manager.get_se_links():
logger.debug("SE resources se-link=%s" % (l,))
rspec.se_link(l, inner_call)
@staticmethod
def generate_list_resources_external(rspec, inner_call=True):
"""
Appends VL (external) information when any of the following:
* GENI flag "geni_available" is set to True
* Configuration flag "interdomain_available_to_user" is set to True
"""
for l in VLUtils.find_vlinks_from_tn_stps(TNUtils()):
logger.debug("VL resources vl-link=%s" % (l,))
rspec.vl_link(l, inner_call)
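    # Usage sketch (added; "AdvertisementFormatter" is a hypothetical
    # formatter name, not taken from this module):
    #
    #   rspec = AdvertisementFormatter()
    #   ROUtils.generate_list_resources(rspec, geni_available=True,
    #                                   inner_call=False)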
@staticmethod
def generate_describe_manifest(ro_manifest, ro_m_info):
for n in ro_m_info.get("com_nodes"):
ro_manifest.com_node(n)
for s in ro_m_info.get("sdn_slivers"):
ro_manifest.of_sliver(s)
for n in ro_m_info.get("tn_nodes"):
ro_manifest.tn_node(n)
for l in ro_m_info.get("tn_links"):
ro_manifest.tn_link(l)
for n in ro_m_info.get("se_nodes"):
ro_manifest.se_node(n)
for l in ro_m_info.get("se_links"):
ro_manifest.se_link(l)
return ro_manifest
| [
"[email protected]"
] | |
9746c655fff4a9fe413aa78411dd64741596373b | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/sdssj_233734.00+434646.2/sdB_SDSSJ_233734.00+434646.2_lc.py | db14c23c567efeaa0cbc6954ff394fa00c9bcd68 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | from gPhoton.gAperture import gAperture
def main():
    gAperture(band="NUV", skypos=[354.391667,43.7795], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_SDSSJ_233734.00+434646.2/sdB_SDSSJ_233734.00+434646.2_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
98fcf5fcfca6641dd017597b7d01b3e14688866e | f99f30752e9bb9e023b37c731f64fb2155ac3daf | /05/for.py | 7557743e98836a3021a0668b8696a0f0828f700a | [] | no_license | chu83/python-basics | 148ff6977f5ca04775951d90ed1f5f763c51a9ff | 19fe0937842c668f604876be0aeb0962a2630dd2 | refs/heads/master | 2023-01-19T01:29:25.203738 | 2020-11-29T18:34:33 | 2020-11-29T18:34:33 | 311,258,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,331 | py | #for loop
print('========== for loop basics ===========')
a=['cat', 'cow', 'tiger']
for animal in a:
print(animal, end=' ')
else:
print('')
print('========== for loop over composite data types ===========')
l1 = [('둘리', 10), ('마이콜', 20), ('또치', 10)]
for t in l1:
    #print(f'Name : {t[0]}, Age : {t[1]}')
    print('Name : %s, Age : %d' %t)
print('========== sum of 1 to 10 ===========')
sum = 0
for i in range(1, 11):
sum = sum+i
print(sum)
#break
for i in range(10):
if i>5:
break
print(i, end=' ')
else:
print('--end loop\n')
print('========== continue ===========')
for i in range(10):
if i <= 5 :
continue
print(i, end=' ')
else:
print('--end loop\n')
print('========== triangle ===========')
for i in range(0, 5):
for j in range(0,i+1):
print('*', end='')
print("")
print('========== inverted triangle ===========')
for i in range(10, 1, -1):
if i <= 10:
star = '*' * i
print(star)
print('========== equilateral triangle ===========')
for i in range(1,10):
if i <= 10:
star = '*' * i
vacant = ' '* (10-i)
print(star+vacant)
print('========== multiplication table ===========')
for i in range(1,10):
for j in range(1, 10):
print(f'{j} * {i} = {i*j} ', end='\t')
print(end='\n')
| [
"[email protected]"
] | |
e031a15e54ff318ebc704179b54cb081cd8a680f | 7ad19e854135977ee5b789d7c9bdd39d67ec9ea4 | /mapping_models/setup.py | b83d962eff3cd62e6d0205c3795371958ee84111 | [
"MIT"
] | permissive | Leofltt/rg_sound_generation | 1b4d522507bf06247247f3ef929c8d0b93015e61 | 8e79b4d9dce028def43284f80521a2ec61d0066c | refs/heads/main | 2023-05-02T19:53:23.645982 | 2021-05-22T16:09:54 | 2021-05-22T16:09:54 | 369,842,561 | 0 | 0 | MIT | 2021-05-22T15:27:28 | 2021-05-22T15:27:27 | null | UTF-8 | Python | false | false | 780 | py | import setuptools
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="mapping_models",
version="0.0.1",
author="Sound Generation OSR",
author_email="[email protected]",
description="mapping models module",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/TheSoundOfAIOSR/rg_sound_generation/mapping_models",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
packages=setuptools.find_packages(),
python_requires=">=3.7",
install_requires=[
"ddsp==1.0.1",
"click==7.1.2"
]
)
| [
"[email protected]"
] | |
5ddb60ad27fa77fedbf2551bbab1bb2e95fa771c | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_2692487_0/Python/kangz/skel.py | d65aa33398c04c3302fbb0196b83fee415960cd0 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,174 | py | import sys
def concat_str(args):
s = ""
for arg in args:
s += str(arg)
return s
def debug(*args):
sys.stderr.write(concat_str(args) + "\n")
def printf(*args):
debug(*args)
print concat_str(args)
def int_input():
return map(int, raw_input().split(' '))
#######################################################
def read_input( ):
size, _ = int_input()
motes = int_input()
return size, motes
def solve(size, motes):
#do not divide by zero
if size == 1:
return len(motes)
motes.sort(reverse=True)
mini = len(motes)
n_added = 0
while len(motes) != 0:
if motes[-1] < size:
size += motes[-1]
motes.pop()
mini = min(mini, n_added + len(motes))
else:
val = motes[-1]
n = (val - size + 1 + size - 2) // (size - 1)
n_added += n
size += n * (size - 1)
return mini
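# Worked example (added): solve(2, [2, 1]) returns 0 -- the 1 is absorbed
# (size becomes 3), then the 2 (size becomes 5), so nothing is removed or added.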
#######################################################
for n_test_case in range(1, int(raw_input()) + 1):
debug("Solving case ", n_test_case)
printf("Case #", n_test_case, ": ", str(solve(*read_input())))
| [
"[email protected]"
] | |
316a5d32e5f75abc519788e2b798965ebd1d55c5 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_136/2973.py | a8200601b088f6309637d43fb933939df7d0a51b | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | tests = int(raw_input(''))
for t in xrange(tests):
r = raw_input('').split()
C = float(r[0])
F = float(r[1])
X = float(r[2])
time = 0.0
increase = 2.0
result = 100000000000000.0
bound = int( (F*X - 2.0*C)/(F*C) )
for i in xrange(bound+10):
result = min(result, time + X/increase)
time += C/increase
increase += F
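    # Strategy note (added): for each candidate number of farms bought, the
    # time spent so far plus X / (current rate) is one completion option;
    # the loop tracks the minimum over all such options.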
print "Case #"+str(t+1)+":", result | [
"[email protected]"
] | |
eba134ac77216aec2247bf99d6822f1a554a1e1e | 28431c6bdfd15b346741f62b06410d815c1a1482 | /jupytex/tools.py | 5142309970c92b973730541273841ea8f6bb80b3 | [
"MIT"
] | permissive | agoose77/jupytex | e3740c367f3fd87bc32819e0a91ef9674a14ff66 | 0906c35e29c6f4b6030ae7f86565d667fb30c1da | refs/heads/master | 2021-06-09T22:31:58.200217 | 2021-05-21T13:55:40 | 2021-05-21T13:55:40 | 123,421,594 | 1 | 0 | null | 2019-01-30T09:12:09 | 2018-03-01T10:45:25 | Python | UTF-8 | Python | false | false | 1,287 | py | import logging
import pathlib
import subprocess
import typing
import importlib_resources as resources
from . import data
logger = logging.getLogger(__name__)
def get_resource_names(package: resources.Package) -> typing.Iterator[str]:
for name in resources.contents(package):
if name == "__init__.py":
continue
if resources.is_resource(package, name):
yield name
def install(directory: pathlib.Path):
logger.info(f"Installing Jupytex into {directory}")
for name in get_resource_names(data):
logger.info(f"Copying {name}")
source = resources.read_text(data, name)
(directory / name).write_text(source)
logger.info("Done!")
def uninstall(directory: pathlib.Path):
logger.info(f"Uninstalling Jupytex from {directory}")
for name in get_resource_names(data):
logger.info(f"Removing {name}")
resource_path = directory / name
if resource_path.exists():
resource_path.unlink()
logger.info("Done!")
def make(sys_args: typing.List[str]):
subprocess.call(["latexmk", "--shell-escape", *sys_args])
def clean(sys_args: typing.List[str], full: bool = False):
clean_type = "-C" if full else "-c"
subprocess.run(["latexmk", clean_type, *sys_args])
| [
"[email protected]"
] | |
05857782727dc8cbb0cce52c998e642c1192f449 | 1049d75b4d94564e54fbd0f2d8c36f774832bbf3 | /career/migrations/0001_initial.py | 2c6bf0213e1deef916116f664196fc4205095f74 | [] | no_license | wahid999/Alumni-Studen-Portal-FYP--master | b7597b4fd7f9e9e7966b09229b887bc156238c6b | 1c9719f33fc338786220cbceb8e91789aa0161ab | refs/heads/master | 2023-07-16T10:43:41.431096 | 2021-08-31T07:06:44 | 2021-08-31T07:06:44 | 401,582,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,083 | py | # Generated by Django 3.0.5 on 2020-04-17 19:46
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='CareerPost',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('postType', models.CharField(choices=[('internship', 'Internship'), ('job', 'Job')], max_length=30)),
('description', models.TextField()),
('careerFile', models.FileField(null=True, upload_to='Career(Job/Internship)')),
('datePosted', models.DateField(auto_now_add=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
5d61287b4f126287b3558dee2293d4b9935db595 | 43362c036fa9c4dd61ead35a38ede10f3e72c222 | /setup.py | 27bd1aebc9af315fd4e7a748e43a283f8c40ed78 | [
"MIT"
] | permissive | lowks/esios | acea4d86c1362641ab022d30f885dea980384473 | 2d0b00807d3e5900dfae7b89d3aacc4bc123066b | refs/heads/master | 2021-01-15T09:47:59.190672 | 2016-03-18T13:17:39 | 2016-03-18T13:17:39 | 54,208,454 | 0 | 1 | null | 2016-03-18T14:43:14 | 2016-03-18T14:43:14 | null | UTF-8 | Python | false | false | 303 | py | from setuptools import setup, find_packages
setup(
name='esios',
version='0.1.7',
packages=find_packages(),
url='https://github.com/gisce/esios',
license='MIT',
install_requires=['libsaas'],
author='GISCE-TI, S.L.',
author_email='[email protected]',
description=''
)
| [
"[email protected]"
] | |
6af1c0d0207f68460356be283522043e05fdfea8 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/climbingStairs_20200709184851.py | ec2e4ea6b8d9d4c5fca3297afeae1c421371ba08 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50 | py | def climbing(n):
    '''
    Problem: count the distinct ways to climb n stairs
    taking 1 or 2 steps at a time (Fibonacci-style DP).
    '''
    a, b = 1, 1
    for _ in range(n):
        a, b = b, a + b  # ways(i) = ways(i-1) + ways(i-2)
    return a  # e.g. climbing(4) == 5
| [
"[email protected]"
] | |
136d460415ebedfb775c6636af07945e6dfede00 | 7ed9402076707e3c37a3706dacb208d9f196f71b | /app/blog_admin/views.py | 21e75396177e4d893348859d8424f2432347815d | [
"BSD-2-Clause"
] | permissive | jiangbingo/django-angularjs-blog | 8b007b5f51520d7da85316a60be716185e7d3cb5 | 27b7a40b163bdb18aa20257c6e796a213392ea16 | refs/heads/master | 2021-01-18T02:21:52.007439 | 2016-06-24T07:59:17 | 2016-06-24T07:59:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,176 | py | # -*- coding: utf-8 -*-
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
from django.contrib.auth import authenticate
from app.utils import *
from app.myblog.models import *
from app.decorater import login_api
from app.decorater import login_require
# Create your views here.
def index(req):
try:
        if req.session['user'] != '' and req.session['user'] is not None:  # "and": both checks must hold before showing the admin page
return render_to_response("blog_admin.html")
except:
pass
return render_to_response("badmin.html")
@login_require
def blog(req):
return render_to_response("blog_admin.html")
@login_require
def know_admin(req):
return render_to_response("know_admin.html")
@login_require
def comment_admin(req):
return render_to_response("comment_admin.html")
@login_require
def know(req, kid=None):
return render_to_response("new_know.html")
@login_require
def create_blog(req, bid=None):
return render_to_response("new_blog.html")
@csrf_exempt
def login(req):
body={}
account = req.POST.get('account')
password = req.POST.get('password')
user = authenticate(username=account, password=password)
if user is None:
        body['fail_mes'] = 'Incorrect username or password~'
return HttpResponse(encodejson(7, body), content_type="application/json")
if not user.is_active:
        body['fail_mes'] = 'This user account has been disabled~'
return HttpResponse(encodejson(13, body), content_type="application/json")
req.session['user'] = account
return HttpResponse(encodejson(1, body), content_type="application/json")
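# Hypothetical URL wiring for the views above (names assumed, not taken
# from the project's actual urls.py):
#
#   from django.conf.urls import url
#   from app.blog_admin import views
#   urlpatterns = [
#       url(r'^badmin/$', views.index),
#       url(r'^badmin/login/$', views.login),
#   ]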
@csrf_exempt
@login_api
def new_blog(req):
body={}
caption = req.POST.get('caption')
sub_caption = req.POST.get('sub_caption')
classify = req.POST.get('classify')
tag_str = str(req.POST.get('tags'))
content = req.POST.get('content')
bid = req.POST.get('id')
pub = req.POST.get('publish')
tag_str = tag_str.split(',')
classifcation = Classification.objects.get(id=classify)
if bid != '' and bid is not None:
mblog = Article.objects.get(id=bid)
mblog.caption = caption
mblog.sub_caption = sub_caption
mblog.content = content
mblog.publish = pub
mblog.classification = classifcation
mblog.save()
else:
nblog = Article(caption=caption,
sub_caption=sub_caption,
content=content,
classification=classifcation,
publish=pub)
nblog.save()
for itm in tag_str:
try:
tag_list = Tag.objects.get(id=itm)
nblog.tags.add(tag_list)
except:
continue
nblog.save()
return HttpResponse(encodejson(1, body), content_type='application/json')
@login_api
def get_blog_util(req, bid=None):
body={}
body['modify'] = False
if bid is not None:
blog = Article.objects.get(id=bid)
blog_json = model_serializer(blog, deep=True)
blog_json['tags'] = model_serializer(blog.tags, include_attr=['id', 'tag_name'])
body['blog'] = blog_json
body['modify'] = True
classify = Classification.objects.all()
classify_json = model_serializer(classify, include_attr=['id', 'c_name'])
tags = Tag.objects.all()
tags_json = model_serializer(tags, include_attr=['id', 'tag_name'])
body['tags'] = tags_json
body['classify'] = classify_json
return HttpResponse(encodejson(1, body), content_type="application/json")
@login_api
def get_env(req, kid=None):
body={}
env_list = Env.objects.all()
env_json = model_serializer(env_list, include_attr=['id', 'content'])
if kid is not None:
know = Knowledge.objects.get(id=kid)
know_json = model_serializer(know, datetime_format="string")
body['know'] = know_json
body['env'] = env_json
return HttpResponse(encodejson(1, body), content_type="application/json")
@csrf_exempt
@login_api
def new_tag(req):
body={}
try:
tag = req.POST.get('tag')
ntag = Tag(tag_name=tag)
ntag.save()
tag_list = Tag.objects.all()
tag_json = model_serializer(tag_list, include_attr=['id', 'tag_name'])
body['tags'] = tag_json
body['type'] = 2
except:
body['fail_mes'] = 'Unknow Error'
return HttpResponse(encodejson(2, body), content_type='application/json')
return HttpResponse(encodejson(1, body), content_type="application/json")
@login_api
def new_classify(req):
body={}
try:
classify = req.POST.get('classify')
nclassify = Classification(c_name=classify)
nclassify.save()
classify_list = Classification.objects.all()
classify_json = model_serializer(classify_list, include_attr=['id', 'c_name'])
body['classify'] = classify_json
body['type'] = 1
except:
body['fail_mes'] = 'Unknow Error'
return HttpResponse(encodejson(2, body), content_type="application/json")
return HttpResponse(encodejson(1, body), content_type='application/json')
@login_api
def new_env(req):
body={}
try:
env = req.POST.get('env')
nenv = Env(content=env)
nenv.save()
env_list = Env.objects.all()
env_json = model_serializer(env_list, include_attr=['id', 'content'])
body['env'] = env_json
    except Exception:
        body['fail_mes'] = 'Unknown error'
return HttpResponse(encodejson(2, body), content_type="application/json")
return HttpResponse(encodejson(1, body), content_type='application/json')
@csrf_exempt
@login_api
def new_know(req):
    body = {}
question = req.POST.get('question')
answer = req.POST.get('answer')
env_str = str(req.POST.get('env'))
pub = req.POST.get('publish')
kid = req.POST.get('id')
    if kid != '' and kid is not None:
mknow = Knowledge.objects.get(id=kid)
mknow.question = question
mknow.answer = answer
mknow.publish = pub
mknow.save()
else:
nknow = Knowledge(question=question,
answer=answer)
nknow.save()
env_str = env_str.split(',')
for itm in env_str:
try:
senv = Env.objects.get(id=itm)
nknow.env.add(senv)
            except (Env.DoesNotExist, ValueError):  # skip ids that do not match an env
continue
nknow.save()
return HttpResponse(encodejson(1, body), content_type='application/json')
@login_api
def know_list(req):
    body = {}
knows = Knowledge.objects.all().order_by('-create_time')
know_json = model_serializer(knows, datetime_format="string", except_attr=['answer'])
# for i, itm in enumerate(knows):
# know_json[i]['env'] = model_serializer(itm.env.all(), include_attr=['content'])
body['know_list'] = know_json
return HttpResponse(encodejson(1, body), content_type='application/json')
@login_api
def blog_list(req):
    body = {}
blogs = Article.objects.all().order_by('-create_time')
blog_json = model_serializer(blogs, datetime_format="string", except_attr=['content'])
body['blog_list'] = blog_json
return HttpResponse(encodejson(1, body), content_type='application/json')
@login_api
def comment_list(req):
    body = {}
c_list = Comment.objects.all()
comment_json = model_serializer(c_list, datetime_format="string")
for i, itm in enumerate(c_list):
comment_json[i]['belong'] = itm.belong.caption
if comment_json[i]['reply']:
comment_json[i]['reply_list'] = model_serializer(itm.replys.all(), datetime_format="string")
else:
comment_json[i]['reply_list'] = None
body['comment_list'] = comment_json
return HttpResponse(encodejson(1, body), content_type="application/json")
@csrf_exempt
@login_api
def blog_opt(req):
    body = {}
otype = int(req.POST.get("type"))
bid = req.POST.get("bid")
blog = Article.objects.get(id=str(bid))
if otype == 1:
blog_json = model_serializer(blog, datetime_format="string", deep=True)
blog_json['tags'] = model_serializer(blog.tags, include_attr=['id', 'tag_name'])
body['blog'] = blog_json
return HttpResponse(encodejson(1, body), content_type="application/json")
elif otype == 2:
blog.delete()
return HttpResponse(encodejson(1, body), content_type="application/json")
elif otype == 3:
blog.publish = not blog.publish
blog.save()
return HttpResponse(encodejson(1, body), content_type="application/json")
else:
pass
return HttpResponse(encodejson(13, body), content_type="application/json")
@csrf_exempt
@login_api
def comment_opt_del(req):
    body = {}
otype = int(req.POST.get("type"))
cid = req.POST.get("cid")
if otype == 1:
comment = Comment.objects.get(id=cid)
blog = comment.belong
blog.comment_count -= 1
blog.save()
if comment.reply:
reply = comment.replys.all()
for itm in reply:
itm.delete()
comment.delete()
elif otype == 2:
comment = CommentReply.objects.get(id=cid)
blog = comment.replyed.belong
blog.comment_count -= 1
blog.save()
comment.delete()
return HttpResponse(encodejson(1, body), content_type="application/json")
@csrf_exempt
@login_api
def comment_opt_new(req):
    body = {}
otype = int(req.POST.get("type"))
cid = req.POST.get("cid")
to = req.POST.get('to')
content = req.POST.get('content')
# print '----'
# print content
new_reply = CommentReply(content=content,
author="RaPoSpectre",
avatar="master.png")
if otype == 1:
comment = Comment.objects.get(id=cid)
comment.reply = True
blog = comment.belong
blog.comment_count += 1
new_reply.replyed = comment
new_reply.to = comment.author
new_reply.save()
comment.save()
blog.save()
elif otype == 2:
r_comment = CommentReply.objects.get(id=to)
comment = Comment.objects.get(id=cid)
blog = comment.belong
blog.comment_count += 1
blog.save()
new_reply.to = r_comment.author
new_reply.replyed = comment
new_reply.save()
return HttpResponse(encodejson(1, body), content_type="application/json")
@csrf_exempt
@login_api
def know_opt(req):
    body = {}
otype = int(req.POST.get("type"))
kid = req.POST.get("kid")
know = Knowledge.objects.get(id=str(kid))
if otype == 2:
know.delete()
return HttpResponse(encodejson(1, body), content_type="application/json")
elif otype == 3:
know.publish = not know.publish
know.save()
return HttpResponse(encodejson(1, body), content_type="application/json")
else:
pass
return HttpResponse(encodejson(13, body), content_type="application/json")
def tt(req):
return 0
| [
"[email protected]"
] | |
33fd60f33c6e12e205067a4078dfebf1066dbc9c | 7d76d00142cd5c3b8d4aaeb917829ddf46871fb5 | /mlir/lib/Bindings/Python/mlir/dialects/linalg/opdsl/ops/core_named_ops.py | 229458855939a794af5b3415dc548a23455b0cde | [
"LLVM-exception",
"Apache-2.0"
] | permissive | pauljoo28/llvm-project | 8ff49f620f6ae2e9bacaad83991cd0c2bc8b36bd | 4bf8985f4fb1411831505a4b38265eb517783dc7 | refs/heads/main | 2023-04-23T07:14:36.877563 | 2021-04-06T20:23:58 | 2021-04-06T20:23:58 | 320,934,047 | 4 | 2 | null | 2020-12-12T22:09:36 | 2020-12-12T22:09:36 | null | UTF-8 | Python | false | false | 2,315 | py | from ..lang import *
T1 = TV.T1
T2 = TV.T2
Batch = S.Batch
@linalg_structured_op
def matmul(A=TensorDef(T1, S.M, S.K),
B=TensorDef(T2, S.K, S.N),
C=TensorDef(U, S.M, S.N, output=True)):
"""Performs a matrix multiplacation of two 2D inputs.
Numeric casting is performed on the operands to the inner multiply, promoting
them to the same data type as the accumulator/output.
"""
implements(ContractionOpInterface)
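  # Reduction over D.k is implicit: a dimension that appears only on the
  # right-hand side of the `+=` is treated as a reduction dimension by the DSL.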
C[D.m, D.n] += cast(U, A[D.m, D.k]) * cast(U, B[D.k, D.n])
@linalg_structured_op
def batch_matmul(A=TensorDef(T1, Batch, S.M, S.K),
B=TensorDef(T2, Batch, S.K, S.N),
C=TensorDef(U, Batch, S.M, S.N, output=True)):
"""Performs a batched matrix multiplacation of two 3D inputs.
Numeric casting is performed on the operands to the inner multiply, promoting
them to the same data type as the accumulator/output.
"""
implements(ContractionOpInterface)
C[D.b, D.m, D.n] += cast(U, A[D.b, D.m, D.k]) * cast(U, B[D.b, D.k, D.n])
@linalg_structured_op
def matvec(A=TensorDef(T1, S.M, S.N),
y=TensorDef(T2, S.N),
x=TensorDef(U, S.M, output=True)):
"""Performs a matrix-vector multiplication.
Numeric casting is performed on the operands to the inner multiply, promoting
them to the same data type as the accumulator/output.
"""
implements(ContractionOpInterface)
x[D.m] += cast(U, A[D.m, D.n]) * cast(U, y[D.n])
@linalg_structured_op
def vecmat(y=TensorDef(T1, S.M),
A=TensorDef(T2, S.M, S.N),
x=TensorDef(U, S.N, output=True)):
"""Performs a vector-matrix multiplacation.
Numeric casting is performed on the operands to the inner multiply, promoting
them to the same data type as the accumulator/output.
"""
implements(ContractionOpInterface)
x[D.n] += cast(U, y[D.m]) * cast(U, A[D.m, D.n])
@linalg_structured_op
def dot(A=TensorDef(T1, S.M), B=TensorDef(T2, S.M), C=TensorDef(U,
output=True)):
"""Performs a dot product of two vectors to a scalar result.
Numeric casting is performed on the operands to the inner multiply, promoting
them to the same data type as the accumulator/output.
"""
implements(ContractionOpInterface)
C[None] += cast(U, A[D.m]) * cast(U, B[D.m])
| [
"[email protected]"
] | |
337f351e1ea69e017e7449fa313e64f16d2fe44b | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03826/s197657905.py | d41500e1f0227c37ec64234be575400e91386bbe | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py |
def main():
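    # Read four integers a b c d and print the larger of the two products
    # a*b and c*d (i.e. compare two rectangle areas).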
t = list(map(int, input().split()))
    print(max(t[0] * t[1], t[2] * t[3]))
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
8bdc625bce37546dcff8911293c12f4f99a0b92a | ff81a9d7880f1b85a1dc19d5eba5ac72d7179c86 | /pychron/loading/tasks/loading_plugin.py | ab3f96269cb4288e7d554f9723dd6eaa10080d38 | [
"Apache-2.0"
] | permissive | UManPychron/pychron | 2fb7e479a9f492423c0f458c70102c499e1062c4 | b84c9fd70072f9cbda30abe2c471e64fe3dd75d8 | refs/heads/develop | 2022-12-03T23:32:45.579326 | 2020-01-29T19:02:20 | 2020-01-29T19:02:20 | 36,100,637 | 0 | 0 | null | 2015-05-23T00:10:06 | 2015-05-23T00:10:05 | null | UTF-8 | Python | false | false | 3,651 | py | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from envisage.ui.tasks.task_extension import TaskExtension
from envisage.ui.tasks.task_factory import TaskFactory
from pyface.tasks.action.schema_addition import SchemaAddition
# from pyface.action.group import Group
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.envisage.tasks.base_task_plugin import BaseTaskPlugin
from pychron.loading.loading_manager import LoadingManager
from pychron.loading.tasks.actions import SaveLoadingPDFAction, SaveTrayPDFAction, GenerateResultsAction
from pychron.loading.tasks.load_task import LoadingTask
from pychron.loading.tasks.loading_preferences import LoadingPreferencesPane
from pychron.loading.tasks.panes import LoadDockPane, LoadTablePane
from pychron.pychron_constants import DVC_PROTOCOL
class LoadingPlugin(BaseTaskPlugin):
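    """Envisage plugin that wires the sample-loading task, services, and preferences into the application."""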
name = 'Loading'
id = 'pychron.loading'
def _task_extensions_default(self):
actions = [SchemaAddition(id='save_loading_figure',
factory=SaveLoadingPDFAction,
path='MenuBar/file.menu'),
SchemaAddition(id='save_tray',
factory=SaveTrayPDFAction,
path='MenuBar/file.menu'),
SchemaAddition(id='generate_results',
factory=GenerateResultsAction,
path='MenuBar/file.menu')]
return [TaskExtension(task_id='pychron.loading',
actions=actions)]
def _tasks_default(self):
return [TaskFactory(id='pychron.loading',
factory=self._load_task_factory,
name='Loading',
accelerator='Ctrl+Y',
task_group='experiment')]
def _service_offers_default(self):
load = self.service_offer_factory(protocol=LoadDockPane,
factory=LoadDockPane)
table = self.service_offer_factory(protocol=LoadTablePane,
factory=LoadTablePane)
man = self.service_offer_factory(protocol=LoadingManager,
factory=self._loading_manager_factory)
return [load, table, man]
def _loading_manager_factory(self):
return LoadingManager(application=self.application,
dvc=self.application.get_service(DVC_PROTOCOL))
def _load_task_factory(self):
return LoadingTask(manager=self._loading_manager_factory())
def _preferences_panes_default(self):
return [LoadingPreferencesPane]
# ============= EOF =============================================
| [
"[email protected]"
] | |
43b33abced12630b4b24f937759544ab20ef8370 | 7f7bf9a5827d1441f18f568fc75ed5bf0159ca6c | /14_Yandex_Final_Tasks/A/A.py | d4258510d4f596ba4eaf12fd680b41e62c69c28d | [] | no_license | KorsakovPV/yandex_contest | 08bcff4eaf38d46a8348ac3abbb5f496857fe8e4 | f67917ef710f5b138142b11ec4e6e4678b23e408 | refs/heads/master | 2023-01-06T13:04:07.955570 | 2020-10-24T20:22:41 | 2020-10-24T20:22:41 | 290,097,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,166 | py | """
A. Largest Number
In the evening the kids decided to play a game called "Largest Number".
Given some numbers, determine the largest number that can be composed from them.
Input format
The first line contains n - the count of numbers. It does not exceed 100.
The second line contains n non-negative numbers separated by spaces, each of
which does not exceed 1000.
Output format
Print the largest number that can be composed from them.
"""
# 34601917
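# Sort key for the "largest number" greedy: string x is ordered before y
# exactly when the concatenation x + y is larger than y + x.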
class solution_key(str):
def __lt__(self, y):
return self + y > y + self
class Solution:
def largest_number(self, nums):
nums.sort(key=solution_key)
return ''.join(nums)
def main(input_file):
input_file = input_file.rstrip().split('\n')
data = input_file[1].rstrip().split()
sol = Solution()
return sol.largest_number(data)
def test(num):
left = 1
while left < len(num) // 2:
data = list()
data.append(num[:left])
data.append(num[left:-left])
data.append(num[-left:])
sol = Solution()
print(data, sol.largest_number(data))
left += 1
if __name__ == '__main__':
with open('input.txt') as f:
input_file = f.read()
answer = main(input_file)
with open('output.txt', 'w') as f:
f.write(answer)
test('123456789')
test('998877665544332211')
test('11111111112')
with open('input1.txt') as f:
input_file = f.read()
assert main(input_file) == '56215', 'input1.txt error\n' + str(
main(input_file))
with open('input2.txt') as f:
input_file = f.read()
assert main(input_file) == '78321', 'input2.txt error\n' + str(
main(input_file))
with open('input3.txt') as f:
input_file = f.read()
assert main(input_file) == '542210', 'input3.txt error\n' + str(
main(input_file))
| [
"[email protected]"
] | |
7d92023a6701a36aaf56c760746c31ac23388b18 | eee85a1ee54fa54e74b93bf3af8391c3f0c80b2a | /basic/pythonweb/string/pythonweb01-01-07.py | e811c4a36ccd9856f65614b3caff5d7a6ac48ac2 | [] | no_license | ryu-0406/study-python | 8712a6e235e1ca92bb3c00ad053c8298f691108c | da10d5913de32569b2ba4bc98d9919a78e85d22a | refs/heads/master | 2022-12-14T22:43:45.236184 | 2020-09-13T03:55:36 | 2020-09-13T03:55:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | # coding:utf-8
print("Hello の文字数は " + str(len("Hello")))
print("東京都 の文字数は " + str(len("東京都"))) | [
"[email protected]"
] | |
db312e4b277fb2bb1bcd2efcb4f3457c0f939a0b | 460f981dfe1a05f14d2a4cdc6cc71e9ad798b785 | /3/amd64/envs/navigator/lib/python3.6/site-packages/osgeo/_osr.py | 35c7ae045de0cf8a2f9d8473b59af53c353048cc | [
"GPL-2.0-only",
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"LicenseRef-scancode-mit-old-style",
"dtoa",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain-disclaimer",
"Zlib",
"LicenseRef-scancode-public-domain",
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"Intel",
"Python-2.0"
] | permissive | DFO-Ocean-Navigator/navigator-toolchain | d8c7351b477e66d674b50da54ec6ddc0f3a325ee | 930d26886fdf8591b51da9d53e2aca743bf128ba | refs/heads/master | 2022-11-05T18:57:30.938372 | 2021-04-22T02:02:45 | 2021-04-22T02:02:45 | 234,445,230 | 0 | 1 | BSD-3-Clause | 2022-10-25T06:46:23 | 2020-01-17T01:26:49 | C++ | UTF-8 | Python | false | false | 309 | py | def __bootstrap__():
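    # setuptools-generated shim: resolve the path of the compiled extension
    # inside the installed package, then load it in place of this stub module.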
global __bootstrap__, __loader__, __file__
import sys, pkg_resources, imp
__file__ = pkg_resources.resource_filename(__name__, '_osr.cpython-36m-x86_64-linux-gnu.so')
__loader__ = None; del __bootstrap__, __loader__
    imp.load_dynamic(__name__, __file__)
__bootstrap__()
| [
"[email protected]"
] | |
a5d2f2afc771f9616114150718cd4b7d835be5b4 | 30150c7f6ed7a10ac50eee3f40101bc3165ebf9e | /src/coghq/DistributedMazeAI.py | 29fd883b37c9512dac79c0b2b4c58979121052c7 | [] | no_license | toontown-restoration-project/toontown | c2ad0d552cb9d5d3232ae6941e28f00c11ca3aa8 | 9bef6d9f823b2c12a176b33518eaa51ddbe3fd2f | refs/heads/master | 2022-12-23T19:46:16.697036 | 2020-10-02T20:17:09 | 2020-10-02T20:17:09 | 300,672,330 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,244 | py | from otp.level import DistributedEntityAI
from . import DistributedBarrelBaseAI
from direct.directnotify import DirectNotifyGlobal
from direct.distributed.ClockDelta import globalClockDelta
from direct.task import Task
class DistributedMazeAI(DistributedEntityAI.DistributedEntityAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedMazeAI')
def __init__(self, level, entId):
"""Create the maze."""
DistributedEntityAI.DistributedEntityAI.__init__(
self, level, entId)
self.roomDoId = level.doId
self.GameDuration = 60.0
self.DamageOnFailure = 20
self.finishedList = []
def delete(self):
"""Delete ourself and cleanup tasks."""
self.removeAllTasks()
DistributedEntityAI.DistributedEntityAI.delete(self)
def announceGenerate(self):
"""Load fields dependent on required fields."""
DistributedEntityAI.DistributedEntityAI.announceGenerate(self)
self.mazeEndTimeTaskName = self.uniqueName('mazeEndTime')
def getRoomDoId(self):
"""Return the doId of the room that contains us."""
return self.roomDoId
def setClientTriggered(self):
"""A player entered us, start the moles."""
if not hasattr(self, 'gameStartTime'):
self.gameStartTime = globalClock.getRealTime()
self.b_setGameStart(globalClockDelta.localToNetworkTime(\
self.gameStartTime))
def b_setGameStart(self, timestamp):
# send the distributed message first, so that any network msgs
# sent by the subclass upon start of the game will arrive
# after the game start msg
self.d_setGameStart(timestamp)
self.setGameStart(timestamp)
def d_setGameStart(self, timestamp):
self.notify.debug("BASE: Sending setGameStart")
self.sendUpdate("setGameStart", [timestamp])
def setGameStart(self, timestamp):
"""
This method gets called when all avatars are ready
Inheritors should call this plus the code to start the game
"""
self.notify.debug("BASE: setGameStart")
self.GameDuration = 35.0 + (self.numSections * 15.0)
self.prepareForGameStartOrRestart()
def prepareForGameStartOrRestart(self):
"""Zero out needed fields on a start or restart of the mole field."""
self.doMethodLater(self.GameDuration, self.gameEndingTimeHit, self.mazeEndTimeTaskName )
def setFinishedMaze(self):
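        """Toon-up a toon that reports finishing and broadcast who finished."""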
senderId = self.air.getAvatarIdFromSender()
if senderId not in self.finishedList:
toon = simbase.air.doId2do.get(senderId)
if toon:
if len(self.finishedList) < 1:
toon.toonUp(200.0)
else:
toon.toonUp(20.0)
lastToon = 0
if hasattr(self, 'level'):
numToons = len(self.level.presentAvIds)
if numToons == (len(self.finishedList) + 1):
lastToon = 1
self.sendUpdate("toonFinished" , [senderId, len(self.finishedList), lastToon])
#print("toonFinished sent")
self.finishedList.append(senderId)
def gameEndingTimeHit(self, task):
"""Handle the game hitting its ending time."""
roomId = self.getLevelDoId()
room = simbase.air.doId2do.get(roomId)
if room:
playerIds = room.presentAvIds
for avId in playerIds:
av = simbase.air.doId2do.get(avId)
if av and (avId not in self.finishedList):
self.finishedList.append(avId)
self.sendUpdate("setGameOver", [])
def damageMe(self):
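        """Damage the requesting toon for failing the maze and notify the room."""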
senderId = self.air.getAvatarIdFromSender()
av = simbase.air.doId2do.get(senderId)
roomId = self.getLevelDoId()
room = simbase.air.doId2do.get(roomId)
if room:
playerIds = room.presentAvIds
if av and (senderId in playerIds):
av.takeDamage(self.DamageOnFailure, quietly=0)
room.sendUpdate('forceOuch',[self.DamageOnFailure])
| [
"[email protected]"
] | |
ffc7b800c075f5e3f3b1a8b71954978a6eb9f058 | a46ccf3496712f85e92e15bac27b9f9df23436a4 | /notebooks/_solutions/11-xarray-intro2.py | 754c8f41d4e050bdd3517d6ecba4c51d5e0bbb59 | [
"BSD-3-Clause"
] | permissive | jorisvandenbossche/DS-python-geospatial | a30ef43daa1d8cfd9d5652ab047bd828573bb493 | cd5d63057a335e0cb157faf12b945ec664da4c10 | refs/heads/main | 2022-11-24T09:55:06.552714 | 2022-11-20T22:15:50 | 2022-11-20T22:15:50 | 244,412,214 | 134 | 39 | BSD-3-Clause | 2023-09-14T17:10:54 | 2020-03-02T15:57:19 | Jupyter Notebook | UTF-8 | Python | false | false | 70 | py | !gdalinfo ./data/gent/raster/2020-09-17_Sentinel_2_L1C_True_color.tiff | [
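# '!' is the IPython shell escape: the notebook solution shells out to
# gdalinfo to print the GeoTIFF's metadata (driver, size, CRS, band info).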
"[email protected]"
] | |
e71df24b87f48a17c62c90b1f716500fac38224f | 5c4136623d2ffea23d9e4d25e4f930f06cb627f3 | /tapis_cli/commands/taccapis/v2/jobs/helpers/error.py | 0457ab4a363f6ab7a9217d436d081e16e837c4c8 | [
"BSD-3-Clause"
] | permissive | shwetagopaul92/tapis-cli-ng | 3ca8ee0127fca435151a2f2e6dbdb26ab501d470 | 6f424b8352c0d034d4f5547fac21d5c8dd097a7f | refs/heads/master | 2020-12-13T09:31:33.334743 | 2020-01-08T19:14:51 | 2020-01-08T19:14:51 | 234,377,360 | 0 | 0 | BSD-3-Clause | 2020-01-16T17:45:14 | 2020-01-16T17:45:13 | null | UTF-8 | Python | false | false | 2,221 | py | """Exceptions and error handlers
"""
import logging
import re
import os
import time
from agavepy.agave import AgaveError
from attrdict import AttrDict
from requests.exceptions import HTTPError
__all__ = [
'AgaveError', 'HTTPError', 'HTTPNotFoundError', 'TapisOperationFailed',
'ImportNotCompleteError', 'OutputFileExistsError', 'read_tapis_http_error',
'handle_http_error'
]
class OutputFileExistsError(IOError):
pass
class TapisOperationFailed(AgaveError):
pass
class HTTPNotFoundError(HTTPError):
pass
class ImportNotCompleteError(HTTPNotFoundError):
pass
def read_tapis_http_error(http_error_object):
"""Extract useful details from an exception raised by interactting
with a Tapis API
"""
h = http_error_object
# extract HTTP response code
code = -1
try:
code = h.response.status_code
assert isinstance(code, int)
except Exception:
# we have no idea what happened
code = 418
# extract HTTP reason
reason = 'UNKNOWN ERROR'
try:
reason = h.response.reason
except Exception:
pass
# Tapis APIs will give JSON responses if the target web service is at all
# capable of fulfilling the request. Therefore, try first to extract fields
# from the JSON response, then fall back to returning the plain text from
# the response.
    err_msg = 'Unexpected error encountered by the web service'
status_msg = 'error'
version_msg = 'unknown'
try:
j = h.response.json()
if 'message' in j:
err_msg = j['message']
if 'status' in j:
status_msg = j['status']
if 'version' in j:
version_msg = j['version']
except Exception:
err_msg = h.response.text
httperror = 'HTTPError - {} {}; message: {}; status: {}; version: {}; response.content: {}'
return httperror.format(code, reason, err_msg, status_msg, version_msg,
h.response.content)
def handle_http_error(httperror):
decorated_http_error = read_tapis_http_error(httperror)
if httperror.response.status_code == 404:
raise HTTPNotFoundError(httperror)
else:
        # read_tapis_http_error returns a string; wrap it in an exception,
        # since bare strings cannot be raised in Python 3
        raise TapisOperationFailed(decorated_http_error)
| [
"[email protected]"
] | |
3ce314bb3d0814413a90400f26fbcd5b9b62d98b | 177338a720f904f63926da055364cc0e2c0a850c | /spark/pyspark(by Leaderman git)/1.5.1/examples/app/spark_app_read_data_from_rcfile.py | b07906362ca93ab5697ccd402ee00a50a9c8df24 | [
"Apache-2.0"
] | permissive | xuefenga616/mygit | 60ef7bf7201603e13d4621cf7a39dea8ec92e0b7 | be3b8003fcc900ce7ca6616a9ddebb0edcbc1407 | refs/heads/master | 2020-09-13T11:50:55.448041 | 2017-08-27T10:59:00 | 2017-08-27T10:59:00 | 67,042,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 670 | py | from pyspark import SparkConf, SparkContext
conf = SparkConf().setAppName("spark_app_read_data_from_rcfile")
sc = SparkContext(conf=conf)
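# Read a Hive RCFile via the legacy hadoopFile API; the custom valueConverter
# unpacks each BytesRefArrayWritable (one row's columns) into a plain object array.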
rowRDD = sc.hadoopFile(path="hdfs://dip.cdh5.dev:8020/user/yurun/rcfile",
inputFormatClass="org.apache.hadoop.hive.ql.io.RCFileInputFormat",
keyClass="org.apache.hadoop.io.LongWritable",
valueClass="org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable",
valueConverter="com.sina.dip.spark.converter.BytesRefArrayWritableToObjectArrayConverter")
pairs = rowRDD.collect()
for pair in pairs:
print pair[0], pair[1]
sc.stop()
| [
"[email protected]"
] | |
5e3d144b21f3cc4d6896b030c0544b9efcf416f2 | f09155ed20774a4fbf2a117ae43b38223642e9d8 | /businessworldenv/bin/virtualenv | 6caf2a67fb43d5247454f93bc71a794b29e74332 | [] | no_license | krsumit/bwn | eeebbd8cf21cf7e8cc3ba7b180605d664e655876 | 17d894760099640d44ec70ecaf5b15c764efd658 | refs/heads/master | 2023-01-04T14:06:03.426185 | 2019-10-01T04:17:20 | 2019-10-01T04:17:20 | 212,008,994 | 0 | 0 | null | 2022-12-26T20:15:56 | 2019-10-01T03:58:29 | Python | UTF-8 | Python | false | false | 250 | #!/home/sumit/businessworld/businessworldenv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from virtualenv import main
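# Console-script shim generated at install time: normalize sys.argv[0]
# (strip a trailing "-script.py"/".exe" suffix) before calling virtualenv's main().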
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
f9de43be1783e9e199bc07fac2b6171e9cd208b6 | 888a39e5e75f0e8311bbef63749685acf67fe715 | /clubs/migrations/0004_auto_20150313_0759.py | dcccaf36853555bf052545438947b8beb7c4d180 | [] | no_license | mozilla/teach-api | 94820e4ef9d51b1735b046a2a76cfbb9a37321da | 35a38df7addb8ca37e014705fe056755c3c1564f | refs/heads/master | 2023-09-01T09:00:37.995896 | 2016-12-15T19:42:23 | 2016-12-15T19:42:23 | 32,082,559 | 3 | 12 | null | 2019-03-29T05:11:24 | 2015-03-12T14:50:19 | Python | UTF-8 | Python | false | false | 994 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('clubs', '0003_auto_20150313_0020'),
]
operations = [
migrations.RemoveField(
model_name='club',
name='creator',
),
migrations.AddField(
model_name='club',
name='owner',
field=models.ForeignKey(default=1, to=settings.AUTH_USER_MODEL, help_text=b'The user who owns the Club and can change it.'),
preserve_default=False,
),
migrations.AlterField(
model_name='club',
name='longitude',
field=models.FloatField(help_text=b'Longitude of the club. Leave blank to automatically determine.', null=True, blank=True),
preserve_default=True,
),
]
| [
"[email protected]"
] | |
d091faee1b1bd72dac81cd9817bad618455a9452 | a3978d2c5367b116e77c73637d6c04f6cb8bc8a8 | /1807-1/07day/14-登录.py | 1bbd9e9ee574d863a8484430290440535cae5329 | [] | no_license | liuchenghao2000/1807 | 9acb1abd0035f93113cd7c1bda5b2b30f511a4d7 | cdbec3aae134b6f5ed0208fb5f8ee3e2a874a4ae | refs/heads/master | 2020-03-27T07:17:25.369702 | 2018-09-06T01:46:28 | 2018-09-06T01:46:28 | 139,957,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | account = 123456
password = 'abc'
a = int(input('Enter account number: '))
p = input('Enter password: ')
if a == account and p == password:
    print('Login successful')
elif a != account:
    print('Incorrect account')
elif p != password:
    print('Incorrect password')
| [
"[email protected]"
] | |
a7d48e2bafd809734b00ae7ea6448ee5177d6c71 | 68a088346090ae4e929c208906b14181da0f92f6 | /第一阶段/2. Python01/day04/exercise/08_number_10_1.py | 50ffea441d3c73ca42350f9b2ebcdb270aa294c1 | [] | no_license | LONG990122/PYTHON | d1530e734ae48416b5f989a4d97bd1d66d165b91 | 59a2a2a0b033c8ad0cb33d6126c252e9d574eff7 | refs/heads/master | 2020-07-07T09:38:03.501705 | 2019-09-23T16:28:31 | 2019-09-23T16:28:31 | 203,316,565 | 0 | 0 | null | 2019-10-23T15:02:33 | 2019-08-20T06:47:44 | HTML | UTF-8 | Python | false | false | 105 | py | # 3. Use a while loop to print all the integers from 10 down to 1 (inclusive).
i = 10
while i >= 1:
print(i)
i -= 1
| [
"[email protected]"
] | |
115dec5e597d87a852c7cd0e1e0fb2a005e0196e | 54c5ddf4d427c52447a983b8e26409fdb4de6c11 | /src/architectures/jet_transforms/nmp/fixed_nmp/fixed_nmp.py | b844083ab5732d9827823ede20402583841bff3a | [
"BSD-3-Clause"
] | permissive | XintianHan/jets | eda3b0f4f1256b57669cc59664d22b38d3999bc1 | 2ee178eaa7f6629d25ca87ee1096e5ea5d15bfd3 | refs/heads/master | 2021-04-18T21:35:03.263843 | 2018-03-22T21:32:35 | 2018-03-22T21:32:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,785 | py | import os
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
#from .adjacency import construct_physics_based_adjacency_matrix
#from ..stacked_nmp.attention_pooling import construct_pooling_layer
from ..message_passing import MP_LAYERS
#from ..message_passing.adjacency import construct_adjacency_matrix_layer
from ..adjacency import construct_adjacency
from .....architectures.readout import READOUTS
from .....architectures.embedding import EMBEDDINGS
from .....monitors import Histogram
from .....monitors import Collect
from .....monitors import BatchMatrixMonitor
class FixedNMP(nn.Module):
def __init__(self,
features=None,
hidden=None,
iters=None,
readout=None,
matrix=None,
emb_init=None,
mp_layer=None,
**kwargs
):
super().__init__()
self.iters = iters
emb_kwargs = {x: kwargs[x] for x in ['act', 'wn']}
self.embedding = EMBEDDINGS['n'](dim_in=features, dim_out=hidden, n_layers=int(emb_init), **emb_kwargs)
mp_kwargs = {x: kwargs[x] for x in ['act', 'wn', 'update', 'message']}
MPLayer = MP_LAYERS[mp_layer]
self.mp_layers = nn.ModuleList([MPLayer(hidden=hidden,**mp_kwargs) for _ in range(iters)])
Readout = READOUTS[readout]
self.readout = Readout(hidden, hidden)
self.adjacency_matrix = construct_adjacency(matrix=matrix, dim_in=features, dim_out=hidden, **kwargs)
def forward(self, jets, mask=None, **kwargs):
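        """Embed the jet constituents, build the fixed adjacency matrix, run
        `iters` rounds of message passing, then pool with the readout layer."""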
h = self.embedding(jets)
dij = self.adjacency_matrix(jets, mask=mask, **kwargs)
for mp in self.mp_layers:
h = mp(h=h, mask=mask, dij=dij, **kwargs)
out = self.readout(h)
return out
| [
"[email protected]"
] | |
619650f861eaee6a6fc6e373f4559eec6ede90b5 | 5963c12367490ffc01c9905c028d1d5480078dec | /homeassistant/components/rachio/webhooks.py | 94c79a1504f06214aa4c2cdbc3d1318956b902b8 | [
"Apache-2.0"
] | permissive | BenWoodford/home-assistant | eb03f73165d11935e8d6a9756272014267d7d66a | 2fee32fce03bc49e86cf2e7b741a15621a97cce5 | refs/heads/dev | 2023-03-05T06:13:30.354545 | 2021-07-18T09:51:53 | 2021-07-18T09:51:53 | 117,122,037 | 11 | 6 | Apache-2.0 | 2023-02-22T06:16:51 | 2018-01-11T16:10:19 | Python | UTF-8 | Python | false | false | 4,360 | py | """Webhooks used by rachio."""
from aiohttp import web
from homeassistant.const import URL_API
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
CONF_CLOUDHOOK_URL,
CONF_WEBHOOK_ID,
DOMAIN,
KEY_EXTERNAL_ID,
KEY_TYPE,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
SIGNAL_RACHIO_RAIN_SENSOR_UPDATE,
SIGNAL_RACHIO_SCHEDULE_UPDATE,
SIGNAL_RACHIO_ZONE_UPDATE,
)
# Device webhook values
TYPE_CONTROLLER_STATUS = "DEVICE_STATUS"
SUBTYPE_OFFLINE = "OFFLINE"
SUBTYPE_ONLINE = "ONLINE"
SUBTYPE_OFFLINE_NOTIFICATION = "OFFLINE_NOTIFICATION"
SUBTYPE_COLD_REBOOT = "COLD_REBOOT"
SUBTYPE_SLEEP_MODE_ON = "SLEEP_MODE_ON"
SUBTYPE_SLEEP_MODE_OFF = "SLEEP_MODE_OFF"
SUBTYPE_BROWNOUT_VALVE = "BROWNOUT_VALVE"
# Rain delay values
TYPE_RAIN_DELAY_STATUS = "RAIN_DELAY"
SUBTYPE_RAIN_DELAY_ON = "RAIN_DELAY_ON"
SUBTYPE_RAIN_DELAY_OFF = "RAIN_DELAY_OFF"
# Rain sensor values
TYPE_RAIN_SENSOR_STATUS = "RAIN_SENSOR_DETECTION"
SUBTYPE_RAIN_SENSOR_DETECTION_ON = "RAIN_SENSOR_DETECTION_ON"
SUBTYPE_RAIN_SENSOR_DETECTION_OFF = "RAIN_SENSOR_DETECTION_OFF"
# Schedule webhook values
TYPE_SCHEDULE_STATUS = "SCHEDULE_STATUS"
SUBTYPE_SCHEDULE_STARTED = "SCHEDULE_STARTED"
SUBTYPE_SCHEDULE_STOPPED = "SCHEDULE_STOPPED"
SUBTYPE_SCHEDULE_COMPLETED = "SCHEDULE_COMPLETED"
SUBTYPE_WEATHER_NO_SKIP = "WEATHER_INTELLIGENCE_NO_SKIP"
SUBTYPE_WEATHER_SKIP = "WEATHER_INTELLIGENCE_SKIP"
SUBTYPE_WEATHER_CLIMATE_SKIP = "WEATHER_INTELLIGENCE_CLIMATE_SKIP"
SUBTYPE_WEATHER_FREEZE = "WEATHER_INTELLIGENCE_FREEZE"
# Zone webhook values
TYPE_ZONE_STATUS = "ZONE_STATUS"
SUBTYPE_ZONE_STARTED = "ZONE_STARTED"
SUBTYPE_ZONE_STOPPED = "ZONE_STOPPED"
SUBTYPE_ZONE_COMPLETED = "ZONE_COMPLETED"
SUBTYPE_ZONE_CYCLING = "ZONE_CYCLING"
SUBTYPE_ZONE_CYCLING_COMPLETED = "ZONE_CYCLING_COMPLETED"
SUBTYPE_ZONE_PAUSED = "ZONE_PAUSED"
# Webhook callbacks
LISTEN_EVENT_TYPES = [
"DEVICE_STATUS_EVENT",
"ZONE_STATUS_EVENT",
"RAIN_DELAY_EVENT",
"RAIN_SENSOR_DETECTION_EVENT",
"SCHEDULE_STATUS_EVENT",
]
WEBHOOK_CONST_ID = "homeassistant.rachio:"
WEBHOOK_PATH = URL_API + DOMAIN
SIGNAL_MAP = {
TYPE_CONTROLLER_STATUS: SIGNAL_RACHIO_CONTROLLER_UPDATE,
TYPE_RAIN_DELAY_STATUS: SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
TYPE_RAIN_SENSOR_STATUS: SIGNAL_RACHIO_RAIN_SENSOR_UPDATE,
TYPE_SCHEDULE_STATUS: SIGNAL_RACHIO_SCHEDULE_UPDATE,
TYPE_ZONE_STATUS: SIGNAL_RACHIO_ZONE_UPDATE,
}
@callback
def async_register_webhook(hass, webhook_id, entry_id):
"""Register a webhook."""
async def _async_handle_rachio_webhook(hass, webhook_id, request):
"""Handle webhook calls from the server."""
data = await request.json()
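        # The payload's external id is "homeassistant.rachio:<auth>" (see
        # WEBHOOK_CONST_ID); verify the auth token before dispatching the event.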
try:
auth = data.get(KEY_EXTERNAL_ID, "").split(":")[1]
assert auth == hass.data[DOMAIN][entry_id].rachio.webhook_auth
except (AssertionError, IndexError):
return web.Response(status=web.HTTPForbidden.status_code)
update_type = data[KEY_TYPE]
if update_type in SIGNAL_MAP:
async_dispatcher_send(hass, SIGNAL_MAP[update_type], data)
return web.Response(status=web.HTTPNoContent.status_code)
hass.components.webhook.async_register(
DOMAIN, "Rachio", webhook_id, _async_handle_rachio_webhook
)
async def async_get_or_create_registered_webhook_id_and_url(hass, entry):
"""Generate webhook ID."""
config = entry.data.copy()
updated_config = False
webhook_url = None
webhook_id = config.get(CONF_WEBHOOK_ID)
if not webhook_id:
webhook_id = hass.components.webhook.async_generate_id()
config[CONF_WEBHOOK_ID] = webhook_id
updated_config = True
if hass.components.cloud.async_active_subscription():
cloudhook_url = config.get(CONF_CLOUDHOOK_URL)
if not cloudhook_url:
cloudhook_url = await hass.components.cloud.async_create_cloudhook(
webhook_id
)
config[CONF_CLOUDHOOK_URL] = cloudhook_url
updated_config = True
webhook_url = cloudhook_url
if not webhook_url:
webhook_url = hass.components.webhook.async_generate_url(webhook_id)
if updated_config:
hass.config_entries.async_update_entry(entry, data=config)
return webhook_id, webhook_url
| [
"[email protected]"
] | |
6abfb37f56100c04c5a8cbe2d7015c80c253cf6e | 711756b796d68035dc6a39060515200d1d37a274 | /output_cog_tags/optimized_5276.py | f96515b50804aa1965f08969e7801c0d9cc5cbeb | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,580 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
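# Each block below places one colored marker for a Cog-subunit tag
# (GFP N-/C-terminus or anchor) at its optimized 3-D coordinates.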
if "Cog1_Anch" not in marker_sets:
s=new_marker_set('Cog1_Anch')
marker_sets["Cog1_Anch"]=s
s= marker_sets["Cog1_Anch"]
mark=s.place_marker((454.295, 440.981, 300.65), (0, 0, 1), 21.9005)
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((355.946, 424.19, 500.869), (1, 0.5, 0), 21.9005)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((412.644, 593.559, 557.081), (1, 0.5, 0), 21.9005)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((623.79, 589.94, 378.073), (1, 0.5, 0), 21.9005)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((362.824, 473.038, 475.95), (1, 0.87, 0), 21.9005)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((354.356, 461.616, 487.857), (1, 0.87, 0), 21.9005)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((512.882, 307.961, 572.428), (1, 0.87, 0), 21.9005)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((574.303, 418.992, 435.075), (0.97, 0.51, 0.75), 21.9005)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((595.04, 300.665, 517.279), (0.97, 0.51, 0.75), 21.9005)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((538.74, 324.189, 563.835), (0.97, 0.51, 0.75), 21.9005)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((436.049, 499.418, 452.124), (0.39, 0.31, 0.14), 21.9005)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((349.642, 471.117, 512.95), (0.39, 0.31, 0.14), 21.9005)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((536.204, 554.528, 366.226), (0.39, 0.31, 0.14), 21.9005)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((401.287, 486.686, 510.966), (0.6, 0.31, 0.64), 21.9005)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((409.575, 412.839, 422.354), (0.6, 0.31, 0.64), 21.9005)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((450.761, 413.68, 636.658), (0.6, 0.31, 0.64), 21.9005)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((399.115, 444.379, 445.665), (0.89, 0.1, 0.1), 21.9005)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((640.451, 524.817, 544.187), (0.89, 0.1, 0.1), 21.9005)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((529.327, 534.872, 611.099), (0.89, 0.1, 0.1), 21.9005)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((400.713, 509.226, 479.628), (0.3, 0.69, 0.29), 21.9005)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((497.055, 607.308, 396.047), (0.3, 0.69, 0.29), 21.9005)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"[email protected]"
] | |
e98489d6b8cb40edbfccddb2db4baba39162bb6f | d7016f69993570a1c55974582cda899ff70907ec | /sdk/monitor/azure-mgmt-monitor/azure/mgmt/monitor/v2018_03_01/aio/operations/_metric_alerts_status_operations.py | 1576cd774f361745d159f2234176c397ca099b72 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 7,849 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._metric_alerts_status_operations import build_list_by_name_request, build_list_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class MetricAlertsStatusOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~$(python-base-namespace).v2018_03_01.aio.MonitorManagementClient`'s
:attr:`metric_alerts_status` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace_async
async def list(
self, resource_group_name: str, rule_name: str, **kwargs: Any
) -> _models.MetricAlertStatusCollection:
"""Retrieve an alert rule status.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param rule_name: The name of the rule. Required.
:type rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MetricAlertStatusCollection or the result of cls(response)
:rtype: ~$(python-base-namespace).v2018_03_01.models.MetricAlertStatusCollection
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-03-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.MetricAlertStatusCollection]
request = build_list_request(
resource_group_name=resource_group_name,
rule_name=rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("MetricAlertStatusCollection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/metricAlerts/{ruleName}/status"} # type: ignore
@distributed_trace_async
async def list_by_name(
self, resource_group_name: str, rule_name: str, status_name: str, **kwargs: Any
) -> _models.MetricAlertStatusCollection:
"""Retrieve an alert rule status.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param rule_name: The name of the rule. Required.
:type rule_name: str
:param status_name: The name of the status. Required.
:type status_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MetricAlertStatusCollection or the result of cls(response)
:rtype: ~$(python-base-namespace).v2018_03_01.models.MetricAlertStatusCollection
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2018-03-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.MetricAlertStatusCollection]
request = build_list_by_name_request(
resource_group_name=resource_group_name,
rule_name=rule_name,
status_name=status_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_name.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("MetricAlertStatusCollection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_by_name.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Insights/metricAlerts/{ruleName}/status/{statusName}"} # type: ignore
| [
"[email protected]"
] | |
b70ee8da1219dd9283206acf0b078fbbe721eef6 | 558157826e0d25ff8f8e1e3a99a602832b49cfae | /model_0/mtl_model.py | 78ab9c906397f549608234f6815d6fb3659eb38a | [] | no_license | xljhtq/Transfer-learning- | 3ed5d6b8663cc852193f5b5935e7932732158910 | 44e83b30753dd92dbc5d4fae2d85aa8aaef2c82c | refs/heads/master | 2020-03-22T21:45:00.927073 | 2018-08-08T06:56:24 | 2018-08-08T06:56:24 | 140,711,696 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,166 | py | # encoding=utf8
import tensorflow as tf
from base_model import *
TASK_NUM = 2
class MTLModel():
def __init__(self,
max_len=25,
filter_sizes=1,
num_filters=1,
num_hidden=1,
word_vocab=None,
l2_reg_lambda=0.0,
learning_rate=1,
adv=True):
self.dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")
self.input_left_0 = tf.placeholder(tf.int32, [None, max_len], name="input_left_0")
self.input_right_0 = tf.placeholder(tf.int32, [None, max_len], name="input_right_0")
self.input_y_0 = tf.placeholder(tf.int32, [None, 2], name="input_y_0")
self.input_task_0 = tf.placeholder(tf.int32, name="input_task_0")
print("input_left", self.input_left_0.name)
print("input_right", self.input_right_0.name)
print("dropout_keep_prob", self.dropout_keep_prob.name)
print ("input_y", self.input_y_0.name)
print ("input_task", self.input_task_0.name)
self.input_left_1 = tf.placeholder(tf.int32, [None, max_len], name="input_left_1")
self.input_right_1 = tf.placeholder(tf.int32, [None, max_len], name="input_right_1")
self.input_y_1 = tf.placeholder(tf.int32, [None, 2], name="input_y_1")
self.input_task_1 = tf.placeholder(tf.int32, name="input_task_1")
self.adv = adv
self.word_vocab = word_vocab
self.filter_sizes = filter_sizes
self.num_filters = num_filters
self.max_len = max_len
self.num_hidden = num_hidden
self.l2_reg_lambda = l2_reg_lambda
self.learning_rate = learning_rate
wordInitial = tf.constant(word_vocab.word_vecs)
self.word_embed = tf.get_variable("word_embedding",
trainable=False,
initializer=wordInitial,
dtype=tf.float32)
self.shared_conv = ConvLayer(layer_name='conv_shared',
filter_sizes=self.filter_sizes,
num_filters=self.num_filters)
self.shared_linear = LinearLayer('linear_shared', TASK_NUM, True)
self.tensors = []
with tf.name_scope("task_0"):
self.build_task_graph(task_label=self.input_task_0,
labels=self.input_y_0,
sentence_0=self.input_left_0,
sentence_1=self.input_right_0)
with tf.name_scope("task_1"):
self.build_task_graph(task_label=self.input_task_1,
labels=self.input_y_1,
sentence_0=self.input_left_1,
sentence_1=self.input_right_1)
def build_task_graph(self,
task_label,
labels,
sentence_0,
sentence_1):
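        """Build one task's branch: a private CNN plus the shared CNN; when
        `adv` is set, add adversarial task-classification and orthogonality
        (diff) regularizers in the shared-private multi-task style.
        """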
sentence_0 = tf.nn.embedding_lookup(self.word_embed, sentence_0)
sentence_1 = tf.nn.embedding_lookup(self.word_embed, sentence_1)
sentence_0 = tf.nn.dropout(sentence_0, self.dropout_keep_prob)
sentence_1 = tf.nn.dropout(sentence_1, self.dropout_keep_prob)
######## layer
conv_layer = ConvLayer(layer_name='conv_task',
filter_sizes=self.filter_sizes,
num_filters=self.num_filters)
########
conv_out_0 = conv_layer(sentence_0)
conv_out_0 = max_pool(conv_outs=conv_out_0,
max_len=self.max_len,
num_filters=self.num_filters)
conv_out_1 = conv_layer(sentence_1)
conv_out_1 = max_pool(conv_outs=conv_out_1,
max_len=self.max_len,
num_filters=self.num_filters)
task_output = tf.concat(axis=1, values=[conv_out_0, conv_out_1], name='task_output')
shared_out_0 = self.shared_conv(sentence_0)
shared_out_0 = max_pool(conv_outs=shared_out_0,
max_len=self.max_len,
num_filters=self.num_filters)
shared_out_1 = self.shared_conv(sentence_1)
shared_out_1 = max_pool(conv_outs=shared_out_1,
max_len=self.max_len,
num_filters=self.num_filters)
shared_output = tf.concat(axis=1, values=[shared_out_0, shared_out_1], name='shared_output')
if self.adv:
feature = tf.concat([task_output, shared_output], axis=1)
else:
feature = task_output
feature = tf.nn.dropout(feature, self.dropout_keep_prob)
# Map the features to 2 classes
linear = LinearLayer('linear', 2, True)
logits, loss_l2 = linear(feature)
logits_prob = tf.nn.softmax(logits, name='prob')
print ("logits_prob: ",logits_prob.name)
xentropy = tf.nn.softmax_cross_entropy_with_logits(
labels=labels,
logits=logits)
loss_ce = tf.reduce_mean(xentropy)
loss_adv, loss_adv_l2 = self.adversarial_loss(shared_output, task_label, labels)
loss_diff = self.diff_loss(shared_output, task_output)
if self.adv:
print ("Adv is True")
loss = loss_ce + 0.05 * loss_adv + self.l2_reg_lambda * (loss_l2 + loss_adv_l2) + 0.01 * loss_diff
else:
print ("Adv is False")
loss = loss_ce + self.l2_reg_lambda * loss_l2
pred = tf.argmax(logits, axis=1)
labels_2 = tf.argmax(labels, axis=1)
acc = tf.cast(tf.equal(pred, labels_2), tf.float32)
acc = tf.reduce_mean(acc)
self.tensors.append((acc, loss, loss_adv, loss_ce))
def adversarial_loss(self, feature, task_label, y_label):
        '''Train the shared encoder so that a task classifier cannot reliably
        predict which task an example came from using the shared feature.
        '''
# input = tf.stop_gradient(input)
        feature = flip_gradient(feature)  # gradient reversal: updates push the encoder to increase the adversary's loss
feature = tf.nn.dropout(feature, self.dropout_keep_prob)
# Map the features to TASK_NUM classes
logits, loss_l2 = self.shared_linear(feature)
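        # Tile the scalar task id into a (batch, 2) one-hot matrix: `medium`
        # only supplies the batch size, and multiplying a ones-vector by the
        # single one-hot row broadcasts it to every example.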
label = tf.reshape(tf.one_hot(task_label, 2, axis=-1),
shape=[1, 2])
medium = tf.slice(y_label, begin=[0, 0], size=[-1, 1])
label = tf.matmul(tf.fill(tf.shape(medium), 1.0), label)
loss_adv = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(labels=label, logits=logits))
return loss_adv, loss_l2
def diff_loss(self, shared_feat, task_feat):
'''Orthogonality Constraints from https://github.com/tensorflow/models,
in directory research/domain_adaptation
'''
        task_feat -= tf.reduce_mean(task_feat, 0)  # center each column (subtract the feature-wise mean)
shared_feat -= tf.reduce_mean(shared_feat, 0)
        task_feat = tf.nn.l2_normalize(task_feat, 1)  # L2-normalize each row
shared_feat = tf.nn.l2_normalize(shared_feat, 1)
correlation_matrix = tf.matmul(task_feat, shared_feat, transpose_a=True)
cost = tf.reduce_mean(tf.square(correlation_matrix)) * 0.01
cost = tf.where(cost > 0, cost, 0, name='value')
assert_op = tf.Assert(tf.is_finite(cost), [cost])
with tf.control_dependencies([assert_op]):
loss_diff = tf.identity(cost)
return loss_diff
# def build_train_op(self):
# self.train_ops = []
# for _, loss, _ in self.tensors:
# global_step = tf.Variable(0, name="global_step", trainable=False)
# train_op = optimize(loss,
# global_step,
# self.learning_rate)
# self.train_ops.append([train_op, global_step])
def build_train_op(self):
self.train_ops = []
for _, loss, _, _ in self.tensors:
train_op = optimize(loss, self.learning_rate)
self.train_ops.append(train_op)
| [
"[email protected]"
] | |
ecdb4d5eb81b318690c4501b23d6c5cb5c461614 | 3f8114f42e5d86f539827ec502fef390f8986908 | /tools/Polygraphy/polygraphy/backend/base/runner.py | ab0c28c6ba8ae53688fb8547abc041991fa05523 | [
"Apache-2.0",
"BSD-3-Clause",
"ISC",
"BSD-2-Clause",
"MIT"
] | permissive | MilanPutnik/TensorRT | 47dc6fc612a0dce02d0bb3b448d9124136443bd6 | eb8442dba3c9e85ffb77e0d870d2e29adcb0a4aa | refs/heads/master | 2023-06-29T12:42:01.122631 | 2021-08-05T19:15:26 | 2021-08-05T20:14:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,994 | py | #
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import time
from collections import defaultdict
from polygraphy import config, func, mod, util
from polygraphy.logger import G_LOGGER, LogMode
np = mod.lazy_import("numpy")
@mod.export()
class BaseRunner(object):
"""
Base class for Polygraphy runners. All runners should override the functions and attributes specified here.
"""
RUNNER_COUNTS = defaultdict(int)
def __init__(self, name=None, prefix=None):
"""
Args:
name (str):
The name to use for this runner.
prefix (str):
The human-readable name prefix to use for this runner.
A runner count and timestamp will be appended to this prefix.
Only used if name is not provided.
"""
prefix = util.default(prefix, "Runner")
if name is None:
count = BaseRunner.RUNNER_COUNTS[prefix]
BaseRunner.RUNNER_COUNTS[prefix] += 1
name = "{:}-N{:}-{:}-{:}".format(prefix, count, time.strftime("%x"), time.strftime("%X"))
self.name = name
self.inference_time = None
self.is_active = False
"""bool: Whether this runner has been activated, either via context manager, or by calling ``activate()``."""
@func.constantmethod
def last_inference_time(self):
"""
Returns the total inference time required during the last call to ``infer()``.
Returns:
float: The time in seconds, or None if runtime was not measured by the runner.
"""
if self.inference_time is None:
G_LOGGER.warning(
"{:35} | inference_time was not set. Inference time will be incorrect!"
"To correctly compare runtimes, please set the inference_time property in the"
"infer() function".format(self.name),
mode=LogMode.ONCE,
)
return None
return self.inference_time
def __enter__(self):
"""
Activate the runner for inference. This may involve allocating GPU buffers, for example.
"""
self.activate()
return self
def __exit__(self, exc_type, exc_value, traceback):
"""
Deactivate the runner.
If the POLYGRAPHY_INTERNAL_CORRECTNESS_CHECKS environment variable is set to `1`, this
will also check that the runner was reset to its state prior to activation.
"""
self.deactivate()
def activate_impl(self):
"""
Implementation for runner activation. Derived classes should override this function
rather than ``activate()``.
"""
pass
def activate(self):
"""
Activate the runner for inference. This may involve allocating GPU buffers, for example.
Generally, you should use a context manager instead of manually activating and deactivating.
For example:
::
with RunnerType(...) as runner:
runner.infer(...)
"""
if self.is_active:
G_LOGGER.warning(
"{:35} | Already active; will not activate again. If you really want to "
"activate this runner again, call activate_impl() directly".format(self.name)
)
return
if config.INTERNAL_CORRECTNESS_CHECKS:
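            # Snapshot the attribute dict so deactivate() can verify that the
            # runner restored its pre-activation state.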
self._pre_activate_runner_state = copy.copy(vars(self))
self.activate_impl()
self.is_active = True
def infer_impl(self, feed_dict):
"""
Implementation for runner inference. Derived classes should override this function
rather than ``infer()``
"""
raise NotImplementedError("BaseRunner is an abstract class")
def infer(self, feed_dict, check_inputs=True):
"""
Runs inference using the provided feed_dict.
Args:
feed_dict (OrderedDict[str, numpy.ndarray]):
A mapping of input tensor names to corresponding input NumPy arrays.
check_inputs (bool):
Whether to check that the provided ``feed_dict`` includes the expected inputs
with the expected data types and shapes.
Returns:
OrderedDict[str, numpy.ndarray]:
A mapping of output tensor names to their corresponding NumPy arrays.
IMPORTANT: Runners may reuse these output buffers. Thus, if you need to save
outputs from multiple inferences, you should make a copy with ``copy.deepcopy(outputs)``.
"""
if not self.is_active:
G_LOGGER.critical("{:35} | Must be activated prior to calling infer()".format(self.name))
if check_inputs:
input_metadata = self.get_input_metadata()
G_LOGGER.verbose("Runner input metadata is: {:}".format(input_metadata))
util.check_dict_contains(
feed_dict, input_metadata.keys(), dict_name="feed_dict", log_func=G_LOGGER.critical
)
for name, inp in feed_dict.items():
meta = input_metadata[name]
if not np.issubdtype(inp.dtype, meta.dtype):
G_LOGGER.critical(
"Input tensor: {:} | Received unexpected dtype: {:}.\n"
"Note: Expected type: {:}".format(name, inp.dtype, meta.dtype)
)
if not util.is_valid_shape_override(inp.shape, meta.shape):
G_LOGGER.critical(
"Input tensor: {:} | Received incompatible shape: {:}.\n"
"Note: Expected a shape compatible with: {:}".format(name, inp.shape, meta.shape)
)
return self.infer_impl(feed_dict)
@func.constantmethod
def get_input_metadata_impl(self):
"""
        Implementation for `get_input_metadata`. Derived classes should override this function
rather than `get_input_metadata`.
"""
raise NotImplementedError("BaseRunner is an abstract class")
def get_input_metadata(self):
"""
Returns information about the inputs of the model.
Shapes here may include dynamic dimensions, represented by ``None``.
Must be called only after activate() and before deactivate().
Returns:
TensorMetadata: Input names, shapes, and data types.
"""
return self.get_input_metadata_impl()
def deactivate_impl(self):
"""
Implementation for runner deactivation. Derived classes should override this function
rather than ``deactivate()``.
"""
pass
def deactivate(self):
"""
Deactivate the runner.
If the POLYGRAPHY_INTERNAL_CORRECTNESS_CHECKS environment variable is set to `1`, this
will also check that the runner was reset to its state prior to activation.
Generally, you should use a context manager instead of manually activating and deactivating.
For example:
::
with RunnerType(...) as runner:
runner.infer(...)
"""
if not self.is_active:
G_LOGGER.warning(
"{:35} | Not active; will not deactivate. If you really want to "
"deactivate this runner, call deactivate_impl() directly".format(self.name)
)
return
self.inference_time = None
self.is_active = None
try:
self.deactivate_impl()
except:
raise # Needed so we can have the else clause
else:
self.is_active = False
if config.INTERNAL_CORRECTNESS_CHECKS:
old_state = self._pre_activate_runner_state
del self._pre_activate_runner_state
if old_state != vars(self):
G_LOGGER.internal_error(
"Runner state was not reset after deactivation. "
"Note:\nOld state: {:}\nNew state: {:}".format(old_state, vars(self))
)
def __del__(self):
if self.is_active:
# __del__ is not guaranteed to be called, but when it is, this could be a useful warning.
print("[W] {:35} | Was activated but never deactivated. This could cause a memory leak!".format(self.name))
# ===== file: top_interview_questions/easy/linked_list/merge_two_sorted_lists.py (repo: StefanRankovic/leetcode) =====
# Definition for singly-linked list.
# (uncommented here so the file also runs outside the LeetCode judge)
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next
class Solution:
def mergeTwoLists(self, l1: ListNode, l2: ListNode) -> ListNode:
head = current = ListNode()
while l1 and l2:
if l1.val <= l2.val:
current.next = l1
l1 = l1.next
else:
current.next = l2
l2 = l2.next
current = current.next
if l1:
current.next = l1
else:
current.next = l2
return head.next
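# Quick local check (added; not part of the LeetCode submission):
if __name__ == "__main__":
    def build(values):
        head = cur = ListNode()
        for v in values:
            cur.next = ListNode(v)
            cur = cur.next
        return head.next
    merged = Solution().mergeTwoLists(build([1, 2, 4]), build([1, 3, 4]))
    out = []
    while merged:
        out.append(merged.val)
        merged = merged.next
    print(out)  # expected: [1, 1, 2, 3, 4]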
# ===== file: 4_5_List comprehension and generator expressions/Main.py (repo: Pavlo-Olshansky/Python_learns_1) =====
xyz = [i for i in range(5)]
print(xyz)
xyz = (i for i in range(5)) # a generator object - values are produced lazily, not stored
print(xyz)
for i in xyz:
print(i)
# Generators are faster to create, but iterating over them takes longer
print('-------- 5 ----------\n')
input_list = [1, 5, 7, 10, 3, 20, 25, 12, 5]
def div_by_five(num):
if num % 5 == 0:
return True
else:
return False
xyz = (i for i in input_list if div_by_five(i)) # We're saving memory, but iteration is slower
[print(i) for i in xyz]
xyz = [i for i in input_list if div_by_five(i)] # We don't save memory, but it's faster
[print(i, end=' ') for i in xyz]
[[print(i, ii) for ii in range(3)] for i in range(3)]
board = ([(i+1, ii+1) for ii in range(555)] for i in range(5)) # Generator with 2 loops - must iterate
[print(i) for i in board] #like a board
# [[print(ii) for ii in i] for i in board] # in line
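# Added sketch: the memory difference between the two forms, measured directly.
import sys
big_list = [i for i in range(100000)]
big_gen = (i for i in range(100000))
print(sys.getsizeof(big_list))  # hundreds of KB: the list stores every element
print(sys.getsizeof(big_gen))   # ~100 bytes: the generator is constant-size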
# ===== file: pycharm学习-基础篇/python模块/1.1时间模块.py (repo: rishinkaku/python_course) =====
# Timestamps
import time
print(time.time()) # seconds since the epoch (1970-01-01 00:00:00), returned as a float
print(type(time.time())) # confirms the return type: <class 'float'>
print(time.clock()) # CPU time as a float, useful for timing code (deprecated since 3.3, removed in Python 3.8)
'''
Directive  Meaning
%a  locale's abbreviated weekday name
%A  locale's full weekday name
%b  locale's abbreviated month name
%B  locale's full month name
%c  locale's appropriate date and time representation
%d  day of the month (01-31)
%H  hour, 24-hour clock (00-23)
%I  hour, 12-hour clock (01-12)
%j  day of the year (001-366)
%m  month (01-12)
%M  minute (00-59)
%S  second (00-59)
%y  year without century (00-99)
%Y  year with century (e.g. 2018)
'''
'''
struct_time tuple: 9 attributes
Index  Attribute              Values
0      tm_year  (year)        e.g. 2018
1      tm_mon   (month)       1-12
2      tm_mday  (day)         1-31
3      tm_hour  (hour)        0-23
4      tm_min   (minute)      0-59
5      tm_sec   (second)      0-61 (60/61 allow for leap seconds)
6      tm_wday  (weekday)     0-6 (Monday is 0)
7      tm_yday  (day of year) 1-366
8      tm_isdst (DST flag)    default -1; 1 means DST in effect, 0 means not
'''
# 1. UTC (Coordinated Universal Time): the Greenwich-based world standard; China is UTC+8
# 1. localtime: get the current time as a struct_time
print(time.localtime()) # the 9-attribute tuple, e.g. time.struct_time(tm_year=2018, tm_mon=5, tm_mday=30, tm_hour=13, tm_min=44, tm_sec=27, tm_wday=2, tm_yday=150, tm_isdst=0)
# 2. Get today's date and time
import datetime
print(datetime.datetime.now()) # today's full date and time
print(datetime.date.today()) # today's date
# step2: get yesterday's date
def get_yesterday():
    today = datetime.date.today() # today's date
    oneday = datetime.timedelta(days=1) # days=1 means a span of one day
    yesterday = today - oneday
    print(yesterday)
    return yesterday
yesterday = get_yesterday()
print("Yesterday was:", yesterday)
# 3. Convert between time and date formats
import time
import datetime
def str_to_datetime(datestr, format): # datestr is the time string to parse
    return datetime.datetime.strptime(datestr, format) # datetime.datetime.strptime does the parsing
print(time.strftime("%y-%m-%d ", time.localtime())) # strftime formats a struct_time, e.g. 18-05-30
print(time.strftime("%y-%m-%d %H:%M:%S", time.localtime())) # e.g. 18-05-30 14:13:36
print(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())) # e.g. 2018-05-30 14:13:36
print(str_to_datetime("2014-2-16", "%Y-%m-%d")) # prints 2014-02-16 00:00:00
print('\n')
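# Added sketch: full round trip between timestamp, struct_time and string.
ts = time.time()                                    # float timestamp
st = time.localtime(ts)                             # struct_time
s = time.strftime("%Y-%m-%d %H:%M:%S", st)          # formatted string
back = time.mktime(time.strptime(s, "%Y-%m-%d %H:%M:%S"))  # back to a timestamp
print(ts, s, back)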
# 4. Calendar-related information
import calendar
# Get one month's calendar, returned as a string
cal = calendar.month(2015, 12) # calendar for December 2015
print(cal)
# Get a whole year's calendar
cal = calendar.calendar(2018)
print(cal)
# Set the first day of the week
calendar.setfirstweekday(calendar.SUNDAY) # print with Sunday as the first column (the default is Monday)
cal = calendar.month(2015, 12)
print(cal)
# step2: HTML-formatted calendar (rarely needed)
cal = calendar.HTMLCalendar(calendar.MONDAY)
print(cal.formatmonth(2015, 12))
# ===== file: sdk/python/pulumi_google_native/compute/beta/get_region_target_https_proxy.py (repo: pulumi/pulumi-google-native) =====
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetRegionTargetHttpsProxyResult',
'AwaitableGetRegionTargetHttpsProxyResult',
'get_region_target_https_proxy',
'get_region_target_https_proxy_output',
]
@pulumi.output_type
class GetRegionTargetHttpsProxyResult:
def __init__(__self__, authentication=None, authorization=None, authorization_policy=None, certificate_map=None, creation_timestamp=None, description=None, fingerprint=None, http_filters=None, kind=None, name=None, proxy_bind=None, quic_override=None, region=None, self_link=None, server_tls_policy=None, ssl_certificates=None, ssl_policy=None, url_map=None):
if authentication and not isinstance(authentication, str):
raise TypeError("Expected argument 'authentication' to be a str")
pulumi.set(__self__, "authentication", authentication)
if authorization and not isinstance(authorization, str):
raise TypeError("Expected argument 'authorization' to be a str")
pulumi.set(__self__, "authorization", authorization)
if authorization_policy and not isinstance(authorization_policy, str):
raise TypeError("Expected argument 'authorization_policy' to be a str")
pulumi.set(__self__, "authorization_policy", authorization_policy)
if certificate_map and not isinstance(certificate_map, str):
raise TypeError("Expected argument 'certificate_map' to be a str")
pulumi.set(__self__, "certificate_map", certificate_map)
if creation_timestamp and not isinstance(creation_timestamp, str):
raise TypeError("Expected argument 'creation_timestamp' to be a str")
pulumi.set(__self__, "creation_timestamp", creation_timestamp)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if fingerprint and not isinstance(fingerprint, str):
raise TypeError("Expected argument 'fingerprint' to be a str")
pulumi.set(__self__, "fingerprint", fingerprint)
if http_filters and not isinstance(http_filters, list):
raise TypeError("Expected argument 'http_filters' to be a list")
pulumi.set(__self__, "http_filters", http_filters)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if proxy_bind and not isinstance(proxy_bind, bool):
raise TypeError("Expected argument 'proxy_bind' to be a bool")
pulumi.set(__self__, "proxy_bind", proxy_bind)
if quic_override and not isinstance(quic_override, str):
raise TypeError("Expected argument 'quic_override' to be a str")
pulumi.set(__self__, "quic_override", quic_override)
if region and not isinstance(region, str):
raise TypeError("Expected argument 'region' to be a str")
pulumi.set(__self__, "region", region)
if self_link and not isinstance(self_link, str):
raise TypeError("Expected argument 'self_link' to be a str")
pulumi.set(__self__, "self_link", self_link)
if server_tls_policy and not isinstance(server_tls_policy, str):
raise TypeError("Expected argument 'server_tls_policy' to be a str")
pulumi.set(__self__, "server_tls_policy", server_tls_policy)
if ssl_certificates and not isinstance(ssl_certificates, list):
raise TypeError("Expected argument 'ssl_certificates' to be a list")
pulumi.set(__self__, "ssl_certificates", ssl_certificates)
if ssl_policy and not isinstance(ssl_policy, str):
raise TypeError("Expected argument 'ssl_policy' to be a str")
pulumi.set(__self__, "ssl_policy", ssl_policy)
if url_map and not isinstance(url_map, str):
raise TypeError("Expected argument 'url_map' to be a str")
pulumi.set(__self__, "url_map", url_map)
@property
@pulumi.getter
def authentication(self) -> str:
"""
[Deprecated] Use serverTlsPolicy instead.
"""
warnings.warn("""[Deprecated] Use serverTlsPolicy instead.""", DeprecationWarning)
pulumi.log.warn("""authentication is deprecated: [Deprecated] Use serverTlsPolicy instead.""")
return pulumi.get(self, "authentication")
@property
@pulumi.getter
def authorization(self) -> str:
"""
[Deprecated] Use authorizationPolicy instead.
"""
warnings.warn("""[Deprecated] Use authorizationPolicy instead.""", DeprecationWarning)
pulumi.log.warn("""authorization is deprecated: [Deprecated] Use authorizationPolicy instead.""")
return pulumi.get(self, "authorization")
@property
@pulumi.getter(name="authorizationPolicy")
def authorization_policy(self) -> str:
"""
Optional. A URL referring to a networksecurity.AuthorizationPolicy resource that describes how the proxy should authorize inbound traffic. If left blank, access will not be restricted by an authorization policy. Refer to the AuthorizationPolicy resource for additional details. authorizationPolicy only applies to a global TargetHttpsProxy attached to globalForwardingRules with the loadBalancingScheme set to INTERNAL_SELF_MANAGED. Note: This field currently has no impact.
"""
return pulumi.get(self, "authorization_policy")
@property
@pulumi.getter(name="certificateMap")
def certificate_map(self) -> str:
"""
        URL of a certificate map that identifies a certificate map associated with the given target proxy. This field can only be set for global target proxies. If set, sslCertificates will be ignored. Accepted format is //certificatemanager.googleapis.com/projects/{project}/locations/{location}/certificateMaps/{resourceName}.
"""
return pulumi.get(self, "certificate_map")
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> str:
"""
Creation timestamp in RFC3339 text format.
"""
return pulumi.get(self, "creation_timestamp")
@property
@pulumi.getter
def description(self) -> str:
"""
An optional description of this resource. Provide this property when you create the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def fingerprint(self) -> str:
"""
Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. This field will be ignored when inserting a TargetHttpsProxy. An up-to-date fingerprint must be provided in order to patch the TargetHttpsProxy; otherwise, the request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the TargetHttpsProxy.
"""
return pulumi.get(self, "fingerprint")
@property
@pulumi.getter(name="httpFilters")
def http_filters(self) -> Sequence[str]:
"""
        URLs to networkservices.HttpFilter resources enabled for xDS clients using this configuration. For example, https://networkservices.googleapis.com/beta/projects/project/locations/location/httpFilters/httpFilter. Only filters that handle outbound connection and stream events may be specified. These filters work in conjunction with a default set of HTTP filters that may already be configured by Traffic Director. Traffic Director will determine the final location of these filters within xDS configuration based on the name of the HTTP filter. If Traffic Director positions multiple filters at the same location, those filters will be in the same order as specified in this list. httpFilters only applies for loadbalancers with loadBalancingScheme set to INTERNAL_SELF_MANAGED. See ForwardingRule for more details.
"""
return pulumi.get(self, "http_filters")
@property
@pulumi.getter
def kind(self) -> str:
"""
Type of resource. Always compute#targetHttpsProxy for target HTTPS proxies.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="proxyBind")
def proxy_bind(self) -> bool:
"""
This field only applies when the forwarding rule that references this target proxy has a loadBalancingScheme set to INTERNAL_SELF_MANAGED. When this field is set to true, Envoy proxies set up inbound traffic interception and bind to the IP address and port specified in the forwarding rule. This is generally useful when using Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar proxy). The Envoy proxy listens for inbound requests and handles requests when it receives them. The default is false.
"""
return pulumi.get(self, "proxy_bind")
@property
@pulumi.getter(name="quicOverride")
def quic_override(self) -> str:
"""
Specifies the QUIC override policy for this TargetHttpsProxy resource. This setting determines whether the load balancer attempts to negotiate QUIC with clients. You can specify NONE, ENABLE, or DISABLE. - When quic-override is set to NONE, Google manages whether QUIC is used. - When quic-override is set to ENABLE, the load balancer uses QUIC when possible. - When quic-override is set to DISABLE, the load balancer doesn't use QUIC. - If the quic-override flag is not specified, NONE is implied.
"""
return pulumi.get(self, "quic_override")
@property
@pulumi.getter
def region(self) -> str:
"""
URL of the region where the regional TargetHttpsProxy resides. This field is not applicable to global TargetHttpsProxies.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> str:
"""
Server-defined URL for the resource.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter(name="serverTlsPolicy")
def server_tls_policy(self) -> str:
"""
Optional. A URL referring to a networksecurity.ServerTlsPolicy resource that describes how the proxy should authenticate inbound traffic. serverTlsPolicy only applies to a global TargetHttpsProxy attached to globalForwardingRules with the loadBalancingScheme set to INTERNAL_SELF_MANAGED or EXTERNAL or EXTERNAL_MANAGED. For details which ServerTlsPolicy resources are accepted with INTERNAL_SELF_MANAGED and which with EXTERNAL, EXTERNAL_MANAGED loadBalancingScheme consult ServerTlsPolicy documentation. If left blank, communications are not encrypted.
"""
return pulumi.get(self, "server_tls_policy")
@property
@pulumi.getter(name="sslCertificates")
def ssl_certificates(self) -> Sequence[str]:
"""
URLs to SslCertificate resources that are used to authenticate connections between users and the load balancer. At least one SSL certificate must be specified. Currently, you may specify up to 15 SSL certificates. sslCertificates do not apply when the load balancing scheme is set to INTERNAL_SELF_MANAGED.
"""
return pulumi.get(self, "ssl_certificates")
@property
@pulumi.getter(name="sslPolicy")
def ssl_policy(self) -> str:
"""
URL of SslPolicy resource that will be associated with the TargetHttpsProxy resource. If not set, the TargetHttpsProxy resource has no SSL policy configured.
"""
return pulumi.get(self, "ssl_policy")
@property
@pulumi.getter(name="urlMap")
def url_map(self) -> str:
"""
        A fully-qualified or valid partial URL to the UrlMap resource that defines the mapping from URL to the BackendService. For example, the following are all valid URLs for specifying a URL map: - https://www.googleapis.com/compute/v1/projects/project/global/urlMaps/url-map - projects/project/global/urlMaps/url-map - global/urlMaps/url-map
"""
return pulumi.get(self, "url_map")
class AwaitableGetRegionTargetHttpsProxyResult(GetRegionTargetHttpsProxyResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetRegionTargetHttpsProxyResult(
authentication=self.authentication,
authorization=self.authorization,
authorization_policy=self.authorization_policy,
certificate_map=self.certificate_map,
creation_timestamp=self.creation_timestamp,
description=self.description,
fingerprint=self.fingerprint,
http_filters=self.http_filters,
kind=self.kind,
name=self.name,
proxy_bind=self.proxy_bind,
quic_override=self.quic_override,
region=self.region,
self_link=self.self_link,
server_tls_policy=self.server_tls_policy,
ssl_certificates=self.ssl_certificates,
ssl_policy=self.ssl_policy,
url_map=self.url_map)
def get_region_target_https_proxy(project: Optional[str] = None,
region: Optional[str] = None,
target_https_proxy: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRegionTargetHttpsProxyResult:
"""
Returns the specified TargetHttpsProxy resource in the specified region.
"""
__args__ = dict()
__args__['project'] = project
__args__['region'] = region
__args__['targetHttpsProxy'] = target_https_proxy
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('google-native:compute/beta:getRegionTargetHttpsProxy', __args__, opts=opts, typ=GetRegionTargetHttpsProxyResult).value
return AwaitableGetRegionTargetHttpsProxyResult(
authentication=pulumi.get(__ret__, 'authentication'),
authorization=pulumi.get(__ret__, 'authorization'),
authorization_policy=pulumi.get(__ret__, 'authorization_policy'),
certificate_map=pulumi.get(__ret__, 'certificate_map'),
creation_timestamp=pulumi.get(__ret__, 'creation_timestamp'),
description=pulumi.get(__ret__, 'description'),
fingerprint=pulumi.get(__ret__, 'fingerprint'),
http_filters=pulumi.get(__ret__, 'http_filters'),
kind=pulumi.get(__ret__, 'kind'),
name=pulumi.get(__ret__, 'name'),
proxy_bind=pulumi.get(__ret__, 'proxy_bind'),
quic_override=pulumi.get(__ret__, 'quic_override'),
region=pulumi.get(__ret__, 'region'),
self_link=pulumi.get(__ret__, 'self_link'),
server_tls_policy=pulumi.get(__ret__, 'server_tls_policy'),
ssl_certificates=pulumi.get(__ret__, 'ssl_certificates'),
ssl_policy=pulumi.get(__ret__, 'ssl_policy'),
url_map=pulumi.get(__ret__, 'url_map'))
@_utilities.lift_output_func(get_region_target_https_proxy)
def get_region_target_https_proxy_output(project: Optional[pulumi.Input[Optional[str]]] = None,
region: Optional[pulumi.Input[str]] = None,
target_https_proxy: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetRegionTargetHttpsProxyResult]:
"""
Returns the specified TargetHttpsProxy resource in the specified region.
"""
...
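# Minimal usage sketch (added; not part of the generated SDK, names below are placeholders):
#
# import pulumi_google_native.compute.beta as compute_beta
#
# proxy = compute_beta.get_region_target_https_proxy(
#     project="my-project",
#     region="us-central1",
#     target_https_proxy="my-proxy",
# )
# print(proxy.url_map)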
# ===== file: api_restful/user/api.py (repo: zhanghe06/bearing_project) =====
#!/usr/bin/env python
# encoding: utf-8
"""
@author: zhanghe
@software: PyCharm
@file: api.py
@time: 2020-02-28 21:26
"""
import datetime
from api_restful.databases.bearing import db_bearing
from app_common.maps.status_delete import STATUS_DEL_OK
from api_restful.models.model_bearing import User
from app_common.libs.mysql_orm_op import DbInstance
db_instance = DbInstance(db_bearing)
def get_user_row_by_id(user_id):
"""
    Get a record by primary-key id
:param user_id:
:return: None/object
"""
return db_instance.get_row_by_id(User, user_id)
def get_user_row(*args, **kwargs):
"""
    Get a single record
:param args:
:param kwargs:
:return: None/object
"""
return db_instance.get_row(User, *args, **kwargs)
def get_user_rows(*args, **kwargs):
"""
    Get a list of records
:param args:
:param kwargs:
:return:
"""
return db_instance.get_rows(User, *args, **kwargs)
def get_user_limit_rows_by_last_id(last_pk, limit_num, *args, **kwargs):
"""
    Get the newest records after the given last primary-key id
:param last_pk:
:param limit_num:
:param args:
:param kwargs:
:return:
"""
return db_instance.get_limit_rows_by_last_id(User, last_pk, limit_num, *args, **kwargs)
def add_user(user_data):
"""
    Add a record
:param user_data:
:return: None/Value of user.id
:except:
"""
return db_instance.add(User, user_data)
def edit_user(user_id, user_data):
"""
    Update a record
:param user_id:
:param user_data:
:return: Number of affected rows (Example: 0/1)
:except:
"""
return db_instance.edit(User, user_id, user_data)
def delete_user(user_id, force=False):
"""
    Delete a record (soft delete unless force=True)
:param user_id:
:param force:
:return: Number of affected rows (Example: 0/1)
:except:
"""
if force:
return db_instance.delete(User, user_id)
else:
data = {
'status_delete': STATUS_DEL_OK,
'delete_time': datetime.datetime.now()
}
if isinstance(user_id, list):
return db_instance.update_rows(User, data, User.id.in_(user_id))
else:
return db_instance.edit(User, user_id, data)
def get_user_pagination(page=1, size=10, *args, **kwargs):
"""
    Get a paginated list of records
    Usage:
        items: the records on this page
        has_next: True if at least one more page follows this one
        has_prev: True if at least one page precedes this one
        next_num: page number of the next page
        prev_num: page number of the previous page
        iter_pages(): iterate over page numbers
        iter_pages(left_edge=2, left_current=2, right_current=5, right_edge=2)  # the default arguments
:param page:
:param size:
:param args:
:param kwargs:
:return:
"""
rows = db_instance.get_pagination(User, page, size, *args, **kwargs)
return rows
def delete_user_table():
"""
    Truncate the table
:return:
"""
return db_instance.delete_table(User)
def count_user(*args, **kwargs):
"""
    Count records
:param args:
:param kwargs:
:return:
"""
return db_instance.count(User, *args, **kwargs)
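# Usage sketch (added; assumes the app's models and DB session are configured):
# new_id = add_user({'name': 'alice'})
# edit_user(new_id, {'name': 'bob'})
# delete_user(new_id)              # soft delete: sets status_delete + delete_time
# delete_user(new_id, force=True)  # hard delete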
# ===== file: dvc/dependency/__init__.py (repo: franekp/dvc) =====
import schema
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from dvc.exceptions import DvcException
from dvc.config import Config
from dvc.dependency.base import DependencyBase
from dvc.dependency.s3 import DependencyS3
from dvc.dependency.gs import DependencyGS
from dvc.dependency.local import DependencyLOCAL
from dvc.dependency.hdfs import DependencyHDFS
from dvc.remote import Remote
from dvc.remote.local import RemoteLOCAL
from dvc.remote.s3 import RemoteS3
from dvc.remote.gs import RemoteGS
from dvc.remote.ssh import RemoteSSH
from dvc.remote.hdfs import RemoteHDFS
DEPS = [DependencyHDFS, DependencyS3, DependencyGS, DependencyLOCAL]
DEP_MAP = {'': DependencyLOCAL,
's3': DependencyS3,
'gs': DependencyGS,
'hdfs': DependencyHDFS,}
SCHEMA = {
DependencyBase.PARAM_PATH: str,
schema.Optional(RemoteLOCAL.PARAM_MD5): schema.Or(str, None),
schema.Optional(RemoteS3.PARAM_ETAG): schema.Or(str, None),
schema.Optional(RemoteHDFS.PARAM_CHECKSUM): schema.Or(str, None),
}
def _get(stage, p, info):
parsed = urlparse(p)
if parsed.scheme == 'remote':
sect = stage.project.config._config[Config.SECTION_REMOTE_FMT.format(parsed.netloc)]
remote = Remote(stage.project, sect)
return DEP_MAP[remote.scheme](stage, p, info, remote=remote)
for d in DEPS:
if d.supported(p):
return d(stage, p, info)
raise DvcException('Dependency \'{}\' is not supported'.format(p))
def loadd_from(stage, d_list):
ret = []
for d in d_list:
p = d.pop(DependencyBase.PARAM_PATH)
ret.append(_get(stage, p, d))
return ret
def loads_from(stage, s_list):
ret = []
for s in s_list:
ret.append(_get(stage, s, {}))
return ret
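# Illustrative sketch (added; the paths below are made up):
# loads_from(stage, ['data/raw.csv', 's3://bucket/key'])
#   -> [DependencyLOCAL(...), DependencyS3(...)]
# loadd_from(stage, [{'path': 'data/raw.csv', 'md5': 'abc123'}])
#   -> [DependencyLOCAL(...)] carrying the saved md5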
# ===== file: openstack-heat-12.0.0/heat/engine/clients/os/nova.py (repo: scottwedge/OpenStack-Stein) =====
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import email
from email.mime import multipart
from email.mime import text
import os
import pkgutil
import string
from neutronclient.common import exceptions as q_exceptions
from novaclient import api_versions
from novaclient import client as nc
from novaclient import exceptions
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import netutils
import six
from six.moves.urllib import parse as urlparse
import tenacity
from heat.common import exception
from heat.common.i18n import _
from heat.engine.clients import client_exception
from heat.engine.clients import client_plugin
from heat.engine.clients import microversion_mixin
from heat.engine.clients import os as os_client
from heat.engine import constraints
LOG = logging.getLogger(__name__)
CLIENT_NAME = 'nova'
class NovaClientPlugin(microversion_mixin.MicroversionMixin,
client_plugin.ClientPlugin):
deferred_server_statuses = ['BUILD',
'HARD_REBOOT',
'PASSWORD',
'REBOOT',
'RESCUE',
'RESIZE',
'REVERT_RESIZE',
'SHUTOFF',
'SUSPENDED',
'VERIFY_RESIZE']
exceptions_module = exceptions
NOVA_API_VERSION = '2.1'
max_microversion = cfg.CONF.max_nova_api_microversion
service_types = [COMPUTE] = ['compute']
def _get_service_name(self):
return self.COMPUTE
def _create(self, version=None):
if not version:
# TODO(prazumovsky): remove all unexpected calls from tests and
# add default_version after that.
version = self.NOVA_API_VERSION
args = self._get_args(version)
client = nc.Client(version, **args)
return client
def _get_args(self, version):
endpoint_type = self._get_client_option(CLIENT_NAME, 'endpoint_type')
extensions = nc.discover_extensions(version)
return {
'session': self.context.keystone_session,
'extensions': extensions,
'endpoint_type': endpoint_type,
'service_type': self.COMPUTE,
'region_name': self._get_region_name(),
'http_log_debug': self._get_client_option(CLIENT_NAME,
'http_log_debug')
}
def get_max_microversion(self):
if not self.max_microversion:
client = self._create()
self.max_microversion = client.versions.get_current().version
return self.max_microversion
def is_version_supported(self, version):
api_ver = api_versions.get_api_version(version)
max_api_ver = api_versions.get_api_version(
self.get_max_microversion())
return max_api_ver >= api_ver
def is_not_found(self, ex):
return isinstance(ex, (exceptions.NotFound,
q_exceptions.NotFound))
def is_over_limit(self, ex):
return isinstance(ex, exceptions.OverLimit)
def is_bad_request(self, ex):
return isinstance(ex, exceptions.BadRequest)
def is_conflict(self, ex):
return isinstance(ex, exceptions.Conflict)
def is_unprocessable_entity(self, ex):
http_status = (getattr(ex, 'http_status', None) or
getattr(ex, 'code', None))
return (isinstance(ex, exceptions.ClientException) and
http_status == 422)
@tenacity.retry(
stop=tenacity.stop_after_attempt(
max(cfg.CONF.client_retry_limit + 1, 0)),
retry=tenacity.retry_if_exception(
client_plugin.retry_if_connection_err),
reraise=True)
def get_server(self, server):
"""Return fresh server object.
Substitutes Nova's NotFound for Heat's EntityNotFound,
to be returned to user as HTTP error.
"""
try:
return self.client().servers.get(server)
except exceptions.NotFound:
raise exception.EntityNotFound(entity='Server', name=server)
def fetch_server(self, server_id):
"""Fetch fresh server object from Nova.
Log warnings and return None for non-critical API errors.
Use this method in various ``check_*_complete`` resource methods,
where intermittent errors can be tolerated.
"""
server = None
try:
server = self.client().servers.get(server_id)
except exceptions.OverLimit as exc:
LOG.warning("Received an OverLimit response when "
"fetching server (%(id)s) : %(exception)s",
{'id': server_id,
'exception': exc})
except exceptions.ClientException as exc:
if ((getattr(exc, 'http_status', getattr(exc, 'code', None)) in
(500, 503))):
LOG.warning("Received the following exception when "
"fetching server (%(id)s) : %(exception)s",
{'id': server_id,
'exception': exc})
else:
raise
return server
def refresh_server(self, server):
"""Refresh server's attributes.
Also log warnings for non-critical API errors.
"""
try:
server.get()
except exceptions.OverLimit as exc:
LOG.warning("Server %(name)s (%(id)s) received an OverLimit "
"response during server.get(): %(exception)s",
{'name': server.name,
'id': server.id,
'exception': exc})
except exceptions.ClientException as exc:
if ((getattr(exc, 'http_status', getattr(exc, 'code', None)) in
(500, 503))):
LOG.warning('Server "%(name)s" (%(id)s) received the '
'following exception during server.get(): '
'%(exception)s',
{'name': server.name,
'id': server.id,
'exception': exc})
else:
raise
def get_ip(self, server, net_type, ip_version):
"""Return the server's IP of the given type and version."""
if net_type in server.addresses:
for ip in server.addresses[net_type]:
if ip['version'] == ip_version:
return ip['addr']
def get_status(self, server):
"""Return the server's status.
:param server: server object
:returns: status as a string
"""
# Some clouds append extra (STATUS) strings to the status, strip it
return server.status.split('(')[0]
def _check_active(self, server, res_name='Server'):
"""Check server status.
Accepts both server IDs and server objects.
Returns True if server is ACTIVE,
raises errors when server has an ERROR or unknown to Heat status,
returns False otherwise.
:param res_name: name of the resource to use in the exception message
"""
# not checking with is_uuid_like as most tests use strings e.g. '1234'
if isinstance(server, six.string_types):
server = self.fetch_server(server)
if server is None:
return False
else:
status = self.get_status(server)
else:
status = self.get_status(server)
if status != 'ACTIVE':
self.refresh_server(server)
status = self.get_status(server)
if status in self.deferred_server_statuses:
return False
elif status == 'ACTIVE':
return True
elif status == 'ERROR':
fault = getattr(server, 'fault', {})
raise exception.ResourceInError(
resource_status=status,
status_reason=_("Message: %(message)s, Code: %(code)s") % {
'message': fault.get('message', _('Unknown')),
'code': fault.get('code', _('Unknown'))
})
else:
raise exception.ResourceUnknownStatus(
resource_status=server.status,
result=_('%s is not active') % res_name)
def find_flavor_by_name_or_id(self, flavor):
"""Find the specified flavor by name or id.
:param flavor: the name of the flavor to find
:returns: the id of :flavor:
"""
return self._find_flavor_id(self.context.tenant_id,
flavor)
@os_client.MEMOIZE_FINDER
def _find_flavor_id(self, tenant_id, flavor):
# tenant id in the signature is used for the memoization key,
# that would differentiate similar resource names across tenants.
return self.get_flavor(flavor).id
def get_flavor(self, flavor_identifier):
"""Get the flavor object for the specified flavor name or id.
:param flavor_identifier: the name or id of the flavor to find
:returns: a flavor object with name or id :flavor:
"""
try:
flavor = self.client().flavors.get(flavor_identifier)
except exceptions.NotFound:
flavor = self.client().flavors.find(name=flavor_identifier)
return flavor
def get_host(self, host_name):
"""Get the host id specified by name.
:param host_name: the name of host to find
:returns: the list of match hosts
:raises exception.EntityNotFound:
"""
host_list = self.client().hosts.list()
for host in host_list:
if host.host_name == host_name and host.service == self.COMPUTE:
return host
raise exception.EntityNotFound(entity='Host', name=host_name)
def get_keypair(self, key_name):
"""Get the public key specified by :key_name:
:param key_name: the name of the key to look for
:returns: the keypair (name, public_key) for :key_name:
:raises exception.EntityNotFound:
"""
try:
return self.client().keypairs.get(key_name)
except exceptions.NotFound:
raise exception.EntityNotFound(entity='Key', name=key_name)
def build_userdata(self, metadata, userdata=None, instance_user=None,
user_data_format='HEAT_CFNTOOLS'):
"""Build multipart data blob for CloudInit.
Data blob includes user-supplied Metadata, user data, and the required
Heat in-instance configuration.
        :param metadata: the Heat in-instance metadata to embed as cfn-init-data
        :type metadata: dict or None
:param userdata: user data string
:type userdata: str or None
:param instance_user: the user to create on the server
:type instance_user: string
:param user_data_format: Format of user data to return
:type user_data_format: string
:returns: multipart mime as a string
"""
if user_data_format == 'RAW':
return userdata
is_cfntools = user_data_format == 'HEAT_CFNTOOLS'
is_software_config = user_data_format == 'SOFTWARE_CONFIG'
def make_subpart(content, filename, subtype=None):
if subtype is None:
subtype = os.path.splitext(filename)[0]
if content is None:
content = ''
try:
content.encode('us-ascii')
charset = 'us-ascii'
except UnicodeEncodeError:
charset = 'utf-8'
msg = (text.MIMEText(content, _subtype=subtype, _charset=charset)
if subtype else text.MIMEText(content, _charset=charset))
msg.add_header('Content-Disposition', 'attachment',
filename=filename)
return msg
def read_cloudinit_file(fn):
return pkgutil.get_data(
'heat', 'cloudinit/%s' % fn).decode('utf-8')
if instance_user:
config_custom_user = 'user: %s' % instance_user
# FIXME(shadower): compatibility workaround for cloud-init 0.6.3.
# We can drop this once we stop supporting 0.6.3 (which ships
# with Ubuntu 12.04 LTS).
#
# See bug https://bugs.launchpad.net/heat/+bug/1257410
boothook_custom_user = r"""useradd -m %s
echo -e '%s\tALL=(ALL)\tNOPASSWD: ALL' >> /etc/sudoers
""" % (instance_user, instance_user)
else:
config_custom_user = ''
boothook_custom_user = ''
cloudinit_config = string.Template(
read_cloudinit_file('config')).safe_substitute(
add_custom_user=config_custom_user)
cloudinit_boothook = string.Template(
read_cloudinit_file('boothook.sh')).safe_substitute(
add_custom_user=boothook_custom_user)
attachments = [(cloudinit_config, 'cloud-config'),
(cloudinit_boothook, 'boothook.sh', 'cloud-boothook'),
(read_cloudinit_file('part_handler.py'),
'part-handler.py')]
if is_cfntools:
attachments.append((userdata, 'cfn-userdata', 'x-cfninitdata'))
elif is_software_config:
# attempt to parse userdata as a multipart message, and if it
# is, add each part as an attachment
userdata_parts = None
try:
userdata_parts = email.message_from_string(userdata)
except Exception:
pass
if userdata_parts and userdata_parts.is_multipart():
for part in userdata_parts.get_payload():
attachments.append((part.get_payload(),
part.get_filename(),
part.get_content_subtype()))
else:
attachments.append((userdata, ''))
if is_cfntools:
attachments.append((read_cloudinit_file('loguserdata.py'),
'loguserdata.py', 'x-shellscript'))
if metadata:
attachments.append((jsonutils.dumps(metadata),
'cfn-init-data', 'x-cfninitdata'))
if is_cfntools:
heat_client_plugin = self.context.clients.client_plugin('heat')
cfn_md_url = heat_client_plugin.get_cfn_metadata_server_url()
attachments.append((cfn_md_url,
'cfn-metadata-server', 'x-cfninitdata'))
# Create a boto config which the cfntools on the host use to know
# where the cfn API is to be accessed
cfn_url = urlparse.urlparse(cfn_md_url)
is_secure = cfg.CONF.instance_connection_is_secure
vcerts = cfg.CONF.instance_connection_https_validate_certificates
boto_cfg = "\n".join(["[Boto]",
"debug = 0",
"is_secure = %s" % is_secure,
"https_validate_certificates = %s" % vcerts,
"cfn_region_name = heat",
"cfn_region_endpoint = %s" %
cfn_url.hostname])
attachments.append((boto_cfg,
'cfn-boto-cfg', 'x-cfninitdata'))
subparts = [make_subpart(*args) for args in attachments]
mime_blob = multipart.MIMEMultipart(_subparts=subparts)
return mime_blob.as_string()
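    # Illustrative call (added; values are placeholders), e.g.
    #   plugin.build_userdata({'deploy': 1}, '#!/bin/sh\necho hi',
    #                         user_data_format='SOFTWARE_CONFIG')
    # returns a multipart MIME string that cloud-init can consume.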
def check_delete_server_complete(self, server_id):
"""Wait for server to disappear from Nova."""
try:
server = self.fetch_server(server_id)
except Exception as exc:
self.ignore_not_found(exc)
return True
if not server:
return False
task_state_in_nova = getattr(server, 'OS-EXT-STS:task_state', None)
# the status of server won't change until the delete task has done
if task_state_in_nova == 'deleting':
return False
status = self.get_status(server)
if status == 'DELETED':
return True
if status == 'SOFT_DELETED':
self.client().servers.force_delete(server_id)
elif status == 'ERROR':
fault = getattr(server, 'fault', {})
message = fault.get('message', 'Unknown')
code = fault.get('code')
errmsg = _("Server %(name)s delete failed: (%(code)s) "
"%(message)s") % dict(name=server.name,
code=code,
message=message)
raise exception.ResourceInError(resource_status=status,
status_reason=errmsg)
return False
def rename(self, server, name):
"""Update the name for a server."""
server.update(name)
def resize(self, server_id, flavor_id):
"""Resize the server."""
server = self.fetch_server(server_id)
if server:
server.resize(flavor_id)
return True
else:
return False
def check_resize(self, server_id, flavor):
"""Verify that a resizing server is properly resized.
If that's the case, confirm the resize, if not raise an error.
"""
server = self.fetch_server(server_id)
# resize operation is asynchronous so the server resize may not start
# when checking server status (the server may stay ACTIVE instead
# of RESIZE).
if not server or server.status in ('RESIZE', 'ACTIVE'):
return False
if server.status == 'VERIFY_RESIZE':
return True
else:
raise exception.Error(
_("Resizing to '%(flavor)s' failed, status '%(status)s'") %
dict(flavor=flavor, status=server.status))
def verify_resize(self, server_id):
server = self.fetch_server(server_id)
if not server:
return False
status = self.get_status(server)
if status == 'VERIFY_RESIZE':
server.confirm_resize()
return True
else:
msg = _("Could not confirm resize of server %s") % server_id
raise exception.ResourceUnknownStatus(
result=msg, resource_status=status)
def check_verify_resize(self, server_id):
server = self.fetch_server(server_id)
if not server:
return False
status = self.get_status(server)
if status == 'ACTIVE':
return True
if status == 'VERIFY_RESIZE':
return False
else:
msg = _("Confirm resize for server %s failed") % server_id
raise exception.ResourceUnknownStatus(
result=msg, resource_status=status)
def rebuild(self, server_id, image_id, password=None,
preserve_ephemeral=False, meta=None, files=None):
"""Rebuild the server and call check_rebuild to verify."""
server = self.fetch_server(server_id)
if server:
server.rebuild(image_id, password=password,
preserve_ephemeral=preserve_ephemeral,
meta=meta, files=files)
return True
else:
return False
def check_rebuild(self, server_id):
"""Verify that a rebuilding server is rebuilt.
Raise error if it ends up in an ERROR state.
"""
server = self.fetch_server(server_id)
if server is None or server.status == 'REBUILD':
return False
if server.status == 'ERROR':
raise exception.Error(
_("Rebuilding server failed, status '%s'") % server.status)
else:
return True
def meta_serialize(self, metadata):
"""Serialize non-string metadata values before sending them to Nova."""
if not isinstance(metadata, collections.Mapping):
raise exception.StackValidationFailed(message=_(
"nova server metadata needs to be a Map."))
return dict((key, (value if isinstance(value,
six.string_types)
else jsonutils.dumps(value))
) for (key, value) in metadata.items())
def meta_update(self, server, metadata):
"""Delete/Add the metadata in nova as needed."""
metadata = self.meta_serialize(metadata)
current_md = server.metadata
to_del = sorted(set(current_md) - set(metadata))
client = self.client()
if len(to_del) > 0:
client.servers.delete_meta(server, to_del)
client.servers.set_meta(server, metadata)
def server_to_ipaddress(self, server):
"""Return the server's IP address, fetching it from Nova."""
try:
server = self.client().servers.get(server)
except exceptions.NotFound as ex:
LOG.warning('Instance (%(server)s) not found: %(ex)s',
{'server': server, 'ex': ex})
else:
for n in sorted(server.networks, reverse=True):
if len(server.networks[n]) > 0:
return server.networks[n][0]
@tenacity.retry(
stop=tenacity.stop_after_attempt(
max(cfg.CONF.client_retry_limit + 1, 0)),
retry=tenacity.retry_if_exception(
client_plugin.retry_if_connection_err),
reraise=True)
def absolute_limits(self):
"""Return the absolute limits as a dictionary."""
limits = self.client().limits.get()
return dict([(limit.name, limit.value)
for limit in list(limits.absolute)])
def get_console_urls(self, server):
"""Return dict-like structure of server's console urls.
The actual console url is lazily resolved on access.
"""
nc = self.client
class ConsoleUrls(collections.Mapping):
def __init__(self, server):
self.console_method = server.get_console_url
self.support_console_types = ['novnc', 'xvpvnc',
'spice-html5', 'rdp-html5',
'serial', 'webmks']
def __getitem__(self, key):
try:
if key not in self.support_console_types:
raise exceptions.UnsupportedConsoleType(key)
if key == 'webmks':
data = nc().servers.get_console_url(
server, key)
else:
data = self.console_method(key)
console_data = data.get(
'remote_console', data.get('console'))
url = console_data['url']
except exceptions.UnsupportedConsoleType as ex:
url = ex.message
except Exception as e:
url = _('Cannot get console url: %s') % six.text_type(e)
return url
def __len__(self):
return len(self.support_console_types)
def __iter__(self):
return (key for key in self.support_console_types)
return ConsoleUrls(server)
def attach_volume(self, server_id, volume_id, device):
try:
va = self.client().volumes.create_server_volume(
server_id=server_id,
volume_id=volume_id,
device=device)
except Exception as ex:
if self.is_client_exception(ex):
raise exception.Error(_(
"Failed to attach volume %(vol)s to server %(srv)s "
"- %(err)s") % {'vol': volume_id,
'srv': server_id,
'err': ex})
else:
raise
return va.id
def detach_volume(self, server_id, attach_id):
# detach the volume using volume_attachment
try:
self.client().volumes.delete_server_volume(server_id, attach_id)
except Exception as ex:
if not (self.is_not_found(ex)
or self.is_bad_request(ex)):
raise exception.Error(
_("Could not detach attachment %(att)s "
"from server %(srv)s.") % {'srv': server_id,
'att': attach_id})
def check_detach_volume_complete(self, server_id, attach_id):
"""Check that nova server lost attachment.
This check is needed for immediate reattachment when updating:
there might be some time between cinder marking volume as 'available'
and nova removing attachment from its own objects, so we
check that nova already knows that the volume is detached.
"""
try:
self.client().volumes.get_server_volume(server_id, attach_id)
except Exception as ex:
self.ignore_not_found(ex)
LOG.info("Volume %(vol)s is detached from server %(srv)s",
{'vol': attach_id, 'srv': server_id})
return True
else:
LOG.debug("Server %(srv)s still has attachment %(att)s.",
{'att': attach_id, 'srv': server_id})
return False
def associate_floatingip(self, server_id, floatingip_id):
iface_list = self.fetch_server(server_id).interface_list()
if len(iface_list) == 0:
raise client_exception.InterfaceNotFound(id=server_id)
if len(iface_list) > 1:
LOG.warning("Multiple interfaces found for server %s, "
"using the first one.", server_id)
port_id = iface_list[0].port_id
fixed_ips = iface_list[0].fixed_ips
fixed_address = next(ip['ip_address'] for ip in fixed_ips
if netutils.is_valid_ipv4(ip['ip_address']))
request_body = {
'floatingip': {
'port_id': port_id,
'fixed_ip_address': fixed_address}}
self.clients.client('neutron').update_floatingip(floatingip_id,
request_body)
def dissociate_floatingip(self, floatingip_id):
request_body = {
'floatingip': {
'port_id': None,
'fixed_ip_address': None}}
self.clients.client('neutron').update_floatingip(floatingip_id,
request_body)
def associate_floatingip_address(self, server_id, fip_address):
fips = self.clients.client(
'neutron').list_floatingips(
floating_ip_address=fip_address)['floatingips']
if len(fips) == 0:
args = {'ip_address': fip_address}
raise client_exception.EntityMatchNotFound(entity='floatingip',
args=args)
self.associate_floatingip(server_id, fips[0]['id'])
def dissociate_floatingip_address(self, fip_address):
fips = self.clients.client(
'neutron').list_floatingips(
floating_ip_address=fip_address)['floatingips']
if len(fips) == 0:
args = {'ip_address': fip_address}
raise client_exception.EntityMatchNotFound(entity='floatingip',
args=args)
self.dissociate_floatingip(fips[0]['id'])
def interface_detach(self, server_id, port_id):
with self.ignore_not_found:
server = self.fetch_server(server_id)
if server:
server.interface_detach(port_id)
return True
def interface_attach(self, server_id, port_id=None, net_id=None, fip=None,
security_groups=None):
server = self.fetch_server(server_id)
if server:
attachment = server.interface_attach(port_id, net_id, fip)
if not port_id and security_groups:
props = {'security_groups': security_groups}
self.clients.client('neutron').update_port(
attachment.port_id, {'port': props})
return True
else:
return False
@tenacity.retry(
stop=tenacity.stop_after_attempt(
cfg.CONF.max_interface_check_attempts),
wait=tenacity.wait_exponential(multiplier=0.5, max=12.0),
retry=tenacity.retry_if_result(client_plugin.retry_if_result_is_false))
def check_interface_detach(self, server_id, port_id):
with self.ignore_not_found:
server = self.fetch_server(server_id)
if server:
interfaces = server.interface_list()
for iface in interfaces:
if iface.port_id == port_id:
return False
return True
@tenacity.retry(
stop=tenacity.stop_after_attempt(
cfg.CONF.max_interface_check_attempts),
wait=tenacity.wait_fixed(0.5),
retry=tenacity.retry_if_result(client_plugin.retry_if_result_is_false))
def check_interface_attach(self, server_id, port_id):
if not port_id:
return True
server = self.fetch_server(server_id)
if server:
interfaces = server.interface_list()
for iface in interfaces:
if iface.port_id == port_id:
return True
return False
@os_client.MEMOIZE_EXTENSIONS
def _list_extensions(self):
extensions = self.client().list_extensions.show_all()
return set(extension.alias for extension in extensions)
def has_extension(self, alias):
"""Check if specific extension is present."""
return alias in self._list_extensions()
class NovaBaseConstraint(constraints.BaseCustomConstraint):
resource_client_name = CLIENT_NAME
class ServerConstraint(NovaBaseConstraint):
resource_getter_name = 'get_server'
class KeypairConstraint(NovaBaseConstraint):
resource_getter_name = 'get_keypair'
def validate_with_client(self, client, key_name):
if not key_name:
# Don't validate empty key, which can happen when you
# use a KeyPair resource
return True
super(KeypairConstraint, self).validate_with_client(client, key_name)
class FlavorConstraint(NovaBaseConstraint):
expected_exceptions = (exceptions.NotFound,)
resource_getter_name = 'find_flavor_by_name_or_id'
class HostConstraint(NovaBaseConstraint):
resource_getter_name = 'get_host'
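# Sketch (added; not part of Heat) of how a resource handler typically drives
# this plugin; the `client_plugin()` wiring below is assumed.
#
# plugin = self.client_plugin()          # NovaClientPlugin instance
# server = plugin.get_server(server_id)  # raises EntityNotFound if gone
# if plugin._check_active(server):
#     plugin.meta_update(server, {'heat-stack': 'demo'})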
# ===== file: venv/lib/python3.6/io.py (repo: hluffy/energy): content was only a symlink path, dropped =====
"[email protected]"
] | |
ada4e614cc5df9247d515e08a82694f458afec8b | 4d0cf37d15fda02881c5b8dcdb1f0076bb31c778 | /lifelike/utils.py | 333c2bc8c859c626173cd738f634a22aa3c49bef | [
"MIT"
] | permissive | sudoentropy/lifelike | b53d70e666d94fdd266ea3567aa9291baf42efdf | 2af3a9d25e777f67648f772f805ee66d6982bab0 | refs/heads/master | 2023-01-19T05:28:35.152335 | 2020-11-15T02:48:30 | 2020-11-15T02:48:30 | 290,666,687 | 0 | 0 | MIT | 2020-08-27T03:33:52 | 2020-08-27T03:33:51 | null | UTF-8 | Python | false | false | 620 | py | from functools import wraps
import cloudpickle as pickle
def must_be_compiled_first(function):
@wraps(function)
def f(self, *args, **kwargs):
if not self.is_compiled:
raise ValueError(
"You must run .compile first on the model before using this method."
)
return function(self, *args, **kwargs)
return f
def dump(model, filepath):
"""filepath is a string"""
with open(filepath, "wb") as f:
pickle.dump(model, f)
def load(filepath):
"""filepath is a string"""
with open(filepath, "rb") as f:
return pickle.load(f)
| [
"[email protected]"
] | |
256e9f2acc7001d59650a9633bf384d5377f1b05 | 66b82b6b95dd4eb85f037ed8c70a9dc0cccc16e4 | /app/view_models/book.py | 91e719be3070dc400a9b4ce0fd4a227e56128885 | [] | no_license | jpch89/fishbook | 6c359b5efec88cbb414b72504a9131f924fe5f60 | 97db589046b9d59107033fa0ba9c071e63b6fcc0 | refs/heads/master | 2022-12-10T17:39:45.835372 | 2019-03-07T08:53:13 | 2019-03-07T08:53:13 | 144,078,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,148 | py | # -*- coding: utf-8 -*-
# @Author: jpch89
# @Time: 2018/8/14 22:44
class BookViewModel:
def __init__(self, book):
self.title = book['title']
self.publisher = book['publisher']
self.author = '、'.join(book['author'])
self.image = book['image']
self.price = book['price']
self.summary = book['summary']
self.isbn = book['isbn']
self.pages = book['pages']
self.pubdate = book['pubdate']
self.binding = book['binding']
@property
def intro(self):
intros = filter(lambda x: True if x else False,
[self.author, self.publisher, self.price])
return ' / '.join(intros)
class BookCollection:
def __init__(self):
self.total = 0
self.books = []
self.keyword = ''
def fill(self, yushu_book, keyword):
self.total = yushu_book.total
self.keyword = keyword
self.books = [BookViewModel(book) for book in yushu_book.books]
class _BookViewModel:
@classmethod
def package_single(cls, data, keyword):
returned = {
'books': [],
'total': 0,
'keyword': keyword
}
if data:
returned['total'] = 1
returned['books'] = [cls.__cut_book_data(data)]
return returned
@classmethod
def package_collection(cls, data, keyword):
returned = {
'books': [],
'total': 0,
'keyword': keyword
}
if data:
returned['total'] = data['total']
returned['books'] = [cls.__cut_book_data(book) for book in data['books']]
return returned
@classmethod
def __cut_book_data(cls, data):
book = {
'title': data['title'],
'publisher': data['publisher'],
'pages': data['pages'] or '',
'author': '、'.join(data['author']),
'price': data['price'],
'summary': data['summary'] or '',
'image': data['image']
}
return book
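# Usage sketch (added; `yushu_book` mimics the YuShu book-API wrapper used elsewhere):
# collection = BookCollection()
# collection.fill(yushu_book, keyword='python')
# print([b.intro for b in collection.books])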
# ===== file: Language/python/grammer/decorator/6_func_impl.py (repo: chengyi818/kata) =====
#!/usr/bin/env python3
# Author: ChengYi
# Mail: [email protected]
# created time: Thu 21 Sep 2017 10:29:02 AM CST
def make_bold(func):
print('Initialize')
def wrapper():
print('Call')
return '<b>{}</b>'.format(func())
return wrapper
def get_content():
return "hello world"
@make_bold
def get_content2():
return "hello world"
if __name__ == "__main__":
get_content = make_bold(get_content)
print(type(get_content))
print(get_content())
print(get_content2())
print(type(get_content2))
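# Expected output: the module-level @make_bold runs at import time, so the
# first 'Initialize' is printed before __main__ wraps get_content manually:
#
#     Initialize
#     Initialize
#     <class 'function'>
#     Call
#     <b>hello world</b>
#     Call
#     <b>hello world</b>
#     <class 'function'>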
| [
"[email protected]"
] | |
16529f3d07c226ff0580848d78c9cdaa4cb386d7 | c5222000bf859009850452ec7bcf1533f7bf59f7 | /color_picker.py | 441acc9eaccbd29186422b5dc395c9f72cf5debc | [] | no_license | nss-day-cohort-25/07-06-orientation-classes | 38b016995c0a6273268d07104c0db2124383592a | 9ed6046550e7b5254f8237efab47a057b74c94de | refs/heads/master | 2020-03-22T14:52:01.360486 | 2018-07-09T17:31:10 | 2018-07-09T17:31:10 | 140,211,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 283 | py | class ColorPicker():
def __init__(self, primary_color, **kwargs):
self.primary_color = primary_color
for key, value in kwargs.items():
setattr(self, key + "_color", value)
def get_colors(self):
return {k: v for k, v in self.__dict__.items() if 'color' in k}
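# Illustrative usage sketch (hypothetical color values):
#
#     picker = ColorPicker('red', accent='blue', border='green')
#     picker.get_colors()
#     # -> {'primary_color': 'red', 'accent_color': 'blue', 'border_color': 'green'}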
| [
"[email protected]"
] | |
775999792fcec355ac984e6f1f97f26fbf4e854b | 7950c4faf15ec1dc217391d839ddc21efd174ede | /leetcode-cn/2038.0_Remove_Colored_Pieces_if_Both_Neighbors_are_the_Same_Color.py | aa42c48cf130143120865f6675418d6da1b8c5b4 | [] | no_license | lixiang2017/leetcode | f462ecd269c7157aa4f5854f8c1da97ca5375e39 | f93380721b8383817fe2b0d728deca1321c9ef45 | refs/heads/master | 2023-08-25T02:56:58.918792 | 2023-08-22T16:43:36 | 2023-08-22T16:43:36 | 153,090,613 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,461 | py | '''
T: O(N)
S: O(1)
Runtime: 112 ms, beats 98.98% of Python3 submissions
Memory: 15.6 MB, beats 28.57% of Python3 submissions
Test cases passed: 83 / 83
'''
class Solution:
def winnerOfGame(self, colors: str) -> bool:
a, b, pre = 0, 0, None
part_a = part_b = 0
for ch in colors:
if ch == pre:
if ch == 'A':
part_a += 1
else:
part_b += 1
else:
if ch == 'A':
if part_b >= 3:
b += part_b - 2
pre = 'A'
part_a = 1
else:
if part_a >= 3:
a += part_a - 2
pre = 'B'
part_b = 1
# do not forget last part!!!
if pre == 'A' and part_a >= 3:
a += part_a - 2
if pre == 'B' and part_b >= 3:
b += part_b - 2
return a > b
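# Illustrative check: in "AAABABB" Alice has one removable 'A' (the middle
# of the "AAA" run) while Bob has no "BBB" run, so a=1 > b=0 and Alice wins:
#     Solution().winnerOfGame("AAABABB")  # -> True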
'''
T: O(N)
S: O(1)
Runtime: 220 ms, beats 52.04% of Python3 submissions
Memory: 15.5 MB, beats 55.10% of Python3 submissions
Test cases passed: 83 / 83
'''
class Solution:
def winnerOfGame(self, colors: str) -> bool:
a = b = 0
n = len(colors)
for i in range(n - 2): # windows i..i+2; the last window starts at n - 3
# i, i + 1, i + 2
part = colors[i: i + 3]
if part == "AAA":
a += 1
elif part == "BBB":
b += 1
return a > b
'''
Runtime: 240 ms, beats 47.96% of Python3 submissions
Memory: 15.5 MB, beats 45.92% of Python3 submissions
Test cases passed: 83 / 83
'''
class Solution:
def winnerOfGame(self, colors: str) -> bool:
a = b = 0
n, c = len(colors), colors
for i in range(n - 2): # windows i..i+2; the last window starts at n - 3
# i, i + 1, i + 2
if c[i] == 'A' and c[i + 1] == 'A' and c[i + 2] == 'A':
a += 1
elif c[i] == 'B' and c[i + 1] == 'B' and c[i + 2] == 'B':
b += 1
return a > b
'''
DP, T: O(N), S: O(1)
Runtime: 336 ms, beats 23.47% of Python3 submissions
Memory: 15.6 MB, beats 36.74% of Python3 submissions
Test cases passed: 83 / 83
'''
class Solution:
def winnerOfGame(self, colors: str) -> bool:
# a b
freq = [0, 0]
move = [0, 0]
c = 'C'
for ch in colors:
if ch != c:
c = ch
freq[ord(ch) - ord('A')] = 1
else:
freq[ord(ch) - ord('A')] += 1
if freq[ord(ch) - ord('A')] >= 3:
move[ord(ch) - ord('A')] += 1
return move[0] > move[1]
'''
DP, T: O(N), S: O(1)
Runtime: 164 ms, beats 78.57% of Python3 submissions
Memory: 15.6 MB, beats 31.63% of Python3 submissions
Test cases passed: 83 / 83
'''
class Solution:
def winnerOfGame(self, colors: str) -> bool:
# a b
move = [0, 0]
c, cnt = 'C', 0
for ch in colors:
if ch != c:
c = ch
cnt = 1
else:
cnt += 1
if cnt >= 3:
move[ord(ch) - ord('A')] += 1
return move[0] > move[1]
| [
"[email protected]"
] | |
bf375adb961dbcf915e1628a1b202e385aba4351 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/226/users/4422/codes/1585_842.py | db14692d52f1f0a69e0aaedd361b0687c76c6431 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | # Teste seu codigo aos poucos.
# Nao teste tudo no final, pois fica mais dificil de identificar erros.
# Nao se intimide com as mensagens de erro. Elas ajudam a corrigir seu codigo.
x = int(input("Digite um numero: "))
print(x)
| [
"[email protected]"
] | |
89e76250fd7a81725656d24793ecea39670bdd46 | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/sympy_sympy/sympy-master/sympy/physics/secondquant.py | 8dba14cc405a810a1bba93a2e215cdeee27fee98 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 88,597 | py | """
Second quantization operators and states for bosons.
This follows the formulation of Fetter and Walecka, "Quantum Theory
of Many-Particle Systems."
"""
from __future__ import print_function, division
from collections import defaultdict
from sympy import (Add, Basic, cacheit, Dummy, Expr, Function, I,
KroneckerDelta, Mul, Pow, S, sqrt, Symbol, sympify, Tuple,
zeros)
from sympy.printing.str import StrPrinter
from sympy.core.compatibility import range
from sympy.utilities.iterables import has_dups
from sympy.utilities import default_sort_key
__all__ = [
'Dagger',
'KroneckerDelta',
'BosonicOperator',
'AnnihilateBoson',
'CreateBoson',
'AnnihilateFermion',
'CreateFermion',
'FockState',
'FockStateBra',
'FockStateKet',
'FockStateBosonKet',
'FockStateBosonBra',
'BBra',
'BKet',
'FBra',
'FKet',
'F',
'Fd',
'B',
'Bd',
'apply_operators',
'InnerProduct',
'BosonicBasis',
'VarBosonicBasis',
'FixedBosonicBasis',
'Commutator',
'matrix_rep',
'contraction',
'wicks',
'NO',
'evaluate_deltas',
'AntiSymmetricTensor',
'substitute_dummies',
'PermutationOperator',
'simplify_index_permutations',
]
class SecondQuantizationError(Exception):
pass
class AppliesOnlyToSymbolicIndex(SecondQuantizationError):
pass
class ContractionAppliesOnlyToFermions(SecondQuantizationError):
pass
class ViolationOfPauliPrinciple(SecondQuantizationError):
pass
class SubstitutionOfAmbigousOperatorFailed(SecondQuantizationError):
pass
class WicksTheoremDoesNotApply(SecondQuantizationError):
pass
class Dagger(Expr):
"""
Hermitian conjugate of creation/annihilation operators.
Examples
========
>>> from sympy import I
>>> from sympy.physics.secondquant import Dagger, B, Bd
>>> Dagger(2*I)
-2*I
>>> Dagger(B(0))
CreateBoson(0)
>>> Dagger(Bd(0))
AnnihilateBoson(0)
"""
def __new__(cls, arg):
arg = sympify(arg)
r = cls.eval(arg)
if isinstance(r, Basic):
return r
obj = Basic.__new__(cls, arg)
return obj
@classmethod
def eval(cls, arg):
"""
Evaluates the Dagger instance.
Examples
========
>>> from sympy import I
>>> from sympy.physics.secondquant import Dagger, B, Bd
>>> Dagger(2*I)
-2*I
>>> Dagger(B(0))
CreateBoson(0)
>>> Dagger(Bd(0))
AnnihilateBoson(0)
The eval() method is called automatically.
"""
try:
d = arg._dagger_()
except AttributeError:
if isinstance(arg, Basic):
if arg.is_Add:
return Add(*tuple(map(Dagger, arg.args)))
if arg.is_Mul:
return Mul(*tuple(map(Dagger, reversed(arg.args))))
if arg.is_Number:
return arg
if arg.is_Pow:
return Pow(Dagger(arg.args[0]), arg.args[1])
if arg == I:
return -arg
else:
return None
else:
return d
def _dagger_(self):
return self.args[0]
class TensorSymbol(Expr):
is_commutative = True
class AntiSymmetricTensor(TensorSymbol):
"""Stores upper and lower indices in separate Tuple's.
Each group of indices is assumed to be antisymmetric.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import AntiSymmetricTensor
>>> i, j = symbols('i j', below_fermi=True)
>>> a, b = symbols('a b', above_fermi=True)
>>> AntiSymmetricTensor('v', (a, i), (b, j))
AntiSymmetricTensor(v, (a, i), (b, j))
>>> AntiSymmetricTensor('v', (i, a), (b, j))
-AntiSymmetricTensor(v, (a, i), (b, j))
As you can see, the indices are automatically sorted to a canonical form.
"""
def __new__(cls, symbol, upper, lower):
try:
upper, signu = _sort_anticommuting_fermions(
upper, key=cls._sortkey)
lower, signl = _sort_anticommuting_fermions(
lower, key=cls._sortkey)
except ViolationOfPauliPrinciple:
return S.Zero
symbol = sympify(symbol)
upper = Tuple(*upper)
lower = Tuple(*lower)
if (signu + signl) % 2:
return -TensorSymbol.__new__(cls, symbol, upper, lower)
else:
return TensorSymbol.__new__(cls, symbol, upper, lower)
@classmethod
def _sortkey(cls, index):
"""Key for sorting of indices.
particle < hole < general
FIXME: This is a bottle-neck, can we do it faster?
"""
h = hash(index)
if isinstance(index, Dummy):
if index.assumptions0.get('above_fermi'):
return (20, h)
elif index.assumptions0.get('below_fermi'):
return (21, h)
else:
return (22, h)
if index.assumptions0.get('above_fermi'):
return (10, h)
elif index.assumptions0.get('below_fermi'):
return (11, h)
else:
return (12, h)
def _latex(self, printer):
return "%s^{%s}_{%s}" % (
self.symbol,
"".join([ i.name for i in self.args[1]]),
"".join([ i.name for i in self.args[2]])
)
@property
def symbol(self):
"""
Returns the symbol of the tensor.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import AntiSymmetricTensor
>>> i, j = symbols('i,j', below_fermi=True)
>>> a, b = symbols('a,b', above_fermi=True)
>>> AntiSymmetricTensor('v', (a, i), (b, j))
AntiSymmetricTensor(v, (a, i), (b, j))
>>> AntiSymmetricTensor('v', (a, i), (b, j)).symbol
v
"""
return self.args[0]
@property
def upper(self):
"""
Returns the upper indices.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import AntiSymmetricTensor
>>> i, j = symbols('i,j', below_fermi=True)
>>> a, b = symbols('a,b', above_fermi=True)
>>> AntiSymmetricTensor('v', (a, i), (b, j))
AntiSymmetricTensor(v, (a, i), (b, j))
>>> AntiSymmetricTensor('v', (a, i), (b, j)).upper
(a, i)
"""
return self.args[1]
@property
def lower(self):
"""
Returns the lower indices.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import AntiSymmetricTensor
>>> i, j = symbols('i,j', below_fermi=True)
>>> a, b = symbols('a,b', above_fermi=True)
>>> AntiSymmetricTensor('v', (a, i), (b, j))
AntiSymmetricTensor(v, (a, i), (b, j))
>>> AntiSymmetricTensor('v', (a, i), (b, j)).lower
(b, j)
"""
return self.args[2]
def __str__(self):
return "%s(%s,%s)" % self.args
def doit(self, **kw_args):
"""
Returns self.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import AntiSymmetricTensor
>>> i, j = symbols('i,j', below_fermi=True)
>>> a, b = symbols('a,b', above_fermi=True)
>>> AntiSymmetricTensor('v', (a, i), (b, j)).doit()
AntiSymmetricTensor(v, (a, i), (b, j))
"""
return self
class SqOperator(Expr):
"""
Base class for Second Quantization operators.
"""
op_symbol = 'sq'
is_commutative = False
def __new__(cls, k):
obj = Basic.__new__(cls, sympify(k))
return obj
@property
def state(self):
"""
Returns the state index related to this operator.
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F, Fd, B, Bd
>>> p = Symbol('p')
>>> F(p).state
p
>>> Fd(p).state
p
>>> B(p).state
p
>>> Bd(p).state
p
"""
return self.args[0]
@property
def is_symbolic(self):
"""
Returns True if the state is a symbol (as opposed to a number).
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> p = Symbol('p')
>>> F(p).is_symbolic
True
>>> F(1).is_symbolic
False
"""
if self.state.is_Integer:
return False
else:
return True
def doit(self, **kw_args):
"""
FIXME: hack to prevent crash further up...
"""
return self
def __repr__(self):
return NotImplemented
def __str__(self):
return "%s(%r)" % (self.op_symbol, self.state)
def apply_operator(self, state):
"""
Applies an operator to itself.
"""
raise NotImplementedError('implement apply_operator in a subclass')
class BosonicOperator(SqOperator):
pass
class Annihilator(SqOperator):
pass
class Creator(SqOperator):
pass
class AnnihilateBoson(BosonicOperator, Annihilator):
"""
Bosonic annihilation operator.
Examples
========
>>> from sympy.physics.secondquant import B
>>> from sympy.abc import x
>>> B(x)
AnnihilateBoson(x)
"""
op_symbol = 'b'
def _dagger_(self):
return CreateBoson(self.state)
def apply_operator(self, state):
"""
Apply state to self if self is not symbolic and state is a FockStateKet, else
multiply self by state.
Examples
========
>>> from sympy.physics.secondquant import B, BKet
>>> from sympy.abc import x, y, n
>>> B(x).apply_operator(y)
y*AnnihilateBoson(x)
>>> B(0).apply_operator(BKet((n,)))
sqrt(n)*FockStateBosonKet((n - 1,))
"""
if not self.is_symbolic and isinstance(state, FockStateKet):
element = self.state
amp = sqrt(state[element])
return amp*state.down(element)
else:
return Mul(self, state)
def __repr__(self):
return "AnnihilateBoson(%s)" % self.state
class CreateBoson(BosonicOperator, Creator):
"""
Bosonic creation operator.
"""
op_symbol = 'b+'
def _dagger_(self):
return AnnihilateBoson(self.state)
def apply_operator(self, state):
"""
Apply state to self if self is not symbolic and state is a FockStateKet, else
multiply self by state.
Examples
========
>>> from sympy.physics.secondquant import B, Dagger, BKet
>>> from sympy.abc import x, y, n
>>> Dagger(B(x)).apply_operator(y)
y*CreateBoson(x)
>>> B(0).apply_operator(BKet((n,)))
sqrt(n)*FockStateBosonKet((n - 1,))
"""
if not self.is_symbolic and isinstance(state, FockStateKet):
element = self.state
amp = sqrt(state[element] + 1)
return amp*state.up(element)
else:
return Mul(self, state)
def __repr__(self):
return "CreateBoson(%s)" % self.state
B = AnnihilateBoson
Bd = CreateBoson
class FermionicOperator(SqOperator):
@property
def is_restricted(self):
"""
Is this FermionicOperator restricted with respect to fermi level?
Return values:
1 : restricted to orbits above fermi
0 : no restriction
-1 : restricted to orbits below fermi
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F, Fd
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_restricted
1
>>> Fd(a).is_restricted
1
>>> F(i).is_restricted
-1
>>> Fd(i).is_restricted
-1
>>> F(p).is_restricted
0
>>> Fd(p).is_restricted
0
"""
ass = self.args[0].assumptions0
if ass.get("below_fermi"):
return -1
if ass.get("above_fermi"):
return 1
return 0
@property
def is_above_fermi(self):
"""
Does the index of this FermionicOperator allow values above fermi?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_above_fermi
True
>>> F(i).is_above_fermi
False
>>> F(p).is_above_fermi
True
The same applies to creation operators Fd
"""
return not self.args[0].assumptions0.get("below_fermi")
@property
def is_below_fermi(self):
"""
Does the index of this FermionicOperator allow values below fermi?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_below_fermi
False
>>> F(i).is_below_fermi
True
>>> F(p).is_below_fermi
True
The same applies to creation operators Fd
"""
return not self.args[0].assumptions0.get("above_fermi")
@property
def is_only_below_fermi(self):
"""
Is the index of this FermionicOperator restricted to values below fermi?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_only_below_fermi
False
>>> F(i).is_only_below_fermi
True
>>> F(p).is_only_below_fermi
False
The same applies to creation operators Fd
"""
return self.is_below_fermi and not self.is_above_fermi
@property
def is_only_above_fermi(self):
"""
Is the index of this FermionicOperator restricted to values above fermi?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_only_above_fermi
True
>>> F(i).is_only_above_fermi
False
>>> F(p).is_only_above_fermi
False
The same applies to creation operators Fd
"""
return self.is_above_fermi and not self.is_below_fermi
def _sortkey(self):
h = hash(self)
label = str(self.args[0])
if self.is_only_q_creator:
return 1, label, h
if self.is_only_q_annihilator:
return 4, label, h
if isinstance(self, Annihilator):
return 3, label, h
if isinstance(self, Creator):
return 2, label, h
class AnnihilateFermion(FermionicOperator, Annihilator):
"""
Fermionic annihilation operator.
"""
op_symbol = 'f'
def _dagger_(self):
return CreateFermion(self.state)
def apply_operator(self, state):
"""
Apply state to self if self is not symbolic and state is a FockStateKet, else
multiply self by state.
Examples
========
>>> from sympy.physics.secondquant import B, Dagger, BKet
>>> from sympy.abc import x, y, n
>>> Dagger(B(x)).apply_operator(y)
y*CreateBoson(x)
>>> B(0).apply_operator(BKet((n,)))
sqrt(n)*FockStateBosonKet((n - 1,))
"""
if isinstance(state, FockStateFermionKet):
element = self.state
return state.down(element)
elif isinstance(state, Mul):
c_part, nc_part = state.args_cnc()
if isinstance(nc_part[0], FockStateFermionKet):
element = self.state
return Mul(*(c_part + [nc_part[0].down(element)] + nc_part[1:]))
else:
return Mul(self, state)
else:
return Mul(self, state)
@property
def is_q_creator(self):
"""
Can we create a quasi-particle? (create hole or create particle)
If so, would that be above or below the fermi surface?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_q_creator
0
>>> F(i).is_q_creator
-1
>>> F(p).is_q_creator
-1
"""
if self.is_below_fermi:
return -1
return 0
@property
def is_q_annihilator(self):
"""
Can we destroy a quasi-particle? (annihilate hole or annihilate particle)
If so, would that be above or below the fermi surface?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=1)
>>> i = Symbol('i', below_fermi=1)
>>> p = Symbol('p')
>>> F(a).is_q_annihilator
1
>>> F(i).is_q_annihilator
0
>>> F(p).is_q_annihilator
1
"""
if self.is_above_fermi:
return 1
return 0
@property
def is_only_q_creator(self):
"""
Always create a quasi-particle? (create hole or create particle)
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_only_q_creator
False
>>> F(i).is_only_q_creator
True
>>> F(p).is_only_q_creator
False
"""
return self.is_only_below_fermi
@property
def is_only_q_annihilator(self):
"""
Always destroy a quasi-particle? (annihilate hole or annihilate particle)
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import F
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> F(a).is_only_q_annihilator
True
>>> F(i).is_only_q_annihilator
False
>>> F(p).is_only_q_annihilator
False
"""
return self.is_only_above_fermi
def __repr__(self):
return "AnnihilateFermion(%s)" % self.state
def _latex(self, printer):
return "a_{%s}" % self.state.name
class CreateFermion(FermionicOperator, Creator):
"""
Fermionic creation operator.
"""
op_symbol = 'f+'
def _dagger_(self):
return AnnihilateFermion(self.state)
def apply_operator(self, state):
"""
Apply state to self if self is not symbolic and state is a FockStateKet, else
multiply self by state.
Examples
========
>>> from sympy.physics.secondquant import B, Dagger, BKet
>>> from sympy.abc import x, y, n
>>> Dagger(B(x)).apply_operator(y)
y*CreateBoson(x)
>>> B(0).apply_operator(BKet((n,)))
sqrt(n)*FockStateBosonKet((n - 1,))
"""
if isinstance(state, FockStateFermionKet):
element = self.state
return state.up(element)
elif isinstance(state, Mul):
c_part, nc_part = state.args_cnc()
if isinstance(nc_part[0], FockStateFermionKet):
element = self.state
return Mul(*(c_part + [nc_part[0].up(element)] + nc_part[1:]))
return Mul(self, state)
@property
def is_q_creator(self):
"""
Can we create a quasi-particle? (create hole or create particle)
If so, would that be above or below the fermi surface?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import Fd
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> Fd(a).is_q_creator
1
>>> Fd(i).is_q_creator
0
>>> Fd(p).is_q_creator
1
"""
if self.is_above_fermi:
return 1
return 0
@property
def is_q_annihilator(self):
"""
Can we destroy a quasi-particle? (annihilate hole or annihilate particle)
If so, would that be above or below the fermi surface?
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import Fd
>>> a = Symbol('a', above_fermi=1)
>>> i = Symbol('i', below_fermi=1)
>>> p = Symbol('p')
>>> Fd(a).is_q_annihilator
0
>>> Fd(i).is_q_annihilator
-1
>>> Fd(p).is_q_annihilator
-1
"""
if self.is_below_fermi:
return -1
return 0
@property
def is_only_q_creator(self):
"""
Always create a quasi-particle? (create hole or create particle)
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import Fd
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> Fd(a).is_only_q_creator
True
>>> Fd(i).is_only_q_creator
False
>>> Fd(p).is_only_q_creator
False
"""
return self.is_only_above_fermi
@property
def is_only_q_annihilator(self):
"""
Always destroy a quasi-particle? (annihilate hole or annihilate particle)
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import Fd
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> Fd(a).is_only_q_annihilator
False
>>> Fd(i).is_only_q_annihilator
True
>>> Fd(p).is_only_q_annihilator
False
"""
return self.is_only_below_fermi
def __repr__(self):
return "CreateFermion(%s)" % self.state
def _latex(self, printer):
return "a^\\dagger_{%s}" % self.state.name
Fd = CreateFermion
F = AnnihilateFermion
class FockState(Expr):
"""
Many particle Fock state with a sequence of occupation numbers.
Anywhere you can have a FockState, you can also have S.Zero.
All code must check for this!
Base class to represent FockStates.
"""
is_commutative = False
def __new__(cls, occupations):
"""
occupations is a list with two possible meanings:
- For bosons it is a list of occupation numbers.
Element i is the number of particles in state i.
- For fermions it is a list of occupied orbits.
Element 0 is the state that was occupied first, element i
is the i'th occupied state.
"""
occupations = list(map(sympify, occupations))
obj = Basic.__new__(cls, Tuple(*occupations))
return obj
def __getitem__(self, i):
i = int(i)
return self.args[0][i]
def __repr__(self):
return ("FockState(%r)") % (self.args)
def __str__(self):
return "%s%r%s" % (self.lbracket, self._labels(), self.rbracket)
def _labels(self):
return self.args[0]
def __len__(self):
return len(self.args[0])
class BosonState(FockState):
"""
Base class for FockStateBoson(Ket/Bra).
"""
def up(self, i):
"""
Performs the action of a creation operator.
Examples
========
>>> from sympy.physics.secondquant import BBra
>>> b = BBra([1, 2])
>>> b
FockStateBosonBra((1, 2))
>>> b.up(1)
FockStateBosonBra((1, 3))
"""
i = int(i)
new_occs = list(self.args[0])
new_occs[i] = new_occs[i] + S.One
return self.__class__(new_occs)
def down(self, i):
"""
Performs the action of an annihilation operator.
Examples
========
>>> from sympy.physics.secondquant import BBra
>>> b = BBra([1, 2])
>>> b
FockStateBosonBra((1, 2))
>>> b.down(1)
FockStateBosonBra((1, 1))
"""
i = int(i)
new_occs = list(self.args[0])
if new_occs[i] == S.Zero:
return S.Zero
else:
new_occs[i] = new_occs[i] - S.One
return self.__class__(new_occs)
class FermionState(FockState):
"""
Base class for FockStateFermion(Ket/Bra).
"""
fermi_level = 0
def __new__(cls, occupations, fermi_level=0):
occupations = list(map(sympify, occupations))
if len(occupations) > 1:
try:
(occupations, sign) = _sort_anticommuting_fermions(
occupations, key=hash)
except ViolationOfPauliPrinciple:
return S.Zero
else:
sign = 0
cls.fermi_level = fermi_level
if cls._count_holes(occupations) > fermi_level:
return S.Zero
if sign % 2:
return S.NegativeOne*FockState.__new__(cls, occupations)
else:
return FockState.__new__(cls, occupations)
def up(self, i):
"""
Performs the action of a creation operator.
If below fermi we try to remove a hole,
if above fermi we try to create a particle.
if general index p we return Kronecker(p,i)*self
where i is a new symbol with restriction above or below.
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import FKet
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
>>> FKet([]).up(a)
FockStateFermionKet((a,))
A creator acting on vacuum below fermi vanishes
>>> FKet([]).up(i)
0
"""
present = i in self.args[0]
if self._only_above_fermi(i):
if present:
return S.Zero
else:
return self._add_orbit(i)
elif self._only_below_fermi(i):
if present:
return self._remove_orbit(i)
else:
return S.Zero
else:
if present:
hole = Dummy("i", below_fermi=True)
return KroneckerDelta(i, hole)*self._remove_orbit(i)
else:
particle = Dummy("a", above_fermi=True)
return KroneckerDelta(i, particle)*self._add_orbit(i)
def down(self, i):
"""
Performs the action of an annihilation operator.
If below fermi we try to create a hole,
if above fermi we try to remove a particle.
if general index p we return Kronecker(p,i)*self
where i is a new symbol with restriction above or below.
>>> from sympy import Symbol
>>> from sympy.physics.secondquant import FKet
>>> a = Symbol('a', above_fermi=True)
>>> i = Symbol('i', below_fermi=True)
>>> p = Symbol('p')
An annihilator acting on vacuum above fermi vanishes
>>> FKet([]).down(a)
0
Also below fermi, it vanishes, unless we specify a fermi level > 0
>>> FKet([]).down(i)
0
>>> FKet([],4).down(i)
FockStateFermionKet((i,))
"""
present = i in self.args[0]
if self._only_above_fermi(i):
if present:
return self._remove_orbit(i)
else:
return S.Zero
elif self._only_below_fermi(i):
if present:
return S.Zero
else:
return self._add_orbit(i)
else:
if present:
hole = Dummy("i", below_fermi=True)
return KroneckerDelta(i, hole)*self._add_orbit(i)
else:
particle = Dummy("a", above_fermi=True)
return KroneckerDelta(i, particle)*self._remove_orbit(i)
@classmethod
def _only_below_fermi(cls, i):
"""
Tests if given orbit is only below fermi surface.
If nothing can be concluded we return a conservative False.
"""
if i.is_number:
return i <= cls.fermi_level
if i.assumptions0.get('below_fermi'):
return True
return False
@classmethod
def _only_above_fermi(cls, i):
"""
Tests if given orbit is only above fermi surface.
If fermi level has not been set we return True.
If nothing can be concluded we return a conservative False.
"""
if i.is_number:
return i > cls.fermi_level
if i.assumptions0.get('above_fermi'):
return True
return not cls.fermi_level
def _remove_orbit(self, i):
"""
Removes particle/fills hole in orbit i. No input tests performed here.
"""
new_occs = list(self.args[0])
pos = new_occs.index(i)
del new_occs[pos]
if (pos) % 2:
return S.NegativeOne*self.__class__(new_occs, self.fermi_level)
else:
return self.__class__(new_occs, self.fermi_level)
def _add_orbit(self, i):
"""
Adds particle/creates hole in orbit i. No input tests performed here.
"""
return self.__class__((i,) + self.args[0], self.fermi_level)
@classmethod
def _count_holes(cls, list):
"""
returns number of identified hole states in list.
"""
return len([i for i in list if cls._only_below_fermi(i)])
def _negate_holes(self, list):
return tuple([-i if i <= self.fermi_level else i for i in list])
def __repr__(self):
if self.fermi_level:
return "FockStateKet(%r, fermi_level=%s)" % (self.args[0], self.fermi_level)
else:
return "FockStateKet(%r)" % (self.args[0],)
def _labels(self):
return self._negate_holes(self.args[0])
class FockStateKet(FockState):
"""
Representation of a ket.
"""
lbracket = '|'
rbracket = '>'
class FockStateBra(FockState):
"""
Representation of a bra.
"""
lbracket = '<'
rbracket = '|'
def __mul__(self, other):
if isinstance(other, FockStateKet):
return InnerProduct(self, other)
else:
return Expr.__mul__(self, other)
class FockStateBosonKet(BosonState, FockStateKet):
"""
Many particle Fock state with a sequence of occupation numbers.
Occupation numbers can be any integer >= 0.
Examples
========
>>> from sympy.physics.secondquant import BKet
>>> BKet([1, 2])
FockStateBosonKet((1, 2))
"""
def _dagger_(self):
return FockStateBosonBra(*self.args)
class FockStateBosonBra(BosonState, FockStateBra):
"""
Describes a collection of BosonBra particles.
Examples
========
>>> from sympy.physics.secondquant import BBra
>>> BBra([1, 2])
FockStateBosonBra((1, 2))
"""
def _dagger_(self):
return FockStateBosonKet(*self.args)
class FockStateFermionKet(FermionState, FockStateKet):
"""
Many-particle Fock state with a sequence of occupied orbits.
Each state can only have one particle, so we choose to store a list of
occupied orbits rather than a tuple with occupation numbers (zeros and ones).
States below the fermi level are holes, and are represented by negative labels
in the occupation list.
For symbolic state labels, the fermi_level caps the number of allowed hole-
states.
Examples
========
>>> from sympy.physics.secondquant import FKet
>>> FKet([1, 2]) #doctest: +SKIP
FockStateFermionKet((1, 2))
"""
def _dagger_(self):
return FockStateFermionBra(*self.args)
class FockStateFermionBra(FermionState, FockStateBra):
"""
See Also
========
FockStateFermionKet
Examples
========
>>> from sympy.physics.secondquant import FBra
>>> FBra([1, 2]) #doctest: +SKIP
FockStateFermionBra((1, 2))
"""
def _dagger_(self):
return FockStateFermionKet(*self.args)
BBra = FockStateBosonBra
BKet = FockStateBosonKet
FBra = FockStateFermionBra
FKet = FockStateFermionKet
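# Illustrative sketch (not part of the module): the Pauli principle makes a
# repeated fermion label collapse to S.Zero, and swapping two labels flips
# the sign, e.g.
#     FKet([1, 1])  ->  0
#     FKet([2, 1])  ->  -FockStateFermionKet((1, 2))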
def _apply_Mul(m):
"""
Take a Mul instance with operators and apply them to states.
This method applies all operators with integer state labels
to the actual states. For symbolic state labels, nothing is done.
When inner products of FockStates are encountered (like <a|b>),
they are converted to instances of InnerProduct.
This does not currently work on double inner products like
<a|b><c|d>.
If the argument is not a Mul, it is simply returned as is.
"""
if not isinstance(m, Mul):
return m
c_part, nc_part = m.args_cnc()
n_nc = len(nc_part)
if n_nc == 0 or n_nc == 1:
return m
else:
last = nc_part[-1]
next_to_last = nc_part[-2]
if isinstance(last, FockStateKet):
if isinstance(next_to_last, SqOperator):
if next_to_last.is_symbolic:
return m
else:
result = next_to_last.apply_operator(last)
if result == 0:
return S.Zero
else:
return _apply_Mul(Mul(*(c_part + nc_part[:-2] + [result])))
elif isinstance(next_to_last, Pow):
if isinstance(next_to_last.base, SqOperator) and \
next_to_last.exp.is_Integer:
if next_to_last.base.is_symbolic:
return m
else:
result = last
for i in range(next_to_last.exp):
result = next_to_last.base.apply_operator(result)
if result == 0:
break
if result == 0:
return S.Zero
else:
return _apply_Mul(Mul(*(c_part + nc_part[:-2] + [result])))
else:
return m
elif isinstance(next_to_last, FockStateBra):
result = InnerProduct(next_to_last, last)
if result == 0:
return S.Zero
else:
return _apply_Mul(Mul(*(c_part + nc_part[:-2] + [result])))
else:
return m
else:
return m
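# Illustrative sketch: a numeric-label annihilator acting on a ket is
# resolved immediately, e.g.
#     _apply_Mul(B(0)*BKet([2]))  ->  sqrt(2)*FockStateBosonKet((1,))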
def apply_operators(e):
"""
Take a sympy expression with operators and states and apply the operators.
Examples
========
>>> from sympy.physics.secondquant import apply_operators
>>> from sympy import sympify
>>> apply_operators(sympify(3)+4)
7
"""
e = e.expand()
muls = e.atoms(Mul)
subs_list = [(m, _apply_Mul(m)) for m in iter(muls)]
return e.subs(subs_list)
class InnerProduct(Basic):
"""
An unevaluated inner product between a bra and ket.
Currently this class just reduces things to a product of
Kronecker Deltas. In the future, we could introduce abstract
states like ``|a>`` and ``|b>``, and leave the inner product unevaluated as
``<a|b>``.
"""
is_commutative = True
def __new__(cls, bra, ket):
if not isinstance(bra, FockStateBra):
raise TypeError("must be a bra")
if not isinstance(ket, FockStateKet):
raise TypeError("must be a ket")
return cls.eval(bra, ket)
@classmethod
def eval(cls, bra, ket):
result = S.One
for i, j in zip(bra.args[0], ket.args[0]):
result *= KroneckerDelta(i, j)
if result == 0:
break
return result
@property
def bra(self):
"""Returns the bra part of the state"""
return self.args[0]
@property
def ket(self):
"""Returns the ket part of the state"""
return self.args[1]
def __repr__(self):
sbra = repr(self.bra)
sket = repr(self.ket)
return "%s|%s" % (sbra[:-1], sket[1:])
def __str__(self):
return self.__repr__()
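# Illustrative sketch: inner products reduce to Kronecker deltas, so for
# numeric labels they evaluate directly, e.g.
#     InnerProduct(BBra([0]), BKet([0]))  ->  1
#     InnerProduct(BBra([0]), BKet([1]))  ->  0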
def matrix_rep(op, basis):
"""
Find the representation of an operator in a basis.
Examples
========
>>> from sympy.physics.secondquant import VarBosonicBasis, B, matrix_rep
>>> b = VarBosonicBasis(5)
>>> o = B(0)
>>> matrix_rep(o, b)
Matrix([
[0, 1, 0, 0, 0],
[0, 0, sqrt(2), 0, 0],
[0, 0, 0, sqrt(3), 0],
[0, 0, 0, 0, 2],
[0, 0, 0, 0, 0]])
"""
a = zeros(len(basis))
for i in range(len(basis)):
for j in range(len(basis)):
a[i, j] = apply_operators(Dagger(basis[i])*op*basis[j])
return a
class BosonicBasis(object):
"""
Base class for a basis set of bosonic Fock states.
"""
pass
class VarBosonicBasis(object):
"""
A single state, variable particle number basis set.
Examples
========
>>> from sympy.physics.secondquant import VarBosonicBasis
>>> b = VarBosonicBasis(5)
>>> b
[FockState((0,)), FockState((1,)), FockState((2,)),
FockState((3,)), FockState((4,))]
"""
def __init__(self, n_max):
self.n_max = n_max
self._build_states()
def _build_states(self):
self.basis = []
for i in range(self.n_max):
self.basis.append(FockStateBosonKet([i]))
self.n_basis = len(self.basis)
def index(self, state):
"""
Returns the index of state in basis.
Examples
========
>>> from sympy.physics.secondquant import VarBosonicBasis
>>> b = VarBosonicBasis(3)
>>> state = b.state(1)
>>> b
[FockState((0,)), FockState((1,)), FockState((2,))]
>>> state
FockStateBosonKet((1,))
>>> b.index(state)
1
"""
return self.basis.index(state)
def state(self, i):
"""
The state of a single basis.
Examples
========
>>> from sympy.physics.secondquant import VarBosonicBasis
>>> b = VarBosonicBasis(5)
>>> b.state(3)
FockStateBosonKet((3,))
"""
return self.basis[i]
def __getitem__(self, i):
return self.state(i)
def __len__(self):
return len(self.basis)
def __repr__(self):
return repr(self.basis)
class FixedBosonicBasis(BosonicBasis):
"""
Fixed particle number basis set.
Examples
========
>>> from sympy.physics.secondquant import FixedBosonicBasis
>>> b = FixedBosonicBasis(2, 2)
>>> state = b.state(1)
>>> b
[FockState((2, 0)), FockState((1, 1)), FockState((0, 2))]
>>> state
FockStateBosonKet((1, 1))
>>> b.index(state)
1
"""
def __init__(self, n_particles, n_levels):
self.n_particles = n_particles
self.n_levels = n_levels
self._build_particle_locations()
self._build_states()
def _build_particle_locations(self):
tup = ["i%i" % i for i in range(self.n_particles)]
first_loop = "for i0 in range(%i)" % self.n_levels
other_loops = ''
for cur, prev in zip(tup[1:], tup):
temp = "for %s in range(%s + 1) " % (cur, prev)
other_loops = other_loops + temp
tup_string = "(%s)" % ", ".join(tup)
list_comp = "[%s %s %s]" % (tup_string, first_loop, other_loops)
result = eval(list_comp)
if self.n_particles == 1:
result = [(item,) for item in result]
self.particle_locations = result
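# e.g. FixedBosonicBasis(2, 3) evaluates the generated comprehension
# "[(i0, i1) for i0 in range(3) for i1 in range(i0 + 1) ]", giving
# particle_locations [(0, 0), (1, 0), (1, 1), (2, 0), (2, 1), (2, 2)]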
def _build_states(self):
self.basis = []
for tuple_of_indices in self.particle_locations:
occ_numbers = self.n_levels*[0]
for level in tuple_of_indices:
occ_numbers[level] += 1
self.basis.append(FockStateBosonKet(occ_numbers))
self.n_basis = len(self.basis)
def index(self, state):
"""Returns the index of state in basis.
Examples
========
>>> from sympy.physics.secondquant import FixedBosonicBasis
>>> b = FixedBosonicBasis(2, 3)
>>> b.index(b.state(3))
3
"""
return self.basis.index(state)
def state(self, i):
"""Returns the state that lies at index i of the basis
Examples
========
>>> from sympy.physics.secondquant import FixedBosonicBasis
>>> b = FixedBosonicBasis(2, 3)
>>> b.state(3)
FockStateBosonKet((1, 0, 1))
"""
return self.basis[i]
def __getitem__(self, i):
return self.state(i)
def __len__(self):
return len(self.basis)
def __repr__(self):
return repr(self.basis)
class Commutator(Function):
"""
The Commutator: [A, B] = A*B - B*A
The arguments are ordered according to .__cmp__()
>>> from sympy import symbols
>>> from sympy.physics.secondquant import Commutator
>>> A, B = symbols('A,B', commutative=False)
>>> Commutator(B, A)
-Commutator(A, B)
Evaluate the commutator with .doit()
>>> comm = Commutator(A,B); comm
Commutator(A, B)
>>> comm.doit()
A*B - B*A
For two second quantization operators the commutator is evaluated
immediately:
>>> from sympy.physics.secondquant import Fd, F
>>> a = symbols('a', above_fermi=True)
>>> i = symbols('i', below_fermi=True)
>>> p,q = symbols('p,q')
>>> Commutator(Fd(a),Fd(i))
2*NO(CreateFermion(a)*CreateFermion(i))
But for more complicated expressions, the evaluation is triggered by
a call to .doit()
>>> comm = Commutator(Fd(p)*Fd(q),F(i)); comm
Commutator(CreateFermion(p)*CreateFermion(q), AnnihilateFermion(i))
>>> comm.doit(wicks=True)
-KroneckerDelta(i, p)*CreateFermion(q) +
KroneckerDelta(i, q)*CreateFermion(p)
"""
is_commutative = False
@classmethod
def eval(cls, a, b):
"""
The Commutator [A,B] is on canonical form if A < B.
Examples
========
>>> from sympy.physics.secondquant import Commutator, F, Fd
>>> from sympy.abc import x
>>> c1 = Commutator(F(x), Fd(x))
>>> c2 = Commutator(Fd(x), F(x))
>>> Commutator.eval(c1, c2)
0
"""
if not (a and b):
return S.Zero
if a == b:
return S.Zero
if a.is_commutative or b.is_commutative:
return S.Zero
#
# [A+B,C] -> [A,C] + [B,C]
#
a = a.expand()
if isinstance(a, Add):
return Add(*[cls(term, b) for term in a.args])
b = b.expand()
if isinstance(b, Add):
return Add(*[cls(a, term) for term in b.args])
#
# [xA,yB] -> xy*[A,B]
#
ca, nca = a.args_cnc()
cb, ncb = b.args_cnc()
c_part = list(ca) + list(cb)
if c_part:
return Mul(Mul(*c_part), cls(Mul._from_args(nca), Mul._from_args(ncb)))
#
# single second quantization operators
#
if isinstance(a, BosonicOperator) and isinstance(b, BosonicOperator):
if isinstance(b, CreateBoson) and isinstance(a, AnnihilateBoson):
return KroneckerDelta(a.state, b.state)
if isinstance(a, CreateBoson) and isinstance(b, AnnihilateBoson):
return S.NegativeOne*KroneckerDelta(a.state, b.state)
else:
return S.Zero
if isinstance(a, FermionicOperator) and isinstance(b, FermionicOperator):
return wicks(a*b) - wicks(b*a)
#
# Canonical ordering of arguments
#
if a.sort_key() > b.sort_key():
return S.NegativeOne*cls(b, a)
def doit(self, **hints):
"""
Enables the computation of complex expressions.
Examples
========
>>> from sympy.physics.secondquant import Commutator, F, Fd
>>> from sympy import symbols
>>> i, j = symbols('i,j', below_fermi=True)
>>> a, b = symbols('a,b', above_fermi=True)
>>> c = Commutator(Fd(a)*F(i),Fd(b)*F(j))
>>> c.doit(wicks=True)
0
"""
a = self.args[0]
b = self.args[1]
if hints.get("wicks"):
a = a.doit(**hints)
b = b.doit(**hints)
try:
return wicks(a*b) - wicks(b*a)
except ContractionAppliesOnlyToFermions:
pass
except WicksTheoremDoesNotApply:
pass
return (a*b - b*a).doit(**hints)
def __repr__(self):
return "Commutator(%s,%s)" % (self.args[0], self.args[1])
def __str__(self):
return "[%s,%s]" % (self.args[0], self.args[1])
def _latex(self, printer):
return "\\left[%s,%s\\right]" % tuple([
printer._print(arg) for arg in self.args])
class NO(Expr):
"""
This Object is used to represent normal ordering brackets.
i.e. {abcd} sometimes written :abcd:
Applying the function NO(arg) to an argument means that all operators in
the argument will be assumed to anticommute, and have vanishing
contractions. This allows an immediate reordering to canonical form
upon object creation.
>>> from sympy import symbols
>>> from sympy.physics.secondquant import NO, F, Fd
>>> p,q = symbols('p,q')
>>> NO(Fd(p)*F(q))
NO(CreateFermion(p)*AnnihilateFermion(q))
>>> NO(F(q)*Fd(p))
-NO(CreateFermion(p)*AnnihilateFermion(q))
Note:
If you want to generate a normal ordered equivalent of an expression, you
should use the function wicks(). This class only indicates that all
operators inside the brackets anticommute, and have vanishing contractions.
Nothing more, nothing less.
"""
is_commutative = False
def __new__(cls, arg):
"""
Use anticommutation to get canonical form of operators.
Employ associativity of normal ordered product: {ab{cd}} = {abcd}
but note that {ab}{cd} /= {abcd}.
We also employ distributivity: {ab + cd} = {ab} + {cd}.
Canonical form also implies expand() {ab(c+d)} = {abc} + {abd}.
"""
# {ab + cd} = {ab} + {cd}
arg = sympify(arg)
arg = arg.expand()
if arg.is_Add:
return Add(*[ cls(term) for term in arg.args])
if arg.is_Mul:
# take coefficient outside of normal ordering brackets
c_part, seq = arg.args_cnc()
if c_part:
coeff = Mul(*c_part)
if not seq:
return coeff
else:
coeff = S.One
# {ab{cd}} = {abcd}
newseq = []
foundit = False
for fac in seq:
if isinstance(fac, NO):
newseq.extend(fac.args)
foundit = True
else:
newseq.append(fac)
if foundit:
return coeff*cls(Mul(*newseq))
# We assume that the user doesn't mix B and F operators
if isinstance(seq[0], BosonicOperator):
raise NotImplementedError
try:
newseq, sign = _sort_anticommuting_fermions(seq)
except ViolationOfPauliPrinciple:
return S.Zero
if sign % 2:
return (S.NegativeOne*coeff)*cls(Mul(*newseq))
elif sign:
return coeff*cls(Mul(*newseq))
else:
pass # since sign==0, no permutations were necessary
# if we couldn't do anything with Mul object, we just
# mark it as normal ordered
if coeff != S.One:
return coeff*cls(Mul(*newseq))
return Expr.__new__(cls, Mul(*newseq))
if isinstance(arg, NO):
return arg
# if object was not Mul or Add, normal ordering does not apply
return arg
@property
def has_q_creators(self):
"""
Return 0 if the leftmost argument of the first argument is not a
q_creator, else 1 if it is above fermi or -1 if it is below fermi.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import NO, F, Fd
>>> a = symbols('a', above_fermi=True)
>>> i = symbols('i', below_fermi=True)
>>> NO(Fd(a)*Fd(i)).has_q_creators
1
>>> NO(F(i)*F(a)).has_q_creators
-1
>>> NO(Fd(i)*F(a)).has_q_creators #doctest: +SKIP
0
"""
return self.args[0].args[0].is_q_creator
@property
def has_q_annihilators(self):
"""
Return 0 if the rightmost argument of the first argument is not a
q_annihilator, else 1 if it is above fermi or -1 if it is below fermi.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import NO, F, Fd
>>> a = symbols('a', above_fermi=True)
>>> i = symbols('i', below_fermi=True)
>>> NO(Fd(a)*Fd(i)).has_q_annihilators
-1
>>> NO(F(i)*F(a)).has_q_annihilators
1
>>> NO(Fd(a)*F(i)).has_q_annihilators
0
"""
return self.args[0].args[-1].is_q_annihilator
def doit(self, **kw_args):
"""
Either removes the brackets or enables complex computations
in its arguments.
Examples
========
>>> from sympy.physics.secondquant import NO, Fd, F
>>> from textwrap import fill
>>> from sympy import symbols, Dummy
>>> p,q = symbols('p,q', cls=Dummy)
>>> print(fill(str(NO(Fd(p)*F(q)).doit())))
KroneckerDelta(_a, _p)*KroneckerDelta(_a,
_q)*CreateFermion(_a)*AnnihilateFermion(_a) + KroneckerDelta(_a,
_p)*KroneckerDelta(_i, _q)*CreateFermion(_a)*AnnihilateFermion(_i) -
KroneckerDelta(_a, _q)*KroneckerDelta(_i,
_p)*AnnihilateFermion(_a)*CreateFermion(_i) - KroneckerDelta(_i,
_p)*KroneckerDelta(_i, _q)*AnnihilateFermion(_i)*CreateFermion(_i)
"""
if kw_args.get("remove_brackets", True):
return self._remove_brackets()
else:
return self.__new__(type(self), self.args[0].doit(**kw_args))
def _remove_brackets(self):
"""
Returns the sorted string without normal order brackets.
The returned string have the property that no nonzero
contractions exist.
"""
# check if any creator is also an annihilator
subslist = []
for i in self.iter_q_creators():
if self[i].is_q_annihilator:
assume = self[i].state.assumptions0
# only operators with a dummy index can be split in two terms
if isinstance(self[i].state, Dummy):
# create indices with fermi restriction
assume.pop("above_fermi", None)
assume["below_fermi"] = True
below = Dummy('i', **assume)
assume.pop("below_fermi", None)
assume["above_fermi"] = True
above = Dummy('a', **assume)
cls = type(self[i])
split = (
self[i].__new__(cls, below)
* KroneckerDelta(below, self[i].state)
+ self[i].__new__(cls, above)
* KroneckerDelta(above, self[i].state)
)
subslist.append((self[i], split))
else:
raise SubstitutionOfAmbigousOperatorFailed(self[i])
if subslist:
result = NO(self.subs(subslist))
if isinstance(result, Add):
return Add(*[term.doit() for term in result.args])
else:
return self.args[0]
def _expand_operators(self):
"""
Returns a sum of NO objects that contain no ambiguous q-operators.
If an index q has range both above and below fermi, the operator F(q)
is ambiguous in the sense that it can be both a q-creator and a q-annihilator.
If q is dummy, it is assumed to be a summation variable and this method
rewrites it into a sum of NO terms with unambiguous operators:
{Fd(p)*F(q)} = {Fd(a)*F(b)} + {Fd(a)*F(i)} + {Fd(j)*F(b)} -{F(i)*Fd(j)}
where a,b are above and i,j are below fermi level.
"""
return NO(self._remove_brackets)
def __getitem__(self, i):
if isinstance(i, slice):
indices = i.indices(len(self))
return [self.args[0].args[i] for i in range(*indices)]
else:
return self.args[0].args[i]
def __len__(self):
return len(self.args[0].args)
def iter_q_annihilators(self):
"""
Iterates over the annihilation operators.
Examples
========
>>> from sympy import symbols
>>> i, j = symbols('i j', below_fermi=True)
>>> a, b = symbols('a b', above_fermi=True)
>>> from sympy.physics.secondquant import NO, F, Fd
>>> no = NO(Fd(a)*F(i)*F(b)*Fd(j))
>>> no.iter_q_creators()
<generator object... at 0x...>
>>> list(no.iter_q_creators())
[0, 1]
>>> list(no.iter_q_annihilators())
[3, 2]
"""
ops = self.args[0].args
iter = range(len(ops) - 1, -1, -1)
for i in iter:
if ops[i].is_q_annihilator:
yield i
else:
break
def iter_q_creators(self):
"""
Iterates over the creation operators.
Examples
========
>>> from sympy import symbols
>>> i, j = symbols('i j', below_fermi=True)
>>> a, b = symbols('a b', above_fermi=True)
>>> from sympy.physics.secondquant import NO, F, Fd
>>> no = NO(Fd(a)*F(i)*F(b)*Fd(j))
>>> no.iter_q_creators()
<generator object... at 0x...>
>>> list(no.iter_q_creators())
[0, 1]
>>> list(no.iter_q_annihilators())
[3, 2]
"""
ops = self.args[0].args
iter = range(0, len(ops))
for i in iter:
if ops[i].is_q_creator:
yield i
else:
break
def get_subNO(self, i):
"""
Returns a NO() without FermionicOperator at index i.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import F, NO
>>> p,q,r = symbols('p,q,r')
>>> NO(F(p)*F(q)*F(r)).get_subNO(1) # doctest: +SKIP
NO(AnnihilateFermion(p)*AnnihilateFermion(r))
"""
arg0 = self.args[0] # it's a Mul by definition of how it's created
mul = arg0._new_rawargs(arg0.args[:i] + arg0.args[i + 1:])
return NO(mul)
def _latex(self, printer):
return "\\left\\{%s\\right\\}" % printer._print(self.args[0])
def __repr__(self):
return "NO(%s)" % self.args[0]
def __str__(self):
return ":%s:" % self.args[0]
def contraction(a, b):
"""
Calculates contraction of Fermionic operators a and b.
Examples
========
>>> from sympy import symbols
>>> from sympy.physics.secondquant import F, Fd, contraction
>>> p, q = symbols('p,q')
>>> a, b = symbols('a,b', above_fermi=True)
>>> i, j = symbols('i,j', below_fermi=True)
A contraction is non-zero only if a quasi-creator is to the right of a
quasi-annihilator:
>>> contraction(F(a),Fd(b))
KroneckerDelta(a, b)
>>> contraction(Fd(i),F(j))
KroneckerDelta(i, j)
For general indices a non-zero result restricts the indices to below/above
the fermi surface:
>>> contraction(Fd(p),F(q))
KroneckerDelta(_i, q)*KroneckerDelta(p, q)
>>> contraction(F(p),Fd(q))
KroneckerDelta(_a, q)*KroneckerDelta(p, q)
Two creators or two annihilators always vanishes:
>>> contraction(F(p),F(q))
0
>>> contraction(Fd(p),Fd(q))
0
"""
if isinstance(b, FermionicOperator) and isinstance(a, FermionicOperator):
if isinstance(a, AnnihilateFermion) and isinstance(b, CreateFermion):
if b.state.assumptions0.get("below_fermi"):
return S.Zero
if a.state.assumptions0.get("below_fermi"):
return S.Zero
if b.state.assumptions0.get("above_fermi"):
return KroneckerDelta(a.state, b.state)
if a.state.assumptions0.get("above_fermi"):
return KroneckerDelta(a.state, b.state)
return (KroneckerDelta(a.state, b.state)*
KroneckerDelta(b.state, Dummy('a', above_fermi=True)))
if isinstance(b, AnnihilateFermion) and isinstance(a, CreateFermion):
if b.state.assumptions0.get("above_fermi"):
return S.Zero
if a.state.assumptions0.get("above_fermi"):
return S.Zero
if b.state.assumptions0.get("below_fermi"):
return KroneckerDelta(a.state, b.state)
if a.state.assumptions0.get("below_fermi"):
return KroneckerDelta(a.state, b.state)
return (KroneckerDelta(a.state, b.state)*
KroneckerDelta(b.state, Dummy('i', below_fermi=True)))
# vanish if 2xAnnihilator or 2xCreator
return S.Zero
else:
#not fermion operators
t = ( isinstance(i, FermionicOperator) for i in (a, b) )
raise ContractionAppliesOnlyToFermions(*t)
def _sqkey(sq_operator):
"""Generates key for canonical sorting of SQ operators."""
return sq_operator._sortkey()
def _sort_anticommuting_fermions(string1, key=_sqkey):
"""Sort fermionic operators to canonical order, assuming all pairs anticommute.
Uses a bidirectional bubble sort. Items in string1 are not referenced
so in principle they may be any comparable objects. The sorting depends on the
operators '>' and '=='.
If the Pauli principle is violated, an exception is raised.
Returns
=======
tuple (sorted_str, sign)
sorted_str: list containing the sorted operators
sign: int telling how many times the sign should be changed
(if sign==0 the string was already sorted)
"""
verified = False
sign = 0
rng = list(range(len(string1) - 1))
rev = list(range(len(string1) - 3, -1, -1))
keys = list(map(key, string1))
key_val = dict(list(zip(keys, string1)))
while not verified:
verified = True
for i in rng:
left = keys[i]
right = keys[i + 1]
if left == right:
raise ViolationOfPauliPrinciple([left, right])
if left > right:
verified = False
keys[i:i + 2] = [right, left]
sign = sign + 1
if verified:
break
for i in rev:
left = keys[i]
right = keys[i + 1]
if left == right:
raise ViolationOfPauliPrinciple([left, right])
if left > right:
verified = False
keys[i:i + 2] = [right, left]
sign = sign + 1
string1 = [ key_val[k] for k in keys ]
return (string1, sign)
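# Illustrative sketch (not part of the module): one swap is needed below to
# reach canonical order, so the returned sign is odd:
#     _sort_anticommuting_fermions([F(2), F(1)])  ->  ([F(1), F(2)], 1)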
def evaluate_deltas(e):
"""
We evaluate KroneckerDelta symbols in the expression assuming Einstein summation.
If one index is repeated it is summed over and in effect substituted with
the other one. If both indices are repeated we substitute according to what
is the preferred index. this is determined by
KroneckerDelta.preferred_index and KroneckerDelta.killable_index.
In case there are no possible substitutions or if a substitution would
imply a loss of information, nothing is done.
In case an index appears in more than one KroneckerDelta, the resulting
substitution depends on the order of the factors. Since the ordering is platform
dependent, the literal expression resulting from this function may be hard to
predict.
Examples
========
We assume the following:
>>> from sympy import symbols, Function, Dummy, KroneckerDelta
>>> from sympy.physics.secondquant import evaluate_deltas
>>> i,j = symbols('i j', below_fermi=True, cls=Dummy)
>>> a,b = symbols('a b', above_fermi=True, cls=Dummy)
>>> p,q = symbols('p q', cls=Dummy)
>>> f = Function('f')
>>> t = Function('t')
The order of preference for these indices according to KroneckerDelta is
(a, b, i, j, p, q).
Trivial cases:
>>> evaluate_deltas(KroneckerDelta(i,j)*f(i)) # d_ij f(i) -> f(j)
f(_j)
>>> evaluate_deltas(KroneckerDelta(i,j)*f(j)) # d_ij f(j) -> f(i)
f(_i)
>>> evaluate_deltas(KroneckerDelta(i,p)*f(p)) # d_ip f(p) -> f(i)
f(_i)
>>> evaluate_deltas(KroneckerDelta(q,p)*f(p)) # d_qp f(p) -> f(q)
f(_q)
>>> evaluate_deltas(KroneckerDelta(q,p)*f(q)) # d_qp f(q) -> f(p)
f(_p)
More interesting cases:
>>> evaluate_deltas(KroneckerDelta(i,p)*t(a,i)*f(p,q))
f(_i, _q)*t(_a, _i)
>>> evaluate_deltas(KroneckerDelta(a,p)*t(a,i)*f(p,q))
f(_a, _q)*t(_a, _i)
>>> evaluate_deltas(KroneckerDelta(p,q)*f(p,q))
f(_p, _p)
Finally, here are some cases where nothing is done, because that would
imply a loss of information:
>>> evaluate_deltas(KroneckerDelta(i,p)*f(q))
f(_q)*KroneckerDelta(_i, _p)
>>> evaluate_deltas(KroneckerDelta(i,p)*f(i))
f(_i)*KroneckerDelta(_i, _p)
"""
# We treat Deltas only in mul objects
# for general function objects we don't evaluate KroneckerDeltas in arguments,
# but here we hard code exceptions to this rule
accepted_functions = (
Add,
)
if isinstance(e, accepted_functions):
return e.func(*[evaluate_deltas(arg) for arg in e.args])
elif isinstance(e, Mul):
# find all occurrences of delta functions and count each index present in
# expression.
deltas = []
indices = {}
for i in e.args:
for s in i.free_symbols:
if s in indices:
indices[s] += 1
else:
indices[s] = 0 # geek counting simplifies logic below
if isinstance(i, KroneckerDelta):
deltas.append(i)
for d in deltas:
# If we do something, and there are more deltas, we should recurse
# to treat the resulting expression properly
if indices[d.killable_index]:
e = e.subs(d.killable_index, d.preferred_index)
if len(deltas) > 1:
return evaluate_deltas(e)
elif indices[d.preferred_index] and d.indices_contain_equal_information:
e = e.subs(d.preferred_index, d.killable_index)
if len(deltas) > 1:
return evaluate_deltas(e)
else:
pass
return e
# nothing to do, maybe we hit a Symbol or a number
else:
return e
def substitute_dummies(expr, new_indices=False, pretty_indices={}):
"""
Collect terms by substitution of dummy variables.
This routine allows simplification of Add expressions containing terms
which differ only due to dummy variables.
The idea is to substitute all dummy variables consistently depending on
the structure of the term. For each term, we obtain a sequence of all
dummy variables, where the order is determined by the index range, what
factors the index belongs to and its position in each factor. See
_get_ordered_dummies() for more information about the sorting of dummies.
The index sequence is then substituted consistently in each term.
Examples
========
>>> from sympy import symbols, Function, Dummy
>>> from sympy.physics.secondquant import substitute_dummies
>>> a,b,c,d = symbols('a b c d', above_fermi=True, cls=Dummy)
>>> i,j = symbols('i j', below_fermi=True, cls=Dummy)
>>> f = Function('f')
>>> expr = f(a,b) + f(c,d); expr
f(_a, _b) + f(_c, _d)
Since a, b, c and d are equivalent summation indices, the expression can be
simplified to a single term (for which the dummy indices are still summed over)
>>> substitute_dummies(expr)
2*f(_a, _b)
Controlling output:
By default the dummy symbols that are already present in the expression
will be reused in a different permutation. However, if new_indices=True,
new dummies will be generated and inserted. The keyword 'pretty_indices'
can be used to control this generation of new symbols.
By default the new dummies will be generated on the form i_1, i_2, a_1,
etc. If you supply a dictionary with key:value pairs in the form:
{ index_group: string_of_letters }
The letters will be used as labels for the new dummy symbols. The
index_groups must be one of 'above', 'below' or 'general'.
>>> expr = f(a,b,i,j)
>>> my_dummies = { 'above':'st', 'below':'uv' }
>>> substitute_dummies(expr, new_indices=True, pretty_indices=my_dummies)
f(_s, _t, _u, _v)
If we run out of letters, or if there is no keyword for some index_group
the default dummy generator will be used as a fallback:
>>> p,q = symbols('p q', cls=Dummy) # general indices
>>> expr = f(p,q)
>>> substitute_dummies(expr, new_indices=True, pretty_indices=my_dummies)
f(_p_0, _p_1)
"""
# setup the replacing dummies
if new_indices:
letters_above = pretty_indices.get('above', "")
letters_below = pretty_indices.get('below', "")
letters_general = pretty_indices.get('general', "")
len_above = len(letters_above)
len_below = len(letters_below)
len_general = len(letters_general)
def _i(number):
try:
return letters_below[number]
except IndexError:
return 'i_' + str(number - len_below)
def _a(number):
try:
return letters_above[number]
except IndexError:
return 'a_' + str(number - len_above)
def _p(number):
try:
return letters_general[number]
except IndexError:
return 'p_' + str(number - len_general)
aboves = []
belows = []
generals = []
dummies = expr.atoms(Dummy)
if not new_indices:
dummies = sorted(dummies, key=default_sort_key)
# generate lists with the dummies we will insert
a = i = p = 0
for d in dummies:
assum = d.assumptions0
if assum.get("above_fermi"):
if new_indices:
sym = _a(a)
a += 1
l1 = aboves
elif assum.get("below_fermi"):
if new_indices:
sym = _i(i)
i += 1
l1 = belows
else:
if new_indices:
sym = _p(p)
p += 1
l1 = generals
if new_indices:
l1.append(Dummy(sym, **assum))
else:
l1.append(d)
expr = expr.expand()
terms = Add.make_args(expr)
new_terms = []
for term in terms:
i = iter(belows)
a = iter(aboves)
p = iter(generals)
ordered = _get_ordered_dummies(term)
subsdict = {}
for d in ordered:
if d.assumptions0.get('below_fermi'):
subsdict[d] = next(i)
elif d.assumptions0.get('above_fermi'):
subsdict[d] = next(a)
else:
subsdict[d] = next(p)
subslist = []
final_subs = []
for k, v in subsdict.items():
if k == v:
continue
if v in subsdict:
# We check if the sequence of substitutions end quickly. In
# that case, we can avoid temporary symbols if we ensure the
# correct substitution order.
if subsdict[v] in subsdict:
# (x, y) -> (y, x), we need a temporary variable
x = Dummy('x')
subslist.append((k, x))
final_subs.append((x, v))
else:
# (x, y) -> (y, a), x->y must be done last
# but before temporary variables are resolved
final_subs.insert(0, (k, v))
else:
subslist.append((k, v))
subslist.extend(final_subs)
new_terms.append(term.subs(subslist))
return Add(*new_terms)
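# Note on the substitution ordering above (illustrative, not part of the
# original module): a two-cycle such as {x: y, y: x} is detected through
# ``subsdict[v] in subsdict`` and routed through temporary Dummy symbols,
# so the swap is applied as (x, tmp1), (y, tmp2) first and (tmp1, y),
# (tmp2, x) last, while a plain chain {x: y, y: a} simply defers x -> y
# until after y -> a by inserting it at the front of final_subs.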
class KeyPrinter(StrPrinter):
"""Printer for which only equal objects are equal in print"""
def _print_Dummy(self, expr):
return "(%s_%i)" % (expr.name, expr.dummy_index)
def __kprint(expr):
p = KeyPrinter()
return p.doprint(expr)
def _get_ordered_dummies(mul, verbose=False):
"""Returns all dummies in the mul sorted in canonical order
The purpose of the canonical ordering is that dummies can be substituted
consistently across terms with the result that equivalent terms can be
simplified.
It is not possible to determine if two terms are equivalent based solely on
the dummy order. However, a consistent substitution guided by the ordered
dummies should lead to trivially (non-)equivalent terms, thereby revealing
the equivalence. This also means that if two terms have identical sequences of
dummies, the (non-)equivalence should already be apparent.
Strategy
--------
    The canonical order is given by an arbitrary sorting rule. A sort key
is determined for each dummy as a tuple that depends on all factors where
the index is present. The dummies are thereby sorted according to the
contraction structure of the term, instead of sorting based solely on the
dummy symbol itself.
    After all dummies in the term have been assigned a key, we check for identical
keys, i.e. unorderable dummies. If any are found, we call a specialized
method, _determine_ambiguous(), that will determine a unique order based
on recursive calls to _get_ordered_dummies().
Key description
---------------
A high level description of the sort key:
1. Range of the dummy index
2. Relation to external (non-dummy) indices
3. Position of the index in the first factor
4. Position of the index in the second factor
The sort key is a tuple with the following components:
1. A single character indicating the range of the dummy (above, below
or general.)
2. A list of strings with fully masked string representations of all
factors where the dummy is present. By masked, we mean that dummies
are represented by a symbol to indicate either below fermi, above or
general. No other information is displayed about the dummies at
this point. The list is sorted stringwise.
3. An integer number indicating the position of the index, in the first
factor as sorted in 2.
4. An integer number indicating the position of the index, in the second
factor as sorted in 2.
If a factor is either of type AntiSymmetricTensor or SqOperator, the index
position in items 3 and 4 is indicated as 'upper' or 'lower' only.
(Creation operators are considered upper and annihilation operators lower.)
If the masked factors are identical, the two factors cannot be ordered
unambiguously in item 2. In this case, items 3, 4 are left out. If several
indices are contracted between the unorderable factors, it will be handled by
_determine_ambiguous()
"""
# setup dicts to avoid repeated calculations in key()
args = Mul.make_args(mul)
fac_dum = dict([ (fac, fac.atoms(Dummy)) for fac in args] )
fac_repr = dict([ (fac, __kprint(fac)) for fac in args] )
all_dums = set().union(*fac_dum.values())
mask = {}
for d in all_dums:
if d.assumptions0.get('below_fermi'):
mask[d] = '0'
elif d.assumptions0.get('above_fermi'):
mask[d] = '1'
else:
mask[d] = '2'
dum_repr = {d: __kprint(d) for d in all_dums}
def _key(d):
dumstruct = [ fac for fac in fac_dum if d in fac_dum[fac] ]
other_dums = set().union(*[fac_dum[fac] for fac in dumstruct])
fac = dumstruct[-1]
        if other_dums is fac_dum[fac]:
            other_dums = fac_dum[fac].copy()
        other_dums.remove(d)
masked_facs = [ fac_repr[fac] for fac in dumstruct ]
for d2 in other_dums:
masked_facs = [ fac.replace(dum_repr[d2], mask[d2])
for fac in masked_facs ]
all_masked = [ fac.replace(dum_repr[d], mask[d])
for fac in masked_facs ]
masked_facs = dict(list(zip(dumstruct, masked_facs)))
# dummies for which the ordering cannot be determined
if has_dups(all_masked):
all_masked.sort()
return mask[d], tuple(all_masked) # positions are ambiguous
# sort factors according to fully masked strings
keydict = dict(list(zip(dumstruct, all_masked)))
dumstruct.sort(key=lambda x: keydict[x])
all_masked.sort()
pos_val = []
for fac in dumstruct:
if isinstance(fac, AntiSymmetricTensor):
if d in fac.upper:
pos_val.append('u')
if d in fac.lower:
pos_val.append('l')
elif isinstance(fac, Creator):
pos_val.append('u')
elif isinstance(fac, Annihilator):
pos_val.append('l')
elif isinstance(fac, NO):
ops = [ op for op in fac if op.has(d) ]
for op in ops:
if isinstance(op, Creator):
pos_val.append('u')
else:
pos_val.append('l')
else:
# fallback to position in string representation
facpos = -1
while 1:
facpos = masked_facs[fac].find(dum_repr[d], facpos + 1)
if facpos == -1:
break
pos_val.append(facpos)
return (mask[d], tuple(all_masked), pos_val[0], pos_val[-1])
dumkey = dict(list(zip(all_dums, list(map(_key, all_dums)))))
result = sorted(all_dums, key=lambda x: dumkey[x])
if has_dups(iter(dumkey.values())):
# We have ambiguities
unordered = defaultdict(set)
for d, k in dumkey.items():
unordered[k].add(d)
for k in [ k for k in unordered if len(unordered[k]) < 2 ]:
del unordered[k]
unordered = [ unordered[k] for k in sorted(unordered) ]
result = _determine_ambiguous(mul, result, unordered)
return result
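# Illustrative sketch, not part of the original module: in a term such as
# f(_a, _i)*g(_i, _b) every dummy in the factor strings is first masked by
# its range marker ('0' below fermi, '1' above, '2' general), e.g.
# "f(_a, _i)" -> "f(1, 0)", so the sort key reflects only the contraction
# structure; positions inside the sorted masked factors then break ties.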
def _determine_ambiguous(term, ordered, ambiguous_groups):
# We encountered a term for which the dummy substitution is ambiguous.
# This happens for terms with 2 or more contractions between factors that
# cannot be uniquely ordered independent of summation indices. For
# example:
#
# Sum(p, q) v^{p, .}_{q, .}v^{q, .}_{p, .}
#
# Assuming that the indices represented by . are dummies with the
# same range, the factors cannot be ordered, and there is no
# way to determine a consistent ordering of p and q.
#
# The strategy employed here, is to relabel all unambiguous dummies with
# non-dummy symbols and call _get_ordered_dummies again. This procedure is
# applied to the entire term so there is a possibility that
# _determine_ambiguous() is called again from a deeper recursion level.
# break recursion if there are no ordered dummies
all_ambiguous = set()
for dummies in ambiguous_groups:
all_ambiguous |= dummies
all_ordered = set(ordered) - all_ambiguous
if not all_ordered:
# FIXME: If we arrive here, there are no ordered dummies. A method to
# handle this needs to be implemented. In order to return something
# useful nevertheless, we choose arbitrarily the first dummy and
# determine the rest from this one. This method is dependent on the
# actual dummy labels which violates an assumption for the canonization
# procedure. A better implementation is needed.
group = [ d for d in ordered if d in ambiguous_groups[0] ]
d = group[0]
all_ordered.add(d)
ambiguous_groups[0].remove(d)
stored_counter = _symbol_factory._counter
subslist = []
for d in [ d for d in ordered if d in all_ordered ]:
nondum = _symbol_factory._next()
subslist.append((d, nondum))
newterm = term.subs(subslist)
neworder = _get_ordered_dummies(newterm)
_symbol_factory._set_counter(stored_counter)
# update ordered list with new information
for group in ambiguous_groups:
ordered_group = [ d for d in neworder if d in group ]
ordered_group.reverse()
result = []
for d in ordered:
if d in group:
result.append(ordered_group.pop())
else:
result.append(d)
ordered = result
return ordered
class _SymbolFactory(object):
def __init__(self, label):
self._counterVar = 0
self._label = label
def _set_counter(self, value):
"""
Sets counter to value.
"""
self._counterVar = value
@property
def _counter(self):
"""
What counter is currently at.
"""
return self._counterVar
def _next(self):
"""
Generates the next symbols and increments counter by 1.
"""
s = Symbol("%s%i" % (self._label, self._counterVar))
self._counterVar += 1
return s
_symbol_factory = _SymbolFactory('_]"]_') # most certainly a unique label
@cacheit
def _get_contractions(string1, keep_only_fully_contracted=False):
"""
Returns Add-object with contracted terms.
Uses recursion to find all contractions. -- Internal helper function --
Will find nonzero contractions in string1 between indices given in
leftrange and rightrange.
"""
# Should we store current level of contraction?
if keep_only_fully_contracted and string1:
result = []
else:
result = [NO(Mul(*string1))]
for i in range(len(string1) - 1):
for j in range(i + 1, len(string1)):
c = contraction(string1[i], string1[j])
if c:
sign = (j - i + 1) % 2
if sign:
coeff = S.NegativeOne*c
else:
coeff = c
#
# Call next level of recursion
# ============================
#
# We now need to find more contractions among operators
#
# oplist = string1[:i]+ string1[i+1:j] + string1[j+1:]
#
# To prevent overcounting, we don't allow contractions
# we have already encountered. i.e. contractions between
# string1[:i] <---> string1[i+1:j]
# and string1[:i] <---> string1[j+1:].
#
# This leaves the case:
oplist = string1[i + 1:j] + string1[j + 1:]
if oplist:
result.append(coeff*NO(
Mul(*string1[:i])*_get_contractions( oplist,
keep_only_fully_contracted=keep_only_fully_contracted)))
else:
result.append(coeff*NO( Mul(*string1[:i])))
if keep_only_fully_contracted:
break # next iteration over i leaves leftmost operator string1[0] uncontracted
return Add(*result)
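# Sign bookkeeping above (illustrative note, not part of the original
# module): contracting string1[i] with string1[j] commutes the pair past
# j - i - 1 intervening operators, and (j - i + 1) % 2 has the same parity,
# so e.g. for c0*c1*c2*c3 the contraction c0-c2 (one operator in between)
# picks up a factor -1 while c0-c1 and c0-c3 come with +1.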
def wicks(e, **kw_args):
"""
    Returns the normal ordered equivalent of an expression using Wick's theorem.
Examples
========
>>> from sympy import symbols, Function, Dummy
>>> from sympy.physics.secondquant import wicks, F, Fd, NO
>>> p,q,r = symbols('p,q,r')
>>> wicks(Fd(p)*F(q)) # doctest: +SKIP
d(p, q)*d(q, _i) + NO(CreateFermion(p)*AnnihilateFermion(q))
By default, the expression is expanded:
>>> wicks(F(p)*(F(q)+F(r))) # doctest: +SKIP
NO(AnnihilateFermion(p)*AnnihilateFermion(q)) + NO(
AnnihilateFermion(p)*AnnihilateFermion(r))
With the keyword 'keep_only_fully_contracted=True', only fully contracted
terms are returned.
By request, the result can be simplified in the following order:
-- KroneckerDelta functions are evaluated
-- Dummy variables are substituted consistently across terms
>>> p, q, r = symbols('p q r', cls=Dummy)
>>> wicks(Fd(p)*(F(q)+F(r)), keep_only_fully_contracted=True) # doctest: +SKIP
KroneckerDelta(_i, _q)*KroneckerDelta(
_p, _q) + KroneckerDelta(_i, _r)*KroneckerDelta(_p, _r)
"""
if not e:
return S.Zero
opts = {
'simplify_kronecker_deltas': False,
'expand': True,
'simplify_dummies': False,
'keep_only_fully_contracted': False
}
opts.update(kw_args)
# check if we are already normally ordered
if isinstance(e, NO):
if opts['keep_only_fully_contracted']:
return S.Zero
else:
return e
elif isinstance(e, FermionicOperator):
if opts['keep_only_fully_contracted']:
return S.Zero
else:
return e
# break up any NO-objects, and evaluate commutators
e = e.doit(wicks=True)
# make sure we have only one term to consider
e = e.expand()
if isinstance(e, Add):
if opts['simplify_dummies']:
return substitute_dummies(Add(*[ wicks(term, **kw_args) for term in e.args]))
else:
return Add(*[ wicks(term, **kw_args) for term in e.args])
# For Mul-objects we can actually do something
if isinstance(e, Mul):
        # we don't want to mess around with the commuting part of the Mul,
# so we factorize it out before starting recursion
c_part = []
string1 = []
for factor in e.args:
if factor.is_commutative:
c_part.append(factor)
else:
string1.append(factor)
n = len(string1)
# catch trivial cases
if n == 0:
result = e
elif n == 1:
if opts['keep_only_fully_contracted']:
return S.Zero
else:
result = e
else: # non-trivial
if isinstance(string1[0], BosonicOperator):
raise NotImplementedError
string1 = tuple(string1)
# recursion over higher order contractions
result = _get_contractions(string1,
keep_only_fully_contracted=opts['keep_only_fully_contracted'] )
result = Mul(*c_part)*result
if opts['expand']:
result = result.expand()
if opts['simplify_kronecker_deltas']:
result = evaluate_deltas(result)
return result
# there was nothing to do
return e
class PermutationOperator(Expr):
"""
Represents the index permutation operator P(ij).
P(ij)*f(i)*g(j) = f(i)*g(j) - f(j)*g(i)
"""
is_commutative = True
def __new__(cls, i, j):
i, j = sorted(map(sympify, (i, j)), key=default_sort_key)
obj = Basic.__new__(cls, i, j)
return obj
def get_permuted(self, expr):
"""
Returns -expr with permuted indices.
>>> from sympy import symbols, Function
>>> from sympy.physics.secondquant import PermutationOperator
>>> p,q = symbols('p,q')
>>> f = Function('f')
>>> PermutationOperator(p,q).get_permuted(f(p,q))
-f(q, p)
"""
i = self.args[0]
j = self.args[1]
if expr.has(i) and expr.has(j):
tmp = Dummy()
expr = expr.subs(i, tmp)
expr = expr.subs(j, i)
expr = expr.subs(tmp, j)
return S.NegativeOne*expr
else:
return expr
def _latex(self, printer):
return "P(%s%s)" % self.args
def simplify_index_permutations(expr, permutation_operators):
"""
Performs simplification by introducing PermutationOperators where appropriate.
Schematically:
[abij] - [abji] - [baij] + [baji] -> P(ab)*P(ij)*[abij]
permutation_operators is a list of PermutationOperators to consider.
If permutation_operators=[P(ab),P(ij)] we will try to introduce the
permutation operators P(ij) and P(ab) in the expression. If there are other
possible simplifications, we ignore them.
>>> from sympy import symbols, Function
>>> from sympy.physics.secondquant import simplify_index_permutations
>>> from sympy.physics.secondquant import PermutationOperator
>>> p,q,r,s = symbols('p,q,r,s')
>>> f = Function('f')
>>> g = Function('g')
>>> expr = f(p)*g(q) - f(q)*g(p); expr
f(p)*g(q) - f(q)*g(p)
>>> simplify_index_permutations(expr,[PermutationOperator(p,q)])
f(p)*g(q)*PermutationOperator(p, q)
>>> PermutList = [PermutationOperator(p,q),PermutationOperator(r,s)]
>>> expr = f(p,r)*g(q,s) - f(q,r)*g(p,s) + f(q,s)*g(p,r) - f(p,s)*g(q,r)
>>> simplify_index_permutations(expr,PermutList)
f(p, r)*g(q, s)*PermutationOperator(p, q)*PermutationOperator(r, s)
"""
def _get_indices(expr, ind):
"""
Collects indices recursively in predictable order.
"""
result = []
for arg in expr.args:
if arg in ind:
result.append(arg)
else:
if arg.args:
result.extend(_get_indices(arg, ind))
return result
def _choose_one_to_keep(a, b, ind):
# we keep the one where indices in ind are in order ind[0] < ind[1]
return min(a, b, key=lambda x: default_sort_key(_get_indices(x, ind)))
expr = expr.expand()
if isinstance(expr, Add):
terms = set(expr.args)
for P in permutation_operators:
new_terms = set([])
on_hold = set([])
while terms:
term = terms.pop()
permuted = P.get_permuted(term)
if permuted in terms | on_hold:
try:
terms.remove(permuted)
except KeyError:
on_hold.remove(permuted)
keep = _choose_one_to_keep(term, permuted, P.args)
new_terms.add(P*keep)
else:
# Some terms must get a second chance because the permuted
# term may already have canonical dummy ordering. Then
# substitute_dummies() does nothing. However, the other
# term, if it exists, will be able to match with us.
permuted1 = permuted
permuted = substitute_dummies(permuted)
if permuted1 == permuted:
on_hold.add(term)
elif permuted in terms | on_hold:
try:
terms.remove(permuted)
except KeyError:
on_hold.remove(permuted)
keep = _choose_one_to_keep(term, permuted, P.args)
new_terms.add(P*keep)
else:
new_terms.add(term)
terms = new_terms | on_hold
return Add(*terms)
return expr
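# Note on the "second chance" branch above (illustrative, not part of the
# original module): P.get_permuted may return a term whose dummies are
# already in canonical order, in which case substitute_dummies() changes
# nothing; parking such a term in on_hold lets its partner term find and
# match it on a later pass instead of the pairing being missed.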
| [
"[email protected]"
] | |
1a4bf86142afa60772834ed02506ed40060db509 | 0b9e884be78ecc22a44a94e2c1cabefd637b9ed0 | /Python_Talk/mpt-master/ch14/live_price.py | 04c8d8d261bcbee41454ee1c2b57b439f4d9d005 | [] | no_license | marcovnyc/penguin-code | 6ba3faa5f21186918e2d08f5a0fcacebb2697e56 | a0c1f91219ff74a8bb8e9fd3375b03b667056b54 | refs/heads/master | 2021-12-22T15:04:26.002512 | 2021-12-16T04:01:40 | 2021-12-16T04:01:40 | 7,264,458 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | from yahoo_fin import stock_info as si
# Start an infinite loop
while True:
# Obtain ticker symbol from you
ticker = input("What's the ticker symbol of the stock you are looking for?\n")
# If you want to stop, type in "done"
if ticker == "done":
break
# Otherwise, type in a stock ticker symbol
else:
# Obtain live stock price from Yahoo
price = si.get_live_price(ticker)
# Print out the stock price
print(f"The stock price for {ticker} is {price}.")
| [
"[email protected]"
] | |
d6dff94c7a3c17100e31444270570019c51afe06 | 611ffe1973f843626000e2320c1d425cdf0995ca | /lib/modules/synthesis_decoder.py | cddd9d53ae22e5c5bcb03badb09275f99bd209cc | [] | no_license | kapitsa2811/Text2Scene | f5b1ff803329a68f9035420481db651e5fb0cf54 | f471771cb8e6f8e1e27296f2c535b84fa27a79ed | refs/heads/master | 2020-06-11T03:07:09.853489 | 2019-06-19T05:19:53 | 2019-06-19T05:19:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,853 | py | #!/usr/bin/env python
import math, cv2
import numpy as np
import torch
import torch.nn as nn
from torchvision import models
from modules.separable_convolution import same_padding_size
# from modules.bilinear_downsample import BilinearDownsample
import torch.nn.functional as F
from modules.separable_convolution import separable_conv2d
class SynthesisDecoder(nn.Module):
def __init__(self, config):
super(SynthesisDecoder, self).__init__()
self.cfg = config
h, w = config.output_image_size
if config.use_color_volume:
in_channels = 3 * config.output_vocab_size
else:
in_channels = config.output_vocab_size + 4
self.block6 = nn.Sequential(self.make_layers(512 + in_channels, [512, 512], config.use_normalization, [h//64, w//64]))
self.block5 = nn.Sequential(self.make_layers(512 + 512 + in_channels, [512, 512], config.use_normalization, [h//32, w//32]))
self.block4 = nn.Sequential(self.make_layers(512 + 512 + in_channels, [512, 512], config.use_normalization, [h//16, w//16]))
self.block3 = nn.Sequential(self.make_layers(512 + 256 + in_channels, [512, 512], config.use_normalization, [h//8, w//8]))
self.block2 = nn.Sequential(self.make_layers(512 + 256 + in_channels, [512, 512], config.use_normalization, [h//4, w//4]))
self.block1 = nn.Sequential(self.make_layers(512 + 256 + in_channels, [256, 256], config.use_normalization, [h//2, w//2]))
self.block0 = nn.Sequential(self.make_layers(256 + in_channels, [256, 256], config.use_normalization, [h, w]))
# self.block_up = nn.Sequential(self.make_layers(256 + in_channels, [128, 128], config.use_normalization, [2*h, 2*w]))
if self.cfg.use_separable_convolution:
conv2d = separable_conv2d
else:
conv2d = nn.Conv2d
self.imaging = conv2d(256, 3, 1)
self.labeling = conv2d(256, config.output_vocab_size, 1)
def make_layers(self, inplace, places, use_layer_norm=False, resolution=None):
if self.cfg.use_separable_convolution:
conv2d = separable_conv2d
else:
conv2d = nn.Conv2d
layers = []
in_channels = inplace
for v in places:
if use_layer_norm:
current_conv2d = conv2d(in_channels, v, kernel_size=3, padding=1, bias=False)
current_lnorm = nn.LayerNorm([v, resolution[0], resolution[1]])
layers.extend([current_conv2d, current_lnorm])
else:
current_conv2d = conv2d(in_channels, v, kernel_size=3, padding=1, bias=True)
layers.append(current_conv2d)
layers.append(nn.LeakyReLU(0.2, inplace=True))
in_channels = v
return nn.Sequential(*layers)
def forward(self, inputs):
h, w = self.cfg.output_image_size
xx, x1, x2, x3, x4, x5, x6 = inputs
xx_d6 = F.interpolate(xx, size=[h//64, w//64], mode='bilinear', align_corners=True)
x = torch.cat([xx_d6, x6], dim=1)
x = self.block6(x)
x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
xx_d5 = F.interpolate(xx, size=[h//32, w//32], mode='bilinear', align_corners=True)
x = torch.cat([xx_d5, x5, x], dim=1)
x = self.block5(x)
x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
xx_d4 = F.interpolate(xx, size=[h//16, w//16], mode='bilinear', align_corners=True)
x = torch.cat([xx_d4, x4, x], dim=1)
x = self.block4(x)
x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
xx_d3 = F.interpolate(xx, size=[h//8, w//8], mode='bilinear', align_corners=True)
x = torch.cat([xx_d3, x3, x], dim=1)
x = self.block3(x)
x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
xx_d2 = F.interpolate(xx, size=[h//4, w//4], mode='bilinear', align_corners=True)
x = torch.cat([xx_d2, x2, x], dim=1)
x = self.block2(x)
x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
xx_d1 = F.interpolate(xx, size=[h//2, w//2], mode='bilinear', align_corners=True)
x = torch.cat([xx_d1, x1, x], dim=1)
x = self.block1(x)
x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
# x = torch.cat([x0, x], dim=1)
# x = self.block0(x)
# x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
xx_d0 = xx #F.interpolate(xx, size=[h, w], mode='bilinear', align_corners=True)
x = torch.cat([xx_d0, x], dim=1)
x = self.block0(x)
image = self.imaging(x)
label = self.labeling(x)
image = 255.0*(image+1.0)/2.0
return image, label
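if __name__ == '__main__':
    # Minimal shape check (illustrative only, assuming the module's own
    # imports resolve; the real config class lives elsewhere, so this
    # stand-in carries just the attributes the decoder reads).
    class _Cfg(object):
        output_image_size = (128, 128)
        output_vocab_size = 80
        use_color_volume = False
        use_normalization = False
        use_separable_convolution = False
    cfg = _Cfg()
    net = SynthesisDecoder(cfg)
    h, w = cfg.output_image_size
    c = cfg.output_vocab_size + 4  # matches the in_channels branch above
    xx = torch.randn(1, c, h, w)
    # Encoder features x1..x6 at strides 2..64 with the channel counts the
    # blocks above expect.
    specs = [(256, 2), (256, 4), (256, 8), (512, 16), (512, 32), (512, 64)]
    feats = [torch.randn(1, ch, h // s, w // s) for ch, s in specs]
    image, label = net([xx] + feats)
    print(image.shape, label.shape)  # (1, 3, 128, 128), (1, 80, 128, 128)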
| [
"[email protected]"
] | |
25189b66364fe5afb9c41481a00735828b4ffd74 | 14f455693213cae4506a01b7d0591e542c38de79 | /config/munin/hookbox | 95845775bd8e21f8cfa9f7bb60dcb248538dd6b1 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Cvalladares/Newsblur_Instrumented | f0b14d063759973330f202108a7eed3a29bcc033 | 4d6ee6aa9713879b1e2550ea5f2dbd819c73af12 | refs/heads/master | 2022-12-29T15:19:29.726455 | 2019-09-03T17:09:04 | 2019-09-03T17:09:04 | 206,130,022 | 0 | 0 | MIT | 2022-12-10T06:00:26 | 2019-09-03T17:07:04 | Python | UTF-8 | Python | false | false | 1,641 | #!/srv/newsblur/venv/newsblur/bin/python
import os
import json
import urllib
import urllib2
from vendor.munin import MuninPlugin
class HookboxPlugin(MuninPlugin):
title = 'hookbox'
args = "--base 1000"
vlabel = "Y"
info = "Subscibed users"
scale = False
def get_channels(self):
return os.environ.get('HOOKBOX_CHANNELS', '').split(',')
def get_url(self):
return os.environ.get('HOOKBOX_URL', 'http://localhost:8001/rest')
def get_secret(self):
return os.environ.get('HOOKBOX_SECRET', '')
@property
def fields(self):
return (
(channel, dict(
label=channel,
info="%s - users" % channel,
type="GAUGE",
))
for channel in self.get_channels()
)
def get_channel_info(self, channel_name):
values = {
'channel_name': channel_name,
'secret': self.get_secret(),
}
req = urllib2.Request("%s/get_channel_info?%s" % (self.get_url(), urllib.urlencode(values)))
resp = urllib2.urlopen(req)
return json.loads(resp.read())
def get_subscribers(self, channel_name):
try:
return len(self.get_channel_info(channel_name)[1]['subscribers'])
except (urllib2.URLError, KeyError), e:
return 'U'
def execute(self):
return dict(
(channel_name, self.get_subscribers(channel_name))
for channel_name in self.get_channels()
)
if __name__ == "__main__":
HookboxPlugin().run()
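# Illustrative invocation (assumed environment values, not part of the
# original plugin): munin runs the script with connection details taken
# from environment variables, e.g.
#   HOOKBOX_CHANNELS=chat,news HOOKBOX_URL=http://localhost:8001/rest \
#   HOOKBOX_SECRET=s3cret ./hookbox
# which prints one "<channel>.value <subscriber count>" line per channel.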
| [
"[email protected]"
] | ||
870388e6f5642063a0b73de0282f49f939814e28 | aedd3aeadfb13eda4489d26ee3d9762598878936 | /leetcode/111. 二叉树的最小深度.py | 70ee52bca1092dfa4ae3db932f4be11bf0763e60 | [] | no_license | AnJian2020/Leetcode | 657e8225c4d395e8764ef7c672d435bda40584c7 | cded97a52c422f98b55f2b3527a054d23541d5a4 | refs/heads/master | 2023-03-26T16:25:36.136647 | 2021-03-26T07:04:10 | 2021-03-26T07:04:10 | 283,940,538 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 527 | py | class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def minDepth(self, root: TreeNode) -> int:
if not root:
return 0
elif root.left is None and root.right is None:
return 1
else:
depth=10**9
if root.left:
depth=min(self.minDepth(root.left),depth)
if root.right:
depth=min(self.minDepth(root.right),depth)
return depth+1 | [
"[email protected]"
] | |
ef350d4222192afd3cb54baab0613d0cc93f46a1 | d9eef8dd3489682c8db41f2311e3058d1f369780 | /.history/abel-network-files/metis_transf_20180709120220.py | efc72c43810042d5e2eddbbadbc20d2d9643e947 | [] | no_license | McKenzie-Lamb/Gerrymandering | 93fe4a49fe39a0b307ed341e46ba8620ea1225be | b7a7c4129d6b0fcd760ba8952de51eafa701eac3 | refs/heads/master | 2021-01-25T06:06:43.824339 | 2018-10-16T14:27:01 | 2018-10-16T14:27:01 | 93,526,515 | 0 | 0 | null | 2018-07-12T19:07:35 | 2017-06-06T14:17:47 | Python | UTF-8 | Python | false | false | 1,080 | py | # Author: Abel Gonzalez
# Date: 06/26/18
#
# Description:
# This program uses the .shp file to create a network graph where each node
# represents a census tract and the edge represents adjacency between each
# tract, using graph-tool instead of networkx
import graph_tool.all as gt
import metis
from pathlib import Path
# Paths
main_folder = Path("abel-network-files/")
data_folder = Path("abel-network-files/data/")
images_folder = Path("abel-network-files/images/")
# Loading the previously created graph and creating the property maps
graph = gt.load_graph(str(data_folder / "tmp_graph100.gt"))
name = graph.new_vertex_property('string')
adjlist = []
nodew = []
for i in graph.vertices():
neighbors = tuple([j for j in i.all_neighbors()])
adjlist.append(neighbors)
#print(graph.vp.data[i]['PERSONS'])
nodew.append(graph.vp.data[i]['PERSONS'])
metis_graph = metis.adjlist_to_metis(adjlist, nodew=nodew)
objval, parts = metis.part_graph(metis_graph, nparts=4)
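# part_graph returns the partition objective (edge-cut by default) and one
# partition id per vertex; copy each id into the string property for drawing.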
for i in range(len(parts)):
    name[graph.vertex(i)] = str(parts[i])
gt.graph_draw(graph, vertex_text=name) | [
"[email protected]"
] | |
fe487c41d5826fb6bcb3c75532dc7c84986de7df | 0dc37ab83fc6603770ba60050fc3d46534b3ef65 | /backend/shah_rukh_2_dev_16118/settings.py | f652824a018a14d25dffd321a0319273f3cde280 | [] | no_license | crowdbotics-apps/shah-rukh-2-dev-16118 | c668cdbf67780b1c0715013725c49edad4c84e11 | 1d39dd000cee1dd28ff33a0c8837d7443bf667e6 | refs/heads/master | 2023-01-19T00:29:50.251660 | 2020-12-03T09:26:54 | 2020-12-03T09:26:54 | 317,791,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,035 | py | """
Django settings for shah_rukh_2_dev_16118 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
# start fcm_django push notifications
'fcm_django',
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'shah_rukh_2_dev_16118.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'shah_rukh_2_dev_16118.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {
"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")
}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
e6a1599936b8a69e0e220ace641024745c01d0ab | 85a1d1c79c152903e48ac29f2cd066c21e825485 | /lcbru_events/views/home.py | 19631feef3331bd6e46cf96532475d975774e3c0 | [
"MIT"
] | permissive | LCBRU/lcbru-events | 2fd02f196ec9cbf31f76ec2f2dff2e7c0ea7f4d3 | 97245fbc711fc1876643d23df635d82752509040 | refs/heads/master | 2021-06-06T19:42:43.750497 | 2016-10-27T10:40:12 | 2016-10-27T10:40:12 | 43,355,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | from flask import render_template
from lcbru_events import app
@app.route('/')
def index():
return render_template('index.html')
| [
"[email protected]"
] | |
76e85db8873a136388ecef88893460abfada2771 | a1b795c17832c1ec4a4a942b7e35c1f9d283b4db | /examples/timeline.py | a2075a79236a537b8ab42e32a2b7811b7b32b626 | [
"MIT"
] | permissive | ammunk/cassiopeia | 016f1a79bcdb4758953625a5953ca19f3b79ccf4 | 4e77919f53a130b3f16977c76cc5f54f3868d8e5 | refs/heads/master | 2020-04-12T23:19:13.401624 | 2018-12-19T15:51:21 | 2018-12-19T15:51:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | import cassiopeia as cass
from cassiopeia import Summoner
def print_newest_match(name: str, account: int, id: int, region: str):
summoner = Summoner(name=name, account=account, id=id, region=region)
match_history = summoner.match_history
match = match_history[0]
print('Match ID:', match.id)
print(match.timeline.frame_interval)
if __name__ == "__main__":
print_newest_match(name="Kalturi", account=34718348, id=21359666, region="NA")
| [
"[email protected]"
] | |
ffe4b244fa0304750d3e5f01849ae46acf1ed9d2 | 074421d31af92ae29c7c78bdb7e50f199a38eb9b | /weixin/code/rfid_plt/db_sync/db_sync_event_app/db_sync_exp_handler.py | eca438dee2310d1ebfc7ecb52f001794329dece9 | [] | no_license | allenforrest/wxbiz | 3f49ce66b37e281fc375f548610aa54a0f73268f | e78df71fbc5d73dd93ba9452d4b54183fe1e7e1f | refs/heads/master | 2016-09-06T15:17:49.420934 | 2013-08-05T13:13:40 | 2013-08-05T13:13:40 | null | 0 | 0 | null | null | null | null | GB18030 | Python | false | false | 11,326 | py | #coding=gbk
"""
Copyright (C), 2012-2015, Anything Connected Possibilities
Author: ACP2013
Version: 1.0
Date:
Description: This file implements the handler that processes data-export commands
Others:
Key Class&Method List:
1. DBSyncExportHandler: handler that processes data-export commands
History:
1. Date:
Author:
Modification:
"""
import os.path
import zipfile
import bundleframework as bf
import tracelog
import err_code_mgr
from utility import ftp_action
import plt_const_def
from dba import db_cfg_info
from db_sync_event_manager import DBSyncEventManager
import cmd_code_def
import db_sync_base
import call_oracle_cmd
import db_sync_common_const
class DBSyncExportHandler(bf.CmdHandler):
"""
Class: DBSyncExportHandler
    Description: Handler that processes data-export commands
Base: bf.CmdHandler
Others:
"""
def handle_cmd(self, frame):
"""
Method: handle_cmd
        Description: Process the incoming message
Parameter:
frame: AppFrame
Return:
Others:
"""
req = db_sync_base.SyncFullRequest.deserialize(frame.get_data())
self.req = req
self.db_file_dir = os.path.join(self.get_worker().get_app().get_app_top_path()
, db_sync_common_const.DB_SYNC_DIR)
self.db_file_path = os.path.join(self.db_file_dir
, db_sync_common_const.DB_SYNC_FILE_NAME)
tmp_pwd = req.ftp_pwd
req.ftp_pwd = "******"
tracelog.info("start to export data for sync full. %r" % req)
req.ftp_pwd = tmp_pwd
        # Delete all records in the sync tables (including full-sync notifications)
ret = self.__delete_all_sync_events()
if ret != 0:
self.__send_ack(frame, ret)
return
ret = self.__init_sync_table_data()
if ret != 0:
self.__send_ack(frame, ret)
return
tracelog.info("init sync table data ok.")
        # Export the data
ret = self.__export_data()
if ret != 0:
self.__send_ack(frame, ret)
return
tracelog.info("export data ok.")
ret = self.__compress_data()
if ret != 0:
self.__send_ack(frame, ret)
return
tracelog.info("compress data ok.")
ret = self.__upload_data()
if ret != 0:
self.__send_ack(frame, ret)
return
tracelog.info("upload data ok.")
self.__send_ack(frame, 0)
def __send_ack(self, req_frame, ret, msg=None):
"""
Method: __send_ack
        Description: Send the response message
        Parameter:
            req_frame: the request message
            ret: error code
            msg: error message
Return:
Others:
"""
if msg is None:
msg = err_code_mgr.get_error_msg(ret)
rep = db_sync_base.SyncFullResponse()
rep.ne_id = self.req.ne_id
rep.return_code = ret
rep.description = msg
frame_ack = bf.AppFrame()
frame_ack.prepare_for_ack(req_frame)
frame_ack.add_data(rep.serialize())
frame_ack.set_receiver_pid(plt_const_def.IMC_PID)
frame_ack.set_next_pid(self.get_worker().get_pid("IMCGate"))
self.get_worker().dispatch_frame_to_process_by_pid(plt_const_def.IMC_PID, frame_ack)
#self.get_worker().send_ack(req_frame, )
def __delete_all_sync_events(self):
"""
Method: __delete_all_sync_events
        Description: Delete all of the incremental change notifications in the database
        Parameter: None
        Return: error code
Others:
"""
try:
con_pool = self.get_worker().get_app().get_conn_pool()
with con_pool.get_connection(db_cfg_info.ORACLE_SYNC_CON_NAME) as con:
DBSyncEventManager(con).remove_all_events()
except:
tracelog.exception("delete all sync events failed.")
return err_code_mgr.ER_SYNC_EXPORT_DATA_FAILED
return 0
def __delete_old_sync_table_data(self):
"""
Method: __delete_old_sync_table_data
        Description: Delete the stale data from the sync tables
        Parameter: None
        Return: error code
Others:
"""
try:
con_pool = self.get_worker().get_app().get_conn_pool()
with con_pool.get_connection(db_cfg_info.ORACLE_SYNC_CON_NAME) as con:
with con.get_query() as db_query:
sqls = self.get_worker().get_app().get_moc_data_to_sync_tbl_sqls()
for table_name, sql in sqls:
db_query.execute("truncate table user_sync.%s" % table_name)
except:
tracelog.exception("__delete_old_sync_table_data failed.")
return -1
return 0
def __lock_table(self, db_con):
"""
Method: __lock_table
        Description: Lock the table tbl_OraSyncEvent
        Parameter:
            db_con: Oracle database connection
        Return: error code
        Others: once tbl_OraSyncEvent is locked, other connections cannot modify it
"""
        # Lock the notification table so other processes cannot write to it.
        # Other processes write the notification table whenever they modify a
        # MOC, so this also keeps them from modifying the MOC tables that
        # need to be synchronized.
try:
with db_con.get_query() as db_query:
db_query.execute("LOCK TABLE tbl_OraSyncEvent IN SHARE MODE")
                # Clear the notification table; after a full sync the earlier
                # incremental notifications no longer need to be sent.
db_query.execute("DELETE FROM tbl_OraSyncEvent")
except:
tracelog.exception("lock table tbl_OraSyncEvent failed.")
return -1
return 0
def __commit_tran(self, db_con):
"""
Method: __commit_tran
        Description: Commit the transaction
        Parameter:
            db_con: database connection
        Return: error code
Others:
"""
try:
db_con.commit()
except:
tracelog.exception("commit transaction failed.")
return -1
return 0
def __update_sync_table(self):
"""
Method: __update_sync_table
        Description: Update the data sync tables
        Parameter: None
        Return: error code
Others:
"""
try:
sqls = self.get_worker().get_app().get_moc_data_to_sync_tbl_sqls()
#tables = ["tbl_"+moc.get_moc_name() for moc in mocs.itervalues()]
            # A separate database connection is used here; to keep the insert
            # statements from creating an oversized transaction, we commit
            # after each table.
con_pool = self.get_worker().get_app().get_conn_pool()
with con_pool.get_connection(db_cfg_info.ORACLE_DEFAULT_CON_NAME, False) as db_con:
for table_name, sql in sqls:
#print "===", sql
db_con.begin()
try:
with db_con.get_query() as db_query:
db_query.execute(sql, (self.req.ne_id, ))
db_con.commit()
except:
db_con.rollback()
except:
tracelog.exception("update sync table failed.")
return -1
return 0
def __init_sync_table_data(self):
"""
Method: __init_sync_table_data
        Description: Initialize the data sync tables by copying the MOC data
                     from the user_acp schema into the user_sync schema
        Parameter: None
        Return: error code
Others:
"""
ret = self.__delete_old_sync_table_data()
if ret != 0:
return err_code_mgr.ER_SYNC_EXPORT_DATA_FAILED
con_pool = self.get_worker().get_app().get_conn_pool()
with con_pool.get_connection(db_cfg_info.ORACLE_DEFAULT_CON_NAME, False) as db_con:
db_con.begin()
ret = self.__lock_table(db_con)
if ret != 0:
db_con.rollback()
return err_code_mgr.ER_SYNC_EXPORT_DATA_FAILED
tracelog.info("lock table ok.")
ret = self.__update_sync_table()
if ret != 0:
db_con.rollback()
return err_code_mgr.ER_SYNC_EXPORT_DATA_FAILED
db_con.commit()
return ret
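    # Flow sketch (illustrative summary, not original code): the method above
    # first truncates the user_sync copies, then opens a transaction that
    # locks tbl_OraSyncEvent in SHARE MODE (blocking concurrent MOC writers,
    # which always touch that table) while a second connection copies each
    # MOC table, committing per table to keep individual transactions small.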
def __export_data(self):
"""
Method: __export_data
        Description: Export the data
        Parameter: None
        Return: error code
Others:
"""
mocs = self.get_worker().get_app().get_synchronized_mocs()
tables = ["tbl_"+moc.get_moc_name() for moc in mocs.itervalues()]
if len(tables) == 0:
tracelog.error("No table need to sync.")
return err_code_mgr.ER_SYNC_NO_TABLE_NEED_SYNC
        timeout = len(tables)*300 + 300  # seconds
ret = call_oracle_cmd.call_expdp(tables, self.db_file_dir, timeout)
if ret != 0:
tracelog.error("call_expdp failed. ret:%d" % ret)
return err_code_mgr.ER_SYNC_EXPORT_DATA_FAILED
return 0
def __compress_data(self):
"""
Method: __compress_data
        Description: Compress the data file
        Parameter: None
        Return: error code
Others:
"""
try:
zfile_path = os.path.join(self.db_file_dir
, db_sync_common_const.DB_SYNC_COMPRESSED_FILE_NAME)
            # Remove any stale file that may already exist
if os.path.exists(zfile_path):
os.remove(zfile_path)
with zipfile.ZipFile(zfile_path, 'w', zipfile.ZIP_DEFLATED) as zfile:
zfile.write(self.db_file_path, db_sync_common_const.DB_SYNC_FILE_NAME)
self.db_file_path = zfile_path
except:
tracelog.exception("compress data failed.")
return err_code_mgr.ER_SYNC_EXPORT_DATA_FAILED
return 0
def __upload_data(self):
"""
Method: __upload_data
        Description: Upload the data file
        Parameter: None
        Return: error code
Others:
"""
        # Try up to 3 times
for i in xrange(3):
ret, msg = ftp_action.upload_file(self.req.ftp_ip
, self.req.ftp_port
, self.req.ftp_user
, self.req.ftp_pwd
, self.db_file_path
, self.req.file_path)
if ret == 0:
break
if ret != 0:
tracelog.error("upload file %s to (%s, %d) failed. %s" % (
self.db_file_path
, self.req.ftp_ip
, self.req.ftp_port
, msg))
return ret
| [
"[email protected]"
] | |
d69062af5e4d244aba9c8bd7406683449558f838 | 66f17204c2bc83c9edbde10eda1797752a9e4e37 | /download_data.py | 7df33ed7924489c3e153d839d2ea478aec8cbbee | [
"MIT"
] | permissive | lukassnoek/ICON2017 | a698df9dc583408c8ea9893bc65b18fa1397224e | 4797357978577317c65d8aed06d49aa842a6e653 | refs/heads/master | 2023-08-08T02:07:42.649391 | 2023-08-01T18:50:43 | 2023-08-01T18:50:43 | 95,113,866 | 23 | 6 | null | 2017-08-06T11:44:17 | 2017-06-22T12:35:47 | Jupyter Notebook | UTF-8 | Python | false | false | 1,410 | py | """ This script downloads the data for the ICON2017 MVPA workshop
from Surfdrive (a data storage repository/drive from the
Dutch institute for IT in science/academia) using cURL,
which should be cross-platform. """
from __future__ import print_function
import subprocess
import os
import zipfile
import os.path as op
import platform
cmd = "where" if platform.system() == "Windows" else "which"
with open(os.devnull, 'w') as devnull:
res = subprocess.call([cmd, 'curl'], stdout=devnull)
if res != 0:
raise OSError("The program 'curl' was not found on your computer! "
"Either install it or download the data from surfdrive "
" (link on website)")
this_dir = op.dirname(op.realpath(__file__))
dst_dir = op.join(this_dir, 'data')
if not op.isdir(dst_dir):
os.makedirs(dst_dir)
data_file = 'https://surfdrive.surf.nl/files/index.php/s/Iv5tNOAMZTJ0WiS/download'
dst_file = op.join(dst_dir, 'data.zip')
if not op.isfile(dst_file):
print("Downloading the data ...\n")
cmd = "curl -o %s %s" % (dst_file, data_file)
return_code = subprocess.call(cmd, shell=True)
print("\nDone!")
print("Unzipping ...", end='')
zip_ref = zipfile.ZipFile(dst_file, 'r')
zip_ref.extractall(dst_dir)
zip_ref.close()
print(" done!")
os.remove(dst_file)
else:
print("Data is already downloaded and located at %s" % dst_file)
| [
"[email protected]"
] | |
1610566f544070fb05ac4fa174c60cccc4d70b9d | ae6189642a07fd789f51caadb924328a54919cac | /100-problems/review/minimum-spanning-tree/65-finals.py | e4f3b830d0c3a9a5dadb40a5102023f83f0676f9 | [] | no_license | d-matsui/atcorder | 201e32403653b2fdf0d42188faf095eb8b793b86 | 22ec1af8206827e10a986cb24cf12acc52ab1d6a | refs/heads/master | 2020-09-27T23:15:27.281877 | 2020-09-01T13:24:34 | 2020-09-01T13:24:34 | 226,632,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,289 | py | #!/usr/bin/env python3
from pprint import pprint
from collections import deque, defaultdict
import itertools
import math
import sys
sys.setrecursionlimit(10 ** 6)
INF = float('inf')
N, M, K = map(int, input().split())
edges = []
for _ in range(M):
u, v, w = map(int, input().split())
edges.append([u-1, v-1, w])
def unite(u, v):
root_u = root(u)
root_v = root(v)
if root_u == root_v:
return
# assume size(root_u) >= size(root_v)
if size(root_u) < size(root_v):
root_u, root_v = root_v, root_u
parents[root_u] -= size(root_v)
parents[root_v] = root_u
def has_same_root(u, v):
return root(u) == root(v)
def root(v):
if parents[v] < 0:
return v
parents[v] = root(parents[v])
return parents[v]
def size(v):
return -parents[root(v)]
# Kruskal's algorithm
# 閉路を作らないようにしつつ、辺のコストが小さいものから順に選んでいく
# ある辺 e をグラフに追加したときに閉路を作るかどうかは、Union Find で判定できる
parents = [-1] * N
res = 0
count = 0
for u, v, w in sorted(edges, key=lambda e: e[2]):
if count == N - K:
break
if has_same_root(u, v):
continue
res += w
count += 1
unite(u, v)
print(res)
| [
"[email protected]"
] | |
004c6fef77001f731bfe2d9f676ea0d9f9ab8385 | 2eae961147a9627a2b9c8449fa61cb7292ad4f6a | /test/test_post_business_exchange_rates_business_exchange_rate.py | 22b1621a24d2d74a9197a5506af05ada3ce07a5b | [] | no_license | kgr-eureka/SageOneSDK | 5a57cc6f62ffc571620ec67c79757dcd4e6feca7 | 798e240eb8f4a5718013ab74ec9a0f9f9054399a | refs/heads/master | 2021-02-10T04:04:19.202332 | 2020-03-02T11:11:04 | 2020-03-02T11:11:04 | 244,350,350 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,920 | py | # coding: utf-8
"""
Sage Business Cloud Accounting - Accounts
Documentation of the Sage Business Cloud Accounting API. # noqa: E501
The version of the OpenAPI document: 3.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import openapi_client
from openapi_client.models.post_business_exchange_rates_business_exchange_rate import PostBusinessExchangeRatesBusinessExchangeRate # noqa: E501
from openapi_client.rest import ApiException
class TestPostBusinessExchangeRatesBusinessExchangeRate(unittest.TestCase):
"""PostBusinessExchangeRatesBusinessExchangeRate unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test PostBusinessExchangeRatesBusinessExchangeRate
            include_optional is a boolean; when False only required
params are included, when True both required and
optional params are included """
# model = openapi_client.models.post_business_exchange_rates_business_exchange_rate.PostBusinessExchangeRatesBusinessExchangeRate() # noqa: E501
if include_optional :
return PostBusinessExchangeRatesBusinessExchangeRate(
currency_id = '0',
rate = 1.337,
inverse_rate = 1.337,
base_currency_id = '0'
)
else :
return PostBusinessExchangeRatesBusinessExchangeRate(
currency_id = '0',
rate = 1.337,
)
def testPostBusinessExchangeRatesBusinessExchangeRate(self):
"""Test PostBusinessExchangeRatesBusinessExchangeRate"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
08ac7056e18731742d5bd1cc2e2715f239731609 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2351/60636/244985.py | 6927ce260bfd2b8f832d5b9567ff1180a363ec44 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,073 | py | n=int(input())
sources=[]
for i in range(n-1):
x=input().split(" ")
source=[]
for a in x:
source.append(int(a))
sources.append(source)
number=[]
for i in range(1,n+1):
delete=sources.copy()
for y in sources:
if(i in y):
delete.pop(delete.index(y))
all_length=[]
while(len(delete)!=0):
all=[]
all.append(delete[0][0])
all.append(delete[0][1])
delete.pop(0)
if(len(delete)==1):
all_length.append(1)
break
else:
save=delete.copy()
for i in delete:
if((i[0] in all)|(i[1] in all)):
if(not i[0] in all):
all.append(i[0])
elif(not i[1] in all):
all.append(i[1])
save.pop(save.index(i))
delete=save
all_length.append(len(all))
number.append(max(all_length))
res=""
for i in range(len(number)):
if(number[i]==min(number)):
res=res+str(i+1)+" "
print(res,end="") | [
"[email protected]"
] | |
3b60a0ef31e458ff25833ae08e6a1a19c405cd80 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02554/s906353100.py | 63f22ea43ad73184db4b8730747a390cd77a1b73 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 72 | py | N = int(input())
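# Count length-N digit strings that contain both a 0 and a 9 by
# inclusion-exclusion: all 10**N strings, minus the 9**N with no 0 and the
# 9**N with no 9, plus the 8**N with neither added back.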
ans = 10**N - 2*(9**N) + 8**N
print(ans % 1000000007)
| [
"[email protected]"
] | |
7bfd897fff357e2cc168ed67c397530fca7c3742 | d41d18d3ea6edd2ec478b500386375a8693f1392 | /plotly/validators/choropleth/colorbar/_titleside.py | 3de81a3fa5674ea7244e6816a53d2b3a7e0e4379 | [
"MIT"
] | permissive | miladrux/plotly.py | 38921dd6618650d03be9891d6078e771ffccc99a | dbb79e43e2cc6c5762251537d24bad1dab930fff | refs/heads/master | 2020-03-27T01:46:57.497871 | 2018-08-20T22:37:38 | 2018-08-20T22:37:38 | 145,742,203 | 1 | 0 | MIT | 2018-08-22T17:37:07 | 2018-08-22T17:37:07 | null | UTF-8 | Python | false | false | 510 | py | import _plotly_utils.basevalidators
class TitlesideValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self,
plotly_name='titleside',
parent_name='choropleth.colorbar',
**kwargs
):
super(TitlesideValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='colorbars',
role='style',
values=['right', 'top', 'bottom'],
**kwargs
)
| [
"[email protected]"
] | |
c2c73c9559c6f08d54ca4743125ef3d87c63a6f0 | a6ca86b1dce35a9a59405b29e1f97e13bf1c7fc1 | /tests/src/TestSuites/FunctionalTestSuite/Run_SchoolInfraReport.py | 66f303df061a96feb40930453863ac3e89c44158 | [
"MIT"
] | permissive | htvenkatesh/cQube | 44a0bfd9a281b07992f17c2d885392e62b3d0a83 | c6e91d58fef5084b8b5962f313cd49f0400d7bfa | refs/heads/master | 2022-12-04T02:40:54.425582 | 2020-08-21T07:21:47 | 2020-08-21T07:21:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,756 | py | import time
from get_dir import pwd
from CRC import crc_report_functional_testing
from SI.MAP import School_Map_functional_testing
from SI.Report import School_report_functional_testing
import unittest
from HTMLTestRunner import HTMLTestRunner
from reuse_func import GetData
class MyTestSuite(unittest.TestCase):
@classmethod
def setUpClass(self):
self.data = GetData()
self.logger = self.data.get_functional_log("schoolinfrareport")
self.driver = self.data.get_driver()
self.data.open_cqube_appln(self.driver)
self.data.login_cqube(self.driver)
def test_Issue(self):
self.data.navigate_to_school_infrastructure()
time.sleep(2)
self.errMsg = self.data.get_data_status()
if self.errMsg.text == 'No data found':
print("No data in the school infrastructure report page")
else:
self.logger.info("school infra report execution started")
functional_test = unittest.TestSuite()
functional_test.addTests([
# file name .class name
unittest.defaultTestLoader.loadTestsFromTestCase(School_report_functional_testing.cQube_SI_Report)
])
            p = pwd()
outfile = open(p.get_functional_report_path(), "a")
runner1 = HTMLTestRunner.HTMLTestRunner(
stream=outfile,
title='School Infra Report Functional Test Report',
verbosity=1,
)
runner1.run(functional_test)
outfile.close()
self.logger.info("school infra report execution ended")
@classmethod
def tearDownClass(self):
self.driver.close()
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
] | |
e825711d2ef3481e8e03c6fa417bda5c8e25bd25 | f1c6ccd9431b459a78b49e5a9d472a2f69de2c80 | /appdaemon/settings/apps/systems.py | 166bd6fb5842c3d765ab8e2875c0b477b152a35c | [] | no_license | vipk31/smart-home | 36fc1a726941ebf70d69104ce22881e281511713 | e1319faca579eebec0478e6b04f7f612571ccdb0 | refs/heads/master | 2020-07-15T11:25:06.126301 | 2019-08-31T06:03:22 | 2019-08-31T06:05:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,869 | py | """Define automations for various home systems."""
from typing import Callable, List, Optional, Union
import requests
import voluptuous as vol
from core import APP_SCHEMA, Base
from const import (
CONF_DURATION,
CONF_ENTITY_ID,
CONF_ENTITY_IDS,
CONF_NOTIFICATION_INTERVAL,
CONF_NOTIFICATION_TARGET,
CONF_PROPERTIES,
CONF_STATE,
)
from helpers import config_validation as cv
from notification import send_notification
CONF_AARON_ROUTER_TRACKER = "aaron_router_tracker"
CONF_BATTERIES_TO_MONITOR = "batteries_to_monitor"
CONF_BATTERY_LEVEL_THRESHOLD = "battery_level_threshold"
CONF_EXPIRY_THRESHOLD = "expiry_threshold"
CONF_PI_HOLE_ACTIVE_SENSOR = "pi_hole_active_sensor"
CONF_PI_HOLE_API_KEY = "pi_hole_api_key"
CONF_PI_HOLE_HOSTS = "pi_hole_hosts"
CONF_PI_HOLE_OFF_EVENT = "pi_hole_off_event"
CONF_PI_HOLE_ON_EVENT = "pi_hole_on_event"
HANDLE_BATTERY_LOW = "battery_low"
class AaronAccountability(Base):
"""Define features to keep me accountable on my phone."""
APP_SCHEMA = APP_SCHEMA.extend(
{
CONF_ENTITY_IDS: vol.Schema(
{vol.Required(CONF_AARON_ROUTER_TRACKER): cv.entity_id},
extra=vol.ALLOW_EXTRA,
)
}
)
def configure(self) -> None:
"""Configure."""
self.listen_state(
self._on_disconnect,
self.entity_ids[CONF_AARON_ROUTER_TRACKER],
new="not_home",
constrain_in_blackout=True,
constrain_anyone="home",
)
@property
def router_tracker_state(self) -> str:
"""Return the state of Aaron's Unifi tracker."""
return self.get_state(self.entity_ids[CONF_AARON_ROUTER_TRACKER])
def _on_disconnect(
self, entity: Union[str, dict], attribute: str, old: str, new: str, kwargs: dict
) -> None:
"""Send a notification when I disconnect during a blackout."""
self._send_notification()
def _send_notification(self) -> None:
"""Send notification to my love."""
send_notification(
self,
"ios_brittany_bachs_iphone",
"His phone shouldn't be off wifi during the night.",
title="Check on Aaron",
)
class LowBatteries(Base): # pylint: disable=too-few-public-methods
"""Define a feature to notify us of low batteries."""
APP_SCHEMA = APP_SCHEMA.extend(
{
CONF_ENTITY_IDS: vol.Schema(
{vol.Required(CONF_BATTERIES_TO_MONITOR): cv.ensure_list},
extra=vol.ALLOW_EXTRA,
),
CONF_PROPERTIES: vol.Schema(
{
vol.Required(CONF_BATTERY_LEVEL_THRESHOLD): int,
vol.Required(CONF_NOTIFICATION_INTERVAL): int,
},
extra=vol.ALLOW_EXTRA,
),
}
)
def configure(self) -> None:
"""Configure."""
self._registered = [] # type: List[str]
self._send_notification_func = None # type: Optional[Callable]
for entity in self.entity_ids[CONF_BATTERIES_TO_MONITOR]:
if entity.split(".")[0] == "binary_sensor":
self.listen_state(
self._on_battery_change, entity, new="on", attribute="all"
)
else:
self.listen_state(self._on_battery_change, entity, attribute="all")
def _on_battery_change(
self,
entity: Union[str, dict],
attribute: str,
old: str,
new: dict,
kwargs: dict,
) -> None:
"""Create OmniFocus todos whenever there's a low battery."""
name = new["attributes"]["friendly_name"]
try:
value = int(new["state"])
except ValueError:
# If the sensor value can't be parsed as an integer, it is either a binary
# battery sensor or the sensor is unavailable. The former should continue
# on; the latter should stop immediately:
if new["state"] != "on":
return
value = 0
notification_handle = "{0}_{1}".format(HANDLE_BATTERY_LOW, name)
def _send_notification():
"""Send the notification."""
self.handles[notification_handle] = send_notification(
self,
"slack",
"{0} has low batteries ({1}%). Replace them ASAP!".format(name, value),
when=self.datetime(),
interval=self.properties[CONF_NOTIFICATION_INTERVAL],
)
if value < self.properties[CONF_BATTERY_LEVEL_THRESHOLD]:
# If we've already registered that the battery is low, don't repeatedly
# register it:
if name in self._registered:
return
self.log("Low battery detected: {0}".format(name))
self._registered.append(name)
# If the automation is enabled when a battery is low, send a notification;
# if not, remember that we should send the notification when the automation
# becomes enabled:
if self.enabled:
_send_notification()
else:
self._send_notification_func = _send_notification
else:
try:
self._registered.remove(name)
self._send_notification_func = None
if notification_handle in self.handles:
cancel = self.handles.pop(notification_handle)
cancel()
except ValueError:
return
def on_enable(self) -> None:
"""Send the notification once the automation is enabled (if appropriate)."""
if self._send_notification_func:
self._send_notification_func()
self._send_notification_func = None
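# A hedged sketch of how this app might be configured: AppDaemon apps are driven
# by YAML, and APP_SCHEMA above expects roughly the shape below. Every entity ID
# and value here is hypothetical, not taken from this repository.
#
#     low_batteries:
#       module: systems
#       class: LowBatteries
#       entity_ids:
#         batteries_to_monitor:
#           - sensor.front_door_lock_battery
#           - binary_sensor.smoke_detector_battery_low
#       properties:
#         battery_level_threshold: 20
#         notification_interval: 3600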
class LeftInState(Base): # pylint: disable=too-few-public-methods
"""Define a feature to monitor whether an entity is left in a state."""
APP_SCHEMA = APP_SCHEMA.extend(
{
CONF_ENTITY_IDS: vol.Schema(
{vol.Required(CONF_ENTITY_ID): cv.entity_id}, extra=vol.ALLOW_EXTRA
),
CONF_PROPERTIES: vol.Schema(
{vol.Required(CONF_DURATION): int, vol.Required(CONF_STATE): str},
extra=vol.ALLOW_EXTRA,
),
}
)
def configure(self) -> None:
"""Configure."""
self._send_notification_func = None # type: Optional[Callable]
self.listen_state(
self._on_limit,
self.entity_ids[CONF_ENTITY_ID],
new=self.properties[CONF_STATE],
duration=self.properties[CONF_DURATION],
)
def _on_limit(
self, entity: Union[str, dict], attribute: str, old: str, new: str, kwargs: dict
) -> None:
"""Notify when the threshold is reached."""
def _turn_off() -> None:
"""Turn the entity off."""
self.turn_off(self.entity_ids[CONF_ENTITY_ID])
def _send_notification() -> None:
"""Send a notification."""
message = "The {0} has been left {1} for {2} minutes".format(
self.get_state(
self.entity_ids[CONF_ENTITY_ID], attribute="friendly_name"
),
self.properties[CONF_STATE],
                int(self.properties[CONF_DURATION]) // 60,
)
send_notification(self, self.properties[CONF_NOTIFICATION_TARGET], message)
        # If the automation is enabled when the limit is reached, send a
        # notification; if not, remember that we should send the notification
        # when the automation becomes enabled:
if self.enabled:
_send_notification()
else:
self._send_notification_func = _send_notification
def on_enable(self) -> None:
"""Send the notification once the automation is enabled (if appropriate)."""
if self._send_notification_func:
self._send_notification_func()
self._send_notification_func = None
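# A similar hypothetical YAML entry for LeftInState (all values illustrative;
# notification_target is read from properties even though the schema only
# allows it as an extra key rather than requiring it):
#
#     garage_left_open:
#       module: systems
#       class: LeftInState
#       entity_ids:
#         entity_id: cover.garage_door
#       properties:
#         duration: 600
#         state: open
#         notification_target: slack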
class PiHoleSwitch(Base): # pylint: disable=too-few-public-methods
"""Define a switch to turn on/off all Pi-hole instances."""
APP_SCHEMA = APP_SCHEMA.extend(
{
CONF_ENTITY_IDS: vol.Schema(
{vol.Required(CONF_PI_HOLE_ACTIVE_SENSOR): cv.entity_id},
extra=vol.ALLOW_EXTRA,
),
CONF_PROPERTIES: vol.Schema(
{
vol.Required(CONF_PI_HOLE_API_KEY): str,
vol.Required(CONF_PI_HOLE_HOSTS): list,
vol.Required(CONF_PI_HOLE_OFF_EVENT): str,
vol.Required(CONF_PI_HOLE_ON_EVENT): str,
},
extra=vol.ALLOW_EXTRA,
),
}
)
def configure(self) -> None:
"""Configure."""
if self._is_enabled:
self._set_dummy_sensor("on")
else:
self._set_dummy_sensor("off")
self.listen_event(self._on_switch_off, self.properties[CONF_PI_HOLE_OFF_EVENT])
self.listen_event(self._on_switch_on, self.properties[CONF_PI_HOLE_ON_EVENT])
@property
def _is_enabled(self) -> bool:
"""Return whether any Pi-hole hosts are enabled."""
for host in self.properties[CONF_PI_HOLE_HOSTS]:
resp = self._request(host, "status")
status = resp.json()["status"]
if status == "enabled":
return True
return False
def _on_switch_off(self, event_name: str, data: dict, kwargs: dict) -> None:
"""Respond to the switch being turned off."""
self.disable_pi_hole()
def _on_switch_on(self, event_name: str, data: dict, kwargs: dict) -> None:
"""Respond to the switch being turned on."""
self.enable_pi_hole()
def _request(self, host: str, endpoint: str) -> requests.Response:
"""Send an HTTP request to Pi-hole."""
return requests.get(
"http://{0}/admin/api.php?{1}".format(host, endpoint),
params={"auth": self.properties[CONF_PI_HOLE_API_KEY]},
)
def _set_dummy_sensor(self, state: str) -> None:
"""Set the state of off a dummy sensor which informs the switch's state."""
self.set_state(
self.entity_ids[CONF_PI_HOLE_ACTIVE_SENSOR],
state=state,
attributes={"friendly_name": "Pi-hole"},
)
def disable_pi_hole(self) -> None:
"""Disable Pi-hole."""
self._set_dummy_sensor("off")
for host in self.properties[CONF_PI_HOLE_HOSTS]:
self._request(host, "disable")
def enable_pi_hole(self) -> None:
"""Enable Pi-hole."""
self._set_dummy_sensor("on")
for host in self.properties[CONF_PI_HOLE_HOSTS]:
self._request(host, "enable")
class StartHomeKitOnZwaveReady(Base):
"""Define a feature to start HomeKit when the Z-Wave network is ready."""
def configure(self) -> None:
"""Configure."""
self._on_scan({})
@property
def network_ready(self) -> bool:
"""Return whether the Z-Wave network is ready."""
zwave_devices = [
v
for k, v in self.get_state("zwave").items()
if k not in self.entity_ids["to_exclude"]
]
for attrs in zwave_devices:
try:
if attrs["state"] != "ready":
return False
except TypeError:
return False
return True
def _on_scan(self, kwargs: dict) -> None:
"""Start the _on_scanning process."""
if self.network_ready:
self.log("Z-Wave network is ready for HomeKit to start")
self.call_service("homekit/start")
return
self.run_in(self._on_scan, 60)
| [
"[email protected]"
] | |
c81d93a49136768a898af81f73879a9dc37451d7 | f2ec1298c00d813c7e973cac22184ea8f54eb60c | /MxShop/apps/goods/migrations/0009_auto_20190526_1746.py | 3161358484d5d2ccc8825db304559f7006c8be63 | [] | no_license | llf-1996/mx_drf | fcfaa028630eeb02be91af5e30fb2a200037400c | f4878c0d9857e7af7277d10cc32da5d9c522de0c | refs/heads/master | 2020-06-05T06:51:49.629343 | 2019-06-17T13:20:29 | 2019-06-17T13:20:29 | 192,350,339 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 410 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-05-26 17:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('goods', '0008_auto_20170826_1201'),
]
operations = [
migrations.AlterModelTable(
name='goodscategorybrand',
table='goods_goodsbrand',
),
]
| [
"[email protected]"
] | |
32eb948acc27bdfecd863a46de5660de37ba0f63 | 29da2ca6def1270be13a3096685a8e5d82828dff | /CIM14/IEC61970/Wires/RatioVariationCurve.py | e9599a428e11145a2c9f6036061938d3a0e6d732 | [
"MIT"
] | permissive | rimbendhaou/PyCIM | 75eb3bcd3729b2410c03f3d5c66d6f1e05e21df3 | d578bb0bf1af344342bd23344385ed9c06c2d0ee | refs/heads/master | 2022-04-28T01:16:12.673867 | 2020-04-16T02:19:09 | 2020-04-16T02:19:09 | 256,085,381 | 0 | 0 | MIT | 2020-04-16T02:15:20 | 2020-04-16T02:08:14 | null | UTF-8 | Python | false | false | 2,481 | py | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.Core.Curve import Curve
class RatioVariationCurve(Curve):
"""A Ratio Variation Curve describes the change in tap ratio in relationship to tap step changes. The tap step is represented using the xValue and the ratio using y1value.
"""
def __init__(self, RatioTapChanger=None, *args, **kw_args):
"""Initialises a new 'RatioVariationCurve' instance.
@param RatioTapChanger: A RatioVariationCurve defines tap ratio changes for a RatioTapChanger.
"""
self._RatioTapChanger = None
self.RatioTapChanger = RatioTapChanger
super(RatioVariationCurve, self).__init__(*args, **kw_args)
_attrs = []
_attr_types = {}
_defaults = {}
_enums = {}
_refs = ["RatioTapChanger"]
_many_refs = []
def getRatioTapChanger(self):
"""A RatioVariationCurve defines tap ratio changes for a RatioTapChanger.
"""
return self._RatioTapChanger
def setRatioTapChanger(self, value):
if self._RatioTapChanger is not None:
self._RatioTapChanger._RatioVariationCurve = None
self._RatioTapChanger = value
if self._RatioTapChanger is not None:
self._RatioTapChanger.RatioVariationCurve = None
self._RatioTapChanger._RatioVariationCurve = self
RatioTapChanger = property(getRatioTapChanger, setRatioTapChanger)
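# A minimal usage sketch (the import path for RatioTapChanger is assumed, and
# this code is illustrative only). Assigning the association from this side
# also updates the back-reference, as implemented in the setter above:
#
#     from CIM14.IEC61970.Wires.RatioTapChanger import RatioTapChanger
#     curve = RatioVariationCurve()
#     curve.RatioTapChanger = RatioTapChanger()
#     assert curve.RatioTapChanger.RatioVariationCurve is curve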
| [
"[email protected]"
] | |
ac8cb4d10f03cfcf56bfca6e1eed5550f4a572df | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/3031.py | dcc2655c88b9b096dc4272fff8c76abbbbc62b74 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 26 | py | ii = [('FitzRNS3.py', 31)] | [
"[email protected]"
] | |
4d7e6d9123896e99a21eae4a42f8c64c3c09f2fb | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/6/pg0.py | 6dce41ec2e35de1734a93c08f6cc1f9885939979 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'pG0':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
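# Example input file (a hypothetical 'hello.pg0'). The quote marks must be
# standalone tokens for the check in printFunction to match:
#
#     pG0 " Hello, World! "
#
# Running `python pg0.py hello.pg0` would then print: Hello, World!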
"[email protected]"
] | |
33e488832c4747e6af26ad44f680379afbfb7ddf | 3280f0c0c41e157bff2d4ad420c1b871402d6f95 | /utils/cmfish/cmfish/middlewares.py | 4dd368481be51d12184948c37537d4409716b44e | [] | no_license | sui84/pytest | 5efae6843556c541bcffed1ff4c4a38d598bca85 | 696f05ec40adb78e9a4bd7fab1eef6d324cbb9bb | refs/heads/master | 2021-01-21T15:00:28.604917 | 2019-08-31T05:22:10 | 2019-08-31T05:22:10 | 95,368,315 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,904 | py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class CmfishSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
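# To activate this middleware, a settings.py entry like the following is
# assumed (543 is the order value used by Scrapy's default project template):
#
#     SPIDER_MIDDLEWARES = {
#         'cmfish.middlewares.CmfishSpiderMiddleware': 543,
#     }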
| [
"[email protected]"
] | |
d0eda4330f1ac653701045a4fc49b166fbca0fca | d6cf604d393a22fc5e071a0d045a4fadcaf128a6 | /Enterprise/Marine/2020/2020_D.py | 7e514768e74cacdb1d7271f0361b50aa5e7e7e78 | [] | no_license | shikixyx/AtCoder | bb400dfafd3745c95720b9009881e07bf6b3c2b6 | 7e402fa82a96bc69ce04b9b7884cb9a9069568c7 | refs/heads/master | 2021-08-03T21:06:45.224547 | 2021-07-24T11:58:02 | 2021-07-24T11:58:02 | 229,020,968 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | import sys
import numpy as np
from collections import defaultdict
sys.setrecursionlimit(10 ** 7)
read = sys.stdin.buffer.read
readline = sys.stdin.buffer.readline
readlines = sys.stdin.buffer.readlines
# Naive solution (brute force)
# TLE: too slow for the full constraints
N = int(readline())
VW = [0] + [list(map(int, readline().split())) for _ in range(N)]
Q = int(readline())
ul = [list(map(int, readline().split())) for _ in range(Q)]
ul_s = sorted(ul, reverse=True)
cache = [None] * (N + 1)
S = 10 ** 5 + 10
TABLE = defaultdict(int)
def solve(q, l):
q = q[::-1]
mi = -1
for i in range(len(q)):
x = q[i]
        if isinstance(cache[x], np.ndarray):
mi = i
else:
break
x = q[mi]
dp = cache[x] if mi != -1 else np.zeros(S, dtype=np.int64)
q = q[mi + 1 :] if mi != -1 else q
ans = calc(q, l, dp)
return ans
def calc(q, l, dp):
for u in q:
v, w = VW[u]
tmp = np.zeros(S, dtype=np.int64)
tmp[w:] = dp[:-w] + v
dp = np.maximum(dp, tmp)
# cache[u] = np.copy(dp)
cache[u] = dp
return max(dp[: l + 1])
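# A self-contained sketch of the vectorized knapsack transition used in calc():
# the line tmp[w:] = dp[:-w] + v realizes dp_new[c] = max(dp[c], dp[c - w] + v)
# for every capacity c at once. The item values below are illustrative only.
def _knapsack_step_demo():
    dp = np.zeros(8, dtype=np.int64)
    for v, w in [(3, 2), (5, 3)]:
        tmp = np.zeros_like(dp)
        tmp[w:] = dp[:-w] + v  # take the item: shift dp by weight w, add value v
        dp = np.maximum(dp, tmp)  # elementwise best of "skip" vs. "take"
    return dp  # dp[c] = best value achievable with total weight <= c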
ans = []
for u, l in ul_s:
q = []
v = u
while v != 1:
q.append(v)
v //= 2
q.append(1)
t = solve(q, l)
TABLE[(u, l)] = t
for u, l in ul:
ans.append(TABLE[(u, l)])
print("\n".join(map(str, ans)))
| [
"[email protected]"
] | |
0346e52eb44f0345bff729be90cefcd608a480f3 | c46754b9600a12df4f9d7a6320dfc19aa96b1e1d | /src/transformers/models/groupvit/__init__.py | d0de4a00bd15005fe974f7240b9bc6c940f5b789 | [
"Apache-2.0"
] | permissive | huggingface/transformers | ccd52a0d7c59e5f13205f32fd96f55743ebc8814 | 4fa0aff21ee083d0197a898cdf17ff476fae2ac3 | refs/heads/main | 2023-09-05T19:47:38.981127 | 2023-09-05T19:21:33 | 2023-09-05T19:21:33 | 155,220,641 | 102,193 | 22,284 | Apache-2.0 | 2023-09-14T20:44:49 | 2018-10-29T13:56:00 | Python | UTF-8 | Python | false | false | 2,875 | py | # Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tf_available, is_torch_available
_import_structure = {
"configuration_groupvit": [
"GROUPVIT_PRETRAINED_CONFIG_ARCHIVE_MAP",
"GroupViTConfig",
"GroupViTOnnxConfig",
"GroupViTTextConfig",
"GroupViTVisionConfig",
],
}
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_groupvit"] = [
"GROUPVIT_PRETRAINED_MODEL_ARCHIVE_LIST",
"GroupViTModel",
"GroupViTPreTrainedModel",
"GroupViTTextModel",
"GroupViTVisionModel",
]
try:
if not is_tf_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_groupvit"] = [
"TF_GROUPVIT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFGroupViTModel",
"TFGroupViTPreTrainedModel",
"TFGroupViTTextModel",
"TFGroupViTVisionModel",
]
if TYPE_CHECKING:
from .configuration_groupvit import (
GROUPVIT_PRETRAINED_CONFIG_ARCHIVE_MAP,
GroupViTConfig,
GroupViTOnnxConfig,
GroupViTTextConfig,
GroupViTVisionConfig,
)
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_groupvit import (
GROUPVIT_PRETRAINED_MODEL_ARCHIVE_LIST,
GroupViTModel,
GroupViTPreTrainedModel,
GroupViTTextModel,
GroupViTVisionModel,
)
try:
if not is_tf_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_tf_groupvit import (
TF_GROUPVIT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFGroupViTModel,
TFGroupViTPreTrainedModel,
TFGroupViTTextModel,
TFGroupViTVisionModel,
)
else:
import sys
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
| [
"[email protected]"
] | |
0f7692a5b74a0bcdc450d391708db3d19733b7c8 | 0db97db08743783019efe022190f409d22ff95bd | /aliyun/api/rest/Rds20140815DescribeSQLLogReportListRequest.py | ade9beff607028d9b0aa607aa956e0d2735bff32 | [
"Apache-2.0"
] | permissive | snowyxx/aliyun-python-demo | 8052e2a165f1b869affe632dda484d6ca203bd9b | ed40887ddff440b85b77f9b2a1fcda11cca55c8b | refs/heads/master | 2021-01-10T03:37:31.657793 | 2016-01-21T02:03:14 | 2016-01-21T02:03:14 | 49,921,095 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | '''
Created by auto_sdk on 2015.08.07
'''
from aliyun.api.base import RestApi
class Rds20140815DescribeSQLLogReportListRequest(RestApi):
def __init__(self,domain='rds.aliyuncs.com',port=80):
RestApi.__init__(self,domain, port)
self.DBInstanceId = None
self.EndTime = None
self.Page = None
self.PageNumbers = None
self.StartTime = None
def getapiname(self):
return 'rds.aliyuncs.com.DescribeSQLLogReportList.2014-08-15'
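    # Hypothetical usage sketch; getResponse() is assumed to be provided by the
    # RestApi base class of this SDK and is not defined in this file:
    #
    #     req = Rds20140815DescribeSQLLogReportListRequest()
    #     req.DBInstanceId = 'rm-xxxxxxxx'
    #     req.StartTime = '2015-08-01T00:00Z'
    #     req.EndTime = '2015-08-07T00:00Z'
    #     result = req.getResponse()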
| [
"[email protected]"
] | |
dfdef595c1fdf2280519df6b76a1384d3bfc50b8 | 61747f324eaa757f3365fd7bf5ddd53ea0db47d1 | /casepro/cases/migrations/0041_populate_partner_users.py | e14bef94e0e4ce2732c88f32738b188cfc2b350d | [
"BSD-3-Clause"
] | permissive | BlueRidgeLabs/casepro | f8b0eefa8f961dd2fdb5da26a48b619ebc1f8c12 | 8ef509326f3dfa80bb44beae00b60cc6c4ac7a24 | refs/heads/master | 2022-01-24T09:01:18.881548 | 2017-12-05T18:46:05 | 2017-12-05T18:49:42 | 113,502,588 | 0 | 0 | null | 2017-12-07T21:57:37 | 2017-12-07T21:57:37 | null | UTF-8 | Python | false | false | 760 | py | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
from django.db import migrations, models
def populate_partner_users(apps, schema_editor):
User = apps.get_model('auth', 'User')
for user in User.objects.exclude(profile__partner=None):
partner = user.profile.partner
org = partner.org
if user in org.editors.all() or user in org.viewers.all():
partner.users.add(user)
else:
print("User %s no longer has permissions in org %s to be a partner user" % (user.email, org.name))
class Migration(migrations.Migration):
dependencies = [
('cases', '0040_partner_users'),
]
operations = [
migrations.RunPython(populate_partner_users)
]
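    # Note: as written, this data migration is irreversible. A no-op reverse,
    # e.g. migrations.RunPython(populate_partner_users, migrations.RunPython.noop),
    # could be supplied if unapplying ever needs to be allowed (a sketch only,
    # not part of the original migration).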
| [
"[email protected]"
] |